about summary refs log tree commit diff
path: root/go/tools/builders
diff options
context:
space:
mode:
Diffstat (limited to 'go/tools/builders')
-rw-r--r--go/tools/builders/BUILD.bazel173
-rw-r--r--go/tools/builders/ar.go104
-rw-r--r--go/tools/builders/asm.go138
-rw-r--r--go/tools/builders/builder.go64
-rw-r--r--go/tools/builders/cgo2.go397
-rw-r--r--go/tools/builders/compilepkg.go615
-rw-r--r--go/tools/builders/cover.go110
-rw-r--r--go/tools/builders/cover_test.go130
-rw-r--r--go/tools/builders/edit.go95
-rw-r--r--go/tools/builders/embed.go340
-rw-r--r--go/tools/builders/embedcfg.go439
-rw-r--r--go/tools/builders/env.go474
-rw-r--r--go/tools/builders/filter.go168
-rw-r--r--go/tools/builders/filter_buildid.go44
-rw-r--r--go/tools/builders/filter_test.go136
-rw-r--r--go/tools/builders/flags.go135
-rw-r--r--go/tools/builders/generate_nogo_main.go196
-rw-r--r--go/tools/builders/generate_test_main.go416
-rw-r--r--go/tools/builders/go_path.go203
-rw-r--r--go/tools/builders/importcfg.go261
-rw-r--r--go/tools/builders/info.go64
-rw-r--r--go/tools/builders/link.go163
-rw-r--r--go/tools/builders/md5sum.go89
-rw-r--r--go/tools/builders/nogo_main.go654
-rw-r--r--go/tools/builders/nogo_typeparams_go117.go23
-rw-r--r--go/tools/builders/nogo_typeparams_go118.go28
-rw-r--r--go/tools/builders/nolint.go39
-rw-r--r--go/tools/builders/nolint_test.go79
-rw-r--r--go/tools/builders/pack.go388
-rw-r--r--go/tools/builders/path.go7
-rw-r--r--go/tools/builders/path_windows.go25
-rw-r--r--go/tools/builders/protoc.go219
-rw-r--r--go/tools/builders/read.go551
-rw-r--r--go/tools/builders/replicate.go167
-rw-r--r--go/tools/builders/stdlib.go169
-rw-r--r--go/tools/builders/stdliblist.go293
-rw-r--r--go/tools/builders/stdliblist_test.go48
37 files changed, 7644 insertions, 0 deletions
diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel
new file mode 100644
index 00000000..28724714
--- /dev/null
+++ b/go/tools/builders/BUILD.bazel
@@ -0,0 +1,173 @@
+load("//go:def.bzl", "go_binary", "go_source", "go_test")
+load("//go/private/rules:transition.bzl", "go_reset_target")
+
+go_test(
+ name = "filter_test",
+ size = "small",
+ srcs = [
+ "filter.go",
+ "filter_test.go",
+ "read.go",
+ ],
+)
+
+go_test(
+ name = "cover_test",
+ size = "small",
+ srcs = [
+ "cover.go",
+ "cover_test.go",
+ "edit.go",
+ "env.go",
+ "flags.go",
+ ],
+)
+
+go_test(
+ name = "stdliblist_test",
+ size = "small",
+ srcs = [
+ "env.go",
+ "flags.go",
+ "replicate.go",
+ "stdliblist.go",
+ "stdliblist_test.go",
+ ],
+ data = ["@go_sdk//:files"],
+ rundir = ".",
+)
+
+go_test(
+ name = "nolint_test",
+ size = "small",
+ srcs = [
+ "nolint.go",
+ "nolint_test.go",
+ ],
+)
+
+filegroup(
+ name = "builder_srcs",
+ srcs = [
+ "ar.go",
+ "asm.go",
+ "builder.go",
+ "cgo2.go",
+ "compilepkg.go",
+ "cover.go",
+ "edit.go",
+ "embedcfg.go",
+ "env.go",
+ "filter.go",
+ "filter_buildid.go",
+ "flags.go",
+ "generate_nogo_main.go",
+ "generate_test_main.go",
+ "importcfg.go",
+ "link.go",
+ "pack.go",
+ "read.go",
+ "replicate.go",
+ "stdlib.go",
+ "stdliblist.go",
+ ] + select({
+ "@bazel_tools//src/conditions:windows": ["path_windows.go"],
+ "//conditions:default": ["path.go"],
+ }),
+ visibility = ["//visibility:public"],
+)
+
+go_binary(
+ name = "embed",
+ srcs = ["embed.go"],
+ visibility = ["//visibility:public"],
+)
+
+go_source(
+ name = "nogo_srcs",
+ srcs = [
+ "env.go",
+ "flags.go",
+ "nogo_main.go",
+ "nogo_typeparams_go117.go",
+ "nogo_typeparams_go118.go",
+ "nolint.go",
+ "pack.go",
+ ],
+ # //go/tools/builders:nogo_srcs is considered a different target by
+ # Bazel's visibility check than
+ # @io_bazel_rules_go//go/tools/builders:nogo_srcs. Only the latter is
+ # allowed to depend on
+ # @org_golang_x_tools//go/analysis/internal/facts:go_tool_library.
+ tags = ["manual"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "@org_golang_x_tools//go/analysis",
+ "@org_golang_x_tools//go/gcexportdata",
+ "@org_golang_x_tools//internal/facts",
+ ],
+)
+
+go_binary(
+ name = "go_path-bin",
+ srcs = [
+ "env.go",
+ "flags.go",
+ "go_path.go",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+go_reset_target(
+ name = "go_path",
+ dep = ":go_path-bin",
+ visibility = ["//visibility:public"],
+)
+
+go_binary(
+ name = "info",
+ srcs = [
+ "env.go",
+ "flags.go",
+ "info.go",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+go_binary(
+ name = "md5sum",
+ srcs = [
+ "md5sum.go",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+go_binary(
+ name = "go-protoc-bin",
+ srcs = [
+ "env.go",
+ "flags.go",
+ "protoc.go",
+ ],
+ visibility = ["//visibility:private"],
+)
+
+go_reset_target(
+ name = "go-protoc",
+ dep = ":go-protoc-bin",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "all_builder_srcs",
+ testonly = True,
+ srcs = glob(["*.go"]),
+ visibility = ["//:__subpackages__"],
+)
+
+filegroup(
+ name = "all_files",
+ testonly = True,
+ srcs = glob(["**"]),
+ visibility = ["//visibility:public"],
+)
diff --git a/go/tools/builders/ar.go b/go/tools/builders/ar.go
new file mode 100644
index 00000000..2f4b36c8
--- /dev/null
+++ b/go/tools/builders/ar.go
@@ -0,0 +1,104 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+)
+
+type header struct {
+ NameRaw [16]byte
+ ModTimeRaw [12]byte
+ OwnerIdRaw [6]byte
+ GroupIdRaw [6]byte
+ FileModeRaw [8]byte
+ FileSizeRaw [10]byte
+ EndRaw [2]byte
+}
+
+func (h *header) name() string {
+ return strings.TrimRight(string(h.NameRaw[:]), " ")
+}
+
+func (h *header) size() int64 {
+ s, err := strconv.Atoi(strings.TrimRight(string(h.FileSizeRaw[:]), " "))
+ if err != nil {
+ panic(err)
+ }
+ return int64(s)
+}
+
+func (h *header) next() int64 {
+ size := h.size()
+ return size + size%2
+}
+
+func (h *header) deterministic() *header {
+ h2 := *h
+ copy(h2.ModTimeRaw[:], zeroBytes)
+ copy(h2.OwnerIdRaw[:], zeroBytes)
+ copy(h2.GroupIdRaw[:], zeroBytes)
+ copy(h2.FileModeRaw[:], zeroBytes) // GNU ar also clears this
+ return &h2
+}
+
+// stripArMetadata strips the archive metadata of non-deterministic data:
+// - Timestamps
+// - User IDs
+// - Group IDs
+// - File Modes
+// The archive is modified in place.
+func stripArMetadata(archivePath string) error {
+ archive, err := os.OpenFile(archivePath, os.O_RDWR, 0)
+ if err != nil {
+ return err
+ }
+ defer archive.Close()
+
+ magic := make([]byte, len(arHeader))
+ if _, err := io.ReadFull(archive, magic); err != nil {
+ return err
+ }
+
+ if string(magic) != arHeader {
+ return fmt.Errorf("%s is not an archive", archivePath)
+ }
+
+ for {
+ hdr := &header{}
+ if err := binary.Read(archive, binary.BigEndian, hdr); err == io.EOF {
+ return nil
+ } else if err != nil {
+ return err
+ }
+
+ // Seek back at the beginning of the header and overwrite it.
+ archive.Seek(-entryLength, os.SEEK_CUR)
+ if err := binary.Write(archive, binary.BigEndian, hdr.deterministic()); err != nil {
+ return err
+ }
+
+ if _, err := archive.Seek(hdr.next(), os.SEEK_CUR); err == io.EOF {
+ return nil
+ } else if err != nil {
+ return err
+ }
+ }
+}
diff --git a/go/tools/builders/asm.go b/go/tools/builders/asm.go
new file mode 100644
index 00000000..3d64c9ba
--- /dev/null
+++ b/go/tools/builders/asm.go
@@ -0,0 +1,138 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "go/build"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+var ASM_DEFINES = []string{
+ "-D", "GOOS_" + build.Default.GOOS,
+ "-D", "GOARCH_" + build.Default.GOARCH,
+ "-D", "GOOS_GOARCH_" + build.Default.GOOS + "_" + build.Default.GOARCH,
+}
+
+// buildSymabisFile generates a file from assembly files that is consumed
+// by the compiler. This is only needed in go1.12+ when there is at least one
+// .s file. If the symabis file is not needed, no file will be generated,
+// and "", nil will be returned.
+func buildSymabisFile(goenv *env, sFiles, hFiles []fileInfo, asmhdr string) (string, error) {
+ if len(sFiles) == 0 {
+ return "", nil
+ }
+
+ // Check version. The symabis file is only required and can only be built
+ // starting at go1.12.
+ version := runtime.Version()
+ if strings.HasPrefix(version, "go1.") {
+ minor := version[len("go1."):]
+ if i := strings.IndexByte(minor, '.'); i >= 0 {
+ minor = minor[:i]
+ }
+ n, err := strconv.Atoi(minor)
+ if err == nil && n <= 11 {
+ return "", nil
+ }
+ // Fall through if the version can't be parsed. It's probably a newer
+ // development version.
+ }
+
+ // Create an empty go_asm.h file. The compiler will write this later, but
+ // we need one to exist now.
+ asmhdrFile, err := os.Create(asmhdr)
+ if err != nil {
+ return "", err
+ }
+ if err := asmhdrFile.Close(); err != nil {
+ return "", err
+ }
+ asmhdrDir := filepath.Dir(asmhdr)
+
+ // Create a temporary output file. The caller is responsible for deleting it.
+ var symabisName string
+ symabisFile, err := ioutil.TempFile("", "symabis")
+ if err != nil {
+ return "", err
+ }
+ symabisName = symabisFile.Name()
+ symabisFile.Close()
+
+ // Run the assembler.
+ wd, err := os.Getwd()
+ if err != nil {
+ return symabisName, err
+ }
+ asmargs := goenv.goTool("asm")
+ asmargs = append(asmargs, "-trimpath", wd)
+ asmargs = append(asmargs, "-I", wd)
+ asmargs = append(asmargs, "-I", filepath.Join(os.Getenv("GOROOT"), "pkg", "include"))
+ asmargs = append(asmargs, "-I", asmhdrDir)
+ seenHdrDirs := map[string]bool{wd: true, asmhdrDir: true}
+ for _, hFile := range hFiles {
+ hdrDir := filepath.Dir(abs(hFile.filename))
+ if !seenHdrDirs[hdrDir] {
+ asmargs = append(asmargs, "-I", hdrDir)
+ seenHdrDirs[hdrDir] = true
+ }
+ }
+ asmargs = append(asmargs, ASM_DEFINES...)
+ asmargs = append(asmargs, "-gensymabis", "-o", symabisName, "--")
+ for _, sFile := range sFiles {
+ asmargs = append(asmargs, sFile.filename)
+ }
+
+ err = goenv.runCommand(asmargs)
+ return symabisName, err
+}
+
+func asmFile(goenv *env, srcPath, packagePath string, asmFlags []string, outPath string) error {
+ args := goenv.goTool("asm")
+ args = append(args, asmFlags...)
+ // The package path has to be specified as of Go 1.19 or the resulting
+ // object will be unlinkable, but the -p flag is also only available
+ // since Go 1.19.
+ if packagePath != "" && isGo119OrHigher() {
+ args = append(args, "-p", packagePath)
+ }
+ args = append(args, ASM_DEFINES...)
+ args = append(args, "-trimpath", ".")
+ args = append(args, "-o", outPath)
+ args = append(args, "--", srcPath)
+ absArgs(args, []string{"-I", "-o", "-trimpath"})
+ return goenv.runCommand(args)
+}
+
+var goMinorVersionRegexp = regexp.MustCompile(`^go1\.(\d+)`)
+
+func isGo119OrHigher() bool {
+ match := goMinorVersionRegexp.FindStringSubmatch(runtime.Version())
+ if match == nil {
+ // Developer version or something with an unparseable version string,
+ // assume Go 1.19 or higher.
+ return true
+ }
+ minorVersion, err := strconv.Atoi(match[1])
+ if err != nil {
+ return true
+ }
+ return minorVersion >= 19
+}
diff --git a/go/tools/builders/builder.go b/go/tools/builders/builder.go
new file mode 100644
index 00000000..5d691839
--- /dev/null
+++ b/go/tools/builders/builder.go
@@ -0,0 +1,64 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// builder implements most of the actions for Bazel to compile and link
+// go code. We use a single binary for most actions, since this reduces
+// the number of inputs needed for each action and allows us to build
+// multiple related files in a single action.
+
+package main
+
+import (
+ "log"
+ "os"
+)
+
+func main() {
+ log.SetFlags(0)
+ log.SetPrefix("builder: ")
+
+ args, _, err := expandParamsFiles(os.Args[1:])
+ if err != nil {
+ log.Fatal(err)
+ }
+ if len(args) == 0 {
+ log.Fatalf("usage: %s verb options...", os.Args[0])
+ }
+ verb, rest := args[0], args[1:]
+
+ var action func(args []string) error
+ switch verb {
+ case "compilepkg":
+ action = compilePkg
+ case "filterbuildid":
+ action = filterBuildID
+ case "gentestmain":
+ action = genTestMain
+ case "link":
+ action = link
+ case "gennogomain":
+ action = genNogoMain
+ case "stdlib":
+ action = stdlib
+ case "stdliblist":
+ action = stdliblist
+ default:
+ log.Fatalf("unknown action: %s", verb)
+ }
+ log.SetPrefix(verb + ": ")
+
+ if err := action(rest); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go/tools/builders/cgo2.go b/go/tools/builders/cgo2.go
new file mode 100644
index 00000000..fc2876a9
--- /dev/null
+++ b/go/tools/builders/cgo2.go
@@ -0,0 +1,397 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// cgo2.go provides new cgo functionality for use by the GoCompilePkg action.
+// We can't use the functionality in cgo.go, since it relies too heavily
+// on logic in cgo.bzl. Ideally, we'd be able to replace cgo.go with this
+// file eventually, but not until Bazel gives us enough toolchain information
+// to compile ObjC files.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// cgo2 processes a set of mixed source files with cgo.
+func cgo2(goenv *env, goSrcs, cgoSrcs, cSrcs, cxxSrcs, objcSrcs, objcxxSrcs, sSrcs, hSrcs []string, packagePath, packageName string, cc string, cppFlags, cFlags, cxxFlags, objcFlags, objcxxFlags, ldFlags []string, cgoExportHPath string) (srcDir string, allGoSrcs, cObjs []string, err error) {
+ // Report an error if the C/C++ toolchain wasn't configured.
+ if cc == "" {
+ err := cgoError(cgoSrcs[:])
+ err = append(err, cSrcs...)
+ err = append(err, cxxSrcs...)
+ err = append(err, objcSrcs...)
+ err = append(err, objcxxSrcs...)
+ err = append(err, sSrcs...)
+ return "", nil, nil, err
+ }
+
+ // If we only have C/C++ sources without cgo, just compile and pack them
+ // without generating code. The Go command forbids this, but we've
+ // historically allowed it.
+ // TODO(jayconrod): this doesn't write CGO_LDFLAGS into the archive. We
+ // might miss dependencies like -lstdc++ if they aren't referenced in
+ // some other way.
+ if len(cgoSrcs) == 0 {
+ cObjs, err = compileCSources(goenv, cSrcs, cxxSrcs, objcSrcs, objcxxSrcs, sSrcs, hSrcs, cc, cppFlags, cFlags, cxxFlags, objcFlags, objcxxFlags)
+ return ".", nil, cObjs, err
+ }
+
+ workDir, cleanup, err := goenv.workDir()
+ if err != nil {
+ return "", nil, nil, err
+ }
+ defer cleanup()
+
+ // cgo2 will gather sources into a single temporary directory, since nogo
+ // scanners might want to include or exclude these sources we need to ensure
+ // that a fragment of the path is stable and human friendly enough to be
+ // referenced in nogo configuration.
+ workDir = filepath.Join(workDir, "cgo", packagePath)
+ if err := os.MkdirAll(workDir, 0700); err != nil {
+ return "", nil, nil, err
+ }
+
+ // Filter out -lstdc++ and -lc++ from ldflags if we don't have C++ sources,
+ // and set CGO_LDFLAGS. These flags get written as special comments into cgo
+ // generated sources. The compiler encodes those flags in the compiled .a
+ // file, and the linker passes them on to the external linker.
+ haveCxx := len(cxxSrcs)+len(objcxxSrcs) > 0
+ if !haveCxx {
+ for _, f := range ldFlags {
+ if strings.HasSuffix(f, ".a") {
+ // These flags come from cdeps options. Assume C++.
+ haveCxx = true
+ break
+ }
+ }
+ }
+ var combinedLdFlags []string
+ if haveCxx {
+ combinedLdFlags = append(combinedLdFlags, ldFlags...)
+ } else {
+ for _, f := range ldFlags {
+ if f != "-lc++" && f != "-lstdc++" {
+ combinedLdFlags = append(combinedLdFlags, f)
+ }
+ }
+ }
+ combinedLdFlags = append(combinedLdFlags, defaultLdFlags()...)
+ os.Setenv("CGO_LDFLAGS", strings.Join(combinedLdFlags, " "))
+
+ // If cgo sources are in different directories, gather them into a temporary
+ // directory so we can use -srcdir.
+ srcDir = filepath.Dir(cgoSrcs[0])
+ srcsInSingleDir := true
+ for _, src := range cgoSrcs[1:] {
+ if filepath.Dir(src) != srcDir {
+ srcsInSingleDir = false
+ break
+ }
+ }
+
+ if srcsInSingleDir {
+ for i := range cgoSrcs {
+ cgoSrcs[i] = filepath.Base(cgoSrcs[i])
+ }
+ } else {
+ srcDir = filepath.Join(workDir, "cgosrcs")
+ if err := os.Mkdir(srcDir, 0777); err != nil {
+ return "", nil, nil, err
+ }
+ copiedSrcs, err := gatherSrcs(srcDir, cgoSrcs)
+ if err != nil {
+ return "", nil, nil, err
+ }
+ cgoSrcs = copiedSrcs
+ }
+
+ // Generate Go and C code.
+ hdrDirs := map[string]bool{}
+ var hdrIncludes []string
+ for _, hdr := range hSrcs {
+ hdrDir := filepath.Dir(hdr)
+ if !hdrDirs[hdrDir] {
+ hdrDirs[hdrDir] = true
+ hdrIncludes = append(hdrIncludes, "-iquote", hdrDir)
+ }
+ }
+ hdrIncludes = append(hdrIncludes, "-iquote", workDir) // for _cgo_export.h
+
+ execRoot, err := bazelExecRoot()
+ if err != nil {
+ return "", nil, nil, err
+ }
+ // Trim the execroot from the //line comments emitted by cgo.
+ args := goenv.goTool("cgo", "-srcdir", srcDir, "-objdir", workDir, "-trimpath", execRoot)
+ if packagePath != "" {
+ args = append(args, "-importpath", packagePath)
+ }
+ args = append(args, "--")
+ args = append(args, cppFlags...)
+ args = append(args, hdrIncludes...)
+ args = append(args, cFlags...)
+ args = append(args, cgoSrcs...)
+ if err := goenv.runCommand(args); err != nil {
+ return "", nil, nil, err
+ }
+
+ if cgoExportHPath != "" {
+ if err := copyFile(filepath.Join(workDir, "_cgo_export.h"), cgoExportHPath); err != nil {
+ return "", nil, nil, err
+ }
+ }
+ genGoSrcs := make([]string, 1+len(cgoSrcs))
+ genGoSrcs[0] = filepath.Join(workDir, "_cgo_gotypes.go")
+ genCSrcs := make([]string, 1+len(cgoSrcs))
+ genCSrcs[0] = filepath.Join(workDir, "_cgo_export.c")
+ for i, src := range cgoSrcs {
+ stem := strings.TrimSuffix(filepath.Base(src), ".go")
+ genGoSrcs[i+1] = filepath.Join(workDir, stem+".cgo1.go")
+ genCSrcs[i+1] = filepath.Join(workDir, stem+".cgo2.c")
+ }
+ cgoMainC := filepath.Join(workDir, "_cgo_main.c")
+
+ // Compile C, C++, Objective-C/C++, and assembly code.
+ defaultCFlags := defaultCFlags(workDir)
+ combinedCFlags := combineFlags(cppFlags, hdrIncludes, cFlags, defaultCFlags)
+ for _, lang := range []struct{ srcs, flags []string }{
+ {genCSrcs, combinedCFlags},
+ {cSrcs, combinedCFlags},
+ {cxxSrcs, combineFlags(cppFlags, hdrIncludes, cxxFlags, defaultCFlags)},
+ {objcSrcs, combineFlags(cppFlags, hdrIncludes, objcFlags, defaultCFlags)},
+ {objcxxSrcs, combineFlags(cppFlags, hdrIncludes, objcxxFlags, defaultCFlags)},
+ {sSrcs, nil},
+ } {
+ for _, src := range lang.srcs {
+ obj := filepath.Join(workDir, fmt.Sprintf("_x%d.o", len(cObjs)))
+ cObjs = append(cObjs, obj)
+ if err := cCompile(goenv, src, cc, lang.flags, obj); err != nil {
+ return "", nil, nil, err
+ }
+ }
+ }
+
+ mainObj := filepath.Join(workDir, "_cgo_main.o")
+ if err := cCompile(goenv, cgoMainC, cc, combinedCFlags, mainObj); err != nil {
+ return "", nil, nil, err
+ }
+
+ // Link cgo binary and use the symbols to generate _cgo_import.go.
+ mainBin := filepath.Join(workDir, "_cgo_.o") // .o is a lie; it's an executable
+ args = append([]string{cc, "-o", mainBin, mainObj}, cObjs...)
+ args = append(args, combinedLdFlags...)
+ var originalErrBuf bytes.Buffer
+ if err := goenv.runCommandToFile(os.Stdout, &originalErrBuf, args); err != nil {
+ // If linking the binary for cgo fails, this is usually because the
+ // object files reference external symbols that can't be resolved yet.
+ // Since the binary is only produced to have its symbols read by the cgo
+ // command, there is no harm in trying to build it allowing unresolved
+ // symbols - the real link that happens at the end will fail if they
+ // rightfully can't be resolved.
+ var allowUnresolvedSymbolsLdFlag string
+ switch os.Getenv("GOOS") {
+ case "windows":
+ // MinGW's linker doesn't seem to support --unresolved-symbols
+ // and MSVC isn't supported at all.
+ return "", nil, nil, err
+ case "darwin", "ios":
+ allowUnresolvedSymbolsLdFlag = "-Wl,-undefined,dynamic_lookup"
+ default:
+ allowUnresolvedSymbolsLdFlag = "-Wl,--unresolved-symbols=ignore-all"
+ }
+ // Print and return the original error if we can't link the binary with
+ // the additional linker flags as they may simply be incorrect for the
+ // particular compiler/linker pair and would obscure the true reason for
+ // the failure of the original command.
+ if err2 := goenv.runCommandToFile(
+ os.Stdout,
+ ioutil.Discard,
+ append(args, allowUnresolvedSymbolsLdFlag),
+ ); err2 != nil {
+ os.Stderr.Write(relativizePaths(originalErrBuf.Bytes()))
+ return "", nil, nil, err
+ }
+ // Do not print the original error - rerunning the command with the
+ // additional linker flag fixed it.
+ }
+
+ cgoImportsGo := filepath.Join(workDir, "_cgo_imports.go")
+ args = goenv.goTool("cgo", "-dynpackage", packageName, "-dynimport", mainBin, "-dynout", cgoImportsGo)
+ if err := goenv.runCommand(args); err != nil {
+ return "", nil, nil, err
+ }
+ genGoSrcs = append(genGoSrcs, cgoImportsGo)
+
+ // Copy regular Go source files into the work directory so that we can
+ // use -trimpath=workDir.
+ goBases, err := gatherSrcs(workDir, goSrcs)
+ if err != nil {
+ return "", nil, nil, err
+ }
+
+ allGoSrcs = make([]string, len(goSrcs)+len(genGoSrcs))
+ for i := range goSrcs {
+ allGoSrcs[i] = filepath.Join(workDir, goBases[i])
+ }
+ copy(allGoSrcs[len(goSrcs):], genGoSrcs)
+ return workDir, allGoSrcs, cObjs, nil
+}
+
+// compileCSources compiles a list of C, C++, Objective-C, Objective-C++,
+// and assembly sources into .o files to be packed into the archive.
+// It does not run cgo. This is used for packages with "cgo = True" but
+// without any .go files that import "C". The Go command forbids this,
+// but we have historically allowed it.
+func compileCSources(goenv *env, cSrcs, cxxSrcs, objcSrcs, objcxxSrcs, sSrcs, hSrcs []string, cc string, cppFlags, cFlags, cxxFlags, objcFlags, objcxxFlags []string) (cObjs []string, err error) {
+ workDir, cleanup, err := goenv.workDir()
+ if err != nil {
+ return nil, err
+ }
+ defer cleanup()
+
+ hdrDirs := map[string]bool{}
+ var hdrIncludes []string
+ for _, hdr := range hSrcs {
+ hdrDir := filepath.Dir(hdr)
+ if !hdrDirs[hdrDir] {
+ hdrDirs[hdrDir] = true
+ hdrIncludes = append(hdrIncludes, "-iquote", hdrDir)
+ }
+ }
+
+ defaultCFlags := defaultCFlags(workDir)
+ for _, lang := range []struct{ srcs, flags []string }{
+ {cSrcs, combineFlags(cppFlags, hdrIncludes, cFlags, defaultCFlags)},
+ {cxxSrcs, combineFlags(cppFlags, hdrIncludes, cxxFlags, defaultCFlags)},
+ {objcSrcs, combineFlags(cppFlags, hdrIncludes, objcFlags, defaultCFlags)},
+ {objcxxSrcs, combineFlags(cppFlags, hdrIncludes, objcxxFlags, defaultCFlags)},
+ {sSrcs, nil},
+ } {
+ for _, src := range lang.srcs {
+ obj := filepath.Join(workDir, fmt.Sprintf("_x%d.o", len(cObjs)))
+ cObjs = append(cObjs, obj)
+ if err := cCompile(goenv, src, cc, lang.flags, obj); err != nil {
+ return nil, err
+ }
+ }
+ }
+ return cObjs, nil
+}
+
+func combineFlags(lists ...[]string) []string {
+ n := 0
+ for _, list := range lists {
+ n += len(list)
+ }
+ flags := make([]string, 0, n)
+ for _, list := range lists {
+ flags = append(flags, list...)
+ }
+ return flags
+}
+
+func cCompile(goenv *env, src, cc string, flags []string, out string) error {
+ args := []string{cc}
+ args = append(args, flags...)
+ args = append(args, "-c", src, "-o", out)
+ return goenv.runCommand(args)
+}
+
+func defaultCFlags(workDir string) []string {
+ flags := []string{
+ "-fdebug-prefix-map=" + abs(".") + "=.",
+ "-fdebug-prefix-map=" + workDir + "=.",
+ }
+ goos, goarch := os.Getenv("GOOS"), os.Getenv("GOARCH")
+ switch {
+ case goos == "darwin" || goos == "ios":
+ return flags
+ case goos == "windows" && goarch == "amd64":
+ return append(flags, "-mthreads")
+ default:
+ return append(flags, "-pthread")
+ }
+}
+
+func defaultLdFlags() []string {
+ goos, goarch := os.Getenv("GOOS"), os.Getenv("GOARCH")
+ switch {
+ case goos == "android":
+ return []string{"-llog", "-ldl"}
+ case goos == "darwin" || goos == "ios":
+ return nil
+ case goos == "windows" && goarch == "amd64":
+ return []string{"-mthreads"}
+ default:
+ return []string{"-pthread"}
+ }
+}
+
+// gatherSrcs copies or links files listed in srcs into dir. This is needed
+// to effectively use -trimpath with generated sources. It's also needed by cgo.
+//
+// gatherSrcs returns the basenames of copied files in the directory.
+func gatherSrcs(dir string, srcs []string) ([]string, error) {
+ copiedBases := make([]string, len(srcs))
+ for i, src := range srcs {
+ base := filepath.Base(src)
+ ext := filepath.Ext(base)
+ stem := base[:len(base)-len(ext)]
+ var err error
+ for j := 1; j < 10000; j++ {
+ if err = copyOrLinkFile(src, filepath.Join(dir, base)); err == nil {
+ break
+ } else if !os.IsExist(err) {
+ return nil, err
+ } else {
+ base = fmt.Sprintf("%s_%d%s", stem, j, ext)
+ }
+ }
+ if err != nil {
+ return nil, fmt.Errorf("could not find unique name for file %s", src)
+ }
+ copiedBases[i] = base
+ }
+ return copiedBases, nil
+}
+
+func bazelExecRoot() (string, error) {
+ // Bazel executes the builder with a working directory of the form
+ // .../execroot/<workspace name>. By stripping the last segment, we obtain a
+ // prefix of all possible source files, even when contained in external
+ // repositories.
+ cwd, err := os.Getwd()
+ if err != nil {
+ return "", err
+ }
+ return filepath.Dir(cwd), nil
+}
+
+type cgoError []string
+
+func (e cgoError) Error() string {
+ b := &bytes.Buffer{}
+ fmt.Fprint(b, "CC is not set and files need to be processed with cgo:\n")
+ for _, f := range e {
+ fmt.Fprintf(b, "\t%s\n", f)
+ }
+ fmt.Fprintf(b, "Ensure that 'cgo = True' is set and the C/C++ toolchain is configured.")
+ return b.String()
+}
diff --git a/go/tools/builders/compilepkg.go b/go/tools/builders/compilepkg.go
new file mode 100644
index 00000000..6e21ca24
--- /dev/null
+++ b/go/tools/builders/compilepkg.go
@@ -0,0 +1,615 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// compilepkg compiles a complete Go package from Go, C, and assembly files. It
+// supports cgo, coverage, and nogo. It is invoked by the Go rules as an action.
+package main
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+)
+
// nogoResult describes the outcome of a nogo static-analysis run for one
// compilation.
type nogoResult int

const (
	// nogoNotRun: nogo was not configured or had no sources to analyze.
	nogoNotRun nogoResult = iota
	// nogoError: declared for completeness; not referenced in the visible
	// code of this file.
	nogoError
	// nogoFailed: nogo ran and reported an error (findings or an
	// unexpected exit).
	nogoFailed
	// nogoSucceeded: nogo ran cleanly; its facts file is valid.
	nogoSucceeded
)
+
// compilePkg is the entry point of the GoCompilePkg action. It expands
// params files, parses command-line flags, normalizes paths, filters and
// splits the source list, applies test-package filtering, and delegates the
// actual compilation to compileArchive.
func compilePkg(args []string) error {
	// Parse arguments.
	args, _, err := expandParamsFiles(args)
	if err != nil {
		return err
	}

	fs := flag.NewFlagSet("GoCompilePkg", flag.ExitOnError)
	goenv := envFlags(fs)
	var unfilteredSrcs, coverSrcs, embedSrcs, embedLookupDirs, embedRoots, recompileInternalDeps multiFlag
	var deps archiveMultiFlag
	var importPath, packagePath, nogoPath, packageListPath, coverMode string
	var outPath, outFactsPath, cgoExportHPath string
	var testFilter string
	var gcFlags, asmFlags, cppFlags, cFlags, cxxFlags, objcFlags, objcxxFlags, ldFlags quoteMultiFlag
	var coverFormat string
	fs.Var(&unfilteredSrcs, "src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and compiled")
	fs.Var(&coverSrcs, "cover", ".go file that should be instrumented for coverage (must also be a -src)")
	fs.Var(&embedSrcs, "embedsrc", "file that may be compiled into the package with a //go:embed directive")
	fs.Var(&embedLookupDirs, "embedlookupdir", "Root-relative paths to directories relative to which //go:embed directives are resolved")
	fs.Var(&embedRoots, "embedroot", "Bazel output root under which a file passed via -embedsrc resides")
	fs.Var(&deps, "arc", "Import path, package path, and file name of a direct dependency, separated by '='")
	fs.StringVar(&importPath, "importpath", "", "The import path of the package being compiled. Not passed to the compiler, but may be displayed in debug data.")
	fs.StringVar(&packagePath, "p", "", "The package path (importmap) of the package being compiled")
	fs.Var(&gcFlags, "gcflags", "Go compiler flags")
	fs.Var(&asmFlags, "asmflags", "Go assembler flags")
	fs.Var(&cppFlags, "cppflags", "C preprocessor flags")
	fs.Var(&cFlags, "cflags", "C compiler flags")
	fs.Var(&cxxFlags, "cxxflags", "C++ compiler flags")
	fs.Var(&objcFlags, "objcflags", "Objective-C compiler flags")
	fs.Var(&objcxxFlags, "objcxxflags", "Objective-C++ compiler flags")
	fs.Var(&ldFlags, "ldflags", "C linker flags")
	fs.StringVar(&nogoPath, "nogo", "", "The nogo binary. If unset, nogo will not be run.")
	fs.StringVar(&packageListPath, "package_list", "", "The file containing the list of standard library packages")
	fs.StringVar(&coverMode, "cover_mode", "", "The coverage mode to use. Empty if coverage instrumentation should not be added.")
	fs.StringVar(&outPath, "o", "", "The output archive file to write compiled code")
	fs.StringVar(&outFactsPath, "x", "", "The output archive file to write export data and nogo facts")
	fs.StringVar(&cgoExportHPath, "cgoexport", "", "The _cgo_exports.h file to write")
	fs.StringVar(&testFilter, "testfilter", "off", "Controls test package filtering")
	fs.StringVar(&coverFormat, "cover_format", "", "Emit source file paths in coverage instrumentation suitable for the specified coverage format")
	fs.Var(&recompileInternalDeps, "recompile_internal_deps", "The import path of the direct dependencies that needs to be recompiled.")
	if err := fs.Parse(args); err != nil {
		return err
	}
	if err := goenv.checkFlags(); err != nil {
		return err
	}
	// Fall back to the package path when no explicit import path was given.
	if importPath == "" {
		importPath = packagePath
	}
	// cgo configuration is taken from the environment, not from flags.
	cgoEnabled := os.Getenv("CGO_ENABLED") == "1"
	cc := os.Getenv("CC")
	// Convert output and input paths to absolute paths.
	outPath = abs(outPath)
	for i := range unfilteredSrcs {
		unfilteredSrcs[i] = abs(unfilteredSrcs[i])
	}
	for i := range embedSrcs {
		embedSrcs[i] = abs(embedSrcs[i])
	}

	// Filter sources.
	srcs, err := filterAndSplitFiles(unfilteredSrcs)
	if err != nil {
		return err
	}

	// TODO(jayconrod): remove -testfilter flag. The test action should compile
	// the main, internal, and external packages by calling compileArchive
	// with the correct sources for each.
	switch testFilter {
	case "off":
	case "only":
		// Keep only sources whose package name ends in _test.
		testSrcs := make([]fileInfo, 0, len(srcs.goSrcs))
		for _, f := range srcs.goSrcs {
			if strings.HasSuffix(f.pkg, "_test") {
				testSrcs = append(testSrcs, f)
			}
		}
		srcs.goSrcs = testSrcs
	case "exclude":
		// Drop sources whose package name ends in _test.
		libSrcs := make([]fileInfo, 0, len(srcs.goSrcs))
		for _, f := range srcs.goSrcs {
			if !strings.HasSuffix(f.pkg, "_test") {
				libSrcs = append(libSrcs, f)
			}
		}
		srcs.goSrcs = libSrcs
	default:
		return fmt.Errorf("invalid test filter %q", testFilter)
	}

	return compileArchive(
		goenv,
		importPath,
		packagePath,
		srcs,
		deps,
		coverMode,
		coverSrcs,
		embedSrcs,
		embedLookupDirs,
		embedRoots,
		cgoEnabled,
		cc,
		gcFlags,
		asmFlags,
		cppFlags,
		cFlags,
		cxxFlags,
		objcFlags,
		objcxxFlags,
		ldFlags,
		nogoPath,
		packageListPath,
		outPath,
		outFactsPath,
		cgoExportHPath,
		coverFormat,
		recompileInternalDeps)
}
+
+func compileArchive(
+ goenv *env,
+ importPath string,
+ packagePath string,
+ srcs archiveSrcs,
+ deps []archive,
+ coverMode string,
+ coverSrcs []string,
+ embedSrcs []string,
+ embedLookupDirs []string,
+ embedRoots []string,
+ cgoEnabled bool,
+ cc string,
+ gcFlags []string,
+ asmFlags []string,
+ cppFlags []string,
+ cFlags []string,
+ cxxFlags []string,
+ objcFlags []string,
+ objcxxFlags []string,
+ ldFlags []string,
+ nogoPath string,
+ packageListPath string,
+ outPath string,
+ outXPath string,
+ cgoExportHPath string,
+ coverFormat string,
+ recompileInternalDeps []string,
+) error {
+ workDir, cleanup, err := goenv.workDir()
+ if err != nil {
+ return err
+ }
+ defer cleanup()
+
+ // As part of compilation process, rules_go does generate and/or rewrite code
+ // based on the original source files. We should only run static analysis
+ // over original source files and not the generated source as end users have
+ // little control over the generated source.
+ //
+ // nogoSrcsOrigin maps generated/rewritten source files back to original source.
+ // If the original source path is an empty string, exclude generated source from nogo run.
+ nogoSrcsOrigin := make(map[string]string)
+
+ if len(srcs.goSrcs) == 0 {
+ // We need to run the compiler to create a valid archive, even if there's nothing in it.
+ // Otherwise, GoPack will complain if we try to add assembly or cgo objects.
+ // A truly empty archive does not include any references to source file paths, which
+ // ensures hermeticity even though the temp file path is random.
+ emptyGoFile, err := os.CreateTemp(filepath.Dir(outPath), "*.go")
+ if err != nil {
+ return err
+ }
+ defer os.Remove(emptyGoFile.Name())
+ defer emptyGoFile.Close()
+ if _, err := emptyGoFile.WriteString("package empty\n"); err != nil {
+ return err
+ }
+ if err := emptyGoFile.Close(); err != nil {
+ return err
+ }
+
+ srcs.goSrcs = append(srcs.goSrcs, fileInfo{
+ filename: emptyGoFile.Name(),
+ ext: goExt,
+ matched: true,
+ pkg: "empty",
+ })
+
+ nogoSrcsOrigin[emptyGoFile.Name()] = ""
+ }
+ packageName := srcs.goSrcs[0].pkg
+ var goSrcs, cgoSrcs []string
+ for _, src := range srcs.goSrcs {
+ if src.isCgo {
+ cgoSrcs = append(cgoSrcs, src.filename)
+ } else {
+ goSrcs = append(goSrcs, src.filename)
+ }
+ }
+ cSrcs := make([]string, len(srcs.cSrcs))
+ for i, src := range srcs.cSrcs {
+ cSrcs[i] = src.filename
+ }
+ cxxSrcs := make([]string, len(srcs.cxxSrcs))
+ for i, src := range srcs.cxxSrcs {
+ cxxSrcs[i] = src.filename
+ }
+ objcSrcs := make([]string, len(srcs.objcSrcs))
+ for i, src := range srcs.objcSrcs {
+ objcSrcs[i] = src.filename
+ }
+ objcxxSrcs := make([]string, len(srcs.objcxxSrcs))
+ for i, src := range srcs.objcxxSrcs {
+ objcxxSrcs[i] = src.filename
+ }
+ sSrcs := make([]string, len(srcs.sSrcs))
+ for i, src := range srcs.sSrcs {
+ sSrcs[i] = src.filename
+ }
+ hSrcs := make([]string, len(srcs.hSrcs))
+ for i, src := range srcs.hSrcs {
+ hSrcs[i] = src.filename
+ }
+ haveCgo := len(cgoSrcs)+len(cSrcs)+len(cxxSrcs)+len(objcSrcs)+len(objcxxSrcs) > 0
+
+ // Instrument source files for coverage.
+ if coverMode != "" {
+ relCoverPath := make(map[string]string)
+ for _, s := range coverSrcs {
+ relCoverPath[abs(s)] = s
+ }
+
+ combined := append([]string{}, goSrcs...)
+ if cgoEnabled {
+ combined = append(combined, cgoSrcs...)
+ }
+ for i, origSrc := range combined {
+ if _, ok := relCoverPath[origSrc]; !ok {
+ continue
+ }
+
+ var srcName string
+ switch coverFormat {
+ case "go_cover":
+ srcName = origSrc
+ if importPath != "" {
+ srcName = path.Join(importPath, filepath.Base(origSrc))
+ }
+ case "lcov":
+ // Bazel merges lcov reports across languages and thus assumes
+ // that the source file paths are relative to the exec root.
+ srcName = relCoverPath[origSrc]
+ default:
+ return fmt.Errorf("invalid value for -cover_format: %q", coverFormat)
+ }
+
+ stem := filepath.Base(origSrc)
+ if ext := filepath.Ext(stem); ext != "" {
+ stem = stem[:len(stem)-len(ext)]
+ }
+ coverVar := fmt.Sprintf("Cover_%s_%d_%s", sanitizePathForIdentifier(importPath), i, sanitizePathForIdentifier(stem))
+ coverVar = strings.ReplaceAll(coverVar, "_", "Z")
+ coverSrc := filepath.Join(workDir, fmt.Sprintf("cover_%d.go", i))
+ if err := instrumentForCoverage(goenv, origSrc, srcName, coverVar, coverMode, coverSrc); err != nil {
+ return err
+ }
+
+ if i < len(goSrcs) {
+ goSrcs[i] = coverSrc
+ nogoSrcsOrigin[coverSrc] = origSrc
+ continue
+ }
+
+ cgoSrcs[i-len(goSrcs)] = coverSrc
+ }
+ }
+
+ // If we have cgo, generate separate C and go files, and compile the
+ // C files.
+ var objFiles []string
+ if cgoEnabled && haveCgo {
+ // TODO(#2006): Compile .s and .S files with cgo2, not the Go assembler.
+ // If cgo is not enabled or we don't have other cgo sources, don't
+ // compile .S files.
+ var srcDir string
+ srcDir, goSrcs, objFiles, err = cgo2(goenv, goSrcs, cgoSrcs, cSrcs, cxxSrcs, objcSrcs, objcxxSrcs, nil, hSrcs, packagePath, packageName, cc, cppFlags, cFlags, cxxFlags, objcFlags, objcxxFlags, ldFlags, cgoExportHPath)
+ if err != nil {
+ return err
+ }
+
+ gcFlags = append(gcFlags, createTrimPath(gcFlags, srcDir))
+ } else {
+ if cgoExportHPath != "" {
+ if err := ioutil.WriteFile(cgoExportHPath, nil, 0o666); err != nil {
+ return err
+ }
+ }
+ gcFlags = append(gcFlags, createTrimPath(gcFlags, "."))
+ }
+
+ // Check that the filtered sources don't import anything outside of
+ // the standard library and the direct dependencies.
+ imports, err := checkImports(srcs.goSrcs, deps, packageListPath, importPath, recompileInternalDeps)
+ if err != nil {
+ return err
+ }
+ if cgoEnabled && len(cgoSrcs) != 0 {
+ // cgo generated code imports some extra packages.
+ imports["runtime/cgo"] = nil
+ imports["syscall"] = nil
+ imports["unsafe"] = nil
+ }
+ if coverMode != "" {
+ if coverMode == "atomic" {
+ imports["sync/atomic"] = nil
+ }
+ const coverdataPath = "github.com/bazelbuild/rules_go/go/tools/coverdata"
+ var coverdata *archive
+ for i := range deps {
+ if deps[i].importPath == coverdataPath {
+ coverdata = &deps[i]
+ break
+ }
+ }
+ if coverdata == nil {
+ return errors.New("coverage requested but coverdata dependency not provided")
+ }
+ imports[coverdataPath] = coverdata
+ }
+
+ // Build an importcfg file for the compiler.
+ importcfgPath, err := buildImportcfgFileForCompile(imports, goenv.installSuffix, filepath.Dir(outPath))
+ if err != nil {
+ return err
+ }
+ if !goenv.shouldPreserveWorkDir {
+ defer os.Remove(importcfgPath)
+ }
+
+ // Build an embedcfg file mapping embed patterns to filenames.
+ // Embed patterns are relative to any one of a list of root directories
+ // that may contain embeddable files. Source files containing embed patterns
+ // must be in one of these root directories so the pattern appears to be
+ // relative to the source file. Due to transitions, source files can reside
+ // under Bazel roots different from both those of the go srcs and those of
+ // the compilation output. Thus, we have to consider all combinations of
+ // Bazel roots embedsrcs and root-relative paths of source files and the
+ // output binary.
+ var embedRootDirs []string
+ for _, root := range embedRoots {
+ for _, lookupDir := range embedLookupDirs {
+ embedRootDir := abs(filepath.Join(root, lookupDir))
+ // Since we are iterating over all combinations of roots and
+ // root-relative paths, some resulting paths may not exist and
+ // should be filtered out before being passed to buildEmbedcfgFile.
+ // Since Bazel uniquified both the roots and the root-relative
+ // paths, the combinations are automatically unique.
+ if _, err := os.Stat(embedRootDir); err == nil {
+ embedRootDirs = append(embedRootDirs, embedRootDir)
+ }
+ }
+ }
+ embedcfgPath, err := buildEmbedcfgFile(srcs.goSrcs, embedSrcs, embedRootDirs, workDir)
+ if err != nil {
+ return err
+ }
+ if embedcfgPath != "" {
+ if !goenv.shouldPreserveWorkDir {
+ defer os.Remove(embedcfgPath)
+ }
+ }
+
+ // Run nogo concurrently.
+ var nogoChan chan error
+ outFactsPath := filepath.Join(workDir, nogoFact)
+ nogoSrcs := make([]string, 0, len(goSrcs))
+ for _, goSrc := range goSrcs {
+ // If source is found in the origin map, that means it's likely to be a generated source file
+ // so feed the original source file to static analyzers instead of the generated one.
+ //
+ // If origin is empty, that means the generated source file is not based on a user-provided source file
+ // thus ignore that entry entirely.
+ if originSrc, ok := nogoSrcsOrigin[goSrc]; ok {
+ if originSrc != "" {
+ nogoSrcs = append(nogoSrcs, originSrc)
+ }
+ continue
+ }
+
+ // TODO(sluongng): most likely what remains here are CGO-generated source files as the result of calling cgo2()
+ // Need to determine whether we want to feed these CGO-generated files into static analyzers.
+ //
+ // Add unknown origin source files into the mix.
+ nogoSrcs = append(nogoSrcs, goSrc)
+ }
+ if nogoPath != "" && len(nogoSrcs) > 0 {
+ ctx, cancel := context.WithCancel(context.Background())
+ nogoChan = make(chan error)
+ go func() {
+ nogoChan <- runNogo(ctx, workDir, nogoPath, nogoSrcs, deps, packagePath, importcfgPath, outFactsPath)
+ }()
+ defer func() {
+ if nogoChan != nil {
+ cancel()
+ <-nogoChan
+ }
+ }()
+ }
+
+ // If there are assembly files, and this is go1.12+, generate symbol ABIs.
+ asmHdrPath := ""
+ if len(srcs.sSrcs) > 0 {
+ asmHdrPath = filepath.Join(workDir, "go_asm.h")
+ }
+ symabisPath, err := buildSymabisFile(goenv, srcs.sSrcs, srcs.hSrcs, asmHdrPath)
+ if symabisPath != "" {
+ if !goenv.shouldPreserveWorkDir {
+ defer os.Remove(symabisPath)
+ }
+ }
+ if err != nil {
+ return err
+ }
+
+ // Compile the filtered .go files.
+ if err := compileGo(goenv, goSrcs, packagePath, importcfgPath, embedcfgPath, asmHdrPath, symabisPath, gcFlags, outPath); err != nil {
+ return err
+ }
+
+ // Compile the .s files.
+ if len(srcs.sSrcs) > 0 {
+ includeSet := map[string]struct{}{
+ filepath.Join(os.Getenv("GOROOT"), "pkg", "include"): {},
+ workDir: {},
+ }
+ for _, hdr := range srcs.hSrcs {
+ includeSet[filepath.Dir(hdr.filename)] = struct{}{}
+ }
+ includes := make([]string, len(includeSet))
+ for inc := range includeSet {
+ includes = append(includes, inc)
+ }
+ sort.Strings(includes)
+ for _, inc := range includes {
+ asmFlags = append(asmFlags, "-I", inc)
+ }
+ for i, sSrc := range srcs.sSrcs {
+ obj := filepath.Join(workDir, fmt.Sprintf("s%d.o", i))
+ if err := asmFile(goenv, sSrc.filename, packagePath, asmFlags, obj); err != nil {
+ return err
+ }
+ objFiles = append(objFiles, obj)
+ }
+ }
+
+ // Pack .o files into the archive. These may come from cgo generated code,
+ // cgo dependencies (cdeps), or assembly.
+ if len(objFiles) > 0 {
+ if err := appendFiles(goenv, outPath, objFiles); err != nil {
+ return err
+ }
+ }
+
+ // Check results from nogo.
+ nogoStatus := nogoNotRun
+ if nogoChan != nil {
+ err := <-nogoChan
+ nogoChan = nil // no cancellation needed
+ if err != nil {
+ nogoStatus = nogoFailed
+ // TODO: should we still create the .x file without nogo facts in this case?
+ return err
+ }
+ nogoStatus = nogoSucceeded
+ }
+
+ // Extract the export data file and pack it in an .x archive together with the
+ // nogo facts file (if there is one). This allows compile actions to depend
+ // on .x files only, so we don't need to recompile a package when one of its
+ // imports changes in a way that doesn't affect export data.
+ // TODO(golang/go#33820): After Go 1.16 is the minimum supported version,
+ // use -linkobj to tell the compiler to create separate .a and .x files for
+ // compiled code and export data. Before that version, the linker needed
+ // export data in the .a file when building a plugin. To work around that,
+ // we copy the export data into .x ourselves.
+ if err = extractFileFromArchive(outPath, workDir, pkgDef); err != nil {
+ return err
+ }
+ pkgDefPath := filepath.Join(workDir, pkgDef)
+ if nogoStatus == nogoSucceeded {
+ return appendFiles(goenv, outXPath, []string{pkgDefPath, outFactsPath})
+ }
+ return appendFiles(goenv, outXPath, []string{pkgDefPath})
+}
+
+func compileGo(goenv *env, srcs []string, packagePath, importcfgPath, embedcfgPath, asmHdrPath, symabisPath string, gcFlags []string, outPath string) error {
+ args := goenv.goTool("compile")
+ args = append(args, "-p", packagePath, "-importcfg", importcfgPath, "-pack")
+ if embedcfgPath != "" {
+ args = append(args, "-embedcfg", embedcfgPath)
+ }
+ if asmHdrPath != "" {
+ args = append(args, "-asmhdr", asmHdrPath)
+ }
+ if symabisPath != "" {
+ args = append(args, "-symabis", symabisPath)
+ }
+ args = append(args, gcFlags...)
+ args = append(args, "-o", outPath)
+ args = append(args, "--")
+ args = append(args, srcs...)
+ absArgs(args, []string{"-I", "-o", "-trimpath", "-importcfg"})
+ return goenv.runCommand(args)
+}
+
+func runNogo(ctx context.Context, workDir string, nogoPath string, srcs []string, deps []archive, packagePath, importcfgPath, outFactsPath string) error {
+ args := []string{nogoPath}
+ args = append(args, "-p", packagePath)
+ args = append(args, "-importcfg", importcfgPath)
+ for _, dep := range deps {
+ args = append(args, "-fact", fmt.Sprintf("%s=%s", dep.importPath, dep.file))
+ }
+ args = append(args, "-x", outFactsPath)
+ args = append(args, srcs...)
+
+ paramsFile := filepath.Join(workDir, "nogo.param")
+ if err := writeParamsFile(paramsFile, args[1:]); err != nil {
+ return fmt.Errorf("error writing nogo params file: %v", err)
+ }
+
+ cmd := exec.CommandContext(ctx, args[0], "-param="+paramsFile)
+ out := &bytes.Buffer{}
+ cmd.Stdout, cmd.Stderr = out, out
+ if err := cmd.Run(); err != nil {
+ if exitErr, ok := err.(*exec.ExitError); ok {
+ if !exitErr.Exited() {
+ cmdLine := strings.Join(args, " ")
+ return fmt.Errorf("nogo command '%s' exited unexpectedly: %s", cmdLine, exitErr.String())
+ }
+ return errors.New(string(relativizePaths(out.Bytes())))
+ } else {
+ if out.Len() != 0 {
+ fmt.Fprintln(os.Stderr, out.String())
+ }
+ return fmt.Errorf("error running nogo: %v", err)
+ }
+ }
+ return nil
+}
+
// createTrimPath returns the -trimpath flag to append to the compiler
// flags. If gcFlags already contains a -trimpath flag, the given path is
// appended to the first one (colon-separated); otherwise a fresh flag is
// produced.
func createTrimPath(gcFlags []string, path string) string {
	const prefix = "-trimpath="
	for _, flag := range gcFlags {
		if strings.HasPrefix(flag, prefix) {
			return flag + ":" + path
		}
	}
	return prefix + path
}
+
// sanitizePathForIdentifier maps path to a string usable in a Go
// identifier: ASCII letters, digits, and underscores pass through, and
// every other rune becomes an underscore.
func sanitizePathForIdentifier(path string) string {
	keep := func(r rune) rune {
		switch {
		case r >= 'A' && r <= 'Z',
			r >= 'a' && r <= 'z',
			r >= '0' && r <= '9',
			r == '_':
			return r
		default:
			return '_'
		}
	}
	return strings.Map(keep, path)
}
diff --git a/go/tools/builders/cover.go b/go/tools/builders/cover.go
new file mode 100644
index 00000000..fadc4fd7
--- /dev/null
+++ b/go/tools/builders/cover.go
@@ -0,0 +1,110 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "os"
+ "strconv"
+)
+
+// instrumentForCoverage runs "go tool cover" on a source file to produce
+// a coverage-instrumented version of the file. It also registers the file
+// with the coverdata package.
+func instrumentForCoverage(goenv *env, srcPath, srcName, coverVar, mode, outPath string) error {
+ goargs := goenv.goTool("cover", "-var", coverVar, "-mode", mode, "-o", outPath, srcPath)
+ if err := goenv.runCommand(goargs); err != nil {
+ return err
+ }
+
+ return registerCoverage(outPath, coverVar, srcName)
+}
+
+// registerCoverage modifies coverSrcFilename, the output file from go tool cover.
+// It adds a call to coverdata.RegisterCoverage, which ensures the coverage
+// data from each file is reported. The name by which the file is registered
+// need not match its original name (it may use the importpath).
+func registerCoverage(coverSrcFilename, varName, srcName string) error {
+ coverSrc, err := os.ReadFile(coverSrcFilename)
+ if err != nil {
+ return fmt.Errorf("instrumentForCoverage: reading instrumented source: %w", err)
+ }
+
+ // Parse the file.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, coverSrcFilename, coverSrc, parser.ParseComments)
+ if err != nil {
+ return nil // parse error: proceed and let the compiler fail
+ }
+
+ // Perform edits using a byte buffer instead of the AST, because
+ // we can not use go/format to write the AST back out without
+ // changing line numbers.
+ editor := NewBuffer(coverSrc)
+
+ // Ensure coverdata is imported. Use an existing import if present
+ // or add a new one.
+ const coverdataPath = "github.com/bazelbuild/rules_go/go/tools/coverdata"
+ var coverdataName string
+ for _, imp := range f.Imports {
+ path, err := strconv.Unquote(imp.Path.Value)
+ if err != nil {
+ return nil // parse error: proceed and let the compiler fail
+ }
+ if path == coverdataPath {
+ if imp.Name != nil {
+ // renaming import
+ if imp.Name.Name == "_" {
+ // Change blank import to named import
+ editor.Replace(
+ fset.Position(imp.Name.Pos()).Offset,
+ fset.Position(imp.Name.End()).Offset,
+ "coverdata")
+ coverdataName = "coverdata"
+ } else {
+ coverdataName = imp.Name.Name
+ }
+ } else {
+ // default import
+ coverdataName = "coverdata"
+ }
+ break
+ }
+ }
+ if coverdataName == "" {
+ // No existing import. Add a new one.
+ coverdataName = "coverdata"
+ editor.Insert(fset.Position(f.Name.End()).Offset, fmt.Sprintf("; import %q", coverdataPath))
+ }
+
+ // Append an init function.
+ var buf = bytes.NewBuffer(editor.Bytes())
+ fmt.Fprintf(buf, `
+func init() {
+ %s.RegisterFile(%q,
+ %[3]s.Count[:],
+ %[3]s.Pos[:],
+ %[3]s.NumStmt[:])
+}
+`, coverdataName, srcName, varName)
+ if err := ioutil.WriteFile(coverSrcFilename, buf.Bytes(), 0666); err != nil {
+ return fmt.Errorf("registerCoverage: %v", err)
+ }
+ return nil
+}
diff --git a/go/tools/builders/cover_test.go b/go/tools/builders/cover_test.go
new file mode 100644
index 00000000..fc1ba818
--- /dev/null
+++ b/go/tools/builders/cover_test.go
@@ -0,0 +1,130 @@
+package main
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "testing"
+)
+
// test describes one registerCoverage scenario: an input source file and
// the exact contents expected after registration code is injected.
type test struct {
	name string // case name used in failure messages
	in   string // contents written to the input .go file
	out  string // expected file contents after registerCoverage runs
}
+
// tests enumerates the TestRegisterCoverage cases: a file with no imports,
// with unrelated imports, and with an existing default, blank, or renamed
// import of the coverdata package.
var tests = []test{
	{
		name: "no imports",
		in: `package main
`,
		out: `package main; import "github.com/bazelbuild/rules_go/go/tools/coverdata"

func init() {
	coverdata.RegisterFile("srcName",
		varName.Count[:],
		varName.Pos[:],
		varName.NumStmt[:])
}
`,
	},
	{
		name: "other imports",
		in: `package main

import (
	"os"
)
`,
		out: `package main; import "github.com/bazelbuild/rules_go/go/tools/coverdata"

import (
	"os"
)

func init() {
	coverdata.RegisterFile("srcName",
		varName.Count[:],
		varName.Pos[:],
		varName.NumStmt[:])
}
`,
	},
	{
		name: "existing import",
		in: `package main

import "github.com/bazelbuild/rules_go/go/tools/coverdata"
`,
		out: `package main

import "github.com/bazelbuild/rules_go/go/tools/coverdata"

func init() {
	coverdata.RegisterFile("srcName",
		varName.Count[:],
		varName.Pos[:],
		varName.NumStmt[:])
}
`,
	},
	{
		name: "existing _ import",
		in: `package main

import _ "github.com/bazelbuild/rules_go/go/tools/coverdata"
`,
		out: `package main

import coverdata "github.com/bazelbuild/rules_go/go/tools/coverdata"

func init() {
	coverdata.RegisterFile("srcName",
		varName.Count[:],
		varName.Pos[:],
		varName.NumStmt[:])
}
`,
	},
	{
		name: "existing renamed import",
		in: `package main

import cover0 "github.com/bazelbuild/rules_go/go/tools/coverdata"
`,
		out: `package main

import cover0 "github.com/bazelbuild/rules_go/go/tools/coverdata"

func init() {
	cover0.RegisterFile("srcName",
		varName.Count[:],
		varName.Pos[:],
		varName.NumStmt[:])
}
`,
	},
}
+
+func TestRegisterCoverage(t *testing.T) {
+ var filename = filepath.Join(t.TempDir(), "test_input.go")
+ for _, test := range tests {
+ if err := ioutil.WriteFile(filename, []byte(test.in), 0666); err != nil {
+ t.Errorf("writing input file: %v", err)
+ return
+ }
+ err := registerCoverage(filename, "varName", "srcName")
+ if err != nil {
+ t.Errorf("%q: %+v", test.name, err)
+ continue
+ }
+ coverSrc, err := os.ReadFile(filename)
+ if err != nil {
+ t.Errorf("%q: %+v", test.name, err)
+ continue
+ }
+ if got, want := string(coverSrc), test.out; got != want {
+ t.Errorf("%q: got %v, want %v", test.name, got, want)
+ }
+ }
+}
diff --git a/go/tools/builders/edit.go b/go/tools/builders/edit.go
new file mode 100644
index 00000000..f8ccd52b
--- /dev/null
+++ b/go/tools/builders/edit.go
@@ -0,0 +1,95 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Copied from go1.17 tree: //src/cmd/internal/edit/edit.go
+
+// Package edit implements buffered position-based editing of byte slices.
+package main
+
+import (
+ "fmt"
+ "sort"
+)
+
// A Buffer is a queue of edits to apply to a given byte slice.
// (Vendored from Go 1.17 cmd/internal/edit; keep in sync with upstream.)
type Buffer struct {
	old []byte // the original, unmodified data
	q   edits  // pending edits, applied by Bytes after sorting
}
+
// An edit records a single text modification: change the bytes in [start,end) to new.
type edit struct {
	start int    // offset of the first byte to replace
	end   int    // offset one past the last byte to replace
	new   string // replacement text (empty for a deletion)
}
+
// An edits is a list of edits that is sortable by start offset, breaking ties by end offset.
type edits []edit

func (x edits) Len() int      { return len(x) }
func (x edits) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x edits) Less(i, j int) bool {
	if x[i].start != x[j].start {
		return x[i].start < x[j].start
	}
	return x[i].end < x[j].end
}
+
// NewBuffer returns a new buffer to accumulate changes to an initial data slice.
// The returned buffer maintains a reference to the data, so the caller must ensure
// the data is not modified until after the Buffer is done being used.
func NewBuffer(data []byte) *Buffer {
	return &Buffer{old: data}
}
+
// Insert inserts the new string at old[pos:pos].
// It panics if pos is outside [0, len(old)].
func (b *Buffer) Insert(pos int, new string) {
	if pos < 0 || pos > len(b.old) {
		panic("invalid edit position")
	}
	b.q = append(b.q, edit{pos, pos, new})
}
+
// Delete deletes the text old[start:end].
// It panics if the range is invalid or out of bounds.
func (b *Buffer) Delete(start, end int) {
	if end < start || start < 0 || end > len(b.old) {
		panic("invalid edit position")
	}
	b.q = append(b.q, edit{start, end, ""})
}
+
// Replace replaces old[start:end] with new.
// It panics if the range is invalid or out of bounds.
func (b *Buffer) Replace(start, end int, new string) {
	if end < start || start < 0 || end > len(b.old) {
		panic("invalid edit position")
	}
	b.q = append(b.q, edit{start, end, new})
}
+
// Bytes returns a new byte slice containing the original data
// with the queued edits applied. It panics if any queued edits overlap.
func (b *Buffer) Bytes() []byte {
	// Sort edits by starting position and then by ending position.
	// Breaking ties by ending position allows insertions at point x
	// to be applied before a replacement of the text at [x, y).
	sort.Stable(b.q)

	var new []byte
	offset := 0
	for i, e := range b.q {
		if e.start < offset {
			// This edit starts before the previous one ended: overlap.
			e0 := b.q[i-1]
			panic(fmt.Sprintf("overlapping edits: [%d,%d)->%q, [%d,%d)->%q", e0.start, e0.end, e0.new, e.start, e.end, e.new))
		}
		new = append(new, b.old[offset:e.start]...)
		offset = e.end
		new = append(new, e.new...)
	}
	// Copy any remaining original data after the last edit.
	new = append(new, b.old[offset:]...)
	return new
}
+
// String returns a string containing the original data
// with the queued edits applied. It is equivalent to string(b.Bytes()).
func (b *Buffer) String() string {
	return string(b.Bytes())
}
diff --git a/go/tools/builders/embed.go b/go/tools/builders/embed.go
new file mode 100644
index 00000000..e68da974
--- /dev/null
+++ b/go/tools/builders/embed.go
@@ -0,0 +1,340 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// embed generates a .go file from the contents of a list of data files. It is
+// invoked by go_embed_data as an action.
+package main
+
+import (
+ "archive/tar"
+ "archive/zip"
+ "bufio"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "text/template"
+ "unicode/utf8"
+)
+
// headerTpl renders the generated file's header: a DO NOT EDIT marker naming
// the originating label, followed by the package clause.
var headerTpl = template.Must(template.New("embed").Parse(`// Generated by go_embed_data for {{.Label}}. DO NOT EDIT.

package {{.Package}}

`))
+
// multiFooterTpl renders the map declaration emitted in -multi mode, mapping
// each found source's key to its per-index data variable (<Var>_<i>).
var multiFooterTpl = template.Must(template.New("embed").Parse(`
var {{.Var}} = map[string]{{.Type}}{
{{- range $i, $f := .FoundSources}}
	{{$.Key $f}}: {{$.Var}}_{{$i}},
{{- end}}
}

`))
+
+func main() {
+ log.SetPrefix("embed: ")
+ log.SetFlags(0) // don't print timestamps
+ if err := run(os.Args); err != nil {
+ log.Fatal(err)
+ }
+}
+
// configuration holds the embed tool's parsed command line. Exported fields
// are accessed by the output templates.
type configuration struct {
	// Label is the Bazel label of the rule being executed; Package and Var
	// name the generated Go package and variable.
	Label, Package, Var string
	// Multi selects map-of-files output instead of a single value.
	Multi bool
	// sources are the positional arguments: input files, or archives when
	// unpack is set.
	sources []string
	// FoundSources records the name of every embedded file in emission
	// order; the footer template pairs index i with variable <Var>_<i>.
	FoundSources []string
	// out is the .go file to generate; workspace is the workspace name used
	// by Key to strip "external/<workspace>/" prefixes.
	out, workspace string
	// flatten keys files by base name; unpack treats sources as archives to
	// expand; strData emits string literals instead of []byte.
	flatten, unpack, strData bool
}
+
+func (c *configuration) Type() string {
+ if c.strData {
+ return "string"
+ } else {
+ return "[]byte"
+ }
+}
+
+func (c *configuration) Key(filename string) string {
+ workspacePrefix := "external/" + c.workspace + "/"
+ key := filepath.FromSlash(strings.TrimPrefix(filename, workspacePrefix))
+ if c.flatten {
+ key = path.Base(filename)
+ }
+ return strconv.Quote(key)
+}
+
// run parses the command line, creates the output file, and writes the
// generated Go source: the header (package clause) followed by either a
// single variable or a map of variables, depending on -multi.
func run(args []string) error {
	c, err := newConfiguration(args)
	if err != nil {
		return err
	}

	f, err := os.Create(c.out)
	if err != nil {
		return err
	}
	defer f.Close()
	w := bufio.NewWriter(f)
	// Deferred calls run last-in-first-out, so Flush runs before f.Close.
	// NOTE(review): errors from Flush and Close are not checked here.
	defer w.Flush()

	if err := headerTpl.Execute(w, c); err != nil {
		return err
	}

	if c.Multi {
		return embedMultipleFiles(c, w)
	}
	return embedSingleFile(c, w)
}
+
// newConfiguration parses args (including the program name at args[0]) into
// a configuration. -label, -package, -var, -out, and -workspace are
// required; positional arguments name the source files. With -unpack, every
// source must be a .zip or .tar archive and -multi must also be set.
func newConfiguration(args []string) (*configuration, error) {
	var c configuration
	flags := flag.NewFlagSet("embed", flag.ExitOnError)
	flags.StringVar(&c.Label, "label", "", "Label of the rule being executed (required)")
	flags.StringVar(&c.Package, "package", "", "Go package name (required)")
	flags.StringVar(&c.Var, "var", "", "Variable name (required)")
	flags.BoolVar(&c.Multi, "multi", false, "Whether the variable is a map or a single value")
	flags.StringVar(&c.out, "out", "", "Go file to generate (required)")
	flags.StringVar(&c.workspace, "workspace", "", "Name of the workspace (required)")
	flags.BoolVar(&c.flatten, "flatten", false, "Whether to access files by base name")
	flags.BoolVar(&c.strData, "string", false, "Whether to store contents as strings")
	flags.BoolVar(&c.unpack, "unpack", false, "Whether to treat files as archives to unpack.")
	// With flag.ExitOnError, Parse exits the process on a bad flag, so its
	// error return does not need to be checked.
	flags.Parse(args[1:])
	if c.Label == "" {
		return nil, errors.New("error: -label option not provided")
	}
	if c.Package == "" {
		return nil, errors.New("error: -package option not provided")
	}
	if c.Var == "" {
		return nil, errors.New("error: -var option not provided")
	}
	if c.out == "" {
		return nil, errors.New("error: -out option not provided")
	}
	if c.workspace == "" {
		return nil, errors.New("error: -workspace option not provided")
	}
	// Remaining positional arguments are the sources.
	c.sources = flags.Args()
	if !c.Multi && len(c.sources) != 1 {
		return nil, fmt.Errorf("error: -multi flag not given, so want exactly one source; got %d", len(c.sources))
	}
	if c.unpack {
		// Unpacking produces many files, which only map output can express.
		if !c.Multi {
			return nil, errors.New("error: -multi flag is required for -unpack mode.")
		}
		for _, src := range c.sources {
			if ext := filepath.Ext(src); ext != ".zip" && ext != ".tar" {
				return nil, fmt.Errorf("error: -unpack flag expects .zip or .tar extension (got %q)", ext)
			}
		}
	}
	return &c, nil
}
+
+func embedSingleFile(c *configuration, w io.Writer) error {
+ dataBegin, dataEnd := "\"", "\"\n"
+ if !c.strData {
+ dataBegin, dataEnd = "[]byte(\"", "\")\n"
+ }
+
+ if _, err := fmt.Fprintf(w, "var %s = %s", c.Var, dataBegin); err != nil {
+ return err
+ }
+ if err := embedFileContents(w, c.sources[0]); err != nil {
+ return err
+ }
+ _, err := fmt.Fprint(w, dataEnd)
+ return err
+}
+
// embedMultipleFiles writes one data variable per embeddable file inside a
// single var block, then emits (via multiFooterTpl) a map from each file's
// key to its variable.
func embedMultipleFiles(c *configuration, w io.Writer) error {
	// Literal delimiters for string or []byte data, depending on -string.
	dataBegin, dataEnd := "\"", "\"\n"
	if !c.strData {
		dataBegin, dataEnd = "[]byte(\"", "\")\n"
	}

	if _, err := fmt.Fprint(w, "var (\n"); err != nil {
		return err
	}
	// The callback writes "<Var>_<i> = <escaped contents>" for file i;
	// findSources records the matching keys in c.FoundSources.
	if err := findSources(c, func(i int, f io.Reader) error {
		if _, err := fmt.Fprintf(w, "\t%s_%d = %s", c.Var, i, dataBegin); err != nil {
			return err
		}
		// Escape the raw bytes so they are valid inside a quoted literal.
		if _, err := io.Copy(&escapeWriter{w}, f); err != nil {
			return err
		}
		if _, err := fmt.Fprint(w, dataEnd); err != nil {
			return err
		}
		return nil
	}); err != nil {
		return err
	}
	if _, err := fmt.Fprint(w, ")\n"); err != nil {
		return err
	}
	if err := multiFooterTpl.Execute(w, c); err != nil {
		return err
	}
	return nil
}
+
// findSources invokes cb once per embeddable file, passing the file's index
// and a reader for its contents. In -unpack mode each source is a .zip or
// .tar archive whose members are enumerated instead. Every file's name is
// appended to c.FoundSources in the order cb sees it.
func findSources(c *configuration, cb func(i int, f io.Reader) error) error {
	if c.unpack {
		for _, filename := range c.sources {
			ext := filepath.Ext(filename)
			if ext == ".zip" {
				if err := findZipSources(c, filename, cb); err != nil {
					return err
				}
			} else if ext == ".tar" {
				if err := findTarSources(c, filename, cb); err != nil {
					return err
				}
			} else {
				// Unreachable: newConfiguration rejects other extensions.
				panic("unknown archive extension: " + ext)
			}
		}
		return nil
	}
	for _, filename := range c.sources {
		f, err := os.Open(filename)
		if err != nil {
			return err
		}
		// Close before checking cb's error so the file is never leaked.
		err = cb(len(c.FoundSources), bufio.NewReader(f))
		f.Close()
		if err != nil {
			return err
		}
		c.FoundSources = append(c.FoundSources, filename)
	}
	return nil
}
+
// findZipSources invokes cb for each entry of the zip archive at filename,
// recording entry names in c.FoundSources.
// NOTE(review): unlike findTarSources, directory entries are not skipped
// here — verify input archives contain only regular files.
func findZipSources(c *configuration, filename string, cb func(i int, f io.Reader) error) error {
	r, err := zip.OpenReader(filename)
	if err != nil {
		return err
	}
	defer r.Close()
	for _, file := range r.File {
		f, err := file.Open()
		if err != nil {
			return err
		}
		// Close the entry before checking cb's error so it is never leaked.
		err = cb(len(c.FoundSources), f)
		f.Close()
		if err != nil {
			return err
		}
		c.FoundSources = append(c.FoundSources, file.Name)
	}
	return nil
}
+
// findTarSources invokes cb for each regular file in the tar archive at
// filename, recording member names in c.FoundSources. Non-regular members
// (directories, links, etc.) are skipped.
func findTarSources(c *configuration, filename string, cb func(i int, f io.Reader) error) error {
	tf, err := os.Open(filename)
	if err != nil {
		return err
	}
	defer tf.Close()
	reader := tar.NewReader(bufio.NewReader(tf))
	for {
		h, err := reader.Next()
		if err == io.EOF {
			// End of archive: normal termination.
			return nil
		}
		if err != nil {
			return err
		}
		if h.Typeflag != tar.TypeReg {
			continue
		}
		// Confine cb to the current member's bytes.
		if err := cb(len(c.FoundSources), &io.LimitedReader{
			R: reader,
			N: h.Size,
		}); err != nil {
			return err
		}
		c.FoundSources = append(c.FoundSources, h.Name)
	}
}
+
+func embedFileContents(w io.Writer, filename string) error {
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ _, err = io.Copy(&escapeWriter{w}, bufio.NewReader(f))
+ return err
+}
+
+type escapeWriter struct {
+ w io.Writer
+}
+
+func (w *escapeWriter) Write(data []byte) (n int, err error) {
+ n = len(data)
+
+ for err == nil && len(data) > 0 {
+ // https://golang.org/ref/spec#String_literals: "Within the quotes, any
+ // character may appear except newline and unescaped double quote. The
+ // text between the quotes forms the value of the literal, with backslash
+ // escapes interpreted as they are in rune literals […]."
+ switch b := data[0]; b {
+ case '\\':
+ _, err = w.w.Write([]byte(`\\`))
+ case '"':
+ _, err = w.w.Write([]byte(`\"`))
+ case '\n':
+ _, err = w.w.Write([]byte(`\n`))
+
+ case '\x00':
+ // https://golang.org/ref/spec#Source_code_representation: "Implementation
+ // restriction: For compatibility with other tools, a compiler may
+ // disallow the NUL character (U+0000) in the source text."
+ _, err = w.w.Write([]byte(`\x00`))
+
+ default:
+ // https://golang.org/ref/spec#Source_code_representation: "Implementation
+ // restriction: […] A byte order mark may be disallowed anywhere else in
+ // the source."
+ const byteOrderMark = '\uFEFF'
+
+ if r, size := utf8.DecodeRune(data); r != utf8.RuneError && r != byteOrderMark {
+ _, err = w.w.Write(data[:size])
+ data = data[size:]
+ continue
+ }
+
+ _, err = fmt.Fprintf(w.w, `\x%02x`, b)
+ }
+ data = data[1:]
+ }
+
+ return n - len(data), err
+}
diff --git a/go/tools/builders/embedcfg.go b/go/tools/builders/embedcfg.go
new file mode 100644
index 00000000..2de4f3b9
--- /dev/null
+++ b/go/tools/builders/embedcfg.go
@@ -0,0 +1,439 @@
+// Copyright 2021 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+)
+
// buildEmbedcfgFile writes an embedcfg file to be read by the compiler.
// An embedcfg file can be used in Go 1.16 or higher if the "embed" package
// is imported and there are one or more //go:embed comments in .go files.
// The embedcfg file maps //go:embed patterns to actual file names.
//
// The embedcfg file will be created in workDir, and its name is returned.
// The caller is responsible for deleting it. If no embedcfg file is needed,
// "" is returned with no error.
//
// All source files listed in goSrcs with //go:embed comments must be in one
// of the directories in embedRootDirs (not in a subdirectory). Embed patterns
// are evaluated relative to the source directory. Embed sources (embedSrcs)
// outside those directories are ignored, since they can't be matched by any
// valid pattern.
func buildEmbedcfgFile(goSrcs []fileInfo, embedSrcs, embedRootDirs []string, workDir string) (string, error) {
	// Check whether this package uses embedding and whether the toolchain
	// supports it (Go 1.16+). With Go 1.15 and lower, we'll try to compile
	// without an embedcfg file, and the compiler will complain the "embed"
	// package is missing.
	var major, minor int
	if n, err := fmt.Sscanf(runtime.Version(), "go%d.%d", &major, &minor); n != 2 || err != nil {
		// Can't parse go version. Maybe it's a development version; fall through.
	} else if major < 1 || (major == 1 && minor < 16) {
		return "", nil
	}
	importEmbed := false
	haveEmbed := false
	for _, src := range goSrcs {
		if len(src.embeds) > 0 {
			haveEmbed = true
			rootDir := findInRootDirs(src.filename, embedRootDirs)
			if rootDir == "" || strings.Contains(src.filename[len(rootDir)+1:], string(filepath.Separator)) {
				// Report an error if a source file appears in a subdirectory of
				// another source directory. In this situation, the same file could be
				// referenced with different paths.
				return "", fmt.Errorf("%s: source files with //go:embed should be in same directory. Allowed directories are:\n\t%s",
					src.filename,
					strings.Join(embedRootDirs, "\n\t"))
			}
		}
		for _, imp := range src.imports {
			if imp.path == "embed" {
				importEmbed = true
			}
		}
	}
	// Both conditions are required: //go:embed comments only take effect in
	// files that also import "embed".
	if !importEmbed || !haveEmbed {
		return "", nil
	}

	// Build a tree of embeddable files. This includes paths listed with
	// -embedsrc. If one of those paths is a directory, the tree includes
	// its files and subdirectories. Paths in the tree are relative to the
	// path in embedRootDirs that contains them.
	root, err := buildEmbedTree(embedSrcs, embedRootDirs)
	if err != nil {
		return "", err
	}

	// Resolve patterns to sets of files. The JSON structure matches what
	// the compiler's -embedcfg flag expects: pattern -> relative paths,
	// and relative path -> absolute file path.
	var embedcfg struct {
		Patterns map[string][]string
		Files    map[string]string
	}
	embedcfg.Patterns = make(map[string][]string)
	embedcfg.Files = make(map[string]string)
	for _, src := range goSrcs {
		for _, embed := range src.embeds {
			matchedPaths, matchedFiles, err := resolveEmbed(embed, root)
			if err != nil {
				return "", err
			}
			embedcfg.Patterns[embed.pattern] = matchedPaths
			for i, rel := range matchedPaths {
				embedcfg.Files[rel] = matchedFiles[i]
			}
		}
	}

	// Write the configuration to a JSON file.
	embedcfgData, err := json.MarshalIndent(&embedcfg, "", "\t")
	if err != nil {
		return "", err
	}
	embedcfgName := filepath.Join(workDir, "embedcfg")
	if err := ioutil.WriteFile(embedcfgName, embedcfgData, 0o666); err != nil {
		return "", err
	}
	return embedcfgName, nil
}
+
// findInRootDirs returns the element of rootDirs that is the parent
// directory (direct or transitive) of the file path p, or "" if none is.
func findInRootDirs(p string, rootDirs []string) string {
	dir := filepath.Dir(p)
	for _, root := range rootDirs {
		if dir == root {
			return root
		}
		// A true subdirectory: root must be followed by a separator and at
		// least one more character.
		if strings.HasPrefix(dir, root) && len(dir) > len(root)+1 && dir[len(root)] == filepath.Separator {
			return root
		}
	}
	return ""
}
+
// embedNode represents an embeddable file or directory in a tree.
type embedNode struct {
	name       string                // base name
	path       string                // absolute file path ("" for implicit parent directories created by add)
	children   map[string]*embedNode // non-nil if and only if the node is a directory
	childNames []string              // sorted child names, populated by buildEmbedTree
}
+
// add inserts file nodes into the tree rooted at f for the slash-separated
// path src, relative to the absolute file path rootDir. If src points to a
// directory, add recursively inserts nodes for its contents. If a node already
// exists (for example, if a source file and a generated file have the same
// name), add leaves the existing node in place.
func (n *embedNode) add(rootDir, src string) error {
	// Create nodes for parents of src.
	parent := n
	parts := strings.Split(src, "/")
	for _, p := range parts[:len(parts)-1] {
		if parent.children[p] == nil {
			// Intermediate directory not seen before: created without a
			// disk path, as a pure container.
			parent.children[p] = &embedNode{
				name:     p,
				children: make(map[string]*embedNode),
			}
		}
		parent = parent.children[p]
	}

	// Create a node for src. If src is a directory, recursively create nodes for
	// its contents. Go embedding ignores symbolic links, but Bazel may use links
	// for generated files and directories, so we follow them here.
	var visit func(*embedNode, string, os.FileInfo) error
	visit = func(parent *embedNode, path string, fi os.FileInfo) error {
		base := filepath.Base(path)
		if parent.children[base] == nil {
			parent.children[base] = &embedNode{name: base, path: path}
		}
		if !fi.IsDir() {
			return nil
		}
		// Directory: give it a children map and visit its entries.
		node := parent.children[base]
		node.children = make(map[string]*embedNode)
		f, err := os.Open(path)
		if err != nil {
			return err
		}
		names, err := f.Readdirnames(0)
		f.Close()
		if err != nil {
			return err
		}
		for _, name := range names {
			cPath := filepath.Join(path, name)
			// os.Stat (not Lstat), so symbolic links are followed.
			cfi, err := os.Stat(cPath)
			if err != nil {
				return err
			}
			if err := visit(node, cPath, cfi); err != nil {
				return err
			}
		}
		return nil
	}

	path := filepath.Join(rootDir, src)
	fi, err := os.Stat(path)
	if err != nil {
		return err
	}
	return visit(parent, path, fi)
}
+
// isDir reports whether n represents a directory. Directory nodes always
// carry a non-nil (possibly empty) children map.
func (n *embedNode) isDir() bool {
	return n.children != nil
}
+
+// get returns a tree node, given a slash-separated path relative to the
+// receiver. get returns nil if no node exists with that path.
+func (n *embedNode) get(path string) *embedNode {
+ if path == "." || path == "" {
+ return n
+ }
+ for _, part := range strings.Split(path, "/") {
+ n = n.children[part]
+ if n == nil {
+ return nil
+ }
+ }
+ return n
+}
+
// errSkip, when returned from a walk callback, prunes the current node's
// subtree without stopping the walk.
var errSkip = errors.New("skip")

// walk calls fn on each node in the tree rooted at n in depth-first pre-order.
// fn receives the node's slash-separated path relative to n ("" for n
// itself). A return of errSkip skips the node's children; any other non-nil
// error aborts the walk and is returned. errSkip itself is never returned
// to walk's caller.
func (n *embedNode) walk(fn func(rel string, n *embedNode) error) error {
	var visit func(string, *embedNode) error
	visit = func(rel string, node *embedNode) error {
		err := fn(rel, node)
		if err == errSkip {
			return nil
		} else if err != nil {
			return err
		}
		// childNames is sorted (see buildEmbedTree), making traversal
		// order deterministic.
		for _, name := range node.childNames {
			if err := visit(path.Join(rel, name), node.children[name]); err != nil && err != errSkip {
				return err
			}
		}
		return nil
	}
	err := visit("", n)
	if err == errSkip {
		return nil
	}
	return err
}
+
// buildEmbedTree constructs a logical directory tree of embeddable files.
// The tree may contain a mix of static and generated files from multiple
// root directories. Directory artifacts are recursively expanded. Paths in
// the tree are relative to the root directory that contains them.
func buildEmbedTree(embedSrcs, embedRootDirs []string) (root *embedNode, err error) {
	defer func() {
		if err != nil {
			err = fmt.Errorf("building tree of embeddable files in directories %s: %v", strings.Join(embedRootDirs, string(filepath.ListSeparator)), err)
		}
	}()

	// Add each path to the tree.
	root = &embedNode{name: "", children: make(map[string]*embedNode)}
	for _, src := range embedSrcs {
		rootDir := findInRootDirs(src, embedRootDirs)
		if rootDir == "" {
			// Embedded path cannot be matched by any valid pattern. Ignore.
			continue
		}
		// Store the path relative to its root, slash-separated, to match
		// //go:embed pattern semantics.
		rel := filepath.ToSlash(src[len(rootDir)+1:])
		if err := root.add(rootDir, rel); err != nil {
			return nil, err
		}
	}

	// Sort children in each directory node so walks are deterministic.
	var visit func(*embedNode)
	visit = func(node *embedNode) {
		node.childNames = make([]string, 0, len(node.children))
		for name, child := range node.children {
			node.childNames = append(node.childNames, name)
			visit(child)
		}
		sort.Strings(node.childNames)
	}
	visit(root)

	return root, nil
}
+
+// resolveEmbed matches a //go:embed pattern in a source file to a set of
+// embeddable files in the given tree.
+func resolveEmbed(embed fileEmbed, root *embedNode) (matchedPaths, matchedFiles []string, err error) {
+ defer func() {
+ if err != nil {
+ err = fmt.Errorf("%v: could not embed %s: %v", embed.pos, embed.pattern, err)
+ }
+ }()
+
+ // Remove optional "all:" prefix from pattern and set matchAll flag if present.
+ // See https://pkg.go.dev/embed#hdr-Directives for details.
+ pattern := embed.pattern
+ var matchAll bool
+ if strings.HasPrefix(pattern, "all:") {
+ matchAll = true
+ pattern = pattern[4:]
+ }
+
+ // Check that the pattern has valid syntax.
+ if _, err := path.Match(pattern, ""); err != nil || !validEmbedPattern(pattern) {
+ return nil, nil, fmt.Errorf("invalid pattern syntax")
+ }
+
+ // Search for matching files.
+ err = root.walk(func(matchRel string, matchNode *embedNode) error {
+ if ok, _ := path.Match(pattern, matchRel); !ok {
+ // Non-matching file or directory.
+ return nil
+ }
+
+ // TODO: Should check that directories along path do not begin a new module
+ // (do not contain a go.mod).
+ // https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/load/pkg.go;l=2158;drc=261fe25c83a94fc3defe064baed3944cd3d16959
+ for dir := matchRel; len(dir) > 1; dir = filepath.Dir(dir) {
+ if base := path.Base(matchRel); isBadEmbedName(base) {
+ what := "file"
+ if matchNode.isDir() {
+ what = "directory"
+ }
+ if dir == matchRel {
+ return fmt.Errorf("cannot embed %s %s: invalid name %s", what, matchRel, base)
+ } else {
+ return fmt.Errorf("cannot embed %s %s: in invalid directory %s", what, matchRel, base)
+ }
+ }
+ }
+
+ if !matchNode.isDir() {
+ // Matching file. Add to list.
+ matchedPaths = append(matchedPaths, matchRel)
+ matchedFiles = append(matchedFiles, matchNode.path)
+ return nil
+ }
+
+ // Matching directory. Recursively add all files in subdirectories.
+ // Don't add hidden files or directories (starting with "." or "_"),
+ // unless "all:" prefix was set.
+ // See golang/go#42328.
+ matchTreeErr := matchNode.walk(func(childRel string, childNode *embedNode) error {
+ // TODO: Should check that directories along path do not begin a new module
+ // https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/load/pkg.go;l=2158;drc=261fe25c83a94fc3defe064baed3944cd3d16959
+ if childRel != "" {
+ base := path.Base(childRel)
+ if isBadEmbedName(base) || (!matchAll && (strings.HasPrefix(base, ".") || strings.HasPrefix(base, "_"))) {
+ if childNode.isDir() {
+ return errSkip
+ }
+ return nil
+ }
+ }
+ if !childNode.isDir() {
+ matchedPaths = append(matchedPaths, path.Join(matchRel, childRel))
+ matchedFiles = append(matchedFiles, childNode.path)
+ }
+ return nil
+ })
+ if matchTreeErr != nil {
+ return matchTreeErr
+ }
+ return errSkip
+ })
+ if err != nil && err != errSkip {
+ return nil, nil, err
+ }
+ if len(matchedPaths) == 0 {
+ return nil, nil, fmt.Errorf("no matching files found")
+ }
+ return matchedPaths, matchedFiles, nil
+}
+
// validEmbedPattern reports whether pattern may appear in a //go:embed
// directive: any valid unrooted slash path except ".".
func validEmbedPattern(pattern string) bool {
	return pattern != "." && fsValidPath(pattern)
}

// fsValidPath reports whether the given path name is valid for use in a call
// to Open: an unrooted, slash-separated sequence of non-empty elements, none
// of which is "." or "..", with no backslashes anywhere. The root directory
// is named "." as a special case.
//
// Equivalent to io/fs.ValidPath in Go 1.16beta1.
func fsValidPath(name string) bool {
	if name == "." {
		// Special case: the root itself.
		return true
	}
	if strings.Contains(name, `\`) {
		// Backslashes must not appear in path names, even on Windows.
		return false
	}
	for _, elem := range strings.Split(name, "/") {
		if elem == "" || elem == "." || elem == ".." {
			return false
		}
	}
	return true
}
+
+// isBadEmbedName reports whether name is the base name of a file that
+// can't or won't be included in modules and therefore shouldn't be treated
+// as existing for embedding.
+//
+// TODO: This should use the equivalent of golang.org/x/mod/module.CheckFilePath instead of fsValidPath.
+// https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/load/pkg.go;l=2200;drc=261fe25c83a94fc3defe064baed3944cd3d16959
+func isBadEmbedName(name string) bool {
+ if !fsValidPath(name) {
+ return true
+ }
+ switch name {
+ // Empty string should be impossible but make it bad.
+ case "":
+ return true
+ // Version control directories won't be present in module.
+ case ".bzr", ".hg", ".git", ".svn":
+ return true
+ }
+ return false
+}
diff --git a/go/tools/builders/env.go b/go/tools/builders/env.go
new file mode 100644
index 00000000..177617f8
--- /dev/null
+++ b/go/tools/builders/env.go
@@ -0,0 +1,474 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
var (
	// cgoEnvVars is the list of all cgo environment variables.
	cgoEnvVars = []string{"CGO_CFLAGS", "CGO_CXXFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"}
	// cgoAbsEnvFlags are all the flags within cgoEnvVars whose values must
	// be converted to absolute paths (see absEnv).
	cgoAbsEnvFlags = []string{"-I", "-L", "-isysroot", "-isystem", "-iquote", "-include", "-gcc-toolchain", "--sysroot", "-resource-dir", "-fsanitize-blacklist", "-fsanitize-ignorelist"}
)
+
// env holds a small amount of Go environment and toolchain information
// which is common to multiple builders. Most Bazel-agnostic build information
// is collected in go/build.Default though.
//
// See ./README.rst for more information about handling arguments and
// environment variables.
type env struct {
	// sdk is the path to the Go SDK, which contains tools for the host
	// platform. This may be different than GOROOT.
	sdk string

	// installSuffix is the name of the directory below GOROOT/pkg that contains
	// the .a files for the standard library we should build against.
	// For example, linux_amd64_race.
	installSuffix string

	// verbose indicates whether subprocess command lines should be printed.
	verbose bool

	// workDirPath is a temporary work directory. It is created lazily
	// by workDir.
	workDirPath string

	// shouldPreserveWorkDir, set by the -work flag, keeps the temporary
	// work directory around after the build for debugging.
	shouldPreserveWorkDir bool
}
+
+// envFlags registers flags common to multiple builders and returns an env
+// configured with those flags.
+func envFlags(flags *flag.FlagSet) *env {
+ env := &env{}
+ flags.StringVar(&env.sdk, "sdk", "", "Path to the Go SDK.")
+ flags.Var(&tagFlag{}, "tags", "List of build tags considered true.")
+ flags.StringVar(&env.installSuffix, "installsuffix", "", "Standard library under GOROOT/pkg")
+ flags.BoolVar(&env.verbose, "v", false, "Whether subprocess command lines should be printed")
+ flags.BoolVar(&env.shouldPreserveWorkDir, "work", false, "if true, the temporary work directory will be preserved")
+ return env
+}
+
+// checkFlags checks whether env flags were set to valid values. checkFlags
+// should be called after parsing flags.
+func (e *env) checkFlags() error {
+ if e.sdk == "" {
+ return errors.New("-sdk was not set")
+ }
+ return nil
+}
+
// workDir returns a path to a temporary work directory. The same directory
// is returned on multiple calls. The caller is responsible for cleaning
// up the work directory by calling cleanup (a no-op when -work was given).
func (e *env) workDir() (path string, cleanup func(), err error) {
	if e.workDirPath != "" {
		// Already created; nothing extra for this caller to clean up.
		return e.workDirPath, func() {}, nil
	}
	// Keep the stem "rules_go_work" in sync with reproducible_binary_test.go.
	e.workDirPath, err = ioutil.TempDir("", "rules_go_work-")
	if err != nil {
		return "", func() {}, err
	}
	if e.verbose {
		log.Printf("WORK=%s\n", e.workDirPath)
	}
	if e.shouldPreserveWorkDir {
		// -work: keep the directory so it can be inspected after the build.
		cleanup = func() {}
	} else {
		cleanup = func() { os.RemoveAll(e.workDirPath) }
	}
	return e.workDirPath, cleanup, nil
}
+
// goTool returns a slice containing the path to an executable at
// $GOROOT/pkg/tool/$GOOS_$GOARCH/$tool and additional arguments.
// (The previous comment omitted the "tool" path segment.)
func (e *env) goTool(tool string, args ...string) []string {
	platform := fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH)
	toolPath := filepath.Join(e.sdk, "pkg", "tool", platform, tool)
	if runtime.GOOS == "windows" {
		toolPath += ".exe"
	}
	return append([]string{toolPath}, args...)
}
+
+// goCmd returns a slice containing the path to the go executable
+// and additional arguments.
+func (e *env) goCmd(cmd string, args ...string) []string {
+ exe := filepath.Join(e.sdk, "bin", "go")
+ if runtime.GOOS == "windows" {
+ exe += ".exe"
+ }
+ return append([]string{exe, cmd}, args...)
+}
+
// runCommand executes a subprocess that inherits the environment from this
// process. Its stdout and stderr are captured together and then written to
// this process's stderr, with absolute paths under the current working
// directory rewritten to relative ones.
func (e *env) runCommand(args []string) error {
	cmd := exec.Command(args[0], args[1:]...)
	// Redirecting stdout to stderr. This mirrors behavior in the go command:
	// https://go.googlesource.com/go/+/refs/tags/go1.15.2/src/cmd/go/internal/work/exec.go#1958
	buf := &bytes.Buffer{}
	cmd.Stdout = buf
	cmd.Stderr = buf
	err := runAndLogCommand(cmd, e.verbose)
	// Emit output even (especially) when the command failed.
	os.Stderr.Write(relativizePaths(buf.Bytes()))
	return err
}
+
+// runCommandToFile executes a subprocess and writes stdout/stderr to the given
+// writers.
+func (e *env) runCommandToFile(out, err io.Writer, args []string) error {
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Stdout = out
+ cmd.Stderr = err
+ return runAndLogCommand(cmd, e.verbose)
+}
+
+func absEnv(envNameList []string, argList []string) error {
+ for _, envName := range envNameList {
+ splitedEnv := strings.Fields(os.Getenv(envName))
+ absArgs(splitedEnv, argList)
+ if err := os.Setenv(envName, strings.Join(splitedEnv, " ")); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
// runAndLogCommand runs cmd, first echoing the command line to stderr when
// verbose is set. Long argument lists are moved into response files where
// the tool supports them (see passLongArgsInResponseFiles); the returned
// cleanup removes those files after the command finishes.
func runAndLogCommand(cmd *exec.Cmd, verbose bool) error {
	if verbose {
		fmt.Fprintln(os.Stderr, formatCommand(cmd))
	}
	cleanup := passLongArgsInResponseFiles(cmd)
	defer cleanup()
	if err := cmd.Run(); err != nil {
		return fmt.Errorf("error running subcommand %s: %v", cmd.Path, err)
	}
	return nil
}
+
+// expandParamsFiles looks for arguments in args of the form
+// "-param=filename". When it finds these arguments it reads the file "filename"
+// and replaces the argument with its content.
+// It returns the expanded arguments as well as a bool that is true if any param
+// files have been passed.
+func expandParamsFiles(args []string) ([]string, bool, error) {
+ var paramsIndices []int
+ for i, arg := range args {
+ if strings.HasPrefix(arg, "-param=") {
+ paramsIndices = append(paramsIndices, i)
+ }
+ }
+ if len(paramsIndices) == 0 {
+ return args, false, nil
+ }
+ var expandedArgs []string
+ last := 0
+ for _, pi := range paramsIndices {
+ expandedArgs = append(expandedArgs, args[last:pi]...)
+ last = pi + 1
+
+ fileName := args[pi][len("-param="):]
+ fileArgs, err := readParamsFile(fileName)
+ if err != nil {
+ return nil, true, err
+ }
+ expandedArgs = append(expandedArgs, fileArgs...)
+ }
+ expandedArgs = append(expandedArgs, args[last:]...)
+ return expandedArgs, true, nil
+}
+
// readParamsFile parses a Bazel params file in "shell" format. The file
// should contain one argument per line. Arguments may be quoted with single
// quotes. All characters within quoted strings are interpreted literally
// including newlines and excepting single quotes. Characters outside quoted
// strings may be escaped with a backslash.
func readParamsFile(name string) ([]string, error) {
	data, err := ioutil.ReadFile(name)
	if err != nil {
		return nil, err
	}

	var args []string
	var arg []byte
	quote := false  // inside a single-quoted region
	escape := false // previous character was an unquoted backslash
	for p := 0; p < len(data); p++ {
		b := data[p]
		switch {
		case escape:
			// Escaped character is taken literally.
			arg = append(arg, b)
			escape = false

		case b == '\'':
			quote = !quote

		case !quote && b == '\\':
			escape = true

		case !quote && b == '\n':
			// Unquoted newline terminates the current argument.
			args = append(args, string(arg))
			arg = arg[:0]

		default:
			arg = append(arg, b)
		}
	}
	if quote {
		return nil, fmt.Errorf("unterminated quote")
	}
	if escape {
		return nil, fmt.Errorf("unterminated escape")
	}
	if len(arg) > 0 {
		// Final argument without a trailing newline.
		args = append(args, string(arg))
	}
	return args, nil
}
+
+// writeParamsFile formats a list of arguments in Bazel's "shell" format and writes
+// it to a file.
+func writeParamsFile(path string, args []string) error {
+ buf := new(bytes.Buffer)
+ for _, arg := range args {
+ if !strings.ContainsAny(arg, "'\n\\") {
+ fmt.Fprintln(buf, arg)
+ continue
+ }
+ buf.WriteByte('\'')
+ for _, r := range arg {
+ if r == '\'' {
+ buf.WriteString(`'\''`)
+ } else {
+ buf.WriteRune(r)
+ }
+ }
+ buf.WriteString("'\n")
+ }
+ return ioutil.WriteFile(path, buf.Bytes(), 0666)
+}
+
// splitArgs splits a list of command line arguments into two parts: arguments
// that should be interpreted by the builder (before "--"), and arguments
// that should be passed through to the underlying tool (after "--").
// If there is no "--", all arguments belong to the builder.
func splitArgs(args []string) (builderArgs []string, toolArgs []string) {
	for i, arg := range args {
		if arg != "--" {
			continue
		}
		return args[:i], args[i+1:]
	}
	return args, nil
}
+
// abs returns the absolute representation of path. Some tools/APIs require
// absolute paths to work correctly. Most notably, golang on Windows cannot
// handle relative paths to files whose absolute path is > ~250 chars, while
// it can handle absolute paths. See http://goo.gl/eqeWjm.
//
// Note that strings that begin with "__BAZEL_" are not absolutized. These are
// used on macOS for paths that the compiler wrapper (wrapped_clang) is
// supposed to know about.
func abs(path string) string {
	if strings.HasPrefix(path, "__BAZEL_") {
		return path
	}
	a, err := filepath.Abs(path)
	if err != nil {
		// Fall back to the original path on failure.
		return path
	}
	return a
}

// absArgs applies abs to strings that appear in args. Only values of the
// options named by flags are modified; a flag's value may be attached
// ("-Ifoo"), joined with "=" ("-I=foo"), or given as the following argument.
func absArgs(args []string, flags []string) {
	absNext := false
	for i, arg := range args {
		if absNext {
			// Previous argument was a bare flag; this one is its value.
			args[i] = abs(arg)
			absNext = false
			continue
		}
		for _, f := range flags {
			if !strings.HasPrefix(arg, f) {
				continue
			}
			value := arg[len(f):]
			switch {
			case value == "":
				// Flag and value are separate arguments.
				absNext = true
			case value[0] == '=':
				args[i] = f + "=" + abs(value[1:])
			default:
				args[i] = f + abs(value)
			}
			break
		}
	}
}
+
+// relativizePaths converts absolute paths found in the given output string to
+// relative, if they are within the working directory.
+func relativizePaths(output []byte) []byte {
+ dir, err := os.Getwd()
+ if dir == "" || err != nil {
+ return output
+ }
+ dirBytes := make([]byte, len(dir), len(dir)+1)
+ copy(dirBytes, dir)
+ if bytes.HasSuffix(dirBytes, []byte{filepath.Separator}) {
+ return bytes.ReplaceAll(output, dirBytes, nil)
+ }
+
+ // This is the common case.
+ // Replace "$CWD/" with "" and "$CWD" with "."
+ dirBytes = append(dirBytes, filepath.Separator)
+ output = bytes.ReplaceAll(output, dirBytes, nil)
+ dirBytes = dirBytes[:len(dirBytes)-1]
+ return bytes.ReplaceAll(output, dirBytes, []byte{'.'})
+}
+
+// formatCommand formats cmd as a string that can be pasted into a shell.
+// Spaces in environment variables and arguments are escaped as needed.
+func formatCommand(cmd *exec.Cmd) string {
+ quoteIfNeeded := func(s string) string {
+ if strings.IndexByte(s, ' ') < 0 {
+ return s
+ }
+ return strconv.Quote(s)
+ }
+ quoteEnvIfNeeded := func(s string) string {
+ eq := strings.IndexByte(s, '=')
+ if eq < 0 {
+ return s
+ }
+ key, value := s[:eq], s[eq+1:]
+ if strings.IndexByte(value, ' ') < 0 {
+ return s
+ }
+ return fmt.Sprintf("%s=%s", key, strconv.Quote(value))
+ }
+ var w bytes.Buffer
+ environ := cmd.Env
+ if environ == nil {
+ environ = os.Environ()
+ }
+ for _, e := range environ {
+ fmt.Fprintf(&w, "%s \\\n", quoteEnvIfNeeded(e))
+ }
+
+ sep := ""
+ for _, arg := range cmd.Args {
+ fmt.Fprintf(&w, "%s%s", sep, quoteIfNeeded(arg))
+ sep = " "
+ }
+ return w.String()
+}
+
// passLongArgsInResponseFiles modifies cmd such that, for
// certain programs, long arguments are passed in "response files", a
// file on disk with the arguments, with one arg per line. An actual
// argument starting with '@' means that the rest of the argument is
// a filename of arguments to expand.
//
// The returned cleanup function removes the temporary response file (it is
// a no-op when no response file was needed) and must be called after the
// command finishes.
//
// See https://github.com/golang/go/issues/18468 (Windows) and
// https://github.com/golang/go/issues/37768 (Darwin).
func passLongArgsInResponseFiles(cmd *exec.Cmd) (cleanup func()) {
	cleanup = func() {} // no cleanup by default
	var argLen int
	for _, arg := range cmd.Args {
		argLen += len(arg)
	}
	// If we're not approaching 32KB of args, just pass args normally.
	// (use 30KB instead to be conservative; not sure how accounting is done)
	if !useResponseFile(cmd.Path, argLen) {
		return
	}
	tf, err := ioutil.TempFile("", "args")
	if err != nil {
		log.Fatalf("error writing long arguments to response file: %v", err)
	}
	cleanup = func() { os.Remove(tf.Name()) }
	// One argument per line; cmd.Args[0] (the program itself) stays on the
	// real command line below.
	var buf bytes.Buffer
	for _, arg := range cmd.Args[1:] {
		fmt.Fprintf(&buf, "%s\n", arg)
	}
	if _, err := tf.Write(buf.Bytes()); err != nil {
		tf.Close()
		cleanup()
		log.Fatalf("error writing long arguments to response file: %v", err)
	}
	if err := tf.Close(); err != nil {
		cleanup()
		log.Fatalf("error writing long arguments to response file: %v", err)
	}
	// Replace the entire argument list with a single @file reference,
	// which objabi.Flagparse expands.
	cmd.Args = []string{cmd.Args[0], "@" + tf.Name()}
	return cleanup
}
+
// quotePathIfNeeded quotes path if it contains whitespace and isn't already
// quoted. Use this for paths that will be parsed by
// https://github.com/golang/go/blob/06264b740e3bfe619f5e90359d8f0d521bd47806/src/cmd/internal/quoted/quoted.go#L25
func quotePathIfNeeded(path string) string {
	if len(path) > 0 && (path[0] == '"' || path[0] == '\'') {
		// Assume already quoted.
		return path
	}
	// Same whitespace set as cmd/internal/quoted:
	// https://github.com/golang/go/blob/06264b740e3bfe619f5e90359d8f0d521bd47806/src/cmd/internal/quoted/quoted.go#L16
	if !strings.ContainsAny(path, " \t\n\r") {
		// Does not require quoting.
		return path
	}
	// Escaping quotes is not supported, so we can assume path doesn't
	// contain any quotes.
	return "'" + path + "'"
}
+
// useResponseFile reports whether the tool at path should receive its
// arguments via a response file. Only "compile" and "link" understand
// response files (they parse flags with objabi.Flagparse), and only long
// command lines need one.
func useResponseFile(path string, argLen int) bool {
	prog := strings.TrimSuffix(filepath.Base(path), ".exe")
	if prog != "compile" && prog != "link" {
		// TODO: do we need more commands? asm? cgo? For now, no.
		return false
	}
	// Windows has a limit of 32 KB arguments. To be conservative and not
	// worry about whether that includes spaces or not, just use 30 KB.
	// Darwin's limit is less clear. The OS claims 256KB, but we've seen
	// failures with arglen as small as 50KB.
	return argLen > 30<<10
}
diff --git a/go/tools/builders/filter.go b/go/tools/builders/filter.go
new file mode 100644
index 00000000..fbb0f2ac
--- /dev/null
+++ b/go/tools/builders/filter.go
@@ -0,0 +1,168 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/token"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
// fileInfo records what the builder learns about one source file: its
// extension class, whether build constraints select it, and, for Go files,
// the parse results gathered by readGoInfo.
type fileInfo struct {
	filename string
	ext      ext // extension class (Go, C, C++, ...)
	// header/fset/parsed/parseErr/pkg/imports/embeds are populated by
	// readGoInfo for Go files — see read.go for details.
	header   []byte
	fset     *token.FileSet
	parsed   *ast.File
	parseErr error
	matched  bool // true if build constraints select this file
	isCgo    bool // true if the file imports "C"
	pkg      string
	imports  []fileImport
	embeds   []fileEmbed
}

// ext classifies a source file by its extension.
type ext int

const (
	goExt     ext = iota // .go
	cExt                 // .c
	cxxExt               // .cc, .cxx, .cpp, .C
	objcExt              // .m
	objcxxExt            // .mm
	sExt                 // .s
	hExt                 // .h, .hh, .hpp, .hxx
)

// fileImport records a single import in a Go file.
type fileImport struct {
	path string
	pos  token.Pos
	doc  *ast.CommentGroup
}

// fileEmbed records a single //go:embed pattern in a Go file.
type fileEmbed struct {
	pattern string
	pos     token.Position
}

// archiveSrcs collects the source files that go into one compiled archive,
// grouped by language.
type archiveSrcs struct {
	goSrcs, cSrcs, cxxSrcs, objcSrcs, objcxxSrcs, sSrcs, hSrcs []fileInfo
}
+
+// filterAndSplitFiles filters files using build constraints and collates
+// them by extension.
+func filterAndSplitFiles(fileNames []string) (archiveSrcs, error) {
+ var res archiveSrcs
+ for _, s := range fileNames {
+ src, err := readFileInfo(build.Default, s)
+ if err != nil {
+ return archiveSrcs{}, err
+ }
+ if !src.matched {
+ continue
+ }
+ var srcs *[]fileInfo
+ switch src.ext {
+ case goExt:
+ srcs = &res.goSrcs
+ case cExt:
+ srcs = &res.cSrcs
+ case cxxExt:
+ srcs = &res.cxxSrcs
+ case objcExt:
+ srcs = &res.objcSrcs
+ case objcxxExt:
+ srcs = &res.objcxxSrcs
+ case sExt:
+ srcs = &res.sSrcs
+ case hExt:
+ srcs = &res.hSrcs
+ }
+ *srcs = append(*srcs, src)
+ }
+ return res, nil
+}
+
// readFileInfo applies build constraints to an input file and returns a
// fileInfo describing it; fi.matched reports whether the file should be
// compiled. Go files are additionally scanned for their imports and
// //go:embed patterns.
func readFileInfo(bctx build.Context, input string) (fileInfo, error) {
	fi := fileInfo{filename: input}
	// Classify by extension. ".C" (upper case) is C++ by convention; every
	// other extension is matched case-insensitively.
	if ext := filepath.Ext(input); ext == ".C" {
		fi.ext = cxxExt
	} else {
		switch strings.ToLower(ext) {
		case ".go":
			fi.ext = goExt
		case ".c":
			fi.ext = cExt
		case ".cc", ".cxx", ".cpp":
			fi.ext = cxxExt
		case ".m":
			fi.ext = objcExt
		case ".mm":
			fi.ext = objcxxExt
		case ".s":
			fi.ext = sExt
		case ".h", ".hh", ".hpp", ".hxx":
			fi.ext = hExt
		default:
			return fileInfo{}, fmt.Errorf("unrecognized file extension: %s", ext)
		}
	}

	dir, base := filepath.Split(input)
	// Check build constraints on non-cgo files.
	// Skip cgo files, since they get rejected (due to leading '_') and won't
	// have any build constraints anyway.
	if strings.HasPrefix(base, "_cgo") {
		fi.matched = true
	} else {
		match, err := bctx.MatchFile(dir, base)
		if err != nil {
			return fi, err
		}
		fi.matched = match
	}
	// If it's not a go file, there's nothing more to read.
	if fi.ext != goExt {
		return fi, nil
	}

	// Scan the file for imports and embeds.
	f, err := os.Open(input)
	if err != nil {
		return fileInfo{}, err
	}
	defer f.Close()
	fi.fset = token.NewFileSet()
	if err := readGoInfo(f, &fi); err != nil {
		return fileInfo{}, err
	}

	// Exclude cgo files if cgo is not enabled: a file importing "C" only
	// matches when bctx.CgoEnabled is set.
	for _, imp := range fi.imports {
		if imp.path == "C" {
			fi.isCgo = true
			break
		}
	}
	fi.matched = fi.matched && (bctx.CgoEnabled || !fi.isCgo)

	return fi, nil
}
diff --git a/go/tools/builders/filter_buildid.go b/go/tools/builders/filter_buildid.go
new file mode 100644
index 00000000..893a0f6a
--- /dev/null
+++ b/go/tools/builders/filter_buildid.go
@@ -0,0 +1,44 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "os"
+ "os/exec"
+ "runtime"
+ "syscall"
+)
+
// filterBuildID re-executes the tool named on the command line with any
// "-buildid <value>" pairs removed. It is intended to be used with
// -toolexec.
func filterBuildID(args []string) error {
	filtered := make([]string, 0, len(args))
	for i := 0; i < len(args); i++ {
		if args[i] == "-buildid" {
			// Skip the flag and its value.
			i++
			continue
		}
		filtered = append(filtered, args[i])
	}
	if runtime.GOOS == "windows" {
		// Windows has no exec(2); run the tool as a child process and
		// forward its output.
		cmd := exec.Command(filtered[0], filtered[1:]...)
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		return cmd.Run()
	}
	// Elsewhere, replace this process with the tool.
	return syscall.Exec(filtered[0], filtered, os.Environ())
}
diff --git a/go/tools/builders/filter_test.go b/go/tools/builders/filter_test.go
new file mode 100644
index 00000000..61ec385b
--- /dev/null
+++ b/go/tools/builders/filter_test.go
@@ -0,0 +1,136 @@
+/* Copyright 2016 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package main
+
+import (
+ "go/build"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "testing"
+)
+
// testfiles maps file names to contents for a synthetic package used to
// exercise build-constraint filtering: a cgo file, tag-constrained files,
// an ignored file, a plain file, and a GOOS-suffixed file.
var testfiles = map[string]string{
	"cgo.go": `
//+build cgo

package tags

/*
#include <stdio.h>
#include <stdlib.h>

void myprint(char* s) {
	printf("%s", s);
}
*/

import "C"

func main() {
	C.myprint("hello")
}
`,
	"extra.go": `
//+build a,b b,c

package tags
`,
	"ignore.go": `
//+build ignore

package tags
`,
	"normal.go": `
package tags
`,
	"on_darwin.go": `
package tags
`,
	"system.go": `
//+build arm,darwin linux,amd64

package tags
`,
}
+
// TestTags verifies that filterAndSplitFiles honors GOOS/GOARCH, build
// tags, and cgo settings: it writes the testfiles fixture into a temp
// directory and filters it under several faked build contexts.
func TestTags(t *testing.T) {
	tempdir, err := ioutil.TempDir("", "goruletest")
	if err != nil {
		t.Fatalf("Error creating temporary directory: %v", err)
	}
	defer os.RemoveAll(tempdir)

	input := []string{}
	for k, v := range testfiles {
		p := filepath.Join(tempdir, k)
		if err := ioutil.WriteFile(p, []byte(v), 0644); err != nil {
			t.Fatalf("WriteFile(%s): %v", p, err)
		}
		input = append(input, k)
	}
	// Map iteration order is random; sort so expectations are stable.
	sort.Strings(input)

	wd, err := os.Getwd()
	if err != nil {
		t.Fatalf("Getwd: %v", err)
	}

	// input holds names relative to tempdir, so run from there.
	err = os.Chdir(tempdir)
	if err != nil {
		t.Fatalf("Chdir(%s): %v", tempdir, err)
	}
	defer os.Chdir(wd)

	bctx := build.Default
	// Always fake the os and arch
	bctx.GOOS = "darwin"
	bctx.GOARCH = "amd64"
	bctx.CgoEnabled = false
	runTest(t, bctx, input, []string{"normal.go", "on_darwin.go"})
	bctx.GOOS = "linux"
	runTest(t, bctx, input, []string{"normal.go", "system.go"})
	bctx.GOARCH = "arm"
	runTest(t, bctx, input, []string{"normal.go"})
	bctx.BuildTags = []string{"a", "b"}
	runTest(t, bctx, input, []string{"extra.go", "normal.go"})
	bctx.BuildTags = []string{"a", "c"}
	runTest(t, bctx, input, []string{"normal.go"})
	bctx.CgoEnabled = true
	runTest(t, bctx, input, []string{"cgo.go", "normal.go"})
}
+
+func runTest(t *testing.T, bctx build.Context, inputs []string, expect []string) {
+ build.Default = bctx
+ got, err := filterAndSplitFiles(inputs)
+ if err != nil {
+ t.Errorf("filter %v,%v,%v,%v failed: %v", bctx.GOOS, bctx.GOARCH, bctx.CgoEnabled, bctx.BuildTags, err)
+ }
+ gotGoFilenames := make([]string, len(got.goSrcs))
+ for i, src := range got.goSrcs {
+ gotGoFilenames[i] = src.filename
+ }
+ if !reflect.DeepEqual(expect, gotGoFilenames) {
+ t.Errorf("filter %v,%v,%v,%v: expect %v got %v", bctx.GOOS, bctx.GOARCH, bctx.CgoEnabled, bctx.BuildTags, expect, got)
+ }
+}
+
// abs is a dummy env.go abs to avoid depending on env.go and flags.go.
// Returning p unchanged is sufficient here because this test only compares
// relative file names.
func abs(p string) string {
	return p
}
diff --git a/go/tools/builders/flags.go b/go/tools/builders/flags.go
new file mode 100644
index 00000000..e3604cbd
--- /dev/null
+++ b/go/tools/builders/flags.go
@@ -0,0 +1,135 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "strings"
+ "unicode"
+)
+
// multiFlag collects the values of a repeated string flag into a slice.
// It implements flag.Value.
type multiFlag []string

// String renders the collected values for flag's usage output.
func (m *multiFlag) String() string {
	if m != nil && len(*m) > 0 {
		return fmt.Sprint(*m)
	}
	return ""
}

// Set appends one occurrence of the flag's value.
func (m *multiFlag) Set(v string) error {
	*m = append(*m, v)
	return nil
}
+
// quoteMultiFlag allows repeated string flags to be collected into a slice.
// Flags are split on spaces. Single quotes are removed, and spaces within
// quotes are removed. Literal quotes may be escaped with a backslash.
// It implements flag.Value.
type quoteMultiFlag []string

// String renders the collected values for flag's usage output.
func (m *quoteMultiFlag) String() string {
	if m == nil || len(*m) == 0 {
		return ""
	}
	return fmt.Sprint(*m)
}

// Set splits the flag's value with splitQuoted and appends the resulting
// words; a malformed quoted string makes the flag parse fail.
func (m *quoteMultiFlag) Set(v string) error {
	fs, err := splitQuoted(v)
	if err != nil {
		return err
	}
	*m = append(*m, fs...)
	return nil
}
+
// splitQuoted splits the string s around each instance of one or more consecutive
// white space characters while taking into account quotes and escaping, and
// returns an array of substrings of s or an empty list if s contains only white space.
// Single quotes and double quotes are recognized to prevent splitting within the
// quoted region, and are removed from the resulting substrings. If a quote in s
// isn't closed err will be set and r will have the unclosed argument as the
// last element. The backslash is used for escaping.
//
// For example, the following string:
//
//	a b:"c d" 'e''f' "g\""
//
// Would be parsed as:
//
//	[]string{"a", "b:c d", "ef", `g"`}
//
// Copied from go/build.splitQuoted. Also in Gazelle (where tests are).
// Kept byte-for-byte identical to the upstream copy on purpose.
func splitQuoted(s string) (r []string, err error) {
	var args []string
	// arg is a scratch buffer for the current argument; i is its length.
	arg := make([]rune, len(s))
	escaped := false
	quoted := false
	quote := '\x00'
	i := 0
	for _, rune := range s {
		switch {
		case escaped:
			escaped = false
		case rune == '\\':
			escaped = true
			continue
		case quote != '\x00':
			// Inside quotes only the matching quote character is special.
			if rune == quote {
				quote = '\x00'
				continue
			}
		case rune == '"' || rune == '\'':
			// quoted stays set so an empty quoted argument ('' or "") is
			// still emitted by the space/flush cases below.
			quoted = true
			quote = rune
			continue
		case unicode.IsSpace(rune):
			if quoted || i > 0 {
				quoted = false
				args = append(args, string(arg[:i]))
				i = 0
			}
			continue
		}
		arg[i] = rune
		i++
	}
	if quoted || i > 0 {
		args = append(args, string(arg[:i]))
	}
	if quote != 0 {
		err = errors.New("unclosed quote")
	} else if escaped {
		err = errors.New("unfinished escaping")
	}
	return args, err
}
+
// tagFlag adds tags to the build.Default context. Tags are expected to be
// formatted as a comma-separated list. It implements flag.Value.
//
// NOTE(review): Set mutates package-global state (build.Default), so this
// flag is not safe to parse from concurrent flag sets.
type tagFlag struct{}

// String returns the currently active build tags, comma-separated.
func (f *tagFlag) String() string {
	return strings.Join(build.Default.BuildTags, ",")
}

// Set appends the comma-separated tags in opt to build.Default.BuildTags.
func (f *tagFlag) Set(opt string) error {
	tags := strings.Split(opt, ",")
	build.Default.BuildTags = append(build.Default.BuildTags, tags...)
	return nil
}
diff --git a/go/tools/builders/generate_nogo_main.go b/go/tools/builders/generate_nogo_main.go
new file mode 100644
index 00000000..872b9b0a
--- /dev/null
+++ b/go/tools/builders/generate_nogo_main.go
@@ -0,0 +1,196 @@
+/* Copyright 2018 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+// Generates the nogo binary to analyze Go source code at build time.
+
+package main
+
+import (
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "math"
+ "os"
+ "regexp"
+ "strconv"
+ "text/template"
+)
+
// nogoMainTpl is the text/template for the generated nogo main package. It
// imports each analyzer under a unique alias, registers them in the
// analyzers slice, and embeds the per-analyzer configuration (flags and
// file include/exclude regexps). "regexp" is imported only when some
// configuration actually uses file patterns (NeedRegexp).
const nogoMainTpl = `
package main


import (
{{- if .NeedRegexp }}
	"regexp"
{{- end}}
{{- range $import := .Imports}}
	{{$import.Name}} "{{$import.Path}}"
{{- end}}
	"golang.org/x/tools/go/analysis"
)

var analyzers = []*analysis.Analyzer{
{{- range $import := .Imports}}
	{{$import.Name}}.Analyzer,
{{- end}}
}

// configs maps analysis names to configurations.
var configs = map[string]config{
{{- range $name, $config := .Configs}}
	{{printf "%q" $name}}: config{
		{{- if $config.AnalyzerFlags }}
		analyzerFlags: map[string]string {
			{{- range $flagKey, $flagValue := $config.AnalyzerFlags}}
			{{printf "%q: %q" $flagKey $flagValue}},
			{{- end}}
		},
		{{- end -}}
		{{- if $config.OnlyFiles}}
		onlyFiles: []*regexp.Regexp{
			{{- range $path, $comment := $config.OnlyFiles}}
			{{- if $comment}}
			// {{$comment}}
			{{end -}}
			{{printf "regexp.MustCompile(%q)" $path}},
			{{- end}}
		},
		{{- end -}}
		{{- if $config.ExcludeFiles}}
		excludeFiles: []*regexp.Regexp{
			{{- range $path, $comment := $config.ExcludeFiles}}
			{{- if $comment}}
			// {{$comment}}
			{{end -}}
			{{printf "regexp.MustCompile(%q)" $path}},
			{{- end}}
		},
		{{- end}}
	},
{{- end}}
}
`
+
+func genNogoMain(args []string) error {
+ analyzerImportPaths := multiFlag{}
+ flags := flag.NewFlagSet("generate_nogo_main", flag.ExitOnError)
+ out := flags.String("output", "", "output file to write (defaults to stdout)")
+ flags.Var(&analyzerImportPaths, "analyzer_importpath", "import path of an analyzer library")
+ configFile := flags.String("config", "", "nogo config file")
+ if err := flags.Parse(args); err != nil {
+ return err
+ }
+ if *out == "" {
+ return errors.New("must provide output file")
+ }
+
+ outFile := os.Stdout
+ var cErr error
+ outFile, err := os.Create(*out)
+ if err != nil {
+ return fmt.Errorf("os.Create(%q): %v", *out, err)
+ }
+ defer func() {
+ if err := outFile.Close(); err != nil {
+ cErr = fmt.Errorf("error closing %s: %v", outFile.Name(), err)
+ }
+ }()
+
+ config, err := buildConfig(*configFile)
+ if err != nil {
+ return err
+ }
+
+ type Import struct {
+ Path, Name string
+ }
+ // Create unique name for each imported analyzer.
+ suffix := 1
+ imports := make([]Import, 0, len(analyzerImportPaths))
+ for _, path := range analyzerImportPaths {
+ imports = append(imports, Import{
+ Path: path,
+ Name: "analyzer" + strconv.Itoa(suffix)})
+ if suffix == math.MaxInt32 {
+ return fmt.Errorf("cannot generate more than %d analyzers", suffix)
+ }
+ suffix++
+ }
+ data := struct {
+ Imports []Import
+ Configs Configs
+ NeedRegexp bool
+ }{
+ Imports: imports,
+ Configs: config,
+ }
+ for _, c := range config {
+ if len(c.OnlyFiles) > 0 || len(c.ExcludeFiles) > 0 {
+ data.NeedRegexp = true
+ break
+ }
+ }
+
+ tpl := template.Must(template.New("source").Parse(nogoMainTpl))
+ if err := tpl.Execute(outFile, data); err != nil {
+ return fmt.Errorf("template.Execute failed: %v", err)
+ }
+ return cErr
+}
+
+func buildConfig(path string) (Configs, error) {
+ if path == "" {
+ return Configs{}, nil
+ }
+ b, err := ioutil.ReadFile(path)
+ if err != nil {
+ return Configs{}, fmt.Errorf("failed to read config file: %v", err)
+ }
+ configs := make(Configs)
+ if err = json.Unmarshal(b, &configs); err != nil {
+ return Configs{}, fmt.Errorf("failed to unmarshal config file: %v", err)
+ }
+ for name, config := range configs {
+ for pattern := range config.OnlyFiles {
+ if _, err := regexp.Compile(pattern); err != nil {
+ return Configs{}, fmt.Errorf("invalid pattern for analysis %q: %v", name, err)
+ }
+ }
+ for pattern := range config.ExcludeFiles {
+ if _, err := regexp.Compile(pattern); err != nil {
+ return Configs{}, fmt.Errorf("invalid pattern for analysis %q: %v", name, err)
+ }
+ }
+ configs[name] = Config{
+ // Description is currently unused.
+ OnlyFiles: config.OnlyFiles,
+ ExcludeFiles: config.ExcludeFiles,
+ AnalyzerFlags: config.AnalyzerFlags,
+ }
+ }
+ return configs, nil
+}
+
+type Configs map[string]Config
+
+type Config struct {
+ Description string
+ OnlyFiles map[string]string `json:"only_files"`
+ ExcludeFiles map[string]string `json:"exclude_files"`
+ AnalyzerFlags map[string]string `json:"analyzer_flags"`
+}
diff --git a/go/tools/builders/generate_test_main.go b/go/tools/builders/generate_test_main.go
new file mode 100644
index 00000000..6d545b9d
--- /dev/null
+++ b/go/tools/builders/generate_test_main.go
@@ -0,0 +1,416 @@
+/* Copyright 2016 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+// Go testing support for Bazel.
+//
+// A Go test comprises three packages:
+//
+// 1. An internal test package, compiled from the sources of the library being
+// tested and any _test.go files with the same package name.
+// 2. An external test package, compiled from _test.go files with a package
+// name ending with "_test".
+// 3. A generated main package that imports both packages and initializes the
+// test framework with a list of tests, benchmarks, examples, and fuzz
+// targets read from source files.
+//
+// This action generates the source code for (3). The equivalent code for
+// 'go test' is in $GOROOT/src/cmd/go/internal/load/test.go.
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/doc"
+ "go/parser"
+ "go/token"
+ "os"
+ "sort"
+ "strings"
+ "text/template"
+)
+
// Import is a package imported by the generated test main, with the local
// name used to reference it.
type Import struct {
	Name string
	Path string
}

// TestCase identifies one Test*, Benchmark*, or Fuzz* function and the
// (possibly "_test"-suffixed) package alias it lives in.
type TestCase struct {
	Package string
	Name    string
}

// Example identifies one testable example together with its expected
// output.
type Example struct {
	Package   string
	Name      string
	Output    string
	Unordered bool
}

// Cases holds template data.
type Cases struct {
	Imports     []*Import
	Tests       []TestCase
	Benchmarks  []TestCase
	FuzzTargets []TestCase
	Examples    []Example
	TestMain    string // "pkg.TestMain" when the package defines TestMain
	CoverMode   string
	CoverFormat string
	Pkgname     string
}

// Version returns whether v is a supported Go version (like "go1.18").
// It is called from within testMainTpl to gate version-specific code.
func (c *Cases) Version(v string) bool {
	for _, r := range build.Default.ReleaseTags {
		if v == r {
			return true
		}
	}
	return false
}
+
// testMainTpl is the text/template for the generated test main package. It
// registers tests, benchmarks, fuzz targets, and examples with the testing
// framework, applies Bazel test sharding and the TESTBRIDGE_* environment
// variables, and wires in coverage when enabled.
const testMainTpl = `
package main

// This package must be initialized before packages being tested.
// NOTE: this relies on the order of package initialization, which is the spec
// is somewhat unclear about-- it only clearly guarantees that imported packages
// are initialized before their importers, though in practice (and implied) it
// also respects declaration order, which we're relying on here.
import "github.com/bazelbuild/rules_go/go/tools/bzltestutil"

import (
	"flag"
	"log"
	"os"
	"os/exec"
{{if .TestMain}}
	"reflect"
{{end}}
	"strconv"
	"testing"
	"testing/internal/testdeps"

{{if ne .CoverMode ""}}
	"github.com/bazelbuild/rules_go/go/tools/coverdata"
{{end}}

{{range $p := .Imports}}
	{{$p.Name}} "{{$p.Path}}"
{{end}}
)

var allTests = []testing.InternalTest{
{{range .Tests}}
	{"{{.Name}}", {{.Package}}.{{.Name}} },
{{end}}
}

var benchmarks = []testing.InternalBenchmark{
{{range .Benchmarks}}
	{"{{.Name}}", {{.Package}}.{{.Name}} },
{{end}}
}

{{if .Version "go1.18"}}
var fuzzTargets = []testing.InternalFuzzTarget{
{{range .FuzzTargets}}
	{"{{.Name}}", {{.Package}}.{{.Name}} },
{{end}}
}
{{end}}

var examples = []testing.InternalExample{
{{range .Examples}}
	{Name: "{{.Name}}", F: {{.Package}}.{{.Name}}, Output: {{printf "%q" .Output}}, Unordered: {{.Unordered}} },
{{end}}
}

func testsInShard() []testing.InternalTest {
	totalShards, err := strconv.Atoi(os.Getenv("TEST_TOTAL_SHARDS"))
	if err != nil || totalShards <= 1 {
		return allTests
	}
	file, err := os.Create(os.Getenv("TEST_SHARD_STATUS_FILE"))
	if err != nil {
		log.Fatalf("Failed to touch TEST_SHARD_STATUS_FILE: %v", err)
	}
	_ = file.Close()
	shardIndex, err := strconv.Atoi(os.Getenv("TEST_SHARD_INDEX"))
	if err != nil || shardIndex < 0 {
		return allTests
	}
	tests := []testing.InternalTest{}
	for i, t := range allTests {
		if i % totalShards == shardIndex {
			tests = append(tests, t)
		}
	}
	return tests
}

func main() {
	if bzltestutil.ShouldWrap() {
		err := bzltestutil.Wrap("{{.Pkgname}}")
		if xerr, ok := err.(*exec.ExitError); ok {
			os.Exit(xerr.ExitCode())
		} else if err != nil {
			log.Print(err)
			os.Exit(bzltestutil.TestWrapperAbnormalExit)
		} else {
			os.Exit(0)
		}
	}

	testDeps :=
		{{if eq .CoverFormat "lcov"}}
		bzltestutil.LcovTestDeps{TestDeps: testdeps.TestDeps{}}
		{{else}}
		testdeps.TestDeps{}
		{{end}}
	{{if .Version "go1.18"}}
	m := testing.MainStart(testDeps, testsInShard(), benchmarks, fuzzTargets, examples)
	{{else}}
	m := testing.MainStart(testDeps, testsInShard(), benchmarks, examples)
	{{end}}

	if filter := os.Getenv("TESTBRIDGE_TEST_ONLY"); filter != "" {
		flag.Lookup("test.run").Value.Set(filter)
	}

	if failfast := os.Getenv("TESTBRIDGE_TEST_RUNNER_FAIL_FAST"); failfast != "" {
		flag.Lookup("test.failfast").Value.Set("true")
	}
{{if eq .CoverFormat "lcov"}}
	panicOnExit0Flag := flag.Lookup("test.paniconexit0").Value
	testDeps.OriginalPanicOnExit = panicOnExit0Flag.(flag.Getter).Get().(bool)
	// Setting this flag provides a way to run hooks right before testing.M.Run() returns.
	panicOnExit0Flag.Set("true")
{{end}}
{{if ne .CoverMode ""}}
	if len(coverdata.Counters) > 0 {
		testing.RegisterCover(testing.Cover{
			Mode: "{{ .CoverMode }}",
			Counters: coverdata.Counters,
			Blocks: coverdata.Blocks,
		})

		if coverageDat, ok := os.LookupEnv("COVERAGE_OUTPUT_FILE"); ok {
			{{if eq .CoverFormat "lcov"}}
			flag.Lookup("test.coverprofile").Value.Set(coverageDat+".cover")
			{{else}}
			flag.Lookup("test.coverprofile").Value.Set(coverageDat)
			{{end}}
		}
	}
	{{end}}

	{{if not .TestMain}}
	res := m.Run()
	{{else}}
	{{.TestMain}}(m)
	{{/* See golang.org/issue/34129 and golang.org/cl/219639 */}}
	res := int(reflect.ValueOf(m).Elem().FieldByName("exitCode").Int())
	{{end}}
	os.Exit(res)
}
`
+
// genTestMain generates the main package source for a go_test binary: it
// scans the test sources for Test/Benchmark/Fuzz/Example functions and
// TestMain, then renders testMainTpl with the result.
func genTestMain(args []string) error {
	// Prepare our flags
	args, _, err := expandParamsFiles(args)
	if err != nil {
		return err
	}
	imports := multiFlag{}
	sources := multiFlag{}
	flags := flag.NewFlagSet("GoTestGenTest", flag.ExitOnError)
	goenv := envFlags(flags)
	out := flags.String("output", "", "output file to write. Defaults to stdout.")
	coverMode := flags.String("cover_mode", "", "the coverage mode to use")
	coverFormat := flags.String("cover_format", "", "the coverage report type to generate (go_cover or lcov)")
	pkgname := flags.String("pkgname", "", "package name of test")
	flags.Var(&imports, "import", "Packages to import")
	flags.Var(&sources, "src", "Sources to process for tests")
	if err := flags.Parse(args); err != nil {
		return err
	}
	if err := goenv.checkFlags(); err != nil {
		return err
	}
	// Process import args, each of the form "name=path".
	importMap := map[string]*Import{}
	for _, imp := range imports {
		parts := strings.Split(imp, "=")
		if len(parts) != 2 {
			return fmt.Errorf("Invalid import %q specified", imp)
		}
		i := &Import{Name: parts[0], Path: parts[1]}
		importMap[i.Name] = i
	}
	// Process source args, each of the form "pkgname=filename".
	sourceList := []string{}
	sourceMap := map[string]string{}
	for _, s := range sources {
		parts := strings.Split(s, "=")
		if len(parts) != 2 {
			return fmt.Errorf("Invalid source %q specified", s)
		}
		sourceList = append(sourceList, parts[1])
		sourceMap[parts[1]] = parts[0]
	}

	// filter our input file list
	filteredSrcs, err := filterAndSplitFiles(sourceList)
	if err != nil {
		return err
	}
	goSrcs := filteredSrcs.goSrcs

	outFile := os.Stdout
	if *out != "" {
		var err error
		outFile, err = os.Create(*out)
		if err != nil {
			return fmt.Errorf("os.Create(%q): %v", *out, err)
		}
		defer outFile.Close()
	}

	cases := Cases{
		CoverFormat: *coverFormat,
		CoverMode:   *coverMode,
		Pkgname:     *pkgname,
	}

	testFileSet := token.NewFileSet()
	// pkgs tracks which package aliases are actually referenced, so that
	// unused imports can be blanked out below.
	pkgs := map[string]bool{}
	for _, f := range goSrcs {
		parse, err := parser.ParseFile(testFileSet, f.filename, nil, parser.ParseComments)
		if err != nil {
			return fmt.Errorf("ParseFile(%q): %v", f.filename, err)
		}
		pkg := sourceMap[f.filename]
		// External test files (package foo_test) are referenced through
		// the "_test"-suffixed import alias.
		if strings.HasSuffix(parse.Name.String(), "_test") {
			pkg += "_test"
		}
		for _, e := range doc.Examples(parse) {
			if e.Output == "" && !e.EmptyOutput {
				// Examples with no output comment are compiled but not run.
				continue
			}
			cases.Examples = append(cases.Examples, Example{
				Name:      "Example" + e.Name,
				Package:   pkg,
				Output:    e.Output,
				Unordered: e.Unordered,
			})
			pkgs[pkg] = true
		}
		for _, d := range parse.Decls {
			fn, ok := d.(*ast.FuncDecl)
			if !ok {
				continue
			}
			if fn.Recv != nil {
				// Methods cannot be tests.
				continue
			}
			if fn.Name.Name == "TestMain" {
				// TestMain is not, itself, a test
				pkgs[pkg] = true
				cases.TestMain = fmt.Sprintf("%s.%s", pkg, fn.Name.Name)
				continue
			}

			// Here we check the signature of the Test* function. To
			// be considered a test:

			// 1. The function should have a single argument.
			if len(fn.Type.Params.List) != 1 {
				continue
			}

			// 2. The function should return nothing.
			if fn.Type.Results != nil {
				continue
			}

			// 3. The only parameter should have a type identified as
			//    *<something>.T
			starExpr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr)
			if !ok {
				continue
			}
			selExpr, ok := starExpr.X.(*ast.SelectorExpr)
			if !ok {
				continue
			}

			// We do not discriminate on the referenced type of the
			// parameter being *testing.T. Instead we assert that it
			// should be *<something>.T. This is because the import
			// could have been aliased as a different identifier.

			if strings.HasPrefix(fn.Name.Name, "Test") {
				if selExpr.Sel.Name != "T" {
					continue
				}
				pkgs[pkg] = true
				cases.Tests = append(cases.Tests, TestCase{
					Package: pkg,
					Name:    fn.Name.Name,
				})
			}
			if strings.HasPrefix(fn.Name.Name, "Benchmark") {
				if selExpr.Sel.Name != "B" {
					continue
				}
				pkgs[pkg] = true
				cases.Benchmarks = append(cases.Benchmarks, TestCase{
					Package: pkg,
					Name:    fn.Name.Name,
				})
			}
			if strings.HasPrefix(fn.Name.Name, "Fuzz") {
				if selExpr.Sel.Name != "F" {
					continue
				}
				pkgs[pkg] = true
				cases.FuzzTargets = append(cases.FuzzTargets, TestCase{
					Package: pkg,
					Name:    fn.Name.Name,
				})
			}
		}
	}

	for name := range importMap {
		// Set the names for all unused imports to "_"
		if !pkgs[name] {
			importMap[name].Name = "_"
		}
		cases.Imports = append(cases.Imports, importMap[name])
	}
	// Sort for deterministic output; map iteration order is random.
	sort.Slice(cases.Imports, func(i, j int) bool {
		return cases.Imports[i].Name < cases.Imports[j].Name
	})
	tpl := template.Must(template.New("source").Parse(testMainTpl))
	if err := tpl.Execute(outFile, &cases); err != nil {
		return fmt.Errorf("template.Execute(%v): %v", cases, err)
	}
	return nil
}
diff --git a/go/tools/builders/go_path.go b/go/tools/builders/go_path.go
new file mode 100644
index 00000000..58a7b8a9
--- /dev/null
+++ b/go/tools/builders/go_path.go
@@ -0,0 +1,203 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+)
+
// mode selects how go_path materializes its output tree.
type mode int

const (
	invalidMode mode = iota
	archiveMode // write a single .zip archive
	copyMode    // copy files into an output directory
	linkMode    // symlink files into an output directory
)

// modeFromString converts a -mode flag value into a mode constant.
// It reports an error for anything other than "archive", "copy", or "link".
func modeFromString(s string) (mode, error) {
	known := map[string]mode{
		"archive": archiveMode,
		"copy":    copyMode,
		"link":    linkMode,
	}
	m, ok := known[s]
	if !ok {
		return invalidMode, fmt.Errorf("invalid mode: %s", s)
	}
	return m, nil
}
+
// manifestEntry describes one file to place in the output. Src is the
// path of an existing input file (slash-separated; converted with
// filepath.FromSlash before use). Dst is the slash-separated relative
// path it should occupy in the output directory or zip archive.
type manifestEntry struct {
	Src, Dst string
}
+
+func main() {
+ log.SetPrefix("GoPath: ")
+ log.SetFlags(0)
+ if err := run(os.Args[1:]); err != nil {
+ log.Fatal(err)
+ }
+}
+
+func run(args []string) error {
+ var manifest, out string
+ flags := flag.NewFlagSet("go_path", flag.ContinueOnError)
+ flags.StringVar(&manifest, "manifest", "", "name of json file listing files to include")
+ flags.StringVar(&out, "out", "", "output file or directory")
+ modeFlag := flags.String("mode", "", "copy, link, or archive")
+ if err := flags.Parse(args); err != nil {
+ return err
+ }
+ if manifest == "" {
+ return errors.New("-manifest not set")
+ }
+ if out == "" {
+ return errors.New("-out not set")
+ }
+ if *modeFlag == "" {
+ return errors.New("-mode not set")
+ }
+ mode, err := modeFromString(*modeFlag)
+ if err != nil {
+ return err
+ }
+
+ entries, err := readManifest(manifest)
+ if err != nil {
+ return err
+ }
+
+ switch mode {
+ case archiveMode:
+ err = archivePath(out, entries)
+ case copyMode:
+ err = copyPath(out, entries)
+ case linkMode:
+ err = linkPath(out, entries)
+ }
+ return err
+}
+
+func readManifest(path string) ([]manifestEntry, error) {
+ data, err := ioutil.ReadFile(path)
+ if err != nil {
+ return nil, fmt.Errorf("error reading manifest: %v", err)
+ }
+ var entries []manifestEntry
+ if err := json.Unmarshal(data, &entries); err != nil {
+ return nil, fmt.Errorf("error unmarshalling manifest %s: %v", path, err)
+ }
+ return entries, nil
+}
+
// archivePath writes the contents of every manifest entry into a single
// zip archive at out. Archive member names come from Dst; contents are
// read from the file at Src. The named error return lets the deferred
// Close surface a failure to finalize the output file even when the rest
// of the function succeeded.
func archivePath(out string, manifest []manifestEntry) (err error) {
	outFile, err := os.Create(out)
	if err != nil {
		return err
	}
	defer func() {
		// Only report the Close failure if no earlier error occurred.
		if e := outFile.Close(); err == nil && e != nil {
			err = fmt.Errorf("error closing archive %s: %v", out, e)
		}
	}()
	outZip := zip.NewWriter(outFile)

	for _, entry := range manifest {
		srcFile, err := os.Open(abs(filepath.FromSlash(entry.Src)))
		if err != nil {
			return err
		}
		w, err := outZip.Create(entry.Dst)
		if err != nil {
			srcFile.Close()
			return err
		}
		if _, err := io.Copy(w, srcFile); err != nil {
			srcFile.Close()
			return err
		}
		if err := srcFile.Close(); err != nil {
			return err
		}
	}

	// Closing the zip writer flushes the central directory; a failure here
	// means the archive is incomplete.
	if err := outZip.Close(); err != nil {
		return fmt.Errorf("error constructing archive %s: %v", out, err)
	}
	return nil
}
+
+func copyPath(out string, manifest []manifestEntry) error {
+ if err := os.MkdirAll(out, 0777); err != nil {
+ return err
+ }
+ for _, entry := range manifest {
+ dst := abs(filepath.Join(out, filepath.FromSlash(entry.Dst)))
+ if err := os.MkdirAll(filepath.Dir(dst), 0777); err != nil {
+ return err
+ }
+ srcFile, err := os.Open(abs(filepath.FromSlash(entry.Src)))
+ if err != nil {
+ return err
+ }
+ dstFile, err := os.Create(dst)
+ if err != nil {
+ srcFile.Close()
+ return err
+ }
+ if _, err := io.Copy(dstFile, srcFile); err != nil {
+ dstFile.Close()
+ srcFile.Close()
+ return err
+ }
+ srcFile.Close()
+ if err := dstFile.Close(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func linkPath(out string, manifest []manifestEntry) error {
+ // out directory may already exist and may contain old symlinks. Delete.
+ if err := os.RemoveAll(out); err != nil {
+ return err
+ }
+ if err := os.MkdirAll(out, 0777); err != nil {
+ return err
+ }
+ for _, entry := range manifest {
+ dst := filepath.Join(out, filepath.FromSlash(entry.Dst))
+ dstDir := filepath.Dir(dst)
+ src, _ := filepath.Rel(dstDir, entry.Src)
+ if err := os.MkdirAll(dstDir, 0777); err != nil {
+ return err
+ }
+ if err := os.Symlink(src, dst); err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/go/tools/builders/importcfg.go b/go/tools/builders/importcfg.go
new file mode 100644
index 00000000..9fe55b42
--- /dev/null
+++ b/go/tools/builders/importcfg.go
@@ -0,0 +1,261 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+)
+
// archive describes a compiled dependency of the package being built.
// importPath is the path sources use to import it; packagePath is the
// path it is compiled as (used as the importmap target); file is the
// location of its archive file; importPathAliases lists additional import
// paths that resolve to the same archive. label is presumably the Bazel
// label that produced the archive — note the -arc flag parser (Set) does
// not populate it, so confirm where it is assigned before relying on it.
type archive struct {
	label, importPath, packagePath, file string
	importPathAliases []string
}
+
// checkImports verifies that each import in files refers to a
// direct dependency in archives or to a standard library package
// listed in the file at stdPackageListPath. checkImports returns
// a map from source import paths to elements of archives or to nil
// for standard library packages.
func checkImports(files []fileInfo, archives []archive, stdPackageListPath string, importPath string, recompileInternalDeps []string) (map[string]*archive, error) {
	// Read the standard package list.
	packagesTxt, err := ioutil.ReadFile(stdPackageListPath)
	if err != nil {
		return nil, err
	}
	stdPkgs := make(map[string]bool)
	// Split the list manually, one import path per line, trimming
	// whitespace and skipping blank lines.
	for len(packagesTxt) > 0 {
		n := bytes.IndexByte(packagesTxt, '\n')
		var line string
		if n < 0 {
			line = string(packagesTxt)
			packagesTxt = nil
		} else {
			line = string(packagesTxt[:n])
			packagesTxt = packagesTxt[n+1:]
		}
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		stdPkgs[line] = true
	}

	// Index the archives by primary import path and by alias.
	importToArchive := make(map[string]*archive)
	importAliasToArchive := make(map[string]*archive)
	for i := range archives {
		arc := &archives[i]
		importToArchive[arc.importPath] = arc
		for _, imp := range arc.importPathAliases {
			importAliasToArchive[imp] = arc
		}
	}
	// Construct recompileInternalDeps as a map to check if there are imports that are disallowed.
	recompileInternalDepMap := make(map[string]struct{})
	for _, dep := range recompileInternalDeps {
		recompileInternalDepMap[dep] = struct{}{}
	}
	// Build the import map.
	imports := make(map[string]*archive)
	var derr depsError
	for _, f := range files {
		for _, imp := range f.imports {
			path := imp.path
			// Skip already-resolved imports, cgo's pseudo-package "C",
			// and relative imports.
			if _, ok := imports[path]; ok || path == "C" || isRelative(path) {
				// TODO(#1645): Support local (relative) import paths. We don't emit
				// errors for them here, but they will probably break something else.
				continue
			}
			if _, ok := recompileInternalDepMap[path]; ok {
				return nil, fmt.Errorf("dependency cycle detected between %q and %q in file %q", importPath, path, f.filename)
			}
			// Classify the import: standard library (nil entry), direct
			// dependency, alias of a direct dependency, or missing.
			if stdPkgs[path] {
				imports[path] = nil
			} else if arc := importToArchive[path]; arc != nil {
				imports[path] = arc
			} else if arc := importAliasToArchive[path]; arc != nil {
				imports[path] = arc
			} else {
				derr.missing = append(derr.missing, missingDep{f.filename, path})
			}
		}
	}
	// Report all missing dependencies at once rather than one at a time.
	if len(derr.missing) > 0 {
		return nil, derr
	}
	return imports, nil
}
+
+// buildImportcfgFileForCompile writes an importcfg file to be consumed by the
+// compiler. The file is constructed from direct dependencies and std imports.
+// The caller is responsible for deleting the importcfg file.
+func buildImportcfgFileForCompile(imports map[string]*archive, installSuffix, dir string) (string, error) {
+ buf := &bytes.Buffer{}
+ goroot, ok := os.LookupEnv("GOROOT")
+ if !ok {
+ return "", errors.New("GOROOT not set")
+ }
+ goroot = abs(goroot)
+
+ sortedImports := make([]string, 0, len(imports))
+ for imp := range imports {
+ sortedImports = append(sortedImports, imp)
+ }
+ sort.Strings(sortedImports)
+
+ for _, imp := range sortedImports {
+ if arc := imports[imp]; arc == nil {
+ // std package
+ path := filepath.Join(goroot, "pkg", installSuffix, filepath.FromSlash(imp))
+ fmt.Fprintf(buf, "packagefile %s=%s.a\n", imp, path)
+ } else {
+ if imp != arc.packagePath {
+ fmt.Fprintf(buf, "importmap %s=%s\n", imp, arc.packagePath)
+ }
+ fmt.Fprintf(buf, "packagefile %s=%s\n", arc.packagePath, arc.file)
+ }
+ }
+
+ f, err := ioutil.TempFile(dir, "importcfg")
+ if err != nil {
+ return "", err
+ }
+ filename := f.Name()
+ if _, err := io.Copy(f, buf); err != nil {
+ f.Close()
+ os.Remove(filename)
+ return "", err
+ }
+ if err := f.Close(); err != nil {
+ os.Remove(filename)
+ return "", err
+ }
+ return filename, nil
+}
+
// buildImportcfgFileForLink writes an importcfg file to be consumed by the
// linker. Unlike the compile variant, it lists every standard library
// package named in stdPackageListPath (resolved under GOROOT with the
// given installSuffix) plus every archive in the link. The file is
// created in dir; the caller is responsible for deleting it.
func buildImportcfgFileForLink(archives []archive, stdPackageListPath, installSuffix, dir string) (string, error) {
	buf := &bytes.Buffer{}
	goroot, ok := os.LookupEnv("GOROOT")
	if !ok {
		return "", errors.New("GOROOT not set")
	}
	prefix := abs(filepath.Join(goroot, "pkg", installSuffix))
	stdPackageListFile, err := os.Open(stdPackageListPath)
	if err != nil {
		return "", err
	}
	defer stdPackageListFile.Close()
	// One import path per line; map each to its installed .a file.
	scanner := bufio.NewScanner(stdPackageListFile)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			continue
		}
		fmt.Fprintf(buf, "packagefile %s=%s.a\n", line, filepath.Join(prefix, filepath.FromSlash(line)))
	}
	if err := scanner.Err(); err != nil {
		return "", err
	}
	// Each package path may be provided at most once; duplicates should
	// have been rejected earlier, so treat one here as an internal error.
	depsSeen := map[string]string{}
	for _, arc := range archives {
		if _, ok := depsSeen[arc.packagePath]; ok {
			return "", fmt.Errorf("internal error: package %s provided multiple times. This should have been detected during analysis.", arc.packagePath)
		}
		depsSeen[arc.packagePath] = arc.label
		fmt.Fprintf(buf, "packagefile %s=%s\n", arc.packagePath, arc.file)
	}
	// Write the buffered config to a temp file, removing it on any
	// write/close failure so no partial file is left behind.
	f, err := ioutil.TempFile(dir, "importcfg")
	if err != nil {
		return "", err
	}
	filename := f.Name()
	if _, err := io.Copy(f, buf); err != nil {
		f.Close()
		os.Remove(filename)
		return "", err
	}
	if err := f.Close(); err != nil {
		os.Remove(filename)
		return "", err
	}
	return filename, nil
}
+
// depsError reports imports that could not be resolved against the
// declared direct dependencies.
type depsError struct {
	missing []missingDep
	known   []string
}

// missingDep identifies one unresolved import and the file containing it.
type missingDep struct {
	filename, imp string
}

var _ error = depsError{}

// Error lists each missing import, then either the set of known
// dependencies or a note that none were provided, followed by a hint on
// how to fix the build.
func (e depsError) Error() string {
	var b strings.Builder
	b.WriteString("missing strict dependencies:\n")
	for _, dep := range e.missing {
		fmt.Fprintf(&b, "\t%s: import of %q\n", dep.filename, dep.imp)
	}
	if len(e.known) == 0 {
		b.WriteString("No dependencies were provided.\n")
	} else {
		b.WriteString("Known dependencies are:\n")
		for _, imp := range e.known {
			fmt.Fprintf(&b, "\t%s\n", imp)
		}
	}
	b.WriteString("Check that imports in Go sources match importpath attributes in deps.")
	return b.String()
}
+
// isRelative reports whether path is a local (relative) import path,
// i.e. one beginning with "./" or "../".
func isRelative(path string) bool {
	for _, prefix := range []string{"./", "../"} {
		if strings.HasPrefix(path, prefix) {
			return true
		}
	}
	return false
}
+
+type archiveMultiFlag []archive
+
+func (m *archiveMultiFlag) String() string {
+ if m == nil || len(*m) == 0 {
+ return ""
+ }
+ return fmt.Sprint(*m)
+}
+
+func (m *archiveMultiFlag) Set(v string) error {
+ parts := strings.Split(v, "=")
+ if len(parts) != 3 {
+ return fmt.Errorf("badly formed -arc flag: %s", v)
+ }
+ importPaths := strings.Split(parts[0], ":")
+ a := archive{
+ importPath: importPaths[0],
+ importPathAliases: importPaths[1:],
+ packagePath: parts[1],
+ file: abs(parts[2]),
+ }
+ *m = append(*m, a)
+ return nil
+}
diff --git a/go/tools/builders/info.go b/go/tools/builders/info.go
new file mode 100644
index 00000000..f7f1fd03
--- /dev/null
+++ b/go/tools/builders/info.go
@@ -0,0 +1,64 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// info prints debugging information about the go environment.
+// It is used to help examine the execution environment of rules_go
+package main
+
+import (
+ "flag"
+ "fmt"
+ "log"
+ "os"
+)
+
+func run(args []string) error {
+ args, _, err := expandParamsFiles(args)
+ if err != nil {
+ return err
+ }
+ filename := ""
+ flags := flag.NewFlagSet("info", flag.ExitOnError)
+ flags.StringVar(&filename, "out", filename, "The file to write the report to")
+ goenv := envFlags(flags)
+ if err := flags.Parse(args); err != nil {
+ return err
+ }
+ if err := goenv.checkFlags(); err != nil {
+ return err
+ }
+ os.Setenv("GO111MODULE", "off")
+ f := os.Stderr
+ if filename != "" {
+ var err error
+ f, err = os.Create(filename)
+ if err != nil {
+ return fmt.Errorf("Could not create report file: %v", err)
+ }
+ defer f.Close()
+ }
+ if err := goenv.runCommandToFile(f, os.Stderr, goenv.goCmd("version")); err != nil {
+ return err
+ }
+ if err := goenv.runCommandToFile(f, os.Stderr, goenv.goCmd("env")); err != nil {
+ return err
+ }
+ return nil
+}
+
+func main() {
+ if err := run(os.Args[1:]); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go/tools/builders/link.go b/go/tools/builders/link.go
new file mode 100644
index 00000000..723bb193
--- /dev/null
+++ b/go/tools/builders/link.go
@@ -0,0 +1,163 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// link combines the results of a compile step using "go tool link". It is invoked by the
+// Go rules as an action.
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strings"
+)
+
+func link(args []string) error {
+ // Parse arguments.
+ args, _, err := expandParamsFiles(args)
+ if err != nil {
+ return err
+ }
+ builderArgs, toolArgs := splitArgs(args)
+ stamps := multiFlag{}
+ xdefs := multiFlag{}
+ archives := archiveMultiFlag{}
+ flags := flag.NewFlagSet("link", flag.ExitOnError)
+ goenv := envFlags(flags)
+ main := flags.String("main", "", "Path to the main archive.")
+ packagePath := flags.String("p", "", "Package path of the main archive.")
+ outFile := flags.String("o", "", "Path to output file.")
+ flags.Var(&archives, "arc", "Label, package path, and file name of a dependency, separated by '='")
+ packageList := flags.String("package_list", "", "The file containing the list of standard library packages")
+ buildmode := flags.String("buildmode", "", "Build mode used.")
+ flags.Var(&xdefs, "X", "A string variable to replace in the linked binary (repeated).")
+ flags.Var(&stamps, "stamp", "The name of a file with stamping values.")
+ conflictErrMsg := flags.String("conflict_err", "", "Error message about conflicts to report if there's a link error.")
+ if err := flags.Parse(builderArgs); err != nil {
+ return err
+ }
+ if err := goenv.checkFlags(); err != nil {
+ return err
+ }
+
+ if *conflictErrMsg != "" {
+ return errors.New(*conflictErrMsg)
+ }
+
+ // On Windows, take the absolute path of the output file and main file.
+ // This is needed on Windows because the relative path is frequently too long.
+ // os.Open on Windows converts absolute paths to some other path format with
+ // longer length limits. Absolute paths do not work on macOS for .dylib
+ // outputs because they get baked in as the "install path".
+ if runtime.GOOS != "darwin" && runtime.GOOS != "ios" {
+ *outFile = abs(*outFile)
+ }
+ *main = abs(*main)
+
+ // If we were given any stamp value files, read and parse them
+ stampMap := map[string]string{}
+ for _, stampfile := range stamps {
+ stampbuf, err := ioutil.ReadFile(stampfile)
+ if err != nil {
+ return fmt.Errorf("Failed reading stamp file %s: %v", stampfile, err)
+ }
+ scanner := bufio.NewScanner(bytes.NewReader(stampbuf))
+ for scanner.Scan() {
+ line := strings.SplitN(scanner.Text(), " ", 2)
+ switch len(line) {
+ case 0:
+ // Nothing to do here
+ case 1:
+ // Map to the empty string
+ stampMap[line[0]] = ""
+ case 2:
+ // Key and value
+ stampMap[line[0]] = line[1]
+ }
+ }
+ }
+
+ // Build an importcfg file.
+ importcfgName, err := buildImportcfgFileForLink(archives, *packageList, goenv.installSuffix, filepath.Dir(*outFile))
+ if err != nil {
+ return err
+ }
+ if !goenv.shouldPreserveWorkDir {
+ defer os.Remove(importcfgName)
+ }
+
+ // generate any additional link options we need
+ goargs := goenv.goTool("link")
+ goargs = append(goargs, "-importcfg", importcfgName)
+
+ parseXdef := func(xdef string) (pkg, name, value string, err error) {
+ eq := strings.IndexByte(xdef, '=')
+ if eq < 0 {
+ return "", "", "", fmt.Errorf("-X flag does not contain '=': %s", xdef)
+ }
+ dot := strings.LastIndexByte(xdef[:eq], '.')
+ if dot < 0 {
+ return "", "", "", fmt.Errorf("-X flag does not contain '.': %s", xdef)
+ }
+ pkg, name, value = xdef[:dot], xdef[dot+1:eq], xdef[eq+1:]
+ if pkg == *packagePath {
+ pkg = "main"
+ }
+ return pkg, name, value, nil
+ }
+ for _, xdef := range xdefs {
+ pkg, name, value, err := parseXdef(xdef)
+ if err != nil {
+ return err
+ }
+ var missingKey bool
+ value = regexp.MustCompile(`\{.+?\}`).ReplaceAllStringFunc(value, func(key string) string {
+ if value, ok := stampMap[key[1:len(key)-1]]; ok {
+ return value
+ }
+ missingKey = true
+ return key
+ })
+ if !missingKey {
+ goargs = append(goargs, "-X", fmt.Sprintf("%s.%s=%s", pkg, name, value))
+ }
+ }
+
+ if *buildmode != "" {
+ goargs = append(goargs, "-buildmode", *buildmode)
+ }
+ goargs = append(goargs, "-o", *outFile)
+
+ // add in the unprocess pass through options
+ goargs = append(goargs, toolArgs...)
+ goargs = append(goargs, *main)
+ if err := goenv.runCommand(goargs); err != nil {
+ return err
+ }
+
+ if *buildmode == "c-archive" {
+ if err := stripArMetadata(*outFile); err != nil {
+ return fmt.Errorf("error stripping archive metadata: %v", err)
+ }
+ }
+
+ return nil
+}
diff --git a/go/tools/builders/md5sum.go b/go/tools/builders/md5sum.go
new file mode 100644
index 00000000..834eb272
--- /dev/null
+++ b/go/tools/builders/md5sum.go
@@ -0,0 +1,89 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// md5sum replicates the equivalent functionality of the unix tool of the same name.
+package main
+
+import (
+ "crypto/md5"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+)
+
// md5SumFile returns the MD5 digest of the named file's contents,
// streaming the file through the hash rather than reading it whole.
func md5SumFile(filename string) ([]byte, error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	h := md5.New()
	if _, err := io.Copy(h, f); err != nil {
		return nil, err
	}
	return h.Sum(nil), nil
}
+
+func run(args []string) error {
+ // Prepare our flags
+ flags := flag.NewFlagSet("md5sum", flag.ExitOnError)
+ output := flags.String("output", "", "If set, write the results to this file, instead of stdout.")
+ if err := flags.Parse(args); err != nil {
+ return err
+ }
+ // print the outputs if we need not
+ to := os.Stdout
+ if *output != "" {
+ f, err := os.Create(*output)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ to = f
+ }
+ for _, path := range flags.Args() {
+ walkFn := func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ return nil
+ }
+
+ if b, err := md5SumFile(path); err != nil {
+ return err
+ } else {
+ fmt.Fprintf(to, "%s %x\n", path, b)
+ }
+ return nil
+ }
+
+ if err := filepath.Walk(path, walkFn); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func main() {
+ log.SetFlags(0)
+ log.SetPrefix("GoMd5sum: ")
+ if err := run(os.Args[1:]); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go
new file mode 100644
index 00000000..c6156e1d
--- /dev/null
+++ b/go/tools/builders/nogo_main.go
@@ -0,0 +1,654 @@
+/* Copyright 2018 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+// Loads and runs registered analyses on a well-typed Go package.
+// The code in this file is combined with the code generated by
+// generate_nogo_main.go.
+
+package main
+
+import (
+ "bytes"
+ "encoding/gob"
+ "errors"
+ "flag"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "reflect"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/internal/facts"
+)
+
// nogoBaseConfigName names the config entry whose settings apply as a
// base across analyzers. (Not referenced in this file — presumably
// consumed by the generated config code; confirm against
// generate_nogo_main.go.)
const nogoBaseConfigName = "_base"

// init fails fast if the generated analyzer list is invalid according to
// the analysis framework (e.g. malformed metadata or dependency issues).
func init() {
	if err := analysis.Validate(analyzers); err != nil {
		log.Fatal(err)
	}
}

// typesSizes supplies target-dependent type sizes to analysis passes,
// derived from the GOARCH environment variable for the gc compiler.
var typesSizes = types.SizesFor("gc", os.Getenv("GOARCH"))
+
+func main() {
+ log.SetFlags(0) // no timestamp
+ log.SetPrefix("nogo: ")
+ if err := run(os.Args[1:]); err != nil {
+ log.Fatal(err)
+ }
+}
+
// run returns an error if there is a problem loading the package or if any
// analysis fails. It parses the nogo command line, type-checks the source
// files, runs the registered analyzers, and (when -x is given) writes the
// serialized facts for importing packages.
func run(args []string) error {
	args, _, err := expandParamsFiles(args)
	if err != nil {
		return fmt.Errorf("error reading paramfiles: %v", err)
	}

	factMap := factMultiFlag{}
	flags := flag.NewFlagSet("nogo", flag.ExitOnError)
	flags.Var(&factMap, "fact", "Import path and file containing facts for that library, separated by '=' (may be repeated)'")
	importcfg := flags.String("importcfg", "", "The import configuration file")
	packagePath := flags.String("p", "", "The package path (importmap) of the package being compiled")
	xPath := flags.String("x", "", "The archive file where serialized facts should be written")
	// ExitOnError: a bad flag terminates the process inside Parse, so the
	// returned error does not need to be checked here.
	flags.Parse(args)
	srcs := flags.Args()

	packageFile, importMap, err := readImportCfg(*importcfg)
	if err != nil {
		return fmt.Errorf("error parsing importcfg: %v", err)
	}

	diagnostics, facts, err := checkPackage(analyzers, *packagePath, packageFile, importMap, factMap, srcs)
	if err != nil {
		return fmt.Errorf("error running analyzers: %v", err)
	}
	// Any non-empty diagnostics string fails the build.
	if diagnostics != "" {
		return fmt.Errorf("errors found by nogo during build-time code analysis:\n%s\n", diagnostics)
	}
	if *xPath != "" {
		if err := ioutil.WriteFile(abs(*xPath), facts, 0o666); err != nil {
			return fmt.Errorf("error writing facts: %v", err)
		}
	}

	return nil
}
+
// readImportCfg parses an importcfg file into packageFile (package path
// -> archive file, from "packagefile" lines) and importMap (source import
// path -> compiled package path, from "importmap" lines). Blank lines and
// "#" comments are ignored; any other directive is an error.
//
// Adapted from go/src/cmd/compile/internal/gc/main.go. Keep in sync.
func readImportCfg(file string) (packageFile map[string]string, importMap map[string]string, err error) {
	packageFile, importMap = make(map[string]string), make(map[string]string)
	data, err := ioutil.ReadFile(file)
	if err != nil {
		return nil, nil, fmt.Errorf("-importcfg: %v", err)
	}

	for lineNum, line := range strings.Split(string(data), "\n") {
		lineNum++ // 1-based
		line = strings.TrimSpace(line)
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}

		var verb, args string
		if i := strings.Index(line, " "); i < 0 {
			verb = line
		} else {
			verb, args = line[:i], strings.TrimSpace(line[i+1:])
		}
		var before, after string
		if i := strings.Index(args, "="); i >= 0 {
			before, after = args[:i], args[i+1:]
		}
		switch verb {
		default:
			return nil, nil, fmt.Errorf("%s:%d: unknown directive %q", file, lineNum, verb)
		case "importmap":
			if before == "" || after == "" {
				return nil, nil, fmt.Errorf(`%s:%d: invalid importmap: syntax is "importmap old=new"`, file, lineNum)
			}
			importMap[before] = after
		case "packagefile":
			if before == "" || after == "" {
				return nil, nil, fmt.Errorf(`%s:%d: invalid packagefile: syntax is "packagefile path=filename"`, file, lineNum)
			}
			packageFile[before] = after
		}
	}
	return packageFile, importMap, nil
}
+
// checkPackage runs all the given analyzers on the specified package and
// returns the source code diagnostics that the must be printed in the build log.
// It returns an empty string if no source code diagnostics need to be printed.
// It also returns the encoded facts for the package, for consumption by
// analyses of importing packages.
//
// This implementation was adapted from that of golang.org/x/tools/go/checker/internal/checker.
func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFile, importMap map[string]string, factMap map[string]string, filenames []string) (string, []byte, error) {
	// Register fact types and establish dependencies between analyzers.
	actions := make(map[*analysis.Analyzer]*action)
	var visit func(a *analysis.Analyzer) *action
	visit = func(a *analysis.Analyzer) *action {
		act, ok := actions[a]
		if !ok {
			act = &action{a: a}
			actions[a] = act
			for _, f := range a.FactTypes {
				act.usesFacts = true
				gob.Register(f)
			}
			act.deps = make([]*action, len(a.Requires))
			for i, req := range a.Requires {
				dep := visit(req)
				// Using facts is transitive: if any dependency uses
				// facts, so does this action.
				if dep.usesFacts {
					act.usesFacts = true
				}
				act.deps[i] = dep
			}
		}
		return act
	}

	// Apply configured flag values to each analyzer before running it.
	roots := make([]*action, 0, len(analyzers))
	for _, a := range analyzers {
		if cfg, ok := configs[a.Name]; ok {
			for flagKey, flagVal := range cfg.analyzerFlags {
				if strings.HasPrefix(flagKey, "-") {
					return "", nil, fmt.Errorf(
						"%s: flag should not begin with '-': %s", a.Name, flagKey)
				}
				if flag := a.Flags.Lookup(flagKey); flag == nil {
					return "", nil, fmt.Errorf("%s: unrecognized flag: %s", a.Name, flagKey)
				}
				if err := a.Flags.Set(flagKey, flagVal); err != nil {
					return "", nil, fmt.Errorf(
						"%s: invalid value for flag: %s=%s: %w", a.Name, flagKey, flagVal, err)
				}
			}
		}
		roots = append(roots, visit(a))
	}

	// Load the package, including AST, types, and facts.
	imp := newImporter(importMap, packageFile, factMap)
	pkg, err := load(packagePath, imp, filenames)
	if err != nil {
		return "", nil, fmt.Errorf("error loading package: %v", err)
	}
	for _, act := range actions {
		act.pkg = pkg
	}

	// Process nolint directives similar to golangci-lint.
	for _, f := range pkg.syntax {
		// CommentMap will correctly associate comments to the largest node group
		// applicable. This handles inline comments that might trail a large
		// assignment and will apply the comment to the entire assignment.
		commentMap := ast.NewCommentMap(pkg.fset, f, f.Comments)
		for node, groups := range commentMap {
			rng := &Range{
				from: pkg.fset.Position(node.Pos()),
				to:   pkg.fset.Position(node.End()).Line,
			}
			for _, group := range groups {
				for _, comm := range group.List {
					linters, ok := parseNolint(comm.Text)
					if !ok {
						continue
					}
					// A nil linters set suppresses every analyzer in this
					// range; otherwise only the named ones are suppressed.
					for analyzer, act := range actions {
						if linters == nil || linters[analyzer.Name] {
							act.nolint = append(act.nolint, rng)
						}
					}
				}
			}
		}
	}

	// Execute the analyzers.
	execAll(roots)

	// Process diagnostics and encode facts for importers of this package.
	diagnostics := checkAnalysisResults(roots, pkg)
	facts := pkg.facts.Encode()
	return diagnostics, facts, nil
}
+
// Range marks the span covered by a nolint directive: from the starting
// position of the annotated node (whose Filename restricts the range to
// one file) through line to, inclusive.
type Range struct {
	from token.Position
	to   int
}
+
// An action represents one unit of analysis work: the application of
// one analysis to one package. Actions form a DAG within a
// package (as different analyzers are applied, either in sequence or
// parallel).
type action struct {
	once        sync.Once                          // ensures execOnce runs at most once
	a           *analysis.Analyzer                 // the analysis being applied
	pass        *analysis.Pass                     // the pass handed to the analyzer's Run
	pkg         *goPackage                         // the package under analysis
	deps        []*action                          // actions for the analyzers this one Requires
	inputs      map[*analysis.Analyzer]interface{} // results of deps, keyed by analyzer
	result      interface{}                        // value returned by the analyzer's Run
	diagnostics []analysis.Diagnostic              // reported diagnostics not suppressed by nolint
	usesFacts   bool                               // whether this analysis or any dependency declares fact types
	err         error                              // failure of this action or of a prerequisite
	nolint      []*Range                           // source ranges in which this analyzer is suppressed
}

// String identifies the action as "analyzer@package", used in
// prerequisite-failure messages.
func (act *action) String() string {
	return fmt.Sprintf("%s@%s", act.a, act.pkg)
}
+
+func execAll(actions []*action) {
+ var wg sync.WaitGroup
+ wg.Add(len(actions))
+ for _, act := range actions {
+ go func(act *action) {
+ defer wg.Done()
+ act.exec()
+ }(act)
+ }
+ wg.Wait()
+}
+
// exec runs this action's analysis exactly once, no matter how many
// dependents request it concurrently.
func (act *action) exec() { act.once.Do(act.execOnce) }

// execOnce runs the action's dependencies, assembles the analysis.Pass,
// and invokes the analyzer, recording its result, surviving diagnostics,
// and any error on the action.
func (act *action) execOnce() {
	// Analyze dependencies.
	execAll(act.deps)

	// Report an error if any dependency failed.
	var failed []string
	for _, dep := range act.deps {
		if dep.err != nil {
			failed = append(failed, dep.String())
		}
	}
	if failed != nil {
		sort.Strings(failed)
		act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
		return
	}

	// Plumb the output values of the dependencies
	// into the inputs of this action.
	inputs := make(map[*analysis.Analyzer]interface{})
	for _, dep := range act.deps {
		// Same package, different analysis (horizontal edge):
		// in-memory outputs of prerequisite analyzers
		// become inputs to this analysis pass.
		inputs[dep.a] = dep.result
	}

	// ignoreNolintReporter drops any diagnostic whose position falls
	// inside one of this action's nolint ranges; all other diagnostics
	// are collected on the action.
	ignoreNolintReporter := func(d analysis.Diagnostic) {
		pos := act.pkg.fset.Position(d.Pos)
		for _, rng := range act.nolint {
			// The list of nolint ranges is built for the entire package. Make sure we
			// only apply ranges to the correct file.
			if pos.Filename != rng.from.Filename {
				continue
			}
			if pos.Line < rng.from.Line || pos.Line > rng.to {
				continue
			}
			// Found a nolint range. Ignore the issue.
			return
		}
		act.diagnostics = append(act.diagnostics, d)
	}

	// Run the analysis.
	factFilter := make(map[reflect.Type]bool)
	for _, f := range act.a.FactTypes {
		factFilter[reflect.TypeOf(f)] = true
	}
	pass := &analysis.Pass{
		Analyzer:          act.a,
		Fset:              act.pkg.fset,
		Files:             act.pkg.syntax,
		Pkg:               act.pkg.types,
		TypesInfo:         act.pkg.typesInfo,
		ResultOf:          inputs,
		Report:            ignoreNolintReporter,
		ImportPackageFact: act.pkg.facts.ImportPackageFact,
		ExportPackageFact: act.pkg.facts.ExportPackageFact,
		ImportObjectFact:  act.pkg.facts.ImportObjectFact,
		ExportObjectFact:  act.pkg.facts.ExportObjectFact,
		AllPackageFacts:   func() []analysis.PackageFact { return act.pkg.facts.AllPackageFacts(factFilter) },
		AllObjectFacts:    func() []analysis.ObjectFact { return act.pkg.facts.AllObjectFacts(factFilter) },
		TypesSizes:        typesSizes,
	}
	act.pass = pass

	var err error
	if act.pkg.illTyped && !pass.Analyzer.RunDespiteErrors {
		err = fmt.Errorf("analysis skipped due to type-checking error: %v", act.pkg.typeCheckError)
	} else {
		act.result, err = pass.Analyzer.Run(pass)
		if err == nil {
			// Defend against analyzers whose Run returns a value of a
			// different type than their declared ResultType.
			if got, want := reflect.TypeOf(act.result), pass.Analyzer.ResultType; got != want {
				err = fmt.Errorf(
					"internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
					pass.Pkg.Path(), pass.Analyzer, got, want)
			}
		}
	}
	act.err = err
}
+
+// load parses and type checks the source code in each file in filenames.
+// load also deserializes facts stored for imported packages.
+func load(packagePath string, imp *importer, filenames []string) (*goPackage, error) {
+ if len(filenames) == 0 {
+ return nil, errors.New("no filenames")
+ }
+ var syntax []*ast.File
+ for _, file := range filenames {
+ s, err := parser.ParseFile(imp.fset, file, nil, parser.ParseComments)
+ if err != nil {
+ return nil, err
+ }
+ syntax = append(syntax, s)
+ }
+ pkg := &goPackage{fset: imp.fset, syntax: syntax}
+
+ config := types.Config{Importer: imp}
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Uses: make(map[*ast.Ident]types.Object),
+ Defs: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+
+ initInstanceInfo(info)
+
+ types, err := config.Check(packagePath, pkg.fset, syntax, info)
+ if err != nil {
+ pkg.illTyped, pkg.typeCheckError = true, err
+ }
+ pkg.types, pkg.typesInfo = types, info
+
+ pkg.facts, err = facts.NewDecoder(pkg.types).Decode(imp.readFacts)
+ if err != nil {
+ return nil, fmt.Errorf("internal error decoding facts: %v", err)
+ }
+
+ return pkg, nil
+}
+
// A goPackage describes a loaded Go package.
// It bundles everything an analysis.Pass needs: positions, syntax, type
// information, and imported facts.
type goPackage struct {
	// fset provides position information for types, typesInfo, and syntax.
	// It is set only when types is set.
	fset *token.FileSet
	// syntax is the package's syntax trees.
	syntax []*ast.File
	// types provides type information for the package.
	types *types.Package
	// facts contains information saved by the analysis framework. Passes may
	// import facts for imported packages and may also export facts for this
	// package to be consumed by analyses in downstream packages.
	facts *facts.Set
	// illTyped indicates whether the package or any dependency contains errors.
	// It is set only when types is set.
	illTyped bool
	// typeCheckError contains any error encountered during type-checking. It is
	// only set when illTyped is true.
	typeCheckError error
	// typesInfo provides type information about the package's syntax trees.
	// It is set only when syntax is set.
	typesInfo *types.Info
}
+
// String returns the package's import path, used in dependency-failure
// messages (see execOnce).
func (g *goPackage) String() string {
	return g.types.Path()
}
+
// checkAnalysisResults checks the analysis diagnostics in the given actions
// and returns a string containing all the diagnostics that should be printed
// to the build log. Per-analyzer configs (overlaid on the base config) may
// restrict which files an analyzer reports on. An empty string means there
// is nothing to report.
func checkAnalysisResults(actions []*action, pkg *goPackage) string {
	type entry struct {
		analysis.Diagnostic
		*analysis.Analyzer
	}
	var diagnostics []entry
	var errs []error
	for _, act := range actions {
		if act.err != nil {
			// Analyzer failed.
			errs = append(errs, fmt.Errorf("analyzer %q failed: %v", act.a.Name, act.err))
			continue
		}
		if len(act.diagnostics) == 0 {
			continue
		}
		var currentConfig config
		// Use the base config if it exists.
		if baseConfig, ok := configs[nogoBaseConfigName]; ok {
			currentConfig = baseConfig
		}
		// Overwrite the config with the desired config. Any unset fields
		// in the config will default to the base config.
		if actionConfig, ok := configs[act.a.Name]; ok {
			if actionConfig.analyzerFlags != nil {
				currentConfig.analyzerFlags = actionConfig.analyzerFlags
			}
			if actionConfig.onlyFiles != nil {
				currentConfig.onlyFiles = actionConfig.onlyFiles
			}
			if actionConfig.excludeFiles != nil {
				currentConfig.excludeFiles = actionConfig.excludeFiles
			}
		}

		// With no file filters configured, keep every diagnostic.
		if currentConfig.onlyFiles == nil && currentConfig.excludeFiles == nil {
			for _, diag := range act.diagnostics {
				diagnostics = append(diagnostics, entry{Diagnostic: diag, Analyzer: act.a})
			}
			continue
		}
		// Discard diagnostics based on the analyzer configuration.
		for _, d := range act.diagnostics {
			// NOTE(golang.org/issue/31008): nilness does not set positions,
			// so don't assume the position is valid.
			p := pkg.fset.Position(d.Pos)
			filename := "-"
			if p.IsValid() {
				filename = p.Filename
			}
			include := true
			if len(currentConfig.onlyFiles) > 0 {
				// This analyzer emits diagnostics for only a set of files.
				include = false
				for _, pattern := range currentConfig.onlyFiles {
					if pattern.MatchString(filename) {
						include = true
						break
					}
				}
			}
			if include {
				// excludeFiles takes precedence over onlyFiles.
				for _, pattern := range currentConfig.excludeFiles {
					if pattern.MatchString(filename) {
						include = false
						break
					}
				}
			}
			if include {
				diagnostics = append(diagnostics, entry{Diagnostic: d, Analyzer: act.a})
			}
		}
	}
	if len(diagnostics) == 0 && len(errs) == 0 {
		return ""
	}

	// Sort kept diagnostics by position for deterministic build-log output.
	sort.Slice(diagnostics, func(i, j int) bool {
		return diagnostics[i].Pos < diagnostics[j].Pos
	})
	errMsg := &bytes.Buffer{}
	sep := ""
	for _, err := range errs {
		errMsg.WriteString(sep)
		sep = "\n"
		errMsg.WriteString(err.Error())
	}
	for _, d := range diagnostics {
		errMsg.WriteString(sep)
		sep = "\n"
		fmt.Fprintf(errMsg, "%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.Name)
	}
	return errMsg.String()
}
+
// config determines which source files an analyzer will emit diagnostics for.
// config values are generated in another file that is compiled with
// nogo_main.go by the nogo rule. The zero value applies no file filtering
// and passes no flags.
type config struct {
	// onlyFiles is a list of regular expressions that match files an analyzer
	// will emit diagnostics for. When empty, the analyzer will emit diagnostics
	// for all files.
	onlyFiles []*regexp.Regexp

	// excludeFiles is a list of regular expressions that match files that an
	// analyzer will not emit diagnostics for.
	excludeFiles []*regexp.Regexp

	// analyzerFlags is a map of flag names to flag values which will be passed
	// to Analyzer.Flags. Note that no leading '-' should be present in a flag
	// name
	analyzerFlags map[string]string
}
+
// importer is an implementation of go/types.Importer that imports type
// information from the export data in compiled .a files.
type importer struct {
	fset *token.FileSet
	// importMap translates import paths written in source to package paths.
	importMap map[string]string
	// packageCache memoizes previously imported packages.
	packageCache map[string]*types.Package
	// packageFile maps a package path to the .a file holding its export data.
	packageFile map[string]string
	// factMap maps an import path to the file with its serialized facts.
	factMap map[string]string
}

// newImporter returns an importer with a fresh FileSet and an empty
// package cache.
func newImporter(importMap, packageFile map[string]string, factMap map[string]string) *importer {
	imp := &importer{
		fset:         token.NewFileSet(),
		packageCache: map[string]*types.Package{},
		importMap:    importMap,
		packageFile:  packageFile,
		factMap:      factMap,
	}
	return imp
}
+
+func (i *importer) Import(path string) (*types.Package, error) {
+ if imp, ok := i.importMap[path]; ok {
+ // Translate import path if necessary.
+ path = imp
+ }
+ if path == "unsafe" {
+ // Special case: go/types has pre-defined type information for unsafe.
+ // See https://github.com/golang/go/issues/13882.
+ return types.Unsafe, nil
+ }
+ if pkg, ok := i.packageCache[path]; ok && pkg.Complete() {
+ return pkg, nil // cache hit
+ }
+
+ archive, ok := i.packageFile[path]
+ if !ok {
+ return nil, fmt.Errorf("could not import %q", path)
+ }
+ // open file
+ f, err := os.Open(archive)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", archive, err)
+ }
+ }()
+
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ return nil, err
+ }
+
+ return gcexportdata.Read(r, i.fset, i.packageCache, path)
+}
+
+func (i *importer) readFacts(pkg *types.Package) ([]byte, error) {
+ archive := i.factMap[pkg.Path()]
+ if archive == "" {
+ // Packages that were not built with the nogo toolchain will not be
+ // analyzed, so there's no opportunity to store facts. This includes
+ // packages in the standard library and packages built with go_tool_library,
+ // such as coverdata. Analyzers are expected to hard code information
+ // about standard library definitions and must gracefully handle packages
+ // that don't have facts. For example, the "printf" analyzer must know
+ // fmt.Printf accepts a format string.
+ return nil, nil
+ }
+ factReader, err := readFileInArchive(nogoFact, archive)
+ if os.IsNotExist(err) {
+ // Packages that were not built with the nogo toolchain will not be
+ // analyzed, so there's no opportunity to store facts. This includes
+ // packages in the standard library and packages built with go_tool_library,
+ // such as coverdata.
+ return nil, nil
+ } else if err != nil {
+ return nil, err
+ }
+ defer factReader.Close()
+ return ioutil.ReadAll(factReader)
+}
+
// factMultiFlag accumulates repeated "-fact importpath=file" flags, mapping
// an import path to the file containing its serialized facts.
type factMultiFlag map[string]string

// String implements flag.Value, rendering the accumulated map.
func (m *factMultiFlag) String() string {
	if m == nil || len(*m) == 0 {
		return ""
	}
	return fmt.Sprintf("%v", *m)
}

// Set records one "importpath=factfile" mapping.
// BUGFIX: the value is split at the first '=' only (strings.Cut), so fact
// file paths that themselves contain '=' are accepted; previously any
// second '=' caused a "badly formatted" error.
func (m *factMultiFlag) Set(v string) error {
	importPath, factFile, found := strings.Cut(v, "=")
	if !found {
		return fmt.Errorf("badly formatted -fact flag: %s", v)
	}
	(*m)[importPath] = factFile
	return nil
}
diff --git a/go/tools/builders/nogo_typeparams_go117.go b/go/tools/builders/nogo_typeparams_go117.go
new file mode 100644
index 00000000..9b6fe9ac
--- /dev/null
+++ b/go/tools/builders/nogo_typeparams_go117.go
@@ -0,0 +1,23 @@
+/* Copyright 2022 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+//go:build !go1.18
+// +build !go1.18
+
+package main
+
+import "go/types"
+
+func initInstanceInfo(*types.Info) {}
diff --git a/go/tools/builders/nogo_typeparams_go118.go b/go/tools/builders/nogo_typeparams_go118.go
new file mode 100644
index 00000000..787b492a
--- /dev/null
+++ b/go/tools/builders/nogo_typeparams_go118.go
@@ -0,0 +1,28 @@
+/* Copyright 2022 The Bazel Authors. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+//go:build go1.18
+// +build go1.18
+
+package main
+
+import (
+ "go/ast"
+ "go/types"
+)
+
+func initInstanceInfo(info *types.Info) {
+ info.Instances = make(map[*ast.Ident]types.Instance)
+}
diff --git a/go/tools/builders/nolint.go b/go/tools/builders/nolint.go
new file mode 100644
index 00000000..e6e3c043
--- /dev/null
+++ b/go/tools/builders/nolint.go
@@ -0,0 +1,39 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import "strings"
+
// parseNolint parses a nolint comment directive and returns the set of
// linters it names. A bare "nolint" directive, or one naming "all", applies
// to every linter and is reported as (nil, true). Text that is not a nolint
// directive returns (nil, false).
func parseNolint(text string) (map[string]bool, bool) {
	// Strip leading comment markers and spaces ("// nolint..." → "nolint...").
	text = strings.TrimLeft(text, "/ ")
	if !strings.HasPrefix(text, "nolint") {
		return nil, false
	}
	parts := strings.Split(text, ":")
	if len(parts) == 1 {
		// No linter list after "nolint": suppress all linters.
		return nil, true
	}
	linters := strings.Split(parts[1], ",")
	result := map[string]bool{}
	for _, linter := range linters {
		// BUGFIX: trim whitespace around each name so the common spaced form
		// "// nolint: foo, bar" matches; previously " foo" was stored verbatim
		// and never matched a linter name.
		linter = strings.TrimSpace(linter)
		if strings.EqualFold(linter, "all") {
			return nil, true
		}
		result[linter] = true
	}
	return result, true
}
diff --git a/go/tools/builders/nolint_test.go b/go/tools/builders/nolint_test.go
new file mode 100644
index 00000000..2870eaaf
--- /dev/null
+++ b/go/tools/builders/nolint_test.go
@@ -0,0 +1,79 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "reflect"
+ "testing"
+)
+
+func TestParseNolint(t *testing.T) {
+ tests := []struct {
+ Name string
+ Comment string
+ Valid bool
+ Linters []string
+ }{
+ {
+ Name: "Invalid",
+ Comment: "not a comment",
+ },
+ {
+ Name: "No match",
+ Comment: "// comment",
+ },
+ {
+ Name: "All linters",
+ Comment: "//nolint",
+ Valid: true,
+ },
+ {
+ Name: "All linters (explicit)",
+ Comment: "//nolint:all",
+ Valid: true,
+ },
+ {
+ Name: "Single linter",
+ Comment: "// nolint:foo",
+ Valid: true,
+ Linters: []string{"foo"},
+ },
+ {
+ Name: "Multiple linters",
+ Comment: "// nolint:a,b,c",
+ Valid: true,
+ Linters: []string{"a", "b", "c"},
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.Name, func(t *testing.T) {
+ result, ok := parseNolint(tc.Comment)
+ if tc.Valid != ok {
+ t.Fatalf("parseNolint expect %t got %t", tc.Valid, ok)
+ }
+ var linters map[string]bool
+ if len(tc.Linters) != 0 {
+ linters = make(map[string]bool)
+ for _, l := range tc.Linters {
+ linters[l] = true
+ }
+ }
+ if !reflect.DeepEqual(result, linters) {
+ t.Fatalf("parseNolint expect %v got %v", linters, result)
+ }
+ })
+ }
+}
diff --git a/go/tools/builders/pack.go b/go/tools/builders/pack.go
new file mode 100644
index 00000000..ddbb1930
--- /dev/null
+++ b/go/tools/builders/pack.go
@@ -0,0 +1,388 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+func copyFile(inPath, outPath string) error {
+ inFile, err := os.Open(inPath)
+ if err != nil {
+ return err
+ }
+ defer inFile.Close()
+ outFile, err := os.OpenFile(outPath, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0666)
+ if err != nil {
+ return err
+ }
+ defer outFile.Close()
+ _, err = io.Copy(outFile, inFile)
+ return err
+}
+
+func linkFile(inPath, outPath string) error {
+ inPath, err := filepath.Abs(inPath)
+ if err != nil {
+ return err
+ }
+ return os.Symlink(inPath, outPath)
+}
+
+func copyOrLinkFile(inPath, outPath string) error {
+ if runtime.GOOS == "windows" {
+ return copyFile(inPath, outPath)
+ } else {
+ return linkFile(inPath, outPath)
+ }
+}
+
const (
	// arHeader appears at the beginning of archives created by "ar" and
	// "go tool pack" on all platforms.
	arHeader = "!<arch>\n"

	// entryLength is the size in bytes of the metadata preceding each file
	// in an archive.
	entryLength = 60

	// pkgDef is the name of the export data file within an archive
	pkgDef = "__.PKGDEF"

	// nogoFact is the name of the nogo fact file
	nogoFact = "nogo.out"
)

// zeroBytes is a zero-valued, space-padded numeric field for archive entry
// headers. NOTE(review): not referenced anywhere in this chunk — presumably
// used elsewhere in the file; confirm before removing.
var zeroBytes = []byte("0 ")

// bufioReaderWithCloser pairs a buffered reader over an archive with the
// Closer of the underlying file.
type bufioReaderWithCloser struct {
	// bufio.Reader is needed to skip bytes in archives
	*bufio.Reader
	io.Closer
}
+
+func extractFiles(archive, dir string, names map[string]struct{}) (files []string, err error) {
+ rc, err := openArchive(archive)
+ if err != nil {
+ return nil, err
+ }
+ defer rc.Close()
+
+ var nameData []byte
+ bufReader := rc.Reader
+ for {
+ name, size, err := readMetadata(bufReader, &nameData)
+ if err == io.EOF {
+ return files, nil
+ }
+ if err != nil {
+ return nil, err
+ }
+ if !isObjectFile(name) {
+ if err := skipFile(bufReader, size); err != nil {
+ return nil, err
+ }
+ continue
+ }
+ name, err = simpleName(name, names)
+ if err != nil {
+ return nil, err
+ }
+ name = filepath.Join(dir, name)
+ if err := extractFile(bufReader, name, size); err != nil {
+ return nil, err
+ }
+ files = append(files, name)
+ }
+}
+
+func openArchive(archive string) (bufioReaderWithCloser, error) {
+ f, err := os.Open(archive)
+ if err != nil {
+ return bufioReaderWithCloser{}, err
+ }
+ r := bufio.NewReader(f)
+ header := make([]byte, len(arHeader))
+ if _, err := io.ReadFull(r, header); err != nil || string(header) != arHeader {
+ f.Close()
+ return bufioReaderWithCloser{}, fmt.Errorf("%s: bad header", archive)
+ }
+ return bufioReaderWithCloser{r, f}, nil
+}
+
// readMetadata reads the relevant fields of an entry. Before calling,
// r must be positioned at the beginning of an entry. Afterward, r will
// be positioned at the beginning of the file data. io.EOF is returned if
// there are no more files in the archive.
//
// Both BSD and GNU / SysV naming conventions are supported.
func readMetadata(r *bufio.Reader, nameData *[]byte) (name string, size int64, err error) {
retry:
	// Each file is preceded by a 60-byte header that contains its metadata.
	// We only care about two fields, name and size. Other fields (mtime,
	// owner, group, mode) are ignored because they don't affect compilation.
	var entry [entryLength]byte
	if _, err := io.ReadFull(r, entry[:]); err != nil {
		return "", 0, err
	}

	// Bytes 48-57 hold the decimal size, right-padded with spaces.
	sizeField := strings.TrimSpace(string(entry[48:58]))
	size, err = strconv.ParseInt(sizeField, 10, 64)
	if err != nil {
		return "", 0, err
	}

	// Bytes 0-15 hold the (possibly encoded) member name.
	nameField := strings.TrimRight(string(entry[:16]), " ")
	switch {
	case strings.HasPrefix(nameField, "#1/"):
		// BSD-style name. The number of bytes in the name is written here in
		// ASCII, right-padded with spaces. The actual name is stored at the
		// beginning of the file data, left-padded with NUL bytes.
		nameField = nameField[len("#1/"):]
		nameLen, err := strconv.ParseInt(nameField, 10, 64)
		if err != nil {
			return "", 0, err
		}
		nameBuf := make([]byte, nameLen)
		if _, err := io.ReadFull(r, nameBuf); err != nil {
			return "", 0, err
		}
		name = strings.TrimRight(string(nameBuf), "\x00")
		// The name bytes count toward the recorded size; the remainder is data.
		size -= nameLen

	case nameField == "//":
		// GNU / SysV-style name data. This is a fake file that contains names
		// for files with long names. We read this into nameData, then read
		// the next entry.
		*nameData = make([]byte, size)
		if _, err := io.ReadFull(r, *nameData); err != nil {
			return "", 0, err
		}
		if size%2 != 0 {
			// Files are aligned at 2-byte offsets. Discard the padding byte if the
			// size was odd.
			if _, err := r.ReadByte(); err != nil {
				return "", 0, err
			}
		}
		goto retry

	case nameField == "/":
		// GNU / SysV-style symbol lookup table. Skip.
		if err := skipFile(r, size); err != nil {
			return "", 0, err
		}
		goto retry

	case strings.HasPrefix(nameField, "/"):
		// GNU / SysV-style long file name. The number that follows the slash is
		// an offset into the name data that should have been read earlier.
		// The file name ends with a slash.
		nameField = nameField[1:]
		nameOffset, err := strconv.Atoi(nameField)
		if err != nil {
			return "", 0, err
		}
		if nameData == nil || nameOffset < 0 || nameOffset >= len(*nameData) {
			return "", 0, fmt.Errorf("invalid name length: %d", nameOffset)
		}
		i := bytes.IndexByte((*nameData)[nameOffset:], '/')
		if i < 0 {
			return "", 0, errors.New("file name does not end with '/'")
		}
		name = string((*nameData)[nameOffset : nameOffset+i])

	case strings.HasSuffix(nameField, "/"):
		// GNU / SysV-style short file name.
		name = nameField[:len(nameField)-1]

	default:
		// Common format name.
		name = nameField
	}

	return name, size, err
}
+
+// extractFile reads size bytes from r and writes them to a new file, name.
+func extractFile(r *bufio.Reader, name string, size int64) error {
+ w, err := os.Create(name)
+ if err != nil {
+ return err
+ }
+ defer w.Close()
+ _, err = io.CopyN(w, r, size)
+ if err != nil {
+ return err
+ }
+ if size%2 != 0 {
+ // Files are aligned at 2-byte offsets. Discard the padding byte if the
+ // size was odd.
+ if _, err := r.ReadByte(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func skipFile(r *bufio.Reader, size int64) error {
+ if size%2 != 0 {
+ // Files are aligned at 2-byte offsets. Discard the padding byte if the
+ // size was odd.
+ size += 1
+ }
+ _, err := r.Discard(int(size))
+ return err
+}
+
// isObjectFile reports whether an archive member name looks like a compiled
// object file, i.e. carries a ".o" extension.
func isObjectFile(name string) bool {
	return filepath.Ext(name) == ".o"
}
+
// simpleName returns a file name that is at most 15 characters long and
// doesn't conflict with names already handed out (recorded in names). The
// original extension is preserved; extra dots in the stem become '_' and a
// numeric suffix is appended until a free name is found. If no such name
// exists, an error is returned.
func simpleName(name string, names map[string]struct{}) (string, error) {
	if _, taken := names[name]; !taken && len(name) < 16 {
		names[name] = struct{}{}
		return name, nil
	}
	stem, ext := name, ""
	if dot := strings.LastIndexByte(name, '.'); dot >= 0 {
		stem = strings.ReplaceAll(name[:dot], ".", "_")
		ext = name[dot:]
	}
	// len(names)+1 candidates is always enough to find a free suffix if one
	// fits in the 15-character budget.
	for n := 0; n <= len(names); n++ {
		suffix := strconv.Itoa(n)
		keep := 15 - len(ext) - len(suffix)
		if keep < 0 {
			break
		}
		if keep > len(stem) {
			keep = len(stem)
		}
		candidate := stem[:keep] + suffix + ext
		if _, used := names[candidate]; !used {
			names[candidate] = struct{}{}
			return candidate, nil
		}
	}
	return "", fmt.Errorf("cannot shorten file name: %q", name)
}
+
+func appendFiles(goenv *env, archive string, files []string) error {
+ archive = abs(archive) // required for long filenames on Windows.
+
+ // Create an empty archive if one doesn't already exist.
+ // In Go 1.16, 'go tool pack r' reports an error if the archive doesn't exist.
+ // 'go tool pack c' copies export data in addition to creating the archive,
+ // so we don't want to use that directly.
+ _, err := os.Stat(archive)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ if os.IsNotExist(err) {
+ if err := ioutil.WriteFile(archive, []byte(arHeader), 0666); err != nil {
+ return err
+ }
+ }
+
+ // Append files to the archive.
+ // TODO(jayconrod): copy cmd/internal/archive and use that instead of
+ // shelling out to cmd/pack.
+ args := goenv.goTool("pack", "r", archive)
+ args = append(args, files...)
+ return goenv.runCommand(args)
+}
+
// readWithCloser pairs a (typically size-limited) reader over one archive
// member with the Closer that owns the underlying archive file.
type readWithCloser struct {
	io.Reader
	io.Closer
}
+
// readFileInArchive returns a reader for the member fileName inside the ar
// archive. The returned ReadCloser is limited to that member's bytes, and
// Close releases the archive file. If the member is not present, the
// returned error satisfies os.IsNotExist.
func readFileInArchive(fileName, archive string) (io.ReadCloser, error) {
	rc, err := openArchive(archive)
	if err != nil {
		return nil, err
	}
	var nameData []byte
	bufReader := rc.Reader
	for err == nil {
		// avoid shadowing err in the loop so it can be returned correctly in the end
		var (
			name string
			size int64
		)
		name, size, err = readMetadata(bufReader, &nameData)
		if err != nil {
			break
		}
		if name == fileName {
			// Found the member: return a reader capped at its size; the
			// caller's Close closes the archive file.
			return readWithCloser{
				Reader: io.LimitReader(rc, size),
				Closer: rc,
			}, nil
		}
		err = skipFile(bufReader, size)
	}
	if err == io.EOF {
		// Reached the end of the archive without finding fileName.
		err = os.ErrNotExist
	}
	rc.Close()
	return nil, err
}
+
+func extractFileFromArchive(archive, dir, name string) (err error) {
+ archiveReader, err := readFileInArchive(name, archive)
+ if err != nil {
+ return fmt.Errorf("error reading %s from %s: %v", name, archive, err)
+ }
+ defer func() {
+ e := archiveReader.Close()
+ if e != nil && err == nil {
+ err = fmt.Errorf("error closing %q: %v", archive, e)
+ }
+ }()
+ outPath := filepath.Join(dir, pkgDef)
+ outFile, err := os.Create(outPath)
+ if err != nil {
+ return fmt.Errorf("error creating %s: %v", outPath, err)
+ }
+ defer func() {
+ e := outFile.Close()
+ if e != nil && err == nil {
+ err = fmt.Errorf("error closing %q: %v", outPath, e)
+ }
+ }()
+ if size, err := io.Copy(outFile, archiveReader); err != nil {
+ return fmt.Errorf("error writing %s: %v", outPath, err)
+ } else if size == 0 {
+ return fmt.Errorf("%s is empty in %s", name, archive)
+ }
+ return err
+}
diff --git a/go/tools/builders/path.go b/go/tools/builders/path.go
new file mode 100644
index 00000000..f60e4deb
--- /dev/null
+++ b/go/tools/builders/path.go
@@ -0,0 +1,7 @@
+// +build !windows
+
+package main
+
// processPath returns path unchanged on non-Windows platforms. The Windows
// build (path_windows.go) converts paths to their short form instead.
func processPath(path string) (string, error) {
	return path, nil
}
diff --git a/go/tools/builders/path_windows.go b/go/tools/builders/path_windows.go
new file mode 100644
index 00000000..23b1b65b
--- /dev/null
+++ b/go/tools/builders/path_windows.go
@@ -0,0 +1,25 @@
+// +build windows
+
+package main
+
+import (
+ "runtime"
+ "syscall"
+)
+
// processPath converts path to its short (8.3) form via GetShortPathName,
// which tolerates spaces and long components in downstream tools.
func processPath(path string) (string, error) {
	// NOTE(review): this guard is dead code — this file is only built on
	// Windows (see the build tag above), so runtime.GOOS is always "windows".
	if runtime.GOOS != "windows" {
		return path, nil
	}

	var buf [258]uint16
	up, err := syscall.UTF16PtrFromString(path)
	if err != nil {
		return path, err
	}
	// NOTE(review): the fixed 258-element buffer is assumed large enough; the
	// required-length result of GetShortPathName is not checked, so longer
	// short paths would be truncated — TODO confirm upstream intent.
	_, err = syscall.GetShortPathName(up, &buf[0], 258)
	if err != nil {
		return path, err
	}
	return syscall.UTF16ToString(buf[:]), nil
}
diff --git a/go/tools/builders/protoc.go b/go/tools/builders/protoc.go
new file mode 100644
index 00000000..46a9f012
--- /dev/null
+++ b/go/tools/builders/protoc.go
@@ -0,0 +1,219 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// protoc invokes the protobuf compiler and captures the resulting .pb.go file.
+package main
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
// genFileInfo tracks one output file of a protoc run: where the rules expect
// it, whether protoc actually produced it, and which produced file (if any)
// should be copied into place. NOTE(review): the "ambiguious" spelling is
// kept because the field is referenced throughout this file.
type genFileInfo struct {
	base       string       // The basename of the path
	path       string       // The full path to the final file
	expected   bool         // Whether the file is expected by the rules
	created    bool         // Whether the file was created by protoc
	from       *genFileInfo // The actual file protoc produced if not Path
	unique     bool         // True if this base name is unique in expected results
	ambiguious bool         // True if there were more than one possible outputs that matched this file
}
+
+func run(args []string) error {
+ // process the args
+ args, useParamFile, err := expandParamsFiles(args)
+ if err != nil {
+ return err
+ }
+ options := multiFlag{}
+ descriptors := multiFlag{}
+ expected := multiFlag{}
+ imports := multiFlag{}
+ flags := flag.NewFlagSet("protoc", flag.ExitOnError)
+ protoc := flags.String("protoc", "", "The path to the real protoc.")
+ outPath := flags.String("out_path", "", "The base output path to write to.")
+ plugin := flags.String("plugin", "", "The go plugin to use.")
+ importpath := flags.String("importpath", "", "The importpath for the generated sources.")
+ flags.Var(&options, "option", "The plugin options.")
+ flags.Var(&descriptors, "descriptor_set", "The descriptor set to read.")
+ flags.Var(&expected, "expected", "The expected output files.")
+ flags.Var(&imports, "import", "Map a proto file to an import path.")
+ if err := flags.Parse(args); err != nil {
+ return err
+ }
+
+ // Output to a temporary folder and then move the contents into place below.
+ // This is to work around long file paths on Windows.
+ tmpDir, err := ioutil.TempDir("", "go_proto")
+ if err != nil {
+ return err
+ }
+ tmpDir = abs(tmpDir) // required to work with long paths on Windows
+ absOutPath := abs(*outPath) // required to work with long paths on Windows
+ defer os.RemoveAll(tmpDir)
+
+ pluginBase := filepath.Base(*plugin)
+ pluginName := strings.TrimSuffix(
+ strings.TrimPrefix(filepath.Base(*plugin), "protoc-gen-"), ".exe")
+ for _, m := range imports {
+ options = append(options, fmt.Sprintf("M%v", m))
+ }
+ if runtime.GOOS == "windows" {
+ // Turn the plugin path into raw form, since we're handing it off to a non-go binary.
+ // This is required to work with long paths on Windows.
+ *plugin = "\\\\?\\" + abs(*plugin)
+ }
+ protoc_args := []string{
+ fmt.Sprintf("--%v_out=%v:%v", pluginName, strings.Join(options, ","), tmpDir),
+ "--plugin", fmt.Sprintf("%v=%v", strings.TrimSuffix(pluginBase, ".exe"), *plugin),
+ "--descriptor_set_in", strings.Join(descriptors, string(os.PathListSeparator)),
+ }
+ protoc_args = append(protoc_args, flags.Args()...)
+
+ var cmd *exec.Cmd
+ if useParamFile {
+ paramFile, err := ioutil.TempFile(tmpDir, "protoc-*.params")
+ if err != nil {
+ return fmt.Errorf("error creating param file for protoc: %v", err)
+ }
+ for _, arg := range protoc_args {
+ _, err := fmt.Fprintln(paramFile, arg)
+ if err != nil {
+ return fmt.Errorf("error writing param file for protoc: %v", err)
+ }
+ }
+ cmd = exec.Command(*protoc, "@"+paramFile.Name())
+ } else {
+ cmd = exec.Command(*protoc, protoc_args...)
+ }
+
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return fmt.Errorf("error running protoc: %v", err)
+ }
+ // Build our file map, and test for existance
+ files := map[string]*genFileInfo{}
+ byBase := map[string]*genFileInfo{}
+ for _, path := range expected {
+ info := &genFileInfo{
+ path: path,
+ base: filepath.Base(path),
+ expected: true,
+ unique: true,
+ }
+ files[info.path] = info
+ if byBase[info.base] != nil {
+ info.unique = false
+ byBase[info.base].unique = false
+ } else {
+ byBase[info.base] = info
+ }
+ }
+ // Walk the generated files
+ filepath.Walk(tmpDir, func(path string, f os.FileInfo, err error) error {
+ relPath, err := filepath.Rel(tmpDir, path)
+ if err != nil {
+ return err
+ }
+ if relPath == "." {
+ return nil
+ }
+
+ if f.IsDir() {
+ if err := os.Mkdir(filepath.Join(absOutPath, relPath), f.Mode()); !os.IsExist(err) {
+ return err
+ }
+ return nil
+ }
+
+ if !strings.HasSuffix(path, ".go") {
+ return nil
+ }
+
+ info := &genFileInfo{
+ path: path,
+ base: filepath.Base(path),
+ created: true,
+ }
+
+ if foundInfo, ok := files[relPath]; ok {
+ foundInfo.created = true
+ foundInfo.from = info
+ return nil
+ }
+ files[relPath] = info
+ copyTo := byBase[info.base]
+ switch {
+ case copyTo == nil:
+ // Unwanted output
+ case !copyTo.unique:
+ // not unique, no copy allowed
+ case copyTo.from != nil:
+ copyTo.ambiguious = true
+ info.ambiguious = true
+ default:
+ copyTo.from = info
+ copyTo.created = true
+ info.expected = true
+ }
+ return nil
+ })
+ buf := &bytes.Buffer{}
+ for _, f := range files {
+ switch {
+ case f.expected && !f.created:
+ // Some plugins only create output files if the proto source files have
+ // have relevant definitions (e.g., services for grpc_gateway). Create
+ // trivial files that the compiler will ignore for missing outputs.
+ data := []byte("// +build ignore\n\npackage ignore")
+ if err := ioutil.WriteFile(abs(f.path), data, 0644); err != nil {
+ return err
+ }
+ case f.expected && f.ambiguious:
+ fmt.Fprintf(buf, "Ambiguious output %v.\n", f.path)
+ case f.from != nil:
+ data, err := ioutil.ReadFile(f.from.path)
+ if err != nil {
+ return err
+ }
+ if err := ioutil.WriteFile(abs(f.path), data, 0644); err != nil {
+ return err
+ }
+ case !f.expected:
+ //fmt.Fprintf(buf, "Unexpected output %v.\n", f.path)
+ }
+ if buf.Len() > 0 {
+ fmt.Fprintf(buf, "Check that the go_package option is %q.", *importpath)
+ return errors.New(buf.String())
+ }
+ }
+
+ return nil
+}
+
+func main() {
+ if err := run(os.Args[1:]); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go/tools/builders/read.go b/go/tools/builders/read.go
new file mode 100644
index 00000000..b03c02bf
--- /dev/null
+++ b/go/tools/builders/read.go
@@ -0,0 +1,551 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file was adapted from Go src/go/build/read.go at commit 8634a234df2a
+// on 2021-01-26. It's used to extract metadata from .go files without requiring
+// them to be in the same directory.
+
+package main
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "io"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
// importReader scans the leading section of a Go source file (package
// clause and import block), accumulating the bytes read and recording the
// first error encountered.
type importReader struct {
	b    *bufio.Reader
	buf  []byte         // bytes consumed so far (the file "header")
	peek byte           // one-byte lookahead from peekByte; 0 when empty
	err  error          // first I/O or syntax error encountered
	eof  bool           // whether EOF has been reached
	nerr int            // error-path call counter; guards against looping
	pos  token.Position // current input position (used for //go:embed)
}
+
+func newImportReader(name string, r io.Reader) *importReader {
+ return &importReader{
+ b: bufio.NewReader(r),
+ pos: token.Position{
+ Filename: name,
+ Line: 1,
+ Column: 1,
+ },
+ }
+}
+
// isIdent reports whether c may appear in a Go identifier: an ASCII
// letter or digit, an underscore, or any byte >= utf8.RuneSelf (part of a
// multi-byte identifier rune).
func isIdent(c byte) bool {
	switch {
	case c >= utf8.RuneSelf:
		return true
	case 'a' <= c && c <= 'z', 'A' <= c && c <= 'Z', '0' <= c && c <= '9', c == '_':
		return true
	default:
		return false
	}
}
+
// Sentinel errors recorded by the reader: errSyntax marks malformed input;
// errNUL marks a NUL byte, which is never valid in Go source.
var (
	errSyntax = errors.New("syntax error")
	errNUL    = errors.New("unexpected NUL in input")
)
+
+// syntaxError records a syntax error, but only if an I/O error has not already been recorded.
+func (r *importReader) syntaxError() {
+ if r.err == nil {
+ r.err = errSyntax
+ }
+}
+
// readByte reads the next byte from the input, saves it in buf, and returns it.
// If an error occurs, readByte records the error in r.err and returns 0.
func (r *importReader) readByte() byte {
	c, err := r.b.ReadByte()
	if err == nil {
		r.buf = append(r.buf, c)
		if c == 0 {
			// NUL is never valid in Go source; treat it as an error.
			err = errNUL
		}
	}
	if err != nil {
		if err == io.EOF {
			// EOF is tracked in r.eof, separately from other errors.
			r.eof = true
		} else if r.err == nil {
			r.err = err
		}
		c = 0
	}
	return c
}
+
// readByteNoBuf is like readByte but doesn't buffer the byte.
// It exhausts r.buf before reading from r.b.
func (r *importReader) readByteNoBuf() byte {
	var c byte
	var err error
	if len(r.buf) > 0 {
		// Replay bytes already buffered by readByte before touching r.b.
		c = r.buf[0]
		r.buf = r.buf[1:]
	} else {
		c, err = r.b.ReadByte()
		if err == nil && c == 0 {
			err = errNUL
		}
	}

	if err != nil {
		if err == io.EOF {
			r.eof = true
		} else if r.err == nil {
			r.err = err
		}
		return 0
	}
	// Maintain the position so //go:embed patterns get accurate locations.
	r.pos.Offset++
	if c == '\n' {
		r.pos.Line++
		r.pos.Column = 1
	} else {
		r.pos.Column++
	}
	return c
}
+
// peekByte returns the next byte from the input reader but does not advance beyond it.
// If skipSpace is set, peekByte skips leading spaces and comments.
func (r *importReader) peekByte(skipSpace bool) byte {
	if r.err != nil {
		if r.nerr++; r.nerr > 10000 {
			// Defensive guard: callers loop on peekByte; if an error state
			// somehow fails to terminate them, fail loudly instead of hanging.
			panic("go/build: import reader looping")
		}
		return 0
	}

	// Use r.peek as first input byte.
	// Don't just return r.peek here: it might have been left by peekByte(false)
	// and this might be peekByte(true).
	c := r.peek
	if c == 0 {
		c = r.readByte()
	}
	for r.err == nil && !r.eof {
		if skipSpace {
			// For the purposes of this reader, semicolons are never necessary to
			// understand the input and are treated as spaces.
			switch c {
			case ' ', '\f', '\t', '\r', '\n', ';':
				c = r.readByte()
				continue

			case '/':
				// Skip over // line comments and /* */ block comments.
				c = r.readByte()
				if c == '/' {
					for c != '\n' && r.err == nil && !r.eof {
						c = r.readByte()
					}
				} else if c == '*' {
					var c1 byte
					for (c != '*' || c1 != '/') && r.err == nil {
						if r.eof {
							r.syntaxError()
						}
						c, c1 = c1, r.readByte()
					}
				} else {
					r.syntaxError()
				}
				c = r.readByte()
				continue
			}
		}
		break
	}
	r.peek = c
	return r.peek
}
+
+// nextByte is like peekByte but advances beyond the returned byte.
+func (r *importReader) nextByte(skipSpace bool) byte {
+ c := r.peekByte(skipSpace)
+ r.peek = 0
+ return c
+}
+
// goEmbed is the directive name matched (byte by byte) after a "//".
var goEmbed = []byte("go:embed")

// findEmbed advances the input reader to the next //go:embed comment.
// It reports whether it found a comment.
// (Otherwise it found an error or EOF.)
func (r *importReader) findEmbed(first bool) bool {
	// The import block scan stopped after a non-space character,
	// so the reader is not at the start of a line on the first call.
	// After that, each //go:embed extraction leaves the reader
	// at the end of a line.
	startLine := !first
	var c byte
	for r.err == nil && !r.eof {
		c = r.readByteNoBuf()
	Reswitch:
		switch c {
		default:
			startLine = false

		case '\n':
			startLine = true

		case ' ', '\t':
			// leave startLine alone

		case '"':
			// Skip an interpreted string literal; a //go:embed inside a
			// string is not a directive.
			startLine = false
			for r.err == nil {
				if r.eof {
					r.syntaxError()
				}
				c = r.readByteNoBuf()
				if c == '\\' {
					r.readByteNoBuf()
					if r.err != nil {
						r.syntaxError()
						return false
					}
					continue
				}
				if c == '"' {
					c = r.readByteNoBuf()
					goto Reswitch
				}
			}
			goto Reswitch

		case '`':
			// Skip a raw string literal.
			startLine = false
			for r.err == nil {
				if r.eof {
					r.syntaxError()
				}
				c = r.readByteNoBuf()
				if c == '`' {
					c = r.readByteNoBuf()
					goto Reswitch
				}
			}

		case '/':
			c = r.readByteNoBuf()
			switch c {
			default:
				startLine = false
				goto Reswitch

			case '*':
				// Skip a block comment.
				var c1 byte
				for (c != '*' || c1 != '/') && r.err == nil {
					if r.eof {
						r.syntaxError()
					}
					c, c1 = c1, r.readByteNoBuf()
				}
				startLine = false

			case '/':
				if startLine {
					// Try to read this as a //go:embed comment.
					for i := range goEmbed {
						c = r.readByteNoBuf()
						if c != goEmbed[i] {
							goto SkipSlashSlash
						}
					}
					c = r.readByteNoBuf()
					if c == ' ' || c == '\t' {
						// Found one!
						return true
					}
				}
			SkipSlashSlash:
				// An ordinary // comment; skip to end of line.
				for c != '\n' && r.err == nil && !r.eof {
					c = r.readByteNoBuf()
				}
				startLine = true
			}
		}
	}
	return false
}
+
// readKeyword reads the given keyword from the input.
// If the keyword is not present, readKeyword records a syntax error.
func (r *importReader) readKeyword(kw string) {
	r.peekByte(true) // skip leading spaces and comments
	for i := 0; i < len(kw); i++ {
		if r.nextByte(false) != kw[i] {
			r.syntaxError()
			return
		}
	}
	// The keyword must not be immediately followed by an identifier
	// character, or it is merely a prefix of a longer identifier.
	if isIdent(r.peekByte(false)) {
		r.syntaxError()
	}
}
+
// readIdent reads an identifier from the input.
// If an identifier is not present, readIdent records a syntax error.
func (r *importReader) readIdent() {
	c := r.peekByte(true)
	if !isIdent(c) {
		r.syntaxError()
		return
	}
	// Consume identifier bytes until a non-identifier byte is seen.
	for isIdent(r.peekByte(false)) {
		r.peek = 0
	}
}
+
// readString reads a quoted string literal from the input.
// If an identifier is not present, readString records a syntax error.
func (r *importReader) readString() {
	switch r.nextByte(true) {
	case '`':
		// Raw string: runs to the closing backquote; newlines are allowed.
		for r.err == nil {
			if r.nextByte(false) == '`' {
				break
			}
			if r.eof {
				r.syntaxError()
			}
		}
	case '"':
		// Interpreted string: runs to the closing unescaped quote and may
		// not span lines.
		for r.err == nil {
			c := r.nextByte(false)
			if c == '"' {
				break
			}
			if r.eof || c == '\n' {
				r.syntaxError()
			}
			if c == '\\' {
				r.nextByte(false)
			}
		}
	default:
		r.syntaxError()
	}
}
+
// readImport reads an import clause - optional identifier followed by quoted string -
// from the input.
func (r *importReader) readImport() {
	c := r.peekByte(true)
	if c == '.' {
		// Dot import: `import . "path"`.
		r.peek = 0
	} else if isIdent(c) {
		// Named import: `import name "path"`.
		r.readIdent()
	}
	r.readString()
}
+
// readComments is like io.ReadAll, except that it only reads the leading
// block of comments in the file. The returned bytes end just before the
// first non-space, non-comment byte.
func readComments(f io.Reader) ([]byte, error) {
	r := newImportReader("", f)
	r.peekByte(true)
	if r.err == nil && !r.eof {
		// Didn't reach EOF, so must have found a non-space byte. Remove it.
		r.buf = r.buf[:len(r.buf)-1]
	}
	return r.buf, r.err
}
+
// readGoInfo expects a Go file as input and reads the file up to and including the import section.
// It records what it learned in *info.
// If info.fset is non-nil, readGoInfo parses the file and sets info.parsed, info.parseErr,
// info.imports, info.embeds, and info.embedErr.
//
// It only returns an error if there are problems reading the file,
// not for syntax errors in the file itself.
func readGoInfo(f io.Reader, info *fileInfo) error {
	r := newImportReader(info.filename, f)

	// Scan "package <ident>" followed by any number of import clauses.
	r.readKeyword("package")
	r.readIdent()
	for r.peekByte(true) == 'i' {
		r.readKeyword("import")
		if r.peekByte(true) == '(' {
			// Parenthesized import block.
			r.nextByte(false)
			for r.peekByte(true) != ')' && r.err == nil {
				r.readImport()
			}
			r.nextByte(false)
		} else {
			r.readImport()
		}
	}

	info.header = r.buf

	// If we stopped successfully before EOF, we read a byte that told us we were done.
	// Return all but that last byte, which would cause a syntax error if we let it through.
	if r.err == nil && !r.eof {
		info.header = r.buf[:len(r.buf)-1]
	}

	// If we stopped for a syntax error, consume the whole file so that
	// we are sure we don't change the errors that go/parser returns.
	if r.err == errSyntax {
		r.err = nil
		for r.err == nil && !r.eof {
			r.readByte()
		}
		info.header = r.buf
	}
	if r.err != nil {
		return r.err
	}

	if info.fset == nil {
		return nil
	}

	// Parse file header & record imports.
	info.parsed, info.parseErr = parser.ParseFile(info.fset, info.filename, info.header, parser.ImportsOnly|parser.ParseComments)
	if info.parseErr != nil {
		return nil
	}
	info.pkg = info.parsed.Name.Name

	hasEmbed := false
	for _, decl := range info.parsed.Decls {
		d, ok := decl.(*ast.GenDecl)
		if !ok {
			continue
		}
		for _, dspec := range d.Specs {
			spec, ok := dspec.(*ast.ImportSpec)
			if !ok {
				continue
			}
			quoted := spec.Path.Value
			path, err := strconv.Unquote(quoted)
			if err != nil {
				return fmt.Errorf("parser returned invalid quoted string: <%s>", quoted)
			}
			if path == "embed" {
				hasEmbed = true
			}

			// Attach the doc comment: per-spec if present, otherwise the
			// declaration's doc when it covers exactly one spec.
			doc := spec.Doc
			if doc == nil && len(d.Specs) == 1 {
				doc = d.Doc
			}
			info.imports = append(info.imports, fileImport{path, spec.Pos(), doc})
		}
	}

	// If the file imports "embed",
	// we have to look for //go:embed comments
	// in the remainder of the file.
	// The compiler will enforce the mapping of comments to
	// declared variables. We just need to know the patterns.
	// If there were //go:embed comments earlier in the file
	// (near the package statement or imports), the compiler
	// will reject them. They can be (and have already been) ignored.
	if hasEmbed {
		var line []byte
		for first := true; r.findEmbed(first); first = false {
			line = line[:0]
			pos := r.pos
			for {
				c := r.readByteNoBuf()
				if c == '\n' || r.err != nil || r.eof {
					break
				}
				line = append(line, c)
			}
			// Add args if line is well-formed.
			// Ignore badly-formed lines - the compiler will report them when it finds them,
			// and we can pretend they are not there to help go list succeed with what it knows.
			embs, err := parseGoEmbed(string(line), pos)
			if err == nil {
				info.embeds = append(info.embeds, embs...)
			}
		}
	}

	return nil
}
+
// parseGoEmbed parses the text following "//go:embed" to extract the glob patterns.
// It accepts unquoted space-separated patterns as well as double-quoted and back-quoted Go strings.
// This is based on a similar function in cmd/compile/internal/gc/noder.go;
// this version calculates position information as well.
func parseGoEmbed(args string, pos token.Position) ([]fileEmbed, error) {
	// trimBytes advances pos past the first n bytes of args.
	trimBytes := func(n int) {
		pos.Offset += n
		pos.Column += utf8.RuneCountInString(args[:n])
		args = args[n:]
	}
	// trimSpace drops leading whitespace, keeping pos in sync.
	trimSpace := func() {
		trim := strings.TrimLeftFunc(args, unicode.IsSpace)
		trimBytes(len(args) - len(trim))
	}

	var list []fileEmbed
	for trimSpace(); args != ""; trimSpace() {
		var path string
		pathPos := pos
	Switch:
		switch args[0] {
		default:
			// Bare pattern: runs to the next space.
			i := len(args)
			for j, c := range args {
				if unicode.IsSpace(c) {
					i = j
					break
				}
			}
			path = args[:i]
			trimBytes(i)

		case '`':
			// Back-quoted pattern: no escape processing.
			i := strings.Index(args[1:], "`")
			if i < 0 {
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
			path = args[1 : 1+i]
			trimBytes(1 + i + 1)

		case '"':
			// Double-quoted pattern: honor backslash escapes via strconv.Unquote.
			i := 1
			for ; i < len(args); i++ {
				if args[i] == '\\' {
					i++
					continue
				}
				if args[i] == '"' {
					q, err := strconv.Unquote(args[:i+1])
					if err != nil {
						return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
					}
					path = q
					trimBytes(i + 1)
					break Switch
				}
			}
			if i >= len(args) {
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
		}

		// Each pattern must be followed by whitespace or end of line.
		if args != "" {
			r, _ := utf8.DecodeRuneInString(args)
			if !unicode.IsSpace(r) {
				return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
			}
		}
		list = append(list, fileEmbed{path, pathPos})
	}
	return list, nil
}
diff --git a/go/tools/builders/replicate.go b/go/tools/builders/replicate.go
new file mode 100644
index 00000000..117f882c
--- /dev/null
+++ b/go/tools/builders/replicate.go
@@ -0,0 +1,167 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
// replicate copies or links a tree of files into a new location; it is used
// to materialize a GOROOT for the stdlib build.
+package main
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+)
+
// replicateMode selects how an entry is materialized at the destination.
type replicateMode int

const (
	copyMode     replicateMode = iota // copy file contents
	hardlinkMode                      // create hard links
	softlinkMode                      // create symbolic links
)

// replicateOption mutates a replicateConfig; options are applied in order.
type replicateOption func(*replicateConfig)

// replicateConfig controls how replicate maps a source tree onto a destination.
type replicateConfig struct {
	removeFirst bool          // remove existing destination entries before creating them
	fileMode    replicateMode // how individual files are replicated
	dirMode     replicateMode // how whole directories are replicated
	paths       []string      // if non-empty, only these paths below src are replicated
}
+
+func replicatePaths(paths ...string) replicateOption {
+ return func(config *replicateConfig) {
+ config.paths = append(config.paths, paths...)
+ }
+}
+
+// replicatePrepare is the common preparation steps for a replication entry
+func replicatePrepare(dst string, config *replicateConfig) error {
+ dir := filepath.Dir(dst)
+ if err := os.MkdirAll(dir, 0755); err != nil {
+ return fmt.Errorf("Failed to make %s: %v", dir, err)
+ }
+ if config.removeFirst {
+ _ = os.Remove(dst)
+ }
+ return nil
+}
+
+// replicateFile is called internally by replicate to map a single file from src into dst.
+func replicateFile(src, dst string, config *replicateConfig) error {
+ if err := replicatePrepare(dst, config); err != nil {
+ return err
+ }
+ switch config.fileMode {
+ case copyMode:
+ in, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer in.Close()
+ out, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ _, err = io.Copy(out, in)
+ closeerr := out.Close()
+ if err != nil {
+ return err
+ }
+ if closeerr != nil {
+ return closeerr
+ }
+ s, err := os.Stat(src)
+ if err != nil {
+ return err
+ }
+ if err := os.Chmod(dst, s.Mode()); err != nil {
+ return err
+ }
+ return nil
+ case hardlinkMode:
+ return os.Link(src, dst)
+ case softlinkMode:
+ return os.Symlink(src, dst)
+ default:
+ return fmt.Errorf("Invalid replication mode %d", config.fileMode)
+ }
+}
+
+// replicateDir makes a tree of files visible in a new location.
+// It is allowed to take any efficient method of doing so.
+func replicateDir(src, dst string, config *replicateConfig) error {
+ if err := replicatePrepare(dst, config); err != nil {
+ return err
+ }
+ switch config.dirMode {
+ case copyMode:
+ return filepath.Walk(src, func(path string, f os.FileInfo, err error) error {
+ if f.IsDir() {
+ return nil
+ }
+ relative, err := filepath.Rel(src, path)
+ if err != nil {
+ return err
+ }
+ return replicateFile(path, filepath.Join(dst, relative), config)
+ })
+ case hardlinkMode:
+ return os.Link(src, dst)
+ case softlinkMode:
+ return os.Symlink(src, dst)
+ default:
+ return fmt.Errorf("Invalid replication mode %d", config.fileMode)
+ }
+}
+
+// replicateTree is called for each single src dst pair.
+func replicateTree(src, dst string, config *replicateConfig) error {
+ if err := os.RemoveAll(dst); err != nil {
+ return fmt.Errorf("Failed to remove file at destination %s: %v", dst, err)
+ }
+ if l, err := filepath.EvalSymlinks(src); err != nil {
+ return err
+ } else {
+ src = l
+ }
+ if s, err := os.Stat(src); err != nil {
+ return err
+ } else if s.IsDir() {
+ return replicateDir(src, dst, config)
+ }
+ return replicateFile(src, dst, config)
+}
+
+// replicate makes a tree of files visible in a new location.
+// You control how it does so using options, by default it presumes the entire tree
+// of files rooted at src must be visible at dst, and that it should do so by copying.
+// src is allowed to be a file, in which case just the one file is copied.
+func replicate(src, dst string, options ...replicateOption) error {
+ config := replicateConfig{
+ removeFirst: true,
+ }
+ for _, option := range options {
+ option(&config)
+ }
+ if len(config.paths) == 0 {
+ return replicateTree(src, dst, &config)
+ }
+ for _, base := range config.paths {
+ from := filepath.Join(src, base)
+ to := filepath.Join(dst, base)
+ if err := replicateTree(from, to, &config); err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/go/tools/builders/stdlib.go b/go/tools/builders/stdlib.go
new file mode 100644
index 00000000..d7b2bf0b
--- /dev/null
+++ b/go/tools/builders/stdlib.go
@@ -0,0 +1,169 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go/build"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
// stdlib builds the standard library in the appropriate mode into a new goroot.
func stdlib(args []string) error {
	// process the args
	flags := flag.NewFlagSet("stdlib", flag.ExitOnError)
	goenv := envFlags(flags)
	out := flags.String("out", "", "Path to output go root")
	race := flags.Bool("race", false, "Build in race mode")
	shared := flags.Bool("shared", false, "Build in shared mode")
	dynlink := flags.Bool("dynlink", false, "Build in dynlink mode")
	var packages multiFlag
	flags.Var(&packages, "package", "Packages to build")
	var gcflags quoteMultiFlag
	flags.Var(&gcflags, "gcflags", "Go compiler flags")
	if err := flags.Parse(args); err != nil {
		return err
	}
	if err := goenv.checkFlags(); err != nil {
		return err
	}
	goroot := os.Getenv("GOROOT")
	if goroot == "" {
		return fmt.Errorf("GOROOT not set")
	}
	output := abs(*out)

	// Fail fast if cgo is required but a toolchain is not configured.
	if os.Getenv("CGO_ENABLED") == "1" && filepath.Base(os.Getenv("CC")) == "vc_installation_error.bat" {
		return fmt.Errorf(`cgo is required, but a C toolchain has not been configured.
You may need to use the flags --cpu=x64_windows --compiler=mingw-gcc.`)
	}

	// Link in the bare minimum needed to the new GOROOT
	if err := replicate(goroot, output, replicatePaths("src", "pkg/tool", "pkg/include")); err != nil {
		return err
	}

	output, err := processPath(output)
	if err != nil {
		return err
	}

	// Now switch to the newly created GOROOT
	os.Setenv("GOROOT", output)

	// Create a temporary cache directory. "go build" requires this starting
	// in Go 1.12.
	cachePath := filepath.Join(output, ".gocache")
	os.Setenv("GOCACHE", cachePath)
	defer os.RemoveAll(cachePath)

	// Disable modules for the 'go install' command. Depending on the sandboxing
	// mode, there may be a go.mod file in a parent directory which will turn
	// modules on in "auto" mode.
	os.Setenv("GO111MODULE", "off")

	// Make sure we have an absolute path to the C compiler.
	// TODO(#1357): also take absolute paths of includes and other paths in flags.
	os.Setenv("CC", quotePathIfNeeded(abs(os.Getenv("CC"))))

	// Ensure paths are absolute.
	absPaths := []string{}
	for _, path := range filepath.SplitList(os.Getenv("PATH")) {
		absPaths = append(absPaths, abs(path))
	}
	os.Setenv("PATH", strings.Join(absPaths, string(os.PathListSeparator)))

	sandboxPath := abs(".")

	// Strip path prefix from source files in debug information.
	os.Setenv("CGO_CFLAGS", os.Getenv("CGO_CFLAGS")+" "+strings.Join(defaultCFlags(output), " "))
	os.Setenv("CGO_LDFLAGS", os.Getenv("CGO_LDFLAGS")+" "+strings.Join(defaultLdFlags(), " "))

	// Allow flags in CGO_LDFLAGS that wouldn't pass the security check.
	// Workaround for golang.org/issue/42565.
	var b strings.Builder
	sep := ""
	cgoLdflags, _ := splitQuoted(os.Getenv("CGO_LDFLAGS"))
	for _, f := range cgoLdflags {
		b.WriteString(sep)
		sep = "|"
		b.WriteString(regexp.QuoteMeta(f))
		// If the flag is -framework, the flag value needs to be in the same
		// condition (joined with a space rather than "|").
		if f == "-framework" {
			sep = " "
		}
	}
	os.Setenv("CGO_LDFLAGS_ALLOW", b.String())
	// installgoroot=all asks "go install" to write .a files for GOROOT
	// packages (documented GODEBUG setting; no-op on toolchains that
	// predate it).
	os.Setenv("GODEBUG", "installgoroot=all")

	// Build the commands needed to build the std library in the right mode
	// NOTE: the go command stamps compiled .a files with build ids, which are
	// cryptographic sums derived from the inputs. This prevents us from
	// creating reproducible builds because the build ids are hashed from
	// CGO_CFLAGS, which frequently contains absolute paths. As a workaround,
	// we strip the build ids, since they won't be used after this.
	installArgs := goenv.goCmd("install", "-toolexec", abs(os.Args[0])+" filterbuildid")
	if len(build.Default.BuildTags) > 0 {
		installArgs = append(installArgs, "-tags", strings.Join(build.Default.BuildTags, ","))
	}

	ldflags := []string{"-trimpath", sandboxPath}
	asmflags := []string{"-trimpath", output}
	if *race {
		installArgs = append(installArgs, "-race")
	}
	if *shared {
		gcflags = append(gcflags, "-shared")
		ldflags = append(ldflags, "-shared")
		asmflags = append(asmflags, "-shared")
	}
	if *dynlink {
		gcflags = append(gcflags, "-dynlink")
		ldflags = append(ldflags, "-dynlink")
		asmflags = append(asmflags, "-dynlink")
	}

	// Since Go 1.10, an all= prefix indicates the flags should apply to the package
	// and its dependencies, rather than just the package itself. This was the
	// default behavior before Go 1.10.
	allSlug := ""
	for _, t := range build.Default.ReleaseTags {
		if t == "go1.10" {
			allSlug = "all="
			break
		}
	}
	installArgs = append(installArgs, "-gcflags="+allSlug+strings.Join(gcflags, " "))
	installArgs = append(installArgs, "-ldflags="+allSlug+strings.Join(ldflags, " "))
	installArgs = append(installArgs, "-asmflags="+allSlug+strings.Join(asmflags, " "))

	// Modifying CGO flags to use only absolute path
	// because go is having its own sandbox, all CGO flags must use absolute path
	if err := absEnv(cgoEnvVars, cgoAbsEnvFlags); err != nil {
		return fmt.Errorf("error modifying cgo environment to absolute path: %v", err)
	}

	installArgs = append(installArgs, packages...)
	if err := goenv.runCommand(installArgs); err != nil {
		return err
	}
	return nil
}
diff --git a/go/tools/builders/stdliblist.go b/go/tools/builders/stdliblist.go
new file mode 100644
index 00000000..f6a61442
--- /dev/null
+++ b/go/tools/builders/stdliblist.go
@@ -0,0 +1,293 @@
+// Copyright 2021 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "go/build"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
// Copy and pasted from golang.org/x/tools/go/packages
//
// flatPackagesError is a diagnostic attached to a package record.
type flatPackagesError struct {
	Pos  string // "file:line:col" or "file:line" or "" or "-"
	Msg  string
	Kind flatPackagesErrorKind
}

// flatPackagesErrorKind classifies which phase produced an error.
type flatPackagesErrorKind int

const (
	UnknownError flatPackagesErrorKind = iota
	ListError
	ParseError
	TypeError
)
+
+func (err flatPackagesError) Error() string {
+ pos := err.Pos
+ if pos == "" {
+ pos = "-" // like token.Position{}.String()
+ }
+ return pos + ": " + err.Msg
+}
+
// flatPackage is the JSON form of Package
// It drops all the type and syntax fields, and transforms the Imports
type flatPackage struct {
	ID              string              // Bazel-flavored package ID, see stdlibPackageID
	Name            string              `json:",omitempty"`
	PkgPath         string              `json:",omitempty"`
	Standard        bool                `json:",omitempty"`
	Errors          []flatPackagesError `json:",omitempty"`
	GoFiles         []string            `json:",omitempty"`
	CompiledGoFiles []string            `json:",omitempty"`
	OtherFiles      []string            `json:",omitempty"`
	ExportFile      string              `json:",omitempty"`
	Imports         map[string]string   `json:",omitempty"` // import path -> package ID
}
+
// goListPackage mirrors the JSON object emitted by "go list -json" for a
// single package.
type goListPackage struct {
	Dir        string // directory containing package sources
	ImportPath string // import path of package in dir
	Name       string // package name
	Target     string // install path
	Goroot     bool   // is this package in the Go root?
	Standard   bool   // is this package part of the standard Go library?
	Root       string // Go root or Go path dir containing this package
	Export     string // file containing export data (when using -export)
	// Source files
	GoFiles           []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
	CgoFiles          []string // .go source files that import "C"
	CompiledGoFiles   []string // .go files presented to compiler (when using -compiled)
	IgnoredGoFiles    []string // .go source files ignored due to build constraints
	IgnoredOtherFiles []string // non-.go source files ignored due to build constraints
	CFiles            []string // .c source files
	CXXFiles          []string // .cc, .cxx and .cpp source files
	MFiles            []string // .m source files
	HFiles            []string // .h, .hh, .hpp and .hxx source files
	FFiles            []string // .f, .F, .for and .f90 Fortran source files
	SFiles            []string // .s source files
	SwigFiles         []string // .swig files
	SwigCXXFiles      []string // .swigcxx files
	SysoFiles         []string // .syso object files to add to archive
	TestGoFiles       []string // _test.go files in package
	XTestGoFiles      []string // _test.go files outside package
	// Embedded files
	EmbedPatterns      []string // //go:embed patterns
	EmbedFiles         []string // files matched by EmbedPatterns
	TestEmbedPatterns  []string // //go:embed patterns in TestGoFiles
	TestEmbedFiles     []string // files matched by TestEmbedPatterns
	XTestEmbedPatterns []string // //go:embed patterns in XTestGoFiles
	XTestEmbedFiles    []string // files matched by XTestEmbedPatterns
	// Dependency information
	Imports   []string          // import paths used by this package
	ImportMap map[string]string // map from source import to ImportPath (identity entries omitted)
	// Error information
	Incomplete bool                 // this package or a dependency has an error
	Error      *flatPackagesError   // error loading package
	DepsErrors []*flatPackagesError // errors loading dependencies
}
+
// stdlibPackageID converts an import path into the Bazel label used as the
// package ID for standard-library packages.
func stdlibPackageID(importPath string) string {
	const labelPrefix = "@io_bazel_rules_go//stdlib:"
	return labelPrefix + importPath
}
+
// outputBasePath replaces the cloneBase prefix of p with the
// __BAZEL_OUTPUT_BASE__ placeholder label. The error from filepath.Rel is
// deliberately discarded; callers pass paths rooted under cloneBase.
func outputBasePath(cloneBase, p string) string {
	rel, _ := filepath.Rel(cloneBase, p)
	return filepath.Join("__BAZEL_OUTPUT_BASE__", rel)
}
+
+// absoluteSourcesPaths replace cloneBase of the absolution
+// paths with the label for all source files in a package
+func absoluteSourcesPaths(cloneBase, pkgDir string, srcs []string) []string {
+ ret := make([]string, 0, len(srcs))
+ pkgDir = outputBasePath(cloneBase, pkgDir)
+ for _, src := range srcs {
+ absPath := src
+
+ // Generated files will already have an absolute path. These come from
+ // the compiler's cache.
+ if !filepath.IsAbs(src) {
+ absPath = filepath.Join(pkgDir, src)
+ }
+
+ ret = append(ret, absPath)
+ }
+ return ret
+}
+
// filterGoFiles keeps only files either ending in .go or those without an
// extension (which are from the cache). This is a work around for
// https://golang.org/issue/28749: cmd/go puts assembly, C, and C++ files in
// CompiledGoFiles.
func filterGoFiles(srcs []string) []string {
	kept := make([]string, 0, len(srcs))
	for _, name := range srcs {
		switch filepath.Ext(name) {
		case ".go", "":
			kept = append(kept, name)
		}
	}
	return kept
}
+
+func flatPackageForStd(cloneBase string, pkg *goListPackage) *flatPackage {
+ goFiles := absoluteSourcesPaths(cloneBase, pkg.Dir, pkg.GoFiles)
+ compiledGoFiles := absoluteSourcesPaths(cloneBase, pkg.Dir, pkg.CompiledGoFiles)
+
+ newPkg := &flatPackage{
+ ID: stdlibPackageID(pkg.ImportPath),
+ Name: pkg.Name,
+ PkgPath: pkg.ImportPath,
+ ExportFile: outputBasePath(cloneBase, pkg.Target),
+ Imports: map[string]string{},
+ Standard: pkg.Standard,
+ GoFiles: goFiles,
+ CompiledGoFiles: filterGoFiles(compiledGoFiles),
+ }
+
+ // imports
+ //
+ // Imports contains the IDs of all imported packages.
+ // ImportsMap records (path, ID) only where they differ.
+ ids := make(map[string]struct{})
+ for _, id := range pkg.Imports {
+ ids[id] = struct{}{}
+ }
+
+ for path, id := range pkg.ImportMap {
+ newPkg.Imports[path] = stdlibPackageID(id)
+ delete(ids, id)
+ }
+
+ for id := range ids {
+ if id != "C" {
+ newPkg.Imports[id] = stdlibPackageID(id)
+ }
+ }
+
+ return newPkg
+}
+
// stdliblist runs `go list -json` on the standard library and saves it to a file.
func stdliblist(args []string) error {
	// process the args
	flags := flag.NewFlagSet("stdliblist", flag.ExitOnError)
	goenv := envFlags(flags)
	out := flags.String("out", "", "Path to output go list json")
	if err := flags.Parse(args); err != nil {
		return err
	}
	if err := goenv.checkFlags(); err != nil {
		return err
	}

	// The SDK path must stay relative so it can be re-rooted under the
	// cloneBase work directory below.
	if filepath.IsAbs(goenv.sdk) {
		return fmt.Errorf("-sdk needs to be a relative path, but got %s", goenv.sdk)
	}

	// In Go 1.18, the standard library started using go:embed directives.
	// When Bazel runs this action, it does so inside a sandbox where GOROOT points
	// to an external/go_sdk directory that contains a symlink farm of all files in
	// the Go SDK.
	// If we run "go list" with that GOROOT, this action will fail because those
	// go:embed directives will refuse to include the symlinks in the sandbox.
	//
	// To work around this, cloneGoRoot creates a copy of a subset of external/go_sdk
	// that is sufficient to call "go list" into a new cloneBase directory, e.g.
	// "go list" needs to call "compile", which needs "pkg/tool".
	// We also need to retain the same relative path to the root directory, e.g.
	// "$OUTPUT_BASE/external/go_sdk" becomes
	// {cloneBase}/external/go_sdk", which will be set at GOROOT later. This ensures
	// that file paths in the generated JSON are still valid.
	//
	// Here we replicate goRoot(absolute path of goenv.sdk) to newGoRoot.
	cloneBase, cleanup, err := goenv.workDir()
	if err != nil {
		return err
	}
	defer func() { cleanup() }()

	newGoRoot := filepath.Join(cloneBase, goenv.sdk)
	if err := replicate(abs(goenv.sdk), abs(newGoRoot), replicatePaths("src", "pkg/tool", "pkg/include")); err != nil {
		return err
	}

	// Ensure paths are absolute.
	absPaths := []string{}
	for _, path := range filepath.SplitList(os.Getenv("PATH")) {
		absPaths = append(absPaths, abs(path))
	}
	os.Setenv("PATH", strings.Join(absPaths, string(os.PathListSeparator)))
	os.Setenv("GOROOT", newGoRoot)

	cgoEnabled := os.Getenv("CGO_ENABLED") == "1"
	// Make sure we have an absolute path to the C compiler.
	// TODO(#1357): also take absolute paths of includes and other paths in flags.
	ccEnv, ok := os.LookupEnv("CC")
	if cgoEnabled && !ok {
		return fmt.Errorf("CC must be set")
	}
	os.Setenv("CC", quotePathIfNeeded(abs(ccEnv)))

	// We want to keep the cache around so that the processed files can be used by other tools.
	cachePath := abs(*out + ".gocache")
	os.Setenv("GOCACHE", cachePath)
	os.Setenv("GOMODCACHE", cachePath)
	os.Setenv("GOPATH", cachePath)

	listArgs := goenv.goCmd("list")
	if len(build.Default.BuildTags) > 0 {
		listArgs = append(listArgs, "-tags", strings.Join(build.Default.BuildTags, ","))
	}

	if cgoEnabled {
		// -compiled reports the cgo-generated files actually fed to the compiler.
		listArgs = append(listArgs, "-compiled=true")
	}

	listArgs = append(listArgs, "-json", "builtin", "std", "runtime/cgo")

	jsonFile, err := os.Create(*out)
	if err != nil {
		return err
	}
	defer jsonFile.Close()

	// Buffer the raw "go list" output, then re-encode each package record
	// in the flattened form expected by downstream tooling.
	jsonData := &bytes.Buffer{}
	if err := goenv.runCommandToFile(jsonData, os.Stderr, listArgs); err != nil {
		return err
	}

	encoder := json.NewEncoder(jsonFile)
	decoder := json.NewDecoder(jsonData)
	for decoder.More() {
		var pkg *goListPackage
		if err := decoder.Decode(&pkg); err != nil {
			return err
		}
		if err := encoder.Encode(flatPackageForStd(cloneBase, pkg)); err != nil {
			return err
		}
	}

	return nil
}
diff --git a/go/tools/builders/stdliblist_test.go b/go/tools/builders/stdliblist_test.go
new file mode 100644
index 00000000..b456b0be
--- /dev/null
+++ b/go/tools/builders/stdliblist_test.go
@@ -0,0 +1,48 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+)
+
+func Test_stdliblist(t *testing.T) {
+ testDir := t.TempDir()
+ outJSON := filepath.Join(testDir, "out.json")
+
+ test_args := []string{
+ fmt.Sprintf("-out=%s", outJSON),
+ "-sdk=external/go_sdk",
+ }
+
+ if err := stdliblist(test_args); err != nil {
+ t.Errorf("calling stdliblist got err: %v", err)
+ }
+ f, err := os.Open(outJSON)
+ if err != nil {
+ t.Errorf("cannot open output json: %v", err)
+ }
+ defer func() { _ = f.Close() }()
+ decoder := json.NewDecoder(f)
+ for decoder.More() {
+ var result *flatPackage
+ if err := decoder.Decode(&result); err != nil {
+ t.Errorf("unable to decode output json: %v\n", err)
+ }
+
+ if !strings.HasPrefix(result.ID, "@io_bazel_rules_go//stdlib") {
+ t.Errorf("ID should be prefixed with @io_bazel_rules_go//stdlib :%v", result)
+ }
+ if !strings.HasPrefix(result.ExportFile, "__BAZEL_OUTPUT_BASE__") {
+ t.Errorf("export file should be prefixed with __BAZEL_OUTPUT_BASE__ :%v", result)
+ }
+ for _, gofile := range result.GoFiles {
+ if !strings.HasPrefix(gofile, "__BAZEL_OUTPUT_BASE__/external/go_sdk") {
+ t.Errorf("all go files should be prefixed with __BAZEL_OUTPUT_BASE__/external/go_sdk :%v", result)
+ }
+ }
+ }
+}