mirror of https://github.com/dexidp/dex.git
20 changed files with 7768 additions and 0 deletions
@ -0,0 +1,27 @@
|
||||
Copyright (c) 2013 The Go Authors. All rights reserved. |
||||
|
||||
Redistribution and use in source and binary forms, with or without |
||||
modification, are permitted provided that the following conditions are |
||||
met: |
||||
|
||||
* Redistributions of source code must retain the above copyright |
||||
notice, this list of conditions and the following disclaimer. |
||||
* Redistributions in binary form must reproduce the above |
||||
copyright notice, this list of conditions and the following disclaimer |
||||
in the documentation and/or other materials provided with the |
||||
distribution. |
||||
* Neither the name of Google Inc. nor the names of its |
||||
contributors may be used to endorse or promote products derived from |
||||
this software without specific prior written permission. |
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
@ -0,0 +1,159 @@
|
||||
// Copyright (c) 2013 The Go Authors. All rights reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file or at
|
||||
// https://developers.google.com/open-source/licenses/bsd.
|
||||
|
||||
// golint lints the Go source files named on its command line.
|
||||
package main // import "golang.org/x/lint/golint"
|
||||
|
||||
import ( |
||||
"flag" |
||||
"fmt" |
||||
"go/build" |
||||
"io/ioutil" |
||||
"os" |
||||
"path/filepath" |
||||
"strings" |
||||
|
||||
"golang.org/x/lint" |
||||
) |
||||
|
||||
var (
	// minConfidence is the reporting threshold: problems whose
	// confidence is below this value are not printed by lintFiles.
	minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it")
	// setExitStatus, when true, makes main exit with status 1 if
	// at least one suggestion was printed.
	setExitStatus = flag.Bool("set_exit_status", false, "set exit status to 1 if any issues are found")
	// suggestions counts the problems printed so far (incremented by lintFiles).
	suggestions int
)
||||
|
||||
// usage prints golint's command-line help text to standard error,
// followed by the registered flag defaults.
func usage() {
	w := os.Stderr
	fmt.Fprintf(w, "Usage of %s:\n", os.Args[0])
	fmt.Fprint(w, "\tgolint [flags] # runs on package in current directory\n")
	fmt.Fprint(w, "\tgolint [flags] [packages]\n")
	fmt.Fprint(w, "\tgolint [flags] [directories] # where a '/...' suffix includes all sub-directories\n")
	fmt.Fprint(w, "\tgolint [flags] [files] # all must belong to a single package\n")
	fmt.Fprint(w, "Flags:\n")
	flag.PrintDefaults()
}
||||
|
||||
func main() { |
||||
flag.Usage = usage |
||||
flag.Parse() |
||||
|
||||
if flag.NArg() == 0 { |
||||
lintDir(".") |
||||
} else { |
||||
// dirsRun, filesRun, and pkgsRun indicate whether golint is applied to
|
||||
// directory, file or package targets. The distinction affects which
|
||||
// checks are run. It is no valid to mix target types.
|
||||
var dirsRun, filesRun, pkgsRun int |
||||
var args []string |
||||
for _, arg := range flag.Args() { |
||||
if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) { |
||||
dirsRun = 1 |
||||
for _, dirname := range allPackagesInFS(arg) { |
||||
args = append(args, dirname) |
||||
} |
||||
} else if isDir(arg) { |
||||
dirsRun = 1 |
||||
args = append(args, arg) |
||||
} else if exists(arg) { |
||||
filesRun = 1 |
||||
args = append(args, arg) |
||||
} else { |
||||
pkgsRun = 1 |
||||
args = append(args, arg) |
||||
} |
||||
} |
||||
|
||||
if dirsRun+filesRun+pkgsRun != 1 { |
||||
usage() |
||||
os.Exit(2) |
||||
} |
||||
switch { |
||||
case dirsRun == 1: |
||||
for _, dir := range args { |
||||
lintDir(dir) |
||||
} |
||||
case filesRun == 1: |
||||
lintFiles(args...) |
||||
case pkgsRun == 1: |
||||
for _, pkg := range importPaths(args) { |
||||
lintPackage(pkg) |
||||
} |
||||
} |
||||
} |
||||
|
||||
if *setExitStatus && suggestions > 0 { |
||||
fmt.Fprintf(os.Stderr, "Found %d lint suggestions; failing.\n", suggestions) |
||||
os.Exit(1) |
||||
} |
||||
} |
||||
|
||||
// isDir reports whether filename exists and is a directory.
func isDir(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		return false
	}
	return info.IsDir()
}
||||
|
||||
// exists reports whether filename can be stat'ed (file or directory).
func exists(filename string) bool {
	if _, err := os.Stat(filename); err != nil {
		return false
	}
	return true
}
||||
|
||||
func lintFiles(filenames ...string) { |
||||
files := make(map[string][]byte) |
||||
for _, filename := range filenames { |
||||
src, err := ioutil.ReadFile(filename) |
||||
if err != nil { |
||||
fmt.Fprintln(os.Stderr, err) |
||||
continue |
||||
} |
||||
files[filename] = src |
||||
} |
||||
|
||||
l := new(lint.Linter) |
||||
ps, err := l.LintFiles(files) |
||||
if err != nil { |
||||
fmt.Fprintf(os.Stderr, "%v\n", err) |
||||
return |
||||
} |
||||
for _, p := range ps { |
||||
if p.Confidence >= *minConfidence { |
||||
fmt.Printf("%v: %s\n", p.Position, p.Text) |
||||
suggestions++ |
||||
} |
||||
} |
||||
} |
||||
|
||||
func lintDir(dirname string) { |
||||
pkg, err := build.ImportDir(dirname, 0) |
||||
lintImportedPackage(pkg, err) |
||||
} |
||||
|
||||
func lintPackage(pkgname string) { |
||||
pkg, err := build.Import(pkgname, ".", 0) |
||||
lintImportedPackage(pkg, err) |
||||
} |
||||
|
||||
func lintImportedPackage(pkg *build.Package, err error) { |
||||
if err != nil { |
||||
if _, nogo := err.(*build.NoGoError); nogo { |
||||
// Don't complain if the failure is due to no Go source files.
|
||||
return |
||||
} |
||||
fmt.Fprintln(os.Stderr, err) |
||||
return |
||||
} |
||||
|
||||
var files []string |
||||
files = append(files, pkg.GoFiles...) |
||||
files = append(files, pkg.CgoFiles...) |
||||
files = append(files, pkg.TestGoFiles...) |
||||
if pkg.Dir != "." { |
||||
for i, f := range files { |
||||
files[i] = filepath.Join(pkg.Dir, f) |
||||
} |
||||
} |
||||
// TODO(dsymonds): Do foo_test too (pkg.XTestGoFiles)
|
||||
|
||||
lintFiles(files...) |
||||
} |
||||
@ -0,0 +1,309 @@
|
||||
package main |
||||
|
||||
/* |
||||
|
||||
This file holds a direct copy of the import path matching code of |
||||
https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be
|
||||
replaced when https://golang.org/issue/8768 is resolved.
|
||||
|
||||
It has been updated to follow upstream changes in a few ways. |
||||
|
||||
*/ |
||||
|
||||
import ( |
||||
"fmt" |
||||
"go/build" |
||||
"log" |
||||
"os" |
||||
"path" |
||||
"path/filepath" |
||||
"regexp" |
||||
"runtime" |
||||
"strings" |
||||
) |
||||
|
||||
var (
	// buildContext is the build configuration consulted when walking
	// and importing packages (GOPATH, GOOS, cgo settings, ...).
	buildContext = build.Default
	// goroot and gorootSrc locate the Go installation and its source
	// tree, used to distinguish "std"/"cmd" packages from GOPATH ones.
	goroot    = filepath.Clean(runtime.GOROOT())
	gorootSrc = filepath.Join(goroot, "src")
)
||||
|
||||
// importPathsNoDotExpansion returns the import paths to use for the given
|
||||
// command line, but it does no ... expansion.
|
||||
func importPathsNoDotExpansion(args []string) []string { |
||||
if len(args) == 0 { |
||||
return []string{"."} |
||||
} |
||||
var out []string |
||||
for _, a := range args { |
||||
// Arguments are supposed to be import paths, but
|
||||
// as a courtesy to Windows developers, rewrite \ to /
|
||||
// in command-line arguments. Handles .\... and so on.
|
||||
if filepath.Separator == '\\' { |
||||
a = strings.Replace(a, `\`, `/`, -1) |
||||
} |
||||
|
||||
// Put argument in canonical form, but preserve leading ./.
|
||||
if strings.HasPrefix(a, "./") { |
||||
a = "./" + path.Clean(a) |
||||
if a == "./." { |
||||
a = "." |
||||
} |
||||
} else { |
||||
a = path.Clean(a) |
||||
} |
||||
if a == "all" || a == "std" { |
||||
out = append(out, allPackages(a)...) |
||||
continue |
||||
} |
||||
out = append(out, a) |
||||
} |
||||
return out |
||||
} |
||||
|
||||
// importPaths returns the import paths to use for the given command line.
|
||||
func importPaths(args []string) []string { |
||||
args = importPathsNoDotExpansion(args) |
||||
var out []string |
||||
for _, a := range args { |
||||
if strings.Contains(a, "...") { |
||||
if build.IsLocalImport(a) { |
||||
out = append(out, allPackagesInFS(a)...) |
||||
} else { |
||||
out = append(out, allPackages(a)...) |
||||
} |
||||
continue |
||||
} |
||||
out = append(out, a) |
||||
} |
||||
return out |
||||
} |
||||
|
||||
// matchPattern(pattern)(name) reports whether
|
||||
// name matches pattern. Pattern is a limited glob
|
||||
// pattern in which '...' means 'any string' and there
|
||||
// is no other special syntax.
|
||||
// matchPattern(pattern)(name) reports whether name matches pattern.
// Pattern is a limited glob in which "..." means "any string"; there
// is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
	expr := strings.Replace(regexp.QuoteMeta(pattern), `\.\.\.`, `.*`, -1)
	// Special case: foo/... matches foo too.
	if strings.HasSuffix(expr, `/.*`) {
		expr = strings.TrimSuffix(expr, `/.*`) + `(/.*)?`
	}
	re := regexp.MustCompile(`^` + expr + `$`)
	return re.MatchString
}
||||
|
||||
// hasPathPrefix reports whether the path s begins with the
|
||||
// elements in prefix.
|
||||
func hasPathPrefix(s, prefix string) bool { |
||||
switch { |
||||
default: |
||||
return false |
||||
case len(s) == len(prefix): |
||||
return s == prefix |
||||
case len(s) > len(prefix): |
||||
if prefix != "" && prefix[len(prefix)-1] == '/' { |
||||
return strings.HasPrefix(s, prefix) |
||||
} |
||||
return s[len(prefix)] == '/' && s[:len(prefix)] == prefix |
||||
} |
||||
} |
||||
|
||||
// treeCanMatchPattern(pattern)(name) reports whether
|
||||
// name or children of name can possibly match pattern.
|
||||
// Pattern is the same limited glob accepted by matchPattern.
|
||||
func treeCanMatchPattern(pattern string) func(name string) bool { |
||||
wildCard := false |
||||
if i := strings.Index(pattern, "..."); i >= 0 { |
||||
wildCard = true |
||||
pattern = pattern[:i] |
||||
} |
||||
return func(name string) bool { |
||||
return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || |
||||
wildCard && strings.HasPrefix(name, pattern) |
||||
} |
||||
} |
||||
|
||||
// allPackages returns all the packages that can be found
|
||||
// under the $GOPATH directories and $GOROOT matching pattern.
|
||||
// The pattern is either "all" (all packages), "std" (standard packages)
|
||||
// or a path including "...".
|
||||
func allPackages(pattern string) []string { |
||||
pkgs := matchPackages(pattern) |
||||
if len(pkgs) == 0 { |
||||
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) |
||||
} |
||||
return pkgs |
||||
} |
||||
|
||||
// matchPackages walks $GOROOT/src/cmd and every source directory in
// buildContext.SrcDirs(), returning the import paths of importable
// packages that match pattern. The meta-patterns "all" and "std"
// match everything (scope is then narrowed by the walk itself).
func matchPackages(pattern string) []string {
	// "all" and "std" match every name; any other pattern is compiled
	// into a matcher plus a pruning predicate for whole subtrees.
	match := func(string) bool { return true }
	treeCanMatch := func(string) bool { return true }
	if pattern != "all" && pattern != "std" {
		match = matchPattern(pattern)
		treeCanMatch = treeCanMatchPattern(pattern)
	}

	// have records names already emitted (or deliberately excluded)
	// so duplicates across source roots are suppressed.
	have := map[string]bool{
		"builtin": true, // ignore pseudo-package that exists only for documentation
	}
	if !buildContext.CgoEnabled {
		have["runtime/cgo"] = true // ignore during walk
	}
	var pkgs []string

	// Commands
	cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
	filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
		if err != nil || !fi.IsDir() || path == cmd {
			return nil
		}
		name := path[len(cmd):]
		if !treeCanMatch(name) {
			return filepath.SkipDir
		}
		// Commands are all in cmd/, not in subdirectories.
		if strings.Contains(name, string(filepath.Separator)) {
			return filepath.SkipDir
		}

		// We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
		name = "cmd/" + name
		if have[name] {
			return nil
		}
		have[name] = true
		if !match(name) {
			return nil
		}
		// Only report directories that actually import as packages;
		// a NoGoError just means an empty directory, not a problem.
		_, err = buildContext.ImportDir(path, 0)
		if err != nil {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})

	for _, src := range buildContext.SrcDirs() {
		// "std" and "cmd" only ever come from the GOROOT source tree.
		if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
			continue
		}
		src = filepath.Clean(src) + string(filepath.Separator)
		root := src
		if pattern == "cmd" {
			root += "cmd" + string(filepath.Separator)
		}
		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
			if err != nil || !fi.IsDir() || path == src {
				return nil
			}

			// Avoid .foo, _foo, and testdata directory trees.
			_, elem := filepath.Split(path)
			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
				return filepath.SkipDir
			}

			name := filepath.ToSlash(path[len(src):])
			if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
				// The name "std" is only the standard library.
				// If the name is cmd, it's the root of the command tree.
				return filepath.SkipDir
			}
			if !treeCanMatch(name) {
				return filepath.SkipDir
			}
			if have[name] {
				return nil
			}
			have[name] = true
			if !match(name) {
				return nil
			}
			_, err = buildContext.ImportDir(path, 0)
			if err != nil {
				// NOTE(review): unlike the cmd/ walk above, non-NoGo
				// import errors here are not logged — the directory is
				// still appended. Presumably intentional upstream; confirm.
				if _, noGo := err.(*build.NoGoError); noGo {
					return nil
				}
			}
			pkgs = append(pkgs, name)
			return nil
		})
	}
	return pkgs
}
||||
|
||||
// allPackagesInFS is like allPackages but is passed a pattern
|
||||
// beginning ./ or ../, meaning it should scan the tree rooted
|
||||
// at the given directory. There are ... in the pattern too.
|
||||
func allPackagesInFS(pattern string) []string { |
||||
pkgs := matchPackagesInFS(pattern) |
||||
if len(pkgs) == 0 { |
||||
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) |
||||
} |
||||
return pkgs |
||||
} |
||||
|
||||
// matchPackagesInFS walks the directory tree implied by a local
// pattern (beginning ./ or ../ and containing "...") and returns the
// import paths of matching, importable package directories.
func matchPackagesInFS(pattern string) []string {
	// Find directory to begin the scan.
	// Could be smarter but this one optimization
	// is enough for now, since ... is usually at the
	// end of a path.
	i := strings.Index(pattern, "...")
	dir, _ := path.Split(pattern[:i])

	// pattern begins with ./ or ../.
	// path.Clean will discard the ./ but not the ../.
	// We need to preserve the ./ for pattern matching
	// and in the returned import paths.
	prefix := ""
	if strings.HasPrefix(pattern, "./") {
		prefix = "./"
	}
	match := matchPattern(pattern)

	var pkgs []string
	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
		// Walk errors and plain files are skipped silently.
		if err != nil || !fi.IsDir() {
			return nil
		}
		if path == dir {
			// filepath.Walk starts at dir and recurses. For the recursive case,
			// the path is the result of filepath.Join, which calls filepath.Clean.
			// The initial case is not Cleaned, though, so we do this explicitly.
			//
			// This converts a path like "./io/" to "io". Without this step, running
			// "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
			// package, because prepending the prefix "./" to the unclean path would
			// result in "././io", and match("././io") returns false.
			path = filepath.Clean(path)
		}

		// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
		_, elem := filepath.Split(path)
		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
			return filepath.SkipDir
		}

		name := prefix + filepath.ToSlash(path)
		if !match(name) {
			return nil
		}
		// Only keep directories that import as Go packages; directories
		// with no Go files are skipped without complaint.
		if _, err = build.ImportDir(path, 0); err != nil {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})
	return pkgs
}
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,27 @@
|
||||
Copyright (c) 2009 The Go Authors. All rights reserved. |
||||
|
||||
Redistribution and use in source and binary forms, with or without |
||||
modification, are permitted provided that the following conditions are |
||||
met: |
||||
|
||||
* Redistributions of source code must retain the above copyright |
||||
notice, this list of conditions and the following disclaimer. |
||||
* Redistributions in binary form must reproduce the above |
||||
copyright notice, this list of conditions and the following disclaimer |
||||
in the documentation and/or other materials provided with the |
||||
distribution. |
||||
* Neither the name of Google Inc. nor the names of its |
||||
contributors may be used to endorse or promote products derived from |
||||
this software without specific prior written permission. |
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
@ -0,0 +1,22 @@
|
||||
Additional IP Rights Grant (Patents) |
||||
|
||||
"This implementation" means the copyrightable works distributed by |
||||
Google as part of the Go project. |
||||
|
||||
Google hereby grants to You a perpetual, worldwide, non-exclusive, |
||||
no-charge, royalty-free, irrevocable (except as stated in this section) |
||||
patent license to make, have made, use, offer to sell, sell, import, |
||||
transfer and otherwise run, modify and propagate the contents of this |
||||
implementation of Go, where such license applies only to those patent |
||||
claims, both currently owned or controlled by Google and acquired in |
||||
the future, licensable by Google that are necessarily infringed by this |
||||
implementation of Go. This grant does not include claims that would be |
||||
infringed only as a consequence of further modification of this |
||||
implementation. If you or your agent or exclusive licensee institute or |
||||
order or agree to the institution of patent litigation against any |
||||
entity (including a cross-claim or counterclaim in a lawsuit) alleging |
||||
that this implementation of Go or any code incorporated within this |
||||
implementation of Go constitutes direct or contributory patent |
||||
infringement, or inducement of patent infringement, then any patent |
||||
rights granted to you under this License for this implementation of Go |
||||
shall terminate as of the date such litigation is filed. |
||||
@ -0,0 +1,627 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package astutil |
||||
|
||||
// This file defines utilities for working with source positions.
|
||||
|
||||
import ( |
||||
"fmt" |
||||
"go/ast" |
||||
"go/token" |
||||
"sort" |
||||
) |
||||
|
||||
// PathEnclosingInterval returns the node that encloses the source
|
||||
// interval [start, end), and all its ancestors up to the AST root.
|
||||
//
|
||||
// The definition of "enclosing" used by this function considers
|
||||
// additional whitespace abutting a node to be enclosed by it.
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <-A->
|
||||
// <----B----->
|
||||
//
|
||||
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
||||
// even though its [Pos()..End()) is actually only interval A.
|
||||
// This behaviour makes user interfaces more tolerant of imperfect
|
||||
// input.
|
||||
//
|
||||
// This function treats tokens as nodes, though they are not included
|
||||
// in the result. e.g. PathEnclosingInterval("+") returns the
|
||||
// enclosing ast.BinaryExpr("x + y").
|
||||
//
|
||||
// If start==end, the 1-char interval following start is used instead.
|
||||
//
|
||||
// The 'exact' result is true if the interval contains only path[0]
|
||||
// and perhaps some adjacent whitespace. It is false if the interval
|
||||
// overlaps multiple children of path[0], or if it contains only
|
||||
// interior whitespace of path[0].
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <--C--> <---E-->
|
||||
// ^
|
||||
// D
|
||||
//
|
||||
// intervals C, D and E are inexact. C is contained by the
|
||||
// z-assignment statement, because it spans three of its children (:=,
|
||||
// x, +). So too is the 1-char interval D, because it contains only
|
||||
// interior whitespace of the assignment. E is considered interior
|
||||
// whitespace of the BlockStmt containing the assignment.
|
||||
//
|
||||
// Precondition: [start, end) both lie within the same file as root.
|
||||
// TODO(adonovan): return (nil, false) in this case and remove precond.
|
||||
// Requires FileSet; see loader.tokenFileContainsPos.
|
||||
//
|
||||
// Postcondition: path is never nil; it always contains at least 'root'.
|
||||
//
|
||||
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
	// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging

	// visit recursively narrows the search to the sole child whose
	// whitespace-augmented extent contains [start, end), appending
	// each node to path (root first). It returns whether the match
	// was exact.
	// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
	var visit func(node ast.Node) bool
	visit = func(node ast.Node) bool {
		path = append(path, node)

		nodePos := node.Pos()
		nodeEnd := node.End()

		// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging

		// Intersect [start, end) with interval of node.
		if start < nodePos {
			start = nodePos
		}
		if end > nodeEnd {
			end = nodeEnd
		}

		// Find sole child that contains [start, end).
		children := childrenOf(node)
		l := len(children)
		for i, child := range children {
			// [childPos, childEnd) is unaugmented interval of child.
			childPos := child.Pos()
			childEnd := child.End()

			// [augPos, augEnd) is whitespace-augmented interval of child.
			augPos := childPos
			augEnd := childEnd
			if i > 0 {
				augPos = children[i-1].End() // start of preceding whitespace
			}
			if i < l-1 {
				nextChildPos := children[i+1].Pos()
				// Does [start, end) lie between child and next child?
				if start >= augEnd && end <= nextChildPos {
					return false // inexact match
				}
				augEnd = nextChildPos // end of following whitespace
			}

			// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
			// 	i, augPos, augEnd, start, end) // debugging

			// Does augmented child strictly contain [start, end)?
			if augPos <= start && end <= augEnd {
				// Token children (fake nodes) terminate the descent:
				// they are never included in the result path.
				_, isToken := child.(tokenNode)
				return isToken || visit(child)
			}

			// Does [start, end) overlap multiple children?
			// i.e. left-augmented child contains start
			// but LR-augmented child does not contain end.
			if start < childEnd && end > augEnd {
				break
			}
		}

		// No single child contained [start, end),
		// so node is the result. Is it exact?

		// (It's tempting to put this condition before the
		// child loop, but it gives the wrong result in the
		// case where a node (e.g. ExprStmt) and its sole
		// child have equal intervals.)
		if start == nodePos && end == nodeEnd {
			return true // exact match
		}

		return false // inexact: overlaps multiple children
	}

	// Normalize a reversed interval.
	if start > end {
		start, end = end, start
	}

	if start < root.End() && end > root.Pos() {
		if start == end {
			end = start + 1 // empty interval => interval of size 1
		}
		exact = visit(root)

		// Reverse the path: visit appended root-first, but the
		// documented contract is innermost-first.
		for i, l := 0, len(path); i < l/2; i++ {
			path[i], path[l-1-i] = path[l-1-i], path[i]
		}
	} else {
		// Selection lies within whitespace preceding the
		// first (or following the last) declaration in the file.
		// The result nonetheless always includes the ast.File.
		path = append(path, root)
	}

	return
}
||||
|
||||
// tokenNode is a dummy implementation of ast.Node for a single token.
|
||||
// They are used transiently by PathEnclosingInterval but never escape
|
||||
// this package.
|
||||
//
|
||||
// tokenNode is a dummy implementation of ast.Node for a single token.
// Values are used transiently by PathEnclosingInterval but never
// escape this package.
type tokenNode struct {
	pos token.Pos
	end token.Pos
}

// Pos returns the starting position of the token.
func (n tokenNode) Pos() token.Pos {
	return n.pos
}

// End returns the position immediately after the token.
func (n tokenNode) End() token.Pos {
	return n.end
}

// tok returns a fake ast.Node covering len bytes starting at pos.
func tok(pos token.Pos, len int) ast.Node {
	return tokenNode{pos, pos + token.Pos(len)}
}

// childrenOf returns the direct non-nil children of ast.Node n,
// sorted by position. It may include fake ast.Node implementations
// for bare tokens; it is not safe to call (e.g.) ast.Walk on such nodes.
func childrenOf(n ast.Node) []ast.Node {
	var children []ast.Node

	// First add nodes for all true subtrees.
	ast.Inspect(n, func(node ast.Node) bool {
		if node == n { // push n
			return true // recur
		}
		if node != nil { // push child
			children = append(children, node)
		}
		return false // no recursion
	})

	// Then add fake Nodes for bare tokens.
	switch n := n.(type) {
	case *ast.ArrayType:
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Elt.End(), len("]")))

	case *ast.AssignStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.BasicLit:
		children = append(children,
			tok(n.ValuePos, len(n.Value)))

	case *ast.BinaryExpr:
		children = append(children, tok(n.OpPos, len(n.Op.String())))

	case *ast.BlockStmt:
		children = append(children,
			tok(n.Lbrace, len("{")),
			tok(n.Rbrace, len("}")))

	case *ast.BranchStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.CallExpr:
		children = append(children,
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))
		if n.Ellipsis != 0 {
			children = append(children, tok(n.Ellipsis, len("...")))
		}

	case *ast.CaseClause:
		if n.List == nil {
			children = append(children,
				tok(n.Case, len("default")))
		} else {
			children = append(children,
				tok(n.Case, len("case")))
		}
		children = append(children, tok(n.Colon, len(":")))

	case *ast.ChanType:
		switch n.Dir {
		case ast.RECV:
			children = append(children, tok(n.Begin, len("<-chan")))
		case ast.SEND:
			children = append(children, tok(n.Begin, len("chan<-")))
		case ast.RECV | ast.SEND:
			children = append(children, tok(n.Begin, len("chan")))
		}

	case *ast.CommClause:
		if n.Comm == nil {
			children = append(children,
				tok(n.Case, len("default")))
		} else {
			children = append(children,
				tok(n.Case, len("case")))
		}
		children = append(children, tok(n.Colon, len(":")))

	case *ast.Comment:
		// nop

	case *ast.CommentGroup:
		// nop

	case *ast.CompositeLit:
		// Fixed: the closing brace token is "}", not "{".
		// (Same byte length, so positions were already correct.)
		children = append(children,
			tok(n.Lbrace, len("{")),
			tok(n.Rbrace, len("}")))

	case *ast.DeclStmt:
		// nop

	case *ast.DeferStmt:
		children = append(children,
			tok(n.Defer, len("defer")))

	case *ast.Ellipsis:
		children = append(children,
			tok(n.Ellipsis, len("...")))

	case *ast.EmptyStmt:
		// nop

	case *ast.ExprStmt:
		// nop

	case *ast.Field:
		// TODO(adonovan): Field.{Doc,Comment,Tag}?

	case *ast.FieldList:
		children = append(children,
			tok(n.Opening, len("(")),
			tok(n.Closing, len(")")))

	case *ast.File:
		// TODO test: Doc
		children = append(children,
			tok(n.Package, len("package")))

	case *ast.ForStmt:
		children = append(children,
			tok(n.For, len("for")))

	case *ast.FuncDecl:
		// TODO(adonovan): FuncDecl.Comment?

		// Uniquely, FuncDecl breaks the invariant that
		// preorder traversal yields tokens in lexical order:
		// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
		//
		// As a workaround, we inline the case for FuncType
		// here and order things correctly.
		//
		children = nil // discard ast.Walk(FuncDecl) info subtrees
		children = append(children, tok(n.Type.Func, len("func")))
		if n.Recv != nil {
			children = append(children, n.Recv)
		}
		children = append(children, n.Name)
		if n.Type.Params != nil {
			children = append(children, n.Type.Params)
		}
		if n.Type.Results != nil {
			children = append(children, n.Type.Results)
		}
		if n.Body != nil {
			children = append(children, n.Body)
		}

	case *ast.FuncLit:
		// nop

	case *ast.FuncType:
		if n.Func != 0 {
			children = append(children,
				tok(n.Func, len("func")))
		}

	case *ast.GenDecl:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))
		if n.Lparen != 0 {
			children = append(children,
				tok(n.Lparen, len("(")),
				tok(n.Rparen, len(")")))
		}

	case *ast.GoStmt:
		children = append(children,
			tok(n.Go, len("go")))

	case *ast.Ident:
		children = append(children,
			tok(n.NamePos, len(n.Name)))

	case *ast.IfStmt:
		children = append(children,
			tok(n.If, len("if")))

	case *ast.ImportSpec:
		// TODO(adonovan): ImportSpec.{Doc,EndPos}?

	case *ast.IncDecStmt:
		children = append(children,
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.IndexExpr:
		// Fixed: index brackets are "[" and "]", not "{" and "}".
		// (Same byte lengths, so positions were already correct.)
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Rbrack, len("]")))

	case *ast.InterfaceType:
		children = append(children,
			tok(n.Interface, len("interface")))

	case *ast.KeyValueExpr:
		children = append(children,
			tok(n.Colon, len(":")))

	case *ast.LabeledStmt:
		children = append(children,
			tok(n.Colon, len(":")))

	case *ast.MapType:
		children = append(children,
			tok(n.Map, len("map")))

	case *ast.ParenExpr:
		children = append(children,
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))

	case *ast.RangeStmt:
		children = append(children,
			tok(n.For, len("for")),
			tok(n.TokPos, len(n.Tok.String())))

	case *ast.ReturnStmt:
		children = append(children,
			tok(n.Return, len("return")))

	case *ast.SelectStmt:
		children = append(children,
			tok(n.Select, len("select")))

	case *ast.SelectorExpr:
		// nop

	case *ast.SendStmt:
		children = append(children,
			tok(n.Arrow, len("<-")))

	case *ast.SliceExpr:
		children = append(children,
			tok(n.Lbrack, len("[")),
			tok(n.Rbrack, len("]")))

	case *ast.StarExpr:
		children = append(children, tok(n.Star, len("*")))

	case *ast.StructType:
		children = append(children, tok(n.Struct, len("struct")))

	case *ast.SwitchStmt:
		children = append(children, tok(n.Switch, len("switch")))

	case *ast.TypeAssertExpr:
		children = append(children,
			tok(n.Lparen-1, len(".")),
			tok(n.Lparen, len("(")),
			tok(n.Rparen, len(")")))

	case *ast.TypeSpec:
		// TODO(adonovan): TypeSpec.{Doc,Comment}?

	case *ast.TypeSwitchStmt:
		children = append(children, tok(n.Switch, len("switch")))

	case *ast.UnaryExpr:
		children = append(children, tok(n.OpPos, len(n.Op.String())))

	case *ast.ValueSpec:
		// TODO(adonovan): ValueSpec.{Doc,Comment}?

	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
		// nop
	}

	// TODO(adonovan): opt: merge the logic of ast.Inspect() into
	// the switch above so we can make interleaved callbacks for
	// both Nodes and Tokens in the right order and avoid the need
	// to sort.
	sort.Sort(byPos(children))

	return children
}

// byPos orders ast.Nodes by their starting position.
type byPos []ast.Node

func (sl byPos) Len() int {
	return len(sl)
}
func (sl byPos) Less(i, j int) bool {
	return sl[i].Pos() < sl[j].Pos()
}
func (sl byPos) Swap(i, j int) {
	sl[i], sl[j] = sl[j], sl[i]
}
||||
|
||||
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// The descriptions are short lowercase noun phrases ("if statement",
// "map type", ...). For a handful of node types the result also depends on
// the node's contents (BinaryExpr/UnaryExpr operators, BranchStmt and
// GenDecl token, IncDecStmt direction, single-argument CallExpr).
//
// NodeDescription panics if n's type is not one of the known ast node types.
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
func NodeDescription(n ast.Node) string {
	switch n := n.(type) {
	case *ast.ArrayType:
		return "array type"
	case *ast.AssignStmt:
		return "assignment"
	case *ast.BadDecl:
		return "bad declaration"
	case *ast.BadExpr:
		return "bad expression"
	case *ast.BadStmt:
		return "bad statement"
	case *ast.BasicLit:
		return "basic literal"
	case *ast.BinaryExpr:
		return fmt.Sprintf("binary %s operation", n.Op)
	case *ast.BlockStmt:
		return "block"
	case *ast.BranchStmt:
		// Describe by the branch keyword; an unrecognized token
		// (impossible in a well-formed AST) falls through to the
		// panic at the end of the function.
		switch n.Tok {
		case token.BREAK:
			return "break statement"
		case token.CONTINUE:
			return "continue statement"
		case token.GOTO:
			return "goto statement"
		case token.FALLTHROUGH:
			return "fall-through statement"
		}
	case *ast.CallExpr:
		// A single non-variadic argument is syntactically
		// indistinguishable from a type conversion.
		if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
			return "function call (or conversion)"
		}
		return "function call"
	case *ast.CaseClause:
		return "case clause"
	case *ast.ChanType:
		return "channel type"
	case *ast.CommClause:
		return "communication clause"
	case *ast.Comment:
		return "comment"
	case *ast.CommentGroup:
		return "comment group"
	case *ast.CompositeLit:
		return "composite literal"
	case *ast.DeclStmt:
		return NodeDescription(n.Decl) + " statement"
	case *ast.DeferStmt:
		return "defer statement"
	case *ast.Ellipsis:
		return "ellipsis"
	case *ast.EmptyStmt:
		return "empty statement"
	case *ast.ExprStmt:
		return "expression statement"
	case *ast.Field:
		// Can be any of these:
		// struct {x, y int}  -- struct field(s)
		// struct {T}         -- anon struct field
		// interface {I}      -- interface embedding
		// interface {f()}    -- interface method
		// func (A) func(B) C -- receiver, param(s), result(s)
		return "field/method/parameter"
	case *ast.FieldList:
		return "field/method/parameter list"
	case *ast.File:
		return "source file"
	case *ast.ForStmt:
		return "for loop"
	case *ast.FuncDecl:
		return "function declaration"
	case *ast.FuncLit:
		return "function literal"
	case *ast.FuncType:
		return "function type"
	case *ast.GenDecl:
		// Describe by the declaration keyword; an unrecognized token
		// falls through to the panic at the end of the function.
		switch n.Tok {
		case token.IMPORT:
			return "import declaration"
		case token.CONST:
			return "constant declaration"
		case token.TYPE:
			return "type declaration"
		case token.VAR:
			return "variable declaration"
		}
	case *ast.GoStmt:
		return "go statement"
	case *ast.Ident:
		return "identifier"
	case *ast.IfStmt:
		return "if statement"
	case *ast.ImportSpec:
		return "import specification"
	case *ast.IncDecStmt:
		if n.Tok == token.INC {
			return "increment statement"
		}
		return "decrement statement"
	case *ast.IndexExpr:
		return "index expression"
	case *ast.InterfaceType:
		return "interface type"
	case *ast.KeyValueExpr:
		return "key/value association"
	case *ast.LabeledStmt:
		return "statement label"
	case *ast.MapType:
		return "map type"
	case *ast.Package:
		return "package"
	case *ast.ParenExpr:
		return "parenthesized " + NodeDescription(n.X)
	case *ast.RangeStmt:
		return "range loop"
	case *ast.ReturnStmt:
		return "return statement"
	case *ast.SelectStmt:
		return "select statement"
	case *ast.SelectorExpr:
		return "selector"
	case *ast.SendStmt:
		return "channel send"
	case *ast.SliceExpr:
		return "slice expression"
	case *ast.StarExpr:
		return "*-operation" // load/store expr or pointer type
	case *ast.StructType:
		return "struct type"
	case *ast.SwitchStmt:
		return "switch statement"
	case *ast.TypeAssertExpr:
		return "type assertion"
	case *ast.TypeSpec:
		return "type specification"
	case *ast.TypeSwitchStmt:
		return "type switch"
	case *ast.UnaryExpr:
		return fmt.Sprintf("unary %s operation", n.Op)
	case *ast.ValueSpec:
		return "value specification"

	}
	panic(fmt.Sprintf("unexpected node type: %T", n))
}
||||
@ -0,0 +1,471 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package astutil contains common utilities for working with the Go AST.
|
||||
package astutil // import "golang.org/x/tools/go/ast/astutil"
|
||||
|
||||
import ( |
||||
"fmt" |
||||
"go/ast" |
||||
"go/token" |
||||
"strconv" |
||||
"strings" |
||||
) |
||||
|
||||
// AddImport adds the import path to the file f, if absent.
// It reports whether the import was added. It is a convenience
// wrapper that delegates to AddNamedImport with an empty local name.
func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
	return AddNamedImport(fset, f, "", ipath)
}
||||
|
||||
// AddNamedImport adds the import path to the file f, if absent.
// If name is not empty, it is used to rename the import.
// It reports whether the import was added.
//
// For example, calling
//	AddNamedImport(fset, f, "pathpkg", "path")
// adds
//	import pathpkg "path"
//
// The new spec is placed next to the existing import whose path shares the
// longest segment prefix with ipath; afterwards all import declarations
// (except import "C") are merged into the first one.
func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
	// Nothing to do if the path is already imported.
	if imports(f, ipath) {
		return false
	}

	newImport := &ast.ImportSpec{
		Path: &ast.BasicLit{
			Kind:  token.STRING,
			Value: strconv.Quote(ipath),
		},
	}
	if name != "" {
		newImport.Name = &ast.Ident{Name: name}
	}

	// Find an import decl to add to.
	// The goal is to find an existing import
	// whose import path has the longest shared
	// prefix with ipath.
	var (
		bestMatch  = -1           // length of longest shared prefix
		lastImport = -1           // index in f.Decls of the file's final import decl
		impDecl    *ast.GenDecl   // import decl containing the best match
		impIndex   = -1           // spec index in impDecl containing the best match

		isThirdPartyPath = isThirdParty(ipath)
	)
	for i, decl := range f.Decls {
		gen, ok := decl.(*ast.GenDecl)
		if ok && gen.Tok == token.IMPORT {
			lastImport = i
			// Do not add to import "C", to avoid disrupting the
			// association with its doc comment, breaking cgo.
			if declImports(gen, "C") {
				continue
			}

			// Match an empty import decl if that's all that is available.
			if len(gen.Specs) == 0 && bestMatch == -1 {
				impDecl = gen
			}

			// Compute longest shared prefix with imports in this group and find best
			// matched import spec.
			// 1. Always prefer import spec with longest shared prefix.
			// 2. While match length is 0,
			//    - for stdlib package: prefer first import spec.
			//    - for third party package: prefer first third party import spec.
			// We cannot use last import spec as best match for third party package
			// because grouped imports are usually placed last by goimports -local
			// flag.
			// See issue #19190.
			seenAnyThirdParty := false
			for j, spec := range gen.Specs {
				impspec := spec.(*ast.ImportSpec)
				p := importPath(impspec)
				n := matchLen(p, ipath)
				if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
					bestMatch = n
					impDecl = gen
					impIndex = j
				}
				seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
			}
		}
	}

	// If no import decl found, add one after the last import.
	if impDecl == nil {
		impDecl = &ast.GenDecl{
			Tok: token.IMPORT,
		}
		if lastImport >= 0 {
			impDecl.TokPos = f.Decls[lastImport].End()
		} else {
			// There are no existing imports.
			// Our new import, preceded by a blank line, goes after the package declaration
			// and after the comment, if any, that starts on the same line as the
			// package declaration.
			impDecl.TokPos = f.Package

			file := fset.File(f.Package)
			pkgLine := file.Line(f.Package)
			for _, c := range f.Comments {
				if file.Line(c.Pos()) > pkgLine {
					break
				}
				// +2 for a blank line
				impDecl.TokPos = c.End() + 2
			}
		}
		// Shift decls right by one and splice the new decl in
		// immediately after the last import (or at the front).
		f.Decls = append(f.Decls, nil)
		copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
		f.Decls[lastImport+1] = impDecl
	}

	// Insert new import at insertAt.
	insertAt := 0
	if impIndex >= 0 {
		// insert after the found import
		insertAt = impIndex + 1
	}
	impDecl.Specs = append(impDecl.Specs, nil)
	copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
	impDecl.Specs[insertAt] = newImport
	pos := impDecl.Pos()
	if insertAt > 0 {
		// If there is a comment after an existing import, preserve the comment
		// position by adding the new import after the comment.
		if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
			pos = spec.Comment.End()
		} else {
			// Assign same position as the previous import,
			// so that the sorter sees it as being in the same block.
			pos = impDecl.Specs[insertAt-1].Pos()
		}
	}
	if newImport.Name != nil {
		newImport.Name.NamePos = pos
	}
	newImport.Path.ValuePos = pos
	newImport.EndPos = pos

	// Clean up parens. impDecl contains at least one spec.
	if len(impDecl.Specs) == 1 {
		// Remove unneeded parens.
		impDecl.Lparen = token.NoPos
	} else if !impDecl.Lparen.IsValid() {
		// impDecl needs parens added.
		impDecl.Lparen = impDecl.Specs[0].Pos()
	}

	f.Imports = append(f.Imports, newImport)

	if len(f.Decls) <= 1 {
		return true
	}

	// Merge all the import declarations into the first one.
	var first *ast.GenDecl
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
			continue
		}
		if first == nil {
			first = gen
			continue // Don't touch the first one.
		}
		// We now know there is more than one package in this import
		// declaration. Ensure that it ends up parenthesized.
		first.Lparen = first.Pos()
		// Move the imports of the other import declaration to the first one.
		for _, spec := range gen.Specs {
			spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
			first.Specs = append(first.Specs, spec)
		}
		f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
		i--
	}

	return true
}
||||
|
||||
// isThirdParty reports whether importPath looks like a third-party
// package path rather than a standard-library one.
// Third party package import paths usually contain "." (".com", ".org", ...).
// This logic is taken from golang.org/x/tools/imports package.
func isThirdParty(importPath string) bool {
	return strings.ContainsRune(importPath, '.')
}
||||
|
||||
// DeleteImport deletes the import path from the file f, if present.
// It reports whether the import was deleted. It is a convenience
// wrapper that delegates to DeleteNamedImport with an empty local name.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
	return DeleteNamedImport(fset, f, "", path)
}
||||
|
||||
// DeleteNamedImport deletes the import with the given name and path from
// the file f, if present. It reports whether an import was deleted.
// Besides removing the spec(s) from f.Decls and f.Imports, it also drops
// the comments attached to a deleted spec and merges source lines in the
// FileSet so no blank hole is left where the import was.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
	var delspecs []*ast.ImportSpec      // specs removed from their decls, still in f.Imports
	var delcomments []*ast.CommentGroup // comments that belonged to removed specs

	// Find the import nodes that import path, if any.
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT {
			continue
		}
		for j := 0; j < len(gen.Specs); j++ {
			spec := gen.Specs[j]
			impspec := spec.(*ast.ImportSpec)
			// Match on local name: an unnamed spec only matches name == "",
			// a named spec must match exactly.
			if impspec.Name == nil && name != "" {
				continue
			}
			if impspec.Name != nil && impspec.Name.Name != name {
				continue
			}
			if importPath(impspec) != path {
				continue
			}

			// We found an import spec that imports path.
			// Delete it.
			delspecs = append(delspecs, impspec)
			deleted = true
			copy(gen.Specs[j:], gen.Specs[j+1:])
			gen.Specs = gen.Specs[:len(gen.Specs)-1]

			// If this was the last import spec in this decl,
			// delete the decl, too.
			if len(gen.Specs) == 0 {
				copy(f.Decls[i:], f.Decls[i+1:])
				f.Decls = f.Decls[:len(f.Decls)-1]
				i--
				break
			} else if len(gen.Specs) == 1 {
				// Only one spec remains: collect the deleted spec's doc and
				// trailing comments for removal from f.Comments.
				if impspec.Doc != nil {
					delcomments = append(delcomments, impspec.Doc)
				}
				if impspec.Comment != nil {
					delcomments = append(delcomments, impspec.Comment)
				}
				for _, cg := range f.Comments {
					// Found comment on the same line as the import spec.
					if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
						delcomments = append(delcomments, cg)
						break
					}
				}

				spec := gen.Specs[0].(*ast.ImportSpec)

				// Move the documentation right after the import decl.
				if spec.Doc != nil {
					for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
						fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
					}
				}
				for _, cg := range f.Comments {
					if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
						for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
							fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
						}
						break
					}
				}
			}
			if j > 0 {
				lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
				lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
				line := fset.Position(impspec.Path.ValuePos).Line

				// We deleted an entry but now there may be
				// a blank line-sized hole where the import was.
				if line-lastLine > 1 {
					// There was a blank line immediately preceding the deleted import,
					// so there's no need to close the hole.
					// Do nothing.
				} else if line != fset.File(gen.Rparen).LineCount() {
					// There was no blank line. Close the hole.
					fset.File(gen.Rparen).MergeLine(line)
				}
			}
			// Re-examine index j: the next spec shifted into this slot.
			j--
		}
	}

	// Delete imports from f.Imports.
	for i := 0; i < len(f.Imports); i++ {
		imp := f.Imports[i]
		for j, del := range delspecs {
			if imp == del {
				copy(f.Imports[i:], f.Imports[i+1:])
				f.Imports = f.Imports[:len(f.Imports)-1]
				copy(delspecs[j:], delspecs[j+1:])
				delspecs = delspecs[:len(delspecs)-1]
				i--
				break
			}
		}
	}

	// Delete comments from f.Comments.
	for i := 0; i < len(f.Comments); i++ {
		cg := f.Comments[i]
		for j, del := range delcomments {
			if cg == del {
				copy(f.Comments[i:], f.Comments[i+1:])
				f.Comments = f.Comments[:len(f.Comments)-1]
				copy(delcomments[j:], delcomments[j+1:])
				delcomments = delcomments[:len(delcomments)-1]
				i--
				break
			}
		}
	}

	// Every spec removed from a decl must also have been present in
	// f.Imports; anything left over indicates an inconsistent AST.
	if len(delspecs) > 0 {
		panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
	}

	return
}
||||
|
||||
// RewriteImport rewrites any import of path oldPath to path newPath.
|
||||
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { |
||||
for _, imp := range f.Imports { |
||||
if importPath(imp) == oldPath { |
||||
rewrote = true |
||||
// record old End, because the default is to compute
|
||||
// it using the length of imp.Path.Value.
|
||||
imp.EndPos = imp.End() |
||||
imp.Path.Value = strconv.Quote(newPath) |
||||
} |
||||
} |
||||
return |
||||
} |
||||
|
||||
// UsesImport reports whether a given import is used.
|
||||
func UsesImport(f *ast.File, path string) (used bool) { |
||||
spec := importSpec(f, path) |
||||
if spec == nil { |
||||
return |
||||
} |
||||
|
||||
name := spec.Name.String() |
||||
switch name { |
||||
case "<nil>": |
||||
// If the package name is not explicitly specified,
|
||||
// make an educated guess. This is not guaranteed to be correct.
|
||||
lastSlash := strings.LastIndex(path, "/") |
||||
if lastSlash == -1 { |
||||
name = path |
||||
} else { |
||||
name = path[lastSlash+1:] |
||||
} |
||||
case "_", ".": |
||||
// Not sure if this import is used - err on the side of caution.
|
||||
return true |
||||
} |
||||
|
||||
ast.Walk(visitFn(func(n ast.Node) { |
||||
sel, ok := n.(*ast.SelectorExpr) |
||||
if ok && isTopName(sel.X, name) { |
||||
used = true |
||||
} |
||||
}), f) |
||||
|
||||
return |
||||
} |
||||
|
||||
// visitFn adapts an ordinary function to the ast.Visitor interface.
type visitFn func(node ast.Node)

// Visit calls fn on node and returns fn itself, so the traversal
// continues into the node's children with the same function.
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
	fn(node)
	return fn
}
||||
|
||||
// imports returns true if f imports path.
// It is a thin convenience wrapper over importSpec.
func imports(f *ast.File, path string) bool {
	return importSpec(f, path) != nil
}
||||
|
||||
// importSpec returns the import spec if f imports path,
|
||||
// or nil otherwise.
|
||||
func importSpec(f *ast.File, path string) *ast.ImportSpec { |
||||
for _, s := range f.Imports { |
||||
if importPath(s) == path { |
||||
return s |
||||
} |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
// importPath returns the unquoted import path of s,
|
||||
// or "" if the path is not properly quoted.
|
||||
func importPath(s *ast.ImportSpec) string { |
||||
t, err := strconv.Unquote(s.Path.Value) |
||||
if err == nil { |
||||
return t |
||||
} |
||||
return "" |
||||
} |
||||
|
||||
// declImports reports whether gen contains an import of path.
|
||||
func declImports(gen *ast.GenDecl, path string) bool { |
||||
if gen.Tok != token.IMPORT { |
||||
return false |
||||
} |
||||
for _, spec := range gen.Specs { |
||||
impspec := spec.(*ast.ImportSpec) |
||||
if importPath(impspec) == path { |
||||
return true |
||||
} |
||||
} |
||||
return false |
||||
} |
||||
|
||||
// matchLen returns the length of the longest path segment prefix
// shared by x and y, counted in completed segments (number of '/'
// characters inside the common byte prefix).
func matchLen(x, y string) int {
	segments := 0
	i := 0
	for i < len(x) && i < len(y) && x[i] == y[i] {
		if x[i] == '/' {
			segments++
		}
		i++
	}
	return segments
}
||||
|
||||
// isTopName returns true if n is a top-level unresolved identifier with the given name.
|
||||
func isTopName(n ast.Expr, name string) bool { |
||||
id, ok := n.(*ast.Ident) |
||||
return ok && id.Name == name && id.Obj == nil |
||||
} |
||||
|
||||
// Imports returns the file imports grouped by paragraph.
|
||||
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { |
||||
var groups [][]*ast.ImportSpec |
||||
|
||||
for _, decl := range f.Decls { |
||||
genDecl, ok := decl.(*ast.GenDecl) |
||||
if !ok || genDecl.Tok != token.IMPORT { |
||||
break |
||||
} |
||||
|
||||
group := []*ast.ImportSpec{} |
||||
|
||||
var lastLine int |
||||
for _, spec := range genDecl.Specs { |
||||
importSpec := spec.(*ast.ImportSpec) |
||||
pos := importSpec.Path.ValuePos |
||||
line := fset.Position(pos).Line |
||||
if lastLine > 0 && pos > 0 && line-lastLine > 1 { |
||||
groups = append(groups, group) |
||||
group = []*ast.ImportSpec{} |
||||
} |
||||
group = append(group, importSpec) |
||||
lastLine = line |
||||
} |
||||
groups = append(groups, group) |
||||
} |
||||
|
||||
return groups |
||||
} |
||||
@ -0,0 +1,477 @@
|
||||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package astutil |
||||
|
||||
import ( |
||||
"fmt" |
||||
"go/ast" |
||||
"reflect" |
||||
"sort" |
||||
) |
||||
|
||||
// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
// before and/or after the node's children, using a Cursor describing
// the current node and providing operations on it.
//
// The return value of ApplyFunc controls the syntax tree traversal.
// See Apply for details.
type ApplyFunc func(*Cursor) bool
||||
|
||||
// Apply traverses a syntax tree recursively, starting with root,
// and calling pre and post for each node as described below.
// Apply returns the syntax tree, possibly modified.
//
// If pre is not nil, it is called for each node before the node's
// children are traversed (pre-order). If pre returns false, no
// children are traversed, and post is not called for that node.
//
// If post is not nil, and a prior call of pre didn't return false,
// post is called for each node after its children are traversed
// (post-order). If post returns false, traversal is terminated and
// Apply returns immediately.
//
// Only fields that refer to AST nodes are considered children;
// i.e., token.Pos, Scopes, Objects, and fields of basic types
// (strings, etc.) are ignored.
//
// Children are traversed in the order in which they appear in the
// respective node's struct definition. A package's files are
// traversed in the filenames' alphabetical order.
//
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
	// Wrap root in a synthetic parent so that a Replace on root
	// itself has a field ("Node") to write through.
	parent := &struct{ ast.Node }{root}
	defer func() {
		// Early termination is signalled by panicking with the abort
		// sentinel; any other panic is re-raised unchanged.
		if r := recover(); r != nil && r != abort {
			panic(r)
		}
		// Read the (possibly replaced) root back out of the wrapper.
		result = parent.Node
	}()
	a := &application{pre: pre, post: post}
	a.apply(parent, "Node", nil, root)
	return
}
||||
|
||||
// abort is compared by pointer identity in Apply's deferred recover.
var abort = new(int) // singleton, to signal termination of Apply
||||
|
||||
// A Cursor describes a node encountered during Apply.
// Information about the node and its parent is available
// from the Node, Parent, Name, and Index methods.
//
// If p is a variable of type and value of the current parent node
// c.Parent(), and f is the field identifier with name c.Name(),
// the following invariants hold:
//
//	p.f            == c.Node()  if c.Index() <  0
//	p.f[c.Index()] == c.Node()  if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
type Cursor struct {
	parent ast.Node  // enclosing node
	name   string    // name of parent's field that holds node
	iter   *iterator // valid if non-nil; tracks position within a slice field
	node   ast.Node  // current node
}
||||
|
||||
// Node returns the current Node.
func (c *Cursor) Node() ast.Node { return c.node }

// Parent returns the parent of the current Node.
func (c *Cursor) Parent() ast.Node { return c.parent }

// Name returns the name of the parent Node field that contains the current Node.
// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
// the filename for the current Node.
func (c *Cursor) Name() string { return c.name }

// Index reports the index >= 0 of the current Node in the slice of Nodes that
// contains it, or a value < 0 if the current Node is not part of a slice.
// The index of the current node changes if InsertBefore is called while
// processing the current node.
func (c *Cursor) Index() int {
	if c.iter != nil {
		return c.iter.index
	}
	return -1
}
||||
|
||||
// field returns the current node's parent field value via reflection.
// NOTE(review): assumes c.parent is (a pointer to) a struct with an
// exported field named c.name — the traversal in apply always sets the
// cursor up that way.
func (c *Cursor) field() reflect.Value {
	return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
}
||||
|
||||
// Replace replaces the current Node with n.
// The replacement node is not walked by Apply.
// As a special case, a *ast.File can only be replaced by another
// *ast.File, written into the parent *ast.Package's Files map.
func (c *Cursor) Replace(n ast.Node) {
	if _, ok := c.node.(*ast.File); ok {
		file, ok := n.(*ast.File)
		if !ok {
			panic("attempt to replace *ast.File with non-*ast.File")
		}
		c.parent.(*ast.Package).Files[c.name] = file
		return
	}

	// Write through the parent's field (or slice element) reflectively.
	v := c.field()
	if i := c.Index(); i >= 0 {
		v = v.Index(i)
	}
	v.Set(reflect.ValueOf(n))
}
||||
|
||||
// Delete deletes the current Node from its containing slice.
// If the current Node is not part of a slice, Delete panics.
// As a special case, if the current node is a package file,
// Delete removes it from the package's Files map.
func (c *Cursor) Delete() {
	if _, ok := c.node.(*ast.File); ok {
		delete(c.parent.(*ast.Package).Files, c.name)
		return
	}

	i := c.Index()
	if i < 0 {
		panic("Delete node not contained in slice")
	}
	// Shift the tail left over the deleted element, zero the freed
	// slot (so the GC can reclaim it), and shrink the slice.
	v := c.field()
	l := v.Len()
	reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
	v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
	v.SetLen(l - 1)
	// The slice is now one element shorter; adjust the iteration step
	// so the traversal does not skip the element that moved into slot i.
	c.iter.step--
}
||||
|
||||
// InsertAfter inserts n after the current Node in its containing slice.
// If the current Node is not part of a slice, InsertAfter panics.
// Apply does not walk n.
func (c *Cursor) InsertAfter(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertAfter node not contained in slice")
	}
	// Grow the slice by one, shift the tail right, and place n at i+1.
	v := c.field()
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
	v.Index(i + 1).Set(reflect.ValueOf(n))
	// Widen the iteration step so the traversal skips over n.
	c.iter.step++
}
|
||||
// InsertBefore inserts n before the current Node in its containing slice.
// If the current Node is not part of a slice, InsertBefore panics.
// Apply will not walk n.
func (c *Cursor) InsertBefore(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertBefore node not contained in slice")
	}
	// Grow the slice by one, shift elements i.. right, and place n at i.
	v := c.field()
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
	v.Index(i).Set(reflect.ValueOf(n))
	// The current node moved to i+1; advance the iterator so it is not
	// visited twice (this is why Index changes after InsertBefore).
	c.iter.index++
}
||||
|
||||
// application carries all the shared data so we can pass it around cheaply.
type application struct {
	pre, post ApplyFunc
	cursor    Cursor   // reused across apply calls to avoid per-node allocation
	iter      iterator // reused slice-iteration state
}
|
||||
func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { |
||||
// convert typed nil into untyped nil
|
||||
if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { |
||||
n = nil |
||||
} |
||||
|
||||
// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
|
||||
saved := a.cursor |
||||
a.cursor.parent = parent |
||||
a.cursor.name = name |
||||
a.cursor.iter = iter |
||||
a.cursor.node = n |
||||
|
||||
if a.pre != nil && !a.pre(&a.cursor) { |
||||
a.cursor = saved |
||||
return |
||||
} |
||||
|
||||
// walk children
|
||||
// (the order of the cases matches the order of the corresponding node types in go/ast)
|
||||
switch n := n.(type) { |
||||
case nil: |
||||
// nothing to do
|
||||
|
||||
// Comments and fields
|
||||
case *ast.Comment: |
||||
// nothing to do
|
||||
|
||||
case *ast.CommentGroup: |
||||
if n != nil { |
||||
a.applyList(n, "List") |
||||
} |
||||
|
||||
case *ast.Field: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.applyList(n, "Names") |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.apply(n, "Tag", nil, n.Tag) |
||||
a.apply(n, "Comment", nil, n.Comment) |
||||
|
||||
case *ast.FieldList: |
||||
a.applyList(n, "List") |
||||
|
||||
// Expressions
|
||||
case *ast.BadExpr, *ast.Ident, *ast.BasicLit: |
||||
// nothing to do
|
||||
|
||||
case *ast.Ellipsis: |
||||
a.apply(n, "Elt", nil, n.Elt) |
||||
|
||||
case *ast.FuncLit: |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
case *ast.CompositeLit: |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.applyList(n, "Elts") |
||||
|
||||
case *ast.ParenExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
|
||||
case *ast.SelectorExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Sel", nil, n.Sel) |
||||
|
||||
case *ast.IndexExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Index", nil, n.Index) |
||||
|
||||
case *ast.SliceExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Low", nil, n.Low) |
||||
a.apply(n, "High", nil, n.High) |
||||
a.apply(n, "Max", nil, n.Max) |
||||
|
||||
case *ast.TypeAssertExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Type", nil, n.Type) |
||||
|
||||
case *ast.CallExpr: |
||||
a.apply(n, "Fun", nil, n.Fun) |
||||
a.applyList(n, "Args") |
||||
|
||||
case *ast.StarExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
|
||||
case *ast.UnaryExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
|
||||
case *ast.BinaryExpr: |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Y", nil, n.Y) |
||||
|
||||
case *ast.KeyValueExpr: |
||||
a.apply(n, "Key", nil, n.Key) |
||||
a.apply(n, "Value", nil, n.Value) |
||||
|
||||
// Types
|
||||
case *ast.ArrayType: |
||||
a.apply(n, "Len", nil, n.Len) |
||||
a.apply(n, "Elt", nil, n.Elt) |
||||
|
||||
case *ast.StructType: |
||||
a.apply(n, "Fields", nil, n.Fields) |
||||
|
||||
case *ast.FuncType: |
||||
a.apply(n, "Params", nil, n.Params) |
||||
a.apply(n, "Results", nil, n.Results) |
||||
|
||||
case *ast.InterfaceType: |
||||
a.apply(n, "Methods", nil, n.Methods) |
||||
|
||||
case *ast.MapType: |
||||
a.apply(n, "Key", nil, n.Key) |
||||
a.apply(n, "Value", nil, n.Value) |
||||
|
||||
case *ast.ChanType: |
||||
a.apply(n, "Value", nil, n.Value) |
||||
|
||||
// Statements
|
||||
case *ast.BadStmt: |
||||
// nothing to do
|
||||
|
||||
case *ast.DeclStmt: |
||||
a.apply(n, "Decl", nil, n.Decl) |
||||
|
||||
case *ast.EmptyStmt: |
||||
// nothing to do
|
||||
|
||||
case *ast.LabeledStmt: |
||||
a.apply(n, "Label", nil, n.Label) |
||||
a.apply(n, "Stmt", nil, n.Stmt) |
||||
|
||||
case *ast.ExprStmt: |
||||
a.apply(n, "X", nil, n.X) |
||||
|
||||
case *ast.SendStmt: |
||||
a.apply(n, "Chan", nil, n.Chan) |
||||
a.apply(n, "Value", nil, n.Value) |
||||
|
||||
case *ast.IncDecStmt: |
||||
a.apply(n, "X", nil, n.X) |
||||
|
||||
case *ast.AssignStmt: |
||||
a.applyList(n, "Lhs") |
||||
a.applyList(n, "Rhs") |
||||
|
||||
case *ast.GoStmt: |
||||
a.apply(n, "Call", nil, n.Call) |
||||
|
||||
case *ast.DeferStmt: |
||||
a.apply(n, "Call", nil, n.Call) |
||||
|
||||
case *ast.ReturnStmt: |
||||
a.applyList(n, "Results") |
||||
|
||||
case *ast.BranchStmt: |
||||
a.apply(n, "Label", nil, n.Label) |
||||
|
||||
case *ast.BlockStmt: |
||||
a.applyList(n, "List") |
||||
|
||||
case *ast.IfStmt: |
||||
a.apply(n, "Init", nil, n.Init) |
||||
a.apply(n, "Cond", nil, n.Cond) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
a.apply(n, "Else", nil, n.Else) |
||||
|
||||
case *ast.CaseClause: |
||||
a.applyList(n, "List") |
||||
a.applyList(n, "Body") |
||||
|
||||
case *ast.SwitchStmt: |
||||
a.apply(n, "Init", nil, n.Init) |
||||
a.apply(n, "Tag", nil, n.Tag) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
case *ast.TypeSwitchStmt: |
||||
a.apply(n, "Init", nil, n.Init) |
||||
a.apply(n, "Assign", nil, n.Assign) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
case *ast.CommClause: |
||||
a.apply(n, "Comm", nil, n.Comm) |
||||
a.applyList(n, "Body") |
||||
|
||||
case *ast.SelectStmt: |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
case *ast.ForStmt: |
||||
a.apply(n, "Init", nil, n.Init) |
||||
a.apply(n, "Cond", nil, n.Cond) |
||||
a.apply(n, "Post", nil, n.Post) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
case *ast.RangeStmt: |
||||
a.apply(n, "Key", nil, n.Key) |
||||
a.apply(n, "Value", nil, n.Value) |
||||
a.apply(n, "X", nil, n.X) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
// Declarations
|
||||
case *ast.ImportSpec: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.apply(n, "Name", nil, n.Name) |
||||
a.apply(n, "Path", nil, n.Path) |
||||
a.apply(n, "Comment", nil, n.Comment) |
||||
|
||||
case *ast.ValueSpec: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.applyList(n, "Names") |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.applyList(n, "Values") |
||||
a.apply(n, "Comment", nil, n.Comment) |
||||
|
||||
case *ast.TypeSpec: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.apply(n, "Name", nil, n.Name) |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.apply(n, "Comment", nil, n.Comment) |
||||
|
||||
case *ast.BadDecl: |
||||
// nothing to do
|
||||
|
||||
case *ast.GenDecl: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.applyList(n, "Specs") |
||||
|
||||
case *ast.FuncDecl: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.apply(n, "Recv", nil, n.Recv) |
||||
a.apply(n, "Name", nil, n.Name) |
||||
a.apply(n, "Type", nil, n.Type) |
||||
a.apply(n, "Body", nil, n.Body) |
||||
|
||||
// Files and packages
|
||||
case *ast.File: |
||||
a.apply(n, "Doc", nil, n.Doc) |
||||
a.apply(n, "Name", nil, n.Name) |
||||
a.applyList(n, "Decls") |
||||
// Don't walk n.Comments; they have either been walked already if
|
||||
// they are Doc comments, or they can be easily walked explicitly.
|
||||
|
||||
case *ast.Package: |
||||
// collect and sort names for reproducible behavior
|
||||
var names []string |
||||
for name := range n.Files { |
||||
names = append(names, name) |
||||
} |
||||
sort.Strings(names) |
||||
for _, name := range names { |
||||
a.apply(n, name, nil, n.Files[name]) |
||||
} |
||||
|
||||
default: |
||||
panic(fmt.Sprintf("Apply: unexpected node type %T", n)) |
||||
} |
||||
|
||||
if a.post != nil && !a.post(&a.cursor) { |
||||
panic(abort) |
||||
} |
||||
|
||||
a.cursor = saved |
||||
} |
||||
|
||||
// An iterator controls iteration over a slice of nodes.
// index is the current position within the slice being traversed;
// step is the amount by which index advances after each element
// (applyList resets it to 1 before each apply call; cursor
// operations during traversal may presumably adjust it — see applyList).
type iterator struct {
    index, step int
}
||||
|
||||
// applyList applies the traversal to every element of the slice-valued
// field parent.<name>, looked up by reflection so that one function
// handles all node types. The shared a.iter is saved and restored
// around the loop, and the slice length is re-read on every iteration
// because cursor edits performed inside a.apply can replace the slice.
func (a *application) applyList(parent ast.Node, name string) {
    // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
    saved := a.iter
    a.iter.index = 0
    for {
        // must reload parent.name each time, since cursor modifications might change it
        v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
        if a.iter.index >= v.Len() {
            break
        }

        // element x may be nil in a bad AST - be cautious
        var x ast.Node
        if e := v.Index(a.iter.index); e.IsValid() {
            x = e.Interface().(ast.Node)
        }

        // step defaults to 1; a.apply may change it via the iterator pointer,
        // so the index advances by whatever step is after the call.
        a.iter.step = 1
        a.apply(parent, name, &a.iter, x)
        a.iter.index += a.iter.step
    }
    a.iter = saved
}
||||
@ -0,0 +1,14 @@
|
||||
package astutil |
||||
|
||||
import "go/ast" |
||||
|
||||
// Unparen returns e with any enclosing parentheses stripped.
// An expression that is not parenthesized is returned unchanged.
func Unparen(e ast.Expr) ast.Expr {
    if paren, ok := e.(*ast.ParenExpr); ok {
        return Unparen(paren.X)
    }
    return e
}
||||
@ -0,0 +1,109 @@
|
||||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package gcexportdata provides functions for locating, reading, and
|
||||
// writing export data files containing type information produced by the
|
||||
// gc compiler. This package supports go1.7 export data format and all
|
||||
// later versions.
|
||||
//
|
||||
// Although it might seem convenient for this package to live alongside
|
||||
// go/types in the standard library, this would cause version skew
|
||||
// problems for developer tools that use it, since they must be able to
|
||||
// consume the outputs of the gc compiler both before and after a Go
|
||||
// update such as from Go 1.7 to Go 1.8. Because this package lives in
|
||||
// golang.org/x/tools, sites can update their version of this repo some
|
||||
// time before the Go 1.8 release and rebuild and redeploy their
|
||||
// developer tools, which will then be able to consume both Go 1.7 and
|
||||
// Go 1.8 export data files, so they will work before and after the
|
||||
// Go update. (See discussion at https://github.com/golang/go/issues/15651.)
|
||||
//
|
||||
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
|
||||
|
||||
import ( |
||||
"bufio" |
||||
"bytes" |
||||
"fmt" |
||||
"go/token" |
||||
"go/types" |
||||
"io" |
||||
"io/ioutil" |
||||
|
||||
"golang.org/x/tools/go/internal/gcimporter" |
||||
) |
||||
|
||||
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
// using the workspace layout conventions of go/build.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
func Find(importPath, srcDir string) (filename, path string) {
    // The actual lookup lives in the internal gcimporter package.
    return gcimporter.FindPkg(importPath, srcDir)
}
||||
|
||||
// NewReader returns a reader for the export data section of an object
// (.o) or archive (.a) file read from r. The new reader may provide
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
    buf := bufio.NewReader(r)
    // FindExportData presumably advances buf past the file/archive
    // header to the start of the export data — see gcimporter.
    // Any error is returned alongside buf rather than short-circuiting.
    _, err := gcimporter.FindExportData(buf)
    // If we ever switch to a zip-like archive format with the ToC
    // at the end, we can return the correct portion of export data,
    // but for now we must return the entire rest of the file.
    return buf, err
}
||||
|
||||
// Read reads export data from in, decodes it, and returns type
// information for the package.
// The package name is specified by path.
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
// within the export data to other packages are consistent. The caller
// must ensure that imports[path] does not exist, or exists but is
// incomplete (see types.Package.Complete), and Read inserts the
// resulting package into this map entry.
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
    data, err := ioutil.ReadAll(in)
    if err != nil {
        return nil, fmt.Errorf("reading export data for %q: %v", path, err)
    }

    // Raw archives must be unwrapped with NewReader before decoding.
    if bytes.HasPrefix(data, []byte("!<arch>")) {
        return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
    }

    // The App Engine Go runtime v1.6 uses the old export data format.
    // TODO(adonovan): delete once v1.7 has been around for a while.
    if bytes.HasPrefix(data, []byte("package ")) {
        return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
    }

    // The indexed export format starts with an 'i'; the older
    // binary export format starts with a 'c', 'd', or 'v'
    // (from "version"). Select appropriate importer.
    if len(data) > 0 && data[0] == 'i' {
        _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
        return pkg, err
    }

    _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
    return pkg, err
}
||||
|
||||
// Write writes encoded type information for the specified package to out.
|
||||
// The FileSet provides file position information for named objects.
|
||||
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { |
||||
b, err := gcimporter.BExportData(fset, pkg) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
_, err = out.Write(b) |
||||
return err |
||||
} |
||||
@ -0,0 +1,73 @@
|
||||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gcexportdata |
||||
|
||||
import ( |
||||
"fmt" |
||||
"go/token" |
||||
"go/types" |
||||
"os" |
||||
) |
||||
|
||||
// NewImporter returns a new instance of the types.Importer interface
|
||||
// that reads type information from export data files written by gc.
|
||||
// The Importer also satisfies types.ImporterFrom.
|
||||
//
|
||||
// Export data files are located using "go build" workspace conventions
|
||||
// and the build.Default context.
|
||||
//
|
||||
// Use this importer instead of go/importer.For("gc", ...) to avoid the
|
||||
// version-skew problems described in the documentation of this package,
|
||||
// or to control the FileSet or access the imports map populated during
|
||||
// package loading.
|
||||
//
|
||||
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { |
||||
return importer{fset, imports} |
||||
} |
||||
|
||||
// importer implements types.ImporterFrom by locating, reading, and
// decoding gc export data files; the imports map caches packages
// already loaded and is shared across calls.
type importer struct {
    fset    *token.FileSet
    imports map[string]*types.Package
}
||||
|
||||
// Import implements types.Importer by delegating to ImportFrom
// with an empty source directory and default mode.
func (imp importer) Import(importPath string) (*types.Package, error) {
    return imp.ImportFrom(importPath, "", 0)
}
||||
|
||||
// ImportFrom implements types.ImporterFrom: it resolves importPath
// relative to srcDir, returns a cached complete package when possible,
// and otherwise reads and decodes the package's export data file.
// The named err result is required so the deferred cleanup below can
// annotate any error with the file name.
func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
    filename, path := Find(importPath, srcDir)
    if filename == "" {
        if importPath == "unsafe" {
            // Even for unsafe, call Find first in case
            // the package was vendored.
            return types.Unsafe, nil
        }
        return nil, fmt.Errorf("can't find import: %s", importPath)
    }

    if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
        return pkg, nil // cache hit
    }

    // open file
    f, err := os.Open(filename)
    if err != nil {
        return nil, err
    }
    defer func() {
        f.Close()
        if err != nil {
            // add file name to error
            err = fmt.Errorf("reading export data: %s: %v", filename, err)
        }
    }()

    r, err := NewReader(f)
    if err != nil {
        return nil, err
    }

    return Read(r, imp.fset, imp.imports, path)
}
||||
@ -0,0 +1,99 @@
|
||||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// The gcexportdata command is a diagnostic tool that displays the
|
||||
// contents of gc export data files.
|
||||
package main |
||||
|
||||
import ( |
||||
"flag" |
||||
"fmt" |
||||
"go/token" |
||||
"go/types" |
||||
"log" |
||||
"os" |
||||
|
||||
"golang.org/x/tools/go/gcexportdata" |
||||
"golang.org/x/tools/go/types/typeutil" |
||||
) |
||||
|
||||
var packageFlag = flag.String("package", "", "alternative package to print") |
||||
|
||||
// main opens the export data file named on the command line, decodes
// it, optionally switches to an indirectly mentioned package selected
// with -package, and prints every package-level declaration (including
// unexported ones) with its source position, plus each named type's
// intuitive method set.
func main() {
    log.SetPrefix("gcexportdata: ")
    log.SetFlags(0)
    flag.Usage = func() {
        fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
    }
    flag.Parse()
    if flag.NArg() != 1 {
        flag.Usage()
        os.Exit(2)
    }
    filename := flag.Args()[0]

    f, err := os.Open(filename)
    if err != nil {
        log.Fatal(err)
    }

    r, err := gcexportdata.NewReader(f)
    if err != nil {
        log.Fatalf("%s: %s", filename, err)
    }

    // Decode the package.
    // primary is a placeholder import path for the package whose real
    // path is not recorded in its own export data.
    const primary = "<primary>"
    imports := make(map[string]*types.Package)
    fset := token.NewFileSet()
    pkg, err := gcexportdata.Read(r, fset, imports, primary)
    if err != nil {
        log.Fatalf("%s: %s", filename, err)
    }

    // Optionally select an indirectly mentioned package.
    if *packageFlag != "" {
        pkg = imports[*packageFlag]
        if pkg == nil {
            fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
                filename, *packageFlag)
            for p := range imports {
                if p != primary {
                    fmt.Fprintf(os.Stderr, "\t%s\n", p)
                }
            }
            os.Exit(1)
        }
    }

    // Print all package-level declarations, including non-exported ones.
    fmt.Printf("package %s\n", pkg.Name())
    for _, imp := range pkg.Imports() {
        fmt.Printf("import %q\n", imp.Path())
    }
    // qual suppresses the qualifier for names from the selected package.
    qual := func(p *types.Package) string {
        if pkg == p {
            return ""
        }
        return p.Name()
    }
    scope := pkg.Scope()
    for _, name := range scope.Names() {
        obj := scope.Lookup(name)
        fmt.Printf("%s: %s\n",
            fset.Position(obj.Pos()),
            types.ObjectString(obj, qual))

        // For types, print each method.
        if _, ok := obj.(*types.TypeName); ok {
            for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
                fmt.Printf("%s: %s\n",
                    fset.Position(method.Obj().Pos()),
                    types.SelectionString(method, qual))
            }
        }
    }
}
||||
@ -0,0 +1,852 @@
|
||||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Binary package export.
|
||||
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
|
||||
// see that file for specification of the format.
|
||||
|
||||
package gcimporter |
||||
|
||||
import ( |
||||
"bytes" |
||||
"encoding/binary" |
||||
"fmt" |
||||
"go/ast" |
||||
"go/constant" |
||||
"go/token" |
||||
"go/types" |
||||
"math" |
||||
"math/big" |
||||
"sort" |
||||
"strings" |
||||
) |
||||
|
||||
// If debugFormat is set, each integer and string value is preceded by a marker
|
||||
// and position information in the encoding. This mechanism permits an importer
|
||||
// to recognize immediately when it is out of sync. The importer recognizes this
|
||||
// mode automatically (i.e., it can import export data produced with debugging
|
||||
// support even if debugFormat is not set at the time of import). This mode will
|
||||
// lead to massively larger export data (by a factor of 2 to 3) and should only
|
||||
// be enabled during development and debugging.
|
||||
//
|
||||
// NOTE: This flag is the first flag to enable if importing dies because of
|
||||
// (suspected) format errors, and whenever a change is made to the format.
|
||||
const debugFormat = false // default: false
|
||||
|
||||
// If trace is set, debugging output is printed to std out.
|
||||
const trace = false // default: false
|
||||
|
||||
// Current export format version. Increase with each format change.
|
||||
// Note: The latest binary (non-indexed) export format is at version 6.
|
||||
// This exporter is still at level 4, but it doesn't matter since
|
||||
// the binary importer can handle older versions just fine.
|
||||
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
|
||||
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
|
||||
// 4: type name objects support type aliases, uses aliasTag
|
||||
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
|
||||
// 2: removed unused bool in ODCL export (compiler only)
|
||||
// 1: header format change (more regular), export package for _ struct fields
|
||||
// 0: Go1.7 encoding
|
||||
const exportVersion = 4 |
||||
|
||||
// trackAllTypes enables cycle tracking for all types, not just named
|
||||
// types. The existing compiler invariants assume that unnamed types
|
||||
// that are not completely set up are not used, or else there are spurious
|
||||
// errors.
|
||||
// If disabled, only named types are tracked, possibly leading to slightly
|
||||
// less efficient encoding in rare cases. It also prevents the export of
|
||||
// some corner-case type declarations (but those are not handled correctly
|
||||
// with the textual export format either).
|
||||
// TODO(gri) enable and remove once issues caused by it are fixed
|
||||
const trackAllTypes = false |
||||
|
||||
// An exporter holds the state of a single BExportData call: the output
// buffer, the index maps that let repeated strings/packages/types be
// written as back-references, and the incremental file/line state used
// by the delta position encoding.
type exporter struct {
    fset *token.FileSet
    out  bytes.Buffer

    // object -> index maps, indexed in order of serialization
    strIndex map[string]int // "" is pre-mapped to 0 (see BExportData)
    pkgIndex map[*types.Package]int
    typIndex map[types.Type]int

    // position encoding
    posInfoFormat bool
    prevFile      string
    prevLine      int

    // debugging support
    written int // bytes written
    indent  int // for trace
}
||||
|
||||
// internalError represents an error generated inside this package.
type internalError string

// Error implements the error interface, prefixing the message with
// the package name so internal failures are attributable.
func (e internalError) Error() string {
    return "gcimporter: " + string(e)
}

// internalErrorf formats its arguments in the manner of fmt.Sprintf
// and wraps the result in an internalError.
func internalErrorf(format string, args ...interface{}) error {
    msg := fmt.Sprintf(format, args...)
    return internalError(msg)
}
||||
|
||||
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
//
// Encoding failures anywhere in the exporter are raised as internalError
// panics and converted back into an ordinary error result by the
// deferred handler below.
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
    defer func() {
        if e := recover(); e != nil {
            if ierr, ok := e.(internalError); ok {
                err = ierr
                return
            }
            // Not an internal error; panic again.
            panic(e)
        }
    }()

    p := exporter{
        fset:          fset,
        strIndex:      map[string]int{"": 0}, // empty string is mapped to 0
        pkgIndex:      make(map[*types.Package]int),
        typIndex:      make(map[types.Type]int),
        posInfoFormat: true, // TODO(gri) might become a flag, eventually
    }

    // write version info
    // The version string must start with "version %d" where %d is the version
    // number. Additional debugging information may follow after a blank; that
    // text is ignored by the importer.
    p.rawStringln(fmt.Sprintf("version %d", exportVersion))
    var debug string
    if debugFormat {
        debug = "debug"
    }
    p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
    p.bool(trackAllTypes)
    p.bool(p.posInfoFormat)

    // --- generic export data ---

    // populate type map with predeclared "known" types
    for index, typ := range predeclared {
        p.typIndex[typ] = index
    }
    if len(p.typIndex) != len(predeclared) {
        return nil, internalError("duplicate entries in type map?")
    }

    // write package data
    p.pkg(pkg, true)
    if trace {
        p.tracef("\n")
    }

    // write objects (exported package-level declarations only)
    objcount := 0
    scope := pkg.Scope()
    for _, name := range scope.Names() {
        if !ast.IsExported(name) {
            continue
        }
        if trace {
            p.tracef("\n")
        }
        p.obj(scope.Lookup(name))
        objcount++
    }

    // indicate end of list
    if trace {
        p.tracef("\n")
    }
    p.tag(endTag)

    // for self-verification only (redundant)
    p.int(objcount)

    if trace {
        p.tracef("\n")
    }

    // --- end of export data ---

    return p.out.Bytes(), nil
}
||||
|
||||
// pkg writes a reference to pkg: a non-negative index if the package
// was written before, otherwise the package tag followed by its name
// and path (the path is suppressed when emptypath is set, which is
// done for the package being exported itself).
func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
    if pkg == nil {
        panic(internalError("unexpected nil pkg"))
    }

    // if we saw the package before, write its index (>= 0)
    if i, ok := p.pkgIndex[pkg]; ok {
        p.index('P', i)
        return
    }

    // otherwise, remember the package, write the package tag (< 0) and package data
    if trace {
        p.tracef("P%d = { ", len(p.pkgIndex))
        defer p.tracef("} ")
    }
    p.pkgIndex[pkg] = len(p.pkgIndex)

    p.tag(packageTag)
    p.string(pkg.Name())
    if emptypath {
        p.string("")
    } else {
        p.string(pkg.Path())
    }
}
||||
|
||||
// obj writes the encoding of a package-level object: a tag identifying
// its kind, followed by position, qualified name, and type information
// (plus the value for constants, and parameter/result lists for
// functions). Any other object kind is an internal error.
func (p *exporter) obj(obj types.Object) {
    switch obj := obj.(type) {
    case *types.Const:
        p.tag(constTag)
        p.pos(obj)
        p.qualifiedName(obj)
        p.typ(obj.Type())
        p.value(obj.Val())

    case *types.TypeName:
        if obj.IsAlias() {
            p.tag(aliasTag)
            p.pos(obj)
            p.qualifiedName(obj)
        } else {
            // non-alias type: position and name are carried by the
            // named type written below, so only the tag is needed here
            p.tag(typeTag)
        }
        p.typ(obj.Type())

    case *types.Var:
        p.tag(varTag)
        p.pos(obj)
        p.qualifiedName(obj)
        p.typ(obj.Type())

    case *types.Func:
        p.tag(funcTag)
        p.pos(obj)
        p.qualifiedName(obj)
        sig := obj.Type().(*types.Signature)
        p.paramList(sig.Params(), sig.Variadic())
        p.paramList(sig.Results(), false)

    default:
        panic(internalErrorf("unexpected object %v (%T)", obj, obj))
    }
}
||||
|
||||
// pos emits position information for obj using delta encoding against
// the previously written position (prevFile/prevLine). It is a no-op
// unless posInfoFormat is enabled.
func (p *exporter) pos(obj types.Object) {
    if !p.posInfoFormat {
        return
    }

    file, line := p.fileLine(obj)
    if file == p.prevFile {
        // common case: write line delta
        // delta == 0 means different file or no line change
        delta := line - p.prevLine
        p.int(delta)
        if delta == 0 {
            p.int(-1) // -1 means no file change
        }
    } else {
        // different file
        p.int(0)
        // Encode filename as length of common prefix with previous
        // filename, followed by (possibly empty) suffix. Filenames
        // frequently share path prefixes, so this can save a lot
        // of space and make export data size less dependent on file
        // path length. The suffix is unlikely to be empty because
        // file names tend to end in ".go".
        n := commonPrefixLen(p.prevFile, file)
        p.int(n)           // n >= 0
        p.string(file[n:]) // write suffix only
        p.prevFile = file
        p.int(line)
    }
    p.prevLine = line
}
||||
|
||||
func (p *exporter) fileLine(obj types.Object) (file string, line int) { |
||||
if p.fset != nil { |
||||
pos := p.fset.Position(obj.Pos()) |
||||
file = pos.Filename |
||||
line = pos.Line |
||||
} |
||||
return |
||||
} |
||||
|
||||
// commonPrefixLen returns the length of the longest common prefix
// of a and b.
func commonPrefixLen(a, b string) int {
    // Bound the scan by the shorter of the two strings.
    limit := len(a)
    if len(b) < limit {
        limit = len(b)
    }
    prefix := 0
    for prefix < limit && a[prefix] == b[prefix] {
        prefix++
    }
    return prefix
}
||||
|
||||
// qualifiedName writes obj's name followed by a reference to its
// declaring package (with full path, since emptypath is false).
func (p *exporter) qualifiedName(obj types.Object) {
    p.string(obj.Name())
    p.pkg(obj.Pkg(), false)
}
||||
|
||||
// typ writes the encoding of type t. A type seen before is written as
// its non-negative index into typIndex; a new type is written as a
// negative kind tag followed by kind-specific data. Named types (and,
// with trackAllTypes, all types) are entered into typIndex before their
// data is written so that recursive type references terminate.
func (p *exporter) typ(t types.Type) {
    if t == nil {
        panic(internalError("nil type"))
    }

    // Possible optimization: Anonymous pointer types *T where
    // T is a named type are common. We could canonicalize all
    // such types *T to a single type PT = *T. This would lead
    // to at most one *T entry in typIndex, and all future *T's
    // would be encoded as the respective index directly. Would
    // save 1 byte (pointerTag) per *T and reduce the typIndex
    // size (at the cost of a canonicalization map). We can do
    // this later, without encoding format change.

    // if we saw the type before, write its index (>= 0)
    if i, ok := p.typIndex[t]; ok {
        p.index('T', i)
        return
    }

    // otherwise, remember the type, write the type tag (< 0) and type data
    if trackAllTypes {
        if trace {
            p.tracef("T%d = {>\n", len(p.typIndex))
            defer p.tracef("<\n} ")
        }
        p.typIndex[t] = len(p.typIndex)
    }

    switch t := t.(type) {
    case *types.Named:
        if !trackAllTypes {
            // if we don't track all types, track named types now
            p.typIndex[t] = len(p.typIndex)
        }

        p.tag(namedTag)
        p.pos(t.Obj())
        p.qualifiedName(t.Obj())
        p.typ(t.Underlying())
        if !types.IsInterface(t) {
            p.assocMethods(t)
        }

    case *types.Array:
        p.tag(arrayTag)
        p.int64(t.Len())
        p.typ(t.Elem())

    case *types.Slice:
        p.tag(sliceTag)
        p.typ(t.Elem())

    case *dddSlice:
        // synthetic ...T type used for the final variadic parameter
        p.tag(dddTag)
        p.typ(t.elem)

    case *types.Struct:
        p.tag(structTag)
        p.fieldList(t)

    case *types.Pointer:
        p.tag(pointerTag)
        p.typ(t.Elem())

    case *types.Signature:
        p.tag(signatureTag)
        p.paramList(t.Params(), t.Variadic())
        p.paramList(t.Results(), false)

    case *types.Interface:
        p.tag(interfaceTag)
        p.iface(t)

    case *types.Map:
        p.tag(mapTag)
        p.typ(t.Key())
        p.typ(t.Elem())

    case *types.Chan:
        p.tag(chanTag)
        p.int(int(3 - t.Dir())) // hack
        p.typ(t.Elem())

    default:
        panic(internalErrorf("unexpected type %T: %s", t, t))
    }
}
||||
|
||||
// assocMethods writes the declared methods of the named type, sorted
// by name so that the export data is deterministic.
func (p *exporter) assocMethods(named *types.Named) {
    // Sort methods (for determinism).
    var methods []*types.Func
    for i := 0; i < named.NumMethods(); i++ {
        methods = append(methods, named.Method(i))
    }
    sort.Sort(methodsByName(methods))

    p.int(len(methods))

    if trace && methods != nil {
        p.tracef("associated methods {>\n")
    }

    for i, m := range methods {
        if trace && i > 0 {
            p.tracef("\n")
        }

        p.pos(m)
        name := m.Name()
        p.string(name)
        // unexported method names need their package for disambiguation
        if !exported(name) {
            p.pkg(m.Pkg(), false)
        }

        sig := m.Type().(*types.Signature)
        p.paramList(types.NewTuple(sig.Recv()), false)
        p.paramList(sig.Params(), sig.Variadic())
        p.paramList(sig.Results(), false)
        p.int(0) // dummy value for go:nointerface pragma - ignored by importer
    }

    if trace && methods != nil {
        p.tracef("<\n} ")
    }
}
||||
|
||||
// methodsByName implements sort.Interface, ordering methods by name.
type methodsByName []*types.Func

func (x methodsByName) Len() int           { return len(x) }
func (x methodsByName) Swap(i, j int)      { x[i], x[j] = x[j], x[i] }
func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
||||
|
||||
// fieldList writes the number of fields of struct t followed by each
// field and its struct tag.
func (p *exporter) fieldList(t *types.Struct) {
    if trace && t.NumFields() > 0 {
        p.tracef("fields {>\n")
        defer p.tracef("<\n} ")
    }

    p.int(t.NumFields())
    for i := 0; i < t.NumFields(); i++ {
        if trace && i > 0 {
            p.tracef("\n")
        }
        p.field(t.Field(i))
        p.string(t.Tag(i))
    }
}
||||
|
||||
// field writes the position, name, and type of struct field f.
// f must actually be a struct field; anything else is an internal error.
func (p *exporter) field(f *types.Var) {
    if !f.IsField() {
        panic(internalError("field expected"))
    }

    p.pos(f)
    p.fieldName(f)
    p.typ(f.Type())
}
||||
|
||||
// iface writes interface type t: a placeholder 0 for the (not yet
// supported) embedded-interface list, followed by the count and
// encoding of every method in the full method set.
func (p *exporter) iface(t *types.Interface) {
    // TODO(gri): enable importer to load embedded interfaces,
    // then emit Embeddeds and ExplicitMethods separately here.
    p.int(0)

    n := t.NumMethods()
    if trace && n > 0 {
        p.tracef("methods {>\n")
        defer p.tracef("<\n} ")
    }
    p.int(n)
    for i := 0; i < n; i++ {
        if trace && i > 0 {
            p.tracef("\n")
        }
        p.method(t.Method(i))
    }
}
||||
|
||||
// method writes an interface method: position, name (with the declaring
// package for unexported names other than "_"), and signature. m must
// have a receiver, i.e. actually be a method.
func (p *exporter) method(m *types.Func) {
    sig := m.Type().(*types.Signature)
    if sig.Recv() == nil {
        panic(internalError("method expected"))
    }

    p.pos(m)
    p.string(m.Name())
    if m.Name() != "_" && !ast.IsExported(m.Name()) {
        p.pkg(m.Pkg(), false)
    }

    // interface method; no need to encode receiver.
    p.paramList(sig.Params(), sig.Variadic())
    p.paramList(sig.Results(), false)
}
||||
|
||||
// fieldName writes the name of struct field f, using compact encodings
// for anonymous (embedded) fields as described in the comments below.
// Unexported names are followed by their declaring package.
func (p *exporter) fieldName(f *types.Var) {
    name := f.Name()

    if f.Anonymous() {
        // anonymous field - we distinguish between 3 cases:
        // 1) field name matches base type name and is exported
        // 2) field name matches base type name and is not exported
        // 3) field name doesn't match base type name (alias name)
        bname := basetypeName(f.Type())
        if name == bname {
            if ast.IsExported(name) {
                name = "" // 1) we don't need to know the field name or package
            } else {
                name = "?" // 2) use unexported name "?" to force package export
            }
        } else {
            // 3) indicate alias and export name as is
            // (this requires an extra "@" but this is a rare case)
            p.string("@")
        }
    }

    p.string(name)
    if name != "" && !ast.IsExported(name) {
        p.pkg(f.Pkg(), false)
    }
}
||||
|
||||
// basetypeName returns the name of the base type of typ — after
// stripping indirection via this package's deref helper — or ""
// if the base type is unnamed.
func basetypeName(typ types.Type) string {
    switch typ := deref(typ).(type) {
    case *types.Basic:
        return typ.Name()
    case *types.Named:
        return typ.Obj().Name()
    default:
        return "" // unnamed type
    }
}
||||
|
||||
// paramList writes the parameters (or results) in params. A negative
// count signals unnamed parameters; when variadic is set, the final
// parameter's slice type is rewritten as a synthetic dddSlice so the
// importer can reconstruct the ...T form.
func (p *exporter) paramList(params *types.Tuple, variadic bool) {
    // use negative length to indicate unnamed parameters
    // (look at the first parameter only since either all
    // names are present or all are absent)
    n := params.Len()
    if n > 0 && params.At(0).Name() == "" {
        n = -n
    }
    p.int(n)
    for i := 0; i < params.Len(); i++ {
        q := params.At(i)
        t := q.Type()
        if variadic && i == params.Len()-1 {
            t = &dddSlice{t.(*types.Slice).Elem()}
        }
        p.typ(t)
        if n > 0 {
            name := q.Name()
            p.string(name)
            if name != "_" {
                p.pkg(q.Pkg(), false)
            }
        }
        p.string("") // no compiler-specific info
    }
}
||||
|
||||
// value encodes the constant x, starting with a tag identifying its kind
// followed by a kind-specific payload.
func (p *exporter) value(x constant.Value) {
	if trace {
		p.tracef("= ")
	}

	switch x.Kind() {
	case constant.Bool:
		// Booleans are encoded entirely in the tag.
		tag := falseTag
		if constant.BoolVal(x) {
			tag = trueTag
		}
		p.tag(tag)

	case constant.Int:
		if v, exact := constant.Int64Val(x); exact {
			// common case: x fits into an int64 - use compact encoding
			p.tag(int64Tag)
			p.int64(v)
			return
		}
		// uncommon case: large x - use float encoding
		// (powers of 2 will be encoded efficiently with exponent)
		p.tag(floatTag)
		p.float(constant.ToFloat(x))

	case constant.Float:
		p.tag(floatTag)
		p.float(x)

	case constant.Complex:
		// Complex values are a pair of floats: real part, then imaginary.
		p.tag(complexTag)
		p.float(constant.Real(x))
		p.float(constant.Imag(x))

	case constant.String:
		p.tag(stringTag)
		p.string(constant.StringVal(x))

	case constant.Unknown:
		// package contains type errors
		p.tag(unknownTag)

	default:
		panic(internalErrorf("unexpected value %v (%T)", x, x))
	}
}
||||
|
||||
// float encodes the floating-point constant x as a sign, a binary
// exponent, and the big-endian bytes of a normalized mantissa
// (0.5 <= m < 1.0). Zero is encoded as the single int 0.
func (p *exporter) float(x constant.Value) {
	if x.Kind() != constant.Float {
		panic(internalErrorf("unexpected constant %v, want float", x))
	}
	// extract sign (there is no -0)
	sign := constant.Sign(x)
	if sign == 0 {
		// x == 0
		p.int(0)
		return
	}
	// x != 0

	var f big.Float
	if v, exact := constant.Float64Val(x); exact {
		// float64
		f.SetFloat64(v)
	} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
		// TODO(gri): add big.Rat accessor to constant.Value.
		r := valueToRat(num)
		f.SetRat(r.Quo(r, valueToRat(denom)))
	} else {
		// Value too large to represent as a fraction => inaccessible.
		// TODO(gri): add big.Float accessor to constant.Value.
		f.SetFloat64(math.MaxFloat64) // FIXME
	}

	// extract exponent such that 0.5 <= m < 1.0
	var m big.Float
	exp := f.MantExp(&m)

	// extract mantissa as *big.Int
	// - set exponent large enough so mant satisfies mant.IsInt()
	// - get *big.Int from mant
	m.SetMantExp(&m, int(m.MinPrec()))
	mant, acc := m.Int(nil)
	if acc != big.Exact {
		panic(internalError("internal error"))
	}

	p.int(sign)
	p.int(exp)
	p.string(string(mant.Bytes()))
}
||||
|
||||
func valueToRat(x constant.Value) *big.Rat { |
||||
// Convert little-endian to big-endian.
|
||||
// I can't believe this is necessary.
|
||||
bytes := constant.Bytes(x) |
||||
for i := 0; i < len(bytes)/2; i++ { |
||||
bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] |
||||
} |
||||
return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) |
||||
} |
||||
|
||||
func (p *exporter) bool(b bool) bool { |
||||
if trace { |
||||
p.tracef("[") |
||||
defer p.tracef("= %v] ", b) |
||||
} |
||||
|
||||
x := 0 |
||||
if b { |
||||
x = 1 |
||||
} |
||||
p.int(x) |
||||
return b |
||||
} |
||||
|
||||
// ----------------------------------------------------------------------------
// Low-level encoders

// index emits a non-negative table index. marker is used for trace
// output only; the wire format always uses the 't' debug marker.
func (p *exporter) index(marker byte, index int) {
	if index < 0 {
		panic(internalError("invalid index < 0"))
	}
	if debugFormat {
		p.marker('t')
	}
	if trace {
		p.tracef("%c%d ", marker, index)
	}
	p.rawInt64(int64(index))
}

// tag emits a tag value. Tags are strictly negative so readers can
// distinguish them from (non-negative) indices in the stream.
func (p *exporter) tag(tag int) {
	if tag >= 0 {
		panic(internalError("invalid tag >= 0"))
	}
	if debugFormat {
		p.marker('t')
	}
	if trace {
		p.tracef("%s ", tagString[-tag])
	}
	p.rawInt64(int64(tag))
}

// int emits x widened to int64.
func (p *exporter) int(x int) {
	p.int64(int64(x))
}

// int64 emits x as a varint, preceded by an 'i' marker in debugFormat.
func (p *exporter) int64(x int64) {
	if debugFormat {
		p.marker('i')
	}
	if trace {
		p.tracef("%d ", x)
	}
	p.rawInt64(x)
}
||||
|
||||
// string emits s using string interning: a previously seen string is
// written as its non-negative table index; a new string is written as
// its negative length followed by its bytes.
func (p *exporter) string(s string) {
	if debugFormat {
		p.marker('s')
	}
	if trace {
		p.tracef("%q ", s)
	}
	// if we saw the string before, write its index (>= 0)
	// (the empty string is mapped to 0)
	if i, ok := p.strIndex[s]; ok {
		p.rawInt64(int64(i))
		return
	}
	// otherwise, remember string and write its negative length and bytes
	p.strIndex[s] = len(p.strIndex)
	p.rawInt64(-int64(len(s)))
	for i := 0; i < len(s); i++ {
		// Bytes go through rawByte so '$' and '|' get escaped.
		p.rawByte(s[i])
	}
}
||||
|
||||
// marker emits a marker byte and position information which makes
// it easy for a reader to detect if it is "out of sync". Used for
// debugFormat format only.
func (p *exporter) marker(m byte) {
	p.rawByte(m)
	// Enable this for help tracking down the location
	// of an incorrect marker when running in debugFormat.
	if false && trace {
		p.tracef("#%d ", p.written)
	}
	// The byte count written so far serves as the position stamp.
	p.rawInt64(int64(p.written))
}
||||
|
||||
// rawInt64 should only be used by low-level encoders.
func (p *exporter) rawInt64(x int64) {
	var tmp [binary.MaxVarintLen64]byte
	n := binary.PutVarint(tmp[:], x)
	for i := 0; i < n; i++ {
		// Each varint byte goes through rawByte so '$'/'|' get escaped.
		p.rawByte(tmp[i])
	}
}

// rawStringln should only be used to emit the initial version string.
func (p *exporter) rawStringln(s string) {
	for i := 0; i < len(s); i++ {
		p.rawByte(s[i])
	}
	p.rawByte('\n')
}
||||
|
||||
// rawByte is the bottleneck interface to write to p.out.
// rawByte escapes b as follows (any encoding does that
// hides '$'):
//
//	'$'  => '|' 'S'
//	'|'  => '|' '|'
//
// Necessary so other tools can find the end of the
// export data by searching for "$$".
// rawByte should only be used by low-level encoders.
func (p *exporter) rawByte(b byte) {
	switch b {
	case '$':
		// write '$' as '|' 'S'
		b = 'S'
		fallthrough // both cases emit the '|' escape prefix first
	case '|':
		// write '|' as '|' '|'
		p.out.WriteByte('|')
		p.written++
	}
	p.out.WriteByte(b)
	p.written++
}
||||
|
||||
// tracef is like fmt.Printf but it rewrites the format string
// to take care of indentation: '>' increases and '<' decreases the
// current indent level (and is removed from the output), and every
// newline is followed by the current indentation.
func (p *exporter) tracef(format string, args ...interface{}) {
	if strings.ContainsAny(format, "<>\n") {
		var buf bytes.Buffer
		for i := 0; i < len(format); i++ {
			// no need to deal with runes
			ch := format[i]
			switch ch {
			case '>':
				p.indent++
				continue
			case '<':
				p.indent--
				continue
			}
			buf.WriteByte(ch)
			if ch == '\n' {
				// Re-indent after each newline.
				for j := p.indent; j > 0; j-- {
					buf.WriteString(". ")
				}
			}
		}
		format = buf.String()
	}
	fmt.Printf(format, args...)
}
||||
|
||||
// Debugging support.
// (tagString is only used when tracing is enabled)
// Tag constants are negative; index with -tag to get the name.
var tagString = [...]string{
	// Packages
	-packageTag: "package",

	// Types
	-namedTag:     "named type",
	-arrayTag:     "array",
	-sliceTag:     "slice",
	-dddTag:       "ddd",
	-structTag:    "struct",
	-pointerTag:   "pointer",
	-signatureTag: "signature",
	-interfaceTag: "interface",
	-mapTag:       "map",
	-chanTag:      "chan",

	// Values
	-falseTag:    "false",
	-trueTag:     "true",
	-int64Tag:    "int64",
	-floatTag:    "float",
	-fractionTag: "fraction",
	-complexTag:  "complex",
	-stringTag:   "string",
	-unknownTag:  "unknown",

	// Type aliases
	-aliasTag: "alias",
}
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,93 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
|
||||
|
||||
// This file implements FindExportData.
|
||||
|
||||
package gcimporter |
||||
|
||||
import ( |
||||
"bufio" |
||||
"fmt" |
||||
"io" |
||||
"strconv" |
||||
"strings" |
||||
) |
||||
|
||||
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { |
||||
// See $GOROOT/include/ar.h.
|
||||
hdr := make([]byte, 16+12+6+6+8+10+2) |
||||
_, err = io.ReadFull(r, hdr) |
||||
if err != nil { |
||||
return |
||||
} |
||||
// leave for debugging
|
||||
if false { |
||||
fmt.Printf("header: %s", hdr) |
||||
} |
||||
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) |
||||
size, err = strconv.Atoi(s) |
||||
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { |
||||
err = fmt.Errorf("invalid archive header") |
||||
return |
||||
} |
||||
name = strings.TrimSpace(string(hdr[:16])) |
||||
return |
||||
} |
||||
|
||||
// FindExportData positions the reader r at the beginning of the
// export data section of an underlying GC-created object/archive
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
func FindExportData(r *bufio.Reader) (hdr string, err error) {
	// Read first line to make sure this is an object file.
	line, err := r.ReadSlice('\n')
	if err != nil {
		err = fmt.Errorf("can't find export data (%v)", err)
		return
	}

	if string(line) == "!<arch>\n" {
		// Archive file. Scan to __.PKGDEF.
		var name string
		if name, _, err = readGopackHeader(r); err != nil {
			return
		}

		// First entry should be __.PKGDEF.
		if name != "__.PKGDEF" {
			err = fmt.Errorf("go archive is missing __.PKGDEF")
			return
		}

		// Read first line of __.PKGDEF data, so that line
		// is once again the first line of the input.
		if line, err = r.ReadSlice('\n'); err != nil {
			err = fmt.Errorf("can't find export data (%v)", err)
			return
		}
	}

	// Now at __.PKGDEF in archive or still at beginning of file.
	// Either way, line should begin with "go object ".
	if !strings.HasPrefix(string(line), "go object ") {
		err = fmt.Errorf("not a Go object file")
		return
	}

	// Skip over object header to export data.
	// Begins after first line starting with $$.
	for line[0] != '$' {
		if line, err = r.ReadSlice('\n'); err != nil {
			err = fmt.Errorf("can't find export data (%v)", err)
			return
		}
	}
	// The "$$" (or "$$B") line itself, including the newline.
	hdr = string(line)

	return
}
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,598 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Indexed package import.
|
||||
// See cmd/compile/internal/gc/iexport.go for the export data format.
|
||||
|
||||
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
|
||||
|
||||
package gcimporter |
||||
|
||||
import ( |
||||
"bytes" |
||||
"encoding/binary" |
||||
"fmt" |
||||
"go/constant" |
||||
"go/token" |
||||
"go/types" |
||||
"io" |
||||
"sort" |
||||
) |
||||
|
||||
// intReader decodes varint-encoded integers from export data. path is
// the importing package's path, used only for error context.
type intReader struct {
	*bytes.Reader
	path string
}

// int64 reads a signed varint; decode failures are fatal via errorf
// (which panics; IImportData recovers it into a returned error).
func (r *intReader) int64() int64 {
	i, err := binary.ReadVarint(r.Reader)
	if err != nil {
		errorf("import %q: read varint error: %v", r.path, err)
	}
	return i
}

// uint64 reads an unsigned varint; decode failures are fatal via errorf.
func (r *intReader) uint64() uint64 {
	i, err := binary.ReadUvarint(r.Reader)
	if err != nil {
		errorf("import %q: read varint error: %v", r.path, err)
	}
	return i
}
||||
|
||||
// predeclReserved is the number of type offsets reserved for
// predeclared types; offsets below it index into the predeclared
// type table rather than the declaration data.
const predeclReserved = 32

// itag identifies the kind of a type descriptor in the declaration data.
type itag uint64

const (
	// Types
	definedType itag = iota
	pointerType
	sliceType
	arrayType
	chanType
	mapType
	signatureType
	structType
	interfaceType
)
||||
|
||||
// IImportData imports a package from the serialized package data
// and returns the number of bytes consumed and a reference to the package.
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
	const currentVersion = 0
	version := -1
	// Decoding errors panic via errorf; convert them to returned errors
	// with a hint about whether the data is newer than this reader.
	defer func() {
		if e := recover(); e != nil {
			if version > currentVersion {
				err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
			} else {
				err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
			}
		}
	}()

	r := &intReader{bytes.NewReader(data), path}

	version = int(r.uint64())
	switch version {
	case currentVersion:
	default:
		errorf("unknown iexport format version %d", version)
	}

	// Header: lengths of the string and declaration sections.
	sLen := int64(r.uint64())
	dLen := int64(r.uint64())

	// Slice the two sections out of data so they can be addressed by
	// offset, then skip the reader past them.
	whence, _ := r.Seek(0, io.SeekCurrent)
	stringData := data[whence : whence+sLen]
	declData := data[whence+sLen : whence+sLen+dLen]
	r.Seek(sLen+dLen, io.SeekCurrent)

	p := iimporter{
		ipath: path,

		stringData:  stringData,
		stringCache: make(map[uint64]string),
		pkgCache:    make(map[uint64]*types.Package),

		declData: declData,
		pkgIndex: make(map[*types.Package]map[string]uint64),
		typCache: make(map[uint64]types.Type),

		fake: fakeFileSet{
			fset:  fset,
			files: make(map[string]*token.File),
		},
	}

	// Pre-populate the type cache with the predeclared types so that
	// offsets below predeclReserved resolve without decoding.
	for i, pt := range predeclared {
		p.typCache[uint64(i)] = pt
	}

	pkgList := make([]*types.Package, r.uint64())
	for i := range pkgList {
		pkgPathOff := r.uint64()
		pkgPath := p.stringAt(pkgPathOff)
		pkgName := p.stringAt(r.uint64())
		_ = r.uint64() // package height; unused by go/types

		// The empty path denotes the package being imported.
		if pkgPath == "" {
			pkgPath = path
		}
		pkg := imports[pkgPath]
		if pkg == nil {
			pkg = types.NewPackage(pkgPath, pkgName)
			imports[pkgPath] = pkg
		} else if pkg.Name() != pkgName {
			errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
		}

		p.pkgCache[pkgPathOff] = pkg

		// Read this package's name -> declaration-offset index.
		nameIndex := make(map[string]uint64)
		for nSyms := r.uint64(); nSyms > 0; nSyms-- {
			name := p.stringAt(r.uint64())
			nameIndex[name] = r.uint64()
		}

		p.pkgIndex[pkg] = nameIndex
		pkgList[i] = pkg
	}

	// The first package in the list is the one being imported.
	localpkg := pkgList[0]

	// Decode its declarations in deterministic (sorted) order.
	names := make([]string, 0, len(p.pkgIndex[localpkg]))
	for name := range p.pkgIndex[localpkg] {
		names = append(names, name)
	}
	sort.Strings(names)
	for _, name := range names {
		p.doDecl(localpkg, name)
	}

	// Interfaces are completed only after all declarations are decoded.
	for _, typ := range p.interfaceList {
		typ.Complete()
	}

	// record all referenced packages as imports
	list := append(([]*types.Package)(nil), pkgList[1:]...)
	sort.Sort(byPath(list))
	localpkg.SetImports(list)

	// package was imported completely and without errors
	localpkg.MarkComplete()

	consumed, _ := r.Seek(0, io.SeekCurrent)
	return int(consumed), localpkg, nil
}
||||
|
||||
// iimporter holds the state shared by all importReaders while decoding
// one package's indexed export data.
type iimporter struct {
	ipath string // path of the package being imported

	stringData  []byte            // string section
	stringCache map[uint64]string // offset -> decoded string
	pkgCache    map[uint64]*types.Package

	declData []byte                               // declaration section
	pkgIndex map[*types.Package]map[string]uint64 // per-package name -> decl offset
	typCache map[uint64]types.Type                // offset -> decoded type

	fake          fakeFileSet        // synthesizes token.Pos values
	interfaceList []*types.Interface // interfaces to Complete() at the end
}
||||
|
||||
// doDecl decodes the declaration of name in pkg and inserts the
// resulting object into pkg's scope. It is a no-op if the name was
// already imported.
func (p *iimporter) doDecl(pkg *types.Package, name string) {
	// See if we've already imported this declaration.
	if obj := pkg.Scope().Lookup(name); obj != nil {
		return
	}

	off, ok := p.pkgIndex[pkg][name]
	if !ok {
		errorf("%v.%v not in index", pkg, name)
	}

	// Decode from the declaration data starting at the indexed offset.
	r := &importReader{p: p, currPkg: pkg}
	r.declReader.Reset(p.declData[off:])

	r.obj(name)
}
||||
|
||||
// stringAt decodes (and caches) the length-prefixed string stored at
// offset off in the string section.
func (p *iimporter) stringAt(off uint64) string {
	if s, ok := p.stringCache[off]; ok {
		return s
	}

	// Uvarint length prefix followed by the raw bytes.
	slen, n := binary.Uvarint(p.stringData[off:])
	if n <= 0 {
		errorf("varint failed")
	}
	spos := off + uint64(n)
	s := string(p.stringData[spos : spos+slen])
	p.stringCache[off] = s
	return s
}
||||
|
||||
// pkgAt returns the package whose path is stored at string offset off.
// All referenced packages are registered up front, so a cache miss is
// a fatal format error.
func (p *iimporter) pkgAt(off uint64) *types.Package {
	if pkg, ok := p.pkgCache[off]; ok {
		return pkg
	}
	path := p.stringAt(off)
	errorf("missing package %q in %q", path, p.ipath)
	return nil
}

// typAt decodes (and caches) the type at declaration offset off.
// base, if non-nil, is the named type whose underlying type is being
// decoded; interface types are not cached in that case because their
// method receivers depend on base.
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
	if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
		return t
	}

	// Offsets below predeclReserved must have been pre-cached.
	if off < predeclReserved {
		errorf("predeclared type missing from cache: %v", off)
	}

	r := &importReader{p: p}
	r.declReader.Reset(p.declData[off-predeclReserved:])
	t := r.doType(base)

	if base == nil || !isInterface(t) {
		p.typCache[off] = t
	}
	return t
}
||||
|
||||
// importReader decodes a single declaration or type from the
// declaration section. prevFile/prevLine carry the delta-encoded
// position state between pos() calls.
type importReader struct {
	p          *iimporter
	declReader bytes.Reader
	currPkg    *types.Package // package context for decoded names
	prevFile   string
	prevLine   int64
}
||||
|
||||
// obj decodes one object declaration: a single tag byte selects the
// object kind ('A' alias, 'C' const, 'F' func, 'T' type, 'V' var),
// followed by a position and kind-specific payload. The decoded
// object is inserted into its package's scope.
func (r *importReader) obj(name string) {
	tag := r.byte()
	pos := r.pos()

	switch tag {
	case 'A':
		typ := r.typ()

		r.declare(types.NewTypeName(pos, r.currPkg, name, typ))

	case 'C':
		typ, val := r.value()

		r.declare(types.NewConst(pos, r.currPkg, name, typ, val))

	case 'F':
		sig := r.signature(nil)

		r.declare(types.NewFunc(pos, r.currPkg, name, sig))

	case 'T':
		// Types can be recursive. We need to setup a stub
		// declaration before recursing.
		obj := types.NewTypeName(pos, r.currPkg, name, nil)
		named := types.NewNamed(obj, nil, nil)
		r.declare(obj)

		underlying := r.p.typAt(r.uint64(), named).Underlying()
		named.SetUnderlying(underlying)

		// Interface methods are part of the interface itself;
		// only non-interface types carry a method list here.
		if !isInterface(underlying) {
			for n := r.uint64(); n > 0; n-- {
				mpos := r.pos()
				mname := r.ident()
				recv := r.param()
				msig := r.signature(recv)

				named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
			}
		}

	case 'V':
		typ := r.typ()

		r.declare(types.NewVar(pos, r.currPkg, name, typ))

	default:
		errorf("unexpected tag: %v", tag)
	}
}

// declare inserts obj into its declaring package's scope.
func (r *importReader) declare(obj types.Object) {
	obj.Pkg().Scope().Insert(obj)
}
||||
|
||||
// value decodes a constant's type followed by its value; the value
// encoding is selected by the constant kind of the type's underlying
// basic type.
func (r *importReader) value() (typ types.Type, val constant.Value) {
	typ = r.typ()

	switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
	case types.IsBoolean:
		val = constant.MakeBool(r.bool())

	case types.IsString:
		val = constant.MakeString(r.string())

	case types.IsInteger:
		val = r.mpint(b)

	case types.IsFloat:
		val = r.mpfloat(b)

	case types.IsComplex:
		// Real part, then imaginary part.
		re := r.mpfloat(b)
		im := r.mpfloat(b)
		val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))

	default:
		errorf("unexpected type %v", typ) // panics
		panic("unreachable")
	}

	return
}
||||
|
||||
func intSize(b *types.Basic) (signed bool, maxBytes uint) { |
||||
if (b.Info() & types.IsUntyped) != 0 { |
||||
return true, 64 |
||||
} |
||||
|
||||
switch b.Kind() { |
||||
case types.Float32, types.Complex64: |
||||
return true, 3 |
||||
case types.Float64, types.Complex128: |
||||
return true, 7 |
||||
} |
||||
|
||||
signed = (b.Info() & types.IsUnsigned) == 0 |
||||
switch b.Kind() { |
||||
case types.Int8, types.Uint8: |
||||
maxBytes = 1 |
||||
case types.Int16, types.Uint16: |
||||
maxBytes = 2 |
||||
case types.Int32, types.Uint32: |
||||
maxBytes = 4 |
||||
default: |
||||
maxBytes = 8 |
||||
} |
||||
|
||||
return |
||||
} |
||||
|
||||
// mpint decodes a multi-precision integer of basic type b. Small
// values are encoded directly in the first byte (zig-zag-style for
// signed types); larger values store a negated byte count in the
// first byte followed by big-endian magnitude bytes.
func (r *importReader) mpint(b *types.Basic) constant.Value {
	signed, maxBytes := intSize(b)

	// Values below maxSmall are inlined in the first byte; the top of
	// the byte range encodes the length of an out-of-line value.
	maxSmall := 256 - maxBytes
	if signed {
		maxSmall = 256 - 2*maxBytes
	}
	if maxBytes == 1 {
		maxSmall = 256
	}

	n, _ := r.declReader.ReadByte()
	if uint(n) < maxSmall {
		// Inline value: for signed types the low bit is the sign
		// (zig-zag decoding).
		v := int64(n)
		if signed {
			v >>= 1
			if n&1 != 0 {
				v = ^v
			}
		}
		return constant.MakeInt64(v)
	}

	// Out-of-line value: recover the byte count from the (wrapped)
	// negated first byte; for signed types the low bit again encodes
	// the sign.
	v := -n
	if signed {
		v = -(n &^ 1) >> 1
	}
	if v < 1 || uint(v) > maxBytes {
		errorf("weird decoding: %v, %v => %v", n, signed, v)
	}

	buf := make([]byte, v)
	io.ReadFull(&r.declReader, buf)

	// convert to little endian
	// TODO(gri) go/constant should have a more direct conversion function
	// (e.g., once it supports a big.Float based implementation)
	for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
		buf[i], buf[j] = buf[j], buf[i]
	}

	x := constant.MakeFromBytes(buf)
	if signed && n&1 != 0 {
		x = constant.UnaryOp(token.SUB, x, 0)
	}
	return x
}
||||
|
||||
// mpfloat decodes a multi-precision float of basic type b as a
// mantissa (via mpint) followed by a binary exponent. A zero mantissa
// has no exponent and decodes as zero.
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
	x := r.mpint(b)
	if constant.Sign(x) == 0 {
		return x
	}

	// Apply the exponent: x * 2**exp.
	exp := r.int64()
	switch {
	case exp > 0:
		x = constant.Shift(x, token.SHL, uint(exp))
	case exp < 0:
		d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
		x = constant.BinaryOp(x, token.QUO, d)
	}
	return x
}
||||
|
||||
// ident reads an identifier (a plain string).
func (r *importReader) ident() string {
	return r.string()
}

// qualifiedIdent reads a name followed by its declaring package.
func (r *importReader) qualifiedIdent() (*types.Package, string) {
	name := r.string()
	pkg := r.pkg()
	return pkg, name
}

// pos decodes a delta-encoded source position: usually a line delta
// relative to the previous position; the sentinel deltaNewFile
// introduces either a bare line jump (l == -1) or a new file name
// plus absolute line.
func (r *importReader) pos() token.Pos {
	delta := r.int64()
	if delta != deltaNewFile {
		r.prevLine += delta
	} else if l := r.int64(); l == -1 {
		r.prevLine += deltaNewFile
	} else {
		r.prevFile = r.string()
		r.prevLine = l
	}

	if r.prevFile == "" && r.prevLine == 0 {
		return token.NoPos
	}

	return r.p.fake.pos(r.prevFile, int(r.prevLine))
}

// typ reads a type offset and resolves it (with no named base).
func (r *importReader) typ() types.Type {
	return r.p.typAt(r.uint64(), nil)
}
||||
|
||||
func isInterface(t types.Type) bool { |
||||
_, ok := t.(*types.Interface) |
||||
return ok |
||||
} |
||||
|
||||
// pkg and string read an offset and resolve it via the shared caches.
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
func (r *importReader) string() string      { return r.p.stringAt(r.uint64()) }
|
||||
// doType decodes one type descriptor, dispatching on its itag. base,
// if non-nil, is the named type being defined and is used as the
// receiver type of interface methods (see the TODO below).
func (r *importReader) doType(base *types.Named) types.Type {
	switch k := r.kind(); k {
	default:
		errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
		return nil

	case definedType:
		// Reference to a named type: ensure it is declared, then
		// look it up in its package scope.
		pkg, name := r.qualifiedIdent()
		r.p.doDecl(pkg, name)
		return pkg.Scope().Lookup(name).(*types.TypeName).Type()
	case pointerType:
		return types.NewPointer(r.typ())
	case sliceType:
		return types.NewSlice(r.typ())
	case arrayType:
		n := r.uint64()
		return types.NewArray(r.typ(), int64(n))
	case chanType:
		dir := chanDir(int(r.uint64()))
		return types.NewChan(dir, r.typ())
	case mapType:
		// Key type, then element type.
		return types.NewMap(r.typ(), r.typ())
	case signatureType:
		r.currPkg = r.pkg()
		return r.signature(nil)

	case structType:
		r.currPkg = r.pkg()

		fields := make([]*types.Var, r.uint64())
		tags := make([]string, len(fields))
		for i := range fields {
			fpos := r.pos()
			fname := r.ident()
			ftyp := r.typ()
			emb := r.bool()
			tag := r.string()

			fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
			tags[i] = tag
		}
		return types.NewStruct(fields, tags)

	case interfaceType:
		r.currPkg = r.pkg()

		// Embedded types, then explicit methods.
		embeddeds := make([]types.Type, r.uint64())
		for i := range embeddeds {
			_ = r.pos()
			embeddeds[i] = r.typ()
		}

		methods := make([]*types.Func, r.uint64())
		for i := range methods {
			mpos := r.pos()
			mname := r.ident()

			// TODO(mdempsky): Matches bimport.go, but I
			// don't agree with this.
			var recv *types.Var
			if base != nil {
				recv = types.NewVar(token.NoPos, r.currPkg, "", base)
			}

			msig := r.signature(recv)
			methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
		}

		typ := newInterface(methods, embeddeds)
		// Completion is deferred until all declarations are decoded.
		r.p.interfaceList = append(r.p.interfaceList, typ)
		return typ
	}
}
||||
|
||||
// kind reads a type descriptor tag.
func (r *importReader) kind() itag {
	return itag(r.uint64())
}

// signature reads a function signature: parameters, results, and (for
// non-empty parameter lists) a variadic flag.
func (r *importReader) signature(recv *types.Var) *types.Signature {
	params := r.paramList()
	results := r.paramList()
	variadic := params.Len() > 0 && r.bool()
	return types.NewSignature(recv, params, results, variadic)
}

// paramList reads a length-prefixed tuple of parameters.
func (r *importReader) paramList() *types.Tuple {
	xs := make([]*types.Var, r.uint64())
	for i := range xs {
		xs[i] = r.param()
	}
	return types.NewTuple(xs...)
}

// param reads one parameter: position, name, and type.
func (r *importReader) param() *types.Var {
	pos := r.pos()
	name := r.ident()
	typ := r.typ()
	return types.NewParam(pos, r.currPkg, name, typ)
}

// bool reads a uint64 and interprets any non-zero value as true.
func (r *importReader) bool() bool {
	return r.uint64() != 0
}

// int64 reads a signed varint from the declaration data; decode
// failures are fatal via errorf.
func (r *importReader) int64() int64 {
	n, err := binary.ReadVarint(&r.declReader)
	if err != nil {
		errorf("readVarint: %v", err)
	}
	return n
}

// uint64 reads an unsigned varint from the declaration data.
func (r *importReader) uint64() uint64 {
	n, err := binary.ReadUvarint(&r.declReader)
	if err != nil {
		errorf("readUvarint: %v", err)
	}
	return n
}

// byte reads a single raw byte from the declaration data.
func (r *importReader) byte() byte {
	x, err := r.declReader.ReadByte()
	if err != nil {
		errorf("declReader.ReadByte: %v", err)
	}
	return x
}
||||
@ -0,0 +1,21 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.11
|
||||
|
||||
package gcimporter |
||||
|
||||
import "go/types" |
||||
|
||||
// newInterface constructs an interface type from explicit methods and
// embedded types. Before Go 1.11, types.NewInterface only accepted
// *types.Named embeddings, so each embedded type must be a defined type.
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
	named := make([]*types.Named, len(embeddeds))
	for i, e := range embeddeds {
		var ok bool
		named[i], ok = e.(*types.Named)
		if !ok {
			panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
		}
	}
	return types.NewInterface(methods, named)
}
||||
@ -0,0 +1,13 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build go1.11
|
||||
|
||||
package gcimporter |
||||
|
||||
import "go/types" |
||||
|
||||
// newInterface constructs an interface type from explicit methods and
// embedded types. On Go 1.11+, NewInterfaceType accepts arbitrary
// embedded types, so this is a direct delegation.
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
	return types.NewInterfaceType(methods, embeddeds)
}
||||
Loading…
Reference in new issue