➕ Update vendor

parent 0b8da63175
commit 982ef9c07f
@@ -6,11 +6,13 @@
 package astview
 
 import (
+	"fmt"
 	"go/ast"
 	"go/token"
 	"regexp"
 	"sort"
 	"strconv"
+	"strings"
 )
 
 // ----------------------------------------------------------------------------
@@ -41,6 +43,7 @@ type docReader struct {
 	funcs   map[string]*ast.FuncDecl
 	imports map[string]int
 	bugs    []*ast.CommentGroup
+	todos   []*TodoDoc
 }
 
 func (doc *docReader) init(pkgName string, showAll bool) {
@@ -279,8 +282,10 @@ func copyCommentList(list []*ast.Comment) []*ast.Comment {
 }
 
 var (
+	todoList = "TODO,BUG,FIXME,NOTE,SECBUG"
 	bug_markers = regexp.MustCompile("^/[/*][ \t]*BUG\\(.*\\):[ \t]*") // BUG(uid):
 	bug_content = regexp.MustCompile("[^ \n\r\t]+")                    // at least one non-whitespace char
+	todo_markers = regexp.MustCompile(fmt.Sprintf("^/[/*][ \t]*(%s)[\\s\\:\\(\\,].*$", strings.Replace(todoList, ",", "|", -1)))
 )
 
 // addFile adds the AST for a source file to the docReader.
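Note: the new todo_markers pattern is built by turning the comma-separated todoList into a regexp alternation. A minimal standalone sketch (my own illustration, not code from this commit) of how the generated pattern behaves:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	todoList := "TODO,BUG,FIXME,NOTE,SECBUG"
	// "TODO,BUG,..." becomes "TODO|BUG|..." inside the capture group.
	pattern := fmt.Sprintf("^/[/*][ \t]*(%s)[\\s\\:\\(\\,].*$", strings.Replace(todoList, ",", "|", -1))
	re := regexp.MustCompile(pattern)
	fmt.Println(re.MatchString("// TODO: handle vendored imports")) // true
	fmt.Println(re.MatchString("// note lower case is ignored"))    // false, markers are case-sensitive
}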
@@ -297,19 +302,21 @@ func (doc *docReader) addFile(src *ast.File) {
 	for _, decl := range src.Decls {
 		doc.addDecl(decl)
 	}
 
 	// collect BUG(...) comments
 	for _, c := range src.Comments {
 		text := c.List[0].Text
-		if m := bug_markers.FindStringIndex(text); m != nil {
-			// found a BUG comment; maybe empty
-			if btxt := text[m[1]:]; bug_content.MatchString(btxt) {
-				// non-empty BUG comment; collect comment without BUG prefix
-				list := copyCommentList(c.List)
-				list[0].Text = text[m[1]:]
-				doc.bugs = append(doc.bugs, &ast.CommentGroup{list})
-			}
+		if m := todo_markers.FindStringSubmatchIndex(text); m != nil {
+			doc.todos = append(doc.todos, &TodoDoc{text[m[2]:m[3]], text[m[2]:], c})
 		}
+		// if m := bug_markers.FindStringIndex(text); m != nil {
+		// 	// found a BUG comment; maybe empty
+		// 	if btxt := text[m[1]:]; bug_content.MatchString(btxt) {
+		// 		// non-empty BUG comment; collect comment without BUG prefix
+		// 		list := copyCommentList(c.List)
+		// 		list[0].Text = text[m[1]:]
+		// 		doc.bugs = append(doc.bugs, &ast.CommentGroup{list})
+		// 	}
+		// }
 	}
 	src.Comments = nil // consumed unassociated comments - remove from ast.File node
 }
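For readers unfamiliar with FindStringSubmatchIndex: m[2]:m[3] is the range of the first capture group (the marker word such as TODO), and text[m[2]:] is the marker plus the rest of the comment, which is what the new TodoDoc stores as Tag and Text. A small, hypothetical illustration (not part of the diff):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`^/[/*][ \t]*(TODO|BUG|FIXME)[\s:(,].*$`)
	text := "// TODO: collect vendored imports"
	if m := re.FindStringSubmatchIndex(text); m != nil {
		// m[2]:m[3] is the captured marker; m[2]: is the marker plus the remaining text.
		fmt.Println(text[m[2]:m[3]]) // TODO
		fmt.Println(text[m[2]:])     // TODO: collect vendored imports
	}
}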
@@ -519,6 +526,12 @@ func makeBugDocs(list []*ast.CommentGroup) []string {
 	return d
 }
 
+type TodoDoc struct {
+	Tag string
+	Text string
+	Comments *ast.CommentGroup
+}
+
 // PackageDoc is the documentation for an entire package.
 //
 type PackageDoc struct {
@@ -532,6 +545,7 @@ type PackageDoc struct {
 	Vars     []*ValueDoc
 	Funcs    []*FuncDoc
 	Factorys []*FuncDoc
+	Todos    []*TodoDoc
 	Bugs     []string
 }
 
@@ -553,6 +567,7 @@ func (doc *docReader) newDoc(importpath string, filenames []string) *PackageDoc
 	p.Vars = makeValueDocs(doc.values, token.VAR)
 	p.Funcs = makeFuncDocs(doc.funcs)
 	p.Bugs = makeBugDocs(doc.bugs)
+	p.Todos = doc.todos
 
 	for _, d := range p.Types {
 		switch d.Type.Type.(type) {
@@ -9,14 +9,15 @@ import (
 	"go/ast"
 	"go/parser"
 	"go/token"
+	"go/types"
 	"io"
 	"io/ioutil"
 	"os"
+	"path/filepath"
 	"strings"
 
 	"github.com/visualfc/gotools/command"
-	"golang.org/x/tools/go/types"
+	"github.com/visualfc/gotools/pkgutil"
 )
 
 var Command = &command.Command{
@@ -27,9 +28,13 @@ var Command = &command.Command{
 }
 
 var astViewStdin bool
+var astViewShowEndPos bool
+var astViewShowTodo bool
 
 func init() {
 	Command.Flag.BoolVar(&astViewStdin, "stdin", false, "input from stdin")
+	Command.Flag.BoolVar(&astViewShowEndPos, "end", false, "show decl end pos")
+	Command.Flag.BoolVar(&astViewShowTodo, "todo", false, "show todo list")
 }
 
 func runAstView(cmd *command.Command, args []string) error {
@@ -73,6 +78,8 @@ const (
 	tag_type_method = "tm"
 	tag_type_factor = "tf"
 	tag_type_value = "tv"
+	tag_todo = "b"
+	tag_todo_folder = "+b"
 )
 
 type PackageView struct {
@@ -99,8 +106,11 @@ func (p *PackageView) posFileIndex(pos token.Position) int {
 	return index
 }
 
-func (p *PackageView) posText(pos token.Position) (s string) {
+func (p *PackageView) posText(pos token.Position, end token.Position) (s string) {
 	index := p.posFileIndex(pos)
+	if astViewShowEndPos {
+		return fmt.Sprintf("%d:%d:%d:%d:%d", index, pos.Line, pos.Column, end.Line, end.Column)
+	}
 	return fmt.Sprintf("%d:%d:%d", index, pos.Line, pos.Column)
 }
 
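With the new -end flag the position string grows from fileIndex:line:column to fileIndex:line:column:endLine:endColumn. A self-contained sketch of the formatting with assumed sample values (not output captured from this tool):

package main

import (
	"fmt"
	"go/token"
)

// posText mirrors the formatting logic above for a single position pair.
func posText(index int, pos, end token.Position, showEnd bool) string {
	if showEnd {
		return fmt.Sprintf("%d:%d:%d:%d:%d", index, pos.Line, pos.Column, end.Line, end.Column)
	}
	return fmt.Sprintf("%d:%d:%d", index, pos.Line, pos.Column)
}

func main() {
	pos := token.Position{Line: 12, Column: 1}
	end := token.Position{Line: 14, Column: 2}
	fmt.Println(posText(0, pos, end, false)) // 0:12:1
	fmt.Println(posText(0, pos, end, true))  // 0:12:1:14:2
}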
@@ -156,7 +166,11 @@ func ParseFiles(fset *token.FileSet, filenames []string, mode parser.Mode) (pkgs
 
 func PrintFilesTree(filenames []string, w io.Writer, expr bool) error {
 	fset := token.NewFileSet()
-	pkgs, pkgsfiles, err := ParseFiles(fset, filenames, parser.AllErrors)
+	mode := parser.AllErrors
+	if astViewShowTodo {
+		mode |= parser.ParseComments
+	}
+	pkgs, pkgsfiles, err := ParseFiles(fset, filenames, mode)
 	if err != nil {
 		return err
 	}
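Parser mode flags are a bitmask, and comments are only attached to the AST when parser.ParseComments is set, which is why -todo has to widen the parse mode here and in NewFilePackageSource below. A minimal standalone illustration (mine, not from this repository):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	src := "package p\n\n// TODO: something\nfunc f() {}\n"
	mode := parser.AllErrors | parser.ParseComments // comments are dropped without ParseComments
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, mode)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(f.Comments)) // 1
}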
@@ -182,7 +196,11 @@ func NewFilePackageSource(filename string, f *os.File, expr bool) (*PackageView,
 	p := new(PackageView)
 	p.fset = token.NewFileSet()
 	p.expr = expr
-	file, err := parser.ParseFile(p.fset, filename, src, 0)
+	mode := parser.AllErrors
+	if astViewShowTodo {
+		mode |= parser.ParseComments
+	}
+	file, err := parser.ParseFile(p.fset, filename, src, mode)
 	if err != nil {
 		return nil, err
 	}
@@ -200,10 +218,11 @@ func NewFilePackageSource(filename string, f *os.File, expr bool) (*PackageView,
 func (p *PackageView) printFuncsHelper(w io.Writer, funcs []*FuncDoc, level int, tag string, tag_folder string) {
 	for _, f := range funcs {
 		pos := p.fset.Position(f.Decl.Pos())
+		end := p.fset.Position(f.Decl.End())
 		if p.expr {
-			fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag, f.Name, p.posText(pos), types.ExprString(f.Decl.Type))
+			fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag, f.Name, p.posText(pos, end), types.ExprString(f.Decl.Type))
 		} else {
-			fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, f.Name, p.posText(pos))
+			fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, f.Name, p.posText(pos, end))
 		}
 	}
 }
@@ -224,11 +243,12 @@ func (p *PackageView) PrintVars(w io.Writer, vars []*ValueDoc, level int, tag st
 	for _, s := range v.Decl.Specs {
 		if m, ok := s.(*ast.ValueSpec); ok {
 			pos := p.fset.Position(m.Pos())
+			end := p.fset.Position(m.End())
 			for i := 0; i < len(m.Names); i++ {
 				if p.expr && m.Type != nil {
-					fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag, m.Names[i], p.posText(pos), types.ExprString(m.Type))
+					fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag, m.Names[i], p.posText(pos, end), types.ExprString(m.Type))
 				} else {
-					fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, m.Names[i], p.posText(pos))
+					fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, m.Names[i], p.posText(pos, end))
 				}
 			}
 		}
@@ -248,7 +268,8 @@ func (p *PackageView) PrintTypes(w io.Writer, types []*TypeDoc, level int) {
 			tag = tag_struct
 		}
 		pos := p.fset.Position(d.Decl.Pos())
-		fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, d.Type.Name, p.posText(pos))
+		end := p.fset.Position(d.Decl.End())
+		fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, d.Type.Name, p.posText(pos, end))
 		p.printFuncsHelper(w, d.Funcs, level+1, tag_type_factor, "")
 		p.printFuncsHelper(w, d.Methods, level+1, tag_type_method, "")
 		p.PrintTypeFields(w, d.Decl, level+1)
@@ -270,10 +291,11 @@ func (p *PackageView) PrintTypeFields(w io.Writer, decl *ast.GenDecl, level int)
 		}
 		for _, m := range list.Names {
 			pos := p.fset.Position(m.Pos())
+			end := p.fset.Position(m.End())
 			if list.Type != nil {
-				fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag_type_value, m.Name, p.posText(pos), types.ExprString(list.Type))
+				fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag_type_value, m.Name, p.posText(pos, end), types.ExprString(list.Type))
 			} else {
-				fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag_type_value, m.Name, p.posText(pos))
+				fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag_type_value, m.Name, p.posText(pos, end))
 			}
 		}
 	}
@@ -284,7 +306,8 @@ func (p *PackageView) PrintTypeFields(w io.Writer, decl *ast.GenDecl, level int)
 		}
 		for _, m := range list.Names {
 			pos := p.fset.Position(m.Pos())
-			fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag_type_method, m.Name, p.posText(pos))
+			end := p.fset.Position(m.End())
+			fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag_type_method, m.Name, p.posText(pos, end))
 		}
 	}
 }
@@ -299,6 +322,19 @@ func (p *PackageView) PrintImports(w io.Writer, level int, tag, tag_folder strin
 		fmt.Fprintf(w, "%d,%s,%s\n", level, tag_folder, "Imports")
 		level++
 	}
+	var parentPkg *pkgutil.Package
+	if pkgutil.IsVendorExperiment() {
+		for filename, _ := range p.pkg.Files {
+			if !filepath.IsAbs(filename) {
+				name, err := filepath.Abs(filename)
+				if err == nil {
+					filename = name
+				}
+			}
+			parentPkg = pkgutil.ImportFile(filename)
+			break
+		}
+	}
 	for _, name := range p.pdoc.Imports {
 		vname := "\"" + name + "\""
 		var ps []string
@@ -306,10 +342,14 @@ func (p *PackageView) PrintImports(w io.Writer, level int, tag, tag_folder strin
 			for _, v := range file.Imports {
 				if v.Path.Value == vname {
 					pos := p.fset.Position(v.Pos())
-					ps = append(ps, p.posText(pos))
+					end := p.fset.Position(v.End())
+					ps = append(ps, p.posText(pos, end))
 				}
 			}
 		}
+		if parentPkg != nil {
+			name = pkgutil.VendoredImportPath(parentPkg, name)
+		}
 		fmt.Fprintf(w, "%d,%s,%s,%s\n", level, tag, name, strings.Join(ps, ";"))
 	}
 }
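pkgutil.IsVendorExperiment, pkgutil.ImportFile and pkgutil.VendoredImportPath come from this repository's pkgutil package. Roughly, under the Go 1.5 vendor experiment an import such as github.com/foo/bar that is satisfied from myproj/vendor/ is reported by its vendored path myproj/vendor/github.com/foo/bar. A simplified, hypothetical sketch of that mapping (made-up paths and helper, not the real pkgutil implementation):

package main

import (
	"fmt"
	"os"
	"path"
)

// vendoredImportPath is a simplified stand-in for the real lookup: it prefers
// <parent>/vendor/<ipath> when that directory exists under the GOPATH src root.
func vendoredImportPath(srcRoot, parentPkg, ipath string) string {
	candidate := path.Join(parentPkg, "vendor", ipath)
	if _, err := os.Stat(path.Join(srcRoot, candidate)); err == nil {
		return candidate
	}
	return ipath
}

func main() {
	// With /go/src/myproj/vendor/github.com/foo/bar present this prints the vendored path.
	fmt.Println(vendoredImportPath("/go/src", "myproj", "github.com/foo/bar"))
}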
@@ -330,6 +370,27 @@ func (p *PackageView) PrintFuncs(w io.Writer, level int, tag_folder string) {
 	p.printFuncsHelper(w, p.pdoc.Funcs, level, tag_func, tag_func_folder)
 }
 
+func (p *PackageView) PrintTodos(w io.Writer, level int, tag, tag_folder string) {
+	hasFolder := false
+	if len(p.pdoc.Todos) > 0 {
+		hasFolder = true
+	}
+	if !hasFolder {
+		return
+	}
+	if len(tag_folder) > 0 {
+		fmt.Fprintf(w, "%d,%s,TodoList\n", level, tag_folder)
+		level++
+	}
+	for _, todo := range p.pdoc.Todos {
+		c := todo.Comments.List[0]
+		pos := p.fset.Position(c.Pos())
+		end := p.fset.Position(c.End())
+		ps := p.posText(pos, end)
+		fmt.Fprintf(w, "%d,%s,%s,%s@%s\n", level, tag, todo.Tag, ps, todo.Text)
+	}
+}
+
 func (p *PackageView) PrintPackage(w io.Writer, level int) {
 	p.PrintHeader(w, level)
 	level++
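Each TODO record is emitted as level,tag,marker,pos@text, so with -todo and -end a TODO comment on line 42 would produce something like the line below. The values are hypothetical, chosen only to show the shape of the output:

package main

import "fmt"

func main() {
	// Hypothetical values: level 1, tag "b", marker "TODO", pos "0:42:1:42:18".
	fmt.Printf("%d,%s,%s,%s@%s\n", 1, "b", "TODO", "0:42:1:42:18", "TODO: fix vendored import paths")
	// Output: 1,b,TODO,0:42:1:42:18@TODO: fix vendored import paths
}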
@@ -338,6 +399,7 @@ func (p *PackageView) PrintPackage(w io.Writer, level int) {
 	p.PrintVars(w, p.pdoc.Consts, level, tag_const, tag_const_folder)
 	p.PrintFuncs(w, level, tag_func_folder)
 	p.PrintTypes(w, p.pdoc.Types, level)
+	p.PrintTodos(w, level, tag_todo, tag_todo_folder)
 }
 
 // level,tag,pos@info
@@ -35,6 +35,7 @@ import (
 	"go/parser"
 	"go/printer"
 	"go/token"
+	"go/types"
 	"os"
 	"path"
 	"path/filepath"
@@ -43,8 +44,6 @@ import (
 	"strings"
 
 	"github.com/visualfc/gotools/command"
-	_ "golang.org/x/tools/go/gcimporter"
-	"golang.org/x/tools/go/types"
 )
 
 const usageDoc = `Find documentation for names.
@ -1,394 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package goimports
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/build"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/visualfc/gotools/stdlib"
|
|
||||||
|
|
||||||
"golang.org/x/tools/go/ast/astutil"
|
|
||||||
)
|
|
||||||
|
|
||||||
// importToGroup is a list of functions which map from an import path to
|
|
||||||
// a group number.
|
|
||||||
var importToGroup = []func(importPath string) (num int, ok bool){
|
|
||||||
func(importPath string) (num int, ok bool) {
|
|
||||||
if strings.HasPrefix(importPath, "appengine") {
|
|
||||||
return 2, true
|
|
||||||
}
|
|
||||||
return
|
|
||||||
},
|
|
||||||
func(importPath string) (num int, ok bool) {
|
|
||||||
if strings.Contains(importPath, ".") {
|
|
||||||
return 1, true
|
|
||||||
}
|
|
||||||
return
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func importGroup(importPath string) int {
|
|
||||||
for _, fn := range importToGroup {
|
|
||||||
if n, ok := fn(importPath); ok {
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func fixImports(fset *token.FileSet, f *ast.File) (added []string, err error) {
|
|
||||||
// refs are a set of possible package references currently unsatisfied by imports.
|
|
||||||
// first key: either base package (e.g. "fmt") or renamed package
|
|
||||||
// second key: referenced package symbol (e.g. "Println")
|
|
||||||
refs := make(map[string]map[string]bool)
|
|
||||||
|
|
||||||
// decls are the current package imports. key is base package or renamed package.
|
|
||||||
decls := make(map[string]*ast.ImportSpec)
|
|
||||||
|
|
||||||
// collect potential uses of packages.
|
|
||||||
var visitor visitFn
|
|
||||||
visitor = visitFn(func(node ast.Node) ast.Visitor {
|
|
||||||
if node == nil {
|
|
||||||
return visitor
|
|
||||||
}
|
|
||||||
switch v := node.(type) {
|
|
||||||
case *ast.ImportSpec:
|
|
||||||
if v.Name != nil {
|
|
||||||
decls[v.Name.Name] = v
|
|
||||||
} else {
|
|
||||||
local := importPathToName(strings.Trim(v.Path.Value, `\"`))
|
|
||||||
decls[local] = v
|
|
||||||
}
|
|
||||||
case *ast.SelectorExpr:
|
|
||||||
xident, ok := v.X.(*ast.Ident)
|
|
||||||
if !ok {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if xident.Obj != nil {
|
|
||||||
// if the parser can resolve it, it's not a package ref
|
|
||||||
break
|
|
||||||
}
|
|
||||||
pkgName := xident.Name
|
|
||||||
if refs[pkgName] == nil {
|
|
||||||
refs[pkgName] = make(map[string]bool)
|
|
||||||
}
|
|
||||||
if decls[pkgName] == nil {
|
|
||||||
refs[pkgName][v.Sel.Name] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return visitor
|
|
||||||
})
|
|
||||||
ast.Walk(visitor, f)
|
|
||||||
|
|
||||||
// Search for imports matching potential package references.
|
|
||||||
searches := 0
|
|
||||||
type result struct {
|
|
||||||
ipath string
|
|
||||||
name string
|
|
||||||
err error
|
|
||||||
}
|
|
||||||
results := make(chan result)
|
|
||||||
for pkgName, symbols := range refs {
|
|
||||||
if len(symbols) == 0 {
|
|
||||||
continue // skip over packages already imported
|
|
||||||
}
|
|
||||||
go func(pkgName string, symbols map[string]bool) {
|
|
||||||
ipath, rename, err := findImport(pkgName, symbols)
|
|
||||||
r := result{ipath: ipath, err: err}
|
|
||||||
if rename {
|
|
||||||
r.name = pkgName
|
|
||||||
}
|
|
||||||
results <- r
|
|
||||||
}(pkgName, symbols)
|
|
||||||
searches++
|
|
||||||
}
|
|
||||||
for i := 0; i < searches; i++ {
|
|
||||||
result := <-results
|
|
||||||
if result.err != nil {
|
|
||||||
return nil, result.err
|
|
||||||
}
|
|
||||||
if result.ipath != "" {
|
|
||||||
if result.name != "" {
|
|
||||||
astutil.AddNamedImport(fset, f, result.name, result.ipath)
|
|
||||||
} else {
|
|
||||||
astutil.AddImport(fset, f, result.ipath)
|
|
||||||
}
|
|
||||||
added = append(added, result.ipath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Nil out any unused ImportSpecs, to be removed in following passes
|
|
||||||
unusedImport := map[string]bool{}
|
|
||||||
for pkg, is := range decls {
|
|
||||||
if refs[pkg] == nil && pkg != "_" && pkg != "." {
|
|
||||||
unusedImport[strings.Trim(is.Path.Value, `"`)] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for ipath := range unusedImport {
|
|
||||||
if ipath == "C" {
|
|
||||||
// Don't remove cgo stuff.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
astutil.DeleteImport(fset, f, ipath)
|
|
||||||
}
|
|
||||||
|
|
||||||
return added, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// importPathToName returns the package name for the given import path.
|
|
||||||
var importPathToName = importPathToNameGoPath
|
|
||||||
|
|
||||||
// importPathToNameBasic assumes the package name is the base of import path.
|
|
||||||
func importPathToNameBasic(importPath string) (packageName string) {
|
|
||||||
return path.Base(importPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// importPathToNameGoPath finds out the actual package name, as declared in its .go files.
|
|
||||||
// If there's a problem, it falls back to using importPathToNameBasic.
|
|
||||||
func importPathToNameGoPath(importPath string) (packageName string) {
|
|
||||||
if stdlib.IsStdPkg(importPath) {
|
|
||||||
return path.Base(importPath)
|
|
||||||
}
|
|
||||||
if buildPkg, err := build.Import(importPath, "", 0); err == nil {
|
|
||||||
return buildPkg.Name
|
|
||||||
} else {
|
|
||||||
return importPathToNameBasic(importPath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type pkg struct {
|
|
||||||
importpath string // full pkg import path, e.g. "net/http"
|
|
||||||
dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
|
|
||||||
}
|
|
||||||
|
|
||||||
var pkgIndexOnce sync.Once
|
|
||||||
|
|
||||||
var pkgIndex struct {
|
|
||||||
sync.Mutex
|
|
||||||
m map[string][]pkg // shortname => []pkg, e.g "http" => "net/http"
|
|
||||||
}
|
|
||||||
|
|
||||||
// gate is a semaphore for limiting concurrency.
|
|
||||||
type gate chan struct{}
|
|
||||||
|
|
||||||
func (g gate) enter() { g <- struct{}{} }
|
|
||||||
func (g gate) leave() { <-g }
|
|
||||||
|
|
||||||
// fsgate protects the OS & filesystem from too much concurrency.
|
|
||||||
// Too much disk I/O -> too many threads -> swapping and bad scheduling.
|
|
||||||
var fsgate = make(gate, 8)
|
|
||||||
|
|
||||||
func loadPkgIndex() {
|
|
||||||
pkgIndex.Lock()
|
|
||||||
pkgIndex.m = make(map[string][]pkg)
|
|
||||||
pkgIndex.Unlock()
|
|
||||||
|
|
||||||
var wg sync.WaitGroup
|
|
||||||
for _, path := range build.Default.SrcDirs() {
|
|
||||||
fsgate.enter()
|
|
||||||
f, err := os.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
fsgate.leave()
|
|
||||||
fmt.Fprint(os.Stderr, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
children, err := f.Readdir(-1)
|
|
||||||
f.Close()
|
|
||||||
fsgate.leave()
|
|
||||||
if err != nil {
|
|
||||||
fmt.Fprint(os.Stderr, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
for _, child := range children {
|
|
||||||
if child.IsDir() {
|
|
||||||
wg.Add(1)
|
|
||||||
go func(path, name string) {
|
|
||||||
defer wg.Done()
|
|
||||||
loadPkg(&wg, path, name)
|
|
||||||
}(path, child.Name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
wg.Wait()
|
|
||||||
}
|
|
||||||
|
|
||||||
func loadPkg(wg *sync.WaitGroup, root, pkgrelpath string) {
|
|
||||||
importpath := filepath.ToSlash(pkgrelpath)
|
|
||||||
dir := filepath.Join(root, importpath)
|
|
||||||
|
|
||||||
fsgate.enter()
|
|
||||||
defer fsgate.leave()
|
|
||||||
pkgDir, err := os.Open(dir)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
children, err := pkgDir.Readdir(-1)
|
|
||||||
pkgDir.Close()
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// hasGo tracks whether a directory actually appears to be a
|
|
||||||
// Go source code directory. If $GOPATH == $HOME, and
|
|
||||||
// $HOME/src has lots of other large non-Go projects in it,
|
|
||||||
// then the calls to importPathToName below can be expensive.
|
|
||||||
hasGo := false
|
|
||||||
for _, child := range children {
|
|
||||||
name := child.Name()
|
|
||||||
if name == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(name, ".go") {
|
|
||||||
hasGo = true
|
|
||||||
}
|
|
||||||
if child.IsDir() {
|
|
||||||
wg.Add(1)
|
|
||||||
go func(root, name string) {
|
|
||||||
defer wg.Done()
|
|
||||||
loadPkg(wg, root, name)
|
|
||||||
}(root, filepath.Join(importpath, name))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if hasGo {
|
|
||||||
shortName := importPathToName(importpath)
|
|
||||||
pkgIndex.Lock()
|
|
||||||
pkgIndex.m[shortName] = append(pkgIndex.m[shortName], pkg{
|
|
||||||
importpath: importpath,
|
|
||||||
dir: dir,
|
|
||||||
})
|
|
||||||
pkgIndex.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
// loadExports returns a list exports for a package.
|
|
||||||
var loadExports = loadExportsGoPath
|
|
||||||
|
|
||||||
func loadExportsGoPath(dir string) map[string]bool {
|
|
||||||
exports := make(map[string]bool)
|
|
||||||
buildPkg, err := build.ImportDir(dir, 0)
|
|
||||||
if err != nil {
|
|
||||||
if strings.Contains(err.Error(), "no buildable Go source files in") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
fmt.Fprintf(os.Stderr, "could not import %q: %v\n", dir, err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
fset := token.NewFileSet()
|
|
||||||
for _, files := range [...][]string{buildPkg.GoFiles, buildPkg.CgoFiles} {
|
|
||||||
for _, file := range files {
|
|
||||||
f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Fprintf(os.Stderr, "could not parse %q: %v\n", file, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
for name := range f.Scope.Objects {
|
|
||||||
if ast.IsExported(name) {
|
|
||||||
exports[name] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return exports
|
|
||||||
}
|
|
||||||
|
|
||||||
// findImport searches for a package with the given symbols.
|
|
||||||
// If no package is found, findImport returns "".
|
|
||||||
// Declared as a variable rather than a function so goimports can be easily
|
|
||||||
// extended by adding a file with an init function.
|
|
||||||
var findImport = findImportGoPath
|
|
||||||
|
|
||||||
func findImportGoPath(pkgName string, symbols map[string]bool) (string, bool, error) {
|
|
||||||
// Fast path for the standard library.
|
|
||||||
// In the common case we hopefully never have to scan the GOPATH, which can
|
|
||||||
// be slow with moving disks.
|
|
||||||
if pkg, rename, ok := findImportStdlib(pkgName, symbols); ok {
|
|
||||||
return pkg, rename, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(sameer): look at the import lines for other Go files in the
|
|
||||||
// local directory, since the user is likely to import the same packages
|
|
||||||
// in the current Go file. Return rename=true when the other Go files
|
|
||||||
// use a renamed package that's also used in the current file.
|
|
||||||
|
|
||||||
pkgIndexOnce.Do(loadPkgIndex)
|
|
||||||
|
|
||||||
// Collect exports for packages with matching names.
|
|
||||||
var wg sync.WaitGroup
|
|
||||||
var pkgsMu sync.Mutex // guards pkgs
|
|
||||||
// full importpath => exported symbol => True
|
|
||||||
// e.g. "net/http" => "Client" => True
|
|
||||||
pkgs := make(map[string]map[string]bool)
|
|
||||||
pkgIndex.Lock()
|
|
||||||
for _, pkg := range pkgIndex.m[pkgName] {
|
|
||||||
wg.Add(1)
|
|
||||||
go func(importpath, dir string) {
|
|
||||||
defer wg.Done()
|
|
||||||
exports := loadExports(dir)
|
|
||||||
if exports != nil {
|
|
||||||
pkgsMu.Lock()
|
|
||||||
pkgs[importpath] = exports
|
|
||||||
pkgsMu.Unlock()
|
|
||||||
}
|
|
||||||
}(pkg.importpath, pkg.dir)
|
|
||||||
}
|
|
||||||
pkgIndex.Unlock()
|
|
||||||
wg.Wait()
|
|
||||||
|
|
||||||
// Filter out packages missing required exported symbols.
|
|
||||||
for symbol := range symbols {
|
|
||||||
for importpath, exports := range pkgs {
|
|
||||||
if !exports[symbol] {
|
|
||||||
delete(pkgs, importpath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(pkgs) == 0 {
|
|
||||||
return "", false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there are multiple candidate packages, the shortest one wins.
|
|
||||||
// This is a heuristic to prefer the standard library (e.g. "bytes")
|
|
||||||
// over e.g. "github.com/foo/bar/bytes".
|
|
||||||
shortest := ""
|
|
||||||
for importPath := range pkgs {
|
|
||||||
if shortest == "" || len(importPath) < len(shortest) {
|
|
||||||
shortest = importPath
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return shortest, false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type visitFn func(node ast.Node) ast.Visitor
|
|
||||||
|
|
||||||
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
|
|
||||||
return fn(node)
|
|
||||||
}
|
|
||||||
|
|
||||||
func findImportStdlib(shortPkg string, symbols map[string]bool) (importPath string, rename, ok bool) {
|
|
||||||
for symbol := range symbols {
|
|
||||||
path := stdlib.Symbols[shortPkg+"."+symbol]
|
|
||||||
if path == "" {
|
|
||||||
return "", false, false
|
|
||||||
}
|
|
||||||
if importPath != "" && importPath != path {
|
|
||||||
// Ambiguous. Symbols pointed to different things.
|
|
||||||
return "", false, false
|
|
||||||
}
|
|
||||||
importPath = path
|
|
||||||
}
|
|
||||||
return importPath, false, importPath != ""
|
|
||||||
}
|
|
|
@ -1,206 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package goimports
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"go/parser"
|
|
||||||
"go/printer"
|
|
||||||
"go/scanner"
|
|
||||||
"go/token"
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/visualfc/gotools/command"
|
|
||||||
)
|
|
||||||
|
|
||||||
var Command = &command.Command{
|
|
||||||
Run: runGoimports,
|
|
||||||
UsageLine: "goimports [flags] [path ...]",
|
|
||||||
Short: "updates go import lines",
|
|
||||||
Long: `goimports updates your Go import lines, adding missing ones and removing unreferenced ones. `,
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
goimportsList bool
|
|
||||||
goimportsWrite bool
|
|
||||||
goimportsDiff bool
|
|
||||||
goimportsAllErrors bool
|
|
||||||
|
|
||||||
// layout control
|
|
||||||
goimportsComments bool
|
|
||||||
goimportsTabWidth int
|
|
||||||
goimportsTabIndent bool
|
|
||||||
)
|
|
||||||
|
|
||||||
//func init
|
|
||||||
func init() {
|
|
||||||
Command.Flag.BoolVar(&goimportsList, "l", false, "list files whose formatting differs from goimport's")
|
|
||||||
Command.Flag.BoolVar(&goimportsWrite, "w", false, "write result to (source) file instead of stdout")
|
|
||||||
Command.Flag.BoolVar(&goimportsDiff, "d", false, "display diffs instead of rewriting files")
|
|
||||||
Command.Flag.BoolVar(&goimportsAllErrors, "e", false, "report all errors (not just the first 10 on different lines)")
|
|
||||||
|
|
||||||
// layout control
|
|
||||||
Command.Flag.BoolVar(&goimportsComments, "comments", true, "print comments")
|
|
||||||
Command.Flag.IntVar(&goimportsTabWidth, "tabwidth", 8, "tab width")
|
|
||||||
Command.Flag.BoolVar(&goimportsTabIndent, "tabs", true, "indent with tabs")
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
fileSet = token.NewFileSet() // per process FileSet
|
|
||||||
exitCode = 0
|
|
||||||
|
|
||||||
initModesOnce sync.Once // guards calling initModes
|
|
||||||
parserMode parser.Mode
|
|
||||||
printerMode printer.Mode
|
|
||||||
options *Options
|
|
||||||
)
|
|
||||||
|
|
||||||
func report(err error) {
|
|
||||||
scanner.PrintError(os.Stderr, err)
|
|
||||||
exitCode = 2
|
|
||||||
}
|
|
||||||
|
|
||||||
func runGoimports(cmd *command.Command, args []string) error {
|
|
||||||
runtime.GOMAXPROCS(runtime.NumCPU())
|
|
||||||
|
|
||||||
if goimportsTabWidth < 0 {
|
|
||||||
fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", goimportsTabWidth)
|
|
||||||
exitCode = 2
|
|
||||||
os.Exit(exitCode)
|
|
||||||
return os.ErrInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
options = &Options{
|
|
||||||
TabWidth: goimportsTabWidth,
|
|
||||||
TabIndent: goimportsTabIndent,
|
|
||||||
Comments: goimportsComments,
|
|
||||||
AllErrors: goimportsAllErrors,
|
|
||||||
Fragment: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(args) == 0 {
|
|
||||||
if err := processFile("<standard input>", os.Stdin, os.Stdout, true); err != nil {
|
|
||||||
report(err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for _, path := range args {
|
|
||||||
switch dir, err := os.Stat(path); {
|
|
||||||
case err != nil:
|
|
||||||
report(err)
|
|
||||||
case dir.IsDir():
|
|
||||||
walkDir(path)
|
|
||||||
default:
|
|
||||||
if err := processFile(path, nil, os.Stdout, false); err != nil {
|
|
||||||
report(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
os.Exit(exitCode)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func isGoFile(f os.FileInfo) bool {
|
|
||||||
// ignore non-Go files
|
|
||||||
name := f.Name()
|
|
||||||
return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
|
|
||||||
}
|
|
||||||
|
|
||||||
func processFile(filename string, in io.Reader, out io.Writer, stdin bool) error {
|
|
||||||
if in == nil {
|
|
||||||
f, err := os.Open(filename)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
in = f
|
|
||||||
}
|
|
||||||
|
|
||||||
src, err := ioutil.ReadAll(in)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := Process(filename, src, options)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !bytes.Equal(src, res) {
|
|
||||||
// formatting has changed
|
|
||||||
if goimportsList {
|
|
||||||
fmt.Fprintln(out, filename)
|
|
||||||
}
|
|
||||||
if goimportsWrite {
|
|
||||||
err = ioutil.WriteFile(filename, res, 0)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if goimportsDiff {
|
|
||||||
data, err := diff(src, res)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("computing diff: %s", err)
|
|
||||||
}
|
|
||||||
fmt.Printf("diff %s gofmt/%s\n", filename, filename)
|
|
||||||
out.Write(data)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !goimportsList && !goimportsWrite && !goimportsDiff {
|
|
||||||
_, err = out.Write(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func visitFile(path string, f os.FileInfo, err error) error {
|
|
||||||
if err == nil && isGoFile(f) {
|
|
||||||
err = processFile(path, nil, os.Stdout, false)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
report(err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func walkDir(path string) {
|
|
||||||
filepath.Walk(path, visitFile)
|
|
||||||
}
|
|
||||||
|
|
||||||
func diff(b1, b2 []byte) (data []byte, err error) {
|
|
||||||
f1, err := ioutil.TempFile("", "gofmt")
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer os.Remove(f1.Name())
|
|
||||||
defer f1.Close()
|
|
||||||
|
|
||||||
f2, err := ioutil.TempFile("", "gofmt")
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer os.Remove(f2.Name())
|
|
||||||
defer f2.Close()
|
|
||||||
|
|
||||||
f1.Write(b1)
|
|
||||||
f2.Write(b2)
|
|
||||||
|
|
||||||
data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput()
|
|
||||||
if len(data) > 0 {
|
|
||||||
// diff exits with a non-zero status when the files don't match.
|
|
||||||
// Ignore that failure as long as we get output.
|
|
||||||
err = nil
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
|
@ -1,281 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Package imports implements a Go pretty-printer (like package "go/format")
|
|
||||||
// that also adds or removes import statements as necessary.
|
|
||||||
package goimports
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/format"
|
|
||||||
"go/parser"
|
|
||||||
"go/printer"
|
|
||||||
"go/token"
|
|
||||||
"io"
|
|
||||||
"regexp"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/tools/go/ast/astutil"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Options specifies options for processing files.
|
|
||||||
type Options struct {
|
|
||||||
Fragment bool // Accept fragment of a source file (no package statement)
|
|
||||||
AllErrors bool // Report all errors (not just the first 10 on different lines)
|
|
||||||
|
|
||||||
Comments bool // Print comments (true if nil *Options provided)
|
|
||||||
TabIndent bool // Use tabs for indent (true if nil *Options provided)
|
|
||||||
Format bool
|
|
||||||
TabWidth int // Tab width (8 if nil *Options provided)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process formats and adjusts imports for the provided file.
|
|
||||||
// If opt is nil the defaults are used.
|
|
||||||
func Process(filename string, src []byte, opt *Options) ([]byte, error) {
|
|
||||||
if opt == nil {
|
|
||||||
opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
|
|
||||||
}
|
|
||||||
|
|
||||||
fileSet := token.NewFileSet()
|
|
||||||
file, adjust, err := goImportParse(fileSet, filename, src, opt)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = fixImports(fileSet, file)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
sortImports(fileSet, file)
|
|
||||||
imps := astutil.Imports(fileSet, file)
|
|
||||||
|
|
||||||
var spacesBefore []string // import paths we need spaces before
|
|
||||||
for _, impSection := range imps {
|
|
||||||
// Within each block of contiguous imports, see if any
|
|
||||||
// import lines are in different group numbers. If so,
|
|
||||||
// we'll need to put a space between them so it's
|
|
||||||
// compatible with gofmt.
|
|
||||||
lastGroup := -1
|
|
||||||
for _, importSpec := range impSection {
|
|
||||||
importPath, _ := strconv.Unquote(importSpec.Path.Value)
|
|
||||||
groupNum := importGroup(importPath)
|
|
||||||
if groupNum != lastGroup && lastGroup != -1 {
|
|
||||||
spacesBefore = append(spacesBefore, importPath)
|
|
||||||
}
|
|
||||||
lastGroup = groupNum
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
printerMode := printer.UseSpaces
|
|
||||||
if opt.TabIndent {
|
|
||||||
printerMode |= printer.TabIndent
|
|
||||||
}
|
|
||||||
printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
|
|
||||||
|
|
||||||
var buf bytes.Buffer
|
|
||||||
err = printConfig.Fprint(&buf, fileSet, file)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
out := buf.Bytes()
|
|
||||||
if adjust != nil {
|
|
||||||
out = adjust(src, out)
|
|
||||||
}
|
|
||||||
if len(spacesBefore) > 0 {
|
|
||||||
out = addImportSpaces(bytes.NewReader(out), spacesBefore)
|
|
||||||
}
|
|
||||||
if opt.Format {
|
|
||||||
out, err = format.Source(out)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// parse parses src, which was read from filename,
|
|
||||||
// as a Go source file or statement list.
|
|
||||||
func goImportParse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
|
|
||||||
parserMode := parser.Mode(0)
|
|
||||||
if opt.Comments {
|
|
||||||
parserMode |= parser.ParseComments
|
|
||||||
}
|
|
||||||
if opt.AllErrors {
|
|
||||||
parserMode |= parser.AllErrors
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try as whole source file.
|
|
||||||
file, err := parser.ParseFile(fset, filename, src, parserMode)
|
|
||||||
if err == nil {
|
|
||||||
return file, nil, nil
|
|
||||||
}
|
|
||||||
// If the error is that the source file didn't begin with a
|
|
||||||
// package line and we accept fragmented input, fall through to
|
|
||||||
// try as a source fragment. Stop and return on any other error.
|
|
||||||
if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// If this is a declaration list, make it a source file
|
|
||||||
// by inserting a package clause.
|
|
||||||
// Insert using a ;, not a newline, so that the line numbers
|
|
||||||
// in psrc match the ones in src.
|
|
||||||
psrc := append([]byte("package main;"), src...)
|
|
||||||
file, err = parser.ParseFile(fset, filename, psrc, parserMode)
|
|
||||||
if err == nil {
|
|
||||||
// If a main function exists, we will assume this is a main
|
|
||||||
// package and leave the file.
|
|
||||||
if containsMainFunc(file) {
|
|
||||||
return file, nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
adjust := func(orig, src []byte) []byte {
|
|
||||||
// Remove the package clause.
|
|
||||||
// Gofmt has turned the ; into a \n.
|
|
||||||
src = src[len("package main\n"):]
|
|
||||||
return matchSpace(orig, src)
|
|
||||||
}
|
|
||||||
return file, adjust, nil
|
|
||||||
}
|
|
||||||
// If the error is that the source file didn't begin with a
|
|
||||||
// declaration, fall through to try as a statement list.
|
|
||||||
// Stop and return on any other error.
|
|
||||||
if !strings.Contains(err.Error(), "expected declaration") {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// If this is a statement list, make it a source file
|
|
||||||
// by inserting a package clause and turning the list
|
|
||||||
// into a function body. This handles expressions too.
|
|
||||||
// Insert using a ;, not a newline, so that the line numbers
|
|
||||||
// in fsrc match the ones in src.
|
|
||||||
fsrc := append(append([]byte("package p; func _() {"), src...), '}')
|
|
||||||
file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
|
|
||||||
if err == nil {
|
|
||||||
adjust := func(orig, src []byte) []byte {
|
|
||||||
// Remove the wrapping.
|
|
||||||
// Gofmt has turned the ; into a \n\n.
|
|
||||||
src = src[len("package p\n\nfunc _() {"):]
|
|
||||||
src = src[:len(src)-len("}\n")]
|
|
||||||
// Gofmt has also indented the function body one level.
|
|
||||||
// Remove that indent.
|
|
||||||
src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
|
|
||||||
return matchSpace(orig, src)
|
|
||||||
}
|
|
||||||
return file, adjust, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Failed, and out of options.
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// containsMainFunc checks if a file contains a function declaration with the
|
|
||||||
// function signature 'func main()'
|
|
||||||
func containsMainFunc(file *ast.File) bool {
|
|
||||||
for _, decl := range file.Decls {
|
|
||||||
if f, ok := decl.(*ast.FuncDecl); ok {
|
|
||||||
if f.Name.Name != "main" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(f.Type.Params.List) != 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func cutSpace(b []byte) (before, middle, after []byte) {
|
|
||||||
i := 0
|
|
||||||
for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
j := len(b)
|
|
||||||
for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
|
|
||||||
j--
|
|
||||||
}
|
|
||||||
if i <= j {
|
|
||||||
return b[:i], b[i:j], b[j:]
|
|
||||||
}
|
|
||||||
return nil, nil, b[j:]
|
|
||||||
}
|
|
||||||
|
|
||||||
// matchSpace reformats src to use the same space context as orig.
|
|
||||||
// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
|
|
||||||
// 2) matchSpace copies the indentation of the first non-blank line in orig
|
|
||||||
// to every non-blank line in src.
|
|
||||||
// 3) matchSpace copies the trailing space from orig and uses it in place
|
|
||||||
// of src's trailing space.
|
|
||||||
func matchSpace(orig []byte, src []byte) []byte {
|
|
||||||
before, _, after := cutSpace(orig)
|
|
||||||
i := bytes.LastIndex(before, []byte{'\n'})
|
|
||||||
before, indent := before[:i+1], before[i+1:]
|
|
||||||
|
|
||||||
_, src, _ = cutSpace(src)
|
|
||||||
|
|
||||||
var b bytes.Buffer
|
|
||||||
b.Write(before)
|
|
||||||
for len(src) > 0 {
|
|
||||||
line := src
|
|
||||||
if i := bytes.IndexByte(line, '\n'); i >= 0 {
|
|
||||||
line, src = line[:i+1], line[i+1:]
|
|
||||||
} else {
|
|
||||||
src = nil
|
|
||||||
}
|
|
||||||
if len(line) > 0 && line[0] != '\n' { // not blank
|
|
||||||
b.Write(indent)
|
|
||||||
}
|
|
||||||
b.Write(line)
|
|
||||||
}
|
|
||||||
b.Write(after)
|
|
||||||
return b.Bytes()
|
|
||||||
}
|
|
||||||
|
|
||||||
var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
|
|
||||||
|
|
||||||
func addImportSpaces(r io.Reader, breaks []string) []byte {
|
|
||||||
var out bytes.Buffer
|
|
||||||
sc := bufio.NewScanner(r)
|
|
||||||
inImports := false
|
|
||||||
done := false
|
|
||||||
for sc.Scan() {
|
|
||||||
s := sc.Text()
|
|
||||||
|
|
||||||
if !inImports && !done && strings.HasPrefix(s, "import") {
|
|
||||||
inImports = true
|
|
||||||
}
|
|
||||||
if inImports && (strings.HasPrefix(s, "var") ||
|
|
||||||
strings.HasPrefix(s, "func") ||
|
|
||||||
strings.HasPrefix(s, "const") ||
|
|
||||||
strings.HasPrefix(s, "type")) {
|
|
||||||
done = true
|
|
||||||
inImports = false
|
|
||||||
}
|
|
||||||
if inImports && len(breaks) > 0 {
|
|
||||||
if m := impLine.FindStringSubmatch(s); m != nil {
|
|
||||||
if m[1] == string(breaks[0]) {
|
|
||||||
out.WriteByte('\n')
|
|
||||||
breaks = breaks[1:]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Fprintln(&out, s)
|
|
||||||
}
|
|
||||||
return out.Bytes()
|
|
||||||
}
|
|
|
@ -1,214 +0,0 @@
|
||||||
// +build go1.2
|
|
||||||
|
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// Hacked up copy of go/ast/import.go
|
|
||||||
|
|
||||||
package goimports
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/ast"
|
|
||||||
"go/token"
|
|
||||||
"sort"
|
|
||||||
"strconv"
|
|
||||||
)
|
|
||||||
|
|
||||||
// sortImports sorts runs of consecutive import lines in import blocks in f.
|
|
||||||
// It also removes duplicate imports when it is possible to do so without data loss.
|
|
||||||
func sortImports(fset *token.FileSet, f *ast.File) {
|
|
||||||
for i, d := range f.Decls {
|
|
||||||
d, ok := d.(*ast.GenDecl)
|
|
||||||
if !ok || d.Tok != token.IMPORT {
|
|
||||||
// Not an import declaration, so we're done.
|
|
||||||
// Imports are always first.
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(d.Specs) == 0 {
|
|
||||||
// Empty import block, remove it.
|
|
||||||
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !d.Lparen.IsValid() {
|
|
||||||
// Not a block: sorted by default.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Identify and sort runs of specs on successive lines.
|
|
||||||
i := 0
|
|
||||||
specs := d.Specs[:0]
|
|
||||||
for j, s := range d.Specs {
|
|
||||||
if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
|
|
||||||
// j begins a new run. End this one.
|
|
||||||
specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
|
|
||||||
i = j
|
|
||||||
}
|
|
||||||
}
|
|
||||||
specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
|
|
||||||
d.Specs = specs
|
|
||||||
|
|
||||||
// Deduping can leave a blank line before the rparen; clean that up.
|
|
||||||
if len(d.Specs) > 0 {
|
|
||||||
lastSpec := d.Specs[len(d.Specs)-1]
|
|
||||||
lastLine := fset.Position(lastSpec.Pos()).Line
|
|
||||||
if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
|
|
||||||
fset.File(d.Rparen).MergeLine(rParenLine - 1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func importPath(s ast.Spec) string {
|
|
||||||
t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
|
|
||||||
if err == nil {
|
|
||||||
return t
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func importName(s ast.Spec) string {
|
|
||||||
n := s.(*ast.ImportSpec).Name
|
|
||||||
if n == nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return n.Name
|
|
||||||
}
|
|
||||||
|
|
||||||
func importComment(s ast.Spec) string {
|
|
||||||
c := s.(*ast.ImportSpec).Comment
|
|
||||||
if c == nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return c.Text()
|
|
||||||
}
|
|
||||||
|
|
||||||
// collapse indicates whether prev may be removed, leaving only next.
|
|
||||||
func collapse(prev, next ast.Spec) bool {
|
|
||||||
if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return prev.(*ast.ImportSpec).Comment == nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type posSpan struct {
|
|
||||||
Start token.Pos
|
|
||||||
End token.Pos
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
|
|
||||||
// Can't short-circuit here even if specs are already sorted,
|
|
||||||
// since they might yet need deduplication.
|
|
||||||
// A lone import, however, may be safely ignored.
|
|
||||||
if len(specs) <= 1 {
|
|
||||||
return specs
|
|
||||||
}
|
|
||||||
|
|
||||||
// Record positions for specs.
|
|
||||||
pos := make([]posSpan, len(specs))
|
|
||||||
for i, s := range specs {
|
|
||||||
pos[i] = posSpan{s.Pos(), s.End()}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Identify comments in this range.
|
|
||||||
// Any comment from pos[0].Start to the final line counts.
|
|
||||||
lastLine := fset.Position(pos[len(pos)-1].End).Line
|
|
||||||
cstart := len(f.Comments)
|
|
||||||
cend := len(f.Comments)
|
|
||||||
for i, g := range f.Comments {
|
|
||||||
if g.Pos() < pos[0].Start {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if i < cstart {
|
|
||||||
cstart = i
|
|
||||||
}
|
|
||||||
if fset.Position(g.End()).Line > lastLine {
|
|
||||||
cend = i
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
comments := f.Comments[cstart:cend]
|
|
||||||
|
|
||||||
// Assign each comment to the import spec preceding it.
|
|
||||||
importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
|
|
||||||
specIndex := 0
|
|
||||||
for _, g := range comments {
|
|
||||||
for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
|
|
||||||
specIndex++
|
|
||||||
}
|
|
||||||
s := specs[specIndex].(*ast.ImportSpec)
|
|
||||||
importComment[s] = append(importComment[s], g)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort the import specs by import path.
|
|
||||||
// Remove duplicates, when possible without data loss.
|
|
||||||
// Reassign the import paths to have the same position sequence.
|
|
||||||
// Reassign each comment to abut the end of its spec.
|
|
||||||
// Sort the comments by new position.
|
|
||||||
sort.Sort(byImportSpec(specs))
|
|
||||||
|
|
||||||
// Dedup. Thanks to our sorting, we can just consider
|
|
||||||
// adjacent pairs of imports.
|
|
||||||
deduped := specs[:0]
|
|
||||||
for i, s := range specs {
|
|
||||||
if i == len(specs)-1 || !collapse(s, specs[i+1]) {
|
|
||||||
deduped = append(deduped, s)
|
|
||||||
} else {
|
|
||||||
p := s.Pos()
|
|
||||||
fset.File(p).MergeLine(fset.Position(p).Line)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
specs = deduped
|
|
||||||
|
|
||||||
// Fix up comment positions
|
|
||||||
for i, s := range specs {
|
|
||||||
s := s.(*ast.ImportSpec)
|
|
||||||
if s.Name != nil {
|
|
||||||
s.Name.NamePos = pos[i].Start
|
|
||||||
}
|
|
||||||
s.Path.ValuePos = pos[i].Start
|
|
||||||
s.EndPos = pos[i].End
|
|
||||||
for _, g := range importComment[s] {
|
|
||||||
for _, c := range g.List {
|
|
||||||
c.Slash = pos[i].End
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(byCommentPos(comments))
|
|
||||||
|
|
||||||
return specs
|
|
||||||
}
|
|
||||||
|
|
||||||
type byImportSpec []ast.Spec // slice of *ast.ImportSpec
|
|
||||||
|
|
||||||
func (x byImportSpec) Len() int { return len(x) }
|
|
||||||
func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
func (x byImportSpec) Less(i, j int) bool {
|
|
||||||
ipath := importPath(x[i])
|
|
||||||
jpath := importPath(x[j])
|
|
||||||
|
|
||||||
igroup := importGroup(ipath)
|
|
||||||
jgroup := importGroup(jpath)
|
|
||||||
if igroup != jgroup {
|
|
||||||
return igroup < jgroup
|
|
||||||
}
|
|
||||||
|
|
||||||
if ipath != jpath {
|
|
||||||
return ipath < jpath
|
|
||||||
}
|
|
||||||
iname := importName(x[i])
|
|
||||||
jname := importName(x[j])
|
|
||||||
|
|
||||||
if iname != jname {
|
|
||||||
return iname < jname
|
|
||||||
}
|
|
||||||
return importComment(x[i]) < importComment(x[j])
|
|
||||||
}
|
|
||||||
|
|
||||||
type byCommentPos []*ast.CommentGroup
|
|
||||||
|
|
||||||
func (x byCommentPos) Len() int { return len(x) }
|
|
||||||
func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
|
|
|
@@ -1,14 +0,0 @@
-// +build !go1.2
-
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package goimports
-
-import "go/ast"
-
-// Go 1.1 users don't get fancy package grouping.
-// But this is still gofmt-compliant:
-
-var sortImports = ast.SortImports
@@ -7,13 +7,11 @@ package oracle
 import (
 	"fmt"
 	"go/build"
-	"log"
 	"os"
 	"runtime"
 
 	"github.com/visualfc/gotools/command"
-	"golang.org/x/tools/oracle"
+	"github.com/visualfc/gotools/oracle/oracle"
 )
 
 //The mode argument determines the query to perform:
@ -60,21 +58,21 @@ func runOracle(cmd *command.Command, args []string) error {
|
||||||
}
|
}
|
||||||
mode := args[0]
|
mode := args[0]
|
||||||
args = args[1:]
|
args = args[1:]
|
||||||
if args[0] == "." {
|
// if args[0] == "." {
|
||||||
pkgPath, err := os.Getwd()
|
// pkgPath, err := os.Getwd()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
log.Fatalln(err)
|
// log.Fatalln(err)
|
||||||
}
|
// }
|
||||||
pkg, err := build.Default.ImportDir(pkgPath, 0)
|
// pkg, err := build.Default.ImportDir(pkgPath, 0)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
log.Fatalln(err)
|
// log.Fatalln(err)
|
||||||
}
|
// }
|
||||||
args = pkg.GoFiles
|
// args = pkg.GoFiles
|
||||||
//log.Println(pkg.ImportPath)
|
// //log.Println(pkg.ImportPath)
|
||||||
if pkg.ImportPath != "." && pkg.ImportPath != "" {
|
// if pkg.ImportPath != "." && pkg.ImportPath != "" {
|
||||||
args = []string{pkg.ImportPath}
|
// args = []string{pkg.ImportPath}
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
query := oracle.Query{
|
query := oracle.Query{
|
||||||
Mode: mode,
|
Mode: mode,
|
||||||
Pos: oraclePos,
|
Pos: oraclePos,
|
||||||
|
|
|
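The block commented out above resolved a "." argument to the enclosing package before running the query. For reference, here is a standalone sketch of that lookup using the standard go/build package, assuming it is run from inside a Go package directory; error handling and output are kept minimal.

package main

import (
	"fmt"
	"go/build"
	"log"
	"os"
)

func main() {
	// Resolve the current directory to a *build.Package, exactly as the
	// disabled block did with build.Default.ImportDir(pkgPath, 0).
	pkgPath, err := os.Getwd()
	if err != nil {
		log.Fatalln(err)
	}
	pkg, err := build.Default.ImportDir(pkgPath, 0)
	if err != nil {
		log.Fatalln(err)
	}

	args := pkg.GoFiles // fall back to the individual source files
	if pkg.ImportPath != "" && pkg.ImportPath != "." {
		args = []string{pkg.ImportPath} // prefer the import path when known
	}
	fmt.Println(args)
}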
@@ -0,0 +1,83 @@

ORACLE TODO
===========

General
=======

Save unsaved editor buffers into an archive and provide that to the
tools, which should act as if they were saved.

Include complete pos/end information in Serial output.
But beware that sometimes a single token (e.g. +) is more helpful
than the pos/end of the containing expression (e.g. x \n + \n y).

Specific queries
================

callers, callees

Use a type-based (e.g. RTA) callgraph when a callers/callees query is
outside the analysis scope.

implements

Make it require that the selection is a type, and show only the
implements relation as it applies to that type.

definition, referrers

definition: Make it work with qualified identifiers (SelectorExpr) too.

references: Make it work on things that are implicit idents, like
import specs, perhaps?

what

Report def/ref info if available.
Editors could use it to highlight all idents of the same local var.

More tests.

pointsto

When invoked on a function Ident, we get an error.

When invoked on a named return parameter, we get an error.

describe

When invoked on a var, we want to see the type and its methods.

Split "show type" and "describe syntax" into separate commands?

peers

Permit querying from a makechan, for...range, or reflective op.

Report aliasing reflect.{Send,Recv,Close} and close() operations.

New queries

"updaters": show all statements that may update the selected lvalue
(local, global, field, etc).

"creators": show all places where an object of type T is created
(&T{}, var t T, new(T), new(struct{array [3]T}), etc.)
(Useful for datatypes whose zero value is not safe)


Editor-specific
===============

Add support for "what" to .el; clean up.

Emacs: use JSON to get the raw information from the oracle. Don't
open an editor buffer for simpler queries, just jump to the result
and/or display it in the modeline.

Emacs: go-root-and-paths depends on the current buffer, so be sure to
call it from within the source file, not the *go-oracle* buffer:
the user may have switched workspaces and the oracle should run in
the new one.
@@ -0,0 +1,260 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.5

package oracle

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"sort"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/pointer"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// Callees reports the possible callees of the function call site
// identified by the specified source location.
func callees(q *Query) error {
	lconf := loader.Config{Build: q.Build}

	if err := setPTAScope(&lconf, q.Scope); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}
	q.Fset = lprog.Fset

	qpos, err := parseQueryPos(lprog, q.Pos, true) // needs exact pos
	if err != nil {
		return err
	}

	// Determine the enclosing call for the specified position.
	var e *ast.CallExpr
	for _, n := range qpos.path {
		if e, _ = n.(*ast.CallExpr); e != nil {
			break
		}
	}
	if e == nil {
		return fmt.Errorf("there is no function call here")
	}
	// TODO(adonovan): issue an error if the call is "too far
	// away" from the current selection, as this most likely is
	// not what the user intended.

	// Reject type conversions.
	if qpos.info.Types[e.Fun].IsType() {
		return fmt.Errorf("this is a type conversion, not a function call")
	}

	// Deal with obviously static calls before constructing SSA form.
	// Some static calls may yet require SSA construction,
	// e.g. f := func(){}; f().
	switch funexpr := unparen(e.Fun).(type) {
	case *ast.Ident:
		switch obj := qpos.info.Uses[funexpr].(type) {
		case *types.Builtin:
			// Reject calls to built-ins.
			return fmt.Errorf("this is a call to the built-in '%s' operator", obj.Name())
		case *types.Func:
			// This is a static function call
			q.result = &calleesTypesResult{
				site:   e,
				callee: obj,
			}
			return nil
		}
	case *ast.SelectorExpr:
		sel := qpos.info.Selections[funexpr]
		if sel == nil {
			// qualified identifier.
			// May refer to top level function variable
			// or to top level function.
			callee := qpos.info.Uses[funexpr.Sel]
			if obj, ok := callee.(*types.Func); ok {
				q.result = &calleesTypesResult{
					site:   e,
					callee: obj,
				}
				return nil
			}
		} else if sel.Kind() == types.MethodVal {
			// Inspect the receiver type of the selected method.
			// If it is concrete, the call is statically dispatched.
			// (Due to implicit field selections, it is not enough to look
			// at sel.Recv(), the type of the actual receiver expression.)
			method := sel.Obj().(*types.Func)
			recvtype := method.Type().(*types.Signature).Recv().Type()
			if !types.IsInterface(recvtype) {
				// static method call
				q.result = &calleesTypesResult{
					site:   e,
					callee: method,
				}
				return nil
			}
		}
	}

	prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)

	ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
	if err != nil {
		return err
	}

	pkg := prog.Package(qpos.info.Pkg)
	if pkg == nil {
		return fmt.Errorf("no SSA package")
	}

	// Defer SSA construction till after errors are reported.
	prog.Build()

	// Ascertain calling function and call site.
	callerFn := ssa.EnclosingFunction(pkg, qpos.path)
	if callerFn == nil {
		return fmt.Errorf("no SSA function built for this location (dead code?)")
	}

	// Find the call site.
	site, err := findCallSite(callerFn, e)
	if err != nil {
		return err
	}

	funcs, err := findCallees(ptaConfig, site)
	if err != nil {
		return err
	}

	q.result = &calleesSSAResult{
		site:  site,
		funcs: funcs,
	}
	return nil
}

func findCallSite(fn *ssa.Function, call *ast.CallExpr) (ssa.CallInstruction, error) {
	instr, _ := fn.ValueForExpr(call)
	callInstr, _ := instr.(ssa.CallInstruction)
	if instr == nil {
		return nil, fmt.Errorf("this call site is unreachable in this analysis")
	}
	return callInstr, nil
}

func findCallees(conf *pointer.Config, site ssa.CallInstruction) ([]*ssa.Function, error) {
	// Avoid running the pointer analysis for static calls.
	if callee := site.Common().StaticCallee(); callee != nil {
		switch callee.String() {
		case "runtime.SetFinalizer", "(reflect.Value).Call":
			// The PTA treats calls to these intrinsics as dynamic.
			// TODO(adonovan): avoid reliance on PTA internals.

		default:
			return []*ssa.Function{callee}, nil // singleton
		}
	}

	// Dynamic call: use pointer analysis.
	conf.BuildCallGraph = true
	cg := ptrAnalysis(conf).CallGraph
	cg.DeleteSyntheticNodes()

	// Find all call edges from the site.
	n := cg.Nodes[site.Parent()]
	if n == nil {
		return nil, fmt.Errorf("this call site is unreachable in this analysis")
	}
	calleesMap := make(map[*ssa.Function]bool)
	for _, edge := range n.Out {
		if edge.Site == site {
			calleesMap[edge.Callee.Func] = true
		}
	}

	// De-duplicate and sort.
	funcs := make([]*ssa.Function, 0, len(calleesMap))
	for f := range calleesMap {
		funcs = append(funcs, f)
	}
	sort.Sort(byFuncPos(funcs))
	return funcs, nil
}

type calleesSSAResult struct {
	site  ssa.CallInstruction
	funcs []*ssa.Function
}

type calleesTypesResult struct {
	site   *ast.CallExpr
	callee *types.Func
}

func (r *calleesSSAResult) display(printf printfFunc) {
	if len(r.funcs) == 0 {
		// dynamic call on a provably nil func/interface
		printf(r.site, "%s on nil value", r.site.Common().Description())
	} else {
		printf(r.site, "this %s dispatches to:", r.site.Common().Description())
		for _, callee := range r.funcs {
			printf(callee, "\t%s", callee)
		}
	}
}

func (r *calleesSSAResult) toSerial(res *serial.Result, fset *token.FileSet) {
	j := &serial.Callees{
		Pos:  fset.Position(r.site.Pos()).String(),
		Desc: r.site.Common().Description(),
	}
	for _, callee := range r.funcs {
		j.Callees = append(j.Callees, &serial.CalleesItem{
			Name: callee.String(),
			Pos:  fset.Position(callee.Pos()).String(),
		})
	}
	res.Callees = j
}

func (r *calleesTypesResult) display(printf printfFunc) {
	printf(r.site, "this static function call dispatches to:")
	printf(r.callee, "\t%s", r.callee.FullName())
}

func (r *calleesTypesResult) toSerial(res *serial.Result, fset *token.FileSet) {
	j := &serial.Callees{
		Pos:  fset.Position(r.site.Pos()).String(),
		Desc: "static function call",
	}
	j.Callees = []*serial.CalleesItem{
		&serial.CalleesItem{
			Name: r.callee.FullName(),
			Pos:  fset.Position(r.callee.Pos()).String(),
		},
	}
	res.Callees = j
}

// NB: byFuncPos is not deterministic across packages since it depends on load order.
// Use lessPos if the tests need it.
type byFuncPos []*ssa.Function

func (a byFuncPos) Len() int           { return len(a) }
func (a byFuncPos) Less(i, j int) bool { return a[i].Pos() < a[j].Pos() }
func (a byFuncPos) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
@@ -0,0 +1,115 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package oracle

import (
	"fmt"
	"go/token"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// Callers reports the possible callers of the function
// immediately enclosing the specified source location.
//
func callers(q *Query) error {
	lconf := loader.Config{Build: q.Build}

	if err := setPTAScope(&lconf, q.Scope); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}
	q.Fset = lprog.Fset

	qpos, err := parseQueryPos(lprog, q.Pos, false)
	if err != nil {
		return err
	}

	prog := ssautil.CreateProgram(lprog, 0)

	ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
	if err != nil {
		return err
	}

	pkg := prog.Package(qpos.info.Pkg)
	if pkg == nil {
		return fmt.Errorf("no SSA package")
	}
	if !ssa.HasEnclosingFunction(pkg, qpos.path) {
		return fmt.Errorf("this position is not inside a function")
	}

	// Defer SSA construction till after errors are reported.
	prog.Build()

	target := ssa.EnclosingFunction(pkg, qpos.path)
	if target == nil {
		return fmt.Errorf("no SSA function built for this location (dead code?)")
	}

	// TODO(adonovan): opt: if function is never address-taken, skip
	// the pointer analysis. Just look for direct calls. This can
	// be done in a single pass over the SSA.

	// Run the pointer analysis, recording each
	// call found to originate from target.
	ptaConfig.BuildCallGraph = true
	cg := ptrAnalysis(ptaConfig).CallGraph
	cg.DeleteSyntheticNodes()
	edges := cg.CreateNode(target).In
	// TODO(adonovan): sort + dedup calls to ensure test determinism.

	q.result = &callersResult{
		target:    target,
		callgraph: cg,
		edges:     edges,
	}
	return nil
}

type callersResult struct {
	target    *ssa.Function
	callgraph *callgraph.Graph
	edges     []*callgraph.Edge
}

func (r *callersResult) display(printf printfFunc) {
	root := r.callgraph.Root
	if r.edges == nil {
		printf(r.target, "%s is not reachable in this program.", r.target)
	} else {
		printf(r.target, "%s is called from these %d sites:", r.target, len(r.edges))
		for _, edge := range r.edges {
			if edge.Caller == root {
				printf(r.target, "the root of the call graph")
			} else {
				printf(edge, "\t%s from %s", edge.Description(), edge.Caller.Func)
			}
		}
	}
}

func (r *callersResult) toSerial(res *serial.Result, fset *token.FileSet) {
	var callers []serial.Caller
	for _, edge := range r.edges {
		callers = append(callers, serial.Caller{
			Caller: edge.Caller.Func.String(),
			Pos:    fset.Position(edge.Pos()).String(),
			Desc:   edge.Description(),
		})
	}
	res.Callers = callers
}
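The toSerial method above copies each call edge into a serial.Caller carrying its position, description, and caller name. The sketch below shows what such a result might look like once marshalled to JSON; the local caller struct only mirrors the three fields used above, and the JSON tag names and sample positions are assumptions for illustration, not taken from the serial package.

package main

import (
	"encoding/json"
	"fmt"
)

// caller mirrors the fields callersResult.toSerial fills in; the json
// tags here are illustrative assumptions.
type caller struct {
	Pos    string `json:"pos"`
	Desc   string `json:"desc"`
	Caller string `json:"caller"`
}

func main() {
	// A made-up result: two call sites of some target function.
	callers := []caller{
		{Pos: "main.go:42:9", Desc: "static function call", Caller: "main.main"},
		{Pos: "util.go:17:3", Desc: "dynamic method call", Caller: "(*pkg.T).Run"},
	}
	out, _ := json.MarshalIndent(callers, "", "  ")
	fmt.Println(string(out))
}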
@@ -0,0 +1,126 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package oracle

import (
	"fmt"
	"go/token"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// Callstack displays an arbitrary path from a root of the callgraph
// to the function at the current position.
//
// The information may be misleading in a context-insensitive
// analysis. e.g. the call path X->Y->Z might be infeasible if Y never
// calls Z when it is called from X. TODO(adonovan): think about UI.
//
// TODO(adonovan): permit user to specify a starting point other than
// the analysis root.
//
func callstack(q *Query) error {
	fset := token.NewFileSet()
	lconf := loader.Config{Fset: fset, Build: q.Build}

	if err := setPTAScope(&lconf, q.Scope); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}

	qpos, err := parseQueryPos(lprog, q.Pos, false)
	if err != nil {
		return err
	}

	prog := ssautil.CreateProgram(lprog, 0)

	ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
	if err != nil {
		return err
	}

	pkg := prog.Package(qpos.info.Pkg)
	if pkg == nil {
		return fmt.Errorf("no SSA package")
	}

	if !ssa.HasEnclosingFunction(pkg, qpos.path) {
		return fmt.Errorf("this position is not inside a function")
	}

	// Defer SSA construction till after errors are reported.
	prog.Build()

	target := ssa.EnclosingFunction(pkg, qpos.path)
	if target == nil {
		return fmt.Errorf("no SSA function built for this location (dead code?)")
	}

	// Run the pointer analysis and build the complete call graph.
	ptaConfig.BuildCallGraph = true
	cg := ptrAnalysis(ptaConfig).CallGraph
	cg.DeleteSyntheticNodes()

	// Search for an arbitrary path from a root to the target function.
	isEnd := func(n *callgraph.Node) bool { return n.Func == target }
	callpath := callgraph.PathSearch(cg.Root, isEnd)
	if callpath != nil {
		callpath = callpath[1:] // remove synthetic edge from <root>
	}

	q.Fset = fset
	q.result = &callstackResult{
		qpos:     qpos,
		target:   target,
		callpath: callpath,
	}
	return nil
}

type callstackResult struct {
	qpos     *queryPos
	target   *ssa.Function
	callpath []*callgraph.Edge
}

func (r *callstackResult) display(printf printfFunc) {
	if r.callpath != nil {
		printf(r.qpos, "Found a call path from root to %s", r.target)
		printf(r.target, "%s", r.target)
		for i := len(r.callpath) - 1; i >= 0; i-- {
			edge := r.callpath[i]
			printf(edge, "%s from %s", edge.Description(), edge.Caller.Func)
		}
	} else {
		printf(r.target, "%s is unreachable in this analysis scope", r.target)
	}
}

func (r *callstackResult) toSerial(res *serial.Result, fset *token.FileSet) {
	var callers []serial.Caller
	for i := len(r.callpath) - 1; i >= 0; i-- { // (innermost first)
		edge := r.callpath[i]
		callers = append(callers, serial.Caller{
			Pos:    fset.Position(edge.Pos()).String(),
			Caller: edge.Caller.Func.String(),
			Desc:   edge.Description(),
		})
	}
	res.Callstack = &serial.CallStack{
		Pos:     fset.Position(r.target.Pos()).String(),
		Target:  r.target.String(),
		Callers: callers,
	}
}
@@ -0,0 +1,78 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.5

package oracle

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/loader"
)

// definition reports the location of the definition of an identifier.
//
// TODO(adonovan): opt: for intra-file references, the parser's
// resolution might be enough; we should start with that.
//
func definition(q *Query) error {
	lconf := loader.Config{Build: q.Build}
	allowErrors(&lconf)

	if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}
	q.Fset = lprog.Fset

	qpos, err := parseQueryPos(lprog, q.Pos, false)
	if err != nil {
		return err
	}

	id, _ := qpos.path[0].(*ast.Ident)
	if id == nil {
		return fmt.Errorf("no identifier here")
	}

	obj := qpos.info.ObjectOf(id)
	if obj == nil {
		// Happens for y in "switch y := x.(type)",
		// and the package declaration,
		// but I think that's all.
		return fmt.Errorf("no object for identifier")
	}

	q.result = &definitionResult{qpos, obj}
	return nil
}

type definitionResult struct {
	qpos *queryPos
	obj  types.Object // object it denotes
}

func (r *definitionResult) display(printf printfFunc) {
	printf(r.obj, "defined here as %s", r.qpos.objectString(r.obj))
}

func (r *definitionResult) toSerial(res *serial.Result, fset *token.FileSet) {
	definition := &serial.Definition{
		Desc: r.obj.String(),
	}
	if pos := r.obj.Pos(); pos != token.NoPos { // Package objects have no Pos()
		definition.ObjPos = fset.Position(pos).String()
	}
	res.Definition = definition
}
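Stripped of the oracle's loader and serialization plumbing, the definition query above reduces to resolving an identifier to its types.Object and printing fset.Position(obj.Pos()). Below is a minimal, self-contained sketch of that chain using only the standard library; the demo source string and package name are made up for illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package demo

var answer = 40 + 2

func main() { println(answer) }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Type-check the file so every identifier is resolved to an object.
	info := &types.Info{Uses: make(map[*ast.Ident]types.Object)}
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("demo", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}

	// For each use of an identifier, report where it is defined:
	// the same Object -> Pos -> Position chain the code above relies on.
	for id, obj := range info.Uses {
		if obj.Pos() != token.NoPos {
			fmt.Printf("%s used at %s, defined at %s\n",
				id.Name, fset.Position(id.Pos()), fset.Position(obj.Pos()))
		}
	}
}

The same object-to-position step is what definitionResult.toSerial records as ObjPos.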
@ -0,0 +1,775 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.6
|
||||||
|
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
exact "go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// describe describes the syntax node denoted by the query position,
|
||||||
|
// including:
|
||||||
|
// - its syntactic category
|
||||||
|
// - the definition of its referent (for identifiers) [now redundant]
|
||||||
|
// - its type and method set (for an expression or type expression)
|
||||||
|
//
|
||||||
|
func describe(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Fset = lprog.Fset
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, true) // (need exact pos)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if false { // debugging
|
||||||
|
fprintf(os.Stderr, lprog.Fset, qpos.path[0], "you selected: %s %s",
|
||||||
|
astutil.NodeDescription(qpos.path[0]), pathToString(qpos.path))
|
||||||
|
}
|
||||||
|
|
||||||
|
path, action := findInterestingNode(qpos.info, qpos.path)
|
||||||
|
switch action {
|
||||||
|
case actionExpr:
|
||||||
|
q.result, err = describeValue(qpos, path)
|
||||||
|
|
||||||
|
case actionType:
|
||||||
|
q.result, err = describeType(qpos, path)
|
||||||
|
|
||||||
|
case actionPackage:
|
||||||
|
q.result, err = describePackage(qpos, path)
|
||||||
|
|
||||||
|
case actionStmt:
|
||||||
|
q.result, err = describeStmt(qpos, path)
|
||||||
|
|
||||||
|
case actionUnknown:
|
||||||
|
q.result = &describeUnknownResult{path[0]}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(action) // unreachable
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeUnknownResult struct {
|
||||||
|
node ast.Node
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) display(printf printfFunc) {
|
||||||
|
// Nothing much to say about misc syntax.
|
||||||
|
printf(r.node, "%s", astutil.NodeDescription(r.node))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.node),
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type action int
|
||||||
|
|
||||||
|
const (
|
||||||
|
actionUnknown action = iota // None of the below
|
||||||
|
actionExpr // FuncDecl, true Expr or Ident(types.{Const,Var})
|
||||||
|
actionType // type Expr or Ident(types.TypeName).
|
||||||
|
actionStmt // Stmt or Ident(types.Label)
|
||||||
|
actionPackage // Ident(types.Package) or ImportSpec
|
||||||
|
)
|
||||||
|
|
||||||
|
// findInterestingNode classifies the syntax node denoted by path as one of:
|
||||||
|
// - an expression, part of an expression or a reference to a constant
|
||||||
|
// or variable;
|
||||||
|
// - a type, part of a type, or a reference to a named type;
|
||||||
|
// - a statement, part of a statement, or a label referring to a statement;
|
||||||
|
// - part of a package declaration or import spec.
|
||||||
|
// - none of the above.
|
||||||
|
// and returns the most "interesting" associated node, which may be
|
||||||
|
// the same node, an ancestor or a descendent.
|
||||||
|
//
|
||||||
|
func findInterestingNode(pkginfo *loader.PackageInfo, path []ast.Node) ([]ast.Node, action) {
|
||||||
|
// TODO(adonovan): integrate with go/types/stdlib_test.go and
|
||||||
|
// apply this to every AST node we can find to make sure it
|
||||||
|
// doesn't crash.
|
||||||
|
|
||||||
|
// TODO(adonovan): audit for ParenExpr safety, esp. since we
|
||||||
|
// traverse up and down.
|
||||||
|
|
||||||
|
// TODO(adonovan): if the users selects the "." in
|
||||||
|
// "fmt.Fprintf()", they'll get an ambiguous selection error;
|
||||||
|
// we won't even reach here. Can we do better?
|
||||||
|
|
||||||
|
// TODO(adonovan): describing a field within 'type T struct {...}'
|
||||||
|
// describes the (anonymous) struct type and concludes "no methods".
|
||||||
|
// We should ascend to the enclosing type decl, if any.
|
||||||
|
|
||||||
|
for len(path) > 0 {
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
if len(n.Specs) == 1 {
|
||||||
|
// Descend to sole {Import,Type,Value}Spec child.
|
||||||
|
path = append([]ast.Node{n.Specs[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
// Descend to function name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
// Descend to type name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case ast.Stmt:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *ast.ArrayType,
|
||||||
|
*ast.StructType,
|
||||||
|
*ast.FuncType,
|
||||||
|
*ast.InterfaceType,
|
||||||
|
*ast.MapType,
|
||||||
|
*ast.ChanType:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *ast.Comment, *ast.CommentGroup, *ast.File, *ast.KeyValueExpr, *ast.CommClause:
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
// Continue to enclosing node.
|
||||||
|
// e.g. [...]T in ArrayType
|
||||||
|
// f(x...) in CallExpr
|
||||||
|
// f(x...T) in FuncType
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): this needs more thought,
|
||||||
|
// since fields can be so many things.
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Zero names (e.g. anon field in struct)
|
||||||
|
// or multiple field or param names:
|
||||||
|
// continue to enclosing field list.
|
||||||
|
|
||||||
|
case *ast.FieldList:
|
||||||
|
// Continue to enclosing node:
|
||||||
|
// {Struct,Func,Interface}Type or FuncDecl.
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
if _, ok := path[1].(*ast.ImportSpec); ok {
|
||||||
|
return path[1:], actionPackage
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// TODO(adonovan): use Selections info directly.
|
||||||
|
if pkginfo.Uses[n.Sel] == nil {
|
||||||
|
// TODO(adonovan): is this reachable?
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
// Descend to .Sel child.
|
||||||
|
path = append([]ast.Node{n.Sel}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
switch pkginfo.ObjectOf(n).(type) {
|
||||||
|
case *types.PkgName:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *types.Const:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Label:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
// For x in 'struct {x T}', return struct type, for now.
|
||||||
|
if _, ok := path[1].(*ast.Field); ok {
|
||||||
|
_ = path[2].(*ast.FieldList) // assertion
|
||||||
|
if _, ok := path[3].(*ast.StructType); ok {
|
||||||
|
return path[3:], actionType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Builtin:
|
||||||
|
// For reference to built-in function, return enclosing call.
|
||||||
|
path = path[1:] // ascend to enclosing function call
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *types.Nil:
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// No object.
|
||||||
|
switch path[1].(type) {
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// Return enclosing selector expression.
|
||||||
|
return path[1:], actionExpr
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): test this.
|
||||||
|
// e.g. all f in:
|
||||||
|
// struct { f, g int }
|
||||||
|
// interface { f() }
|
||||||
|
// func (f T) method(f, g int) (f, g bool)
|
||||||
|
//
|
||||||
|
// switch path[3].(type) {
|
||||||
|
// case *ast.FuncDecl:
|
||||||
|
// case *ast.StructType:
|
||||||
|
// case *ast.InterfaceType:
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// return path[1:], actionExpr
|
||||||
|
//
|
||||||
|
// Unclear what to do with these.
|
||||||
|
// Struct.Fields -- field
|
||||||
|
// Interface.Methods -- field
|
||||||
|
// FuncType.{Params.Results} -- actionExpr
|
||||||
|
// FuncDecl.Recv -- actionExpr
|
||||||
|
|
||||||
|
case *ast.File:
|
||||||
|
// 'package foo'
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
// TODO(adonovan): fix: why no package object? go/types bug?
|
||||||
|
return path[1:], actionPackage
|
||||||
|
|
||||||
|
default:
|
||||||
|
// e.g. blank identifier
|
||||||
|
// or y in "switch y := x.(type)"
|
||||||
|
// or code in a _test.go file that's not part of the package.
|
||||||
|
log.Printf("unknown reference %s in %T\n", n, path[1])
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
if pkginfo.Types[n].IsType() {
|
||||||
|
return path, actionType
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
// All Expr but {BasicLit,Ident,StarExpr} are
|
||||||
|
// "true" expressions that evaluate to a value.
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascend to parent.
|
||||||
|
path = path[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, actionUnknown // unreachable
|
||||||
|
}
|
||||||
|
|
||||||
|
func describeValue(qpos *queryPos, path []ast.Node) (*describeValueResult, error) {
|
||||||
|
var expr ast.Expr
|
||||||
|
var obj types.Object
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// ambiguous ValueSpec containing multiple names
|
||||||
|
return nil, fmt.Errorf("multiple value specification")
|
||||||
|
case *ast.Ident:
|
||||||
|
obj = qpos.info.ObjectOf(n)
|
||||||
|
expr = n
|
||||||
|
case ast.Expr:
|
||||||
|
expr = n
|
||||||
|
default:
|
||||||
|
// TODO(adonovan): is this reachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for expr: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
typ := qpos.info.TypeOf(expr)
|
||||||
|
constVal := qpos.info.Types[expr].Value
|
||||||
|
|
||||||
|
return &describeValueResult{
|
||||||
|
qpos: qpos,
|
||||||
|
expr: expr,
|
||||||
|
typ: typ,
|
||||||
|
constVal: constVal,
|
||||||
|
obj: obj,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeValueResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
expr ast.Expr // query node
|
||||||
|
typ types.Type // type of expression
|
||||||
|
constVal exact.Value // value of expression, if constant
|
||||||
|
obj types.Object // var/func/const object, if expr was Ident
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) display(printf printfFunc) {
|
||||||
|
var prefix, suffix string
|
||||||
|
if r.constVal != nil {
|
||||||
|
suffix = fmt.Sprintf(" of constant value %s", r.constVal)
|
||||||
|
}
|
||||||
|
switch obj := r.obj.(type) {
|
||||||
|
case *types.Func:
|
||||||
|
if recv := obj.Type().(*types.Signature).Recv(); recv != nil {
|
||||||
|
if _, ok := recv.Type().Underlying().(*types.Interface); ok {
|
||||||
|
prefix = "interface method "
|
||||||
|
} else {
|
||||||
|
prefix = "method "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Describe the expression.
|
||||||
|
if r.obj != nil {
|
||||||
|
if r.obj.Pos() == r.expr.Pos() {
|
||||||
|
// defining ident
|
||||||
|
printf(r.expr, "definition of %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
} else {
|
||||||
|
// referring ident
|
||||||
|
printf(r.expr, "reference to %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
if def := r.obj.Pos(); def != token.NoPos {
|
||||||
|
printf(def, "defined here")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
desc := astutil.NodeDescription(r.expr)
|
||||||
|
if suffix != "" {
|
||||||
|
// constant expression
|
||||||
|
printf(r.expr, "%s%s", desc, suffix)
|
||||||
|
} else {
|
||||||
|
// non-constant expression
|
||||||
|
printf(r.expr, "%s of type %s", desc, r.qpos.typeString(r.typ))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var value, objpos string
|
||||||
|
if r.constVal != nil {
|
||||||
|
value = r.constVal.String()
|
||||||
|
}
|
||||||
|
if r.obj != nil {
|
||||||
|
objpos = fset.Position(r.obj.Pos()).String()
|
||||||
|
}
|
||||||
|
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.expr),
|
||||||
|
Pos: fset.Position(r.expr.Pos()).String(),
|
||||||
|
Detail: "value",
|
||||||
|
Value: &serial.DescribeValue{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
Value: value,
|
||||||
|
ObjPos: objpos,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- TYPE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeType(qpos *queryPos, path []ast.Node) (*describeTypeResult, error) {
|
||||||
|
var description string
|
||||||
|
var t types.Type
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
switch t := t.(type) {
|
||||||
|
case *types.Basic:
|
||||||
|
description = "reference to built-in "
|
||||||
|
|
||||||
|
case *types.Named:
|
||||||
|
isDef := t.Obj().Pos() == n.Pos() // see caveats at isDef above
|
||||||
|
if isDef {
|
||||||
|
description = "definition of "
|
||||||
|
} else {
|
||||||
|
description = "reference to "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for type: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
description = description + "type " + qpos.typeString(t)
|
||||||
|
|
||||||
|
// Show sizes for structs and named types (it's fairly obvious for others).
|
||||||
|
switch t.(type) {
|
||||||
|
case *types.Named, *types.Struct:
|
||||||
|
szs := types.StdSizes{WordSize: 8, MaxAlign: 8} // assume amd64
|
||||||
|
description = fmt.Sprintf("%s (size %d, align %d)", description,
|
||||||
|
szs.Sizeof(t), szs.Alignof(t))
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describeTypeResult{
|
||||||
|
qpos: qpos,
|
||||||
|
node: path[0],
|
||||||
|
description: description,
|
||||||
|
typ: t,
|
||||||
|
methods: accessibleMethods(t, qpos.info.Pkg),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeTypeResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
typ types.Type
|
||||||
|
methods []*types.Selection
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Show the underlying type for a reference to a named type.
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok && r.node.Pos() != nt.Obj().Pos() {
|
||||||
|
printf(nt.Obj(), "defined as %s", r.qpos.typeString(nt.Underlying()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print the method set, if the type kind is capable of bearing methods.
|
||||||
|
switch r.typ.(type) {
|
||||||
|
case *types.Interface, *types.Struct, *types.Named:
|
||||||
|
if len(r.methods) > 0 {
|
||||||
|
printf(r.node, "Method set:")
|
||||||
|
for _, meth := range r.methods {
|
||||||
|
// TODO(adonovan): print these relative
|
||||||
|
// to the owning package, not the
|
||||||
|
// query package.
|
||||||
|
printf(meth.Obj(), "\t%s", r.qpos.selectionString(meth))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
printf(r.node, "No methods.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var namePos, nameDef string
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok {
|
||||||
|
namePos = fset.Position(nt.Obj().Pos()).String()
|
||||||
|
nameDef = nt.Underlying().String()
|
||||||
|
}
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "type",
|
||||||
|
Type: &serial.DescribeType{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
NamePos: namePos,
|
||||||
|
NameDef: nameDef,
|
||||||
|
Methods: methodsToSerial(r.qpos.info.Pkg, r.methods, fset),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- PACKAGE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describePackage(qpos *queryPos, path []ast.Node) (*describePackageResult, error) {
|
||||||
|
var description string
|
||||||
|
var pkg *types.Package
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
var obj types.Object
|
||||||
|
if n.Name != nil {
|
||||||
|
obj = qpos.info.Defs[n.Name]
|
||||||
|
} else {
|
||||||
|
obj = qpos.info.Implicits[n]
|
||||||
|
}
|
||||||
|
pkgname, _ := obj.(*types.PkgName)
|
||||||
|
if pkgname == nil {
|
||||||
|
return nil, fmt.Errorf("can't import package %s", n.Path.Value)
|
||||||
|
}
|
||||||
|
pkg = pkgname.Imported()
|
||||||
|
description = fmt.Sprintf("import of package %q", pkg.Path())
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
if _, isDef := path[1].(*ast.File); isDef {
|
||||||
|
// e.g. package id
|
||||||
|
pkg = qpos.info.Pkg
|
||||||
|
description = fmt.Sprintf("definition of package %q", pkg.Path())
|
||||||
|
} else {
|
||||||
|
// e.g. import id "..."
|
||||||
|
// or id.F()
|
||||||
|
pkg = qpos.info.ObjectOf(n).(*types.PkgName).Imported()
|
||||||
|
description = fmt.Sprintf("reference to package %q", pkg.Path())
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for package: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
var members []*describeMember
|
||||||
|
// NB: "unsafe" has no types.Package
|
||||||
|
if pkg != nil {
|
||||||
|
// Enumerate the accessible package members
|
||||||
|
// in lexicographic order.
|
||||||
|
for _, name := range pkg.Scope().Names() {
|
||||||
|
if pkg == qpos.info.Pkg || ast.IsExported(name) {
|
||||||
|
mem := pkg.Scope().Lookup(name)
|
||||||
|
var methods []*types.Selection
|
||||||
|
if mem, ok := mem.(*types.TypeName); ok {
|
||||||
|
methods = accessibleMethods(mem.Type(), qpos.info.Pkg)
|
||||||
|
}
|
||||||
|
members = append(members, &describeMember{
|
||||||
|
mem,
|
||||||
|
methods,
|
||||||
|
})
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describePackageResult{qpos.fset, path[0], description, pkg, members}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describePackageResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
pkg *types.Package
|
||||||
|
members []*describeMember // in lexicographic name order
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeMember struct {
|
||||||
|
obj types.Object
|
||||||
|
methods []*types.Selection // in types.MethodSet order
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Compute max width of name "column".
|
||||||
|
maxname := 0
|
||||||
|
for _, mem := range r.members {
|
||||||
|
if l := len(mem.obj.Name()); l > maxname {
|
||||||
|
maxname = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, mem := range r.members {
|
||||||
|
printf(mem.obj, "\t%s", formatMember(mem.obj, maxname))
|
||||||
|
for _, meth := range mem.methods {
|
||||||
|
printf(meth.Obj(), "\t\t%s", types.SelectionString(meth, types.RelativeTo(r.pkg)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatMember(obj types.Object, maxname int) string {
|
||||||
|
qualifier := types.RelativeTo(obj.Pkg())
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintf(&buf, "%-5s %-*s", tokenOf(obj), maxname, obj.Name())
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
fmt.Fprintf(&buf, " %s = %s", types.TypeString(obj.Type(), qualifier), obj.Val().String())
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
// Abbreviate long aggregate type names.
|
||||||
|
var abbrev string
|
||||||
|
switch t := obj.Type().Underlying().(type) {
|
||||||
|
case *types.Interface:
|
||||||
|
if t.NumMethods() > 1 {
|
||||||
|
abbrev = "interface{...}"
|
||||||
|
}
|
||||||
|
case *types.Struct:
|
||||||
|
if t.NumFields() > 1 {
|
||||||
|
abbrev = "struct{...}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if abbrev == "" {
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type().Underlying(), qualifier))
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(&buf, " %s", abbrev)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
}
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var members []*serial.DescribeMember
|
||||||
|
for _, mem := range r.members {
|
||||||
|
typ := mem.obj.Type()
|
||||||
|
var val string
|
||||||
|
switch mem := mem.obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
val = mem.Val().String()
|
||||||
|
case *types.TypeName:
|
||||||
|
typ = typ.Underlying()
|
||||||
|
}
|
||||||
|
members = append(members, &serial.DescribeMember{
|
||||||
|
Name: mem.obj.Name(),
|
||||||
|
Type: typ.String(),
|
||||||
|
Value: val,
|
||||||
|
Pos: fset.Position(mem.obj.Pos()).String(),
|
||||||
|
Kind: tokenOf(mem.obj),
|
||||||
|
Methods: methodsToSerial(r.pkg, mem.methods, fset),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "package",
|
||||||
|
Package: &serial.DescribePackage{
|
||||||
|
Path: r.pkg.Path(),
|
||||||
|
Members: members,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func tokenOf(o types.Object) string {
|
||||||
|
switch o.(type) {
|
||||||
|
case *types.Func:
|
||||||
|
return "func"
|
||||||
|
case *types.Var:
|
||||||
|
return "var"
|
||||||
|
case *types.TypeName:
|
||||||
|
return "type"
|
||||||
|
case *types.Const:
|
||||||
|
return "const"
|
||||||
|
case *types.PkgName:
|
||||||
|
return "package"
|
||||||
|
case *types.Builtin:
|
||||||
|
return "builtin" // e.g. when describing package "unsafe"
|
||||||
|
case *types.Nil:
|
||||||
|
return "nil"
|
||||||
|
case *types.Label:
|
||||||
|
return "label"
|
||||||
|
}
|
||||||
|
panic(o)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- STATEMENT ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeStmt(qpos *queryPos, path []ast.Node) (*describeStmtResult, error) {
|
||||||
|
var description string
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
if qpos.info.Defs[n] != nil {
|
||||||
|
description = "labelled statement"
|
||||||
|
} else {
|
||||||
|
description = "reference to labelled statement"
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Nothing much to say about statements.
|
||||||
|
description = astutil.NodeDescription(n)
|
||||||
|
}
|
||||||
|
return &describeStmtResult{qpos.fset, path[0], description}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeStmtResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "unknown",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ------------------- Utilities -------------------
|
||||||
|
|
||||||
|
// pathToString returns a string containing the concrete types of the
|
||||||
|
// nodes in path.
|
||||||
|
func pathToString(path []ast.Node) string {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprint(&buf, "[")
|
||||||
|
for i, n := range path {
|
||||||
|
if i > 0 {
|
||||||
|
fmt.Fprint(&buf, " ")
|
||||||
|
}
|
||||||
|
fmt.Fprint(&buf, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast."))
|
||||||
|
}
|
||||||
|
fmt.Fprint(&buf, "]")
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func accessibleMethods(t types.Type, from *types.Package) []*types.Selection {
|
||||||
|
var methods []*types.Selection
|
||||||
|
for _, meth := range typeutil.IntuitiveMethodSet(t, nil) {
|
||||||
|
if isAccessibleFrom(meth.Obj(), from) {
|
||||||
|
methods = append(methods, meth)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return methods
|
||||||
|
}
|
||||||
|
|
||||||
|
func isAccessibleFrom(obj types.Object, pkg *types.Package) bool {
|
||||||
|
return ast.IsExported(obj.Name()) || obj.Pkg() == pkg
|
||||||
|
}
|
||||||
|
|
||||||
|
func methodsToSerial(this *types.Package, methods []*types.Selection, fset *token.FileSet) []serial.DescribeMethod {
|
||||||
|
qualifier := types.RelativeTo(this)
|
||||||
|
var jmethods []serial.DescribeMethod
|
||||||
|
for _, meth := range methods {
|
||||||
|
var ser serial.DescribeMethod
|
||||||
|
if meth != nil { // may contain nils when called by implements (on a method)
|
||||||
|
ser = serial.DescribeMethod{
|
||||||
|
Name: types.SelectionString(meth, qualifier),
|
||||||
|
Pos: fset.Position(meth.Obj().Pos()).String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
jmethods = append(jmethods, ser)
|
||||||
|
}
|
||||||
|
return jmethods
|
||||||
|
}
|
|
@ -0,0 +1,786 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.5,!go1.6
|
||||||
|
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
exact "go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// describe describes the syntax node denoted by the query position,
|
||||||
|
// including:
|
||||||
|
// - its syntactic category
|
||||||
|
// - the definition of its referent (for identifiers) [now redundant]
|
||||||
|
// - its type and method set (for an expression or type expression)
|
||||||
|
//
|
||||||
|
func describe(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Fset = lprog.Fset
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, true) // (need exact pos)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if false { // debugging
|
||||||
|
fprintf(os.Stderr, lprog.Fset, qpos.path[0], "you selected: %s %s",
|
||||||
|
astutil.NodeDescription(qpos.path[0]), pathToString(qpos.path))
|
||||||
|
}
|
||||||
|
|
||||||
|
path, action := findInterestingNode(qpos.info, qpos.path)
|
||||||
|
switch action {
|
||||||
|
case actionExpr:
|
||||||
|
q.result, err = describeValue(qpos, path)
|
||||||
|
|
||||||
|
case actionType:
|
||||||
|
q.result, err = describeType(qpos, path)
|
||||||
|
|
||||||
|
case actionPackage:
|
||||||
|
q.result, err = describePackage(qpos, path)
|
||||||
|
|
||||||
|
case actionStmt:
|
||||||
|
q.result, err = describeStmt(qpos, path)
|
||||||
|
|
||||||
|
case actionUnknown:
|
||||||
|
q.result = &describeUnknownResult{path[0]}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(action) // unreachable
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeUnknownResult struct {
|
||||||
|
node ast.Node
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) display(printf printfFunc) {
|
||||||
|
// Nothing much to say about misc syntax.
|
||||||
|
printf(r.node, "%s", astutil.NodeDescription(r.node))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.node),
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type action int
|
||||||
|
|
||||||
|
const (
|
||||||
|
actionUnknown action = iota // None of the below
|
||||||
|
actionExpr // FuncDecl, true Expr or Ident(types.{Const,Var})
|
||||||
|
actionType // type Expr or Ident(types.TypeName).
|
||||||
|
actionStmt // Stmt or Ident(types.Label)
|
||||||
|
actionPackage // Ident(types.Package) or ImportSpec
|
||||||
|
)
|
||||||
|
|
||||||
|
// findInterestingNode classifies the syntax node denoted by path as one of:
|
||||||
|
// - an expression, part of an expression or a reference to a constant
|
||||||
|
// or variable;
|
||||||
|
// - a type, part of a type, or a reference to a named type;
|
||||||
|
// - a statement, part of a statement, or a label referring to a statement;
|
||||||
|
// - part of a package declaration or import spec.
|
||||||
|
// - none of the above.
|
||||||
|
// and returns the most "interesting" associated node, which may be
|
||||||
|
// the same node, an ancestor or a descendent.
|
||||||
|
//
|
||||||
|
func findInterestingNode(pkginfo *loader.PackageInfo, path []ast.Node) ([]ast.Node, action) {
|
||||||
|
// TODO(adonovan): integrate with go/types/stdlib_test.go and
|
||||||
|
// apply this to every AST node we can find to make sure it
|
||||||
|
// doesn't crash.
|
||||||
|
|
||||||
|
// TODO(adonovan): audit for ParenExpr safety, esp. since we
|
||||||
|
// traverse up and down.
|
||||||
|
|
||||||
|
// TODO(adonovan): if the users selects the "." in
|
||||||
|
// "fmt.Fprintf()", they'll get an ambiguous selection error;
|
||||||
|
// we won't even reach here. Can we do better?
|
||||||
|
|
||||||
|
// TODO(adonovan): describing a field within 'type T struct {...}'
|
||||||
|
// describes the (anonymous) struct type and concludes "no methods".
|
||||||
|
// We should ascend to the enclosing type decl, if any.
|
||||||
|
|
||||||
|
for len(path) > 0 {
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
if len(n.Specs) == 1 {
|
||||||
|
// Descend to sole {Import,Type,Value}Spec child.
|
||||||
|
path = append([]ast.Node{n.Specs[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
// Descend to function name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
// Descend to type name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case ast.Stmt:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *ast.ArrayType,
|
||||||
|
*ast.StructType,
|
||||||
|
*ast.FuncType,
|
||||||
|
*ast.InterfaceType,
|
||||||
|
*ast.MapType,
|
||||||
|
*ast.ChanType:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *ast.Comment, *ast.CommentGroup, *ast.File, *ast.KeyValueExpr, *ast.CommClause:
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
// Continue to enclosing node.
|
||||||
|
// e.g. [...]T in ArrayType
|
||||||
|
// f(x...) in CallExpr
|
||||||
|
// f(x...T) in FuncType
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): this needs more thought,
|
||||||
|
// since fields can be so many things.
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Zero names (e.g. anon field in struct)
|
||||||
|
// or multiple field or param names:
|
||||||
|
// continue to enclosing field list.
|
||||||
|
|
||||||
|
case *ast.FieldList:
|
||||||
|
// Continue to enclosing node:
|
||||||
|
// {Struct,Func,Interface}Type or FuncDecl.
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
if _, ok := path[1].(*ast.ImportSpec); ok {
|
||||||
|
return path[1:], actionPackage
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// TODO(adonovan): use Selections info directly.
|
||||||
|
if pkginfo.Uses[n.Sel] == nil {
|
||||||
|
// TODO(adonovan): is this reachable?
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
// Descend to .Sel child.
|
||||||
|
path = append([]ast.Node{n.Sel}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
switch pkginfo.ObjectOf(n).(type) {
|
||||||
|
case *types.PkgName:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *types.Const:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Label:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
// For x in 'struct {x T}', return struct type, for now.
|
||||||
|
if _, ok := path[1].(*ast.Field); ok {
|
||||||
|
_ = path[2].(*ast.FieldList) // assertion
|
||||||
|
if _, ok := path[3].(*ast.StructType); ok {
|
||||||
|
return path[3:], actionType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Builtin:
|
||||||
|
// For reference to built-in function, return enclosing call.
|
||||||
|
path = path[1:] // ascend to enclosing function call
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *types.Nil:
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// No object.
|
||||||
|
switch path[1].(type) {
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// Return enclosing selector expression.
|
||||||
|
return path[1:], actionExpr
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): test this.
|
||||||
|
// e.g. all f in:
|
||||||
|
// struct { f, g int }
|
||||||
|
// interface { f() }
|
||||||
|
// func (f T) method(f, g int) (f, g bool)
|
||||||
|
//
|
||||||
|
// switch path[3].(type) {
|
||||||
|
// case *ast.FuncDecl:
|
||||||
|
// case *ast.StructType:
|
||||||
|
// case *ast.InterfaceType:
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// return path[1:], actionExpr
|
||||||
|
//
|
||||||
|
// Unclear what to do with these.
|
||||||
|
// Struct.Fields -- field
|
||||||
|
// Interface.Methods -- field
|
||||||
|
// FuncType.{Params.Results} -- actionExpr
|
||||||
|
// FuncDecl.Recv -- actionExpr
|
||||||
|
|
||||||
|
case *ast.File:
|
||||||
|
// 'package foo'
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
// TODO(adonovan): fix: why no package object? go/types bug?
|
||||||
|
return path[1:], actionPackage
|
||||||
|
|
||||||
|
default:
|
||||||
|
// e.g. blank identifier
|
||||||
|
// or y in "switch y := x.(type)"
|
||||||
|
// or code in a _test.go file that's not part of the package.
|
||||||
|
log.Printf("unknown reference %s in %T\n", n, path[1])
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
if pkginfo.Types[n].IsType() {
|
||||||
|
return path, actionType
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
// All Expr but {BasicLit,Ident,StarExpr} are
|
||||||
|
// "true" expressions that evaluate to a value.
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascend to parent.
|
||||||
|
path = path[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, actionUnknown // unreachable
|
||||||
|
}
|
||||||
|
|
||||||
|
func describeValue(qpos *queryPos, path []ast.Node) (*describeValueResult, error) {
	var expr ast.Expr
	var obj types.Object
	switch n := path[0].(type) {
	case *ast.ValueSpec:
		// ambiguous ValueSpec containing multiple names
		return nil, fmt.Errorf("multiple value specification")
	case *ast.Ident:
		obj = qpos.info.ObjectOf(n)
		expr = n
	case ast.Expr:
		expr = n
	default:
		// TODO(adonovan): is this reachable?
		return nil, fmt.Errorf("unexpected AST for expr: %T", n)
	}

	typ := qpos.info.TypeOf(expr)
	constVal := qpos.info.Types[expr].Value

	return &describeValueResult{
		qpos:     qpos,
		expr:     expr,
		typ:      typ,
		constVal: constVal,
		obj:      obj,
	}, nil
}

type describeValueResult struct {
	qpos     *queryPos
	expr     ast.Expr     // query node
	typ      types.Type   // type of expression
	constVal exact.Value  // value of expression, if constant
	obj      types.Object // var/func/const object, if expr was Ident
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) display(printf printfFunc) {
|
||||||
|
var prefix, suffix string
|
||||||
|
if r.constVal != nil {
|
||||||
|
suffix = fmt.Sprintf(" of constant value %s", constValString(r.constVal))
|
||||||
|
}
|
||||||
|
switch obj := r.obj.(type) {
|
||||||
|
case *types.Func:
|
||||||
|
if recv := obj.Type().(*types.Signature).Recv(); recv != nil {
|
||||||
|
if _, ok := recv.Type().Underlying().(*types.Interface); ok {
|
||||||
|
prefix = "interface method "
|
||||||
|
} else {
|
||||||
|
prefix = "method "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Describe the expression.
|
||||||
|
if r.obj != nil {
|
||||||
|
if r.obj.Pos() == r.expr.Pos() {
|
||||||
|
// defining ident
|
||||||
|
printf(r.expr, "definition of %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
} else {
|
||||||
|
// referring ident
|
||||||
|
printf(r.expr, "reference to %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
if def := r.obj.Pos(); def != token.NoPos {
|
||||||
|
printf(def, "defined here")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
desc := astutil.NodeDescription(r.expr)
|
||||||
|
if suffix != "" {
|
||||||
|
// constant expression
|
||||||
|
printf(r.expr, "%s%s", desc, suffix)
|
||||||
|
} else {
|
||||||
|
// non-constant expression
|
||||||
|
printf(r.expr, "%s of type %s", desc, r.qpos.typeString(r.typ))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var value, objpos string
|
||||||
|
if r.constVal != nil {
|
||||||
|
value = r.constVal.String()
|
||||||
|
}
|
||||||
|
if r.obj != nil {
|
||||||
|
objpos = fset.Position(r.obj.Pos()).String()
|
||||||
|
}
|
||||||
|
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.expr),
|
||||||
|
Pos: fset.Position(r.expr.Pos()).String(),
|
||||||
|
Detail: "value",
|
||||||
|
Value: &serial.DescribeValue{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
Value: value,
|
||||||
|
ObjPos: objpos,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- TYPE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeType(qpos *queryPos, path []ast.Node) (*describeTypeResult, error) {
|
||||||
|
var description string
|
||||||
|
var t types.Type
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
switch t := t.(type) {
|
||||||
|
case *types.Basic:
|
||||||
|
description = "reference to built-in "
|
||||||
|
|
||||||
|
case *types.Named:
|
||||||
|
isDef := t.Obj().Pos() == n.Pos() // see caveats at isDef above
|
||||||
|
if isDef {
|
||||||
|
description = "definition of "
|
||||||
|
} else {
|
||||||
|
description = "reference to "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for type: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
description = description + "type " + qpos.typeString(t)
|
||||||
|
|
||||||
|
// Show sizes for structs and named types (it's fairly obvious for others).
|
||||||
|
switch t.(type) {
|
||||||
|
case *types.Named, *types.Struct:
|
||||||
|
szs := types.StdSizes{8, 8} // assume amd64
|
||||||
|
description = fmt.Sprintf("%s (size %d, align %d)", description,
|
||||||
|
szs.Sizeof(t), szs.Alignof(t))
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describeTypeResult{
|
||||||
|
qpos: qpos,
|
||||||
|
node: path[0],
|
||||||
|
description: description,
|
||||||
|
typ: t,
|
||||||
|
methods: accessibleMethods(t, qpos.info.Pkg),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeTypeResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
typ types.Type
|
||||||
|
methods []*types.Selection
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Show the underlying type for a reference to a named type.
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok && r.node.Pos() != nt.Obj().Pos() {
|
||||||
|
printf(nt.Obj(), "defined as %s", r.qpos.typeString(nt.Underlying()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print the method set, if the type kind is capable of bearing methods.
|
||||||
|
switch r.typ.(type) {
|
||||||
|
case *types.Interface, *types.Struct, *types.Named:
|
||||||
|
if len(r.methods) > 0 {
|
||||||
|
printf(r.node, "Method set:")
|
||||||
|
for _, meth := range r.methods {
|
||||||
|
// TODO(adonovan): print these relative
|
||||||
|
// to the owning package, not the
|
||||||
|
// query package.
|
||||||
|
printf(meth.Obj(), "\t%s", r.qpos.selectionString(meth))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
printf(r.node, "No methods.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var namePos, nameDef string
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok {
|
||||||
|
namePos = fset.Position(nt.Obj().Pos()).String()
|
||||||
|
nameDef = nt.Underlying().String()
|
||||||
|
}
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "type",
|
||||||
|
Type: &serial.DescribeType{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
NamePos: namePos,
|
||||||
|
NameDef: nameDef,
|
||||||
|
Methods: methodsToSerial(r.qpos.info.Pkg, r.methods, fset),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- PACKAGE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describePackage(qpos *queryPos, path []ast.Node) (*describePackageResult, error) {
|
||||||
|
var description string
|
||||||
|
var pkg *types.Package
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
var obj types.Object
|
||||||
|
if n.Name != nil {
|
||||||
|
obj = qpos.info.Defs[n.Name]
|
||||||
|
} else {
|
||||||
|
obj = qpos.info.Implicits[n]
|
||||||
|
}
|
||||||
|
pkgname, _ := obj.(*types.PkgName)
|
||||||
|
if pkgname == nil {
|
||||||
|
return nil, fmt.Errorf("can't import package %s", n.Path.Value)
|
||||||
|
}
|
||||||
|
pkg = pkgname.Imported()
|
||||||
|
description = fmt.Sprintf("import of package %q", pkg.Path())
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
if _, isDef := path[1].(*ast.File); isDef {
|
||||||
|
// e.g. package id
|
||||||
|
pkg = qpos.info.Pkg
|
||||||
|
description = fmt.Sprintf("definition of package %q", pkg.Path())
|
||||||
|
} else {
|
||||||
|
// e.g. import id "..."
|
||||||
|
// or id.F()
|
||||||
|
pkg = qpos.info.ObjectOf(n).(*types.PkgName).Imported()
|
||||||
|
description = fmt.Sprintf("reference to package %q", pkg.Path())
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for package: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
var members []*describeMember
|
||||||
|
// NB: "unsafe" has no types.Package
|
||||||
|
if pkg != nil {
|
||||||
|
// Enumerate the accessible package members
|
||||||
|
// in lexicographic order.
|
||||||
|
for _, name := range pkg.Scope().Names() {
|
||||||
|
if pkg == qpos.info.Pkg || ast.IsExported(name) {
|
||||||
|
mem := pkg.Scope().Lookup(name)
|
||||||
|
var methods []*types.Selection
|
||||||
|
if mem, ok := mem.(*types.TypeName); ok {
|
||||||
|
methods = accessibleMethods(mem.Type(), qpos.info.Pkg)
|
||||||
|
}
|
||||||
|
members = append(members, &describeMember{
|
||||||
|
mem,
|
||||||
|
methods,
|
||||||
|
})
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describePackageResult{qpos.fset, path[0], description, pkg, members}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describePackageResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
pkg *types.Package
|
||||||
|
members []*describeMember // in lexicographic name order
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeMember struct {
|
||||||
|
obj types.Object
|
||||||
|
methods []*types.Selection // in types.MethodSet order
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Compute max width of name "column".
|
||||||
|
maxname := 0
|
||||||
|
for _, mem := range r.members {
|
||||||
|
if l := len(mem.obj.Name()); l > maxname {
|
||||||
|
maxname = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, mem := range r.members {
|
||||||
|
printf(mem.obj, "\t%s", formatMember(mem.obj, maxname))
|
||||||
|
for _, meth := range mem.methods {
|
||||||
|
printf(meth.Obj(), "\t\t%s", types.SelectionString(meth, types.RelativeTo(r.pkg)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatMember(obj types.Object, maxname int) string {
|
||||||
|
qualifier := types.RelativeTo(obj.Pkg())
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintf(&buf, "%-5s %-*s", tokenOf(obj), maxname, obj.Name())
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
fmt.Fprintf(&buf, " %s = %s", types.TypeString(obj.Type(), qualifier), constValString(obj.Val()))
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
// Abbreviate long aggregate type names.
|
||||||
|
var abbrev string
|
||||||
|
switch t := obj.Type().Underlying().(type) {
|
||||||
|
case *types.Interface:
|
||||||
|
if t.NumMethods() > 1 {
|
||||||
|
abbrev = "interface{...}"
|
||||||
|
}
|
||||||
|
case *types.Struct:
|
||||||
|
if t.NumFields() > 1 {
|
||||||
|
abbrev = "struct{...}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if abbrev == "" {
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type().Underlying(), qualifier))
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(&buf, " %s", abbrev)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
}
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var members []*serial.DescribeMember
|
||||||
|
for _, mem := range r.members {
|
||||||
|
typ := mem.obj.Type()
|
||||||
|
var val string
|
||||||
|
switch mem := mem.obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
val = constValString(mem.Val())
|
||||||
|
case *types.TypeName:
|
||||||
|
typ = typ.Underlying()
|
||||||
|
}
|
||||||
|
members = append(members, &serial.DescribeMember{
|
||||||
|
Name: mem.obj.Name(),
|
||||||
|
Type: typ.String(),
|
||||||
|
Value: val,
|
||||||
|
Pos: fset.Position(mem.obj.Pos()).String(),
|
||||||
|
Kind: tokenOf(mem.obj),
|
||||||
|
Methods: methodsToSerial(r.pkg, mem.methods, fset),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "package",
|
||||||
|
Package: &serial.DescribePackage{
|
||||||
|
Path: r.pkg.Path(),
|
||||||
|
Members: members,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func tokenOf(o types.Object) string {
	switch o.(type) {
	case *types.Func:
		return "func"
	case *types.Var:
		return "var"
	case *types.TypeName:
		return "type"
	case *types.Const:
		return "const"
	case *types.PkgName:
		return "package"
	case *types.Builtin:
		return "builtin" // e.g. when describing package "unsafe"
	case *types.Nil:
		return "nil"
	case *types.Label:
		return "label"
	}
	panic(o)
}
|
||||||
|
|
||||||
|
// ---- STATEMENT ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeStmt(qpos *queryPos, path []ast.Node) (*describeStmtResult, error) {
|
||||||
|
var description string
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
if qpos.info.Defs[n] != nil {
|
||||||
|
description = "labelled statement"
|
||||||
|
} else {
|
||||||
|
description = "reference to labelled statement"
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Nothing much to say about statements.
|
||||||
|
description = astutil.NodeDescription(n)
|
||||||
|
}
|
||||||
|
return &describeStmtResult{qpos.fset, path[0], description}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeStmtResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) display(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
res.Describe = &serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "unknown",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ------------------- Utilities -------------------
|
||||||
|
|
||||||
|
// pathToString returns a string containing the concrete types of the
// nodes in path.
func pathToString(path []ast.Node) string {
	var buf bytes.Buffer
	fmt.Fprint(&buf, "[")
	for i, n := range path {
		if i > 0 {
			fmt.Fprint(&buf, " ")
		}
		fmt.Fprint(&buf, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast."))
	}
	fmt.Fprint(&buf, "]")
	return buf.String()
}

func accessibleMethods(t types.Type, from *types.Package) []*types.Selection {
	var methods []*types.Selection
	for _, meth := range typeutil.IntuitiveMethodSet(t, nil) {
		if isAccessibleFrom(meth.Obj(), from) {
			methods = append(methods, meth)
		}
	}
	return methods
}

func isAccessibleFrom(obj types.Object, pkg *types.Package) bool {
	return ast.IsExported(obj.Name()) || obj.Pkg() == pkg
}
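// For example, given a type defined in package p with methods Exported and
// unexported, accessibleMethods(T, q) from some other package q keeps only
// Exported, while accessibleMethods(T, p) keeps both. (Illustrative sketch;
// the identifier names are hypothetical.)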
|
||||||
|
|
||||||
|
func methodsToSerial(this *types.Package, methods []*types.Selection, fset *token.FileSet) []serial.DescribeMethod {
	qualifier := types.RelativeTo(this)
	var jmethods []serial.DescribeMethod
	for _, meth := range methods {
		var ser serial.DescribeMethod
		if meth != nil { // may contain nils when called by implements (on a method)
			ser = serial.DescribeMethod{
				Name: types.SelectionString(meth, qualifier),
				Pos:  fset.Position(meth.Obj().Pos()).String(),
			}
		}
		jmethods = append(jmethods, ser)
	}
	return jmethods
}
|
||||||
|
|
||||||
|
// constValString emulates Go 1.6's go/constant.ExactString well enough
// to make the tests pass. This is just a stopgap until we throw away
// all the *15.go files.
func constValString(v exact.Value) string {
	if v.Kind() == exact.Float {
		f, _ := exact.Float64Val(v)
		return fmt.Sprintf("%g", f)
	}
	return v.String()
}
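// Illustrative example (hypothetical value): for an exact.Float constant such
// as 0.5, constValString prints "0.5" via %g rather than an exact rational
// form; all non-float constants fall through to v.String().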
|
|
@ -0,0 +1,224 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.5
|
||||||
|
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"go/ast"
|
||||||
|
"go/printer"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
)
|
||||||
|
|
||||||
|
// freevars displays the lexical (not package-level) free variables of
|
||||||
|
// the selection.
|
||||||
|
//
|
||||||
|
// It treats A.B.C as a separate variable from A to reveal the parts
|
||||||
|
// of an aggregate type that are actually needed.
|
||||||
|
// This aids refactoring.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): optionally display the free references to
|
||||||
|
// file/package scope objects, and to objects from other packages.
|
||||||
|
// Depending on where the resulting function abstraction will go,
|
||||||
|
// these might be interesting. Perhaps group the results into three
|
||||||
|
// bands.
|
||||||
|
//
|
||||||
|
func freevars(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Fset = lprog.Fset
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
file := qpos.path[len(qpos.path)-1] // the enclosing file
|
||||||
|
fileScope := qpos.info.Scopes[file]
|
||||||
|
pkgScope := fileScope.Parent()
|
||||||
|
|
||||||
|
// The id and sel functions return non-nil if they denote an
|
||||||
|
// object o or selection o.x.y that is referenced by the
|
||||||
|
// selection but defined neither within the selection nor at
|
||||||
|
// file scope, i.e. it is in the lexical environment.
|
||||||
|
var id func(n *ast.Ident) types.Object
|
||||||
|
var sel func(n *ast.SelectorExpr) types.Object
|
||||||
|
|
||||||
|
sel = func(n *ast.SelectorExpr) types.Object {
|
||||||
|
switch x := unparen(n.X).(type) {
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
return sel(x)
|
||||||
|
case *ast.Ident:
|
||||||
|
return id(x)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
id = func(n *ast.Ident) types.Object {
|
||||||
|
obj := qpos.info.Uses[n]
|
||||||
|
if obj == nil {
|
||||||
|
return nil // not a reference
|
||||||
|
}
|
||||||
|
if _, ok := obj.(*types.PkgName); ok {
|
||||||
|
return nil // imported package
|
||||||
|
}
|
||||||
|
if !(file.Pos() <= obj.Pos() && obj.Pos() <= file.End()) {
|
||||||
|
return nil // not defined in this file
|
||||||
|
}
|
||||||
|
scope := obj.Parent()
|
||||||
|
if scope == nil {
|
||||||
|
return nil // e.g. interface method, struct field
|
||||||
|
}
|
||||||
|
if scope == fileScope || scope == pkgScope {
|
||||||
|
return nil // defined at file or package scope
|
||||||
|
}
|
||||||
|
if qpos.start <= obj.Pos() && obj.Pos() <= qpos.end {
|
||||||
|
return nil // defined within selection => not free
|
||||||
|
}
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// Maps each reference that is free in the selection
|
||||||
|
// to the object it refers to.
|
||||||
|
// The map de-duplicates repeated references.
|
||||||
|
refsMap := make(map[string]freevarsRef)
|
||||||
|
|
||||||
|
// Visit all the identifiers in the selected ASTs.
|
||||||
|
ast.Inspect(qpos.path[0], func(n ast.Node) bool {
|
||||||
|
if n == nil {
|
||||||
|
return true // popping DFS stack
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is this node contained within the selection?
|
||||||
|
// (freevars permits inexact selections,
|
||||||
|
// like two stmts in a block.)
|
||||||
|
if qpos.start <= n.Pos() && n.End() <= qpos.end {
|
||||||
|
var obj types.Object
|
||||||
|
var prune bool
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
obj = id(n)
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
obj = sel(n)
|
||||||
|
prune = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if obj != nil {
|
||||||
|
var kind string
|
||||||
|
switch obj.(type) {
|
||||||
|
case *types.Var:
|
||||||
|
kind = "var"
|
||||||
|
case *types.Func:
|
||||||
|
kind = "func"
|
||||||
|
case *types.TypeName:
|
||||||
|
kind = "type"
|
||||||
|
case *types.Const:
|
||||||
|
kind = "const"
|
||||||
|
case *types.Label:
|
||||||
|
kind = "label"
|
||||||
|
default:
|
||||||
|
panic(obj)
|
||||||
|
}
|
||||||
|
|
||||||
|
typ := qpos.info.TypeOf(n.(ast.Expr))
|
||||||
|
ref := freevarsRef{kind, printNode(lprog.Fset, n), typ, obj}
|
||||||
|
refsMap[ref.ref] = ref
|
||||||
|
|
||||||
|
if prune {
|
||||||
|
return false // don't descend
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true // descend
|
||||||
|
})
|
||||||
|
|
||||||
|
refs := make([]freevarsRef, 0, len(refsMap))
|
||||||
|
for _, ref := range refsMap {
|
||||||
|
refs = append(refs, ref)
|
||||||
|
}
|
||||||
|
sort.Sort(byRef(refs))
|
||||||
|
|
||||||
|
q.result = &freevarsResult{
|
||||||
|
qpos: qpos,
|
||||||
|
refs: refs,
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type freevarsResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
refs []freevarsRef
|
||||||
|
}
|
||||||
|
|
||||||
|
type freevarsRef struct {
|
||||||
|
kind string
|
||||||
|
ref string
|
||||||
|
typ types.Type
|
||||||
|
obj types.Object
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *freevarsResult) display(printf printfFunc) {
|
||||||
|
if len(r.refs) == 0 {
|
||||||
|
printf(r.qpos, "No free identifiers.")
|
||||||
|
} else {
|
||||||
|
printf(r.qpos, "Free identifiers:")
|
||||||
|
qualifier := types.RelativeTo(r.qpos.info.Pkg)
|
||||||
|
for _, ref := range r.refs {
|
||||||
|
// Avoid printing "type T T".
|
||||||
|
var typstr string
|
||||||
|
if ref.kind != "type" {
|
||||||
|
typstr = " " + types.TypeString(ref.typ, qualifier)
|
||||||
|
}
|
||||||
|
printf(ref.obj, "%s %s%s", ref.kind, ref.ref, typstr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *freevarsResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
var refs []*serial.FreeVar
|
||||||
|
for _, ref := range r.refs {
|
||||||
|
refs = append(refs,
|
||||||
|
&serial.FreeVar{
|
||||||
|
Pos: fset.Position(ref.obj.Pos()).String(),
|
||||||
|
Kind: ref.kind,
|
||||||
|
Ref: ref.ref,
|
||||||
|
Type: ref.typ.String(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
res.Freevars = refs
|
||||||
|
}
|
||||||
|
|
||||||
|
// -------- utils --------

type byRef []freevarsRef

func (p byRef) Len() int           { return len(p) }
func (p byRef) Less(i, j int) bool { return p[i].ref < p[j].ref }
func (p byRef) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// printNode returns the pretty-printed syntax of n.
func printNode(fset *token.FileSet, n ast.Node) string {
	var buf bytes.Buffer
	printer.Fprint(&buf, fset, n)
	return buf.String()
}
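// Illustrative example (hypothetical node): applied to an *ast.SelectorExpr
// for fmt.Println, printNode returns the source text "fmt.Println"; freevars
// uses this string as the de-duplicating key for each free reference.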
|
|
@ -0,0 +1,354 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.5
|
||||||
|
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
"golang.org/x/tools/refactor/importgraph"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Implements displays the "implements" relation as it pertains to the
|
||||||
|
// selected type.
|
||||||
|
// If the selection is a method, 'implements' displays
|
||||||
|
// the corresponding methods of the types that would have been reported
|
||||||
|
// by an implements query on the receiver type.
|
||||||
|
//
|
||||||
|
func implements(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
qpkg, err := importQueryPackage(q.Pos, &lconf)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the packages to search.
|
||||||
|
if len(q.Scope) > 0 {
|
||||||
|
// Inspect all packages in the analysis scope, if specified.
|
||||||
|
if err := setPTAScope(&lconf, q.Scope); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Otherwise inspect the forward and reverse
|
||||||
|
// transitive closure of the selected package.
|
||||||
|
// (In theory even this is incomplete.)
|
||||||
|
_, rev, _ := importgraph.Build(q.Build)
|
||||||
|
for path := range rev.Search(qpkg) {
|
||||||
|
lconf.ImportWithTests(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): for completeness, we should also
|
||||||
|
// type-check and inspect function bodies in all
|
||||||
|
// imported packages. This would be expensive, but we
|
||||||
|
// could optimize by skipping functions that do not
|
||||||
|
// contain type declarations. This would require
|
||||||
|
// changing the loader's TypeCheckFuncBodies hook to
|
||||||
|
// provide the []*ast.File.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Fset = lprog.Fset
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the selected type.
|
||||||
|
path, action := findInterestingNode(qpos.info, qpos.path)
|
||||||
|
|
||||||
|
var method *types.Func
|
||||||
|
var T types.Type // selected type (receiver if method != nil)
|
||||||
|
|
||||||
|
switch action {
|
||||||
|
case actionExpr:
|
||||||
|
// method?
|
||||||
|
if id, ok := path[0].(*ast.Ident); ok {
|
||||||
|
if obj, ok := qpos.info.ObjectOf(id).(*types.Func); ok {
|
||||||
|
recv := obj.Type().(*types.Signature).Recv()
|
||||||
|
if recv == nil {
|
||||||
|
return fmt.Errorf("this function is not a method")
|
||||||
|
}
|
||||||
|
method = obj
|
||||||
|
T = recv.Type()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case actionType:
|
||||||
|
T = qpos.info.TypeOf(path[0].(ast.Expr))
|
||||||
|
}
|
||||||
|
if T == nil {
|
||||||
|
return fmt.Errorf("no type or method here")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find all named types, even local types (which can have
|
||||||
|
// methods via promotion) and the built-in "error".
|
||||||
|
var allNamed []types.Type
|
||||||
|
for _, info := range lprog.AllPackages {
|
||||||
|
for _, obj := range info.Defs {
|
||||||
|
if obj, ok := obj.(*types.TypeName); ok {
|
||||||
|
allNamed = append(allNamed, obj.Type())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
allNamed = append(allNamed, types.Universe.Lookup("error").Type())
|
||||||
|
|
||||||
|
var msets typeutil.MethodSetCache
|
||||||
|
|
||||||
|
// Test each named type.
|
||||||
|
var to, from, fromPtr []types.Type
|
||||||
|
for _, U := range allNamed {
|
||||||
|
if isInterface(T) {
|
||||||
|
if msets.MethodSet(T).Len() == 0 {
|
||||||
|
continue // empty interface
|
||||||
|
}
|
||||||
|
if isInterface(U) {
|
||||||
|
if msets.MethodSet(U).Len() == 0 {
|
||||||
|
continue // empty interface
|
||||||
|
}
|
||||||
|
|
||||||
|
// T interface, U interface
|
||||||
|
if !types.Identical(T, U) {
|
||||||
|
if types.AssignableTo(U, T) {
|
||||||
|
to = append(to, U)
|
||||||
|
}
|
||||||
|
if types.AssignableTo(T, U) {
|
||||||
|
from = append(from, U)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// T interface, U concrete
|
||||||
|
if types.AssignableTo(U, T) {
|
||||||
|
to = append(to, U)
|
||||||
|
} else if pU := types.NewPointer(U); types.AssignableTo(pU, T) {
|
||||||
|
to = append(to, pU)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if isInterface(U) {
|
||||||
|
if msets.MethodSet(U).Len() == 0 {
|
||||||
|
continue // empty interface
|
||||||
|
}
|
||||||
|
|
||||||
|
// T concrete, U interface
|
||||||
|
if types.AssignableTo(T, U) {
|
||||||
|
from = append(from, U)
|
||||||
|
} else if pT := types.NewPointer(T); types.AssignableTo(pT, U) {
|
||||||
|
fromPtr = append(fromPtr, U)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var pos interface{} = qpos
|
||||||
|
if nt, ok := deref(T).(*types.Named); ok {
|
||||||
|
pos = nt.Obj()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort types (arbitrarily) to ensure test determinism.
|
||||||
|
sort.Sort(typesByString(to))
|
||||||
|
sort.Sort(typesByString(from))
|
||||||
|
sort.Sort(typesByString(fromPtr))
|
||||||
|
|
||||||
|
var toMethod, fromMethod, fromPtrMethod []*types.Selection // contain nils
|
||||||
|
if method != nil {
|
||||||
|
for _, t := range to {
|
||||||
|
toMethod = append(toMethod,
|
||||||
|
types.NewMethodSet(t).Lookup(method.Pkg(), method.Name()))
|
||||||
|
}
|
||||||
|
for _, t := range from {
|
||||||
|
fromMethod = append(fromMethod,
|
||||||
|
types.NewMethodSet(t).Lookup(method.Pkg(), method.Name()))
|
||||||
|
}
|
||||||
|
for _, t := range fromPtr {
|
||||||
|
fromPtrMethod = append(fromPtrMethod,
|
||||||
|
types.NewMethodSet(t).Lookup(method.Pkg(), method.Name()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
q.result = &implementsResult{
|
||||||
|
qpos, T, pos, to, from, fromPtr, method, toMethod, fromMethod, fromPtrMethod,
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type implementsResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
|
||||||
|
t types.Type // queried type (not necessarily named)
|
||||||
|
pos interface{} // pos of t (*types.Name or *QueryPos)
|
||||||
|
to []types.Type // named or ptr-to-named types assignable to interface T
|
||||||
|
from []types.Type // named interfaces assignable from T
|
||||||
|
fromPtr []types.Type // named interfaces assignable only from *T
|
||||||
|
|
||||||
|
// if a method was queried:
|
||||||
|
method *types.Func // queried method
|
||||||
|
toMethod []*types.Selection // method of type to[i], if any
|
||||||
|
fromMethod []*types.Selection // method of type from[i], if any
|
||||||
|
fromPtrMethod []*types.Selection // method of type fromPtrMethod[i], if any
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *implementsResult) display(printf printfFunc) {
|
||||||
|
relation := "is implemented by"
|
||||||
|
|
||||||
|
meth := func(sel *types.Selection) {
|
||||||
|
if sel != nil {
|
||||||
|
printf(sel.Obj(), "\t%s method (%s).%s",
|
||||||
|
relation, r.qpos.typeString(sel.Recv()), sel.Obj().Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if isInterface(r.t) {
|
||||||
|
if types.NewMethodSet(r.t).Len() == 0 { // TODO(adonovan): cache mset
|
||||||
|
printf(r.pos, "empty interface type %s", r.qpos.typeString(r.t))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.method == nil {
|
||||||
|
printf(r.pos, "interface type %s", r.qpos.typeString(r.t))
|
||||||
|
} else {
|
||||||
|
printf(r.method, "abstract method %s", r.qpos.objectString(r.method))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show concrete types (or methods) first; use two passes.
|
||||||
|
for i, sub := range r.to {
|
||||||
|
if !isInterface(sub) {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(deref(sub).(*types.Named).Obj(), "\t%s %s type %s",
|
||||||
|
relation, typeKind(sub), r.qpos.typeString(sub))
|
||||||
|
} else {
|
||||||
|
meth(r.toMethod[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for i, sub := range r.to {
|
||||||
|
if isInterface(sub) {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(sub.(*types.Named).Obj(), "\t%s %s type %s",
|
||||||
|
relation, typeKind(sub), r.qpos.typeString(sub))
|
||||||
|
} else {
|
||||||
|
meth(r.toMethod[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
relation = "implements"
|
||||||
|
for i, super := range r.from {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(super.(*types.Named).Obj(), "\t%s %s",
|
||||||
|
relation, r.qpos.typeString(super))
|
||||||
|
} else {
|
||||||
|
meth(r.fromMethod[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
relation = "implements"
|
||||||
|
|
||||||
|
if r.from != nil {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(r.pos, "%s type %s",
|
||||||
|
typeKind(r.t), r.qpos.typeString(r.t))
|
||||||
|
} else {
|
||||||
|
printf(r.method, "concrete method %s",
|
||||||
|
r.qpos.objectString(r.method))
|
||||||
|
}
|
||||||
|
for i, super := range r.from {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(super.(*types.Named).Obj(), "\t%s %s",
|
||||||
|
relation, r.qpos.typeString(super))
|
||||||
|
} else {
|
||||||
|
meth(r.fromMethod[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if r.fromPtr != nil {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(r.pos, "pointer type *%s", r.qpos.typeString(r.t))
|
||||||
|
} else {
|
||||||
|
// TODO(adonovan): de-dup (C).f and (*C).f implementing (I).f.
|
||||||
|
printf(r.method, "concrete method %s",
|
||||||
|
r.qpos.objectString(r.method))
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, psuper := range r.fromPtr {
|
||||||
|
if r.method == nil {
|
||||||
|
printf(psuper.(*types.Named).Obj(), "\t%s %s",
|
||||||
|
relation, r.qpos.typeString(psuper))
|
||||||
|
} else {
|
||||||
|
meth(r.fromPtrMethod[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if r.from == nil {
|
||||||
|
printf(r.pos, "%s type %s implements only interface{}",
|
||||||
|
typeKind(r.t), r.qpos.typeString(r.t))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *implementsResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
res.Implements = &serial.Implements{
|
||||||
|
T: makeImplementsType(r.t, fset),
|
||||||
|
AssignableTo: makeImplementsTypes(r.to, fset),
|
||||||
|
AssignableFrom: makeImplementsTypes(r.from, fset),
|
||||||
|
AssignableFromPtr: makeImplementsTypes(r.fromPtr, fset),
|
||||||
|
AssignableToMethod: methodsToSerial(r.qpos.info.Pkg, r.toMethod, fset),
|
||||||
|
AssignableFromMethod: methodsToSerial(r.qpos.info.Pkg, r.fromMethod, fset),
|
||||||
|
AssignableFromPtrMethod: methodsToSerial(r.qpos.info.Pkg, r.fromPtrMethod, fset),
|
||||||
|
}
|
||||||
|
if r.method != nil {
|
||||||
|
res.Implements.Method = &serial.DescribeMethod{
|
||||||
|
Name: r.qpos.objectString(r.method),
|
||||||
|
Pos: fset.Position(r.method.Pos()).String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func makeImplementsTypes(tt []types.Type, fset *token.FileSet) []serial.ImplementsType {
|
||||||
|
var r []serial.ImplementsType
|
||||||
|
for _, t := range tt {
|
||||||
|
r = append(r, makeImplementsType(t, fset))
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func makeImplementsType(T types.Type, fset *token.FileSet) serial.ImplementsType {
|
||||||
|
var pos token.Pos
|
||||||
|
if nt, ok := deref(T).(*types.Named); ok { // implementsResult.t may be non-named
|
||||||
|
pos = nt.Obj().Pos()
|
||||||
|
}
|
||||||
|
return serial.ImplementsType{
|
||||||
|
Name: T.String(),
|
||||||
|
Pos: fset.Position(pos).String(),
|
||||||
|
Kind: typeKind(T),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// typeKind returns a string describing the underlying kind of type,
// e.g. "slice", "array", "struct".
func typeKind(T types.Type) string {
	s := reflect.TypeOf(T.Underlying()).String()
	return strings.ToLower(strings.TrimPrefix(s, "*types."))
}

func isInterface(T types.Type) bool { return types.IsInterface(T) }

type typesByString []types.Type

func (p typesByString) Len() int           { return len(p) }
func (p typesByString) Less(i, j int) bool { return p[i].String() < p[j].String() }
func (p typesByString) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
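// Illustrative example (hypothetical type): for a named type whose underlying
// type is a slice, reflect.TypeOf(T.Underlying()).String() is "*types.Slice",
// so typeKind returns "slice". typesByString exists only to give the
// to/from/fromPtr result lists a deterministic order.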
|
|
@ -0,0 +1,381 @@
|
||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.5
|
||||||
|
|
||||||
|
// Package oracle contains the implementation of the oracle tool whose
|
||||||
|
// command-line is provided by golang.org/x/tools/cmd/oracle.
|
||||||
|
//
|
||||||
|
// http://golang.org/s/oracle-design
|
||||||
|
// http://golang.org/s/oracle-user-manual
|
||||||
|
//
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
// This file defines oracle.Query, the entry point for the oracle tool.
|
||||||
|
// The actual executable is defined in cmd/oracle.
|
||||||
|
|
||||||
|
// TODO(adonovan): new queries
|
||||||
|
// - show all statements that may update the selected lvalue
|
||||||
|
// (local, global, field, etc).
|
||||||
|
// - show all places where an object of type T is created
|
||||||
|
// (&T{}, var t T, new(T), new(struct{array [3]T}), etc.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/buildutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/pointer"
|
||||||
|
"golang.org/x/tools/go/ssa"
|
||||||
|
)
|
||||||
|
|
||||||
|
type printfFunc func(pos interface{}, format string, args ...interface{})
|
||||||
|
|
||||||
|
// queryResult is the interface of each query-specific result type.
|
||||||
|
type queryResult interface {
|
||||||
|
toSerial(res *serial.Result, fset *token.FileSet)
|
||||||
|
display(printf printfFunc)
|
||||||
|
}
|
||||||
|
|
||||||
|
// A QueryPos represents the position provided as input to a query:
|
||||||
|
// a textual extent in the program's source code, the AST node it
|
||||||
|
// corresponds to, and the package to which it belongs.
|
||||||
|
// Instances are created by parseQueryPos.
|
||||||
|
type queryPos struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
start, end token.Pos // source extent of query
|
||||||
|
path []ast.Node // AST path from query node to root of ast.File
|
||||||
|
exact bool // 2nd result of PathEnclosingInterval
|
||||||
|
info *loader.PackageInfo // type info for the queried package (nil for fastQueryPos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TypeString prints type T relative to the query position.
|
||||||
|
func (qpos *queryPos) typeString(T types.Type) string {
|
||||||
|
return types.TypeString(T, types.RelativeTo(qpos.info.Pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ObjectString prints object obj relative to the query position.
|
||||||
|
func (qpos *queryPos) objectString(obj types.Object) string {
|
||||||
|
return types.ObjectString(obj, types.RelativeTo(qpos.info.Pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SelectionString prints selection sel relative to the query position.
|
||||||
|
func (qpos *queryPos) selectionString(sel *types.Selection) string {
|
||||||
|
return types.SelectionString(sel, types.RelativeTo(qpos.info.Pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
// A Query specifies a single oracle query.
|
||||||
|
type Query struct {
|
||||||
|
Mode string // query mode ("callers", etc)
|
||||||
|
Pos string // query position
|
||||||
|
Build *build.Context // package loading configuration
|
||||||
|
|
||||||
|
// pointer analysis options
|
||||||
|
Scope []string // main packages in (*loader.Config).FromArgs syntax
|
||||||
|
PTALog io.Writer // (optional) pointer-analysis log file
|
||||||
|
Reflection bool // model reflection soundly (currently slow).
|
||||||
|
|
||||||
|
// Populated during Run()
|
||||||
|
Fset *token.FileSet
|
||||||
|
result queryResult
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serial returns an instance of serial.Result, which implements the
|
||||||
|
// {xml,json}.Marshaler interfaces so that query results can be
|
||||||
|
// serialized as JSON or XML.
|
||||||
|
//
|
||||||
|
func (q *Query) Serial() *serial.Result {
|
||||||
|
resj := &serial.Result{Mode: q.Mode}
|
||||||
|
q.result.toSerial(resj, q.Fset)
|
||||||
|
return resj
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTo writes the oracle query result res to out in a compiler diagnostic format.
|
||||||
|
func (q *Query) WriteTo(out io.Writer) {
|
||||||
|
printf := func(pos interface{}, format string, args ...interface{}) {
|
||||||
|
fprintf(out, q.Fset, pos, format, args...)
|
||||||
|
}
|
||||||
|
q.result.display(printf)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run runs an oracle query and populates its Fset and Result.
func Run(q *Query) error {
	switch q.Mode {
	case "callees":
		return callees(q)
	case "callers":
		return callers(q)
	case "callstack":
		return callstack(q)
	case "peers":
		return peers(q)
	case "pointsto":
		return pointsto(q)
	case "whicherrs":
		return whicherrs(q)
	case "definition":
		return definition(q)
	case "describe":
		return describe(q)
	case "freevars":
		return freevars(q)
	case "implements":
		return implements(q)
	case "referrers":
		return referrers(q)
	case "what":
		return what(q)
	default:
		return fmt.Errorf("invalid mode: %q", q.Mode)
	}
}
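// A minimal sketch of driving a query programmatically (illustrative only;
// the position string and output destinations below are hypothetical):
//
//	q := &Query{
//		Mode:  "describe",
//		Pos:   "src/mypkg/file.go:#1234", // file:#byte-offset form accepted by parsePosFlag
//		Build: &build.Default,
//	}
//	if err := Run(q); err != nil {
//		// handle the error
//	}
//	q.WriteTo(os.Stdout)                          // compiler-style plain text
//	json.NewEncoder(os.Stdout).Encode(q.Serial()) // or machine-readable JSON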
|
||||||
|
|
||||||
|
func setPTAScope(lconf *loader.Config, scope []string) error {
|
||||||
|
pkgs := buildutil.ExpandPatterns(lconf.Build, scope)
|
||||||
|
if len(pkgs) == 0 {
|
||||||
|
return fmt.Errorf("no packages specified for pointer analysis scope")
|
||||||
|
}
|
||||||
|
// The value of each entry in pkgs is true,
|
||||||
|
// giving ImportWithTests (not Import) semantics.
|
||||||
|
lconf.ImportPkgs = pkgs
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
//func setPTAScope(lconf *loader.Config, scope []string) error {
|
||||||
|
// if len(scope) == 0 {
|
||||||
|
// return fmt.Errorf("no packages specified for pointer analysis scope")
|
||||||
|
// }
|
||||||
|
|
||||||
|
// // Determine initial packages for PTA.
|
||||||
|
// args, err := lconf.FromArgs(scope, true)
|
||||||
|
// if err != nil {
|
||||||
|
// return err
|
||||||
|
// }
|
||||||
|
// if len(args) > 0 {
|
||||||
|
// return fmt.Errorf("surplus arguments: %q", args)
|
||||||
|
// }
|
||||||
|
// return nil
|
||||||
|
//}
|
||||||
|
|
||||||
|
// Create a pointer.Config whose scope is the initial packages of lprog
|
||||||
|
// and their dependencies.
|
||||||
|
func setupPTA(prog *ssa.Program, lprog *loader.Program, ptaLog io.Writer, reflection bool) (*pointer.Config, error) {
|
||||||
|
// TODO(adonovan): the body of this function is essentially
|
||||||
|
// duplicated in all go/pointer clients. Refactor.
|
||||||
|
|
||||||
|
// For each initial package (specified on the command line),
|
||||||
|
// if it has a main function, analyze that,
|
||||||
|
// otherwise analyze its tests, if any.
|
||||||
|
var testPkgs, mains []*ssa.Package
|
||||||
|
for _, info := range lprog.InitialPackages() {
|
||||||
|
initialPkg := prog.Package(info.Pkg)
|
||||||
|
|
||||||
|
// Add package to the pointer analysis scope.
|
||||||
|
if initialPkg.Func("main") != nil {
|
||||||
|
mains = append(mains, initialPkg)
|
||||||
|
} else {
|
||||||
|
testPkgs = append(testPkgs, initialPkg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if testPkgs != nil {
|
||||||
|
for _, testPkg := range testPkgs {
|
||||||
|
if p := prog.CreateTestMainPackage(testPkg); p != nil {
|
||||||
|
mains = append(mains, p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if mains == nil {
|
||||||
|
return nil, fmt.Errorf("analysis scope has no main and no tests")
|
||||||
|
}
|
||||||
|
return &pointer.Config{
|
||||||
|
Log: ptaLog,
|
||||||
|
Reflection: reflection,
|
||||||
|
Mains: mains,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importQueryPackage finds the package P containing the
|
||||||
|
// query position and tells conf to import it.
|
||||||
|
// It returns the package's path.
|
||||||
|
func importQueryPackage(pos string, conf *loader.Config) (string, error) {
|
||||||
|
fqpos, err := fastQueryPos(pos)
|
||||||
|
if err != nil {
|
||||||
|
return "", err // bad query
|
||||||
|
}
|
||||||
|
filename := fqpos.fset.File(fqpos.start).Name()
|
||||||
|
|
||||||
|
// This will not work for ad-hoc packages
|
||||||
|
// such as $GOROOT/src/net/http/triv.go.
|
||||||
|
// TODO(adonovan): ensure we report a clear error.
|
||||||
|
_, importPath, err := guessImportPath(filename, conf.Build)
|
||||||
|
if err != nil {
|
||||||
|
return "", err // can't find GOPATH dir
|
||||||
|
}
|
||||||
|
if importPath == "" {
|
||||||
|
return "", fmt.Errorf("can't guess import path from %s", filename)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that it's possible to load the queried package.
|
||||||
|
// (e.g. oracle tests contain different 'package' decls in same dir.)
|
||||||
|
// Keep consistent with logic in loader/util.go!
|
||||||
|
cfg2 := *conf.Build
|
||||||
|
cfg2.CgoEnabled = false
|
||||||
|
bp, err := cfg2.Import(importPath, "", 0)
|
||||||
|
if err != nil {
|
||||||
|
return "", err // no files for package
|
||||||
|
}
|
||||||
|
|
||||||
|
switch pkgContainsFile(bp, filename) {
|
||||||
|
case 'T':
|
||||||
|
conf.ImportWithTests(importPath)
|
||||||
|
case 'X':
|
||||||
|
conf.ImportWithTests(importPath)
|
||||||
|
importPath += "_test" // for TypeCheckFuncBodies
|
||||||
|
case 'G':
|
||||||
|
conf.Import(importPath)
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("package %q doesn't contain file %s",
|
||||||
|
importPath, filename)
|
||||||
|
}
|
||||||
|
|
||||||
|
conf.TypeCheckFuncBodies = func(p string) bool { return p == importPath }
|
||||||
|
|
||||||
|
return importPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// pkgContainsFile reports whether file was among the packages Go
|
||||||
|
// files, Test files, eXternal test files, or not found.
|
||||||
|
func pkgContainsFile(bp *build.Package, filename string) byte {
|
||||||
|
for i, files := range [][]string{bp.GoFiles, bp.TestGoFiles, bp.XTestGoFiles} {
|
||||||
|
for _, file := range files {
|
||||||
|
if sameFile(filepath.Join(bp.Dir, file), filename) {
|
||||||
|
return "GTX"[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0 // not found
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseQueryPos parses the source query position pos and returns the
|
||||||
|
// AST node of the loaded program lprog that it identifies.
|
||||||
|
// If needExact, it must identify a single AST subtree;
|
||||||
|
// this is appropriate for queries that allow fairly arbitrary syntax,
|
||||||
|
// e.g. "describe".
|
||||||
|
//
|
||||||
|
func parseQueryPos(lprog *loader.Program, posFlag string, needExact bool) (*queryPos, error) {
|
||||||
|
filename, startOffset, endOffset, err := parsePosFlag(posFlag)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
start, end, err := findQueryPos(lprog.Fset, filename, startOffset, endOffset)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
info, path, exact := lprog.PathEnclosingInterval(start, end)
|
||||||
|
if path == nil {
|
||||||
|
return nil, fmt.Errorf("no syntax here")
|
||||||
|
}
|
||||||
|
if needExact && !exact {
|
||||||
|
return nil, fmt.Errorf("ambiguous selection within %s", astutil.NodeDescription(path[0]))
|
||||||
|
}
|
||||||
|
return &queryPos{lprog.Fset, start, end, path, exact, info}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------- Utilities ----------
|
||||||
|
|
||||||
|
// allowErrors causes type errors to be silently ignored.
|
||||||
|
// (Not suitable if SSA construction follows.)
|
||||||
|
func allowErrors(lconf *loader.Config) {
|
||||||
|
ctxt := *lconf.Build // copy
|
||||||
|
ctxt.CgoEnabled = false
|
||||||
|
lconf.Build = &ctxt
|
||||||
|
lconf.AllowErrors = true
|
||||||
|
// AllErrors makes the parser always return an AST instead of
|
||||||
|
// bailing out after 10 errors and returning an empty ast.File.
|
||||||
|
lconf.ParserMode = parser.AllErrors
|
||||||
|
lconf.TypeChecker.Error = func(err error) {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ptrAnalysis runs the pointer analysis and returns its result.
|
||||||
|
func ptrAnalysis(conf *pointer.Config) *pointer.Result {
|
||||||
|
result, err := pointer.Analyze(conf)
|
||||||
|
if err != nil {
|
||||||
|
panic(err) // pointer analysis internal error
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }

// deref returns a pointer's element type; otherwise it returns typ.
func deref(typ types.Type) types.Type {
	if p, ok := typ.Underlying().(*types.Pointer); ok {
		return p.Elem()
	}
	return typ
}
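// Illustrative example: deref applied to a *T pointer type yields T, while a
// non-pointer type is returned unchanged; implements relies on this to find
// the named type behind pointer receivers.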
|
||||||
|
|
||||||
|
// fprintf prints to w a message of the form "location: message\n"
|
||||||
|
// where location is derived from pos.
|
||||||
|
//
|
||||||
|
// pos must be one of:
|
||||||
|
// - a token.Pos, denoting a position
|
||||||
|
// - an ast.Node, denoting an interval
|
||||||
|
// - anything with a Pos() method:
|
||||||
|
// ssa.Member, ssa.Value, ssa.Instruction, types.Object, pointer.Label, etc.
|
||||||
|
// - a QueryPos, denoting the extent of the user's query.
|
||||||
|
// - nil, meaning no position at all.
|
||||||
|
//
|
||||||
|
// The output format is is compatible with the 'gnu'
|
||||||
|
// compilation-error-regexp in Emacs' compilation mode.
|
||||||
|
// TODO(adonovan): support other editors.
|
||||||
|
//
|
||||||
|
func fprintf(w io.Writer, fset *token.FileSet, pos interface{}, format string, args ...interface{}) {
|
||||||
|
var start, end token.Pos
|
||||||
|
switch pos := pos.(type) {
|
||||||
|
case ast.Node:
|
||||||
|
start = pos.Pos()
|
||||||
|
end = pos.End()
|
||||||
|
case token.Pos:
|
||||||
|
start = pos
|
||||||
|
end = start
|
||||||
|
case interface {
|
||||||
|
Pos() token.Pos
|
||||||
|
}:
|
||||||
|
start = pos.Pos()
|
||||||
|
end = start
|
||||||
|
case *queryPos:
|
||||||
|
start = pos.start
|
||||||
|
end = pos.end
|
||||||
|
case nil:
|
||||||
|
// no-op
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("invalid pos: %T", pos))
|
||||||
|
}
|
||||||
|
|
||||||
|
if sp := fset.Position(start); start == end {
|
||||||
|
// (prints "-: " for token.NoPos)
|
||||||
|
fmt.Fprintf(w, "%s: ", sp)
|
||||||
|
} else {
|
||||||
|
ep := fset.Position(end)
|
||||||
|
// The -1 below is a concession to Emacs's broken use of
|
||||||
|
// inclusive (not half-open) intervals.
|
||||||
|
// Other editors may not want it.
|
||||||
|
// TODO(adonovan): add an -editor=vim|emacs|acme|auto
|
||||||
|
// flag; auto uses EMACS=t / VIM=... / etc env vars.
|
||||||
|
fmt.Fprintf(w, "%s:%d.%d-%d.%d: ",
|
||||||
|
sp.Filename, sp.Line, sp.Column, ep.Line, ep.Column-1)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, format, args...)
|
||||||
|
io.WriteString(w, "\n")
|
||||||
|
}
|
|
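Editor's note: an illustrative sketch, not part of the vendored code, showing how go/token's Position produces the "file:line:column" prefix that fprintf builds its gnu-style location from.

// sketch: token.Position yields the location string used as a message prefix.
package main

import (
	"fmt"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	f := fset.AddFile("example.go", -1, 100) // register a hypothetical 100-byte file
	f.SetLinesForContent(make([]byte, 100))  // no newlines: everything is on line 1
	pos := f.Pos(41)                         // byte offset 41 -> column 42
	fmt.Printf("%s: something interesting\n", fset.Position(pos))
	// Output: example.go:1:42: something interesting
}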
@ -0,0 +1,254 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.5

package oracle

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"sort"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// peers enumerates, for a given channel send (or receive) operation,
// the set of possible receives (or sends) that correspond to it.
//
// TODO(adonovan): support reflect.{Select,Recv,Send,Close}.
// TODO(adonovan): permit the user to query based on a MakeChan (not send/recv),
// or the implicit receive in "for v := range ch".
func peers(q *Query) error {
	lconf := loader.Config{Build: q.Build}

	if err := setPTAScope(&lconf, q.Scope); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}
	q.Fset = lprog.Fset

	qpos, err := parseQueryPos(lprog, q.Pos, false)
	if err != nil {
		return err
	}

	prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)

	ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
	if err != nil {
		return err
	}

	opPos := findOp(qpos)
	if opPos == token.NoPos {
		return fmt.Errorf("there is no channel operation here")
	}

	// Defer SSA construction till after errors are reported.
	prog.Build()

	var queryOp chanOp // the originating send or receive operation
	var ops []chanOp   // all sends/receives of opposite direction

	// Look at all channel operations in the whole ssa.Program.
	// Build a list of those of same type as the query.
	allFuncs := ssautil.AllFunctions(prog)
	for fn := range allFuncs {
		for _, b := range fn.Blocks {
			for _, instr := range b.Instrs {
				for _, op := range chanOps(instr) {
					ops = append(ops, op)
					if op.pos == opPos {
						queryOp = op // we found the query op
					}
				}
			}
		}
	}
	if queryOp.ch == nil {
		return fmt.Errorf("ssa.Instruction for send/receive not found")
	}

	// Discard operations of wrong channel element type.
	// Build set of channel ssa.Values as query to pointer analysis.
	// We compare channels by element types, not channel types, to
	// ignore both directionality and type names.
	queryType := queryOp.ch.Type()
	queryElemType := queryType.Underlying().(*types.Chan).Elem()
	ptaConfig.AddQuery(queryOp.ch)
	i := 0
	for _, op := range ops {
		if types.Identical(op.ch.Type().Underlying().(*types.Chan).Elem(), queryElemType) {
			ptaConfig.AddQuery(op.ch)
			ops[i] = op
			i++
		}
	}
	ops = ops[:i]

	// Run the pointer analysis.
	ptares := ptrAnalysis(ptaConfig)

	// Find the points-to set.
	queryChanPtr := ptares.Queries[queryOp.ch]

	// Ascertain which make(chan) labels the query's channel can alias.
	var makes []token.Pos
	for _, label := range queryChanPtr.PointsTo().Labels() {
		makes = append(makes, label.Pos())
	}
	sort.Sort(byPos(makes))

	// Ascertain which channel operations can alias the same make(chan) labels.
	var sends, receives, closes []token.Pos
	for _, op := range ops {
		if ptr, ok := ptares.Queries[op.ch]; ok && ptr.MayAlias(queryChanPtr) {
			switch op.dir {
			case types.SendOnly:
				sends = append(sends, op.pos)
			case types.RecvOnly:
				receives = append(receives, op.pos)
			case types.SendRecv:
				closes = append(closes, op.pos)
			}
		}
	}
	sort.Sort(byPos(sends))
	sort.Sort(byPos(receives))
	sort.Sort(byPos(closes))

	q.result = &peersResult{
		queryPos:  opPos,
		queryType: queryType,
		makes:     makes,
		sends:     sends,
		receives:  receives,
		closes:    closes,
	}
	return nil
}

// findOp returns the position of the enclosing send/receive/close op.
// For send and receive operations, this is the position of the <- token;
// for close operations, it's the Lparen of the function call.
//
// TODO(adonovan): handle implicit receive operations from 'for...range chan' statements.
func findOp(qpos *queryPos) token.Pos {
	for _, n := range qpos.path {
		switch n := n.(type) {
		case *ast.UnaryExpr:
			if n.Op == token.ARROW {
				return n.OpPos
			}
		case *ast.SendStmt:
			return n.Arrow
		case *ast.CallExpr:
			// close function call can only exist as a direct identifier
			if close, ok := unparen(n.Fun).(*ast.Ident); ok {
				if b, ok := qpos.info.Info.Uses[close].(*types.Builtin); ok && b.Name() == "close" {
					return n.Lparen
				}
			}
		}
	}
	return token.NoPos
}

// chanOp abstracts an ssa.Send, ssa.Unop(ARROW), or a SelectState.
type chanOp struct {
	ch  ssa.Value
	dir types.ChanDir // SendOnly=send, RecvOnly=recv, SendRecv=close
	pos token.Pos
}

// chanOps returns a slice of all the channel operations in the instruction.
func chanOps(instr ssa.Instruction) []chanOp {
	// TODO(adonovan): handle calls to reflect.{Select,Recv,Send,Close} too.
	var ops []chanOp
	switch instr := instr.(type) {
	case *ssa.UnOp:
		if instr.Op == token.ARROW {
			ops = append(ops, chanOp{instr.X, types.RecvOnly, instr.Pos()})
		}
	case *ssa.Send:
		ops = append(ops, chanOp{instr.Chan, types.SendOnly, instr.Pos()})
	case *ssa.Select:
		for _, st := range instr.States {
			ops = append(ops, chanOp{st.Chan, st.Dir, st.Pos})
		}
	case ssa.CallInstruction:
		cc := instr.Common()
		if b, ok := cc.Value.(*ssa.Builtin); ok && b.Name() == "close" {
			ops = append(ops, chanOp{cc.Args[0], types.SendRecv, cc.Pos()})
		}
	}
	return ops
}

type peersResult struct {
	queryPos                       token.Pos   // of queried channel op
	queryType                      types.Type  // type of queried channel
	makes, sends, receives, closes []token.Pos // positions of aliased makechan/send/receive/close instrs
}

func (r *peersResult) display(printf printfFunc) {
	if len(r.makes) == 0 {
		printf(r.queryPos, "This channel can't point to anything.")
		return
	}
	printf(r.queryPos, "This channel of type %s may be:", r.queryType)
	for _, alloc := range r.makes {
		printf(alloc, "\tallocated here")
	}
	for _, send := range r.sends {
		printf(send, "\tsent to, here")
	}
	for _, receive := range r.receives {
		printf(receive, "\treceived from, here")
	}
	for _, clos := range r.closes {
		printf(clos, "\tclosed, here")
	}
}

func (r *peersResult) toSerial(res *serial.Result, fset *token.FileSet) {
	peers := &serial.Peers{
		Pos:  fset.Position(r.queryPos).String(),
		Type: r.queryType.String(),
	}
	for _, alloc := range r.makes {
		peers.Allocs = append(peers.Allocs, fset.Position(alloc).String())
	}
	for _, send := range r.sends {
		peers.Sends = append(peers.Sends, fset.Position(send).String())
	}
	for _, receive := range r.receives {
		peers.Receives = append(peers.Receives, fset.Position(receive).String())
	}
	for _, clos := range r.closes {
		peers.Closes = append(peers.Closes, fset.Position(clos).String())
	}
	res.Peers = peers
}

// -------- utils --------

// NB: byPos is not deterministic across packages since it depends on load order.
// Use lessPos if the tests need it.
type byPos []token.Pos

func (p byPos) Len() int           { return len(p) }
func (p byPos) Less(i, j int) bool { return p[i] < p[j] }
func (p byPos) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
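Editor's note: an illustrative sketch, not part of the vendored code, showing the direction convention chanOp reuses from go/types, where SendRecv stands in for a close(ch) operation.

// sketch: mapping types.ChanDir values to the channel operations they label.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	for _, dir := range []types.ChanDir{types.SendOnly, types.RecvOnly, types.SendRecv} {
		switch dir {
		case types.SendOnly:
			fmt.Println("send:    ch <- x")
		case types.RecvOnly:
			fmt.Println("receive: <-ch")
		case types.SendRecv:
			fmt.Println("close:   close(ch)")
		}
	}
}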
@ -0,0 +1,293 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.5

package oracle

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"sort"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/ast/astutil"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/pointer"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// pointsto runs the pointer analysis on the selected expression,
// and reports its points-to set (for a pointer-like expression)
// or its dynamic types (for an interface, reflect.Value, or
// reflect.Type expression) and their points-to sets.
//
// All printed sets are sorted to ensure determinism.
//
func pointsto(q *Query) error {
	lconf := loader.Config{Build: q.Build}

	if err := setPTAScope(&lconf, q.Scope); err != nil {
		return err
	}

	// Load/parse/type-check the program.
	lprog, err := lconf.Load()
	if err != nil {
		return err
	}
	q.Fset = lprog.Fset

	qpos, err := parseQueryPos(lprog, q.Pos, true) // needs exact pos
	if err != nil {
		return err
	}

	prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)

	ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
	if err != nil {
		return err
	}

	path, action := findInterestingNode(qpos.info, qpos.path)
	if action != actionExpr {
		return fmt.Errorf("pointer analysis wants an expression; got %s",
			astutil.NodeDescription(qpos.path[0]))
	}

	var expr ast.Expr
	var obj types.Object
	switch n := path[0].(type) {
	case *ast.ValueSpec:
		// ambiguous ValueSpec containing multiple names
		return fmt.Errorf("multiple value specification")
	case *ast.Ident:
		obj = qpos.info.ObjectOf(n)
		expr = n
	case ast.Expr:
		expr = n
	default:
		// TODO(adonovan): is this reachable?
		return fmt.Errorf("unexpected AST for expr: %T", n)
	}

	// Reject non-pointerlike types (includes all constants---except nil).
	// TODO(adonovan): reject nil too.
	typ := qpos.info.TypeOf(expr)
	if !pointer.CanPoint(typ) {
		return fmt.Errorf("pointer analysis wants an expression of reference type; got %s", typ)
	}

	// Determine the ssa.Value for the expression.
	var value ssa.Value
	var isAddr bool
	if obj != nil {
		// def/ref of func/var object
		value, isAddr, err = ssaValueForIdent(prog, qpos.info, obj, path)
	} else {
		value, isAddr, err = ssaValueForExpr(prog, qpos.info, path)
	}
	if err != nil {
		return err // e.g. trivially dead code
	}

	// Defer SSA construction till after errors are reported.
	prog.Build()

	// Run the pointer analysis.
	ptrs, err := runPTA(ptaConfig, value, isAddr)
	if err != nil {
		return err // e.g. analytically unreachable
	}

	q.result = &pointstoResult{
		qpos: qpos,
		typ:  typ,
		ptrs: ptrs,
	}
	return nil
}

// ssaValueForIdent returns the ssa.Value for the ast.Ident whose path
// to the root of the AST is path. isAddr reports whether the
// ssa.Value is the address denoted by the ast.Ident, not its value.
//
func ssaValueForIdent(prog *ssa.Program, qinfo *loader.PackageInfo, obj types.Object, path []ast.Node) (value ssa.Value, isAddr bool, err error) {
	switch obj := obj.(type) {
	case *types.Var:
		pkg := prog.Package(qinfo.Pkg)
		pkg.Build()
		if v, addr := prog.VarValue(obj, pkg, path); v != nil {
			return v, addr, nil
		}
		return nil, false, fmt.Errorf("can't locate SSA Value for var %s", obj.Name())

	case *types.Func:
		fn := prog.FuncValue(obj)
		if fn == nil {
			return nil, false, fmt.Errorf("%s is an interface method", obj)
		}
		// TODO(adonovan): there's no point running PTA on a *Func ident.
		// Eliminate this feature.
		return fn, false, nil
	}
	panic(obj)
}

// ssaValueForExpr returns the ssa.Value of the non-ast.Ident
// expression whose path to the root of the AST is path.
//
func ssaValueForExpr(prog *ssa.Program, qinfo *loader.PackageInfo, path []ast.Node) (value ssa.Value, isAddr bool, err error) {
	pkg := prog.Package(qinfo.Pkg)
	pkg.SetDebugMode(true)
	pkg.Build()

	fn := ssa.EnclosingFunction(pkg, path)
	if fn == nil {
		return nil, false, fmt.Errorf("no SSA function built for this location (dead code?)")
	}

	if v, addr := fn.ValueForExpr(path[0].(ast.Expr)); v != nil {
		return v, addr, nil
	}

	return nil, false, fmt.Errorf("can't locate SSA Value for expression in %s", fn)
}

// runPTA runs the pointer analysis of the selected SSA value or address.
func runPTA(conf *pointer.Config, v ssa.Value, isAddr bool) (ptrs []pointerResult, err error) {
	T := v.Type()
	if isAddr {
		conf.AddIndirectQuery(v)
		T = deref(T)
	} else {
		conf.AddQuery(v)
	}
	ptares := ptrAnalysis(conf)

	var ptr pointer.Pointer
	if isAddr {
		ptr = ptares.IndirectQueries[v]
	} else {
		ptr = ptares.Queries[v]
	}
	if ptr == (pointer.Pointer{}) {
		return nil, fmt.Errorf("pointer analysis did not find expression (dead code?)")
	}
	pts := ptr.PointsTo()

	if pointer.CanHaveDynamicTypes(T) {
		// Show concrete types for interface/reflect.Value expression.
		if concs := pts.DynamicTypes(); concs.Len() > 0 {
			concs.Iterate(func(conc types.Type, pta interface{}) {
				labels := pta.(pointer.PointsToSet).Labels()
				sort.Sort(byPosAndString(labels)) // to ensure determinism
				ptrs = append(ptrs, pointerResult{conc, labels})
			})
		}
	} else {
		// Show labels for other expressions.
		labels := pts.Labels()
		sort.Sort(byPosAndString(labels)) // to ensure determinism
		ptrs = append(ptrs, pointerResult{T, labels})
	}
	sort.Sort(byTypeString(ptrs)) // to ensure determinism
	return ptrs, nil
}

type pointerResult struct {
	typ    types.Type       // type of the pointer (always concrete)
	labels []*pointer.Label // set of labels
}

type pointstoResult struct {
	qpos *queryPos
	typ  types.Type      // type of expression
	ptrs []pointerResult // pointer info (typ is concrete => len==1)
}

func (r *pointstoResult) display(printf printfFunc) {
	if pointer.CanHaveDynamicTypes(r.typ) {
		// Show concrete types for interface, reflect.Type or
		// reflect.Value expression.

		if len(r.ptrs) > 0 {
			printf(r.qpos, "this %s may contain these dynamic types:", r.qpos.typeString(r.typ))
			for _, ptr := range r.ptrs {
				var obj types.Object
				if nt, ok := deref(ptr.typ).(*types.Named); ok {
					obj = nt.Obj()
				}
				if len(ptr.labels) > 0 {
					printf(obj, "\t%s, may point to:", r.qpos.typeString(ptr.typ))
					printLabels(printf, ptr.labels, "\t\t")
				} else {
					printf(obj, "\t%s", r.qpos.typeString(ptr.typ))
				}
			}
		} else {
			printf(r.qpos, "this %s cannot contain any dynamic types.", r.typ)
		}
	} else {
		// Show labels for other expressions.
		if ptr := r.ptrs[0]; len(ptr.labels) > 0 {
			printf(r.qpos, "this %s may point to these objects:",
				r.qpos.typeString(r.typ))
			printLabels(printf, ptr.labels, "\t")
		} else {
			printf(r.qpos, "this %s may not point to anything.",
				r.qpos.typeString(r.typ))
		}
	}
}

func (r *pointstoResult) toSerial(res *serial.Result, fset *token.FileSet) {
	var pts []serial.PointsTo
	for _, ptr := range r.ptrs {
		var namePos string
		if nt, ok := deref(ptr.typ).(*types.Named); ok {
			namePos = fset.Position(nt.Obj().Pos()).String()
		}
		var labels []serial.PointsToLabel
		for _, l := range ptr.labels {
			labels = append(labels, serial.PointsToLabel{
				Pos:  fset.Position(l.Pos()).String(),
				Desc: l.String(),
			})
		}
		pts = append(pts, serial.PointsTo{
			Type:    r.qpos.typeString(ptr.typ),
			NamePos: namePos,
			Labels:  labels,
		})
	}
	res.PointsTo = pts
}

type byTypeString []pointerResult

func (a byTypeString) Len() int           { return len(a) }
func (a byTypeString) Less(i, j int) bool { return a[i].typ.String() < a[j].typ.String() }
func (a byTypeString) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }

type byPosAndString []*pointer.Label

func (a byPosAndString) Len() int { return len(a) }
func (a byPosAndString) Less(i, j int) bool {
	cmp := a[i].Pos() - a[j].Pos()
	return cmp < 0 || (cmp == 0 && a[i].String() < a[j].String())
}
func (a byPosAndString) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

func printLabels(printf printfFunc, labels []*pointer.Label, prefix string) {
	// TODO(adonovan): due to context-sensitivity, many of these
	// labels may differ only by context, which isn't apparent.
	for _, label := range labels {
		printf(label, "%s%s", prefix, label)
	}
}
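Editor's note: an illustrative sketch, not part of the vendored code, of the sort.Interface pattern that byTypeString and byPosAndString follow so that query output is deterministic; the byLen type here is invented for the example.

// sketch: a minimal sort.Interface implementation in the style used above.
package main

import (
	"fmt"
	"sort"
)

type byLen []string

func (a byLen) Len() int           { return len(a) }
func (a byLen) Less(i, j int) bool { return len(a[i]) < len(a[j]) }
func (a byLen) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }

func main() {
	labels := []string{"interface", "ptr", "slice"}
	sort.Sort(byLen(labels))
	fmt.Println(labels) // [ptr slice interface]
}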
@ -0,0 +1,143 @@
package oracle

// This file defines utilities for working with file positions.

import (
	"fmt"
	"go/parser"
	"go/token"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

// parseOctothorpDecimal returns the numeric value if s matches "#%d",
// otherwise -1.
func parseOctothorpDecimal(s string) int {
	if s != "" && s[0] == '#' {
		if s, err := strconv.ParseInt(s[1:], 10, 32); err == nil {
			return int(s)
		}
	}
	return -1
}

// parsePosFlag parses a string of the form "file:pos" or
// "file:start,end" where pos, start, end match #%d and represent byte
// offsets, and returns its components.
//
// (Numbers without a '#' prefix are reserved for future use,
// e.g. to indicate line/column positions.)
//
func parsePosFlag(posFlag string) (filename string, startOffset, endOffset int, err error) {
	if posFlag == "" {
		err = fmt.Errorf("no source position specified (-pos flag)")
		return
	}

	colon := strings.LastIndex(posFlag, ":")
	if colon < 0 {
		err = fmt.Errorf("invalid source position -pos=%q", posFlag)
		return
	}
	filename, offset := posFlag[:colon], posFlag[colon+1:]
	startOffset = -1
	endOffset = -1
	if hyphen := strings.Index(offset, ","); hyphen < 0 {
		// e.g. "foo.go:#123"
		startOffset = parseOctothorpDecimal(offset)
		endOffset = startOffset
	} else {
		// e.g. "foo.go:#123,#456"
		startOffset = parseOctothorpDecimal(offset[:hyphen])
		endOffset = parseOctothorpDecimal(offset[hyphen+1:])
	}
	if startOffset < 0 || endOffset < 0 {
		err = fmt.Errorf("invalid -pos offset %q", offset)
		return
	}
	return
}

// findQueryPos searches fset for filename and translates the
// specified file-relative byte offsets into token.Pos form. It
// returns an error if the file was not found or the offsets were out
// of bounds.
//
func findQueryPos(fset *token.FileSet, filename string, startOffset, endOffset int) (start, end token.Pos, err error) {
	var file *token.File
	fset.Iterate(func(f *token.File) bool {
		if sameFile(filename, f.Name()) {
			// (f.Name() is absolute)
			file = f
			return false // done
		}
		return true // continue
	})
	if file == nil {
		err = fmt.Errorf("couldn't find file containing position")
		return
	}

	// Range check [start..end], inclusive of both end-points.

	if 0 <= startOffset && startOffset <= file.Size() {
		start = file.Pos(int(startOffset))
	} else {
		err = fmt.Errorf("start position is beyond end of file")
		return
	}

	if 0 <= endOffset && endOffset <= file.Size() {
		end = file.Pos(int(endOffset))
	} else {
		err = fmt.Errorf("end position is beyond end of file")
		return
	}

	return
}

// sameFile returns true if x and y have the same basename and denote
// the same file.
//
func sameFile(x, y string) bool {
	if filepath.Base(x) == filepath.Base(y) { // (optimisation)
		if xi, err := os.Stat(x); err == nil {
			if yi, err := os.Stat(y); err == nil {
				return os.SameFile(xi, yi)
			}
		}
	}
	return false
}

// fastQueryPos parses the -pos flag and returns a QueryPos.
// It parses only a single file, and does not run the type checker.
func fastQueryPos(posFlag string) (*queryPos, error) {
	filename, startOffset, endOffset, err := parsePosFlag(posFlag)
	if err != nil {
		return nil, err
	}

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, 0)
	if err != nil {
		return nil, err
	}

	start, end, err := findQueryPos(fset, filename, startOffset, endOffset)
	if err != nil {
		return nil, err
	}

	path, exact := astutil.PathEnclosingInterval(f, start, end)
	if path == nil {
		return nil, fmt.Errorf("no syntax here")
	}

	return &queryPos{fset, start, end, path, exact, nil}, nil
}
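Editor's note: an illustrative sketch, not part of the vendored code, of the two -pos flag forms that parsePosFlag accepts, "file.go:#offset" and "file.go:#start,#end"; it only splits the string the same way, it does not reuse the unexported function.

// sketch: splitting the -pos flag into its filename and offset parts.
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, posFlag := range []string{"foo.go:#123", "foo.go:#123,#456"} {
		colon := strings.LastIndex(posFlag, ":")
		fmt.Printf("file=%s offsets=%s\n", posFlag[:colon], posFlag[colon+1:])
	}
	// parsePosFlag would yield (foo.go, 123, 123) and (foo.go, 123, 456) respectively.
}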
@ -0,0 +1,243 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.5

package oracle

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"io/ioutil"
	"sort"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/refactor/importgraph"
)

// Referrers reports all identifiers that resolve to the same object
// as the queried identifier, within any package in the analysis scope.
func referrers(q *Query) error {
	lconf := loader.Config{Build: q.Build}
	allowErrors(&lconf)

	if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
		return err
	}

	var id *ast.Ident
	var obj types.Object
	var lprog *loader.Program
	var pass2 bool
	var qpos *queryPos
	for {
		// Load/parse/type-check the program.
		var err error
		lprog, err = lconf.Load()
		if err != nil {
			return err
		}
		q.Fset = lprog.Fset

		qpos, err = parseQueryPos(lprog, q.Pos, false)
		if err != nil {
			return err
		}

		id, _ = qpos.path[0].(*ast.Ident)
		if id == nil {
			return fmt.Errorf("no identifier here")
		}

		obj = qpos.info.ObjectOf(id)
		if obj == nil {
			// Happens for y in "switch y := x.(type)",
			// the package declaration,
			// and unresolved identifiers.
			if _, ok := qpos.path[1].(*ast.File); ok { // package decl?
				pkg := qpos.info.Pkg
				obj = types.NewPkgName(id.Pos(), pkg, pkg.Name(), pkg)
			} else {
				return fmt.Errorf("no object for identifier: %T", qpos.path[1])
			}
		}

		if pass2 {
			break
		}

		// If the identifier is exported, we must load all packages that
		// depend transitively upon the package that defines it.
		// Treat PkgNames as exported, even though they're lowercase.
		if _, isPkg := obj.(*types.PkgName); !(isPkg || obj.Exported()) {
			break // not exported
		}

		// Scan the workspace and build the import graph.
		// Ignore broken packages.
		_, rev, _ := importgraph.Build(q.Build)

		// Re-load the larger program.
		// Create a new file set so that ...
		// External test packages are never imported,
		// so they will never appear in the graph.
		// (We must reset the Config here, not just reset the Fset field.)
		lconf = loader.Config{
			Fset:  token.NewFileSet(),
			Build: q.Build,
		}
		allowErrors(&lconf)
		for path := range rev.Search(obj.Pkg().Path()) {
			lconf.ImportWithTests(path)
		}
		pass2 = true
	}

	// Iterate over all go/types' Uses facts for the entire program.
	var refs []*ast.Ident
	for _, info := range lprog.AllPackages {
		for id2, obj2 := range info.Uses {
			if sameObj(obj, obj2) {
				refs = append(refs, id2)
			}
		}
	}
	sort.Sort(byNamePos{q.Fset, refs})

	q.result = &referrersResult{
		qpos:  qpos,
		query: id,
		obj:   obj,
		refs:  refs,
	}
	return nil
}

// sameObj reports whether x and y are identical, or both are PkgNames
// that import the same Package.
//
func sameObj(x, y types.Object) bool {
	if x == y {
		return true
	}
	if x, ok := x.(*types.PkgName); ok {
		if y, ok := y.(*types.PkgName); ok {
			return x.Imported() == y.Imported()
		}
	}
	return false
}

// -------- utils --------

// A deterministic ordering for token.Pos that doesn't
// depend on the order in which packages were loaded.
func lessPos(fset *token.FileSet, x, y token.Pos) bool {
	fx := fset.File(x)
	fy := fset.File(y)
	if fx != fy {
		return fx.Name() < fy.Name()
	}
	return x < y
}

type byNamePos struct {
	fset *token.FileSet
	ids  []*ast.Ident
}

func (p byNamePos) Len() int      { return len(p.ids) }
func (p byNamePos) Swap(i, j int) { p.ids[i], p.ids[j] = p.ids[j], p.ids[i] }
func (p byNamePos) Less(i, j int) bool {
	return lessPos(p.fset, p.ids[i].NamePos, p.ids[j].NamePos)
}

type referrersResult struct {
	qpos  *queryPos
	query *ast.Ident   // identifier of query
	obj   types.Object // object it denotes
	refs  []*ast.Ident // set of all other references to it
}

func (r *referrersResult) display(printf printfFunc) {
	printf(r.obj, "%d references to %s", len(r.refs), r.qpos.objectString(r.obj))

	// Show referring lines, like grep.
	type fileinfo struct {
		refs     []*ast.Ident
		linenums []int            // line number of refs[i]
		data     chan interface{} // file contents or error
	}
	var fileinfos []*fileinfo
	fileinfosByName := make(map[string]*fileinfo)

	// First pass: start the file reads concurrently.
	sema := make(chan struct{}, 20) // counting semaphore to limit I/O concurrency
	for _, ref := range r.refs {
		posn := r.qpos.fset.Position(ref.Pos())
		fi := fileinfosByName[posn.Filename]
		if fi == nil {
			fi = &fileinfo{data: make(chan interface{})}
			fileinfosByName[posn.Filename] = fi
			fileinfos = append(fileinfos, fi)

			// First request for this file:
			// start asynchronous read.
			go func() {
				sema <- struct{}{} // acquire token
				content, err := ioutil.ReadFile(posn.Filename)
				<-sema // release token
				if err != nil {
					fi.data <- err
				} else {
					fi.data <- content
				}
			}()
		}
		fi.refs = append(fi.refs, ref)
		fi.linenums = append(fi.linenums, posn.Line)
	}

	// Second pass: print refs in original order.
	// One line may have several refs at different columns.
	for _, fi := range fileinfos {
		v := <-fi.data // wait for I/O completion

		// Print one item for all refs in a file that could not
		// be loaded (perhaps due to //line directives).
		if err, ok := v.(error); ok {
			var suffix string
			if more := len(fi.refs) - 1; more > 0 {
				suffix = fmt.Sprintf(" (+ %d more refs in this file)", more)
			}
			printf(fi.refs[0], "%v%s", err, suffix)
			continue
		}

		lines := bytes.Split(v.([]byte), []byte("\n"))
		for i, ref := range fi.refs {
			printf(ref, "%s", lines[fi.linenums[i]-1])
		}
	}
}

// TODO(adonovan): encode extent, not just Pos info, in Serial form.

func (r *referrersResult) toSerial(res *serial.Result, fset *token.FileSet) {
	referrers := &serial.Referrers{
		Pos:  fset.Position(r.query.Pos()).String(),
		Desc: r.obj.String(),
	}
	if pos := r.obj.Pos(); pos != token.NoPos { // Package objects have no Pos()
		referrers.ObjPos = fset.Position(pos).String()
	}
	for _, ref := range r.refs {
		referrers.Refs = append(referrers.Refs, fset.Position(ref.NamePos).String())
	}
	res.Referrers = referrers
}
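Editor's note: an illustrative sketch, not part of the vendored code, of the counting-semaphore pattern that display uses above to cap concurrent file reads (there, the limit is 20).

// sketch: bounding goroutine concurrency with a buffered channel as a semaphore.
package main

import (
	"fmt"
	"sync"
)

func main() {
	sema := make(chan struct{}, 3) // at most 3 workers run at once
	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			sema <- struct{}{}        // acquire token
			defer func() { <-sema }() // release token
			fmt.Println("reading file", i)
		}(i)
	}
	wg.Wait()
}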
258	vendor/github.com/visualfc/gotools/oracle/oracle/serial/serial.go (generated, vendored, new file)
@ -0,0 +1,258 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package serial defines the oracle's schema for structured data
// serialization using JSON, XML, etc.
package serial

// All 'pos' strings are of the form "file:line:col".
// TODO(adonovan): improve performance by sharing filename strings.
// TODO(adonovan): improve precision by providing the start/end
// interval when available.
//
// TODO(adonovan): consider richer encodings of types, functions,
// methods, etc.

// A Peers is the result of a 'peers' query.
// If Allocs is empty, the selected channel can't point to anything.
type Peers struct {
	Pos      string   `json:"pos"`                // location of the selected channel op (<-)
	Type     string   `json:"type"`               // type of the selected channel
	Allocs   []string `json:"allocs,omitempty"`   // locations of aliased make(chan) ops
	Sends    []string `json:"sends,omitempty"`    // locations of aliased ch<-x ops
	Receives []string `json:"receives,omitempty"` // locations of aliased <-ch ops
	Closes   []string `json:"closes,omitempty"`   // locations of aliased close(ch) ops
}

// A Referrers is the result of a 'referrers' query.
type Referrers struct {
	Pos    string   `json:"pos"`              // location of the query reference
	ObjPos string   `json:"objpos,omitempty"` // location of the definition
	Desc   string   `json:"desc"`             // description of the denoted object
	Refs   []string `json:"refs,omitempty"`   // locations of all references
}

// A Definition is the result of a 'definition' query.
type Definition struct {
	ObjPos string `json:"objpos,omitempty"` // location of the definition
	Desc   string `json:"desc"`             // description of the denoted object
}

type CalleesItem struct {
	Name string `json:"name"` // full name of called function
	Pos  string `json:"pos"`  // location of called function
}

// A Callees is the result of a 'callees' query.
//
// Callees is nonempty unless the call was a dynamic call on a
// provably nil func or interface value.
type Callees struct {
	Pos     string         `json:"pos"`               // location of selected call site
	Desc    string         `json:"desc"`              // description of call site
	Callees []*CalleesItem `json:"callees,omitempty"` // set of possible call targets
}

// A Caller is one element of the slice returned by a 'callers' query.
// (Callstack also contains a similar slice.)
//
// The root of the callgraph has an unspecified "Caller" string.
type Caller struct {
	Pos    string `json:"pos,omitempty"` // location of the calling function
	Desc   string `json:"desc"`          // description of call site
	Caller string `json:"caller"`        // full name of calling function
}

// A CallStack is the result of a 'callstack' query.
// It indicates an arbitrary path from the root of the callgraph to
// the query function.
//
// If the Callers slice is empty, the function was unreachable in this
// analysis scope.
type CallStack struct {
	Pos     string   `json:"pos"`     // location of the selected function
	Target  string   `json:"target"`  // the selected function
	Callers []Caller `json:"callers"` // enclosing calls, innermost first.
}

// A FreeVar is one element of the slice returned by a 'freevars'
// query. Each one identifies an expression referencing a local
// identifier defined outside the selected region.
type FreeVar struct {
	Pos  string `json:"pos"`  // location of the identifier's definition
	Kind string `json:"kind"` // one of {var,func,type,const,label}
	Ref  string `json:"ref"`  // referring expression (e.g. "x" or "x.y.z")
	Type string `json:"type"` // type of the expression
}

// An Implements contains the result of an 'implements' query.
// It describes the queried type, the set of named non-empty interface
// types to which it is assignable, and the set of named/*named types
// (concrete or non-empty interface) which may be assigned to it.
//
type Implements struct {
	T                 ImplementsType   `json:"type,omitempty"`    // the queried type
	AssignableTo      []ImplementsType `json:"to,omitempty"`      // types assignable to T
	AssignableFrom    []ImplementsType `json:"from,omitempty"`    // interface types assignable from T
	AssignableFromPtr []ImplementsType `json:"fromptr,omitempty"` // interface types assignable only from *T

	// The following fields are set only if the query was a method.
	// Assignable{To,From,FromPtr}Method[i] is the corresponding
	// method of type Assignable{To,From,FromPtr}[i], or blank
	// {"",""} if that type lacks the method.
	Method                  *DescribeMethod  `json:"method,omitempty"` // the queried method
	AssignableToMethod      []DescribeMethod `json:"to_method,omitempty"`
	AssignableFromMethod    []DescribeMethod `json:"from_method,omitempty"`
	AssignableFromPtrMethod []DescribeMethod `json:"fromptr_method,omitempty"`
}

// An ImplementsType describes a single type as part of an 'implements' query.
type ImplementsType struct {
	Name string `json:"name"` // full name of the type
	Pos  string `json:"pos"`  // location of its definition
	Kind string `json:"kind"` // "basic", "array", etc
}

// A SyntaxNode is one element of a stack of enclosing syntax nodes in
// a "what" query.
type SyntaxNode struct {
	Description string `json:"desc"`  // description of syntax tree
	Start       int    `json:"start"` // start byte offset, 0-based
	End         int    `json:"end"`   // end byte offset
}

// A What is the result of the "what" query, which quickly identifies
// the selection, parsing only a single file. It is intended for use
// in low-latency GUIs.
type What struct {
	Enclosing  []SyntaxNode `json:"enclosing"`            // enclosing nodes of syntax tree
	Modes      []string     `json:"modes"`                // query modes enabled for this selection.
	SrcDir     string       `json:"srcdir,omitempty"`     // $GOROOT src directory containing queried package
	ImportPath string       `json:"importpath,omitempty"` // import path of queried package
}

// A PointsToLabel describes a pointer analysis label.
//
// A "label" is an object that may be pointed to by a pointer, map,
// channel, 'func', slice or interface. Labels include:
// - functions
// - globals
// - arrays created by literals (e.g. []byte("foo")) and conversions ([]byte(s))
// - stack- and heap-allocated variables (including composite literals)
// - arrays allocated by append()
// - channels, maps and arrays created by make()
// - and their subelements, e.g. "alloc.y[*].z"
//
type PointsToLabel struct {
	Pos  string `json:"pos"`  // location of syntax that allocated the object
	Desc string `json:"desc"` // description of the label
}

// A PointsTo is one element of the result of a 'pointsto' query on an
// expression. It describes a single pointer: its type and the set of
// "labels" it points to.
//
// If the pointer is of interface type, it will have one PTS entry
// describing each concrete type that it may contain. For each
// concrete type that is a pointer, the PTS entry describes the labels
// it may point to. The same is true for reflect.Values, except the
// dynamic types needn't be concrete.
//
type PointsTo struct {
	Type    string          `json:"type"`              // (concrete) type of the pointer
	NamePos string          `json:"namepos,omitempty"` // location of type defn, if Named
	Labels  []PointsToLabel `json:"labels,omitempty"`  // pointed-to objects
}

// A DescribeValue is the additional result of a 'describe' query
// if the selection indicates a value or expression.
type DescribeValue struct {
	Type   string `json:"type"`             // type of the expression
	Value  string `json:"value,omitempty"`  // value of the expression, if constant
	ObjPos string `json:"objpos,omitempty"` // location of the definition, if an Ident
}

type DescribeMethod struct {
	Name string `json:"name"` // method name, as defined by types.Selection.String()
	Pos  string `json:"pos"`  // location of the method's definition
}

// A DescribeType is the additional result of a 'describe' query
// if the selection indicates a type.
type DescribeType struct {
	Type    string           `json:"type"`              // the string form of the type
	NamePos string           `json:"namepos,omitempty"` // location of definition of type, if named
	NameDef string           `json:"namedef,omitempty"` // underlying definition of type, if named
	Methods []DescribeMethod `json:"methods,omitempty"` // methods of the type
}

type DescribeMember struct {
	Name    string           `json:"name"`              // name of member
	Type    string           `json:"type,omitempty"`    // type of member (underlying, if 'type')
	Value   string           `json:"value,omitempty"`   // value of member (if 'const')
	Pos     string           `json:"pos"`               // location of definition of member
	Kind    string           `json:"kind"`              // one of {var,const,func,type}
	Methods []DescribeMethod `json:"methods,omitempty"` // methods (if member is a type)
}

// A DescribePackage is the additional result of a 'describe' if
// the selection indicates a package.
type DescribePackage struct {
	Path    string            `json:"path"`              // import path of the package
	Members []*DescribeMember `json:"members,omitempty"` // accessible members of the package
}

// A Describe is the result of a 'describe' query.
// It may contain an element describing the selected semantic entity
// in detail.
type Describe struct {
	Desc   string `json:"desc"`             // description of the selected syntax node
	Pos    string `json:"pos"`              // location of the selected syntax node
	Detail string `json:"detail,omitempty"` // one of {package, type, value}, or "".

	// At most one of the following fields is populated:
	// the one specified by 'detail'.
	Package *DescribePackage `json:"package,omitempty"`
	Type    *DescribeType    `json:"type,omitempty"`
	Value   *DescribeValue   `json:"value,omitempty"`
}

// A WhichErrs is the result of a 'whicherrs' query.
// It contains the position of the queried error and the possible globals,
// constants, and types it may point to.
type WhichErrs struct {
	ErrPos    string          `json:"errpos,omitempty"`    // location of queried error
	Globals   []string        `json:"globals,omitempty"`   // locations of globals
	Constants []string        `json:"constants,omitempty"` // locations of constants
	Types     []WhichErrsType `json:"types,omitempty"`     // Types
}

type WhichErrsType struct {
	Type     string `json:"type,omitempty"`
	Position string `json:"position,omitempty"`
}

// A Result is the common result of any oracle query.
// It contains a query-specific result element.
//
// TODO(adonovan): perhaps include other info such as: analysis scope,
// raw query position, stack of ast nodes, query package, etc.
type Result struct {
	Mode string `json:"mode"` // mode of the query

	// Exactly one of the following fields is populated:
	// the one specified by 'mode'.
	Callees    *Callees    `json:"callees,omitempty"`
	Callers    []Caller    `json:"callers,omitempty"`
	Callstack  *CallStack  `json:"callstack,omitempty"`
	Definition *Definition `json:"definition,omitempty"`
	Describe   *Describe   `json:"describe,omitempty"`
	Freevars   []*FreeVar  `json:"freevars,omitempty"`
	Implements *Implements `json:"implements,omitempty"`
	Peers      *Peers      `json:"peers,omitempty"`
	PointsTo   []PointsTo  `json:"pointsto,omitempty"`
	Referrers  *Referrers  `json:"referrers,omitempty"`
	What       *What       `json:"what,omitempty"`
	WhichErrs  *WhichErrs  `json:"whicherrs,omitempty"`
}
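Editor's note: an illustrative sketch, not part of the vendored diff, of how a serial.Result for a 'definition' query marshals to JSON; the omitempty tags keep the fields of unused query modes out of the output. The position strings here are made up for the example.

// sketch: marshaling a query result using the serial package's JSON tags.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/visualfc/gotools/oracle/oracle/serial"
)

func main() {
	res := serial.Result{
		Mode: "definition",
		Definition: &serial.Definition{
			ObjPos: "main.go:10:6", // hypothetical location
			Desc:   "var x int",
		},
	}
	b, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(b)) // only "mode" and "definition" appear in the output
}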
@ -0,0 +1,210 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package oracle

import (
	"fmt"
	"go/ast"
	"go/build"
	"go/token"
	"os"
	"path/filepath"
	"sort"
	"strings"

	"github.com/visualfc/gotools/oracle/oracle/serial"
	"golang.org/x/tools/go/ast/astutil"
)

// what reports all the information about the query selection that can be
// obtained from parsing only its containing source file.
// It is intended to be a very low-latency query callable from GUI
// tools, e.g. to populate a menu of options of slower queries about
// the selected location.
//
func what(q *Query) error {
	qpos, err := fastQueryPos(q.Pos)
	if err != nil {
		return err
	}
	q.Fset = qpos.fset

	// (ignore errors)
	srcdir, importPath, _ := guessImportPath(q.Fset.File(qpos.start).Name(), q.Build)

	// Determine which query modes are applicable to the selection.
	enable := map[string]bool{
		"describe": true, // any syntax; always enabled
	}

	if qpos.end > qpos.start {
		enable["freevars"] = true // nonempty selection?
	}

	for _, n := range qpos.path {
		switch n := n.(type) {
		case *ast.Ident:
			enable["definition"] = true
			enable["referrers"] = true
			enable["implements"] = true
		case *ast.CallExpr:
			enable["callees"] = true
		case *ast.FuncDecl:
			enable["callers"] = true
			enable["callstack"] = true
		case *ast.SendStmt:
			enable["peers"] = true
		case *ast.UnaryExpr:
			if n.Op == token.ARROW {
				enable["peers"] = true
			}
		}

		// For implements, we approximate findInterestingNode.
		if _, ok := enable["implements"]; !ok {
			switch n.(type) {
			case *ast.ArrayType,
				*ast.StructType,
				*ast.FuncType,
				*ast.InterfaceType,
				*ast.MapType,
				*ast.ChanType:
				enable["implements"] = true
			}
		}

		// For pointsto, we approximate findInterestingNode.
		if _, ok := enable["pointsto"]; !ok {
			switch n.(type) {
			case ast.Stmt,
				*ast.ArrayType,
				*ast.StructType,
				*ast.FuncType,
				*ast.InterfaceType,
				*ast.MapType,
				*ast.ChanType:
				enable["pointsto"] = false // not an expr

			case ast.Expr, ast.Decl, *ast.ValueSpec:
				enable["pointsto"] = true // an expr, maybe

			default:
				// Comment, Field, KeyValueExpr, etc: ascend.
			}
		}
	}

	// If we don't have an exact selection, disable modes that need one.
	if !qpos.exact {
		enable["callees"] = false
		enable["pointsto"] = false
		enable["whicherrs"] = false
		enable["describe"] = false
	}

	var modes []string
	for mode := range enable {
		modes = append(modes, mode)
	}
	sort.Strings(modes)

	q.result = &whatResult{
		path:       qpos.path,
		srcdir:     srcdir,
		importPath: importPath,
		modes:      modes,
	}
	return nil
}

// guessImportPath finds the package containing filename, and returns
// its source directory (an element of $GOPATH) and its import path
// relative to it.
//
// TODO(adonovan): what about _test.go files that are not part of the
// package?
//
func guessImportPath(filename string, buildContext *build.Context) (srcdir, importPath string, err error) {
	absFile, err := filepath.Abs(filename)
	if err != nil {
		err = fmt.Errorf("can't form absolute path of %s", filename)
		return
	}
	absFileDir := segments(filepath.Dir(absFile))

	// Find the innermost directory in $GOPATH that encloses filename.
	minD := 1024
	for _, gopathDir := range buildContext.SrcDirs() {
		absDir, err := filepath.Abs(gopathDir)
		if err != nil {
			continue // e.g. non-existent dir on $GOPATH
		}
		d := prefixLen(segments(absDir), absFileDir)
		// If there are multiple matches,
		// prefer the innermost enclosing directory
		// (smallest d).
		if d >= 0 && d < minD {
			minD = d
			srcdir = gopathDir
			importPath = strings.Join(absFileDir[len(absFileDir)-minD:], string(os.PathSeparator))
		}
	}
	if srcdir == "" {
		err = fmt.Errorf("directory %s is not beneath any of these GOROOT/GOPATH directories: %s",
			filepath.Dir(absFile), strings.Join(buildContext.SrcDirs(), ", "))
	}
	return
}

func segments(path string) []string {
	return strings.Split(path, string(os.PathSeparator))
}

// prefixLen returns the length of the remainder of y if x is a prefix
// of y, a negative number otherwise.
func prefixLen(x, y []string) int {
	d := len(y) - len(x)
	if d >= 0 {
		for i := range x {
			if y[i] != x[i] {
				return -1 // not a prefix
			}
		}
	}
	return d
}

type whatResult struct {
	path       []ast.Node
	modes      []string
	srcdir     string
	importPath string
}

func (r *whatResult) display(printf printfFunc) {
	for _, n := range r.path {
		printf(n, "%s", astutil.NodeDescription(n))
	}
	printf(nil, "modes: %s", r.modes)
	printf(nil, "srcdir: %s", r.srcdir)
	printf(nil, "import path: %s", r.importPath)
}

func (r *whatResult) toSerial(res *serial.Result, fset *token.FileSet) {
	var enclosing []serial.SyntaxNode
	for _, n := range r.path {
		enclosing = append(enclosing, serial.SyntaxNode{
			Description: astutil.NodeDescription(n),
			Start:       fset.Position(n.Pos()).Offset,
			End:         fset.Position(n.End()).Offset,
		})
	}
	res.What = &serial.What{
		Modes:      r.modes,
		SrcDir:     r.srcdir,
		ImportPath: r.importPath,
		Enclosing:  enclosing,
	}
}
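Editor's note: an illustrative sketch, not part of the vendored code, of the idea behind guessImportPath: the import path is the file's directory with the enclosing $GOPATH src directory stripped off. This sketch uses strings.TrimPrefix rather than the segment-based prefixLen above, and the paths are made up.

// sketch: deriving an import path by trimming a GOPATH src prefix.
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	srcdir := filepath.Join("home", "user", "go", "src")
	filedir := filepath.Join(srcdir, "github.com", "example", "pkg")
	rel := strings.TrimPrefix(filedir, srcdir+string(filepath.Separator))
	fmt.Println("import path:", filepath.ToSlash(rel)) // github.com/example/pkg
}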
@ -0,0 +1,328 @@
|
||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.5
|
||||||
|
|
||||||
|
package oracle
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"github.com/visualfc/gotools/oracle/oracle/serial"
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/ssa"
|
||||||
|
"golang.org/x/tools/go/ssa/ssautil"
|
||||||
|
)
|
||||||
|
|
||||||
|
var builtinErrorType = types.Universe.Lookup("error").Type()
|
||||||
|
|
||||||
|
// whicherrs takes an position to an error and tries to find all types, constants
|
||||||
|
// and global values that the given error can point to and that can be checked from the
|
||||||
|
// scope where the error lives.
|
||||||
|
// In short, it returns a list of things that can be checked against in order to handle
|
||||||
|
// an error properly.
|
||||||
|
//
|
||||||
|
// TODO(dmorsing): figure out if fields in errors like *os.PathError.Err
|
||||||
|
// can be queried recursively somehow.
|
||||||
|
func whicherrs(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
|
||||||
|
if err := setPTAScope(&lconf, q.Scope); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Fset = lprog.Fset
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, true) // needs exact pos
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)
|
||||||
|
|
||||||
|
ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
path, action := findInterestingNode(qpos.info, qpos.path)
|
||||||
|
if action != actionExpr {
|
||||||
|
return fmt.Errorf("whicherrs wants an expression; got %s",
|
||||||
|
astutil.NodeDescription(qpos.path[0]))
|
||||||
|
}
|
||||||
|
var expr ast.Expr
|
||||||
|
var obj types.Object
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// ambiguous ValueSpec containing multiple names
|
||||||
|
return fmt.Errorf("multiple value specification")
|
||||||
|
case *ast.Ident:
|
||||||
|
obj = qpos.info.ObjectOf(n)
|
||||||
|
expr = n
|
||||||
|
case ast.Expr:
|
||||||
|
expr = n
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unexpected AST for expr: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
typ := qpos.info.TypeOf(expr)
|
||||||
|
if !types.Identical(typ, builtinErrorType) {
|
||||||
|
return fmt.Errorf("selection is not an expression of type 'error'")
|
||||||
|
}
|
||||||
|
// Determine the ssa.Value for the expression.
|
||||||
|
var value ssa.Value
|
||||||
|
if obj != nil {
|
||||||
|
// def/ref of func/var object
|
||||||
|
value, _, err = ssaValueForIdent(prog, qpos.info, obj, path)
|
||||||
|
} else {
|
||||||
|
value, _, err = ssaValueForExpr(prog, qpos.info, path)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err // e.g. trivially dead code
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defer SSA construction till after errors are reported.
|
||||||
|
prog.Build()
|
||||||
|
|
||||||
|
globals := findVisibleErrs(prog, qpos)
|
||||||
|
constants := findVisibleConsts(prog, qpos)
|
||||||
|
|
||||||
|
res := &whicherrsResult{
|
||||||
|
qpos: qpos,
|
||||||
|
errpos: expr.Pos(),
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): the following code is heavily duplicated
|
||||||
|
// w.r.t. "pointsto". Refactor?
|
||||||
|
|
||||||
|
// Find the instruction which initialized the
|
||||||
|
// global error. If more than one instruction has stored to the global,
|
||||||
|
// remove the global from the set of values that we want to query.
|
||||||
|
allFuncs := ssautil.AllFunctions(prog)
|
||||||
|
for fn := range allFuncs {
|
||||||
|
for _, b := range fn.Blocks {
|
||||||
|
for _, instr := range b.Instrs {
|
||||||
|
store, ok := instr.(*ssa.Store)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
gval, ok := store.Addr.(*ssa.Global)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
gbl, ok := globals[gval]
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// we already found a store to this global
|
||||||
|
// The normal error definition is just one store in init,
|
||||||
|
// so we just remove this global from the set we want to query
|
||||||
|
if gbl != nil {
|
||||||
|
delete(globals, gval)
|
||||||
|
}
|
||||||
|
globals[gval] = store.Val
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ptaConfig.AddQuery(value)
|
||||||
|
for _, v := range globals {
|
||||||
|
ptaConfig.AddQuery(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
ptares := ptrAnalysis(ptaConfig)
|
||||||
|
valueptr := ptares.Queries[value]
|
||||||
|
for g, v := range globals {
|
||||||
|
ptr, ok := ptares.Queries[v]
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !ptr.MayAlias(valueptr) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
res.globals = append(res.globals, g)
|
||||||
|
}
|
||||||
|
pts := valueptr.PointsTo()
|
||||||
|
dedup := make(map[*ssa.NamedConst]bool)
|
||||||
|
for _, label := range pts.Labels() {
|
||||||
|
// These values are either MakeInterfaces or reflect
|
||||||
|
// generated interfaces. For the purposes of this
|
||||||
|
// analysis, we don't care about reflect generated ones
|
||||||
|
makeiface, ok := label.Value().(*ssa.MakeInterface)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
constval, ok := makeiface.X.(*ssa.Const)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
c := constants[*constval]
|
||||||
|
if c != nil && !dedup[c] {
|
||||||
|
dedup[c] = true
|
||||||
|
res.consts = append(res.consts, c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
concs := pts.DynamicTypes()
|
||||||
|
concs.Iterate(func(conc types.Type, _ interface{}) {
|
||||||
|
// go/types is a bit annoying here.
|
||||||
|
// We want to find all the types that we can
|
||||||
|
// typeswitch or assert to. This means finding out
|
||||||
|
// if the type pointed to can be seen by us.
|
||||||
|
//
|
||||||
|
// For the purposes of this analysis, the type is always
|
||||||
|
// either a Named type or a pointer to one.
|
||||||
|
// There are cases where error can be implemented
|
||||||
|
// by unnamed types, but in that case, we can't assert to
|
||||||
|
// it, so we don't care about it for this analysis.
|
||||||
|
var name *types.TypeName
|
||||||
|
switch t := conc.(type) {
|
||||||
|
case *types.Pointer:
|
||||||
|
named, ok := t.Elem().(*types.Named)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
name = named.Obj()
|
||||||
|
case *types.Named:
|
||||||
|
name = t.Obj()
|
||||||
|
default:
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !isAccessibleFrom(name, qpos.info.Pkg) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
res.types = append(res.types, &errorType{conc, name})
|
||||||
|
})
|
||||||
|
sort.Sort(membersByPosAndString(res.globals))
|
||||||
|
sort.Sort(membersByPosAndString(res.consts))
|
||||||
|
sort.Sort(sorterrorType(res.types))
|
||||||
|
|
||||||
|
q.result = res
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// findVisibleErrs returns a mapping from each package-level variable of type "error" to nil.
|
||||||
|
func findVisibleErrs(prog *ssa.Program, qpos *queryPos) map[*ssa.Global]ssa.Value {
|
||||||
|
globals := make(map[*ssa.Global]ssa.Value)
|
||||||
|
for _, pkg := range prog.AllPackages() {
|
||||||
|
for _, mem := range pkg.Members {
|
||||||
|
gbl, ok := mem.(*ssa.Global)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
gbltype := gbl.Type()
|
||||||
|
// globals are always pointers
|
||||||
|
if !types.Identical(deref(gbltype), builtinErrorType) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !isAccessibleFrom(gbl.Object(), qpos.info.Pkg) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
globals[gbl] = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return globals
|
||||||
|
}
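A hedged illustration of what findVisibleErrs collects (the package and variable names are invented):

// package demo
// var ErrNotFound = errors.New("not found")
//
// findVisibleErrs returns the *ssa.Global for ErrNotFound mapped to nil;
// the store-scanning loop in whicherrs later replaces nil with the value
// actually stored into it during package initialization.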
|
||||||
|
|
||||||
|
// findVisibleConsts returns a mapping from each package-level constant assignable to type "error", to nil.
|
||||||
|
func findVisibleConsts(prog *ssa.Program, qpos *queryPos) map[ssa.Const]*ssa.NamedConst {
|
||||||
|
constants := make(map[ssa.Const]*ssa.NamedConst)
|
||||||
|
for _, pkg := range prog.AllPackages() {
|
||||||
|
for _, mem := range pkg.Members {
|
||||||
|
obj, ok := mem.(*ssa.NamedConst)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
consttype := obj.Type()
|
||||||
|
if !types.AssignableTo(consttype, builtinErrorType) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !isAccessibleFrom(obj.Object(), qpos.info.Pkg) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
constants[*obj.Value] = obj
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return constants
|
||||||
|
}
|
||||||
|
|
||||||
|
type membersByPosAndString []ssa.Member
|
||||||
|
|
||||||
|
func (a membersByPosAndString) Len() int { return len(a) }
|
||||||
|
func (a membersByPosAndString) Less(i, j int) bool {
|
||||||
|
cmp := a[i].Pos() - a[j].Pos()
|
||||||
|
return cmp < 0 || cmp == 0 && a[i].String() < a[j].String()
|
||||||
|
}
|
||||||
|
func (a membersByPosAndString) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||||
|
|
||||||
|
type sorterrorType []*errorType
|
||||||
|
|
||||||
|
func (a sorterrorType) Len() int { return len(a) }
|
||||||
|
func (a sorterrorType) Less(i, j int) bool {
|
||||||
|
cmp := a[i].obj.Pos() - a[j].obj.Pos()
|
||||||
|
return cmp < 0 || cmp == 0 && a[i].typ.String() < a[j].typ.String()
|
||||||
|
}
|
||||||
|
func (a sorterrorType) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||||
|
|
||||||
|
type errorType struct {
|
||||||
|
typ types.Type // concrete type N or *N that implements error
|
||||||
|
obj *types.TypeName // the named type N
|
||||||
|
}
|
||||||
|
|
||||||
|
type whicherrsResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
errpos token.Pos
|
||||||
|
globals []ssa.Member
|
||||||
|
consts []ssa.Member
|
||||||
|
types []*errorType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *whicherrsResult) display(printf printfFunc) {
|
||||||
|
if len(r.globals) > 0 {
|
||||||
|
printf(r.qpos, "this error may point to these globals:")
|
||||||
|
for _, g := range r.globals {
|
||||||
|
printf(g.Pos(), "\t%s", g.RelString(r.qpos.info.Pkg))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(r.consts) > 0 {
|
||||||
|
printf(r.qpos, "this error may contain these constants:")
|
||||||
|
for _, c := range r.consts {
|
||||||
|
printf(c.Pos(), "\t%s", c.RelString(r.qpos.info.Pkg))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(r.types) > 0 {
|
||||||
|
printf(r.qpos, "this error may contain these dynamic types:")
|
||||||
|
for _, t := range r.types {
|
||||||
|
printf(t.obj.Pos(), "\t%s", r.qpos.typeString(t.typ))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *whicherrsResult) toSerial(res *serial.Result, fset *token.FileSet) {
|
||||||
|
we := &serial.WhichErrs{}
|
||||||
|
we.ErrPos = fset.Position(r.errpos).String()
|
||||||
|
for _, g := range r.globals {
|
||||||
|
we.Globals = append(we.Globals, fset.Position(g.Pos()).String())
|
||||||
|
}
|
||||||
|
for _, c := range r.consts {
|
||||||
|
we.Constants = append(we.Constants, fset.Position(c.Pos()).String())
|
||||||
|
}
|
||||||
|
for _, t := range r.types {
|
||||||
|
var et serial.WhichErrsType
|
||||||
|
et.Type = r.qpos.typeString(t.typ)
|
||||||
|
et.Position = fset.Position(t.obj.Pos()).String()
|
||||||
|
we.Types = append(we.Types, et)
|
||||||
|
}
|
||||||
|
res.WhichErrs = we
|
||||||
|
}
|
|
@ -21,9 +21,9 @@ import (
|
||||||
|
|
||||||
var Command = &command.Command{
|
var Command = &command.Command{
|
||||||
Run: runPkgs,
|
Run: runPkgs,
|
||||||
UsageLine: "pkgs",
|
UsageLine: "pkgs [-list|-json] [-std]",
|
||||||
Short: "print liteide_stub version",
|
Short: "print go package",
|
||||||
Long: `Version prints the liteide_stub version.`,
|
Long: `print go package.`,
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
|
@ -0,0 +1,233 @@
|
||||||
|
package pkgutil
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/build"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
//var go15VendorExperiment = os.Getenv("GO15VENDOREXPERIMENT") == "1"
|
||||||
|
|
||||||
|
func IsVendorExperiment() bool {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchPattern(pattern)(name) reports whether
|
||||||
|
// name matches pattern. Pattern is a limited glob
|
||||||
|
// pattern in which '...' means 'any string' and there
|
||||||
|
// is no other special syntax.
|
||||||
|
func matchPattern(pattern string) func(name string) bool {
|
||||||
|
re := regexp.QuoteMeta(pattern)
|
||||||
|
re = strings.Replace(re, `\.\.\.`, `.*`, -1)
|
||||||
|
// Special case: foo/... matches foo too.
|
||||||
|
if strings.HasSuffix(re, `/.*`) {
|
||||||
|
re = re[:len(re)-len(`/.*`)] + `(/.*)?`
|
||||||
|
}
|
||||||
|
reg := regexp.MustCompile(`^` + re + `$`)
|
||||||
|
return func(name string) bool {
|
||||||
|
return reg.MatchString(name)
|
||||||
|
}
|
||||||
|
}
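A rough sketch of the limited glob in action (illustrative inputs only):

// match := matchPattern("net/...")
// match("net")      == true   // special case: foo/... also matches foo
// match("net/http") == true
// match("network")  == false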
|
||||||
|
|
||||||
|
// hasPathPrefix reports whether the path s begins with the
|
||||||
|
// elements in prefix.
|
||||||
|
func hasPathPrefix(s, prefix string) bool {
|
||||||
|
switch {
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
case len(s) == len(prefix):
|
||||||
|
return s == prefix
|
||||||
|
case len(s) > len(prefix):
|
||||||
|
if prefix != "" && prefix[len(prefix)-1] == '/' {
|
||||||
|
return strings.HasPrefix(s, prefix)
|
||||||
|
}
|
||||||
|
return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasFilePathPrefix reports whether the filesystem path s begins with the
|
||||||
|
// elements in prefix.
|
||||||
|
func hasFilePathPrefix(s, prefix string) bool {
|
||||||
|
sv := strings.ToUpper(filepath.VolumeName(s))
|
||||||
|
pv := strings.ToUpper(filepath.VolumeName(prefix))
|
||||||
|
s = s[len(sv):]
|
||||||
|
prefix = prefix[len(pv):]
|
||||||
|
switch {
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
case sv != pv:
|
||||||
|
return false
|
||||||
|
case len(s) == len(prefix):
|
||||||
|
return s == prefix
|
||||||
|
case len(s) > len(prefix):
|
||||||
|
if prefix != "" && prefix[len(prefix)-1] == filepath.Separator {
|
||||||
|
return strings.HasPrefix(s, prefix)
|
||||||
|
}
|
||||||
|
return s[len(prefix)] == filepath.Separator && s[:len(prefix)] == prefix
|
||||||
|
}
|
||||||
|
}
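Hedged examples of the prefix check (results depend on the OS path separator):

// on Windows: hasFilePathPrefix(`C:\gopath\src\foo`, `c:\gopath`) == true   // volume names compare case-insensitively
// on Unix:    hasFilePathPrefix("/gopath/srcfoo", "/gopath/src")  == false  // an element boundary is required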
|
||||||
|
|
||||||
|
// treeCanMatchPattern(pattern)(name) reports whether
|
||||||
|
// name or children of name can possibly match pattern.
|
||||||
|
// Pattern is the same limited glob accepted by matchPattern.
|
||||||
|
func treeCanMatchPattern(pattern string) func(name string) bool {
|
||||||
|
wildCard := false
|
||||||
|
if i := strings.Index(pattern, "..."); i >= 0 {
|
||||||
|
wildCard = true
|
||||||
|
pattern = pattern[:i]
|
||||||
|
}
|
||||||
|
return func(name string) bool {
|
||||||
|
return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
|
||||||
|
wildCard && strings.HasPrefix(name, pattern)
|
||||||
|
}
|
||||||
|
}
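A small sketch of the tree-pruning test (names are hypothetical):

// canMatch := treeCanMatchPattern("golang.org/x/...")
// canMatch("golang.org")         == true   // an ancestor directory may still lead to a match
// canMatch("golang.org/x/tools") == true   // the wildcard covers deeper paths
// canMatch("github.com")         == false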
|
||||||
|
|
||||||
|
var isDirCache = map[string]bool{}
|
||||||
|
|
||||||
|
func isDir(path string) bool {
|
||||||
|
result, ok := isDirCache[path]
|
||||||
|
if ok {
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
fi, err := os.Stat(path)
|
||||||
|
result = err == nil && fi.IsDir()
|
||||||
|
isDirCache[path] = result
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
type Package struct {
|
||||||
|
Root string
|
||||||
|
Dir string
|
||||||
|
ImportPath string
|
||||||
|
}
|
||||||
|
|
||||||
|
func ImportFile(fileName string) *Package {
|
||||||
|
return ImportDir(filepath.Dir(fileName))
|
||||||
|
}
|
||||||
|
|
||||||
|
func ImportDir(dir string) *Package {
|
||||||
|
pkg, err := build.ImportDir(dir, build.FindOnly)
|
||||||
|
if err != nil {
|
||||||
|
return &Package{"", dir, ""}
|
||||||
|
}
|
||||||
|
return &Package{pkg.Root, pkg.Dir, pkg.ImportPath}
|
||||||
|
}
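A hedged usage example (the GOPATH layout is assumed, not taken from the diff):

// pkg := ImportFile("/gopath/src/foo/bar/main.go")
// pkg.Root == "/gopath", pkg.Dir == "/gopath/src/foo/bar", pkg.ImportPath == "foo/bar"
// If the directory is not under any GOPATH entry, Root and ImportPath are left empty.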
|
||||||
|
|
||||||
|
// expandPath returns the symlink-expanded form of path.
|
||||||
|
func expandPath(p string) string {
|
||||||
|
x, err := filepath.EvalSymlinks(p)
|
||||||
|
if err == nil {
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
// VendoredImportPath returns the expansion of path when it appears in parent.
|
||||||
|
// If parent is x/y/z, then path might expand to x/y/z/vendor/path, x/y/vendor/path,
|
||||||
|
// x/vendor/path, vendor/path, or else stay path if none of those exist.
|
||||||
|
// VendoredImportPath returns the expanded path or, if no expansion is found, the original.
|
||||||
|
func VendoredImportPath(parent *Package, path string) (found string) {
|
||||||
|
if parent == nil || parent.Root == "" {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
|
||||||
|
dir := filepath.Clean(parent.Dir)
|
||||||
|
root := filepath.Join(parent.Root, "src")
|
||||||
|
if !hasFilePathPrefix(dir, root) {
|
||||||
|
// Look for symlinks before reporting error.
|
||||||
|
dir = expandPath(dir)
|
||||||
|
root = expandPath(root)
|
||||||
|
}
|
||||||
|
if !hasFilePathPrefix(dir, root) || len(dir) <= len(root) || dir[len(root)] != filepath.Separator {
|
||||||
|
log.Println("invalid vendoredImportPath: dir=%q root=%q separator=%q", dir, root, string(filepath.Separator))
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
vpath := "vendor/" + path
|
||||||
|
for i := len(dir); i >= len(root); i-- {
|
||||||
|
if i < len(dir) && dir[i] != filepath.Separator {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Note: checking for the vendor directory before checking
|
||||||
|
// for the vendor/path directory helps us hit the
|
||||||
|
// isDir cache more often. It also helps us prepare a more useful
|
||||||
|
// list of places we looked, to report when an import is not found.
|
||||||
|
if !isDir(filepath.Join(dir[:i], "vendor")) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
targ := filepath.Join(dir[:i], vpath)
|
||||||
|
if isDir(targ) && hasGoFiles(targ) {
|
||||||
|
importPath := parent.ImportPath
|
||||||
|
if importPath == "command-line-arguments" {
|
||||||
|
// If parent.ImportPath is 'command-line-arguments',
|
||||||
|
// set the import path to the directory relative to root (with the root prefix chopped off).
|
||||||
|
importPath = dir[len(root)+1:]
|
||||||
|
}
|
||||||
|
// We started with parent's dir c:\gopath\src\foo\bar\baz\quux\xyzzy.
|
||||||
|
// We know the import path for parent's dir.
|
||||||
|
// We chopped off some number of path elements and
|
||||||
|
// added vendor\path to produce c:\gopath\src\foo\bar\baz\vendor\path.
|
||||||
|
// Now we want to know the import path for that directory.
|
||||||
|
// Construct it by chopping the same number of path elements
|
||||||
|
// (actually the same number of bytes) from parent's import path
|
||||||
|
// and then append /vendor/path.
|
||||||
|
chopped := len(dir) - i
|
||||||
|
if chopped == len(importPath)+1 {
|
||||||
|
// We walked up from c:\gopath\src\foo\bar
|
||||||
|
// and found c:\gopath\src\vendor\path.
|
||||||
|
// We chopped \foo\bar (length 8) but the import path is "foo/bar" (length 7).
|
||||||
|
// Use "vendor/path" without any prefix.
|
||||||
|
return vpath
|
||||||
|
}
|
||||||
|
return importPath[:len(importPath)-chopped] + "/" + vpath
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return path
|
||||||
|
}
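A hedged example of the expansion this performs (paths are hypothetical):

// parent := &Package{Root: "/gopath", Dir: "/gopath/src/foo/bar", ImportPath: "foo/bar"}
// if /gopath/src/foo/bar/vendor/github.com/x/y exists and contains .go files:
//   VendoredImportPath(parent, "github.com/x/y") == "foo/bar/vendor/github.com/x/y"
// otherwise the original path "github.com/x/y" is returned unchanged.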
|
||||||
|
|
||||||
|
// hasGoFiles reports whether dir contains any files with names ending in .go.
|
||||||
|
// For a vendor check we must exclude directories that contain no .go files.
|
||||||
|
// Otherwise it is not possible to vendor just a/b/c and still import the
|
||||||
|
// non-vendored a/b. See golang.org/issue/13832.
|
||||||
|
func hasGoFiles(dir string) bool {
|
||||||
|
fis, _ := ioutil.ReadDir(dir)
|
||||||
|
for _, fi := range fis {
|
||||||
|
if !fi.IsDir() && strings.HasSuffix(fi.Name(), ".go") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
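For example (assumed layout):

// hasGoFiles("/gopath/src/a/vendor")   == false  // only subdirectories, no .go files directly inside
// hasGoFiles("/gopath/src/a/vendor/b") == true   // b contains at least one non-directory *.go file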
|
||||||
|
|
||||||
|
// findVendor looks for the last non-terminating "vendor" path element in the given import path.
|
||||||
|
// If there isn't one, findVendor returns ok=false.
|
||||||
|
// Otherwise, findVendor returns ok=true and the index of the "vendor".
|
||||||
|
//
|
||||||
|
// Note that terminating "vendor" elements don't count: "x/vendor" is its own package,
|
||||||
|
// not the vendored copy of an import "" (the empty import path).
|
||||||
|
// This will allow people to have packages or commands named vendor.
|
||||||
|
// This may help reduce breakage, or it may just be confusing. We'll see.
|
||||||
|
func findVendor(path string) (index int, ok bool) {
|
||||||
|
// Two cases, depending on whether "vendor" is at the start of the string or not.
|
||||||
|
// The order matters: we must return the index of the final element,
|
||||||
|
// because the final one is where the effective import path starts.
|
||||||
|
switch {
|
||||||
|
case strings.Contains(path, "/vendor/"):
|
||||||
|
return strings.LastIndex(path, "/vendor/") + 1, true
|
||||||
|
case strings.HasPrefix(path, "vendor/"):
|
||||||
|
return 0, true
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
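Some illustrative calls (paths invented):

// findVendor("a/b/vendor/p") == (4, true)    // index of the "vendor" element
// findVendor("vendor/p")     == (0, true)
// findVendor("x/vendor")     == (0, false)   // a terminating "vendor" element does not count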
|
||||||
|
|
||||||
|
func VendorPathToImportPath(path string) string {
|
||||||
|
if i, ok := findVendor(path); ok {
|
||||||
|
return path[i+len("vendor/"):]
|
||||||
|
}
|
||||||
|
return path
|
||||||
|
}
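And the corresponding stripping step (same hypothetical paths):

// VendorPathToImportPath("foo/bar/vendor/github.com/x/y") == "github.com/x/y"
// VendorPathToImportPath("foo/bar")                       == "foo/bar"  // no vendor element, unchanged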
|
|
@ -8,11 +8,6 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
var pkgList = `
|
var pkgList = `
|
||||||
cmd/cgo
|
|
||||||
cmd/fix
|
|
||||||
cmd/go
|
|
||||||
cmd/gofmt
|
|
||||||
cmd/yacc
|
|
||||||
archive/tar
|
archive/tar
|
||||||
archive/zip
|
archive/zip
|
||||||
bufio
|
bufio
|
||||||
|
@ -25,6 +20,7 @@ compress/zlib
|
||||||
container/heap
|
container/heap
|
||||||
container/list
|
container/list
|
||||||
container/ring
|
container/ring
|
||||||
|
context
|
||||||
crypto
|
crypto
|
||||||
crypto/aes
|
crypto/aes
|
||||||
crypto/cipher
|
crypto/cipher
|
||||||
|
@ -51,6 +47,7 @@ debug/elf
|
||||||
debug/gosym
|
debug/gosym
|
||||||
debug/macho
|
debug/macho
|
||||||
debug/pe
|
debug/pe
|
||||||
|
debug/plan9obj
|
||||||
encoding
|
encoding
|
||||||
encoding/ascii85
|
encoding/ascii85
|
||||||
encoding/asn1
|
encoding/asn1
|
||||||
|
@ -69,12 +66,15 @@ flag
|
||||||
fmt
|
fmt
|
||||||
go/ast
|
go/ast
|
||||||
go/build
|
go/build
|
||||||
|
go/constant
|
||||||
go/doc
|
go/doc
|
||||||
go/format
|
go/format
|
||||||
|
go/importer
|
||||||
go/parser
|
go/parser
|
||||||
go/printer
|
go/printer
|
||||||
go/scanner
|
go/scanner
|
||||||
go/token
|
go/token
|
||||||
|
go/types
|
||||||
hash
|
hash
|
||||||
hash/adler32
|
hash/adler32
|
||||||
hash/crc32
|
hash/crc32
|
||||||
|
@ -96,16 +96,19 @@ log
|
||||||
log/syslog
|
log/syslog
|
||||||
math
|
math
|
||||||
math/big
|
math/big
|
||||||
|
math/bits
|
||||||
math/cmplx
|
math/cmplx
|
||||||
math/rand
|
math/rand
|
||||||
mime
|
mime
|
||||||
mime/multipart
|
mime/multipart
|
||||||
|
mime/quotedprintable
|
||||||
net
|
net
|
||||||
net/http
|
net/http
|
||||||
net/http/cgi
|
net/http/cgi
|
||||||
net/http/cookiejar
|
net/http/cookiejar
|
||||||
net/http/fcgi
|
net/http/fcgi
|
||||||
net/http/httptest
|
net/http/httptest
|
||||||
|
net/http/httptrace
|
||||||
net/http/httputil
|
net/http/httputil
|
||||||
net/http/pprof
|
net/http/pprof
|
||||||
net/mail
|
net/mail
|
||||||
|
@ -120,6 +123,7 @@ os/signal
|
||||||
os/user
|
os/user
|
||||||
path
|
path
|
||||||
path/filepath
|
path/filepath
|
||||||
|
plugin
|
||||||
reflect
|
reflect
|
||||||
regexp
|
regexp
|
||||||
regexp/syntax
|
regexp/syntax
|
||||||
|
@ -128,6 +132,7 @@ runtime/cgo
|
||||||
runtime/debug
|
runtime/debug
|
||||||
runtime/pprof
|
runtime/pprof
|
||||||
runtime/race
|
runtime/race
|
||||||
|
runtime/trace
|
||||||
sort
|
sort
|
||||||
strconv
|
strconv
|
||||||
strings
|
strings
|
||||||
|
|
|
@ -11,6 +11,7 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/format"
|
"go/format"
|
||||||
"io"
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
|
@ -40,13 +41,19 @@ func main() {
|
||||||
fmt.Fprintf(&buf, format, args...)
|
fmt.Fprintf(&buf, format, args...)
|
||||||
}
|
}
|
||||||
outf("// AUTO-GENERATED BY mkstdlib.go\n\n")
|
outf("// AUTO-GENERATED BY mkstdlib.go\n\n")
|
||||||
outf("package imports\n")
|
outf("package stdlib\n")
|
||||||
outf("var stdlib = map[string]string{\n")
|
outf("var Symbols = map[string]string{\n")
|
||||||
f := io.MultiReader(
|
f := io.MultiReader(
|
||||||
mustOpen(api("go1.txt")),
|
mustOpen(api("go1.txt")),
|
||||||
mustOpen(api("go1.1.txt")),
|
mustOpen(api("go1.1.txt")),
|
||||||
mustOpen(api("go1.2.txt")),
|
mustOpen(api("go1.2.txt")),
|
||||||
mustOpen(api("go1.3.txt")),
|
mustOpen(api("go1.3.txt")),
|
||||||
|
mustOpen(api("go1.4.txt")),
|
||||||
|
mustOpen(api("go1.5.txt")),
|
||||||
|
mustOpen(api("go1.6.txt")),
|
||||||
|
mustOpen(api("go1.7.txt")),
|
||||||
|
mustOpen(api("go1.8.txt")),
|
||||||
|
mustOpen(api("go1.9.txt")),
|
||||||
)
|
)
|
||||||
sc := bufio.NewScanner(f)
|
sc := bufio.NewScanner(f)
|
||||||
fullImport := map[string]string{} // "zip.NewReader" => "archive/zip"
|
fullImport := map[string]string{} // "zip.NewReader" => "archive/zip"
|
||||||
|
@ -87,5 +94,6 @@ func main() {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
os.Stdout.Write(fmtbuf)
|
//os.Stdout.Write(fmtbuf)
|
||||||
|
ioutil.WriteFile("./zstdlib.go", fmtbuf, 0777)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,41 +1,42 @@
|
||||||
package stdlib
|
package stdlib
|
||||||
|
|
||||||
var Packages = []string{
|
var Packages = []string{
|
||||||
"cmd/cgo", "cmd/fix", "cmd/go", "cmd/gofmt",
|
"archive/tar", "archive/zip", "bufio", "bytes",
|
||||||
"cmd/yacc", "archive/tar", "archive/zip", "bufio",
|
"compress/bzip2", "compress/flate", "compress/gzip", "compress/lzw",
|
||||||
"bytes", "compress/bzip2", "compress/flate", "compress/gzip",
|
"compress/zlib", "container/heap", "container/list", "container/ring",
|
||||||
"compress/lzw", "compress/zlib", "container/heap", "container/list",
|
"context", "crypto", "crypto/aes", "crypto/cipher",
|
||||||
"container/ring", "crypto", "crypto/aes", "crypto/cipher",
|
|
||||||
"crypto/des", "crypto/dsa", "crypto/ecdsa", "crypto/elliptic",
|
"crypto/des", "crypto/dsa", "crypto/ecdsa", "crypto/elliptic",
|
||||||
"crypto/hmac", "crypto/md5", "crypto/rand", "crypto/rc4",
|
"crypto/hmac", "crypto/md5", "crypto/rand", "crypto/rc4",
|
||||||
"crypto/rsa", "crypto/sha1", "crypto/sha256", "crypto/sha512",
|
"crypto/rsa", "crypto/sha1", "crypto/sha256", "crypto/sha512",
|
||||||
"crypto/subtle", "crypto/tls", "crypto/x509", "crypto/x509/pkix",
|
"crypto/subtle", "crypto/tls", "crypto/x509", "crypto/x509/pkix",
|
||||||
"database/sql", "database/sql/driver", "debug/dwarf", "debug/elf",
|
"database/sql", "database/sql/driver", "debug/dwarf", "debug/elf",
|
||||||
"debug/gosym", "debug/macho", "debug/pe", "encoding",
|
"debug/gosym", "debug/macho", "debug/pe", "debug/plan9obj",
|
||||||
"encoding/ascii85", "encoding/asn1", "encoding/base32", "encoding/base64",
|
"encoding", "encoding/ascii85", "encoding/asn1", "encoding/base32",
|
||||||
"encoding/binary", "encoding/csv", "encoding/gob", "encoding/hex",
|
"encoding/base64", "encoding/binary", "encoding/csv", "encoding/gob",
|
||||||
"encoding/json", "encoding/pem", "encoding/xml", "errors",
|
"encoding/hex", "encoding/json", "encoding/pem", "encoding/xml",
|
||||||
"expvar", "flag", "fmt", "go/ast",
|
"errors", "expvar", "flag", "fmt",
|
||||||
"go/build", "go/doc", "go/format", "go/parser",
|
"go/ast", "go/build", "go/constant", "go/doc",
|
||||||
"go/printer", "go/scanner", "go/token", "hash",
|
"go/format", "go/importer", "go/parser", "go/printer",
|
||||||
|
"go/scanner", "go/token", "go/types", "hash",
|
||||||
"hash/adler32", "hash/crc32", "hash/crc64", "hash/fnv",
|
"hash/adler32", "hash/crc32", "hash/crc64", "hash/fnv",
|
||||||
"html", "html/template", "image", "image/color",
|
"html", "html/template", "image", "image/color",
|
||||||
"image/color/palette", "image/draw", "image/gif", "image/jpeg",
|
"image/color/palette", "image/draw", "image/gif", "image/jpeg",
|
||||||
"image/png", "index/suffixarray", "io", "io/ioutil",
|
"image/png", "index/suffixarray", "io", "io/ioutil",
|
||||||
"log", "log/syslog", "math", "math/big",
|
"log", "log/syslog", "math", "math/big",
|
||||||
"math/cmplx", "math/rand", "mime", "mime/multipart",
|
"math/bits", "math/cmplx", "math/rand", "mime",
|
||||||
"net", "net/http", "net/http/cgi", "net/http/cookiejar",
|
"mime/multipart", "mime/quotedprintable", "net", "net/http",
|
||||||
"net/http/fcgi", "net/http/httptest", "net/http/httputil", "net/http/pprof",
|
"net/http/cgi", "net/http/cookiejar", "net/http/fcgi", "net/http/httptest",
|
||||||
"net/mail", "net/rpc", "net/rpc/jsonrpc", "net/smtp",
|
"net/http/httptrace", "net/http/httputil", "net/http/pprof", "net/mail",
|
||||||
"net/textproto", "net/url", "os", "os/exec",
|
"net/rpc", "net/rpc/jsonrpc", "net/smtp", "net/textproto",
|
||||||
"os/signal", "os/user", "path", "path/filepath",
|
"net/url", "os", "os/exec", "os/signal",
|
||||||
|
"os/user", "path", "path/filepath", "plugin",
|
||||||
"reflect", "regexp", "regexp/syntax", "runtime",
|
"reflect", "regexp", "regexp/syntax", "runtime",
|
||||||
"runtime/cgo", "runtime/debug", "runtime/pprof", "runtime/race",
|
"runtime/cgo", "runtime/debug", "runtime/pprof", "runtime/race",
|
||||||
"sort", "strconv", "strings", "sync",
|
"runtime/trace", "sort", "strconv", "strings",
|
||||||
"sync/atomic", "syscall", "testing", "testing/iotest",
|
"sync", "sync/atomic", "syscall", "testing",
|
||||||
"testing/quick", "text/scanner", "text/tabwriter", "text/template",
|
"testing/iotest", "testing/quick", "text/scanner", "text/tabwriter",
|
||||||
"text/template/parse", "time", "unicode", "unicode/utf16",
|
"text/template", "text/template/parse", "time", "unicode",
|
||||||
"unicode/utf8", "unsafe",
|
"unicode/utf16", "unicode/utf8", "unsafe",
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsStdPkg(pkg string) bool {
|
func IsStdPkg(pkg string) bool {
|
||||||
|
|
File diff suppressed because it is too large
|
@ -9,9 +9,11 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"go/build"
|
"go/build"
|
||||||
|
"go/importer"
|
||||||
"go/parser"
|
"go/parser"
|
||||||
"go/printer"
|
"go/printer"
|
||||||
"go/token"
|
"go/token"
|
||||||
|
"go/types"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
|
@ -23,10 +25,9 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/visualfc/gotools/command"
|
"github.com/visualfc/gotools/command"
|
||||||
|
"github.com/visualfc/gotools/pkgutil"
|
||||||
"github.com/visualfc/gotools/stdlib"
|
"github.com/visualfc/gotools/stdlib"
|
||||||
"golang.org/x/tools/go/buildutil"
|
"golang.org/x/tools/go/buildutil"
|
||||||
"golang.org/x/tools/go/gcimporter"
|
|
||||||
"golang.org/x/tools/go/types"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var Command = &command.Command{
|
var Command = &command.Command{
|
||||||
|
@ -46,6 +47,8 @@ var (
|
||||||
typesFindUseAll bool
|
typesFindUseAll bool
|
||||||
typesFindInfo bool
|
typesFindInfo bool
|
||||||
typesFindDoc bool
|
typesFindDoc bool
|
||||||
|
typesTags string
|
||||||
|
typesTagList = []string{} // exploded version of tags flag; set in main
|
||||||
)
|
)
|
||||||
|
|
||||||
//func init
|
//func init
|
||||||
|
@ -59,6 +62,7 @@ func init() {
|
||||||
Command.Flag.BoolVar(&typesFindUse, "use", false, "find cursor usages")
|
Command.Flag.BoolVar(&typesFindUse, "use", false, "find cursor usages")
|
||||||
Command.Flag.BoolVar(&typesFindUseAll, "all", false, "find cursor all usages in GOPATH")
|
Command.Flag.BoolVar(&typesFindUseAll, "all", false, "find cursor all usages in GOPATH")
|
||||||
Command.Flag.BoolVar(&typesFindDoc, "doc", false, "find cursor def doc")
|
Command.Flag.BoolVar(&typesFindDoc, "doc", false, "find cursor def doc")
|
||||||
|
Command.Flag.StringVar(&typesTags, "tags", "", "space-separated list of build tags to apply when parsing")
|
||||||
}
|
}
|
||||||
|
|
||||||
type ObjKind int
|
type ObjKind int
|
||||||
|
@ -77,13 +81,17 @@ const (
|
||||||
ObjLabel
|
ObjLabel
|
||||||
ObjBuiltin
|
ObjBuiltin
|
||||||
ObjNil
|
ObjNil
|
||||||
|
ObjImplicit
|
||||||
|
ObjUnknown
|
||||||
|
ObjComment
|
||||||
)
|
)
|
||||||
|
|
||||||
var ObjKindName = []string{"none", "package",
|
var ObjKindName = []string{"none", "package",
|
||||||
"type", "interface", "struct",
|
"type", "interface", "struct",
|
||||||
"const", "var", "field",
|
"const", "var", "field",
|
||||||
"func", "method",
|
"func", "method",
|
||||||
"label", "builtin", "nil"}
|
"label", "builtin", "nil",
|
||||||
|
"implicit", "unknown", "comment"}
|
||||||
|
|
||||||
func (k ObjKind) String() string {
|
func (k ObjKind) String() string {
|
||||||
if k >= 0 && int(k) < len(ObjKindName) {
|
if k >= 0 && int(k) < len(ObjKindName) {
|
||||||
|
@ -109,6 +117,9 @@ var builtinInfoMap = map[string]string{
|
||||||
"print": "func print(args ...Type)",
|
"print": "func print(args ...Type)",
|
||||||
"println": "func println(args ...Type)",
|
"println": "func println(args ...Type)",
|
||||||
"error": "type error interface {Error() string}",
|
"error": "type error interface {Error() string}",
|
||||||
|
"Sizeof": "func unsafe.Sizeof(any) uintptr",
|
||||||
|
"Offsetof": "func unsafe.Offsetof(any) uintptr",
|
||||||
|
"Alignof": "func unsafe.Alignof(any) uintptr",
|
||||||
}
|
}
|
||||||
|
|
||||||
func builtinInfo(id string) string {
|
func builtinInfo(id string) string {
|
||||||
|
@ -119,10 +130,14 @@ func builtinInfo(id string) string {
|
||||||
}
|
}
|
||||||
|
|
||||||
func simpleObjInfo(obj types.Object) string {
|
func simpleObjInfo(obj types.Object) string {
|
||||||
|
s := obj.String()
|
||||||
pkg := obj.Pkg()
|
pkg := obj.Pkg()
|
||||||
s := simpleType(obj.String())
|
if pkg != nil {
|
||||||
if pkg != nil && pkg.Name() == "main" {
|
s = strings.Replace(s, pkg.Path(), pkg.Name(), -1)
|
||||||
return strings.Replace(s, simpleType(pkg.Path())+".", "", -1)
|
s = simpleType(s)
|
||||||
|
if pkg.Name() == "main" {
|
||||||
|
s = strings.Replace(s, "main.", "", -1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
|
@ -152,7 +167,11 @@ func runTypes(cmd *command.Command, args []string) error {
|
||||||
log.Println("time", time.Now().Sub(now))
|
log.Println("time", time.Now().Sub(now))
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
w := NewPkgWalker(&build.Default)
|
typesTagList = strings.Split(typesTags, " ")
|
||||||
|
context := build.Default
|
||||||
|
context.BuildTags = append(typesTagList, context.BuildTags...)
|
||||||
|
|
||||||
|
w := NewPkgWalker(&context)
|
||||||
var cursor *FileCursor
|
var cursor *FileCursor
|
||||||
if typesFilePos != "" {
|
if typesFilePos != "" {
|
||||||
var cursorInfo FileCursor
|
var cursorInfo FileCursor
|
||||||
|
@ -171,6 +190,7 @@ func runTypes(cmd *command.Command, args []string) error {
|
||||||
}
|
}
|
||||||
cursor = &cursorInfo
|
cursor = &cursorInfo
|
||||||
}
|
}
|
||||||
|
w.cursor = cursor
|
||||||
for _, pkgName := range args {
|
for _, pkgName := range args {
|
||||||
if pkgName == "." {
|
if pkgName == "." {
|
||||||
pkgPath, err := os.Getwd()
|
pkgPath, err := os.Getwd()
|
||||||
|
@ -181,6 +201,7 @@ func runTypes(cmd *command.Command, args []string) error {
|
||||||
}
|
}
|
||||||
conf := &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: true}
|
conf := &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: true}
|
||||||
if cursor != nil {
|
if cursor != nil {
|
||||||
|
cursor.pkgName = pkgName
|
||||||
conf.Cursor = cursor
|
conf.Cursor = cursor
|
||||||
conf.IgnoreFuncBodies = false
|
conf.IgnoreFuncBodies = false
|
||||||
conf.Info = &types.Info{
|
conf.Info = &types.Info{
|
||||||
|
@ -209,7 +230,7 @@ func runTypes(cmd *command.Command, args []string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
type FileCursor struct {
|
type FileCursor struct {
|
||||||
pkg string
|
pkgName string
|
||||||
fileName string
|
fileName string
|
||||||
fileDir string
|
fileDir string
|
||||||
cursorPos int
|
cursorPos int
|
||||||
|
@ -228,7 +249,6 @@ type PkgConfig struct {
|
||||||
Info *types.Info
|
Info *types.Info
|
||||||
XInfo *types.Info
|
XInfo *types.Info
|
||||||
Files map[string]*ast.File
|
Files map[string]*ast.File
|
||||||
TestFiles map[string]*ast.File
|
|
||||||
XTestFiles map[string]*ast.File
|
XTestFiles map[string]*ast.File
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -237,8 +257,9 @@ func NewPkgWalker(context *build.Context) *PkgWalker {
|
||||||
context: context,
|
context: context,
|
||||||
fset: token.NewFileSet(),
|
fset: token.NewFileSet(),
|
||||||
parsedFileCache: map[string]*ast.File{},
|
parsedFileCache: map[string]*ast.File{},
|
||||||
|
importingName: map[string]bool{},
|
||||||
imported: map[string]*types.Package{"unsafe": types.Unsafe},
|
imported: map[string]*types.Package{"unsafe": types.Unsafe},
|
||||||
gcimporter: map[string]*types.Package{"unsafe": types.Unsafe},
|
gcimported: importer.Default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -246,10 +267,12 @@ type PkgWalker struct {
|
||||||
fset *token.FileSet
|
fset *token.FileSet
|
||||||
context *build.Context
|
context *build.Context
|
||||||
current *types.Package
|
current *types.Package
|
||||||
importing types.Package
|
importingName map[string]bool
|
||||||
parsedFileCache map[string]*ast.File
|
parsedFileCache map[string]*ast.File
|
||||||
imported map[string]*types.Package // packages already imported
|
imported map[string]*types.Package // packages already imported
|
||||||
gcimporter map[string]*types.Package
|
gcimported types.Importer
|
||||||
|
cursor *FileCursor
|
||||||
|
//importing types.Package
|
||||||
}
|
}
|
||||||
|
|
||||||
func contains(list []string, s string) bool {
|
func contains(list []string, s string) bool {
|
||||||
|
@ -283,19 +306,25 @@ func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
if strings.HasPrefix(name, ".") && parentDir != "" {
|
if parentDir != "" {
|
||||||
|
if strings.HasPrefix(name, ".") {
|
||||||
name = filepath.Join(parentDir, name)
|
name = filepath.Join(parentDir, name)
|
||||||
|
} else if pkgutil.IsVendorExperiment() {
|
||||||
|
parentPkg := pkgutil.ImportDir(parentDir)
|
||||||
|
name = pkgutil.VendoredImportPath(parentPkg, name)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pkg = w.imported[name]
|
pkg = w.imported[name]
|
||||||
if pkg != nil {
|
if pkg != nil {
|
||||||
if pkg == &w.importing {
|
// if pkg == &w.importing {
|
||||||
return nil, fmt.Errorf("cycle importing package %q", name)
|
// return nil, fmt.Errorf("cycle importing package %q", name)
|
||||||
}
|
// }
|
||||||
return pkg, nil
|
return pkg, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if typesVerbose {
|
if typesVerbose {
|
||||||
log.Println("parser pkg", name)
|
log.Println("parser pkg", parentDir, name)
|
||||||
}
|
}
|
||||||
|
|
||||||
bp, err := w.importPath(name, 0)
|
bp, err := w.importPath(name, 0)
|
||||||
|
@ -311,33 +340,70 @@ func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg
|
||||||
checkName = bp.ImportPath
|
checkName = bp.ImportPath
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if w.importingName[checkName] {
|
||||||
return nil, err
|
return nil, fmt.Errorf("cycle importing package %q", name)
|
||||||
//if _, nogo := err.(*build.NoGoError); nogo {
|
|
||||||
// return
|
|
||||||
//}
|
|
||||||
//return
|
|
||||||
//log.Fatalf("pkg %q, dir %q: ScanDir: %v", name, info.Dir, err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
filenames := append(append([]string{}, bp.GoFiles...), bp.CgoFiles...)
|
w.importingName[checkName] = true
|
||||||
|
|
||||||
|
// if err != nil {
|
||||||
|
// return nil, err
|
||||||
|
// //if _, nogo := err.(*build.NoGoError); nogo {
|
||||||
|
// // return
|
||||||
|
// //}
|
||||||
|
// //return
|
||||||
|
// //log.Fatalf("pkg %q, dir %q: ScanDir: %v", name, info.Dir, err)
|
||||||
|
// }
|
||||||
|
|
||||||
|
GoFiles := append(append([]string{}, bp.GoFiles...), bp.CgoFiles...)
|
||||||
|
XTestFiles := append([]string{}, bp.XTestGoFiles...)
|
||||||
|
|
||||||
if conf.WithTestFiles {
|
if conf.WithTestFiles {
|
||||||
filenames = append(filenames, bp.TestGoFiles...)
|
GoFiles = append(GoFiles, bp.TestGoFiles...)
|
||||||
}
|
}
|
||||||
|
|
||||||
if name == "runtime" {
|
if name == "runtime" {
|
||||||
n := fmt.Sprintf("zgoos_%s.go", w.context.GOOS)
|
n := fmt.Sprintf("zgoos_%s.go", w.context.GOOS)
|
||||||
if !contains(filenames, n) {
|
if !contains(GoFiles, n) {
|
||||||
filenames = append(filenames, n)
|
GoFiles = append(GoFiles, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
n = fmt.Sprintf("zgoarch_%s.go", w.context.GOARCH)
|
n = fmt.Sprintf("zgoarch_%s.go", w.context.GOARCH)
|
||||||
if !contains(filenames, n) {
|
if !contains(GoFiles, n) {
|
||||||
filenames = append(filenames, n)
|
GoFiles = append(GoFiles, n)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
parserFiles := func(filenames []string, cursor *FileCursor, xtest bool) (files []*ast.File) {
|
if conf.Cursor != nil && conf.Cursor.fileName != "" {
|
||||||
|
cursor := conf.Cursor
|
||||||
|
f, _ := w.parseFileEx(bp.Dir, cursor.fileName, cursor.src, true)
|
||||||
|
if f != nil {
|
||||||
|
cursor.pos = token.Pos(w.fset.File(f.Pos()).Base()) + token.Pos(cursor.cursorPos)
|
||||||
|
cursor.fileDir = bp.Dir
|
||||||
|
isTest := strings.HasSuffix(cursor.fileName, "_test.go")
|
||||||
|
isXTest := false
|
||||||
|
if isTest && strings.HasSuffix(f.Name.Name, "_test") {
|
||||||
|
isXTest = true
|
||||||
|
}
|
||||||
|
cursor.xtest = isXTest
|
||||||
|
checkAppend := func(filenames []string, file string) []string {
|
||||||
|
for _, f := range filenames {
|
||||||
|
if f == file {
|
||||||
|
return filenames
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return append(filenames, file)
|
||||||
|
}
|
||||||
|
if isXTest {
|
||||||
|
XTestFiles = checkAppend(XTestFiles, cursor.fileName)
|
||||||
|
} else {
|
||||||
|
GoFiles = checkAppend(GoFiles, cursor.fileName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
parserFiles := func(filenames []string, cursor *FileCursor, xtest bool) (files []*ast.File, fileMap map[string]*ast.File) {
|
||||||
|
fileMap = make(map[string]*ast.File)
|
||||||
for _, file := range filenames {
|
for _, file := range filenames {
|
||||||
var f *ast.File
|
var f *ast.File
|
||||||
if cursor != nil && cursor.fileName == file {
|
if cursor != nil && cursor.fileName == file {
|
||||||
|
@ -352,33 +418,19 @@ func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg
|
||||||
log.Printf("error parsing package %s: %s\n", name, err)
|
log.Printf("error parsing package %s: %s\n", name, err)
|
||||||
}
|
}
|
||||||
files = append(files, f)
|
files = append(files, f)
|
||||||
|
fileMap[file] = f
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
files := parserFiles(filenames, conf.Cursor, false)
|
var files []*ast.File
|
||||||
xfiles := parserFiles(bp.XTestGoFiles, conf.Cursor, true)
|
var xfiles []*ast.File
|
||||||
|
files, conf.Files = parserFiles(GoFiles, conf.Cursor, false)
|
||||||
|
xfiles, conf.XTestFiles = parserFiles(bp.XTestGoFiles, conf.Cursor, true)
|
||||||
|
|
||||||
typesConf := types.Config{
|
typesConf := types.Config{
|
||||||
IgnoreFuncBodies: conf.IgnoreFuncBodies,
|
IgnoreFuncBodies: conf.IgnoreFuncBodies,
|
||||||
FakeImportC: true,
|
FakeImportC: true,
|
||||||
Packages: w.gcimporter,
|
Importer: &Importer{w, conf, bp.Dir},
|
||||||
Import: func(imports map[string]*types.Package, name string) (pkg *types.Package, err error) {
|
|
||||||
if pkg != nil {
|
|
||||||
return pkg, nil
|
|
||||||
}
|
|
||||||
if conf.AllowBinary && w.isBinaryPkg(name) {
|
|
||||||
pkg = w.gcimporter[name]
|
|
||||||
if pkg != nil && pkg.Complete() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
pkg, err = gcimporter.Import(imports, name)
|
|
||||||
if pkg != nil && pkg.Complete() {
|
|
||||||
w.gcimporter[name] = pkg
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return w.Import(bp.Dir, name, &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: false})
|
|
||||||
},
|
|
||||||
Error: func(err error) {
|
Error: func(err error) {
|
||||||
if typesVerbose {
|
if typesVerbose {
|
||||||
log.Println(err)
|
log.Println(err)
|
||||||
|
@ -389,6 +441,7 @@ func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg
|
||||||
pkg, err = typesConf.Check(checkName, w.fset, files, conf.Info)
|
pkg, err = typesConf.Check(checkName, w.fset, files, conf.Info)
|
||||||
conf.Pkg = pkg
|
conf.Pkg = pkg
|
||||||
}
|
}
|
||||||
|
w.importingName[checkName] = false
|
||||||
w.imported[name] = pkg
|
w.imported[name] = pkg
|
||||||
|
|
||||||
if len(xfiles) > 0 {
|
if len(xfiles) > 0 {
|
||||||
|
@ -399,7 +452,36 @@ func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type Importer struct {
|
||||||
|
w *PkgWalker
|
||||||
|
conf *PkgConfig
|
||||||
|
dir string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (im *Importer) Import(name string) (pkg *types.Package, err error) {
|
||||||
|
if im.conf.AllowBinary && im.w.isBinaryPkg(name) {
|
||||||
|
pkg, err = im.w.gcimported.Import(name)
|
||||||
|
if pkg != nil && pkg.Complete() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// pkg = im.w.gcimporter[name]
|
||||||
|
// if pkg != nil && pkg.Complete() {
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
// pkg, err = importer.Default().Import(name)
|
||||||
|
// if pkg != nil && pkg.Complete() {
|
||||||
|
// im.w.gcimporter[name] = pkg
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
}
|
||||||
|
return im.w.Import(im.dir, name, &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: false})
|
||||||
|
}
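The new Importer type appears to exist because go1.5's go/types replaced the Config.Import callback with the Importer interface; a minimal sketch of how it is wired in, mirroring the typesConf hunk earlier in this diff:

// typesConf := types.Config{
//     IgnoreFuncBodies: conf.IgnoreFuncBodies,
//     FakeImportC:      true,
//     Importer:         &Importer{w, conf, bp.Dir},
// }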
|
||||||
|
|
||||||
func (w *PkgWalker) parseFile(dir, file string, src interface{}) (*ast.File, error) {
|
func (w *PkgWalker) parseFile(dir, file string, src interface{}) (*ast.File, error) {
|
||||||
|
return w.parseFileEx(dir, file, src, typesFindDoc)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *PkgWalker) parseFileEx(dir, file string, src interface{}, findDoc bool) (*ast.File, error) {
|
||||||
filename := filepath.Join(dir, file)
|
filename := filepath.Join(dir, file)
|
||||||
f, _ := w.parsedFileCache[filename]
|
f, _ := w.parsedFileCache[filename]
|
||||||
if f != nil {
|
if f != nil {
|
||||||
|
@ -427,7 +509,7 @@ func (w *PkgWalker) parseFile(dir, file string, src interface{}) (*ast.File, err
|
||||||
|
|
||||||
if f == nil {
|
if f == nil {
|
||||||
flag := parser.AllErrors
|
flag := parser.AllErrors
|
||||||
if typesFindDoc {
|
if findDoc {
|
||||||
flag |= parser.ParseComments
|
flag |= parser.ParseComments
|
||||||
}
|
}
|
||||||
f, err = parser.ParseFile(w.fset, filename, src, flag)
|
f, err = parser.ParseFile(w.fset, filename, src, flag)
|
||||||
|
@ -441,8 +523,9 @@ func (w *PkgWalker) parseFile(dir, file string, src interface{}) (*ast.File, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (w *PkgWalker) LookupCursor(pkg *types.Package, conf *PkgConfig, cursor *FileCursor) {
|
func (w *PkgWalker) LookupCursor(pkg *types.Package, conf *PkgConfig, cursor *FileCursor) {
|
||||||
is := w.CheckIsImport(cursor)
|
if nm := w.CheckIsName(cursor); nm != nil {
|
||||||
if is != nil {
|
w.LookupName(pkg, conf, cursor, nm)
|
||||||
|
} else if is := w.CheckIsImport(cursor); is != nil {
|
||||||
if cursor.xtest {
|
if cursor.xtest {
|
||||||
w.LookupImport(conf.XPkg, conf.XInfo, cursor, is)
|
w.LookupImport(conf.XPkg, conf.XInfo, cursor, is)
|
||||||
} else {
|
} else {
|
||||||
|
@ -453,6 +536,44 @@ func (w *PkgWalker) LookupCursor(pkg *types.Package, conf *PkgConfig, cursor *Fi
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (w *PkgWalker) LookupName(pkg *types.Package, conf *PkgConfig, cursor *FileCursor, nm *ast.Ident) {
|
||||||
|
if typesFindDef {
|
||||||
|
fmt.Println(w.fset.Position(nm.Pos()))
|
||||||
|
}
|
||||||
|
if typesFindInfo {
|
||||||
|
if cursor.xtest {
|
||||||
|
fmt.Printf("package %s (%q)\n", pkg.Name()+"_test", pkg.Path())
|
||||||
|
} else {
|
||||||
|
if pkg.Path() == pkg.Name() {
|
||||||
|
fmt.Printf("package %s\n", pkg.Name())
|
||||||
|
} else {
|
||||||
|
fmt.Printf("package %s (%q)\n", pkg.Name(), pkg.Path())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !typesFindUse {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var usages []int
|
||||||
|
findUsage := func(fileMap map[string]*ast.File) {
|
||||||
|
for _, f := range fileMap {
|
||||||
|
if f != nil && f.Name != nil && f.Name.Name == nm.Name {
|
||||||
|
usages = append(usages, int(f.Name.Pos()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if cursor.xtest {
|
||||||
|
findUsage(conf.XTestFiles)
|
||||||
|
} else {
|
||||||
|
findUsage(conf.Files)
|
||||||
|
}
|
||||||
|
(sort.IntSlice(usages)).Sort()
|
||||||
|
for _, pos := range usages {
|
||||||
|
fmt.Println(w.fset.Position(token.Pos(pos)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (w *PkgWalker) LookupImport(pkg *types.Package, pkgInfo *types.Info, cursor *FileCursor, is *ast.ImportSpec) {
|
func (w *PkgWalker) LookupImport(pkg *types.Package, pkgInfo *types.Info, cursor *FileCursor, is *ast.ImportSpec) {
|
||||||
fpath, err := strconv.Unquote(is.Path.Value)
|
fpath, err := strconv.Unquote(is.Path.Value)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -480,7 +601,7 @@ func (w *PkgWalker) LookupImport(pkg *types.Package, pkgInfo *types.Info, cursor
|
||||||
if fname == fpath {
|
if fname == fpath {
|
||||||
fmt.Printf("package %s\n", fname)
|
fmt.Printf("package %s\n", fname)
|
||||||
} else {
|
} else {
|
||||||
fmt.Printf("package %s (\"%s\")\n", fname, fpath)
|
fmt.Printf("package %s (%q)\n", fname, fpath)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -489,18 +610,29 @@ func (w *PkgWalker) LookupImport(pkg *types.Package, pkgInfo *types.Info, cursor
|
||||||
}
|
}
|
||||||
|
|
||||||
fid := pkg.Path() + "." + fname
|
fid := pkg.Path() + "." + fname
|
||||||
|
|
||||||
var usages []int
|
var usages []int
|
||||||
for id, obj := range pkgInfo.Uses {
|
for id, obj := range pkgInfo.Uses {
|
||||||
if obj != nil && obj.Id() == fid { //!= nil && cursorObj.Pos() == obj.Pos() {
|
if obj != nil && obj.Id() == fid { //!= nil && cursorObj.Pos() == obj.Pos() {
|
||||||
|
if _, ok := obj.(*types.PkgName); ok {
|
||||||
usages = append(usages, int(id.Pos()))
|
usages = append(usages, int(id.Pos()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
(sort.IntSlice(usages)).Sort()
|
(sort.IntSlice(usages)).Sort()
|
||||||
for _, pos := range usages {
|
for _, pos := range usages {
|
||||||
fmt.Println(w.fset.Position(token.Pos(pos)))
|
fmt.Println(w.fset.Position(token.Pos(pos)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func testObjKind(obj types.Object, kind ObjKind) bool {
|
||||||
|
k, err := parserObjKind(obj)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return k == kind
|
||||||
|
}
|
||||||
|
|
||||||
func parserObjKind(obj types.Object) (ObjKind, error) {
|
func parserObjKind(obj types.Object) (ObjKind, error) {
|
||||||
var kind ObjKind
|
var kind ObjKind
|
||||||
switch t := obj.(type) {
|
switch t := obj.(type) {
|
||||||
|
@ -662,6 +794,14 @@ func IsSameObject(a, b types.Object) bool {
|
||||||
if a.Id() != b.Id() {
|
if a.Id() != b.Id() {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
if a.Type().String() != b.Type().String() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
t1, ok1 := a.(*types.TypeName)
|
||||||
|
t2, ok2 := b.(*types.TypeName)
|
||||||
|
if ok1 && ok2 {
|
||||||
|
return t1.Type().String() == t2.Type().String()
|
||||||
|
}
|
||||||
return a.String() == b.String()
|
return a.String() == b.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -698,10 +838,12 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
var cursorId *ast.Ident
|
||||||
if cursorObj == nil {
|
if cursorObj == nil {
|
||||||
for id, obj := range pkgInfo.Defs {
|
for id, obj := range pkgInfo.Defs {
|
||||||
if cursor.pos >= id.Pos() && cursor.pos <= id.End() {
|
if cursor.pos >= id.Pos() && cursor.pos <= id.End() {
|
||||||
cursorObj = obj
|
cursorObj = obj
|
||||||
|
cursorId = id
|
||||||
cursorObjIsDef = true
|
cursorObjIsDef = true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
@ -716,13 +858,19 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if cursorObj == nil {
|
|
||||||
return
|
var kind ObjKind
|
||||||
}
|
if cursorObj != nil {
|
||||||
kind, err := parserObjKind(cursorObj)
|
var err error
|
||||||
|
kind, err = parserObjKind(cursorObj)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln(err)
|
log.Fatalln(err)
|
||||||
}
|
}
|
||||||
|
} else if cursorId != nil {
|
||||||
|
kind = ObjImplicit
|
||||||
|
} else {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if kind == ObjField {
|
if kind == ObjField {
|
||||||
if cursorObj.(*types.Var).Anonymous() {
|
if cursorObj.(*types.Var).Anonymous() {
|
||||||
|
@ -732,8 +880,18 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
cursorPkg := cursorObj.Pkg()
|
|
||||||
cursorPos := cursorObj.Pos()
|
var cursorPkg *types.Package
|
||||||
|
var cursorPos token.Pos
|
||||||
|
|
||||||
|
if cursorObj != nil {
|
||||||
|
+		cursorPkg = cursorObj.Pkg()
+		cursorPos = cursorObj.Pos()
+	} else {
+		cursorPkg = pkg
+		cursorPos = cursorId.Pos()
+	}

 	//var fieldTypeInfo *types.Info
 	var fieldTypeObj types.Object
 	// if cursorPkg == pkg {
@@ -741,6 +899,8 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 	// }
 	cursorIsInterfaceMethod := false
 	var cursorInterfaceTypeName string
+	var cursorInterfaceTypeNamed *types.Named

 	if kind == ObjMethod && cursorSelection != nil && cursorSelection.Recv() != nil {
 		sig := cursorObj.(*types.Func).Type().Underlying().(*types.Signature)
 		if _, ok := sig.Recv().Type().Underlying().(*types.Interface); ok {
@@ -754,6 +914,7 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 					cursorInterfaceTypeName = typ.Obj().Name()
 				}
 				cursorIsInterfaceMethod = true
+				cursorInterfaceTypeNamed = named
 			}
 		}
 	} else if kind == ObjField && cursorSelection != nil {
@@ -770,6 +931,9 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 			}
 		}
 	}
+	if typesVerbose {
+		log.Println("parser", cursorObj, kind, cursorIsInterfaceMethod)
+	}
 	if cursorPkg != nil && cursorPkg != pkg &&
 		kind != ObjPkgName && w.isBinaryPkg(cursorPkg.Path()) {
 		conf := &PkgConfig{
@@ -783,23 +947,41 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 		pkg, _ := w.Import("", cursorPkg.Path(), conf)
 		if pkg != nil {
 			if cursorIsInterfaceMethod {
-				for _, obj := range conf.Info.Defs {
-					if obj == nil {
-						continue
-					}
-					if fn, ok := obj.(*types.Func); ok {
-						if fn.Name() == cursorObj.Name() {
-							if sig, ok := fn.Type().Underlying().(*types.Signature); ok {
-								if named, ok := sig.Recv().Type().(*types.Named); ok {
-									if named.Obj() != nil && named.Obj().Name() == cursorInterfaceTypeName {
+				for k, v := range conf.Info.Defs {
+					if k != nil && v != nil && IsSameObject(v, cursorInterfaceTypeNamed.Obj()) {
+						named := v.Type().(*types.Named)
+						obj, typ := w.lookupNamedMethod(named, cursorObj.Name())
+						if obj != nil {
+							cursorObj = obj
 							cursorPos = obj.Pos()
+						}
+						if obj != nil {
+							cursorObj = obj
+						}
+						if typ != nil {
+							cursorPkg = typ.Obj().Pkg()
+							cursorInterfaceTypeName = typ.Obj().Name()
+						}
 						break
 					}
 				}
-				}
-				}
-				}
-				}
+				// for _, obj := range conf.Info.Defs {
+				// 	if obj == nil {
+				// 		continue
+				// 	}
+				// 	if fn, ok := obj.(*types.Func); ok {
+				// 		if fn.Name() == cursorObj.Name() {
+				// 			if sig, ok := fn.Type().Underlying().(*types.Signature); ok {
+				// 				if named, ok := sig.Recv().Type().(*types.Named); ok {
+				// 					if named.Obj() != nil && named.Obj().Name() == cursorInterfaceTypeName {
+				// 						cursorPos = obj.Pos()
+				// 						break
+				// 					}
+				// 				}
+				// 			}
+				// 		}
+				// 	}
+				// }
 			} else if kind == ObjField && fieldTypeObj != nil {
 				for _, obj := range conf.Info.Defs {
 					if obj == nil {
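The hunk above replaces the old name-based scan over conf.Info.Defs with a lookup that first identifies the interface's named type (comparing against cursorInterfaceTypeNamed via IsSameObject) and then resolves the method through w.lookupNamedMethod. As a minimal, self-contained sketch of the same go/types idea (demo package and names are assumptions, not code from this commit), a method on a named interface type can be resolved by name like this:

	package main

	import (
		"fmt"
		"go/ast"
		"go/importer"
		"go/parser"
		"go/token"
		"go/types"
	)

	const src = `package demo

	type Reader interface {
		Read(p []byte) (n int, err error)
	}
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "demo.go", src, 0)
		if err != nil {
			panic(err)
		}
		conf := types.Config{Importer: importer.Default()}
		pkg, err := conf.Check("demo", fset, []*ast.File{f}, nil)
		if err != nil {
			panic(err)
		}
		// Find the named type, then search its underlying interface
		// for a method with the wanted name.
		named := pkg.Scope().Lookup("Reader").Type().(*types.Named)
		iface := named.Underlying().(*types.Interface)
		for i := 0; i < iface.NumMethods(); i++ {
			if m := iface.Method(i); m.Name() == "Read" {
				fmt.Println(m.FullName(), fset.Position(m.Pos()))
			}
		}
	}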
@@ -839,16 +1021,19 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 		fmt.Println(w.fset.Position(cursorPos))
 	}
 	if typesFindInfo {
-		if kind == ObjField && fieldTypeObj != nil {
+		/*if kind == ObjField && fieldTypeObj != nil {
 			typeName := fieldTypeObj.Name()
 			if fieldTypeObj.Pkg() != nil && fieldTypeObj.Pkg() != pkg {
 				typeName = fieldTypeObj.Pkg().Name() + "." + fieldTypeObj.Name()
 			}
 			fmt.Println(typeName, simpleObjInfo(cursorObj))
-		} else if kind == ObjBuiltin {
+		} else */
+		if kind == ObjBuiltin {
 			fmt.Println(builtinInfo(cursorObj.Name()))
 		} else if kind == ObjPkgName {
 			fmt.Println(cursorObj.String())
+		} else if kind == ObjImplicit {
+			fmt.Printf("%s is implicit\n", cursorId.Name)
 		} else if cursorIsInterfaceMethod {
 			fmt.Println(strings.Replace(simpleObjInfo(cursorObj), "(interface)", cursorPkg.Name()+"."+cursorInterfaceTypeName, 1))
 		} else {
@@ -883,20 +1068,30 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 	if kind == ObjPkgName {
 		for id, obj := range pkgInfo.Uses {
 			if obj != nil && obj.Id() == cursorObj.Id() { //!= nil && cursorObj.Pos() == obj.Pos() {
+				if _, ok := obj.(*types.PkgName); ok {
 					usages = append(usages, int(id.Pos()))
 				}
 			}
+		}
 	} else {
 		// for id, obj := range pkgInfo.Defs {
 		// 	if obj == cursorObj { //!= nil && cursorObj.Pos() == obj.Pos() {
 		// 		usages = append(usages, int(id.Pos()))
 		// 	}
 		// }
+		if cursorObj != nil {
 			for id, obj := range pkgInfo.Uses {
 				if obj == cursorObj { //!= nil && cursorObj.Pos() == obj.Pos() {
 					usages = append(usages, int(id.Pos()))
 				}
 			}
+		} else {
+			for id, obj := range pkgInfo.Uses {
+				if obj != nil && obj.Pos() == cursorPos { //!= nil && cursorObj.Pos() == obj.Pos() {
+					usages = append(usages, int(id.Pos()))
+				}
+			}
+		}
 	}
 	var pkg_path string
 	var xpkg_path string
@@ -907,8 +1102,9 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 		xpkg_path = conf.XPkg.Path()
 	}

-	if cursorPkg != nil && (cursorPkg.Path() == pkg_path ||
-		cursorPkg.Path() == xpkg_path) {
+	if cursorPkg != nil &&
+		(cursorPkg.Path() == pkg_path || cursorPkg.Path() == xpkg_path) &&
+		kind != ObjPkgName {
 		usages = append(usages, int(cursorPos))
 	}

@@ -957,7 +1153,12 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 		uses_paths = append(uses_paths, cursorPkg.Path())
 	}

-	buildutil.ForEachPackage(&build.Default, func(importPath string, err error) {
+	cursorPkgPath := cursorObj.Pkg().Path()
+	if pkgutil.IsVendorExperiment() {
+		cursorPkgPath = pkgutil.VendorPathToImportPath(cursorPkgPath)
+	}
+
+	buildutil.ForEachPackage(w.context, func(importPath string, err error) {
 		if err != nil {
 			return
 		}
@@ -973,7 +1174,7 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 			find = true
 		} else {
 			for _, v := range bp.Imports {
-				if v == cursorObj.Pkg().Path() {
+				if v == cursorPkgPath {
 					find = true
 					break
 				}
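The two hunks above switch the reverse-reference search from comparing against cursorObj.Pkg().Path() to a pre-computed cursorPkgPath, which is normalized with pkgutil.VendorPathToImportPath when the vendor experiment is active, presumably so that a package living under a vendor/ tree is matched by the import path other packages actually write in their import lists. A rough stand-in for that normalization (an illustrative approximation, not the pkgutil implementation):

	package main

	import (
		"fmt"
		"strings"
	)

	// vendorPathToImportPath keeps everything after the last "/vendor/"
	// segment, if one is present, and returns the path unchanged otherwise.
	func vendorPathToImportPath(path string) string {
		if i := strings.LastIndex(path, "/vendor/"); i >= 0 {
			return path[i+len("/vendor/"):]
		}
		return path
	}

	func main() {
		p := "github.com/b3log/wide/vendor/github.com/visualfc/gotools/types"
		fmt.Println(vendorPathToImportPath(p)) // github.com/visualfc/gotools/types
	}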
@@ -1031,6 +1232,20 @@ func (w *PkgWalker) LookupObjects(conf *PkgConfig, cursor *FileCursor) {
 	}
 }

+func (w *PkgWalker) CheckIsName(cursor *FileCursor) *ast.Ident {
+	if cursor.fileDir == "" {
+		return nil
+	}
+	file, _ := w.parseFile(cursor.fileDir, cursor.fileName, cursor.src)
+	if file == nil {
+		return nil
+	}
+	if inRange(file.Name, cursor.pos) {
+		return file.Name
+	}
+	return nil
+}
+
 func (w *PkgWalker) CheckIsImport(cursor *FileCursor) *ast.ImportSpec {
 	if cursor.fileDir == "" {
 		return nil
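The new CheckIsName helper returns the file's package-name identifier when the cursor position falls inside it. A small stand-in sketch of that position test (the demo file and byte offset are assumptions, and the inRange-style comparison is written out inline):

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
	)

	const src = "package demo\n\nvar X int\n"

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "demo.go", src, 0)
		if err != nil {
			panic(err)
		}
		// Convert a byte offset to a token.Pos and check whether it lies
		// within the package-clause identifier.
		cursor := fset.File(f.Pos()).Pos(9) // an offset inside "demo"
		onName := f.Name.Pos() <= cursor && cursor <= f.Name.End()
		fmt.Println(f.Name.Name, onName) // demo true
	}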
@@ -5,8 +5,8 @@
 	{
 		"checksumSHA1": "ZesPb1w0u5/uv/fdVc+G6wOee30=",
 		"path": "github.com/b3log/wide",
-		"revision": "0a51a03f35a3fa9edbfa790698aa615f4dd49783",
-		"revisionTime": "2018-03-13T05:05:45Z"
+		"revision": "0b8da631759171441b7c46b78151fbf6345d5bb8",
+		"revisionTime": "2018-03-13T05:11:10Z"
 	},
 	{
 		"checksumSHA1": "83iEp3SqOoIkZUYyR7BOVP4vaGE=",
@@ -71,10 +71,10 @@
 		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "QBwzyczN2ftqO2Vxag5TS79q32E=",
+		"checksumSHA1": "h9/MfYdq0vlJ5n/MszzARxhAY88=",
 		"path": "github.com/visualfc/gotools/astview",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "02f7N5GTTddWpIDhsRthRKZs4nI=",
@@ -85,8 +85,8 @@
 	{
 		"checksumSHA1": "sA2FN414rFhNEgac/a0IyUR8dMA=",
 		"path": "github.com/visualfc/gotools/docview",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "5sCU5oJIkeXHHhzNyzusSH+de/8=",
@@ -95,16 +95,16 @@
 		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "LvxdwB9kJ77cFS4qiRAh9ts2nz4=",
+		"checksumSHA1": "HmZqKNWLGAznshKi6RaoNCfUiX4=",
 		"path": "github.com/visualfc/gotools/finddoc",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "Sxih6vWVTlJNBdVNeyCkxI213yI=",
 		"path": "github.com/visualfc/gotools/goapi",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "o0usAkdrTaqufvGTZu++Cc/DVMM=",
@@ -127,8 +127,8 @@
 	{
 		"checksumSHA1": "R6+qmg1mEs4XhihZd5ujddVI+FI=",
 		"path": "github.com/visualfc/gotools/gopresent",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "f8zd5auG22z/GqXqW9giRDYEw6A=",
@@ -139,32 +139,50 @@
 	{
 		"checksumSHA1": "kyKBo2ux+HflucDj/qzoko5YoMA=",
 		"path": "github.com/visualfc/gotools/jsonfmt",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "QldbuEtO8CONs3q/5mkva+CVeNg=",
+		"checksumSHA1": "1B56213xrQhxQG1+ZILXNK6cuoo=",
 		"path": "github.com/visualfc/gotools/oracle",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "zG4ar8LbudIKQvsfvAgkBSS0k3Q=",
+		"checksumSHA1": "YwA6y+SSXb/JiRGCNowOTrxzP5c=",
+		"path": "github.com/visualfc/gotools/oracle/oracle",
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
+	},
+	{
+		"checksumSHA1": "QlJ4yti//o5hoD6w8J6UgLeWjHI=",
+		"path": "github.com/visualfc/gotools/oracle/oracle/serial",
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
+	},
+	{
+		"checksumSHA1": "rPh1Qefw9BjwxY/jcpXWePPwYCU=",
 		"path": "github.com/visualfc/gotools/pkgs",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
+	{
+		"checksumSHA1": "dnnib3Gp6gDR8f1HSd4q2DENHiM=",
+		"path": "github.com/visualfc/gotools/pkgutil",
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
+	},
 	{
 		"checksumSHA1": "RQoGIbxntMsUZvi4Js0WQqDBDYM=",
 		"path": "github.com/visualfc/gotools/runcmd",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "q/EpLJHMTcnbxjLGogryQ6IA7kc=",
+		"checksumSHA1": "nQZTp19L2U+0vVarHKt+3PcsrJM=",
 		"path": "github.com/visualfc/gotools/stdlib",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "6CSw3Gtu+hJ6QTfhJ+985cualxs=",
@@ -173,10 +191,10 @@
 		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
-		"checksumSHA1": "qlJsyinWZ07PaOyTYyxXOmhDN/I=",
+		"checksumSHA1": "7NOVBr5xAnJIiAGOnIPCvTSxKO4=",
 		"path": "github.com/visualfc/gotools/types",
-		"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
-		"revisionTime": "2015-04-09T14:25:36Z"
+		"revision": "a74583d2ad8773983f273657291df981033308a4",
+		"revisionTime": "2018-02-17T01:27:43Z"
 	},
 	{
 		"checksumSHA1": "xe5hMqClV1HmKZ4GVg4bmSsVRE8=",