update x/tools

Significantly improve CPU and memory usage when not using SSA-powered linters.
Improve readability of go/packages errors.
Improve debugging capabilities and add documentation on debugging.
Denis Isaev 2019-06-09 14:15:58 +03:00 committed by Isaev Denis
parent d2b1eea2c6
commit e39e8fb5d5
36 changed files with 1643 additions and 572 deletions

@ -5,7 +5,8 @@ go:
- 1.12.x - 1.12.x
before_script: before_script:
- go get github.com/valyala/quicktemplate - go get github.com/valyala/quicktemplate # for tests
- go get github.com/pkg/errors # for tests
script: make check_generated test script: make check_generated test

@ -30,7 +30,8 @@ test_linters:
# Maintenance # Maintenance
generate: docs/demo.svg README.md install.sh pkg/logutils/log_mock.go vendor generate: README.md install.sh pkg/logutils/log_mock.go vendor
generate_svg: docs/demo.svg
maintainer-clean: clean maintainer-clean: clean
rm -f docs/demo.svg README.md install.sh pkg/logutils/log_mock.go rm -f docs/demo.svg README.md install.sh pkg/logutils/log_mock.go
rm -rf vendor rm -rf vendor

@ -28,6 +28,7 @@ Sponsored by [GolangCI.com](https://golangci.com): SaaS service for running lint
* [FAQ](#faq) * [FAQ](#faq)
* [Thanks](#thanks) * [Thanks](#thanks)
* [Changelog](#changelog) * [Changelog](#changelog)
* [Debug](#debug)
* [Future Plans](#future-plans) * [Future Plans](#future-plans)
* [Contact Information](#contact-information) * [Contact Information](#contact-information)
@ -1140,6 +1141,20 @@ There is the most valuable changes log:
1. Support GitHub Releases 1. Support GitHub Releases
2. Installation via Homebrew and Docker 2. Installation via Homebrew and Docker
## Debug
You can see verbose output of the linter by using the `-v` option.
If you would like even more detailed logs, set the `GL_DEBUG` environment variable.
Its value is a comma-separated list of debug tags, e.g. `GL_DEBUG=loader,gocritic golangci-lint run`.
Existing debug tags:
1. `gocritic` - debug the `go-critic` linter;
2. `env` - debug the `go env` command;
3. `loader` - debug package loading (including `go/packages` internal debugging);
4. `autogen_exclude` - debug the filter that excludes autogenerated source code;
5. `nolint` - debug the filter that excludes issues by `//nolint` comments.
## Future Plans ## Future Plans
1. Upstream all changes of forked linters. 1. Upstream all changes of forked linters.

@ -28,6 +28,7 @@ Sponsored by [GolangCI.com](https://golangci.com): SaaS service for running lint
* [FAQ](#faq) * [FAQ](#faq)
* [Thanks](#thanks) * [Thanks](#thanks)
* [Changelog](#changelog) * [Changelog](#changelog)
* [Debug](#debug)
* [Future Plans](#future-plans) * [Future Plans](#future-plans)
* [Contact Information](#contact-information) * [Contact Information](#contact-information)
@ -668,6 +669,20 @@ There is the most valuable changes log:
1. Support GitHub Releases 1. Support GitHub Releases
2. Installation via Homebrew and Docker 2. Installation via Homebrew and Docker
## Debug
You can see verbose output of the linter by using the `-v` option.
If you would like even more detailed logs, set the `GL_DEBUG` environment variable.
Its value is a comma-separated list of debug tags, e.g. `GL_DEBUG=loader,gocritic golangci-lint run`.
Existing debug tags:
1. `gocritic` - debug the `go-critic` linter;
2. `env` - debug the `go env` command;
3. `loader` - debug package loading (including `go/packages` internal debugging);
4. `autogen_exclude` - debug the filter that excludes autogenerated source code;
5. `nolint` - debug the filter that excludes issues by `//nolint` comments.
## Future Plans ## Future Plans
1. Upstream all changes of forked linters. 1. Upstream all changes of forked linters.

8
go.mod

@ -52,7 +52,6 @@ require (
github.com/timakin/bodyclose v0.0.0-00010101000000-000000000000 github.com/timakin/bodyclose v0.0.0-00010101000000-000000000000
github.com/valyala/quicktemplate v1.1.1 github.com/valyala/quicktemplate v1.1.1
golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a // indirect golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a // indirect
golang.org/x/net v0.0.0-20190313220215-9f648a60d977 // indirect
golang.org/x/sys v0.0.0-20190312061237-fead79001313 // indirect golang.org/x/sys v0.0.0-20190312061237-fead79001313 // indirect
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd
gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect
@ -63,5 +62,8 @@ require (
mvdan.cc/unparam v0.0.0-20190124213536-fbb59629db34 mvdan.cc/unparam v0.0.0-20190124213536-fbb59629db34
) )
// https://github.com/timakin/bodyclose/pull/17 replace (
replace github.com/timakin/bodyclose => github.com/golangci/bodyclose v0.0.0-20190713050349-65da19158fa2 // https://github.com/timakin/bodyclose/pull/17
github.com/timakin/bodyclose => github.com/golangci/bodyclose v0.0.0-20190713050349-65da19158fa2
golang.org/x/tools => github.com/golangci/tools v0.0.0-20190713050349-979bdb7f8cc8
)

18
go.sum

@ -81,6 +81,8 @@ github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSS
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI= github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/tools v0.0.0-20190713050349-979bdb7f8cc8 h1:rv5pCF5e6hFuSWEDuP3R+r8l0n/srMta+VWVEskASSQ=
github.com/golangci/tools v0.0.0-20190713050349-979bdb7f8cc8/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
@ -177,9 +179,8 @@ golang.org/x/net v0.0.0-20170915142106-8351a756f30f/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd h1:nTDtHvHSdCn1m6ITfMRqtOd/9+7a3s8RBNOZ3eYZzJA= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd h1:nTDtHvHSdCn1m6ITfMRqtOd/9+7a3s8RBNOZ3eYZzJA=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
golang.org/x/net v0.0.0-20190313220215-9f648a60d977 h1:actzWV6iWn3GLqN8dZjzsB+CLt+gaV2+wsxroxiQI8I= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
@ -193,16 +194,7 @@ golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/text v0.0.0-20170915090833-1cbadb444a80/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915090833-1cbadb444a80/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20170915040203-e531a2a1c15f/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190121143147-24cd39ecf745/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd h1:7E3PabyysDSEjnaANKBgums/hyvMI/HoHQ50qZEzTrg=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo= gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

@ -124,13 +124,13 @@ func (lnt Linter) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Is
} }
var issues []result.Issue var issues []result.Issue
for _, diag := range diags { for i := range diags {
i := result.Issue{ diag := &diags[i]
issues = append(issues, result.Issue{
FromLinter: lnt.Name(), FromLinter: lnt.Name(),
Text: fmt.Sprintf("%s: %s", diag.AnalyzerName, diag.Message), Text: fmt.Sprintf("%s: %s", diag.AnalyzerName, diag.Message),
Pos: diag.Position, Pos: diag.Position,
} })
issues = append(issues, i)
} }
return issues, nil return issues, nil

@ -148,39 +148,19 @@ func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package) *ssa.Program {
} }
func (cl ContextLoader) findLoadMode(linters []*linter.Config) packages.LoadMode { func (cl ContextLoader) findLoadMode(linters []*linter.Config) packages.LoadMode {
maxLoadMode := packages.LoadFiles //TODO: specify them in linters: need more fine-grained control.
// e.g. NeedTypesSizes is needed only for go vet
loadMode := packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles
for _, lc := range linters { for _, lc := range linters {
curLoadMode := packages.LoadFiles
if lc.NeedsTypeInfo { if lc.NeedsTypeInfo {
curLoadMode = packages.LoadSyntax loadMode |= packages.NeedImports | packages.NeedTypes | packages.NeedTypesSizes | packages.NeedTypesInfo | packages.NeedSyntax
} }
if lc.NeedsSSARepr { if lc.NeedsSSARepr {
curLoadMode = packages.LoadAllSyntax loadMode |= packages.NeedDeps
}
if curLoadMode > maxLoadMode {
maxLoadMode = curLoadMode
} }
} }
return maxLoadMode return loadMode
}
func stringifyLoadMode(mode packages.LoadMode) string {
switch mode {
case packages.LoadFiles:
return "load files"
case packages.LoadImports:
return "load imports"
case packages.LoadTypes:
return "load types"
case packages.LoadSyntax:
return "load types and syntax"
}
// it may be an alias, and may be not
if mode == packages.LoadAllSyntax {
return "load deps types and syntax"
}
return "unknown"
} }
func (cl ContextLoader) buildArgs() []string { func (cl ContextLoader) buildArgs() []string {
@ -231,6 +211,58 @@ func (cl ContextLoader) makeBuildFlags() ([]string, error) {
return buildFlags, nil return buildFlags, nil
} }
func stringifyLoadMode(mode packages.LoadMode) string {
m := map[packages.LoadMode]string{
packages.NeedCompiledGoFiles: "compiled_files",
packages.NeedDeps: "deps",
packages.NeedExportsFile: "exports_file",
packages.NeedFiles: "files",
packages.NeedImports: "imports",
packages.NeedName: "name",
packages.NeedSyntax: "syntax",
packages.NeedTypes: "types",
packages.NeedTypesInfo: "types_info",
packages.NeedTypesSizes: "types_sizes",
}
var flags []string
for flag, flagStr := range m {
if mode&flag != 0 {
flags = append(flags, flagStr)
}
}
return fmt.Sprintf("%d (%s)", mode, strings.Join(flags, "|"))
}
func (cl ContextLoader) debugPrintLoadedPackages(pkgs []*packages.Package) {
cl.debugf("loaded %d pkgs", len(pkgs))
for i, pkg := range pkgs {
var syntaxFiles []string
for _, sf := range pkg.Syntax {
syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename)
}
cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s",
i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles)
}
}
func (cl ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error {
for _, pkg := range pkgs {
for _, err := range pkg.Errors {
if strings.Contains(err.Msg, "no Go files") {
return errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath)
}
if strings.Contains(err.Msg, "cannot find package") {
// when analyzing not existing directory
return errors.Wrap(exitcodes.ErrFailure, err.Msg)
}
}
}
return nil
}
func (cl ContextLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) { func (cl ContextLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) {
defer func(startedAt time.Time) { defer func(startedAt time.Time) {
cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt)) cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt))
@ -248,6 +280,7 @@ func (cl ContextLoader) loadPackages(ctx context.Context, loadMode packages.Load
Tests: cl.cfg.Run.AnalyzeTests, Tests: cl.cfg.Run.AnalyzeTests,
Context: ctx, Context: ctx,
BuildFlags: buildFlags, BuildFlags: buildFlags,
Logf: cl.debugf,
//TODO: use fset, parsefile, overlay //TODO: use fset, parsefile, overlay
} }
@ -257,26 +290,10 @@ func (cl ContextLoader) loadPackages(ctx context.Context, loadMode packages.Load
if err != nil { if err != nil {
return nil, errors.Wrap(err, "failed to load program with go/packages") return nil, errors.Wrap(err, "failed to load program with go/packages")
} }
cl.debugf("loaded %d pkgs", len(pkgs)) cl.debugPrintLoadedPackages(pkgs)
for i, pkg := range pkgs {
var syntaxFiles []string
for _, sf := range pkg.Syntax {
syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename)
}
cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s",
i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles)
}
for _, pkg := range pkgs { if err := cl.parseLoadedPackagesErrors(pkgs); err != nil {
for _, err := range pkg.Errors { return nil, err
if strings.Contains(err.Msg, "no Go files") {
return nil, errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath)
}
if strings.Contains(err.Msg, "cannot find package") {
// when analyzing not existing directory
return nil, errors.Wrap(exitcodes.ErrFailure, err.Msg)
}
}
} }
return cl.filterPackages(pkgs), nil return cl.filterPackages(pkgs), nil
@ -341,12 +358,12 @@ func (cl ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*li
} }
var prog *loader.Program var prog *loader.Program
if loadMode >= packages.LoadSyntax { if loadMode&packages.NeedTypes != 0 {
prog = cl.makeFakeLoaderProgram(pkgs) prog = cl.makeFakeLoaderProgram(pkgs)
} }
var ssaProg *ssa.Program var ssaProg *ssa.Program
if loadMode == packages.LoadAllSyntax { if loadMode&packages.NeedDeps != 0 {
ssaProg = cl.buildSSAProgram(pkgs) ssaProg = cl.buildSSAProgram(pkgs)
} }
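The rework above replaces the ordinal `packages.Load*` constants with a `packages.LoadMode` composed from `Need*` bits, and `Load` now uses bit tests such as `loadMode&packages.NeedTypes != 0` instead of ordering comparisons. A minimal standalone sketch of that pattern, not golangci-lint's actual code; the `needsTypeInfo`/`needsSSA` booleans stand in for the `linter.Config` fields used in the hunk above:

```go
package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	// Every linter needs at least package names and file lists.
	loadMode := packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles

	// Stand-ins for linter.Config.NeedsTypeInfo / NeedsSSARepr.
	needsTypeInfo, needsSSA := true, false
	if needsTypeInfo {
		loadMode |= packages.NeedImports | packages.NeedTypes |
			packages.NeedTypesSizes | packages.NeedTypesInfo | packages.NeedSyntax
	}
	if needsSSA {
		loadMode |= packages.NeedDeps // SSA construction also needs dependency information
	}

	// Bit tests replace the old ordinal comparisons such as loadMode >= packages.LoadSyntax.
	fmt.Println("types needed:", loadMode&packages.NeedTypes != 0)
	fmt.Println("deps needed: ", loadMode&packages.NeedDeps != 0)
}
```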

@ -128,11 +128,13 @@ type Pass struct {
// See comments for ExportObjectFact. // See comments for ExportObjectFact.
ExportPackageFact func(fact Fact) ExportPackageFact func(fact Fact)
// AllPackageFacts returns a new slice containing all package facts in unspecified order. // AllPackageFacts returns a new slice containing all package facts of the analysis's FactTypes
// in unspecified order.
// WARNING: This is an experimental API and may change in the future. // WARNING: This is an experimental API and may change in the future.
AllPackageFacts func() []PackageFact AllPackageFacts func() []PackageFact
// AllObjectFacts returns a new slice containing all object facts in unspecified order. // AllObjectFacts returns a new slice containing all object facts of the analysis's FactTypes
// in unspecified order.
// WARNING: This is an experimental API and may change in the future. // WARNING: This is an experimental API and may change in the future.
AllObjectFacts func() []ObjectFact AllObjectFacts func() []ObjectFact
@ -161,6 +163,15 @@ func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) {
pass.Report(Diagnostic{Pos: pos, Message: msg}) pass.Report(Diagnostic{Pos: pos, Message: msg})
} }
// reportNodef is a helper function that reports a Diagnostic using the
// range denoted by the AST node.
//
// WARNING: This is an experimental API and may change in the future.
func (pass *Pass) reportNodef(node ast.Node, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
pass.Report(Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
}
func (pass *Pass) String() string { func (pass *Pass) String() string {
return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path()) return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path())
} }
@ -202,14 +213,3 @@ func (pass *Pass) String() string {
type Fact interface { type Fact interface {
AFact() // dummy method to avoid type errors AFact() // dummy method to avoid type errors
} }
// A Diagnostic is a message associated with a source location.
//
// An Analyzer may return a variety of diagnostics; the optional Category,
// which should be a constant, may be used to classify them.
// It is primarily intended to make it easy to look up documentation.
type Diagnostic struct {
Pos token.Pos
Category string // optional
Message string
}

48
vendor/golang.org/x/tools/go/analysis/diagnostic.go generated vendored Normal file

@ -0,0 +1,48 @@
package analysis
import "go/token"
// A Diagnostic is a message associated with a source location or range.
//
// An Analyzer may return a variety of diagnostics; the optional Category,
// which should be a constant, may be used to classify them.
// It is primarily intended to make it easy to look up documentation.
//
// If End is provided, the diagnostic is specified to apply to the range between
// Pos and End.
type Diagnostic struct {
Pos token.Pos
End token.Pos // optional
Category string // optional
Message string
// SuggestedFixes contains suggested fixes for a diagnostic which can be used to perform
// edits to a file that address the diagnostic.
// TODO(matloob): Should multiple SuggestedFixes be allowed for a diagnostic?
// Diagnostics should not contain SuggestedFixes that overlap.
// Experimental: This API is experimental and may change in the future.
SuggestedFixes []SuggestedFix // optional
}
// A SuggestedFix is a code change associated with a Diagnostic that a user can choose
// to apply to their code. Usually the SuggestedFix is meant to fix the issue flagged
// by the diagnostic.
// TextEdits for a SuggestedFix should not overlap. TextEdits for a SuggestedFix
// should not contain edits for other packages.
// Experimental: This API is experimental and may change in the future.
type SuggestedFix struct {
// A description for this suggested fix to be shown to a user deciding
// whether to accept it.
Message string
TextEdits []TextEdit
}
// A TextEdit represents the replacement of the code between Pos and End with the new text.
// Each TextEdit should apply to a single file. End should not be earlier in the file than Pos.
// Experimental: This API is experimental and may change in the future.
type TextEdit struct {
// For a pure insertion, End can either be set to Pos or token.NoPos.
Pos token.Pos
End token.Pos
NewText []byte
}
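The new `diagnostic.go` gives `Diagnostic` an optional `End` position and `SuggestedFixes`. A hedged sketch of how an analyzer could use the new fields; the analyzer name, the `oldName` identifier, and the fix text are invented for illustration:

```go
package renamer

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
)

// Analyzer flags uses of a hypothetical deprecated identifier and offers a fix.
var Analyzer = &analysis.Analyzer{
	Name: "renamer",
	Doc:  "suggest replacing the deprecated identifier oldName with newName",
	Run:  run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	for _, file := range pass.Files {
		ast.Inspect(file, func(n ast.Node) bool {
			id, ok := n.(*ast.Ident)
			if !ok || id.Name != "oldName" {
				return true
			}
			pass.Report(analysis.Diagnostic{
				Pos:     id.Pos(),
				End:     id.End(), // new: the diagnostic covers a range, not just a point
				Message: "oldName is deprecated; use newName",
				SuggestedFixes: []analysis.SuggestedFix{{
					Message: "Replace oldName with newName",
					TextEdits: []analysis.TextEdit{{
						Pos:     id.Pos(),
						End:     id.End(),
						NewText: []byte("newName"),
					}},
				}},
			})
			return true
		})
	}
	return nil, nil
}
```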

@ -107,7 +107,7 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
// cgo files of a package (those that import "C"). Such files are not // cgo files of a package (those that import "C"). Such files are not
// Go, so there may be gaps in type information around C.f references. // Go, so there may be gaps in type information around C.f references.
// //
// This checker was initially written in vet to inpect raw cgo source // This checker was initially written in vet to inspect raw cgo source
// files using partial type information. However, Analyzers in the new // files using partial type information. However, Analyzers in the new
// analysis API are presented with the type-checked, "cooked" Go ASTs // analysis API are presented with the type-checked, "cooked" Go ASTs
// resulting from cgo-processing files, so we must choose between // resulting from cgo-processing files, so we must choose between
@ -133,7 +133,7 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
// func (T) f(int) string { ... } // func (T) f(int) string { ... }
// //
// we synthesize a new ast.File, shown below, that dot-imports the // we synthesize a new ast.File, shown below, that dot-imports the
// orginal "cooked" package using a special name ("·this·"), so that all // original "cooked" package using a special name ("·this·"), so that all
// references to package members resolve correctly. (References to // references to package members resolve correctly. (References to
// unexported names cause an "unexported" error, which we ignore.) // unexported names cause an "unexported" error, which we ignore.)
// //

@ -24,6 +24,7 @@ var unkeyedLiteral = map[string]bool{
"image.Uniform": true, "image.Uniform": true,
"unicode.Range16": true, "unicode.Range16": true,
"unicode.Range32": true,
// These three structs are used in generated test main files, // These three structs are used in generated test main files,
// but the generator can be trusted. // but the generator can be trusted.

@ -102,10 +102,11 @@ func run(pass *analysis.Pass) (interface{}, error) {
inspect.Preorder(nodeFilter, func(n ast.Node) { inspect.Preorder(nodeFilter, func(n ast.Node) {
switch n := n.(type) { switch n := n.(type) {
case *ast.FuncDecl: case *ast.FuncDecl:
fn := pass.TypesInfo.Defs[n.Name].(*types.Func) // Type information may be incomplete.
funcDecls[fn] = &declInfo{decl: n} if fn, ok := pass.TypesInfo.Defs[n.Name].(*types.Func); ok {
decls = append(decls, fn) funcDecls[fn] = &declInfo{decl: n}
decls = append(decls, fn)
}
case *ast.FuncLit: case *ast.FuncLit:
funcLits[n] = new(litInfo) funcLits[n] = new(litInfo)
lits = append(lits, n) lits = append(lits, n)

@ -67,15 +67,20 @@ of arguments with no format string.
` `
// isWrapper is a fact indicating that a function is a print or printf wrapper. // isWrapper is a fact indicating that a function is a print or printf wrapper.
type isWrapper struct{ Printf bool } type isWrapper struct{ Kind funcKind }
func (f *isWrapper) AFact() {} func (f *isWrapper) AFact() {}
func (f *isWrapper) String() string { func (f *isWrapper) String() string {
if f.Printf { switch f.Kind {
case kindPrintf:
return "printfWrapper" return "printfWrapper"
} else { case kindPrint:
return "printWrapper" return "printWrapper"
case kindErrorf:
return "errorfWrapper"
default:
return "unknownWrapper"
} }
} }
@ -112,7 +117,11 @@ func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper {
if !ok || fdecl.Body == nil { if !ok || fdecl.Body == nil {
return nil return nil
} }
fn := info.Defs[fdecl.Name].(*types.Func) fn, ok := info.Defs[fdecl.Name].(*types.Func)
// Type information may be incomplete.
if !ok {
return nil
}
sig := fn.Type().(*types.Signature) sig := fn.Type().(*types.Signature)
if !sig.Variadic() { if !sig.Variadic() {
@ -223,16 +232,20 @@ func match(info *types.Info, arg ast.Expr, param *types.Var) bool {
return ok && info.ObjectOf(id) == param return ok && info.ObjectOf(id) == param
} }
type funcKind int
const ( const (
kindPrintf = 1 kindUnknown funcKind = iota
kindPrint = 2 kindPrintf = iota
kindPrint
kindErrorf
) )
// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly. // checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly.
// It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...). // It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...).
func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind int) { func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind funcKind) {
matched := kind == kindPrint || matched := kind == kindPrint ||
kind == kindPrintf && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) kind != kindUnknown && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
if !matched { if !matched {
return return
} }
@ -262,7 +275,7 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k
fn := w.obj fn := w.obj
var fact isWrapper var fact isWrapper
if !pass.ImportObjectFact(fn, &fact) { if !pass.ImportObjectFact(fn, &fact) {
fact.Printf = kind == kindPrintf fact.Kind = kind
pass.ExportObjectFact(fn, &fact) pass.ExportObjectFact(fn, &fact)
for _, caller := range w.callers { for _, caller := range w.callers {
checkPrintfFwd(pass, caller.w, caller.call, kind) checkPrintfFwd(pass, caller.w, caller.call, kind)
@ -414,42 +427,42 @@ func checkCall(pass *analysis.Pass) {
call := n.(*ast.CallExpr) call := n.(*ast.CallExpr)
fn, kind := printfNameAndKind(pass, call) fn, kind := printfNameAndKind(pass, call)
switch kind { switch kind {
case kindPrintf: case kindPrintf, kindErrorf:
checkPrintf(pass, call, fn) checkPrintf(pass, kind, call, fn)
case kindPrint: case kindPrint:
checkPrint(pass, call, fn) checkPrint(pass, call, fn)
} }
}) })
} }
func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind int) { func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind funcKind) {
fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func) fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func)
if fn == nil { if fn == nil {
return nil, 0 return nil, 0
} }
var fact isWrapper
if pass.ImportObjectFact(fn, &fact) {
if fact.Printf {
return fn, kindPrintf
} else {
return fn, kindPrint
}
}
_, ok := isPrint[fn.FullName()] _, ok := isPrint[fn.FullName()]
if !ok { if !ok {
// Next look up just "printf", for use with -printf.funcs. // Next look up just "printf", for use with -printf.funcs.
_, ok = isPrint[strings.ToLower(fn.Name())] _, ok = isPrint[strings.ToLower(fn.Name())]
} }
if ok { if ok {
if strings.HasSuffix(fn.Name(), "f") { if fn.Name() == "Errorf" {
kind = kindErrorf
} else if strings.HasSuffix(fn.Name(), "f") {
kind = kindPrintf kind = kindPrintf
} else { } else {
kind = kindPrint kind = kindPrint
} }
return fn, kind
} }
return fn, kind
var fact isWrapper
if pass.ImportObjectFact(fn, &fact) {
return fn, fact.Kind
}
return fn, kindUnknown
} }
// isFormatter reports whether t satisfies fmt.Formatter. // isFormatter reports whether t satisfies fmt.Formatter.
@ -491,7 +504,7 @@ type formatState struct {
} }
// checkPrintf checks a call to a formatted print routine such as Printf. // checkPrintf checks a call to a formatted print routine such as Printf.
func checkPrintf(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { func checkPrintf(pass *analysis.Pass, kind funcKind, call *ast.CallExpr, fn *types.Func) {
format, idx := formatString(pass, call) format, idx := formatString(pass, call)
if idx < 0 { if idx < 0 {
if false { if false {
@ -511,6 +524,7 @@ func checkPrintf(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
argNum := firstArg argNum := firstArg
maxArgNum := firstArg maxArgNum := firstArg
anyIndex := false anyIndex := false
anyW := false
for i, w := 0, 0; i < len(format); i += w { for i, w := 0, 0; i < len(format); i += w {
w = 1 w = 1
if format[i] != '%' { if format[i] != '%' {
@ -527,6 +541,17 @@ func checkPrintf(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
if state.hasIndex { if state.hasIndex {
anyIndex = true anyIndex = true
} }
if state.verb == 'w' {
if kind != kindErrorf {
pass.Reportf(call.Pos(), "%s call has error-wrapping directive %%w", state.name)
return
}
if anyW {
pass.Reportf(call.Pos(), "%s call has more than one error-wrapping directive %%w", state.name)
return
}
anyW = true
}
if len(state.argNums) > 0 { if len(state.argNums) > 0 {
// Continue with the next sequential argument. // Continue with the next sequential argument.
argNum = state.argNums[len(state.argNums)-1] + 1 argNum = state.argNums[len(state.argNums)-1] + 1
@ -697,6 +722,7 @@ const (
argFloat argFloat
argComplex argComplex
argPointer argPointer
argError
anyType printfArgType = ^0 anyType printfArgType = ^0
) )
@ -739,7 +765,7 @@ var printVerbs = []printVerb{
{'T', "-", anyType}, {'T', "-", anyType},
{'U', "-#", argRune | argInt}, {'U', "-#", argRune | argInt},
{'v', allFlags, anyType}, {'v', allFlags, anyType},
{'w', noFlag, anyType}, {'w', allFlags, argError},
{'x', sharpNumFlag, argRune | argInt | argString | argPointer}, {'x', sharpNumFlag, argRune | argInt | argString | argPointer},
{'X', sharpNumFlag, argRune | argInt | argString | argPointer}, {'X', sharpNumFlag, argRune | argInt | argString | argPointer},
} }

@ -37,6 +37,12 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type,
return true // probably a type check problem return true // probably a type check problem
} }
} }
// %w accepts only errors.
if t == argError {
return types.ConvertibleTo(typ, errorType)
}
// If the type implements fmt.Formatter, we have nothing to check. // If the type implements fmt.Formatter, we have nothing to check.
if isFormatter(typ) { if isFormatter(typ) {
return true return true
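Taken together, the printf.go and types.go hunks teach the printf check about Go 1.13's `%w` verb: it is accepted only in `Errorf`-like calls, at most once per format string, and its operand must be convertible to `error`. A small illustration; the flagged calls are left as comments and the diagnostics are paraphrased from the code above:

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	base := errors.New("boom")

	// Accepted: %w in an Errorf-like call wraps an error operand (Go 1.13).
	wrapped := fmt.Errorf("loading config: %w", base)
	fmt.Println(errors.Is(wrapped, base)) // true

	// The updated check would flag the calls below (diagnostics paraphrased):
	// fmt.Printf("oops: %w\n", base)         // Printf call has error-wrapping directive %w
	// fmt.Errorf("%w and %w", base, base)    // call has more than one error-wrapping directive %w
	// fmt.Errorf("not an error: %w", "text") // %w operand is not convertible to error
}
```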

@ -40,8 +40,12 @@ func run(pass *analysis.Pass) (interface{}, error) {
(*ast.StructType)(nil), (*ast.StructType)(nil),
} }
inspect.Preorder(nodeFilter, func(n ast.Node) { inspect.Preorder(nodeFilter, func(n ast.Node) {
styp := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct) styp, ok := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct)
var seen map[[2]string]token.Pos // Type information may be incomplete.
if !ok {
return
}
var seen namesSeen
for i := 0; i < styp.NumFields(); i++ { for i := 0; i < styp.NumFields(); i++ {
field := styp.Field(i) field := styp.Field(i)
tag := styp.Tag(i) tag := styp.Tag(i)
@ -51,11 +55,38 @@ func run(pass *analysis.Pass) (interface{}, error) {
return nil, nil return nil, nil
} }
// namesSeen keeps track of encoding tags by their key, name, and nested level
// from the initial struct. The level is taken into account because equal
// encoding key names only conflict when at the same level; otherwise, the lower
// level shadows the higher level.
type namesSeen map[uniqueName]token.Pos
type uniqueName struct {
key string // "xml" or "json"
name string // the encoding name
level int // anonymous struct nesting level
}
func (s *namesSeen) Get(key, name string, level int) (token.Pos, bool) {
if *s == nil {
*s = make(map[uniqueName]token.Pos)
}
pos, ok := (*s)[uniqueName{key, name, level}]
return pos, ok
}
func (s *namesSeen) Set(key, name string, level int, pos token.Pos) {
if *s == nil {
*s = make(map[uniqueName]token.Pos)
}
(*s)[uniqueName{key, name, level}] = pos
}
var checkTagDups = []string{"json", "xml"} var checkTagDups = []string{"json", "xml"}
var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true} var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true}
// checkCanonicalFieldTag checks a single struct field tag. // checkCanonicalFieldTag checks a single struct field tag.
func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *map[[2]string]token.Pos) { func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *namesSeen) {
switch pass.Pkg.Path() { switch pass.Pkg.Path() {
case "encoding/json", "encoding/xml": case "encoding/json", "encoding/xml":
// These packages know how to use their own APIs. // These packages know how to use their own APIs.
@ -64,7 +95,7 @@ func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, s
} }
for _, key := range checkTagDups { for _, key := range checkTagDups {
checkTagDuplicates(pass, tag, key, field, field, seen) checkTagDuplicates(pass, tag, key, field, field, seen, 1)
} }
if err := validateStructTag(tag); err != nil { if err := validateStructTag(tag); err != nil {
@ -95,28 +126,29 @@ func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, s
// checkTagDuplicates checks a single struct field tag to see if any tags are // checkTagDuplicates checks a single struct field tag to see if any tags are
// duplicated. nearest is the field that's closest to the field being checked, // duplicated. nearest is the field that's closest to the field being checked,
// while still being part of the top-level struct type. // while still being part of the top-level struct type.
func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *map[[2]string]token.Pos) { func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *namesSeen, level int) {
val := reflect.StructTag(tag).Get(key) val := reflect.StructTag(tag).Get(key)
if val == "-" { if val == "-" {
// Ignored, even if the field is anonymous. // Ignored, even if the field is anonymous.
return return
} }
if val == "" || val[0] == ',' { if val == "" || val[0] == ',' {
if field.Anonymous() { if !field.Anonymous() {
typ, ok := field.Type().Underlying().(*types.Struct) // Ignored if the field isn't anonymous.
if !ok { return
return }
} typ, ok := field.Type().Underlying().(*types.Struct)
for i := 0; i < typ.NumFields(); i++ { if !ok {
field := typ.Field(i) return
if !field.Exported() { }
continue for i := 0; i < typ.NumFields(); i++ {
} field := typ.Field(i)
tag := typ.Tag(i) if !field.Exported() {
checkTagDuplicates(pass, tag, key, nearest, field, seen) continue
} }
tag := typ.Tag(i)
checkTagDuplicates(pass, tag, key, nearest, field, seen, level+1)
} }
// Ignored if the field isn't anonymous.
return return
} }
if key == "xml" && field.Name() == "XMLName" { if key == "xml" && field.Name() == "XMLName" {
@ -139,10 +171,7 @@ func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *ty
} }
val = val[:i] val = val[:i]
} }
if *seen == nil { if pos, ok := seen.Get(key, val, level); ok {
*seen = map[[2]string]token.Pos{}
}
if pos, ok := (*seen)[[2]string{key, val}]; ok {
alsoPos := pass.Fset.Position(pos) alsoPos := pass.Fset.Position(pos)
alsoPos.Column = 0 alsoPos.Column = 0
@ -161,7 +190,7 @@ func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *ty
pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos) pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos)
} else { } else {
(*seen)[[2]string{key, val}] = field.Pos() seen.Set(key, val, level, field.Pos())
} }
} }
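The reworked structtag check keys seen encoding names by (key, name, nesting level), so duplicates are reported only within the same level, while names shadowed by embedded structs are ignored. An illustrative sketch with invented types:

```go
package sample

// Hypothetical types showing what the reworked check reports.

type Inner struct {
	Name string `json:"name"` // nesting level 2: shadowed by Outer.Name, not reported
}

type Outer struct {
	Inner
	Name  string `json:"name"` // nesting level 1
	Alias string `json:"name"` // level 1 duplicate: "struct field Alias repeats json tag \"name\" also at ..."
}
```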

@ -20,7 +20,10 @@ const Doc = `check for common mistaken usages of tests and examples
The tests checker walks Test, Benchmark and Example functions checking The tests checker walks Test, Benchmark and Example functions checking
malformed names, wrong signatures and examples documenting non-existent malformed names, wrong signatures and examples documenting non-existent
identifiers.` identifiers.
Please see the documentation for package testing in golang.org/pkg/testing
for the conventions that are enforced for Tests, Benchmarks, and Examples.`
var Analyzer = &analysis.Analyzer{ var Analyzer = &analysis.Analyzer{
Name: "tests", Name: "tests",

@ -10,7 +10,7 @@ import (
// Checks include: // Checks include:
// that the name is a valid identifier; // that the name is a valid identifier;
// that analyzer names are unique; // that analyzer names are unique;
// that the Requires graph is acylic; // that the Requires graph is acyclic;
// that analyzer fact types are unique; // that analyzer fact types are unique;
// that each fact type is a pointer. // that each fact type is a pointer.
func Validate(analyzers []*Analyzer) error { func Validate(analyzers []*Analyzer) error {

@ -149,7 +149,7 @@ func (b *builder) branchStmt(s *ast.BranchStmt) {
} }
case token.FALLTHROUGH: case token.FALLTHROUGH:
for t := b.targets; t != nil; t = t.tail { for t := b.targets; t != nil && block == nil; t = t.tail {
block = t._fallthrough block = t._fallthrough
} }

@ -976,10 +976,11 @@ const (
aliasTag aliasTag
) )
var predeclOnce sync.Once
var predecl []types.Type // initialized lazily var predecl []types.Type // initialized lazily
func predeclared() []types.Type { func predeclared() []types.Type {
if predecl == nil { predeclOnce.Do(func() {
// initialize lazily to be sure that all // initialize lazily to be sure that all
// elements have been initialized before // elements have been initialized before
predecl = []types.Type{ // basic types predecl = []types.Type{ // basic types
@ -1026,7 +1027,7 @@ func predeclared() []types.Type {
// used internally by gc; never used by this package or in .a files // used internally by gc; never used by this package or in .a files
anyType{}, anyType{},
} }
} })
return predecl return predecl
} }

@ -82,15 +82,28 @@ func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, u
args = append(args, buildFlags...) args = append(args, buildFlags...)
args = append(args, "--", "unsafe") args = append(args, "--", "unsafe")
stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...) stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...)
var goarch, compiler string
if err != nil { if err != nil {
return nil, err if strings.Contains(err.Error(), "cannot find main module") {
// User's running outside of a module. All bets are off. Get GOARCH and guess compiler is gc.
// TODO(matloob): Is this a problem in practice?
envout, enverr := InvokeGo(ctx, env, dir, usesExportData, "env", "GOARCH")
if enverr != nil {
return nil, err
}
goarch = strings.TrimSpace(envout.String())
compiler = "gc"
} else {
return nil, err
}
} else {
fields := strings.Fields(stdout.String())
if len(fields) < 2 {
return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
}
goarch = fields[0]
compiler = fields[1]
} }
fields := strings.Fields(stdout.String())
if len(fields) < 2 {
return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
}
goarch := fields[0]
compiler := fields[1]
return types.SizesFor(compiler, goarch), nil return types.SizesFor(compiler, goarch), nil
} }
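`GetSizesGolist` now falls back to `go env GOARCH` and assumes the `gc` compiler when `go list` fails with "cannot find main module"; either way the result is fed to `types.SizesFor`. A tiny sketch of what that returned value provides, with `runtime.GOARCH` and the `int`/`int64` queries chosen just for illustration:

```go
package main

import (
	"fmt"
	"go/types"
	"runtime"
)

func main() {
	// The loader ultimately returns a types.Sizes for the detected
	// (or, in the fallback, guessed) compiler and GOARCH.
	sizes := types.SizesFor("gc", runtime.GOARCH)
	if sizes == nil {
		fmt.Println("unknown compiler/arch combination")
		return
	}
	fmt.Println("sizeof(int)    =", sizes.Sizeof(types.Typ[types.Int]))
	fmt.Println("alignof(int64) =", sizes.Alignof(types.Typ[types.Int64]))
}
```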

@ -7,7 +7,8 @@
// of dependencies. The ASTs and the derived facts are retained for // of dependencies. The ASTs and the derived facts are retained for
// later use. // later use.
// //
// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. // Deprecated: This is an older API and does not have support
// for modules. Use golang.org/x/tools/go/packages instead.
// //
// The package defines two primary types: Config, which specifies a // The package defines two primary types: Config, which specifies a
// set of initial packages to load and various other options; and // set of initial packages to load and various other options; and
@ -201,5 +202,3 @@ package loader
// the error. // the error.
// //
// The result of using concurrency is about a 2.5x speedup for stdlib_test. // The result of using concurrency is about a 2.5x speedup for stdlib_test.
// TODO(adonovan): overhaul the package documentation.

@ -811,7 +811,15 @@ func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, err
// Import of incomplete package: this indicates a cycle. // Import of incomplete package: this indicates a cycle.
fromPath := from.Pkg.Path() fromPath := from.Pkg.Path()
if cycle := imp.findPath(path, fromPath); cycle != nil { if cycle := imp.findPath(path, fromPath); cycle != nil {
cycle = append([]string{fromPath}, cycle...) // Normalize cycle: start from alphabetically largest node.
pos, start := -1, ""
for i, s := range cycle {
if pos < 0 || s > start {
pos, start = i, s
}
}
cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
cycle = append(cycle, cycle[0]) // add the start node to the end to close the cycle
return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
} }
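The loader now normalizes import-cycle errors by rotating the cycle so it starts at its alphabetically largest node and repeating that node at the end, which makes the same cycle print identically no matter where it was entered. A standalone sketch of the rotation; it copies into a fresh slice instead of rotating in place, but mirrors the logic above:

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeCycle rotates a non-empty cycle to start at its alphabetically
// largest node and repeats that node at the end to show the cycle closing.
func normalizeCycle(cycle []string) string {
	pos, start := -1, ""
	for i, s := range cycle {
		if pos < 0 || s > start {
			pos, start = i, s
		}
	}
	rotated := append(append([]string{}, cycle[pos:]...), cycle[:pos]...)
	rotated = append(rotated, rotated[0])
	return strings.Join(rotated, " -> ")
}

func main() {
	// Both orderings describe the same cycle and normalize identically.
	fmt.Println(normalizeCycle([]string{"b", "c", "a"})) // c -> a -> b -> c
	fmt.Println(normalizeCycle([]string{"a", "b", "c"})) // c -> a -> b -> c
}
```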

@ -16,14 +16,29 @@ import (
"strings" "strings"
) )
// Driver // The Driver Protocol
//
// The driver, given the inputs to a call to Load, returns metadata about the packages specified.
// This allows for different build systems to support go/packages by telling go/packages how the
// packages' source is organized.
// The driver is a binary, either specified by the GOPACKAGESDRIVER environment variable or in
// the path as gopackagesdriver. It's given the inputs to load in its argv. See the package
// documentation in doc.go for the full description of the patterns that need to be supported.
// A driver receives a JSON-serialized driverRequest struct on standard input and will
// produce a JSON-serialized driverResponse (see definition in packages.go) on its standard output.
// driverRequest is used to provide the portion of Load's Config that is needed by a driver.
type driverRequest struct { type driverRequest struct {
Command string `json:"command"` Mode LoadMode `json:"mode"`
Mode LoadMode `json:"mode"` // Env specifies the environment the underlying build system should be run in.
Env []string `json:"env"` Env []string `json:"env"`
BuildFlags []string `json:"build_flags"` // BuildFlags are flags that should be passed to the underlying build system.
Tests bool `json:"tests"` BuildFlags []string `json:"build_flags"`
Overlay map[string][]byte `json:"overlay"` // Tests specifies whether the patterns should also return test packages.
Tests bool `json:"tests"`
// Overlay maps file paths (relative to the driver's working directory) to the byte contents
// of overlay files.
Overlay map[string][]byte `json:"overlay"`
} }
// findExternalDriver returns the file path of a tool that supplies // findExternalDriver returns the file path of a tool that supplies

@ -13,6 +13,7 @@ import (
"log" "log"
"os" "os"
"os/exec" "os/exec"
"path"
"path/filepath" "path/filepath"
"reflect" "reflect"
"regexp" "regexp"
@ -71,6 +72,28 @@ func (r *responseDeduper) addRoot(id string) {
r.dr.Roots = append(r.dr.Roots, id) r.dr.Roots = append(r.dr.Roots, id)
} }
// goInfo contains global information from the go tool.
type goInfo struct {
rootDirs map[string]string
env goEnv
}
type goEnv struct {
modulesOn bool
}
func determineEnv(cfg *Config) goEnv {
buf, err := invokeGo(cfg, "env", "GOMOD")
if err != nil {
return goEnv{}
}
gomod := bytes.TrimSpace(buf.Bytes())
env := goEnv{}
env.modulesOn = len(gomod) > 0
return env
}
// goListDriver uses the go list command to interpret the patterns and produce // goListDriver uses the go list command to interpret the patterns and produce
// the build system package structure. // the build system package structure.
// See driver for more details. // See driver for more details.
@ -86,6 +109,28 @@ func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
}() }()
} }
// start fetching rootDirs
var info goInfo
var rootDirsReady, envReady = make(chan struct{}), make(chan struct{})
go func() {
info.rootDirs = determineRootDirs(cfg)
close(rootDirsReady)
}()
go func() {
info.env = determineEnv(cfg)
close(envReady)
}()
getGoInfo := func() *goInfo {
<-rootDirsReady
<-envReady
return &info
}
// always pass getGoInfo to golistDriver
golistDriver := func(cfg *Config, patterns ...string) (*driverResponse, error) {
return golistDriver(cfg, getGoInfo, patterns...)
}
// Determine files requested in contains patterns // Determine files requested in contains patterns
var containFiles []string var containFiles []string
var packagesNamed []string var packagesNamed []string
@ -147,7 +192,7 @@ extractQueries:
var containsCandidates []string var containsCandidates []string
if len(containFiles) != 0 { if len(containFiles) != 0 {
if err := runContainsQueries(cfg, golistDriver, response, containFiles); err != nil { if err := runContainsQueries(cfg, golistDriver, response, containFiles, getGoInfo); err != nil {
return nil, err return nil, err
} }
} }
@ -158,7 +203,7 @@ extractQueries:
} }
} }
modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response.dr) modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -166,13 +211,25 @@ extractQueries:
containsCandidates = append(containsCandidates, modifiedPkgs...) containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...) containsCandidates = append(containsCandidates, needPkgs...)
} }
if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs); err != nil { if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs, getGoInfo); err != nil {
return nil, err return nil, err
} }
// Check candidate packages for containFiles. // Check candidate packages for containFiles.
if len(containFiles) > 0 { if len(containFiles) > 0 {
for _, id := range containsCandidates { for _, id := range containsCandidates {
pkg := response.seenPackages[id] pkg, ok := response.seenPackages[id]
if !ok {
response.addPackage(&Package{
ID: id,
Errors: []Error{
{
Kind: ListError,
Msg: fmt.Sprintf("package %s expected but not seen", id),
},
},
})
continue
}
for _, f := range containFiles { for _, f := range containFiles {
for _, g := range pkg.GoFiles { for _, g := range pkg.GoFiles {
if sameFile(f, g) { if sameFile(f, g) {
@ -186,26 +243,33 @@ extractQueries:
return response.dr, nil return response.dr, nil
} }
func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string) error { func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string, getGoInfo func() *goInfo) error {
if len(pkgs) == 0 { if len(pkgs) == 0 {
return nil return nil
} }
dr, err := driver(cfg, pkgs...) drivercfg := *cfg
if getGoInfo().env.modulesOn {
drivercfg.BuildFlags = append(drivercfg.BuildFlags, "-mod=readonly")
}
dr, err := driver(&drivercfg, pkgs...)
if err != nil { if err != nil {
return err return err
} }
for _, pkg := range dr.Packages { for _, pkg := range dr.Packages {
response.addPackage(pkg) response.addPackage(pkg)
} }
_, needPkgs, err := processGolistOverlay(cfg, response.dr) _, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
if err != nil { if err != nil {
return err return err
} }
addNeededOverlayPackages(cfg, driver, response, needPkgs) if err := addNeededOverlayPackages(cfg, driver, response, needPkgs, getGoInfo); err != nil {
return err
}
return nil return nil
} }
func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error { func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string, goInfo func() *goInfo) error {
for _, query := range queries { for _, query := range queries {
// TODO(matloob): Do only one query per directory. // TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query) fdir := filepath.Dir(query)
@ -216,8 +280,31 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err) return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
} }
dirResponse, err := driver(cfg, pattern) dirResponse, err := driver(cfg, pattern)
if err != nil { if err != nil || (len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].Errors) == 1) {
return err // There was an error loading the package. Try to load the file as an ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're in modules mode
// and the ad-hoc is located outside a module.
var queryErr error
dirResponse, queryErr = driver(cfg, query)
if queryErr != nil {
// Return the original error if the attempt to fall back failed.
return err
}
// Special case to handle issue #33482:
// If this is a file= query for ad-hoc packages where the file only exists on an overlay,
// and exists outside of a module, add the file in for the package.
if len(dirResponse.Packages) == 1 &&
dirResponse.Packages[0].ID == "command-line-arguments" && len(dirResponse.Packages[0].GoFiles) == 0 {
filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
// TODO(matloob): check if the file is outside of a root dir?
for path := range cfg.Overlay {
if path == filename {
dirResponse.Packages[0].Errors = nil
dirResponse.Packages[0].GoFiles = []string{path}
dirResponse.Packages[0].CompiledGoFiles = []string{path}
}
}
}
} }
isRoot := make(map[string]bool, len(dirResponse.Roots)) isRoot := make(map[string]bool, len(dirResponse.Roots))
for _, root := range dirResponse.Roots { for _, root := range dirResponse.Roots {
@ -294,9 +381,7 @@ func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, quer
startWalk := time.Now() startWalk := time.Now()
gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug}) gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
if debug { cfg.Logf("%v for walk", time.Since(startWalk))
log.Printf("%v for walk", time.Since(startWalk))
}
// Weird special case: the top-level package in a module will be in // Weird special case: the top-level package in a module will be in
// whatever directory the user checked the repository out into. It's // whatever directory the user checked the repository out into. It's
@ -547,7 +632,7 @@ func otherFiles(p *jsonPackage) [][]string {
// golistDriver uses the "go list" command to expand the pattern // golistDriver uses the "go list" command to expand the pattern
// words and return metadata for the specified packages. dir may be // words and return metadata for the specified packages. dir may be
// "" and env may be nil, as per os/exec.Command. // "" and env may be nil, as per os/exec.Command.
func golistDriver(cfg *Config, words ...string) (*driverResponse, error) { func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driverResponse, error) {
// go list uses the following identifiers in ImportPath and Imports: // go list uses the following identifiers in ImportPath and Imports:
// //
// "p" -- importable package or main (command) // "p" -- importable package or main (command)
@ -588,6 +673,20 @@ func golistDriver(cfg *Config, words ...string) (*driverResponse, error) {
return nil, fmt.Errorf("package missing import path: %+v", p) return nil, fmt.Errorf("package missing import path: %+v", p)
} }
// Work around https://golang.org/issue/33157:
// go list -e, when given an absolute path, will find the package contained at
// that directory. But when no package exists there, it will return a fake package
// with an error and the ImportPath set to the absolute path provided to go list.
// Try to convert that absolute path to what its package path would be if it's
// contained in a known module or GOPATH entry. This will allow the package to be
// properly "reclaimed" when overlays are processed.
if filepath.IsAbs(p.ImportPath) && p.Error != nil {
pkgPath, ok := getPkgPath(p.ImportPath, rootsDirs)
if ok {
p.ImportPath = pkgPath
}
}
if old, found := seen[p.ImportPath]; found { if old, found := seen[p.ImportPath]; found {
if !reflect.DeepEqual(p, old) { if !reflect.DeepEqual(p, old) {
return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath) return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath)
@ -681,7 +780,7 @@ func golistDriver(cfg *Config, words ...string) (*driverResponse, error) {
if p.Error != nil { if p.Error != nil {
pkg.Errors = append(pkg.Errors, Error{ pkg.Errors = append(pkg.Errors, Error{
Pos: p.Error.Pos, Pos: p.Error.Pos,
Msg: p.Error.Err, Msg: strings.TrimSpace(p.Error.Err), // Trim to work around golang.org/issue/32363.
}) })
} }
@ -691,6 +790,27 @@ func golistDriver(cfg *Config, words ...string) (*driverResponse, error) {
return &response, nil return &response, nil
} }
// getPkgPath finds the package path of a directory if it's relative to a root directory.
func getPkgPath(dir string, goInfo func() *goInfo) (string, bool) {
for rdir, rpath := range goInfo().rootDirs {
// TODO(matloob): This doesn't properly handle symlinks.
r, err := filepath.Rel(rdir, dir)
if err != nil {
continue
}
if rpath != "" {
// We choose only one root even though the directory can belong to multiple modules
// or GOPATH entries. This is okay because we only need to work with absolute dirs when a
// file is missing from disk, for instance when gopls calls go/packages in an overlay.
// Once the file is saved, gopls, or the next invocation of the tool will get the correct
// result straight from golist.
// TODO(matloob): Implement module tiebreaking?
return path.Join(rpath, filepath.ToSlash(r)), true
}
}
return "", false
}
// absJoin absolutizes and flattens the lists of files. // absJoin absolutizes and flattens the lists of files.
func absJoin(dir string, fileses ...[]string) (res []string) { func absJoin(dir string, fileses ...[]string) (res []string) {
for _, files := range fileses { for _, files := range fileses {
@ -711,7 +831,7 @@ func golistargs(cfg *Config, words []string) []string {
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0), fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests), fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)), fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedDeps != 0), fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
// go list doesn't let you pass -test and -find together, // go list doesn't let you pass -test and -find together,
// probably because you'd just get the TestMain. // probably because you'd just get the TestMain.
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0), fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
@ -737,11 +857,9 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
cmd.Dir = cfg.Dir cmd.Dir = cfg.Dir
cmd.Stdout = stdout cmd.Stdout = stdout
cmd.Stderr = stderr cmd.Stderr = stderr
if debug { defer func(start time.Time) {
defer func(start time.Time) { cfg.Logf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr)
log.Printf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr) }(time.Now())
}(time.Now())
}
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {
// Check for 'go' executable not being found. // Check for 'go' executable not being found.
@ -761,6 +879,19 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)} return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)}
} }
// Related to #24854
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "unexpected directory layout") {
return nil, fmt.Errorf("%s", stderr.String())
}
// Is there an error running the C compiler in cgo? This will be reported in the "Error" field
// and should be suppressed by go list -e.
//
	// This condition is not perfect yet because the error message can include error messages other than the one from runtime/cgo.
if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# runtime/cgo\n") {
return stdout, nil
}
// This error only appears in stderr. See golang.org/cl/166398 for a fix in go list to show // This error only appears in stderr. See golang.org/cl/166398 for a fix in go list to show
// the error in the Err section of stdout in case -e option is provided. // the error in the Err section of stdout in case -e option is provided.
// This fix is provided for backwards compatibility. // This fix is provided for backwards compatibility.
@ -770,13 +901,49 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
return bytes.NewBufferString(output), nil return bytes.NewBufferString(output), nil
} }
// Similar to the previous error, but currently lacks a fix in Go.
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must all be in one directory") {
output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
strings.Trim(stderr.String(), "\n"))
return bytes.NewBufferString(output), nil
}
// Backwards compatibility for Go 1.11 because 1.12 and 1.13 put the directory in the ImportPath.
// If the package doesn't exist, put the absolute path of the directory into the error message,
// as Go 1.13 list does.
const noSuchDirectory = "no such directory"
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), noSuchDirectory) {
errstr := stderr.String()
abspath := strings.TrimSpace(errstr[strings.Index(errstr, noSuchDirectory)+len(noSuchDirectory):])
output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
abspath, strings.Trim(stderr.String(), "\n"))
return bytes.NewBufferString(output), nil
}
// Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist. // Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist.
	// Note that the error message we look for in this case is different from the one looked for above.
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") { if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") {
output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
strings.Trim(stderr.String(), "\n")) strings.Trim(stderr.String(), "\n"))
return bytes.NewBufferString(output), nil return bytes.NewBufferString(output), nil
} }
// Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit
// status if there's a dependency on a package that doesn't exist. But it should return
// a zero exit status and set an error on that package.
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") {
// try to extract package name from string
stderrStr := stderr.String()
var importPath string
colon := strings.Index(stderrStr, ":")
if colon > 0 && strings.HasPrefix(stderrStr, "go build ") {
importPath = stderrStr[len("go build "):colon]
}
output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
importPath, strings.Trim(stderrStr, "\n"))
return bytes.NewBufferString(output), nil
}
// Export mode entails a build. // Export mode entails a build.
// If that build fails, errors appear on stderr // If that build fails, errors appear on stderr
// (despite the -e flag) and the Export field is blank. // (despite the -e flag) and the Export field is blank.

@ -1,8 +1,12 @@
package packages package packages
import ( import (
"bytes"
"encoding/json"
"fmt"
"go/parser" "go/parser"
"go/token" "go/token"
"path"
"path/filepath" "path/filepath"
"strconv" "strconv"
"strings" "strings"
@ -12,74 +16,167 @@ import (
// files that don't exist on disk to an overlay. The results can be // files that don't exist on disk to an overlay. The results can be
// sometimes incorrect. // sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following: // TODO(matloob): Handle unsupported cases, including the following:
// - test files
// - adding test and non-test files to test variants of packages
// - determining the correct package to add given a new import path // - determining the correct package to add given a new import path
// - creating packages that don't exist func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func() *goInfo) (modifiedPkgs, needPkgs []string, err error) {
func processGolistOverlay(cfg *Config, response *driverResponse) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool) needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool) modifiedPkgsSet := make(map[string]bool)
for _, pkg := range response.Packages { for _, pkg := range response.dr.Packages {
// This is an approximation of import path to id. This can be // This is an approximation of import path to id. This can be
// wrong for tests, vendored packages, and a number of other cases. // wrong for tests, vendored packages, and a number of other cases.
havePkgs[pkg.PkgPath] = pkg.ID havePkgs[pkg.PkgPath] = pkg.ID
} }
outer: // If no new imports are added, it is safe to avoid loading any needPkgs.
for path, contents := range cfg.Overlay { // Otherwise, it's hard to tell which package is actually being loaded
base := filepath.Base(path) // (due to vendoring) and whether any modified package will show up
if strings.HasSuffix(path, "_test.go") { // in the transitive set of dependencies (because new imports are added,
// Overlays don't support adding new test files yet. // potentially modifying the transitive set of dependencies).
// TODO(matloob): support adding new test files. var overlayAddsImports bool
for opath, contents := range cfg.Overlay {
base := filepath.Base(opath)
dir := filepath.Dir(opath)
var pkg *Package
var testVariantOf *Package // if opath is a test file, this is the package it is testing
var fileExists bool
isTest := strings.HasSuffix(opath, "_test.go")
pkgName, ok := extractPackageName(opath, contents)
if !ok {
// Don't bother adding a file that doesn't even have a parsable package statement
// to the overlay.
continue continue
} }
dir := filepath.Dir(path) nextPackage:
for _, pkg := range response.Packages { for _, p := range response.dr.Packages {
var dirContains, fileExists bool if pkgName != p.Name && p.ID != "command-line-arguments" {
for _, f := range pkg.GoFiles { continue
if sameFile(filepath.Dir(f), dir) { }
dirContains = true for _, f := range p.GoFiles {
if !sameFile(filepath.Dir(f), dir) {
continue
} }
if isTest && !hasTestFiles(p) {
// TODO(matloob): Are there packages other than the 'production' variant
// of a package that this can match? This shouldn't match the test main package
// because the file is generated in another directory.
testVariantOf = p
continue nextPackage
}
pkg = p
if filepath.Base(f) == base { if filepath.Base(f) == base {
fileExists = true fileExists = true
} }
} }
// The overlay could have included an entirely new package. }
isNewPackage := extractPackage(pkg, path, contents) // The overlay could have included an entirely new package.
if dirContains || isNewPackage { if pkg == nil {
if !fileExists { // Try to find the module or gopath dir the file is contained in.
pkg.GoFiles = append(pkg.GoFiles, path) // TODO(matloob): should the file just be added to GoFiles? // Then for modules, add the module opath to the beginning.
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, path) var pkgPath string
modifiedPkgsSet[pkg.ID] = true for rdir, rpath := range rootDirs().rootDirs {
} // TODO(matloob): This doesn't properly handle symlinks.
imports, err := extractImports(path, contents) r, err := filepath.Rel(rdir, dir)
if err != nil { if err != nil {
// Let the parser or type checker report errors later. continue
continue outer
} }
for _, imp := range imports { pkgPath = filepath.ToSlash(r)
_, found := pkg.Imports[imp] if rpath != "" {
if !found { pkgPath = path.Join(rpath, pkgPath)
needPkgsSet[imp] = true
// TODO(matloob): Handle cases when the following block isn't correct.
// These include imports of test variants, imports of vendored packages, etc.
id, ok := havePkgs[imp]
if !ok {
id = imp
}
pkg.Imports[imp] = &Package{ID: id}
}
} }
			continue outer // We only create one new package even if it can belong to multiple modules or GOPATH entries.
// This is okay because tools (such as the LSP) that use overlays will recompute the overlay
// once the file is saved, and golist will do the right thing.
// TODO(matloob): Implement module tiebreaking?
break
}
if pkgPath == "" {
continue
}
isXTest := strings.HasSuffix(pkgName, "_test")
if isXTest {
pkgPath += "_test"
}
id := pkgPath
if isTest && !isXTest {
id = fmt.Sprintf("%s [%s.test]", pkgPath, pkgPath)
}
// Try to reclaim a package with the same id if it exists in the response.
for _, p := range response.dr.Packages {
if reclaimPackage(p, id, opath, contents) {
pkg = p
break
}
}
// Otherwise, create a new package
if pkg == nil {
pkg = &Package{PkgPath: pkgPath, ID: id, Name: pkgName, Imports: make(map[string]*Package)}
response.addPackage(pkg)
havePkgs[pkg.PkgPath] = id
// Add the production package's sources for a test variant.
if isTest && !isXTest && testVariantOf != nil {
pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...)
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...)
}
}
}
if !fileExists {
pkg.GoFiles = append(pkg.GoFiles, opath)
// TODO(matloob): Adding the file to CompiledGoFiles can exhibit the wrong behavior
// if the file will be ignored due to its build tags.
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, opath)
modifiedPkgsSet[pkg.ID] = true
}
imports, err := extractImports(opath, contents)
if err != nil {
// Let the parser or type checker report errors later.
continue
}
for _, imp := range imports {
_, found := pkg.Imports[imp]
if !found {
overlayAddsImports = true
// TODO(matloob): Handle cases when the following block isn't correct.
// These include imports of test variants, imports of vendored packages, etc.
id, ok := havePkgs[imp]
if !ok {
id = imp
}
pkg.Imports[imp] = &Package{ID: id}
}
}
continue
}
// toPkgPath tries to guess the package path given the id.
// This isn't always correct -- it's certainly wrong for
// vendored packages' paths.
toPkgPath := func(id string) string {
// TODO(matloob): Handle vendor paths.
i := strings.IndexByte(id, ' ')
if i >= 0 {
return id[:i]
}
return id
}
// Do another pass now that new packages have been created to determine the
// set of missing packages.
for _, pkg := range response.dr.Packages {
for _, imp := range pkg.Imports {
pkgPath := toPkgPath(imp.ID)
if _, ok := havePkgs[pkgPath]; !ok {
needPkgsSet[pkgPath] = true
} }
} }
} }
needPkgs = make([]string, 0, len(needPkgsSet)) if overlayAddsImports {
for pkg := range needPkgsSet { needPkgs = make([]string, 0, len(needPkgsSet))
needPkgs = append(needPkgs, pkg) for pkg := range needPkgsSet {
needPkgs = append(needPkgs, pkg)
}
} }
modifiedPkgs = make([]string, 0, len(modifiedPkgsSet)) modifiedPkgs = make([]string, 0, len(modifiedPkgsSet))
for pkg := range modifiedPkgsSet { for pkg := range modifiedPkgsSet {
@ -88,6 +185,55 @@ outer:
return modifiedPkgs, needPkgs, err return modifiedPkgs, needPkgs, err
} }
func hasTestFiles(p *Package) bool {
for _, f := range p.GoFiles {
if strings.HasSuffix(f, "_test.go") {
return true
}
}
return false
}
// determineRootDirs returns a mapping from each directory that can contain source code to the
// import path prefix corresponding to that directory.
// Its result is used to try to determine the import path for a package containing
// an overlay file.
func determineRootDirs(cfg *Config) map[string]string {
// Assume modules first:
out, err := invokeGo(cfg, "list", "-m", "-json", "all")
if err != nil {
return determineRootDirsGOPATH(cfg)
}
m := map[string]string{}
type jsonMod struct{ Path, Dir string }
for dec := json.NewDecoder(out); dec.More(); {
mod := new(jsonMod)
if err := dec.Decode(mod); err != nil {
return m // Give up and return an empty map. Package won't be found for overlay.
}
if mod.Dir != "" && mod.Path != "" {
// This is a valid module; add it to the map.
m[mod.Dir] = mod.Path
}
}
return m
}
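
A self-contained sketch of the streaming decode above: `go list -m -json all` prints one JSON object per module back to back, so json.Decoder's More/Decode pair is used rather than a single Unmarshal. The module paths and directories below are invented example data.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// Hypothetical `go list -m -json all` output, trimmed to two modules.
	out := `{"Path":"example.com/app","Dir":"/home/me/app"}
{"Path":"golang.org/x/tools","Dir":"/home/me/go/pkg/mod/golang.org/x/tools@v0.0.0"}`

	m := map[string]string{}
	for dec := json.NewDecoder(strings.NewReader(out)); dec.More(); {
		var mod struct{ Path, Dir string }
		if err := dec.Decode(&mod); err != nil {
			break // give up, as determineRootDirs does
		}
		if mod.Dir != "" && mod.Path != "" {
			m[mod.Dir] = mod.Path // module directory -> module path
		}
	}
	fmt.Println(m)
}
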
func determineRootDirsGOPATH(cfg *Config) map[string]string {
m := map[string]string{}
out, err := invokeGo(cfg, "env", "GOPATH")
if err != nil {
// Could not determine root dir mapping. Everything is best-effort, so just return an empty map.
// When we try to find the import path for a directory, there will be no root-dir match and
// we'll give up.
return m
}
for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) {
m[filepath.Join(p, "src")] = ""
}
return m
}
func extractImports(filename string, contents []byte) ([]string, error) { func extractImports(filename string, contents []byte) ([]string, error) {
f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset? f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset?
if err != nil { if err != nil {
@ -105,13 +251,16 @@ func extractImports(filename string, contents []byte) ([]string, error) {
return res, nil return res, nil
} }
// extractPackage attempts to extract a package defined in an overlay. // reclaimPackage attempts to reuse a package that failed to load in an overlay.
// //
// If the package has errors and has no Name, GoFiles, or Imports, // If the package has errors and has no Name, GoFiles, or Imports,
// then it's possible that it doesn't yet exist on disk. // then it's possible that it doesn't yet exist on disk.
func extractPackage(pkg *Package, filename string, contents []byte) bool { func reclaimPackage(pkg *Package, id string, filename string, contents []byte) bool {
// TODO(rstambler): Check the message of the actual error? // TODO(rstambler): Check the message of the actual error?
// It differs between $GOPATH and module mode. // It differs between $GOPATH and module mode.
if pkg.ID != id {
return false
}
if len(pkg.Errors) != 1 { if len(pkg.Errors) != 1 {
return false return false
} }
@ -124,15 +273,21 @@ func extractPackage(pkg *Package, filename string, contents []byte) bool {
if len(pkg.Imports) > 0 { if len(pkg.Imports) > 0 {
return false return false
} }
f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.PackageClauseOnly) // TODO(matloob): reuse fileset? pkgName, ok := extractPackageName(filename, contents)
if err != nil { if !ok {
return false return false
} }
// TODO(rstambler): This doesn't work for main packages. pkg.Name = pkgName
if filepath.Base(pkg.PkgPath) != f.Name.Name {
return false
}
pkg.Name = f.Name.Name
pkg.Errors = nil pkg.Errors = nil
return true return true
} }
func extractPackageName(filename string, contents []byte) (string, bool) {
// TODO(rstambler): Check the message of the actual error?
// It differs between $GOPATH and module mode.
f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.PackageClauseOnly) // TODO(matloob): reuse fileset?
if err != nil {
return "", false
}
return f.Name.Name, true
}
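
For illustration, a standalone sketch of the parser.PackageClauseOnly trick used by extractPackageName: parsing stops after the package clause, so the package name can be recovered even from an otherwise broken overlay file. The file contents are invented for the example.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	contents := []byte("package mypkg\n\nfunc Broken( {") // the body is never reached by the parser
	f, err := parser.ParseFile(token.NewFileSet(), "overlay.go", contents, parser.PackageClauseOnly)
	if err != nil {
		fmt.Println("no parsable package clause:", err)
		return
	}
	fmt.Println(f.Name.Name) // mypkg
}
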

@ -25,24 +25,16 @@ import (
"golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/gcexportdata"
) )
// A LoadMode specifies the amount of detail to return when loading. // A LoadMode controls the amount of detail to return when loading.
// Higher-numbered modes cause Load to return more information, // The bits below can be combined to specify which fields should be
// but may be slower. Load may return more information than requested. // filled in the result packages.
// The zero value is a special case, equivalent to combining
// the NeedName, NeedFiles, and NeedCompiledGoFiles bits.
// ID and Errors (if present) will always be filled.
// Load may return more information than requested.
type LoadMode int type LoadMode int
const ( const (
// The following constants are used to specify which fields of the Package
// should be filled when loading is done. As a special case to provide
// backwards compatibility, a LoadMode of 0 is equivalent to LoadFiles.
// For all other LoadModes, the bits below specify which fields will be filled
// in the result packages.
// WARNING: This part of the go/packages API is EXPERIMENTAL. It might
// be changed or removed up until April 15 2019. After that date it will
// be frozen.
// TODO(matloob): Remove this comment on April 15.
// ID and Errors (if present) will always be filled.
// NeedName adds Name and PkgPath. // NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota NeedName LoadMode = 1 << iota
@ -56,8 +48,8 @@ const (
// "placeholder" Packages with only the ID set. // "placeholder" Packages with only the ID set.
NeedImports NeedImports
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports. If NeedImports // NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
// is not set NeedDeps has no effect. // If NeedImports is not set, it will be added automatically.
NeedDeps NeedDeps
// NeedExportsFile adds ExportsFile. // NeedExportsFile adds ExportsFile.
@ -69,7 +61,7 @@ const (
// NeedSyntax adds Syntax. // NeedSyntax adds Syntax.
NeedSyntax NeedSyntax
// NeedTypesInfo adds TypesInfo. // NeedTypesInfo adds TypesInfo. If NeedImports is not set, it will be added automatically.
NeedTypesInfo NeedTypesInfo
// NeedTypesSizes adds TypesSizes. // NeedTypesSizes adds TypesSizes.
@ -77,31 +69,25 @@ const (
) )
const ( const (
// LoadFiles finds the packages and computes their source file lists. // Deprecated: LoadFiles exists for historical compatibility
// Package fields: ID, Name, Errors, GoFiles, CompiledGoFiles, and OtherFiles. // and should not be used. Please directly specify the needed fields using the Need values.
LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles
// LoadImports adds import information for each package // Deprecated: LoadImports exists for historical compatibility
// and its dependencies. // and should not be used. Please directly specify the needed fields using the Need values.
// Package fields added: Imports. LoadImports = LoadFiles | NeedImports
LoadImports = LoadFiles | NeedImports | NeedDeps
// LoadTypes adds type information for package-level // Deprecated: LoadTypes exists for historical compatibility
// declarations in the packages matching the patterns. // and should not be used. Please directly specify the needed fields using the Need values.
// Package fields added: Types, TypesSizes, Fset, and IllTyped.
// This mode uses type information provided by the build system when
// possible, and may fill in the ExportFile field.
LoadTypes = LoadImports | NeedTypes | NeedTypesSizes LoadTypes = LoadImports | NeedTypes | NeedTypesSizes
// LoadSyntax adds typed syntax trees for the packages matching the patterns. // Deprecated: LoadSyntax exists for historical compatibility
// Package fields added: Syntax, and TypesInfo, for direct pattern matches only. // and should not be used. Please directly specify the needed fields using the Need values.
LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo
// LoadAllSyntax adds typed syntax trees for the packages matching the patterns // Deprecated: LoadAllSyntax exists for historical compatibility
// and all dependencies. // and should not be used. Please directly specify the needed fields using the Need values.
// Package fields added: Types, Fset, IllTyped, Syntax, and TypesInfo, LoadAllSyntax = LoadSyntax | NeedDeps
// for all packages in the import graph.
LoadAllSyntax = LoadSyntax
) )
// A Config specifies details about how packages should be loaded. // A Config specifies details about how packages should be loaded.
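
A hedged sketch of the migration implied by the deprecations above: callers spell out the Need* bits they require instead of relying on the Load* aliases. The pattern "fmt" is only an example.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// Roughly the old LoadImports, written with explicit bits.
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedImports,
	}
	pkgs, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range pkgs {
		fmt.Println(p.ID, p.PkgPath, len(p.Imports), "direct imports")
	}
}
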
@ -117,6 +103,12 @@ type Config struct {
// If Context is nil, the load cannot be cancelled. // If Context is nil, the load cannot be cancelled.
Context context.Context Context context.Context
// Logf is the logger for the config.
// If the user provides a logger, debug logging is enabled.
// If the GOPACKAGESDEBUG environment variable is set to true,
// but the logger is nil, default to log.Printf.
Logf func(format string, args ...interface{})
// Dir is the directory in which to run the build system's query tool // Dir is the directory in which to run the build system's query tool
// that provides information about the packages. // that provides information about the packages.
// If Dir is empty, the tool is run in the current directory. // If Dir is empty, the tool is run in the current directory.
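
A hedged sketch of wiring the new Config.Logf hook to the standard logger, so the timing and stderr lines logged around the go list invocations become visible to the caller.

package main

import (
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName,
		Logf: func(format string, args ...interface{}) {
			log.Printf("go/packages: "+format, args...)
		},
	}
	if _, err := packages.Load(cfg, "fmt"); err != nil {
		log.Fatal(err)
	}
}
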
@ -275,9 +267,9 @@ type Package struct {
Imports map[string]*Package Imports map[string]*Package
// Types provides type information for the package. // Types provides type information for the package.
// Modes LoadTypes and above set this field for packages matching the // The NeedTypes LoadMode bit sets this field for packages matching the
// patterns; type information for dependencies may be missing or incomplete. // patterns; type information for dependencies may be missing or incomplete,
// Mode LoadAllSyntax sets this field for all packages, including dependencies. // unless NeedDeps and NeedImports are also set.
Types *types.Package Types *types.Package
// Fset provides position information for Types, TypesInfo, and Syntax. // Fset provides position information for Types, TypesInfo, and Syntax.
@ -290,8 +282,9 @@ type Package struct {
// Syntax is the package's syntax trees, for the files listed in CompiledGoFiles. // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles.
// //
// Mode LoadSyntax sets this field for packages matching the patterns. // The NeedSyntax LoadMode bit populates this field for packages matching the patterns.
// Mode LoadAllSyntax sets this field for all packages, including dependencies. // If NeedDeps and NeedImports are also set, this field will also be populated
// for dependencies.
Syntax []*ast.File Syntax []*ast.File
// TypesInfo provides type information about the package's syntax trees. // TypesInfo provides type information about the package's syntax trees.
@ -442,9 +435,20 @@ func newLoader(cfg *Config) *loader {
} }
if cfg != nil { if cfg != nil {
ld.Config = *cfg ld.Config = *cfg
// If the user has provided a logger, use it.
ld.Config.Logf = cfg.Logf
}
if ld.Config.Logf == nil {
// If the GOPACKAGESDEBUG environment variable is set to true,
// but the user has not provided a logger, default to log.Printf.
if debug {
ld.Config.Logf = log.Printf
} else {
ld.Config.Logf = func(format string, args ...interface{}) {}
}
} }
if ld.Config.Mode == 0 { if ld.Config.Mode == 0 {
ld.Config.Mode = LoadFiles // Preserve zero behavior of Mode for backwards compatibility. ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility.
} }
if ld.Config.Env == nil { if ld.Config.Env == nil {
ld.Config.Env = os.Environ() ld.Config.Env = os.Environ()
@ -472,6 +476,8 @@ func newLoader(cfg *Config) *loader {
} }
} }
} }
ld.addDependingLoadModes()
return ld return ld
} }
@ -492,8 +498,8 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
} }
lpkg := &loaderPackage{ lpkg := &loaderPackage{
Package: pkg, Package: pkg,
needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && rootIndex < 0) || rootIndex >= 0, needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0,
needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && rootIndex < 0) || rootIndex >= 0 || needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0 ||
len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
pkg.ExportFile == "" && pkg.PkgPath != "unsafe", pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
} }
@ -540,28 +546,31 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
lpkg.color = grey lpkg.color = grey
stack = append(stack, lpkg) // push stack = append(stack, lpkg) // push
stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
lpkg.Imports = make(map[string]*Package, len(stubs)) // If NeedImports isn't set, the imports fields will all be zeroed out.
for importPath, ipkg := range stubs { if ld.Mode&NeedImports != 0 {
var importErr error lpkg.Imports = make(map[string]*Package, len(stubs))
imp := ld.pkgs[ipkg.ID] for importPath, ipkg := range stubs {
if imp == nil { var importErr error
// (includes package "C" when DisableCgo) imp := ld.pkgs[ipkg.ID]
importErr = fmt.Errorf("missing package: %q", ipkg.ID) if imp == nil {
} else if imp.color == grey { // (includes package "C" when DisableCgo)
importErr = fmt.Errorf("import cycle: %s", stack) importErr = fmt.Errorf("missing package: %q", ipkg.ID)
} } else if imp.color == grey {
if importErr != nil { importErr = fmt.Errorf("import cycle: %s", stack)
if lpkg.importErrors == nil { }
lpkg.importErrors = make(map[string]error) if importErr != nil {
if lpkg.importErrors == nil {
lpkg.importErrors = make(map[string]error)
}
lpkg.importErrors[importPath] = importErr
continue
} }
lpkg.importErrors[importPath] = importErr
continue
}
if visit(imp) { if visit(imp) {
lpkg.needsrc = true lpkg.needsrc = true
}
lpkg.Imports[importPath] = imp.Package
} }
lpkg.Imports[importPath] = imp.Package
} }
if lpkg.needsrc { if lpkg.needsrc {
srcPkgs = append(srcPkgs, lpkg) srcPkgs = append(srcPkgs, lpkg)
@ -575,7 +584,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
return lpkg.needsrc return lpkg.needsrc
} }
if ld.Mode&(NeedImports|NeedDeps) == 0 { if ld.Mode&NeedImports == 0 {
// We do this to drop the stub import packages that we are not even going to try to resolve. // We do this to drop the stub import packages that we are not even going to try to resolve.
for _, lpkg := range initial { for _, lpkg := range initial {
lpkg.Imports = nil lpkg.Imports = nil
@ -586,7 +595,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
visit(lpkg) visit(lpkg)
} }
} }
if ld.Mode&NeedDeps != 0 { // TODO(matloob): This is only the case if NeedTypes is also set, right? if ld.Mode&NeedImports != 0 && ld.Mode&NeedTypes != 0 {
for _, lpkg := range srcPkgs { for _, lpkg := range srcPkgs {
// Complete type information is required for the // Complete type information is required for the
// immediate dependencies of each source package. // immediate dependencies of each source package.
@ -611,7 +620,6 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
} }
result := make([]*Package, len(initial)) result := make([]*Package, len(initial))
importPlaceholders := make(map[string]*Package)
for i, lpkg := range initial { for i, lpkg := range initial {
result[i] = lpkg.Package result[i] = lpkg.Package
} }
@ -648,17 +656,8 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
if ld.Mode&NeedTypesSizes == 0 { if ld.Mode&NeedTypesSizes == 0 {
ld.pkgs[i].TypesSizes = nil ld.pkgs[i].TypesSizes = nil
} }
if ld.Mode&NeedDeps == 0 {
for j, pkg := range ld.pkgs[i].Imports {
ph, ok := importPlaceholders[pkg.ID]
if !ok {
ph = &Package{ID: pkg.ID}
importPlaceholders[pkg.ID] = ph
}
ld.pkgs[i].Imports[j] = ph
}
}
} }
return result, nil return result, nil
} }
@ -679,7 +678,6 @@ func (ld *loader) loadRecursive(lpkg *loaderPackage) {
}(imp) }(imp)
} }
wg.Wait() wg.Wait()
ld.loadPackage(lpkg) ld.loadPackage(lpkg)
}) })
} }
@ -687,7 +685,7 @@ func (ld *loader) loadRecursive(lpkg *loaderPackage) {
// loadPackage loads the specified package. // loadPackage loads the specified package.
// It must be called only once per Package, // It must be called only once per Package,
// after immediate dependencies are loaded. // after immediate dependencies are loaded.
// Precondition: ld.Mode >= LoadTypes. // Precondition: ld.Mode & NeedTypes.
func (ld *loader) loadPackage(lpkg *loaderPackage) { func (ld *loader) loadPackage(lpkg *loaderPackage) {
if lpkg.PkgPath == "unsafe" { if lpkg.PkgPath == "unsafe" {
// Fill in the blanks to avoid surprises. // Fill in the blanks to avoid surprises.
@ -806,7 +804,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
if ipkg.Types != nil && ipkg.Types.Complete() { if ipkg.Types != nil && ipkg.Types.Complete() {
return ipkg.Types, nil return ipkg.Types, nil
} }
log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg) log.Fatalf("internal error: package %q without types was imported from %q", path, lpkg)
panic("unreachable") panic("unreachable")
}) })
@ -817,7 +815,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// Type-check bodies of functions only in non-initial packages. // Type-check bodies of functions only in non-initial packages.
// Example: for import graph A->B->C and initial packages {A,C}, // Example: for import graph A->B->C and initial packages {A,C},
// we can ignore function bodies in B. // we can ignore function bodies in B.
IgnoreFuncBodies: (ld.Mode&(NeedDeps|NeedTypesInfo) == 0) && !lpkg.initial, IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
Error: appendError, Error: appendError,
Sizes: ld.sizes, Sizes: ld.sizes,
@ -1079,6 +1077,25 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
return tpkg, nil return tpkg, nil
} }
func usesExportData(cfg *Config) bool { // addDependingLoadModes adds dependencies for the chosen LoadMode in ld.Mode
return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedTypesInfo == 0 func (ld *loader) addDependingLoadModes() {
if ld.Mode&NeedTypesInfo != 0 && ld.Mode&NeedImports == 0 {
		// If NeedTypesInfo is set, go/packages needs to do typechecking itself so it can
// associate type info with the AST. To do so, we need the export data
// for dependencies, which means we need to ask for the direct dependencies.
// NeedImports is used to ask for the direct dependencies.
ld.Mode |= NeedImports
ld.Logf("Added load mode dependency of NeedTypesInfo: NeedImports")
}
if ld.Mode&NeedDeps != 0 && ld.Mode&NeedImports == 0 {
// With NeedDeps we need to load at least direct dependencies.
// NeedImports is used to ask for the direct dependencies.
ld.Mode |= NeedImports
ld.Logf("Added load mode dependency of NeedDeps: NeedImports")
}
}
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
} }
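
A minimal sketch of the rule addDependingLoadModes implements: requesting NeedTypesInfo (or NeedDeps) without NeedImports silently turns NeedImports on, since direct dependencies are required for type checking.

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	mode := packages.NeedTypesInfo
	if mode&packages.NeedTypesInfo != 0 && mode&packages.NeedImports == 0 {
		mode |= packages.NeedImports // what the loader now does on the caller's behalf
	}
	fmt.Println(mode&packages.NeedImports != 0) // true
}
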

@ -101,7 +101,7 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*
// //
// The mode parameter controls diagnostics and checking during SSA construction. // The mode parameter controls diagnostics and checking during SSA construction.
// //
// Deprecated: use golang.org/x/tools/go/packages and the Packages // Deprecated: Use golang.org/x/tools/go/packages and the Packages
// function instead; see ssa.ExampleLoadPackages. // function instead; see ssa.ExampleLoadPackages.
// //
func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {

@ -27,7 +27,8 @@ import (
// (as defined by "go test") defined in the specified package, // (as defined by "go test") defined in the specified package,
// and its TestMain function, if any. // and its TestMain function, if any.
// //
// Deprecated: use x/tools/go/packages to access synthetic testmain packages. // Deprecated: Use golang.org/x/tools/go/packages to access synthetic
// testmain packages.
func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
prog := pkg.Prog prog := pkg.Prog
@ -112,7 +113,8 @@ func isTest(name, prefix string) bool {
// Subsequent calls to prog.AllPackages include the new package. // Subsequent calls to prog.AllPackages include the new package.
// The package pkg must belong to the program prog. // The package pkg must belong to the program prog.
// //
// Deprecated: use x/tools/go/packages to access synthetic testmain packages. // Deprecated: Use golang.org/x/tools/go/packages to access synthetic
// testmain packages.
func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
if pkg.Prog != prog { if pkg.Prog != prog {
log.Fatal("Package does not belong to Program") log.Fatal("Package does not belong to Program")

@ -59,15 +59,27 @@ func SrcDirsRoots(ctx *build.Context) []Root {
// paths of the containing source directory and the package directory. // paths of the containing source directory and the package directory.
// add will be called concurrently. // add will be called concurrently.
func Walk(roots []Root, add func(root Root, dir string), opts Options) { func Walk(roots []Root, add func(root Root, dir string), opts Options) {
WalkSkip(roots, add, func(Root, string) bool { return false }, opts)
}
// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
// For each package found, add will be called (concurrently) with the absolute
// paths of the containing source directory and the package directory.
// For each directory that will be scanned, skip will be called (concurrently)
// with the absolute paths of the containing source directory and the directory.
// If skip returns false on a directory, it will be processed.
// add will be called concurrently.
// skip will be called concurrently.
func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) {
for _, root := range roots { for _, root := range roots {
walkDir(root, add, opts) walkDir(root, add, skip, opts)
} }
} }
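
A hedged usage sketch of the new WalkSkip entry point; gopathwalk is an internal x/tools package, so this would only compile inside that module or a vendored copy of it. Skipping vendor directories is an invented example policy, not something the package mandates.

package main

import (
	"fmt"
	"go/build"
	"path/filepath"

	"golang.org/x/tools/internal/gopathwalk"
)

func main() {
	roots := gopathwalk.SrcDirsRoots(&build.Default)
	add := func(root gopathwalk.Root, dir string) {
		fmt.Println("found package dir:", dir)
	}
	skip := func(root gopathwalk.Root, dir string) bool {
		// Returning true means: do not walk into dir.
		return filepath.Base(dir) == "vendor"
	}
	gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: false})
}
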
func walkDir(root Root, add func(Root, string), opts Options) { func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
if _, err := os.Stat(root.Path); os.IsNotExist(err) { if _, err := os.Stat(root.Path); os.IsNotExist(err) {
if opts.Debug { if opts.Debug {
log.Printf("skipping nonexistant directory: %v", root.Path) log.Printf("skipping nonexistent directory: %v", root.Path)
} }
return return
} }
@ -77,6 +89,7 @@ func walkDir(root Root, add func(Root, string), opts Options) {
w := &walker{ w := &walker{
root: root, root: root,
add: add, add: add,
skip: skip,
opts: opts, opts: opts,
} }
w.init() w.init()
@ -91,9 +104,10 @@ func walkDir(root Root, add func(Root, string), opts Options) {
// walker is the callback for fastwalk.Walk. // walker is the callback for fastwalk.Walk.
type walker struct { type walker struct {
root Root // The source directory to scan. root Root // The source directory to scan.
add func(Root, string) // The callback that will be invoked for every possible Go package dir. add func(Root, string) // The callback that will be invoked for every possible Go package dir.
opts Options // Options passed to Walk by the user. skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true.
opts Options // Options passed to Walk by the user.
ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files. ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
} }
@ -151,12 +165,16 @@ func (w *walker) getIgnoredDirs(path string) []string {
return ignoredDirs return ignoredDirs
} }
func (w *walker) shouldSkipDir(fi os.FileInfo) bool { func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
for _, ignoredDir := range w.ignoredDirs { for _, ignoredDir := range w.ignoredDirs {
if os.SameFile(fi, ignoredDir) { if os.SameFile(fi, ignoredDir) {
return true return true
} }
} }
if w.skip != nil {
		// Check with the user-specified callback.
return w.skip(w.root, dir)
}
return false return false
} }
@ -184,7 +202,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
return filepath.SkipDir return filepath.SkipDir
} }
fi, err := os.Lstat(path) fi, err := os.Lstat(path)
if err == nil && w.shouldSkipDir(fi) { if err == nil && w.shouldSkipDir(fi, path) {
return filepath.SkipDir return filepath.SkipDir
} }
return nil return nil
@ -224,7 +242,7 @@ func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
if !ts.IsDir() { if !ts.IsDir() {
return false return false
} }
if w.shouldSkipDir(ts) { if w.shouldSkipDir(ts, dir) {
return false return false
} }
// Check for symlink loops by statting each directory component // Check for symlink loops by statting each directory component

@ -13,7 +13,6 @@ import (
"go/parser" "go/parser"
"go/token" "go/token"
"io/ioutil" "io/ioutil"
"log"
"os" "os"
"os/exec" "os/exec"
"path" "path"
@ -68,10 +67,27 @@ func importGroup(env *ProcessEnv, importPath string) int {
return 0 return 0
} }
// An importInfo represents a single import statement. type ImportFixType int
type importInfo struct {
importPath string // import path, e.g. "crypto/rand". const (
name string // import name, e.g. "crand", or "" if none. AddImport ImportFixType = iota
DeleteImport
SetImportName
)
type ImportFix struct {
// StmtInfo represents the import statement this fix will add, remove, or change.
StmtInfo ImportInfo
// IdentName is the identifier that this fix will add or remove.
IdentName string
// FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
FixType ImportFixType
}
// An ImportInfo represents a single import statement.
type ImportInfo struct {
ImportPath string // import path, e.g. "crypto/rand".
Name string // import name, e.g. "crand", or "" if none.
} }
// A packageInfo represents what's known about a package. // A packageInfo represents what's known about a package.
@ -169,10 +185,10 @@ func collectReferences(f *ast.File) references {
return refs return refs
} }
// collectImports returns all the imports in f, keyed by their package name as // collectImports returns all the imports in f.
// determined by pathToName. Unnamed imports (., _) and "C" are ignored. // Unnamed imports (., _) and "C" are ignored.
func collectImports(f *ast.File) []*importInfo { func collectImports(f *ast.File) []*ImportInfo {
var imports []*importInfo var imports []*ImportInfo
for _, imp := range f.Imports { for _, imp := range f.Imports {
var name string var name string
if imp.Name != nil { if imp.Name != nil {
@ -182,9 +198,9 @@ func collectImports(f *ast.File) []*importInfo {
continue continue
} }
path := strings.Trim(imp.Path.Value, `"`) path := strings.Trim(imp.Path.Value, `"`)
imports = append(imports, &importInfo{ imports = append(imports, &ImportInfo{
name: name, Name: name,
importPath: path, ImportPath: path,
}) })
} }
return imports return imports
@ -192,9 +208,9 @@ func collectImports(f *ast.File) []*importInfo {
// findMissingImport searches pass's candidates for an import that provides // findMissingImport searches pass's candidates for an import that provides
// pkg, containing all of syms. // pkg, containing all of syms.
func (p *pass) findMissingImport(pkg string, syms map[string]bool) *importInfo { func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
for _, candidate := range p.candidates { for _, candidate := range p.candidates {
pkgInfo, ok := p.knownPackages[candidate.importPath] pkgInfo, ok := p.knownPackages[candidate.ImportPath]
if !ok { if !ok {
continue continue
} }
@ -234,27 +250,33 @@ type pass struct {
otherFiles []*ast.File // sibling files. otherFiles []*ast.File // sibling files.
// Intermediate state, generated by load. // Intermediate state, generated by load.
existingImports map[string]*importInfo existingImports map[string]*ImportInfo
allRefs references allRefs references
missingRefs references missingRefs references
// Inputs to fix. These can be augmented between successive fix calls. // Inputs to fix. These can be augmented between successive fix calls.
lastTry bool // indicates that this is the last call and fix should clean up as best it can. lastTry bool // indicates that this is the last call and fix should clean up as best it can.
candidates []*importInfo // candidate imports in priority order. candidates []*ImportInfo // candidate imports in priority order.
knownPackages map[string]*packageInfo // information about all known packages. knownPackages map[string]*packageInfo // information about all known packages.
} }
// loadPackageNames saves the package names for everything referenced by imports. // loadPackageNames saves the package names for everything referenced by imports.
func (p *pass) loadPackageNames(imports []*importInfo) error { func (p *pass) loadPackageNames(imports []*ImportInfo) error {
if p.env.Debug {
p.env.Logf("loading package names for %v packages", len(imports))
defer func() {
p.env.Logf("done loading package names for %v packages", len(imports))
}()
}
var unknown []string var unknown []string
for _, imp := range imports { for _, imp := range imports {
if _, ok := p.knownPackages[imp.importPath]; ok { if _, ok := p.knownPackages[imp.ImportPath]; ok {
continue continue
} }
unknown = append(unknown, imp.importPath) unknown = append(unknown, imp.ImportPath)
} }
names, err := p.env.getResolver().loadPackageNames(unknown, p.srcDir) names, err := p.env.GetResolver().loadPackageNames(unknown, p.srcDir)
if err != nil { if err != nil {
return err return err
} }
@ -271,24 +293,24 @@ func (p *pass) loadPackageNames(imports []*importInfo) error {
// importIdentifier returns the identifier that imp will introduce. It will // importIdentifier returns the identifier that imp will introduce. It will
// guess if the package name has not been loaded, e.g. because the source // guess if the package name has not been loaded, e.g. because the source
// is not available. // is not available.
func (p *pass) importIdentifier(imp *importInfo) string { func (p *pass) importIdentifier(imp *ImportInfo) string {
if imp.name != "" { if imp.Name != "" {
return imp.name return imp.Name
} }
known := p.knownPackages[imp.importPath] known := p.knownPackages[imp.ImportPath]
if known != nil && known.name != "" { if known != nil && known.name != "" {
return known.name return known.name
} }
return importPathToAssumedName(imp.importPath) return importPathToAssumedName(imp.ImportPath)
} }
// load reads in everything necessary to run a pass, and reports whether the // load reads in everything necessary to run a pass, and reports whether the
// file already has all the imports it needs. It fills in p.missingRefs with the // file already has all the imports it needs. It fills in p.missingRefs with the
// file's missing symbols, if any, or removes unused imports if not. // file's missing symbols, if any, or removes unused imports if not.
func (p *pass) load() bool { func (p *pass) load() ([]*ImportFix, bool) {
p.knownPackages = map[string]*packageInfo{} p.knownPackages = map[string]*packageInfo{}
p.missingRefs = references{} p.missingRefs = references{}
p.existingImports = map[string]*importInfo{} p.existingImports = map[string]*ImportInfo{}
// Load basic information about the file in question. // Load basic information about the file in question.
p.allRefs = collectReferences(p.f) p.allRefs = collectReferences(p.f)
@ -313,9 +335,9 @@ func (p *pass) load() bool {
err := p.loadPackageNames(append(imports, p.candidates...)) err := p.loadPackageNames(append(imports, p.candidates...))
if err != nil { if err != nil {
if p.env.Debug { if p.env.Debug {
log.Printf("loading package names: %v", err) p.env.Logf("loading package names: %v", err)
} }
return false return nil, false
} }
} }
for _, imp := range imports { for _, imp := range imports {
@ -334,18 +356,18 @@ func (p *pass) load() bool {
} }
} }
if len(p.missingRefs) != 0 { if len(p.missingRefs) != 0 {
return false return nil, false
} }
return p.fix() return p.fix()
} }
// fix attempts to satisfy missing imports using p.candidates. If it finds // fix attempts to satisfy missing imports using p.candidates. If it finds
// everything, or if p.lastTry is true, it adds the imports it found, // everything, or if p.lastTry is true, it updates fixes to add the imports it found,
// removes anything unused, and returns true. // delete anything unused, and update import names, and returns true.
func (p *pass) fix() bool { func (p *pass) fix() ([]*ImportFix, bool) {
// Find missing imports. // Find missing imports.
var selected []*importInfo var selected []*ImportInfo
for left, rights := range p.missingRefs { for left, rights := range p.missingRefs {
if imp := p.findMissingImport(left, rights); imp != nil { if imp := p.findMissingImport(left, rights); imp != nil {
selected = append(selected, imp) selected = append(selected, imp)
@ -353,10 +375,11 @@ func (p *pass) fix() bool {
} }
if !p.lastTry && len(selected) != len(p.missingRefs) { if !p.lastTry && len(selected) != len(p.missingRefs) {
return false return nil, false
} }
// Found everything, or giving up. Add the new imports and remove any unused. // Found everything, or giving up. Add the new imports and remove any unused.
var fixes []*ImportFix
for _, imp := range p.existingImports { for _, imp := range p.existingImports {
// We deliberately ignore globals here, because we can't be sure // We deliberately ignore globals here, because we can't be sure
// they're in the same package. People do things like put multiple // they're in the same package. People do things like put multiple
@ -364,28 +387,80 @@ func (p *pass) fix() bool {
// remove imports if they happen to have the same name as a var in // remove imports if they happen to have the same name as a var in
// a different package. // a different package.
if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
astutil.DeleteNamedImport(p.fset, p.f, imp.name, imp.importPath) fixes = append(fixes, &ImportFix{
StmtInfo: *imp,
IdentName: p.importIdentifier(imp),
FixType: DeleteImport,
})
continue
}
// An existing import may need to update its import name to be correct.
if name := p.importSpecName(imp); name != imp.Name {
fixes = append(fixes, &ImportFix{
StmtInfo: ImportInfo{
Name: name,
ImportPath: imp.ImportPath,
},
IdentName: p.importIdentifier(imp),
FixType: SetImportName,
})
} }
} }
for _, imp := range selected { for _, imp := range selected {
astutil.AddNamedImport(p.fset, p.f, imp.name, imp.importPath) fixes = append(fixes, &ImportFix{
StmtInfo: ImportInfo{
Name: p.importSpecName(imp),
ImportPath: imp.ImportPath,
},
IdentName: p.importIdentifier(imp),
FixType: AddImport,
})
} }
if p.loadRealPackageNames { return fixes, true
for _, imp := range p.f.Imports { }
if imp.Name != nil {
continue // importSpecName gets the import name of imp in the import spec.
} //
path := strings.Trim(imp.Path.Value, `""`) // When the import identifier matches the assumed import name, the import name does
ident := p.importIdentifier(&importInfo{importPath: path}) // not appear in the import spec.
if ident != importPathToAssumedName(path) { func (p *pass) importSpecName(imp *ImportInfo) string {
imp.Name = &ast.Ident{Name: ident, NamePos: imp.Pos()} // If we did not load the real package names, or the name is already set,
// we just return the existing name.
if !p.loadRealPackageNames || imp.Name != "" {
return imp.Name
}
ident := p.importIdentifier(imp)
if ident == importPathToAssumedName(imp.ImportPath) {
return "" // ident not needed since the assumed and real names are the same.
}
return ident
}
// apply will perform the fixes on f in order.
func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) {
for _, fix := range fixes {
switch fix.FixType {
case DeleteImport:
astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
case AddImport:
astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
case SetImportName:
// Find the matching import path and change the name.
for _, spec := range f.Imports {
path := strings.Trim(spec.Path.Value, `"`)
if path == fix.StmtInfo.ImportPath {
spec.Name = &ast.Ident{
Name: fix.StmtInfo.Name,
NamePos: spec.Pos(),
}
}
} }
} }
} }
return true
} }
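
A hedged, package-internal sketch of the new fix pipeline (ImportFix, ImportInfo, and apply are identifiers from this file, so the snippet assumes it lives in the same package): hand-built fixes are applied to a freshly parsed file. The source text and fix list are example data.

package imports

import (
	"bytes"
	"go/parser"
	"go/printer"
	"go/token"
)

func exampleApplyFixes() (string, error) {
	src := "package main\n\nimport \"os\"\n\nfunc main() {}\n" // "os" is unused on purpose
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
	if err != nil {
		return "", err
	}
	fixes := []*ImportFix{
		{StmtInfo: ImportInfo{ImportPath: "fmt"}, IdentName: "fmt", FixType: AddImport},
		{StmtInfo: ImportInfo{ImportPath: "os"}, IdentName: "os", FixType: DeleteImport},
	}
	apply(fset, f, fixes)

	var buf bytes.Buffer
	if err := printer.Fprint(&buf, fset, f); err != nil {
		return "", err
	}
	return buf.String(), nil // the file now imports "fmt" instead of "os"
}
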
// assumeSiblingImportsValid assumes that siblings' use of packages is valid, // assumeSiblingImportsValid assumes that siblings' use of packages is valid,
@ -394,15 +469,15 @@ func (p *pass) assumeSiblingImportsValid() {
for _, f := range p.otherFiles { for _, f := range p.otherFiles {
refs := collectReferences(f) refs := collectReferences(f)
imports := collectImports(f) imports := collectImports(f)
importsByName := map[string]*importInfo{} importsByName := map[string]*ImportInfo{}
for _, imp := range imports { for _, imp := range imports {
importsByName[p.importIdentifier(imp)] = imp importsByName[p.importIdentifier(imp)] = imp
} }
for left, rights := range refs { for left, rights := range refs {
if imp, ok := importsByName[left]; ok { if imp, ok := importsByName[left]; ok {
if _, ok := stdlib[imp.importPath]; ok { if _, ok := stdlib[imp.ImportPath]; ok {
// We have the stdlib in memory; no need to guess. // We have the stdlib in memory; no need to guess.
rights = stdlib[imp.importPath] rights = stdlib[imp.ImportPath]
} }
p.addCandidate(imp, &packageInfo{ p.addCandidate(imp, &packageInfo{
// no name; we already know it. // no name; we already know it.
@ -415,9 +490,9 @@ func (p *pass) assumeSiblingImportsValid() {
// addCandidate adds a candidate import to p, and merges in the information // addCandidate adds a candidate import to p, and merges in the information
// in pkg. // in pkg.
func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) { func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
p.candidates = append(p.candidates, imp) p.candidates = append(p.candidates, imp)
if existing, ok := p.knownPackages[imp.importPath]; ok { if existing, ok := p.knownPackages[imp.ImportPath]; ok {
if existing.name == "" { if existing.name == "" {
existing.name = pkg.name existing.name = pkg.name
} }
@ -425,7 +500,7 @@ func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) {
existing.exports[export] = true existing.exports[export] = true
} }
} else { } else {
p.knownPackages[imp.importPath] = pkg p.knownPackages[imp.ImportPath] = pkg
} }
} }
@ -437,13 +512,24 @@ func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) {
var fixImports = fixImportsDefault var fixImports = fixImportsDefault
func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
abs, err := filepath.Abs(filename) fixes, err := getFixes(fset, f, filename, env)
if err != nil { if err != nil {
return err return err
} }
apply(fset, f, fixes)
return err
}
// getFixes gets the import fixes that need to be made to f in order to fix the imports.
// It does not modify the ast.
func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
abs, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
srcDir := filepath.Dir(abs) srcDir := filepath.Dir(abs)
if env.Debug { if env.Debug {
log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
} }
// First pass: looking only at f, and using the naive algorithm to // First pass: looking only at f, and using the naive algorithm to
@ -451,8 +537,8 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P
// complete. We can't add any imports yet, because we don't know // complete. We can't add any imports yet, because we don't know
// if missing references are actually package vars. // if missing references are actually package vars.
p := &pass{fset: fset, f: f, srcDir: srcDir} p := &pass{fset: fset, f: f, srcDir: srcDir}
if p.load() { if fixes, done := p.load(); done {
return nil return fixes, nil
} }
otherFiles := parseOtherFiles(fset, srcDir, filename) otherFiles := parseOtherFiles(fset, srcDir, filename)
@ -460,15 +546,15 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P
// Second pass: add information from other files in the same package, // Second pass: add information from other files in the same package,
// like their package vars and imports. // like their package vars and imports.
p.otherFiles = otherFiles p.otherFiles = otherFiles
if p.load() { if fixes, done := p.load(); done {
return nil return fixes, nil
} }
// Now we can try adding imports from the stdlib. // Now we can try adding imports from the stdlib.
p.assumeSiblingImportsValid() p.assumeSiblingImportsValid()
addStdlibCandidates(p, p.missingRefs) addStdlibCandidates(p, p.missingRefs)
if p.fix() { if fixes, done := p.fix(); done {
return nil return fixes, nil
} }
// Third pass: get real package names where we had previously used // Third pass: get real package names where we had previously used
@ -477,25 +563,50 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P
p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
p.loadRealPackageNames = true p.loadRealPackageNames = true
p.otherFiles = otherFiles p.otherFiles = otherFiles
if p.load() { if fixes, done := p.load(); done {
return nil return fixes, nil
} }
addStdlibCandidates(p, p.missingRefs) addStdlibCandidates(p, p.missingRefs)
p.assumeSiblingImportsValid() p.assumeSiblingImportsValid()
if p.fix() { if fixes, done := p.fix(); done {
return nil return fixes, nil
} }
// Go look for candidates in $GOPATH, etc. We don't necessarily load // Go look for candidates in $GOPATH, etc. We don't necessarily load
// the real exports of sibling imports, so keep assuming their contents. // the real exports of sibling imports, so keep assuming their contents.
if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
return err return nil, err
} }
p.lastTry = true p.lastTry = true
p.fix() fixes, _ := p.fix()
return nil return fixes, nil
}
// getAllCandidates gets all of the candidates to be imported, regardless of whether they are needed.
func getAllCandidates(filename string, env *ProcessEnv) ([]ImportFix, error) {
// TODO(suzmue): scan for additional candidates and filter out
// current package.
// Get the stdlib candidates and sort by import path.
var paths []string
for importPath := range stdlib {
paths = append(paths, importPath)
}
sort.Strings(paths)
var imports []ImportFix
for _, importPath := range paths {
imports = append(imports, ImportFix{
StmtInfo: ImportInfo{
ImportPath: importPath,
},
IdentName: path.Base(importPath),
FixType: AddImport,
})
}
return imports, nil
} }
// ProcessEnv contains environment variables and settings that affect the use of // ProcessEnv contains environment variables and settings that affect the use of
@ -506,13 +617,16 @@ type ProcessEnv struct {
// If non-empty, these will be used instead of the // If non-empty, these will be used instead of the
// process-wide values. // process-wide values.
GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS string GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string
WorkingDir string WorkingDir string
// If true, use go/packages regardless of the environment. // If true, use go/packages regardless of the environment.
ForceGoPackages bool ForceGoPackages bool
resolver resolver // Logf is the default logger for the ProcessEnv.
Logf func(format string, args ...interface{})
resolver Resolver
} }
func (e *ProcessEnv) env() []string { func (e *ProcessEnv) env() []string {
@ -527,25 +641,29 @@ func (e *ProcessEnv) env() []string {
add("GO111MODULE", e.GO111MODULE) add("GO111MODULE", e.GO111MODULE)
add("GOPROXY", e.GOPROXY) add("GOPROXY", e.GOPROXY)
add("GOFLAGS", e.GOFLAGS) add("GOFLAGS", e.GOFLAGS)
add("GOSUMDB", e.GOSUMDB)
if e.WorkingDir != "" { if e.WorkingDir != "" {
add("PWD", e.WorkingDir) add("PWD", e.WorkingDir)
} }
return env return env
} }
func (e *ProcessEnv) getResolver() resolver { func (e *ProcessEnv) GetResolver() Resolver {
if e.resolver != nil { if e.resolver != nil {
return e.resolver return e.resolver
} }
if e.ForceGoPackages { if e.ForceGoPackages {
return &goPackagesResolver{env: e} e.resolver = &goPackagesResolver{env: e}
return e.resolver
} }
out, err := e.invokeGo("env", "GOMOD") out, err := e.invokeGo("env", "GOMOD")
if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 { if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
return &gopathResolver{env: e} e.resolver = &gopathResolver{env: e}
return e.resolver
} }
return &moduleResolver{env: e} e.resolver = &ModuleResolver{env: e}
return e.resolver
} }
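The choice above is memoized on e.resolver. A minimal in-package sketch of inspecting which resolver was picked (describeResolver is a made-up helper; the package is internal to x/tools, so it cannot be called from outside it):

// describeResolver is a hypothetical helper for illustration only.
func describeResolver(env *ProcessEnv) string {
	switch env.GetResolver().(type) {
	case *goPackagesResolver:
		return "go/packages (ForceGoPackages was set)"
	case *ModuleResolver:
		return "module-aware (`go env GOMOD` reported a go.mod)"
	case *gopathResolver:
		return "GOPATH"
	default:
		return "unknown resolver"
	}
}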
func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config { func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
@ -573,7 +691,7 @@ func (e *ProcessEnv) invokeGo(args ...string) (*bytes.Buffer, error) {
cmd.Dir = e.WorkingDir cmd.Dir = e.WorkingDir
if e.Debug { if e.Debug {
defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) defer func(start time.Time) { e.Logf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
} }
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr) return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr)
@ -595,7 +713,7 @@ func cmdDebugStr(cmd *exec.Cmd) string {
func addStdlibCandidates(pass *pass, refs references) { func addStdlibCandidates(pass *pass, refs references) {
add := func(pkg string) { add := func(pkg string) {
pass.addCandidate( pass.addCandidate(
&importInfo{importPath: pkg}, &ImportInfo{ImportPath: pkg},
&packageInfo{name: path.Base(pkg), exports: stdlib[pkg]}) &packageInfo{name: path.Base(pkg), exports: stdlib[pkg]})
} }
for left := range refs { for left := range refs {
@ -613,20 +731,27 @@ func addStdlibCandidates(pass *pass, refs references) {
} }
} }
// A resolver does the build-system-specific parts of goimports. // A Resolver does the build-system-specific parts of goimports.
type resolver interface { type Resolver interface {
// loadPackageNames loads the package names in importPaths. // loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
// scan finds (at least) the packages satisfying refs. The returned slice is unordered. // scan finds (at least) the packages satisfying refs. The returned slice is unordered.
scan(refs references) ([]*pkg, error) scan(refs references) ([]*pkg, error)
// loadExports returns the set of exported symbols in the package at dir.
// It returns an error if the package name in dir does not match expectPackage.
// loadExports may be called concurrently.
loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error)
} }
// gopathResolver implements resolver for GOPATH and module workspaces using go/packages. // goPackagesResolver implements Resolver for GOPATH and module workspaces using go/packages.
type goPackagesResolver struct { type goPackagesResolver struct {
env *ProcessEnv env *ProcessEnv
} }
func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
if len(importPaths) == 0 {
return nil, nil
}
cfg := r.env.newPackagesConfig(packages.LoadFiles) cfg := r.env.newPackagesConfig(packages.LoadFiles)
pkgs, err := packages.Load(cfg, importPaths...) pkgs, err := packages.Load(cfg, importPaths...)
if err != nil { if err != nil {
@ -670,15 +795,35 @@ func (r *goPackagesResolver) scan(refs references) ([]*pkg, error) {
return scan, nil return scan, nil
} }
func (r *goPackagesResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
if pkg.goPackage == nil {
return nil, fmt.Errorf("goPackage not set")
}
exports := map[string]bool{}
fset := token.NewFileSet()
for _, fname := range pkg.goPackage.CompiledGoFiles {
f, err := parser.ParseFile(fset, fname, nil, 0)
if err != nil {
return nil, fmt.Errorf("parsing %s: %v", fname, err)
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
}
}
}
return exports, nil
}
func addExternalCandidates(pass *pass, refs references, filename string) error { func addExternalCandidates(pass *pass, refs references, filename string) error {
dirScan, err := pass.env.getResolver().scan(refs) dirScan, err := pass.env.GetResolver().scan(refs)
if err != nil { if err != nil {
return err return err
} }
// Search for imports matching potential package references. // Search for imports matching potential package references.
type result struct { type result struct {
imp *importInfo imp *ImportInfo
pkg *packageInfo pkg *packageInfo
} }
results := make(chan result, len(refs)) results := make(chan result, len(refs))
@ -698,7 +843,7 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
go func(pkgName string, symbols map[string]bool) { go func(pkgName string, symbols map[string]bool) {
defer wg.Done() defer wg.Done()
found, err := findImport(ctx, pass.env, dirScan, pkgName, symbols, filename) found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename)
if err != nil { if err != nil {
firstErrOnce.Do(func() { firstErrOnce.Do(func() {
@ -712,8 +857,8 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
return // No matching package. return // No matching package.
} }
imp := &importInfo{ imp := &ImportInfo{
importPath: found.importPathShort, ImportPath: found.importPathShort,
} }
pkg := &packageInfo{ pkg := &packageInfo{
@ -780,7 +925,7 @@ func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil return names, nil
} }
// importPathToNameGoPath finds out the actual package name, as declared in its .go files. // importPathToName finds out the actual package name, as declared in its .go files.
// If there's a problem, it returns "". // If there's a problem, it returns "".
func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName string) { func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName string) {
// Fast path for standard library without going to disk. // Fast path for standard library without going to disk.
@ -800,8 +945,8 @@ func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName s
} }
// packageDirToName is a faster version of build.Import if // packageDirToName is a faster version of build.Import if
// the only thing desired is the package name. It uses build.FindOnly // the only thing desired is the package name. Given a directory,
// to find the directory and then only parses one file in the package, // packageDirToName then only parses one file in the package,
// trusting that the files in the directory are consistent. // trusting that the files in the directory are consistent.
func packageDirToName(dir string) (packageName string, err error) { func packageDirToName(dir string) (packageName string, err error) {
d, err := os.Open(dir) d, err := os.Open(dir)
@ -922,6 +1067,10 @@ func (r *gopathResolver) scan(_ references) ([]*pkg, error) {
return result, nil return result, nil
} }
func (r *gopathResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
return loadExportsFromFiles(ctx, r.env, expectPackage, pkg.dir)
}
// VendorlessPath returns the devendorized version of the import path ipath. // VendorlessPath returns the devendorized version of the import path ipath.
// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". // For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
func VendorlessPath(ipath string) string { func VendorlessPath(ipath string) string {
@ -935,33 +1084,11 @@ func VendorlessPath(ipath string) string {
return ipath return ipath
} }
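The body of VendorlessPath is elided in this hunk; the following standalone sketch re-implements the documented behaviour for illustration only (it is not the vendored code itself):

package main

import (
	"fmt"
	"strings"
)

// vendorlessPath mirrors the documented behaviour of VendorlessPath:
// everything up to and including the last "/vendor/" (or a leading
// "vendor/") is stripped from the import path.
func vendorlessPath(ipath string) string {
	if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
		return ipath[i+len("/vendor/"):]
	}
	if strings.HasPrefix(ipath, "vendor/") {
		return ipath[len("vendor/"):]
	}
	return ipath
}

func main() {
	fmt.Println(vendorlessPath("foo/bar/vendor/a/b")) // a/b
	fmt.Println(vendorlessPath("vendor/a/b"))         // a/b
	fmt.Println(vendorlessPath("a/b"))                // a/b
}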
// loadExports returns the set of exported symbols in the package at dir. func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, expectPackage string, dir string) (map[string]bool, error) {
// It returns nil on error or if the package name in dir does not match expectPackage.
func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg *pkg) (map[string]bool, error) {
if env.Debug {
log.Printf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage)
}
if pkg.goPackage != nil {
exports := map[string]bool{}
fset := token.NewFileSet()
for _, fname := range pkg.goPackage.CompiledGoFiles {
f, err := parser.ParseFile(fset, fname, nil, 0)
if err != nil {
return nil, fmt.Errorf("parsing %s: %v", fname, err)
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
}
}
}
return exports, nil
}
exports := make(map[string]bool) exports := make(map[string]bool)
// Look for non-test, buildable .go files which could provide exports. // Look for non-test, buildable .go files which could provide exports.
all, err := ioutil.ReadDir(pkg.dir) all, err := ioutil.ReadDir(dir)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -971,7 +1098,7 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") { if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
continue continue
} }
match, err := env.buildContext().MatchFile(pkg.dir, fi.Name()) match, err := env.buildContext().MatchFile(dir, fi.Name())
if err != nil || !match { if err != nil || !match {
continue continue
} }
@ -979,7 +1106,7 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
} }
if len(files) == 0 { if len(files) == 0 {
return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", pkg.dir) return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
} }
fset := token.NewFileSet() fset := token.NewFileSet()
@ -990,7 +1117,7 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
default: default:
} }
fullFile := filepath.Join(pkg.dir, fi.Name()) fullFile := filepath.Join(dir, fi.Name())
f, err := parser.ParseFile(fset, fullFile, nil, 0) f, err := parser.ParseFile(fset, fullFile, nil, 0)
if err != nil { if err != nil {
return nil, fmt.Errorf("parsing %s: %v", fullFile, err) return nil, fmt.Errorf("parsing %s: %v", fullFile, err)
@ -1002,7 +1129,7 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
continue continue
} }
if pkgName != expectPackage { if pkgName != expectPackage {
return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", pkg.dir, expectPackage, pkgName) return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName)
} }
for name := range f.Scope.Objects { for name := range f.Scope.Objects {
if ast.IsExported(name) { if ast.IsExported(name) {
@ -1017,14 +1144,14 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
exportList = append(exportList, k) exportList = append(exportList, k)
} }
sort.Strings(exportList) sort.Strings(exportList)
log.Printf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", ")) env.Logf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", "))
} }
return exports, nil return exports, nil
} }
// findImport searches for a package with the given symbols. // findImport searches for a package with the given symbols.
// If no package is found, findImport returns (nil, nil).
func findImport(ctx context.Context, env *ProcessEnv, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
pkgDir, err := filepath.Abs(filename) pkgDir, err := filepath.Abs(filename)
if err != nil { if err != nil {
return nil, err return nil, err
@ -1034,7 +1161,12 @@ func findImport(ctx context.Context, env *ProcessEnv, dirScan []*pkg, pkgName st
// Find candidate packages, looking only at their directory names first. // Find candidate packages, looking only at their directory names first.
var candidates []pkgDistance var candidates []pkgDistance
for _, pkg := range dirScan { for _, pkg := range dirScan {
if pkg.dir != pkgDir && pkgIsCandidate(filename, pkgName, pkg) { if pkg.dir == pkgDir && pass.f.Name.Name == pkgName {
// The candidate is in the same directory and has the
// same package name. Don't try to import ourselves.
continue
}
if pkgIsCandidate(filename, pkgName, pkg) {
candidates = append(candidates, pkgDistance{ candidates = append(candidates, pkgDistance{
pkg: pkg, pkg: pkg,
distance: distance(pkgDir, pkg.dir), distance: distance(pkgDir, pkg.dir),
@ -1047,9 +1179,9 @@ func findImport(ctx context.Context, env *ProcessEnv, dirScan []*pkg, pkgName st
// ones. Note that this sorts by the de-vendored name, so // ones. Note that this sorts by the de-vendored name, so
// there's no "penalty" for vendoring. // there's no "penalty" for vendoring.
sort.Sort(byDistanceOrImportPathShortLength(candidates)) sort.Sort(byDistanceOrImportPathShortLength(candidates))
if env.Debug { if pass.env.Debug {
for i, c := range candidates { for i, c := range candidates {
log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir) pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
} }
} }
@ -1086,10 +1218,13 @@ func findImport(ctx context.Context, env *ProcessEnv, dirScan []*pkg, pkgName st
wg.Done() wg.Done()
}() }()
exports, err := loadExports(ctx, env, pkgName, c.pkg) if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
}
exports, err := pass.env.GetResolver().loadExports(ctx, pkgName, c.pkg)
if err != nil { if err != nil {
if env.Debug { if pass.env.Debug {
log.Printf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
} }
resc <- nil resc <- nil
return return

@ -13,12 +13,14 @@ import (
"bytes" "bytes"
"fmt" "fmt"
"go/ast" "go/ast"
"go/build"
"go/format" "go/format"
"go/parser" "go/parser"
"go/printer" "go/printer"
"go/token" "go/token"
"io" "io"
"io/ioutil" "io/ioutil"
"log"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
@ -41,13 +43,10 @@ type Options struct {
} }
// Process implements golang.org/x/tools/imports.Process with explicit context in env. // Process implements golang.org/x/tools/imports.Process with explicit context in env.
func Process(filename string, src []byte, opt *Options) ([]byte, error) { func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
if src == nil { src, opt, err = initialize(filename, src, opt)
b, err := ioutil.ReadFile(filename) if err != nil {
if err != nil { return nil, err
return nil, err
}
src = b
} }
fileSet := token.NewFileSet() fileSet := token.NewFileSet()
@ -61,7 +60,93 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
return nil, err return nil, err
} }
} }
return formatFile(fileSet, file, src, adjust, opt)
}
// FixImports returns a list of fixes to the imports that, when applied,
// will leave the imports in the same state as Process.
//
// Note that filename's directory influences which imports can be chosen,
// so it is important that filename be accurate.
func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
src, opt, err = initialize(filename, src, opt)
if err != nil {
return nil, err
}
fileSet := token.NewFileSet()
file, _, err := parse(fileSet, filename, src, opt)
if err != nil {
return nil, err
}
return getFixes(fileSet, file, filename, opt.Env)
}
// ApplyFixes applies all of the fixes to the file and formats it.
func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options) (formatted []byte, err error) {
src, opt, err = initialize(filename, src, opt)
if err != nil {
return nil, err
}
fileSet := token.NewFileSet()
file, adjust, err := parse(fileSet, filename, src, opt)
if err != nil {
return nil, err
}
// Apply the fixes to the file.
apply(fileSet, file, fixes)
return formatFile(fileSet, file, src, adjust, opt)
}
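FixImports and ApplyFixes split Process into a "compute fixes" step and an "apply and format" step. A minimal in-package sketch of chaining them (fixAndApply is a made-up helper; the package is internal to x/tools, so external callers cannot import it directly):

// fixAndApply behaves like Process, but exposes the intermediate fixes.
func fixAndApply(filename string, src []byte, opt *Options) ([]byte, error) {
	fixes, err := FixImports(filename, src, opt)
	if err != nil {
		return nil, err
	}
	// ApplyFixes re-parses src, applies the computed import fixes and
	// formats the result.
	return ApplyFixes(fixes, filename, src, opt)
}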
// GetAllCandidates returns all of the standard library candidate packages to import,
// sorted by import path.
func GetAllCandidates(filename string, opt *Options) (pkgs []ImportFix, err error) {
_, opt, err = initialize(filename, []byte{}, opt)
if err != nil {
return nil, err
}
return getAllCandidates(filename, opt.Env)
}
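A short in-package sketch of consuming GetAllCandidates (printStdlibCandidates and the filename are made up for illustration; fmt is assumed to be imported in the file that holds the sketch):

// printStdlibCandidates lists the stdlib packages offered as candidates.
func printStdlibCandidates() error {
	candidates, err := GetAllCandidates("example.go", nil) // nil Options: defaults from initialize
	if err != nil {
		return err
	}
	for _, fix := range candidates {
		fmt.Printf("%s -> %q\n", fix.IdentName, fix.StmtInfo.ImportPath)
	}
	return nil
}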
// initialize sets the values for opt and src.
// If they are provided, they are not changed. Otherwise opt is set to the
// default values and src is read from the file system.
func initialize(filename string, src []byte, opt *Options) ([]byte, *Options, error) {
// Use defaults if opt is nil.
if opt == nil {
opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
}
// Set the env if the user has not provided it.
if opt.Env == nil {
opt.Env = &ProcessEnv{
GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT,
}
}
// Set the logger if the user has not provided it.
if opt.Env.Logf == nil {
opt.Env.Logf = log.Printf
}
if src == nil {
b, err := ioutil.ReadFile(filename)
if err != nil {
return nil, nil, err
}
src = b
}
return src, opt, nil
}
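Because initialize fills in every default, callers only set the fields they care about. An in-package sketch of wiring the new Logf hook to a custom logger (processWithDebugLogger is hypothetical; go/build and log are assumed to be imported in that file):

// processWithDebugLogger runs Process with verbose resolver logging.
func processWithDebugLogger(filename string, src []byte) ([]byte, error) {
	env := &ProcessEnv{
		GOPATH: build.Default.GOPATH,
		GOROOT: build.Default.GOROOT,
		Debug:  true, // make invokeGo and the resolvers log their work
		Logf: func(format string, args ...interface{}) {
			log.Printf("goimports: "+format, args...)
		},
	}
	opt := &Options{Comments: true, TabIndent: true, TabWidth: 8, Env: env}
	return Process(filename, src, opt)
}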
func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
mergeImports(opt.Env, fileSet, file)
sortImports(opt.Env, fileSet, file) sortImports(opt.Env, fileSet, file)
imps := astutil.Imports(fileSet, file) imps := astutil.Imports(fileSet, file)
var spacesBefore []string // import paths we need spaces before var spacesBefore []string // import paths we need spaces before
@ -89,7 +174,7 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth} printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
var buf bytes.Buffer var buf bytes.Buffer
err = printConfig.Fprint(&buf, fileSet, file) err := printConfig.Fprint(&buf, fileSet, file)
if err != nil { if err != nil {
return nil, err return nil, err
} }

@ -2,9 +2,10 @@ package imports
import ( import (
"bytes" "bytes"
"context"
"encoding/json" "encoding/json"
"fmt"
"io/ioutil" "io/ioutil"
"log"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
@ -19,37 +20,41 @@ import (
"golang.org/x/tools/internal/module" "golang.org/x/tools/internal/module"
) )
// moduleResolver implements resolver for modules using the go command as little // ModuleResolver implements resolver for modules using the go command as little
// as feasible. // as feasible.
type moduleResolver struct { type ModuleResolver struct {
env *ProcessEnv env *ProcessEnv
moduleCacheDir string
initialized bool Initialized bool
main *moduleJSON Main *ModuleJSON
modsByModPath []*moduleJSON // All modules, ordered by # of path components in module Path... ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
modsByDir []*moduleJSON // ...or Dir. ModsByDir []*ModuleJSON // ...or Dir.
// moduleCacheInfo stores information about the module cache.
moduleCacheInfo *moduleCacheInfo
} }
type moduleJSON struct { type ModuleJSON struct {
Path string // module path Path string // module path
Version string // module version Version string // module version
Versions []string // available module versions (with -versions) Versions []string // available module versions (with -versions)
Replace *moduleJSON // replaced by this module Replace *ModuleJSON // replaced by this module
Time *time.Time // time version was created Time *time.Time // time version was created
Update *moduleJSON // available update, if any (with -u) Update *ModuleJSON // available update, if any (with -u)
Main bool // is this the main module? Main bool // is this the main module?
Indirect bool // is this module only an indirect dependency of main module? Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any GoMod string // path to go.mod file for this module, if any
Error *moduleErrorJSON // error loading module Error *ModuleErrorJSON // error loading module
} }
type moduleErrorJSON struct { type ModuleErrorJSON struct {
Err string // the error itself Err string // the error itself
} }
func (r *moduleResolver) init() error { func (r *ModuleResolver) init() error {
if r.initialized { if r.Initialized {
return nil return nil
} }
stdout, err := r.env.invokeGo("list", "-m", "-json", "...") stdout, err := r.env.invokeGo("list", "-m", "-json", "...")
@ -57,54 +62,71 @@ func (r *moduleResolver) init() error {
return err return err
} }
for dec := json.NewDecoder(stdout); dec.More(); { for dec := json.NewDecoder(stdout); dec.More(); {
mod := &moduleJSON{} mod := &ModuleJSON{}
if err := dec.Decode(mod); err != nil { if err := dec.Decode(mod); err != nil {
return err return err
} }
if mod.Dir == "" { if mod.Dir == "" {
if r.env.Debug { if r.env.Debug {
log.Printf("module %v has not been downloaded and will be ignored", mod.Path) r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path)
} }
// Can't do anything with a module that's not downloaded. // Can't do anything with a module that's not downloaded.
continue continue
} }
r.modsByModPath = append(r.modsByModPath, mod) r.ModsByModPath = append(r.ModsByModPath, mod)
r.modsByDir = append(r.modsByDir, mod) r.ModsByDir = append(r.ModsByDir, mod)
if mod.Main { if mod.Main {
r.main = mod r.Main = mod
} }
} }
sort.Slice(r.modsByModPath, func(i, j int) bool { sort.Slice(r.ModsByModPath, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.modsByModPath[x].Path, "/") return strings.Count(r.ModsByModPath[x].Path, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
sort.Slice(r.modsByDir, func(i, j int) bool { sort.Slice(r.ModsByDir, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.modsByDir[x].Dir, "/") return strings.Count(r.ModsByDir[x].Dir, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
r.initialized = true if r.moduleCacheInfo == nil {
r.moduleCacheInfo = &moduleCacheInfo{
modCacheDirInfo: make(map[string]*directoryPackageInfo),
}
}
r.Initialized = true
return nil return nil
} }
// findPackage returns the module and directory that contains the package at // findPackage returns the module and directory that contains the package at
// the given import path, or returns nil, "" if no module is in scope. // the given import path, or returns nil, "" if no module is in scope.
func (r *moduleResolver) findPackage(importPath string) (*moduleJSON, string) { func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
for _, m := range r.modsByModPath { for _, m := range r.ModsByModPath {
if !strings.HasPrefix(importPath, m.Path) { if !strings.HasPrefix(importPath, m.Path) {
continue continue
} }
pathInModule := importPath[len(m.Path):] pathInModule := importPath[len(m.Path):]
pkgDir := filepath.Join(m.Dir, pathInModule) pkgDir := filepath.Join(m.Dir, pathInModule)
if dirIsNestedModule(pkgDir, m) { if r.dirIsNestedModule(pkgDir, m) {
continue continue
} }
if info, ok := r.moduleCacheInfo.Load(pkgDir); ok {
if packageScanned, err := info.reachedStatus(directoryScanned); packageScanned {
if err != nil {
// There was some error with scanning this directory.
// It does not contain a valid package.
continue
}
return m, pkgDir
}
}
pkgFiles, err := ioutil.ReadDir(pkgDir) pkgFiles, err := ioutil.ReadDir(pkgDir)
if err != nil { if err != nil {
continue continue
@ -124,7 +146,7 @@ func (r *moduleResolver) findPackage(importPath string) (*moduleJSON, string) {
// findModuleByDir returns the module that contains dir, or nil if no such // findModuleByDir returns the module that contains dir, or nil if no such
// module is in scope. // module is in scope.
func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON { func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// This is quite tricky and may not be correct. dir could be: // This is quite tricky and may not be correct. dir could be:
// - a package in the main module. // - a package in the main module.
// - a replace target underneath the main module's directory. // - a replace target underneath the main module's directory.
@ -135,12 +157,12 @@ func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON {
// - in /vendor/ in -mod=vendor mode. // - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno. // - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets. // Rumor has it that replace targets cannot contain other replace targets.
for _, m := range r.modsByDir { for _, m := range r.ModsByDir {
if !strings.HasPrefix(dir, m.Dir) { if !strings.HasPrefix(dir, m.Dir) {
continue continue
} }
if dirIsNestedModule(dir, m) { if r.dirIsNestedModule(dir, m) {
continue continue
} }
@ -151,18 +173,28 @@ func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON {
// dirIsNestedModule reports if dir is contained in a nested module underneath // dirIsNestedModule reports if dir is contained in a nested module underneath
// mod, not actually in mod. // mod, not actually in mod.
func dirIsNestedModule(dir string, mod *moduleJSON) bool { func (r *ModuleResolver) dirIsNestedModule(dir string, mod *ModuleJSON) bool {
if !strings.HasPrefix(dir, mod.Dir) { if !strings.HasPrefix(dir, mod.Dir) {
return false return false
} }
mf := findModFile(dir) if r.dirInModuleCache(dir) {
// Nested modules in the module cache are pruned,
// so it cannot be a nested module.
return false
}
mf := r.findModFile(dir)
if mf == "" { if mf == "" {
return false return false
} }
return filepath.Dir(mf) != mod.Dir return filepath.Dir(mf) != mod.Dir
} }
func findModFile(dir string) string { func (r *ModuleResolver) findModFile(dir string) string {
if r.dirInModuleCache(dir) {
matches := modCacheRegexp.FindStringSubmatch(dir)
index := strings.Index(dir, matches[1]+"@"+matches[2])
return filepath.Join(dir[:index], matches[1]+"@"+matches[2], "go.mod")
}
for { for {
f := filepath.Join(dir, "go.mod") f := filepath.Join(dir, "go.mod")
info, err := os.Stat(f) info, err := os.Stat(f)
@ -177,7 +209,14 @@ func findModFile(dir string) string {
} }
} }
func (r *moduleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { func (r *ModuleResolver) dirInModuleCache(dir string) bool {
if r.moduleCacheDir == "" {
return false
}
return strings.HasPrefix(dir, r.moduleCacheDir)
}
func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return nil, err return nil, err
} }
@ -196,7 +235,7 @@ func (r *moduleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil return names, nil
} }
func (r *moduleResolver) scan(_ references) ([]*pkg, error) { func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return nil, err return nil, err
} }
@ -205,15 +244,16 @@ func (r *moduleResolver) scan(_ references) ([]*pkg, error) {
roots := []gopathwalk.Root{ roots := []gopathwalk.Root{
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT}, {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
} }
if r.main != nil { if r.Main != nil {
roots = append(roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule}) roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule})
} }
for _, p := range filepath.SplitList(r.env.GOPATH) { if r.moduleCacheDir == "" {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache}) r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
} }
roots = append(roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
// Walk replace targets, just in case they're not in any of the above. // Walk replace targets, just in case they're not in any of the above.
for _, mod := range r.modsByModPath { for _, mod := range r.ModsByModPath {
if mod.Replace != nil { if mod.Replace != nil {
roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther}) roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
} }
@ -223,90 +263,181 @@ func (r *moduleResolver) scan(_ references) ([]*pkg, error) {
dupCheck := make(map[string]bool) dupCheck := make(map[string]bool)
var mu sync.Mutex var mu sync.Mutex
gopathwalk.Walk(roots, func(root gopathwalk.Root, dir string) { // Packages in the module cache are immutable. If we have
// already seen this package on a previous scan of the module
// cache, return that result.
skip := func(root gopathwalk.Root, dir string) bool {
mu.Lock() mu.Lock()
defer mu.Unlock() defer mu.Unlock()
// If we have already processed this directory on this walk, skip it.
if _, dup := dupCheck[dir]; dup {
return true
}
// If we have saved this directory information, skip it.
info, ok := r.moduleCacheInfo.Load(dir)
if !ok {
return false
}
// This directory can be skipped as long as we have already scanned it.
// Packages with errors will continue to have errors, so there is no need
// to rescan them.
packageScanned, _ := info.reachedStatus(directoryScanned)
return packageScanned
}
add := func(root gopathwalk.Root, dir string) {
mu.Lock()
defer mu.Unlock()
if _, dup := dupCheck[dir]; dup { if _, dup := dupCheck[dir]; dup {
return return
} }
dupCheck[dir] = true info, err := r.scanDirForPackage(root, dir)
if err != nil {
subdir := ""
if dir != root.Path {
subdir = dir[len(root.Path)+len("/"):]
}
importPath := filepath.ToSlash(subdir)
if strings.HasPrefix(importPath, "vendor/") {
// Ignore vendor dirs. If -mod=vendor is on, then things
// should mostly just work, but when it's not vendor/
// is a mess. There's no easy way to tell if it's on.
// We can still find things in the mod cache and
// map them into /vendor when -mod=vendor is on.
return return
} }
switch root.Type { if root.Type == gopathwalk.RootModuleCache {
case gopathwalk.RootCurrentModule: // Save this package information in the cache and return.
importPath = path.Join(r.main.Path, filepath.ToSlash(subdir)) // Packages from the module cache are added after Walk.
case gopathwalk.RootModuleCache: r.moduleCacheInfo.Store(dir, info)
matches := modCacheRegexp.FindStringSubmatch(subdir) return
modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
if err != nil {
if r.env.Debug {
log.Printf("decoding module cache path %q: %v", subdir, err)
}
return
}
importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
case gopathwalk.RootGOROOT:
importPath = subdir
} }
// Check if the directory is underneath a module that's in scope. // Skip this package if there was an error loading package info.
if mod := r.findModuleByDir(dir); mod != nil { if info.err != nil {
// It is. If dir is the target of a replace directive, return
// our guessed import path is wrong. Use the real one.
if mod.Dir == dir {
importPath = mod.Path
} else {
dirInMod := dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
} else {
// The package is in an unknown module. Check that it's
// not obviously impossible to import.
var modFile string
switch root.Type {
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
modFile = filepath.Join(matches[1], "@", matches[2], "go.mod")
default:
modFile = findModFile(dir)
}
modBytes, err := ioutil.ReadFile(modFile)
if err == nil && !strings.HasPrefix(importPath, modulePath(modBytes)) {
// The module's declared path does not match
// its expected path. It probably needs a
// replace directive we don't have.
return
}
}
// We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible.
if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
dir = canonicalDir
} }
result = append(result, &pkg{ // The rest of this function canonicalizes the packages using the results
importPathShort: VendorlessPath(importPath), // of initializing the resolver from 'go list -m'.
dir: dir, res, err := r.canonicalize(info.nonCanonicalImportPath, info.dir, info.needsReplace)
}) if err != nil {
}, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true}) return
}
result = append(result, res)
}
gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
// Add the packages from the modules in the mod cache that were skipped.
for _, dir := range r.moduleCacheInfo.Keys() {
info, ok := r.moduleCacheInfo.Load(dir)
if !ok {
continue
}
// Skip this directory if we were not able to get the package information successfully.
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
continue
}
res, err := r.canonicalize(info.nonCanonicalImportPath, info.dir, info.needsReplace)
if err != nil {
continue
}
result = append(result, res)
}
return result, nil return result, nil
} }
// canonicalize gets the result of canonicalizing the packages using the results
// of initializing the resolver from 'go list -m'.
func (r *ModuleResolver) canonicalize(importPath, dir string, needsReplace bool) (res *pkg, err error) {
// Check if the directory is underneath a module that's in scope.
if mod := r.findModuleByDir(dir); mod != nil {
// It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one.
if mod.Dir == dir {
importPath = mod.Path
} else {
dirInMod := dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
} else if needsReplace {
return nil, fmt.Errorf("needed this package to be in scope: %s", dir)
}
// We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible.
if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
dir = canonicalDir
}
return &pkg{
importPathShort: VendorlessPath(importPath),
dir: dir,
}, nil
}
func (r *ModuleResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
if err := r.init(); err != nil {
return nil, err
}
return loadExportsFromFiles(ctx, r.env, expectPackage, pkg.dir)
}
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) (directoryPackageInfo, error) {
subdir := ""
if dir != root.Path {
subdir = dir[len(root.Path)+len("/"):]
}
importPath := filepath.ToSlash(subdir)
if strings.HasPrefix(importPath, "vendor/") {
// Ignore vendor dirs. If -mod=vendor is on, then things
// should mostly just work, but when it's not vendor/
// is a mess. There's no easy way to tell if it's on.
// We can still find things in the mod cache and
// map them into /vendor when -mod=vendor is on.
return directoryPackageInfo{}, fmt.Errorf("vendor directory")
}
switch root.Type {
case gopathwalk.RootCurrentModule:
importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 {
return directoryPackageInfo{
status: directoryScanned,
err: fmt.Errorf("invalid module cache path: %v", subdir),
}, nil
}
modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
if err != nil {
if r.env.Debug {
r.env.Logf("decoding module cache path %q: %v", subdir, err)
}
return directoryPackageInfo{
status: directoryScanned,
err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
}, nil
}
importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
case gopathwalk.RootGOROOT:
importPath = subdir
}
// Check that this package is not obviously impossible to import.
modFile := r.findModFile(dir)
var needsReplace bool
modBytes, err := ioutil.ReadFile(modFile)
if err == nil && !strings.HasPrefix(importPath, modulePath(modBytes)) {
// The module's declared path does not match
// its expected path. It probably needs a
// replace directive we don't have.
needsReplace = true
}
return directoryPackageInfo{
status: directoryScanned,
dir: dir,
nonCanonicalImportPath: importPath,
needsReplace: needsReplace,
}, nil
}
// modCacheRegexp splits a path in a module cache into module, module version, and package. // modCacheRegexp splits a path in a module cache into module, module version, and package.
var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
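For clarity, a standalone sketch of how this pattern splits a path relative to the module cache root; the module path and version are made up:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as modCacheRegexp above, reproduced so the sketch runs on its own.
var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)

func main() {
	m := modCacheRegexp.FindStringSubmatch("github.com/foo/bar@v1.2.3/baz/quux")
	fmt.Println(m[1]) // github.com/foo/bar  (module path)
	fmt.Println(m[2]) // v1.2.3              (module version)
	fmt.Println(m[3]) // /baz/quux           (package subdirectory within the module)
}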

vendor/golang.org/x/tools/internal/imports/mod_cache.go generated vendored (new file, +121 lines)

@ -0,0 +1,121 @@
package imports
import (
"sync"
)
// ModuleResolver implements Resolver for modules using the go command as little
// as feasible.
//
// To find packages to import, the resolver needs to know about all of
// the packages that could be imported. This includes packages in
// (1) the current module, (2) replace targets, and (3) the module cache.
// Packages in (1) and (2) may change over time, as the client may edit
// the current module and locally replaced modules. The module cache
// (which includes all of the packages in (3)) can only ever be added to.
//
// The resolver can thus save state about packages in the module cache
// and guarantee that this will not change over time. To obtain information
// about new modules added to the module cache, the module cache should be
// rescanned.
//
// It is OK to serve information about modules that have been deleted,
// as they do still exist.
// TODO(suzmue): can we share information with the caller about
// what module needs to be downloaded to import this package?
type directoryPackageStatus int
const (
_ directoryPackageStatus = iota
directoryScanned
)
type directoryPackageInfo struct {
// status indicates the extent to which this struct has been filled in.
status directoryPackageStatus
// err is non-nil when there was an error trying to reach status.
err error
// Set when status > directoryScanned.
// dir is the absolute directory of this package.
dir string
// nonCanonicalImportPath is the expected import path for this package.
// This may not be an import path that can be used to import this package.
nonCanonicalImportPath string
// needsReplace is true if the nonCanonicalImportPath does not match the
// module's declared path, making it impossible to import without a
// replace directive.
needsReplace bool
}
// reachedStatus returns true when info has a status of at least target, along with
// any error associated with the attempt to reach target.
func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) {
if info.err == nil {
return info.status >= target, nil
}
if info.status == target {
return true, info.err
}
return true, nil
}
// moduleCacheInfo is a concurrency safe map for storing information about
// the directories in the module cache.
//
// The information in this cache is built incrementally. Entries are initialized in scan.
// No new keys should be added in any other functions, as all directories containing
// packages are identified in scan.
//
// Other functions, including loadExports and findPackage, may update entries in this cache
// as they discover new things about the directory.
//
// We do not need to protect the data in the cache for multiple writes, because it only stores
// module cache directories, which do not change. If two competing stores take place, there will be
// one store that wins. Although this could result in a loss of information, it will not be incorrect
// and may just result in recomputing the same result later.
//
// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc)
type moduleCacheInfo struct {
mu sync.Mutex
// modCacheDirInfo stores information about packages in
// module cache directories. Keyed by absolute directory.
modCacheDirInfo map[string]*directoryPackageInfo
}
// Store stores the package info for dir.
func (d *moduleCacheInfo) Store(dir string, info directoryPackageInfo) {
d.mu.Lock()
defer d.mu.Unlock()
d.modCacheDirInfo[dir] = &directoryPackageInfo{
status: info.status,
err: info.err,
dir: info.dir,
nonCanonicalImportPath: info.nonCanonicalImportPath,
needsReplace: info.needsReplace,
}
}
// Load returns a copy of the directoryPackageInfo for absolute directory dir.
func (d *moduleCacheInfo) Load(dir string) (directoryPackageInfo, bool) {
d.mu.Lock()
defer d.mu.Unlock()
info, ok := d.modCacheDirInfo[dir]
if !ok {
return directoryPackageInfo{}, false
}
return *info, true
}
// Keys returns the keys currently present in d.
func (d *moduleCacheInfo) Keys() (keys []string) {
d.mu.Lock()
defer d.mu.Unlock()
for key := range d.modCacheDirInfo {
keys = append(keys, key)
}
return keys
}
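An in-package sketch of the intended Store/Load round trip (cacheRoundTrip, the directory and the import path are made up):

func cacheRoundTrip() {
	cache := &moduleCacheInfo{modCacheDirInfo: make(map[string]*directoryPackageInfo)}
	dir := "/home/user/go/pkg/mod/github.com/foo/bar@v1.2.3"
	cache.Store(dir, directoryPackageInfo{
		status:                 directoryScanned,
		dir:                    dir,
		nonCanonicalImportPath: "github.com/foo/bar",
	})
	if info, ok := cache.Load(dir); ok {
		if scanned, err := info.reachedStatus(directoryScanned); scanned && err == nil {
			// The directory was already scanned successfully; its cached
			// result can be reused instead of re-reading the directory.
		}
	}
}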

@ -58,6 +58,53 @@ func sortImports(env *ProcessEnv, fset *token.FileSet, f *ast.File) {
} }
} }
// mergeImports merges all the import declarations into the first one.
// Taken from golang.org/x/tools/go/ast/astutil.
func mergeImports(env *ProcessEnv, fset *token.FileSet, f *ast.File) {
if len(f.Decls) <= 1 {
return
}
// Merge all the import declarations into the first one.
var first *ast.GenDecl
for i := 0; i < len(f.Decls); i++ {
decl := f.Decls[i]
gen, ok := decl.(*ast.GenDecl)
if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
continue
}
if first == nil {
first = gen
continue // Don't touch the first one.
}
// We now know there is more than one package in this import
// declaration. Ensure that it ends up parenthesized.
first.Lparen = first.Pos()
// Move the imports of the other import declaration to the first one.
for _, spec := range gen.Specs {
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
first.Specs = append(first.Specs, spec)
}
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
i--
}
}
// declImports reports whether gen contains an import of path.
// Taken from golang.org/x/tools/go/ast/astutil.
func declImports(gen *ast.GenDecl, path string) bool {
if gen.Tok != token.IMPORT {
return false
}
for _, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
if importPath(impspec) == path {
return true
}
}
return false
}
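An in-package sketch of the merge in action (mergeExample is hypothetical; it assumes go/parser, go/printer, go/token and os are imported in that file, and passes a nil env since the merge body above does not use it):

func mergeExample() error {
	src := `package p

import "fmt"
import "strings"

var _ = fmt.Sprint(strings.TrimSpace(""))
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		return err
	}
	// After this call the two separate import declarations are folded
	// into a single parenthesized declaration.
	mergeImports(nil, fset, f)
	return printer.Fprint(os.Stdout, fset, f)
}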
func importPath(s ast.Spec) string { func importPath(s ast.Spec) string {
t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value) t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
if err == nil { if err == nil {

vendor/modules.txt vendored (2 lines changed)

@ -196,7 +196,7 @@ golang.org/x/sys/windows
golang.org/x/text/width golang.org/x/text/width
golang.org/x/text/transform golang.org/x/text/transform
golang.org/x/text/unicode/norm golang.org/x/text/unicode/norm
# golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd # golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd => github.com/golangci/tools v0.0.0-20190713050349-979bdb7f8cc8
golang.org/x/tools/go/analysis golang.org/x/tools/go/analysis
golang.org/x/tools/go/analysis/passes/asmdecl golang.org/x/tools/go/analysis/passes/asmdecl
golang.org/x/tools/go/analysis/passes/assign golang.org/x/tools/go/analysis/passes/assign