Update to latest x/tools (#930)

* Update to latest x/tools (2020/01/19).

Fixes #893

* Initialize vet analyzers as unexported global vars.

Fixes #915

* Support testing Go 1.14beta1.

* ci: reset go.mod and go.sum before the generated-files diff check

* Update to latest x/tools (2020/02/04)

Co-authored-by: Aleksandr Razumov <ar@gortc.io>
Trevor Pounds 2020-02-04 17:11:14 -05:00 committed by GitHub
parent bec09851bf
commit c46c1b3224
69 changed files with 12528 additions and 11786 deletions
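The "unexported global vars" bullet above refers to the govet linter change further down in this diff, where the analyzer lists stop being rebuilt on every call. A minimal before/after sketch (analyzer list abbreviated, illustrative only):

    // Before: a fresh slice was built on every call.
    func getDefaultAnalyzers() []*analysis.Analyzer {
        return []*analysis.Analyzer{
            asmdecl.Analyzer,
            assign.Analyzer,
            // ...
        }
    }

    // After: the lists are initialized once as unexported package-level vars.
    var (
        allAnalyzers = []*analysis.Analyzer{
            asmdecl.Analyzer,
            assign.Analyzer,
            // ...
        }
        defaultAnalyzers = []*analysis.Analyzer{
            asmdecl.Analyzer,
            assign.Analyzer,
            // ...
        }
    )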

View File

@ -2,6 +2,7 @@ language: go
go:
- 1.12.x
- 1.13.x
- 1.14beta1
before_script:
- nvm install 12.14.0

View File

@ -58,13 +58,13 @@ maintainer-clean: clean
check_generated:
$(MAKE) --always-make generate
git checkout -- vendor/modules.txt # can differ between go1.12 and go1.13
git checkout -- vendor/modules.txt go.mod go.sum # can differ between go1.12 and go1.13
git diff --exit-code # check no changes
.PHONY: check_generated
fast_check_generated:
$(MAKE) --always-make fast_generate
git checkout -- vendor/modules.txt # can differ between go1.12 and go1.13
git checkout -- vendor/modules.txt go.mod go.sum # can differ between go1.12 and go1.13
git diff --exit-code # check no changes
.PHONY: fast_check_generated

6
go.mod
View File

@ -44,14 +44,10 @@ require (
github.com/ultraware/whitespace v0.0.4
github.com/uudashr/gocognit v1.0.1
github.com/valyala/quicktemplate v1.2.0
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709
golang.org/x/tools v0.0.0-20200204192400-7124308813f3
gopkg.in/yaml.v2 v2.2.7
honnef.co/go/tools v0.0.1-2019.2.3
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f
)
// https://github.com/golang/tools/pull/156
// https://github.com/golang/tools/pull/160
replace golang.org/x/tools => github.com/golangci/tools v0.0.0-20190915081525-6aa350649b1c

36
go.sum
View File

@ -107,8 +107,6 @@ github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSS
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/tools v0.0.0-20190915081525-6aa350649b1c h1:JF7g2hV+1F/DwJ3CrSxOc9ZNVY6N8zYt5mB0Qve//fU=
github.com/golangci/tools v0.0.0-20190915081525-6aa350649b1c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@ -130,10 +128,10 @@ github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3 h1:jNYPNLe3d8smommaoQlK7LOA5ESyUJJ+Wf79ZtA7Vp4=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=
github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3 h1:jNYPNLe3d8smommaoQlK7LOA5ESyUJJ+Wf79ZtA7Vp4=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5 h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=
github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
@ -286,14 +284,18 @@ golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnf
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
@ -321,10 +323,36 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113232020-e2727e816f5a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709 h1:AfG1EmoRkFK24HWWLxSrRKNg2G+oA3JVOG8GJsHWypQ=
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200119215504-eb0d8dd85bcc h1:ZA7KFRdqWZkBr0/YbHm1h08vDJ5gQdjVG/8L153z5c4=
golang.org/x/tools v0.0.0-20200119215504-eb0d8dd85bcc/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204192400-7124308813f3 h1:Ms82wn6YK4ZycO6Bxyh0kxX3gFFVGo79CCuc52xgcys=
golang.org/x/tools v0.0.0-20200204192400-7124308813f3/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=

View File

@ -2,35 +2,34 @@ package golinters
import (
"golang.org/x/tools/go/analysis"
// analysis plug-ins
"golang.org/x/tools/go/analysis/passes/asmdecl"
"golang.org/x/tools/go/analysis/passes/assign"
"golang.org/x/tools/go/analysis/passes/atomic"
"golang.org/x/tools/go/analysis/passes/atomicalign"
"golang.org/x/tools/go/analysis/passes/bools"
"golang.org/x/tools/go/analysis/passes/buildssa"
_ "golang.org/x/tools/go/analysis/passes/buildssa" // unused, internal analyzer
"golang.org/x/tools/go/analysis/passes/buildtag"
"golang.org/x/tools/go/analysis/passes/cgocall"
"golang.org/x/tools/go/analysis/passes/composite"
"golang.org/x/tools/go/analysis/passes/copylock"
"golang.org/x/tools/go/analysis/passes/ctrlflow"
_ "golang.org/x/tools/go/analysis/passes/ctrlflow" // unused, internal analyzer
"golang.org/x/tools/go/analysis/passes/deepequalerrors"
"golang.org/x/tools/go/analysis/passes/errorsas"
"golang.org/x/tools/go/analysis/passes/findcall"
"golang.org/x/tools/go/analysis/passes/httpresponse"
"golang.org/x/tools/go/analysis/passes/inspect"
_ "golang.org/x/tools/go/analysis/passes/inspect" // unused internal analyzer
"golang.org/x/tools/go/analysis/passes/loopclosure"
"golang.org/x/tools/go/analysis/passes/lostcancel"
"golang.org/x/tools/go/analysis/passes/nilfunc"
"golang.org/x/tools/go/analysis/passes/nilness"
"golang.org/x/tools/go/analysis/passes/pkgfact"
_ "golang.org/x/tools/go/analysis/passes/pkgfact" // unused, internal analyzer
"golang.org/x/tools/go/analysis/passes/printf"
"golang.org/x/tools/go/analysis/passes/shadow"
"golang.org/x/tools/go/analysis/passes/shift"
"golang.org/x/tools/go/analysis/passes/sortslice"
"golang.org/x/tools/go/analysis/passes/stdmethods"
"golang.org/x/tools/go/analysis/passes/structtag"
"golang.org/x/tools/go/analysis/passes/testinggoroutine"
"golang.org/x/tools/go/analysis/passes/tests"
"golang.org/x/tools/go/analysis/passes/unmarshal"
"golang.org/x/tools/go/analysis/passes/unreachable"
@ -41,53 +40,40 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
)
func getAllAnalyzers() []*analysis.Analyzer {
var analyzers []*analysis.Analyzer
for _, a := range []*analysis.Analyzer{
var (
allAnalyzers = []*analysis.Analyzer{
asmdecl.Analyzer,
assign.Analyzer,
atomic.Analyzer,
atomicalign.Analyzer,
bools.Analyzer,
buildssa.Analyzer,
buildtag.Analyzer,
cgocall.Analyzer,
composite.Analyzer,
copylock.Analyzer,
ctrlflow.Analyzer,
deepequalerrors.Analyzer,
errorsas.Analyzer,
findcall.Analyzer,
httpresponse.Analyzer,
inspect.Analyzer,
loopclosure.Analyzer,
lostcancel.Analyzer,
nilfunc.Analyzer,
nilness.Analyzer,
pkgfact.Analyzer,
printf.Analyzer,
shadow.Analyzer,
shift.Analyzer,
sortslice.Analyzer,
stdmethods.Analyzer,
structtag.Analyzer,
testinggoroutine.Analyzer,
tests.Analyzer,
unmarshal.Analyzer,
unreachable.Analyzer,
unsafeptr.Analyzer,
unusedresult.Analyzer,
} {
if a.ResultType != nil {
// Skipping internal analyzers.
continue
}
analyzers = append(analyzers, a)
}
return analyzers
}
func getDefaultAnalyzers() []*analysis.Analyzer {
return []*analysis.Analyzer{
defaultAnalyzers = []*analysis.Analyzer{
asmdecl.Analyzer,
assign.Analyzer,
atomic.Analyzer,
@ -111,7 +97,7 @@ func getDefaultAnalyzers() []*analysis.Analyzer {
unsafeptr.Analyzer,
unusedresult.Analyzer,
}
}
)
func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers []*analysis.Analyzer) bool {
if cfg.EnableAll {
@ -141,16 +127,16 @@ func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers
func analyzersFromConfig(cfg *config.GovetSettings) []*analysis.Analyzer {
if cfg == nil {
return getDefaultAnalyzers()
return defaultAnalyzers
}
if cfg.CheckShadowing {
// Keeping for backward compatibility.
cfg.Enable = append(cfg.Enable, shadow.Analyzer.Name)
}
var enabledAnalyzers []*analysis.Analyzer
defaultAnalyzers := getDefaultAnalyzers()
for _, a := range getAllAnalyzers() {
for _, a := range allAnalyzers {
if isAnalyzerEnabled(a.Name, cfg, defaultAnalyzers) {
enabledAnalyzers = append(enabledAnalyzers, a)
}

View File

@ -18,8 +18,7 @@ import (
func TestGovet(t *testing.T) {
// Checking that every default analyzer is in "all analyzers" list.
allAnalyzers := getAllAnalyzers()
checkList := append(getDefaultAnalyzers(),
checkList := append(defaultAnalyzers,
shadow.Analyzer, // special case, used in analyzersFromConfig
)
for _, defaultAnalyzer := range checkList {
@ -45,12 +44,12 @@ func (p sortedAnalyzers) Swap(i, j int) { p[i].Name, p[j].Name = p[j].Name,
func TestGovetSorted(t *testing.T) {
// Keeping analyzers sorted so their order match the import order.
t.Run("All", func(t *testing.T) {
if !sort.IsSorted(sortedAnalyzers(getAllAnalyzers())) {
if !sort.IsSorted(sortedAnalyzers(allAnalyzers)) {
t.Error("please keep all analyzers list sorted by name")
}
})
t.Run("Default", func(t *testing.T) {
if !sort.IsSorted(sortedAnalyzers(getDefaultAnalyzers())) {
if !sort.IsSorted(sortedAnalyzers(defaultAnalyzers)) {
t.Error("please keep default analyzers list sorted by name")
}
})

View File

@ -163,13 +163,19 @@ func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) {
pass.Report(Diagnostic{Pos: pos, Message: msg})
}
// reportNodef is a helper function that reports a Diagnostic using the
// range denoted by the AST node.
//
// WARNING: This is an experimental API and may change in the future.
func (pass *Pass) reportNodef(node ast.Node, format string, args ...interface{}) {
// The Range interface provides a range. It's equivalent to and satisfied by
// ast.Node.
type Range interface {
Pos() token.Pos // position of first character belonging to the node
End() token.Pos // position of first character immediately after the node
}
// ReportRangef is a helper function that reports a Diagnostic using the
// range provided. ast.Node values can be passed in as the range because
// they satisfy the Range interface.
func (pass *Pass) ReportRangef(rng Range, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
pass.Report(Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
pass.Report(Diagnostic{Pos: rng.Pos(), End: rng.End(), Message: msg})
}
func (pass *Pass) String() string {
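The hunk above replaces the unexported, experimental reportNodef helper with the exported ReportRangef, which accepts anything implementing the new Range interface; ast.Node already satisfies it. The call-site migration repeated throughout the rest of this diff looks like this (example taken from the atomic pass change below):

    // Before: report at a single position.
    pass.Reportf(left.Pos(), "direct assignment to atomic value")

    // After: report over the expression's full source range; any ast.Node is a Range.
    pass.ReportRangef(left, "direct assignment to atomic value")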

View File

@ -22,6 +22,19 @@ type Diagnostic struct {
// Diagnostics should not contain SuggestedFixes that overlap.
// Experimental: This API is experimental and may change in the future.
SuggestedFixes []SuggestedFix // optional
// Experimental: This API is experimental and may change in the future.
Related []RelatedInformation // optional
}
// RelatedInformation contains information related to a diagnostic.
// For example, a diagnostic that flags duplicated declarations of a
// variable may include one RelatedInformation per existing
// declaration.
type RelatedInformation struct {
Pos token.Pos
End token.Pos
Message string
}
// A SuggestedFix is a code change associated with a Diagnostic that a user can choose
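The new Related field lets a diagnostic point at secondary locations. A minimal sketch of client code using it; the identifiers dup, prev, and name are hypothetical and not taken from this diff:

    pass.Report(analysis.Diagnostic{
        Pos:     dup.Pos(),
        End:     dup.End(),
        Message: fmt.Sprintf("duplicate declaration of %s", name),
        Related: []analysis.RelatedInformation{{
            Pos:     prev.Pos(),
            End:     prev.End(),
            Message: "previous declaration here",
        }},
    })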

View File

@ -1,8 +1,9 @@
/*
The analysis package defines the interface between a modular static
Package analysis defines the interface between a modular static
analysis and an analysis driver program.
Background
A static analysis is a function that inspects a package of Go code and
@ -51,7 +52,6 @@ the go/analysis/passes/ subdirectory:
...
}
An analysis driver is a program such as vet that runs a set of
analyses and prints the diagnostics that they report.
The driver program must import the list of Analyzers it needs.
@ -70,39 +70,6 @@ A driver may use the name, flags, and documentation to provide on-line
help that describes the analyses it performs.
The doc comment contains a brief one-line summary,
optionally followed by paragraphs of explanation.
The vet command, shown below, is an example of a driver that runs
multiple analyzers. It is based on the multichecker package
(see the "Standalone commands" section for details).
$ go build golang.org/x/tools/go/analysis/cmd/vet
$ ./vet help
vet is a tool for static analysis of Go programs.
Usage: vet [-flag] [package]
Registered analyzers:
asmdecl report mismatches between assembly files and Go declarations
assign check for useless assignments
atomic check for common mistakes using the sync/atomic package
...
unusedresult check for unused results of calls to some functions
$ ./vet help unusedresult
unusedresult: check for unused results of calls to some functions
Analyzer flags:
-unusedresult.funcs value
comma-separated list of functions whose results must be used (default Error,String)
-unusedresult.stringmethods value
comma-separated list of names of methods of type func() string whose results must be used
Some functions like fmt.Errorf return a result and have no side effects,
so it is always a mistake to discard the result. This analyzer reports
calls to certain functions in which the result of the call is ignored.
The set of functions may be controlled using flags.
The Analyzer type has more fields besides those shown above:
@ -330,7 +297,5 @@ entirety as:
A tool that provides multiple analyzers can use multichecker in a
similar way, giving it the list of Analyzers.
*/
package analysis

View File

@ -22,9 +22,11 @@ import (
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
)
const Doc = "report mismatches between assembly files and Go declarations"
var Analyzer = &analysis.Analyzer{
Name: "asmdecl",
Doc: "report mismatches between assembly files and Go declarations",
Doc: Doc,
Run: run,
}
@ -79,13 +81,13 @@ var (
asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true}
asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true}
asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false}
asmArchAmd64p32 = asmArch{name: "amd64p32", bigEndian: false, stack: "SP", lr: false}
asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true}
asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true}
asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true}
asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true}
asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
@ -94,13 +96,13 @@ var (
&asmArchArm,
&asmArchArm64,
&asmArchAmd64,
&asmArchAmd64p32,
&asmArchMips,
&asmArchMipsLE,
&asmArchMips64,
&asmArchMips64LE,
&asmArchPpc64,
&asmArchPpc64LE,
&asmArchRISCV64,
&asmArchS390X,
&asmArchWasm,
}
@ -635,9 +637,6 @@ func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr stri
case "amd64.LEAQ":
dst = 8
addr = true
case "amd64p32.LEAL":
dst = 4
addr = true
default:
switch fn.arch.name {
case "386", "amd64":
@ -666,6 +665,10 @@ func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr stri
src = 4
break
}
if op == "MOVO" || op == "MOVOU" {
src = 16
break
}
if strings.HasPrefix(op, "SET") {
// SETEQ, etc
src = 1
@ -741,6 +744,11 @@ func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr stri
vk = v.inner[0].kind
vs = v.inner[0].size
vt = v.inner[0].typ
case asmComplex:
// Allow a single instruction to load both parts of a complex.
if int(kind) == vs {
kind = asmComplex
}
}
if addr {
vk = asmKind(archDef.ptrSize)

View File

@ -63,7 +63,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
pass.Report(analysis.Diagnostic{
Pos: stmt.Pos(), Message: fmt.Sprintf("self-assignment of %s to %s", re, le),
SuggestedFixes: []analysis.SuggestedFix{
{Message: "Remove", TextEdits: []analysis.TextEdit{{stmt.Pos(), stmt.End(), []byte{}}}},
{Message: "Remove", TextEdits: []analysis.TextEdit{
{Pos: stmt.Pos(), End: stmt.End(), NewText: []byte{}},
}},
},
})
}

View File

@ -91,6 +91,6 @@ func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.Call
}
if broken {
pass.Reportf(left.Pos(), "direct assignment to atomic value")
pass.ReportRangef(left, "direct assignment to atomic value")
}
}

View File

@ -16,12 +16,15 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
"golang.org/x/tools/go/ast/inspector"
)
const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions"
var Analyzer = &analysis.Analyzer{
Name: "atomicalign",
Doc: "check for non-64-bits-aligned arguments to sync/atomic functions",
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
@ -30,7 +33,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 {
return nil, nil // 64-bit platform
}
if imports(pass.Pkg, "sync/atomic") == nil {
if !analysisutil.Imports(pass.Pkg, "sync/atomic") {
return nil, nil // doesn't directly import sync/atomic
}
@ -110,17 +113,5 @@ func check64BitAlignment(pass *analysis.Pass, funcName string, arg ast.Expr) {
return // 64-bit aligned
}
pass.Reportf(arg.Pos(), "address of non 64-bit aligned field .%s passed to atomic.%s", tvar.Name(), funcName)
}
// imports reports whether pkg has path among its direct imports.
// It returns the imported package if so, or nil if not.
// copied from passes/cgocall.
func imports(pkg *types.Package, path string) *types.Package {
for _, imp := range pkg.Imports() {
if imp.Path() == path {
return imp
}
}
return nil
pass.ReportRangef(arg, "address of non 64-bit aligned field .%s passed to atomic.%s", tvar.Name(), funcName)
}

View File

@ -17,9 +17,11 @@ import (
"golang.org/x/tools/go/ast/inspector"
)
const Doc = "check for common mistakes involving boolean operators"
var Analyzer = &analysis.Analyzer{
Name: "bools",
Doc: "check for common mistakes involving boolean operators",
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
@ -100,7 +102,7 @@ func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
for _, e := range exprs {
efmt := analysisutil.Format(pass.Fset, e)
if seen[efmt] {
pass.Reportf(e.Pos(), "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
pass.ReportRangef(e, "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
} else {
seen[efmt] = true
}
@ -147,7 +149,7 @@ func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) {
if prev, found := seen[xfmt]; found {
// checkRedundant handles the case in which efmt == prev.
if efmt != prev {
pass.Reportf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
pass.ReportRangef(e, "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
}
} else {
seen[xfmt] = efmt

View File

@ -16,9 +16,11 @@ import (
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
)
const Doc = "check that +build tags are well-formed and correctly located"
var Analyzer = &analysis.Analyzer{
Name: "buildtag",
Doc: "check that +build tags are well-formed and correctly located",
Doc: Doc,
Run: runBuildTag,
}

View File

@ -23,7 +23,7 @@ import (
const debug = false
const doc = `detect some violations of the cgo pointer passing rules
const Doc = `detect some violations of the cgo pointer passing rules
Check for invalid cgo pointer passing.
This looks for code that uses cgo to call C code passing values
@ -34,13 +34,13 @@ or slice to C, either directly, or via a pointer, array, or struct.`
var Analyzer = &analysis.Analyzer{
Name: "cgocall",
Doc: doc,
Doc: Doc,
RunDespiteErrors: true,
Run: run,
}
func run(pass *analysis.Pass) (interface{}, error) {
if imports(pass.Pkg, "runtime/cgo") == nil {
if !analysisutil.Imports(pass.Pkg, "runtime/cgo") {
return nil, nil // doesn't use cgo
}
@ -374,15 +374,3 @@ func imported(info *types.Info, spec *ast.ImportSpec) *types.Package {
}
return obj.(*types.PkgName).Imported()
}
// imports reports whether pkg has path among its direct imports.
// It returns the imported package if so, or nil if not.
// TODO(adonovan): move to analysisutil.
func imports(pkg *types.Package, path string) *types.Package {
for _, imp := range pkg.Imports() {
if imp.Path() == path {
return imp
}
}
return nil
}

View File

@ -97,7 +97,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
return
}
pass.Reportf(cl.Pos(), "%s composite literal uses unkeyed fields", typeName)
pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName)
})
return nil, nil
}

View File

@ -74,7 +74,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) {
for i, x := range as.Rhs {
if path := lockPathRhs(pass, x); path != nil {
pass.Reportf(x.Pos(), "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path)
pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path)
}
}
}
@ -89,7 +89,7 @@ func checkCopyLocksGenDecl(pass *analysis.Pass, gd *ast.GenDecl) {
valueSpec := spec.(*ast.ValueSpec)
for i, x := range valueSpec.Values {
if path := lockPathRhs(pass, x); path != nil {
pass.Reportf(x.Pos(), "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path)
pass.ReportRangef(x, "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path)
}
}
}
@ -102,7 +102,7 @@ func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) {
x = node.Value
}
if path := lockPathRhs(pass, x); path != nil {
pass.Reportf(x.Pos(), "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path)
pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path)
}
}
}
@ -111,7 +111,7 @@ func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) {
func checkCopyLocksReturnStmt(pass *analysis.Pass, rs *ast.ReturnStmt) {
for _, x := range rs.Results {
if path := lockPathRhs(pass, x); path != nil {
pass.Reportf(x.Pos(), "return copies lock value: %v", path)
pass.ReportRangef(x, "return copies lock value: %v", path)
}
}
}
@ -133,7 +133,7 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) {
}
for _, x := range ce.Args {
if path := lockPathRhs(pass, x); path != nil {
pass.Reportf(x.Pos(), "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path)
pass.ReportRangef(x, "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path)
}
}
}
@ -146,7 +146,7 @@ func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, t
if recv != nil && len(recv.List) > 0 {
expr := recv.List[0].Type
if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
pass.ReportRangef(expr, "%s passes lock by value: %v", name, path)
}
}
@ -154,7 +154,7 @@ func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, t
for _, field := range typ.Params.List {
expr := field.Type
if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
pass.ReportRangef(expr, "%s passes lock by value: %v", name, path)
}
}
}

View File

@ -45,7 +45,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
return
}
if fn.FullName() == "reflect.DeepEqual" && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) {
pass.Reportf(call.Pos(), "avoid using reflect.DeepEqual with errors")
pass.ReportRangef(call, "avoid using reflect.DeepEqual with errors")
}
})
return nil, nil

View File

@ -16,14 +16,14 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
const doc = `report passing non-pointer or non-error values to errors.As
const Doc = `report passing non-pointer or non-error values to errors.As
The errorsas analysis reports calls to errors.As where the type
of the second argument is not a pointer to a type implementing error.`
var Analyzer = &analysis.Analyzer{
Name: "errorsas",
Doc: doc,
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
@ -51,7 +51,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
return // not enough arguments, e.g. called with return values of another function
}
if fn.FullName() == "errors.As" && !pointerToInterfaceOrError(pass, call.Args[1]) {
pass.Reportf(call.Pos(), "second argument to errors.As must be a pointer to an interface or a type implementing error")
pass.ReportRangef(call, "second argument to errors.As must be a pointer to an interface or a type implementing error")
}
})
return nil, nil

View File

@ -2,10 +2,12 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// The findcall package defines an Analyzer that serves as a trivial
// Package findcall defines an Analyzer that serves as a trivial
// example and test of the Analysis API. It reports a diagnostic for
// every call to a function or method of the name specified by its
// -name flag.
// -name flag. It also exports a fact for each declaration that
// matches the name, plus a package-level fact if the package contained
// one or more such declarations.
package findcall
import (
@ -69,6 +71,10 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
}
if len(pass.AllObjectFacts()) > 0 {
pass.ExportPackageFact(new(foundFact))
}
return nil, nil
}

View File

@ -12,6 +12,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
"golang.org/x/tools/go/ast/inspector"
)
@ -43,7 +44,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
// Fast path: if the package doesn't import net/http,
// skip the traversal.
if !imports(pass.Pkg, "net/http") {
if !analysisutil.Imports(pass.Pkg, "net/http") {
return nil, nil
}
@ -85,7 +86,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
if resp.Obj == root.Obj {
pass.Reportf(root.Pos(), "using %s before checking for errors", resp.Name)
pass.ReportRangef(root, "using %s before checking for errors", resp.Name)
}
return true
})
@ -166,12 +167,3 @@ func isNamedType(t types.Type, path, name string) bool {
obj := n.Obj()
return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path
}
func imports(pkg *types.Package, path string) bool {
for _, imp := range pkg.Imports() {
if imp.Path() == path {
return true
}
}
return false
}

View File

@ -16,7 +16,7 @@
//
// var Analyzer = &analysis.Analyzer{
// ...
// Requires: reflect.TypeOf(new(inspect.Analyzer)),
// Requires: []*analysis.Analyzer{inspect.Analyzer},
// }
//
// func run(pass *analysis.Pass) (interface{}, error) {

View File

@ -104,3 +104,13 @@ func LineStart(f *token.File, line int) token.Pos {
}
}
}
// Imports returns true if path is imported by pkg.
func Imports(pkg *types.Package, path string) bool {
for _, imp := range pkg.Imports() {
if imp.Path() == path {
return true
}
}
return false
}
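This new exported helper replaces the private imports copies deleted from atomicalign, cgocall, and httpresponse elsewhere in this diff. The typical fast-path check, as used throughout the updated passes:

    // Skip the whole pass when the package cannot possibly be affected.
    if !analysisutil.Imports(pass.Pkg, "net/http") {
        return nil, nil // doesn't directly import net/http
    }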

View File

@ -119,7 +119,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
for _, v := range vars {
if v.Obj == id.Obj {
pass.Reportf(id.Pos(), "loop variable %s captured by func literal",
pass.ReportRangef(id, "loop variable %s captured by func literal",
id.Name)
}
}

View File

@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package lostcancel defines an Analyzer that checks for failure to
// call a context cancelation function.
// call a context cancellation function.
package lostcancel
import (
@ -20,7 +20,7 @@ import (
const Doc = `check cancel func returned by context.WithCancel is called
The cancelation function returned by context.WithCancel, WithTimeout,
The cancellation function returned by context.WithCancel, WithTimeout,
and WithDeadline must be called or the new context will remain live
until its parent context is cancelled.
(The background context is never cancelled.)`
@ -121,7 +121,7 @@ func runFunc(pass *analysis.Pass, node ast.Node) {
}
if id != nil {
if id.Name == "_" {
pass.Reportf(id.Pos(),
pass.ReportRangef(id,
"the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
n.(*ast.SelectorExpr).Sel.Name)
} else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok {
@ -174,8 +174,8 @@ func runFunc(pass *analysis.Pass, node ast.Node) {
for v, stmt := range cancelvars {
if ret := lostCancelPath(pass, g, v, stmt, sig); ret != nil {
lineno := pass.Fset.Position(stmt.Pos()).Line
pass.Reportf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
pass.Reportf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
pass.ReportRangef(stmt, "the %s function is not used on all paths (possible context leak)", v.Name())
pass.ReportRangef(ret, "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
}
}
}

View File

@ -68,7 +68,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
return
}
pass.Reportf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
pass.ReportRangef(e, "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
})
return nil, nil
}

View File

@ -66,7 +66,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
eq := strings.IndexByte(pair, '=')
result[pair[:eq]] = pair[1+eq:]
}
pass.Reportf(spec.Pos(), "%s", strings.Join(fact, " "))
pass.ReportRangef(spec, "%s", strings.Join(fact, " "))
}
}

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the printf-checker.
// Package printf defines an Analyzer that checks consistency
// of Printf format strings and arguments.
package printf
import (
@ -13,6 +13,7 @@ import (
"go/constant"
"go/token"
"go/types"
"reflect"
"regexp"
"sort"
"strconv"
@ -32,13 +33,14 @@ func init() {
var Analyzer = &analysis.Analyzer{
Name: "printf",
Doc: doc,
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
ResultType: reflect.TypeOf((*Result)(nil)),
FactTypes: []analysis.Fact{new(isWrapper)},
}
const doc = `check consistency of Printf format strings and arguments
const Doc = `check consistency of Printf format strings and arguments
The check applies to known functions (for example, those in package fmt)
as well as any detected wrappers of known functions.
@ -66,18 +68,64 @@ argument list. Otherwise it is assumed to be Print-like, taking a list
of arguments with no format string.
`
// Kind is a kind of fmt function behavior.
type Kind int
const (
KindNone Kind = iota // not a fmt wrapper function
KindPrint // function behaves like fmt.Print
KindPrintf // function behaves like fmt.Printf
KindErrorf // function behaves like fmt.Errorf
)
func (kind Kind) String() string {
switch kind {
case KindPrint:
return "print"
case KindPrintf:
return "printf"
case KindErrorf:
return "errorf"
}
return ""
}
// Result is the printf analyzer's result type. Clients may query the result
// to learn whether a function behaves like fmt.Print or fmt.Printf.
type Result struct {
funcs map[*types.Func]Kind
}
// Kind reports whether fn behaves like fmt.Print or fmt.Printf.
func (r *Result) Kind(fn *types.Func) Kind {
_, ok := isPrint[fn.FullName()]
if !ok {
// Next look up just "printf", for use with -printf.funcs.
_, ok = isPrint[strings.ToLower(fn.Name())]
}
if ok {
if strings.HasSuffix(fn.Name(), "f") {
return KindPrintf
} else {
return KindPrint
}
}
return r.funcs[fn]
}
// isWrapper is a fact indicating that a function is a print or printf wrapper.
type isWrapper struct{ Kind funcKind }
type isWrapper struct{ Kind Kind }
func (f *isWrapper) AFact() {}
func (f *isWrapper) String() string {
switch f.Kind {
case kindPrintf:
case KindPrintf:
return "printfWrapper"
case kindPrint:
case KindPrint:
return "printWrapper"
case kindErrorf:
case KindErrorf:
return "errorfWrapper"
default:
return "unknownWrapper"
@ -85,9 +133,12 @@ func (f *isWrapper) String() string {
}
func run(pass *analysis.Pass) (interface{}, error) {
findPrintfLike(pass)
res := &Result{
funcs: make(map[*types.Func]Kind),
}
findPrintfLike(pass, res)
checkCall(pass)
return nil, nil
return res, nil
}
type printfWrapper struct {
@ -154,7 +205,7 @@ func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper {
}
// findPrintfLike scans the entire package to find printf-like functions.
func findPrintfLike(pass *analysis.Pass) (interface{}, error) {
func findPrintfLike(pass *analysis.Pass, res *Result) (interface{}, error) {
// Gather potential wrappers and call graph between them.
byObj := make(map[*types.Func]*printfWrapper)
var wrappers []*printfWrapper
@ -209,7 +260,7 @@ func findPrintfLike(pass *analysis.Pass) (interface{}, error) {
fn, kind := printfNameAndKind(pass, call)
if kind != 0 {
checkPrintfFwd(pass, w, call, kind)
checkPrintfFwd(pass, w, call, kind, res)
return true
}
@ -232,20 +283,11 @@ func match(info *types.Info, arg ast.Expr, param *types.Var) bool {
return ok && info.ObjectOf(id) == param
}
type funcKind int
const (
kindUnknown funcKind = iota
kindPrintf = iota
kindPrint
kindErrorf
)
// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly.
// It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...).
func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind funcKind) {
matched := kind == kindPrint ||
kind != kindUnknown && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind Kind, res *Result) {
matched := kind == KindPrint ||
kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
if !matched {
return
}
@ -266,10 +308,10 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k
return
}
desc := "printf"
if kind == kindPrint {
if kind == KindPrint {
desc = "print"
}
pass.Reportf(call.Pos(), "missing ... in args forwarded to %s-like function", desc)
pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", desc)
return
}
fn := w.obj
@ -277,8 +319,9 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k
if !pass.ImportObjectFact(fn, &fact) {
fact.Kind = kind
pass.ExportObjectFact(fn, &fact)
res.funcs[fn] = kind
for _, caller := range w.callers {
checkPrintfFwd(pass, caller.w, caller.call, kind)
checkPrintfFwd(pass, caller.w, caller.call, kind, res)
}
}
}
@ -427,15 +470,15 @@ func checkCall(pass *analysis.Pass) {
call := n.(*ast.CallExpr)
fn, kind := printfNameAndKind(pass, call)
switch kind {
case kindPrintf, kindErrorf:
case KindPrintf, KindErrorf:
checkPrintf(pass, kind, call, fn)
case kindPrint:
case KindPrint:
checkPrint(pass, call, fn)
}
})
}
func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind funcKind) {
func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind Kind) {
fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func)
if fn == nil {
return nil, 0
@ -448,11 +491,11 @@ func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func,
}
if ok {
if fn.Name() == "Errorf" {
kind = kindErrorf
kind = KindErrorf
} else if strings.HasSuffix(fn.Name(), "f") {
kind = kindPrintf
kind = KindPrintf
} else {
kind = kindPrint
kind = KindPrint
}
return fn, kind
}
@ -462,7 +505,7 @@ func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func,
return fn, fact.Kind
}
return fn, kindUnknown
return fn, KindNone
}
// isFormatter reports whether t satisfies fmt.Formatter.
@ -504,7 +547,7 @@ type formatState struct {
}
// checkPrintf checks a call to a formatted print routine such as Printf.
func checkPrintf(pass *analysis.Pass, kind funcKind, call *ast.CallExpr, fn *types.Func) {
func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.Func) {
format, idx := formatString(pass, call)
if idx < 0 {
if false {
@ -542,7 +585,7 @@ func checkPrintf(pass *analysis.Pass, kind funcKind, call *ast.CallExpr, fn *typ
anyIndex = true
}
if state.verb == 'w' {
if kind != kindErrorf {
if kind != KindErrorf {
pass.Reportf(call.Pos(), "%s call has error-wrapping directive %%w", state.name)
return
}
@ -574,7 +617,7 @@ func checkPrintf(pass *analysis.Pass, kind funcKind, call *ast.CallExpr, fn *typ
if maxArgNum != len(call.Args) {
expect := maxArgNum - firstArg
numArgs := len(call.Args) - firstArg
pass.Reportf(call.Pos(), "%s call needs %v but has %v", fn.Name(), count(expect, "arg"), count(numArgs, "arg"))
pass.ReportRangef(call, "%s call needs %v but has %v", fn.Name(), count(expect, "arg"), count(numArgs, "arg"))
}
}
@ -615,13 +658,13 @@ func (s *formatState) parseIndex() bool {
ok = false
s.nbytes = strings.Index(s.format, "]")
if s.nbytes < 0 {
s.pass.Reportf(s.call.Pos(), "%s format %s is missing closing ]", s.name, s.format)
s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format)
return false
}
}
arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) {
s.pass.Reportf(s.call.Pos(), "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes])
s.pass.ReportRangef(s.call, "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes])
return false
}
s.nbytes++ // skip ']'
@ -698,7 +741,7 @@ func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format strin
return nil
}
if state.nbytes == len(state.format) {
pass.Reportf(call.Pos(), "%s format %s is missing verb at end of string", name, state.format)
pass.ReportRangef(call.Fun, "%s format %s is missing verb at end of string", name, state.format)
return nil
}
verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
@ -748,7 +791,7 @@ var printVerbs = []printVerb{
// '#' is alternate format for several verbs.
// ' ' is spacer for numbers
{'%', noFlag, 0},
{'b', numFlag, argInt | argFloat | argComplex | argPointer},
{'b', sharpNumFlag, argInt | argFloat | argComplex | argPointer},
{'c', "-", argRune | argInt},
{'d', numFlag, argInt | argPointer},
{'e', sharpNumFlag, argFloat | argComplex},
@ -766,8 +809,8 @@ var printVerbs = []printVerb{
{'U', "-#", argRune | argInt},
{'v', allFlags, anyType},
{'w', allFlags, argError},
{'x', sharpNumFlag, argRune | argInt | argString | argPointer},
{'X', sharpNumFlag, argRune | argInt | argString | argPointer},
{'x', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex},
{'X', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex},
}
// okPrintfArg compares the formatState to the arguments actually present,
@ -794,7 +837,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
if !formatter {
if !found {
pass.Reportf(call.Pos(), "%s format %s has unknown verb %c", state.name, state.format, state.verb)
pass.ReportRangef(call, "%s format %s has unknown verb %c", state.name, state.format, state.verb)
return false
}
for _, flag := range state.flags {
@ -804,7 +847,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
continue
}
if !strings.ContainsRune(v.flags, rune(flag)) {
pass.Reportf(call.Pos(), "%s format %s has unrecognized flag %c", state.name, state.format, flag)
pass.ReportRangef(call, "%s format %s has unrecognized flag %c", state.name, state.format, flag)
return false
}
}
@ -823,7 +866,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
}
arg := call.Args[argNum]
if !matchArgType(pass, argInt, nil, arg) {
pass.Reportf(call.Pos(), "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg))
pass.ReportRangef(call, "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg))
return false
}
}
@ -837,7 +880,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
}
arg := call.Args[argNum]
if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' {
pass.Reportf(call.Pos(), "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg))
pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg))
return false
}
if !matchArgType(pass, v.typ, nil, arg) {
@ -845,11 +888,11 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
if typ := pass.TypesInfo.Types[arg].Type; typ != nil {
typeString = typ.String()
}
pass.Reportf(call.Pos(), "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString)
pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString)
return false
}
if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && recursiveStringer(pass, arg) {
pass.Reportf(call.Pos(), "%s format %s with arg %s causes recursive String method call", state.name, state.format, analysisutil.Format(pass.Fset, arg))
pass.ReportRangef(call, "%s format %s with arg %s causes recursive String method call", state.name, state.format, analysisutil.Format(pass.Fset, arg))
return false
}
return true
@ -935,7 +978,7 @@ func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, sta
// There are bad indexes in the format or there are fewer arguments than the format needs.
// This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
pass.Reportf(call.Pos(), "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg"))
pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg"))
return false
}
@ -986,7 +1029,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
if sel, ok := call.Args[0].(*ast.SelectorExpr); ok {
if x, ok := sel.X.(*ast.Ident); ok {
if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
pass.Reportf(call.Pos(), "%s does not take io.Writer but has first arg %s", fn.Name(), analysisutil.Format(pass.Fset, call.Args[0]))
pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", fn.Name(), analysisutil.Format(pass.Fset, call.Args[0]))
}
}
}
@ -1000,7 +1043,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
if strings.Contains(s, "%") {
m := printFormatRE.FindStringSubmatch(s)
if m != nil {
pass.Reportf(call.Pos(), "%s call has possible formatting directive %s", fn.Name(), m[0])
pass.ReportRangef(call, "%s call has possible formatting directive %s", fn.Name(), m[0])
}
}
}
@ -1010,16 +1053,16 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
str, _ := strconv.Unquote(lit.Value)
if strings.HasSuffix(str, "\n") {
pass.Reportf(call.Pos(), "%s arg list ends with redundant newline", fn.Name())
pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.Name())
}
}
}
for _, arg := range args {
if isFunctionValue(pass, arg) {
pass.Reportf(call.Pos(), "%s arg %s is a func value, not called", fn.Name(), analysisutil.Format(pass.Fset, arg))
pass.ReportRangef(call, "%s arg %s is a func value, not called", fn.Name(), analysisutil.Format(pass.Fset, arg))
}
if recursiveStringer(pass, arg) {
pass.Reportf(call.Pos(), "%s arg %s causes recursive call to String method", fn.Name(), analysisutil.Format(pass.Fset, arg))
pass.ReportRangef(call, "%s arg %s causes recursive call to String method", fn.Name(), analysisutil.Format(pass.Fset, arg))
}
}
}
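The printf analyzer now declares a ResultType, so other analyzers can ask whether a function behaves like fmt.Print, fmt.Printf, or fmt.Errorf. A minimal sketch of a hypothetical consumer (this client analyzer is illustrative and not part of the diff):

    package printfclient

    import (
        "golang.org/x/tools/go/analysis"
        "golang.org/x/tools/go/analysis/passes/printf"
    )

    var Analyzer = &analysis.Analyzer{
        Name:     "printfclient",
        Doc:      "example consumer of the printf analyzer's result",
        Requires: []*analysis.Analyzer{printf.Analyzer},
        Run:      run,
    }

    func run(pass *analysis.Pass) (interface{}, error) {
        // The Result value returned by the printf analyzer's run function.
        res := pass.ResultOf[printf.Analyzer].(*printf.Result)
        // For some *types.Func fn discovered while walking the package:
        //   if res.Kind(fn) == printf.KindErrorf { /* fn wraps fmt.Errorf */ }
        _ = res
        return nil, nil
    }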

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package shadow defines an Analyzer that checks for shadowed variables.
package shadow
import (
@ -160,7 +161,7 @@ func checkShadowAssignment(pass *analysis.Pass, spans map[types.Object]span, a *
for _, expr := range a.Lhs {
ident, ok := expr.(*ast.Ident)
if !ok {
pass.Reportf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier")
return
}
checkShadowing(pass, spans, ident)
@ -182,7 +183,7 @@ func idiomaticShortRedecl(pass *analysis.Pass, a *ast.AssignStmt) bool {
for i, expr := range a.Lhs {
lhs, ok := expr.(*ast.Ident)
if !ok {
pass.Reportf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier")
return true // Don't do any more processing.
}
switch rhs := a.Rhs[i].(type) {
@ -208,16 +209,17 @@ func idiomaticShortRedecl(pass *analysis.Pass, a *ast.AssignStmt) bool {
func idiomaticRedecl(d *ast.ValueSpec) bool {
// Don't complain about deliberate redeclarations of the form
// var i, j = i, j
// Don't ignore redeclarations of the form
// var i = 3
if len(d.Names) != len(d.Values) {
return false
}
for i, lhs := range d.Names {
if rhs, ok := d.Values[i].(*ast.Ident); ok {
if lhs.Name != rhs.Name {
rhs, ok := d.Values[i].(*ast.Ident)
if !ok || lhs.Name != rhs.Name {
return false
}
}
}
return true
}
@ -229,7 +231,7 @@ func checkShadowDecl(pass *analysis.Pass, spans map[types.Object]span, d *ast.Ge
for _, spec := range d.Specs {
valueSpec, ok := spec.(*ast.ValueSpec)
if !ok {
pass.Reportf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
pass.ReportRangef(spec, "invalid AST: var GenDecl not ValueSpec")
return
}
// Don't complain about deliberate redeclarations of the form
@ -273,7 +275,7 @@ func checkShadowing(pass *analysis.Pass, spans map[types.Object]span, ident *ast
// the shadowing identifier.
span, ok := spans[shadowed]
if !ok {
pass.Reportf(ident.Pos(), "internal error: no range for %q", ident.Name)
pass.ReportRangef(ident, "internal error: no range for %q", ident.Name)
return
}
if !span.contains(ident.Pos()) {
@ -283,6 +285,6 @@ func checkShadowing(pass *analysis.Pass, spans map[types.Object]span, ident *ast
// Don't complain if the types differ: that implies the programmer really wants two different things.
if types.Identical(obj.Type(), shadowed.Type()) {
line := pass.Fset.Position(shadowed.Pos()).Line
pass.Reportf(ident.Pos(), "declaration of %q shadows declaration at line %d", obj.Name(), line)
pass.ReportRangef(ident, "declaration of %q shadows declaration at line %d", obj.Name(), line)
}
}

View File

@ -21,9 +21,11 @@ import (
"golang.org/x/tools/go/ast/inspector"
)
const Doc = "check for shifts that equal or exceed the width of the integer"
var Analyzer = &analysis.Analyzer{
Name: "shift",
Doc: "check for shifts that equal or exceed the width of the integer",
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
@ -94,6 +96,6 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) {
size := 8 * pass.TypesSizes.Sizeof(t)
if amt >= size {
ident := analysisutil.Format(pass.Fset, x)
pass.Reportf(node.Pos(), "%s (%d bits) too small for shift of %d", ident, size, amt)
pass.ReportRangef(node, "%s (%d bits) too small for shift of %d", ident, size, amt)
}
}

View File

@ -141,7 +141,7 @@ func canonicalMethod(pass *analysis.Pass, id *ast.Ident) {
actual = strings.TrimPrefix(actual, "func")
actual = id.Name + actual
pass.Reportf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
pass.ReportRangef(id, "method %s should have signature %s", actual, expectFmt)
}
}

View File

@ -0,0 +1,154 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package testinggoroutine
import (
"go/ast"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
"golang.org/x/tools/go/ast/inspector"
)
const Doc = `report calls to (*testing.T).Fatal from goroutines started by a test.
Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and
Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.
This checker detects calls to these functions that occur within a goroutine
started by the test. For example:
func TestFoo(t *testing.T) {
go func() {
t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine
}()
}
`
var Analyzer = &analysis.Analyzer{
Name: "testinggoroutine",
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
var forbidden = map[string]bool{
"FailNow": true,
"Fatal": true,
"Fatalf": true,
"Skip": true,
"Skipf": true,
"SkipNow": true,
}
func run(pass *analysis.Pass) (interface{}, error) {
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
if !analysisutil.Imports(pass.Pkg, "testing") {
return nil, nil
}
// Filter out anything that isn't a function declaration.
onlyFuncs := []ast.Node{
(*ast.FuncDecl)(nil),
}
inspect.Nodes(onlyFuncs, func(node ast.Node, push bool) bool {
fnDecl, ok := node.(*ast.FuncDecl)
if !ok {
return false
}
if !hasBenchmarkOrTestParams(fnDecl) {
return false
}
// Now traverse the benchmark/test's body and check that none of the
// forbidden methods are invoked in the goroutines within the body.
ast.Inspect(fnDecl, func(n ast.Node) bool {
goStmt, ok := n.(*ast.GoStmt)
if !ok {
return true
}
checkGoStmt(pass, goStmt)
// No need to further traverse the GoStmt since right
// above we manually traversed it in the ast.Inspect(goStmt, ...)
return false
})
return false
})
return nil, nil
}
func hasBenchmarkOrTestParams(fnDecl *ast.FuncDecl) bool {
// Check that the function's arguments include "*testing.T" or "*testing.B".
params := fnDecl.Type.Params.List
for _, param := range params {
if _, ok := typeIsTestingDotTOrB(param.Type); ok {
return true
}
}
return false
}
func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) {
starExpr, ok := expr.(*ast.StarExpr)
if !ok {
return "", false
}
selExpr, ok := starExpr.X.(*ast.SelectorExpr)
if !ok {
return "", false
}
varPkg := selExpr.X.(*ast.Ident)
if varPkg.Name != "testing" {
return "", false
}
varTypeName := selExpr.Sel.Name
ok = varTypeName == "B" || varTypeName == "T"
return varTypeName, ok
}
// checkGoStmt traverses the goroutine and checks for the
// use of the forbidden *testing.(B, T) methods.
func checkGoStmt(pass *analysis.Pass, goStmt *ast.GoStmt) {
// Otherwise examine the goroutine to check for the forbidden methods.
ast.Inspect(goStmt, func(n ast.Node) bool {
selExpr, ok := n.(*ast.SelectorExpr)
if !ok {
return true
}
_, bad := forbidden[selExpr.Sel.Name]
if !bad {
return true
}
// Now filter out false positives by the import-path/type.
ident, ok := selExpr.X.(*ast.Ident)
if !ok {
return true
}
if ident.Obj == nil || ident.Obj.Decl == nil {
return true
}
field, ok := ident.Obj.Decl.(*ast.Field)
if !ok {
return true
}
if typeName, ok := typeIsTestingDotTOrB(field.Type); ok {
pass.ReportRangef(selExpr, "call to (*%s).%s from a non-test goroutine", typeName, selExpr.Sel)
}
return true
})
}
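A hedged sketch of running the new testinggoroutine analyzer on its own; golangci-lint wires vendored analyzers into its govet checks, so the standalone driver below is purely illustrative:

// Hypothetical standalone vet-style driver for the analyzer above.
package main

import (
	"golang.org/x/tools/go/analysis/passes/testinggoroutine"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	singlechecker.Main(testinggoroutine.Analyzer)
}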

View File

@ -16,14 +16,14 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
const doc = `report passing non-pointer or non-interface values to unmarshal
const Doc = `report passing non-pointer or non-interface values to unmarshal
The unmarshal analysis reports calls to functions such as json.Unmarshal
in which the argument type is not a pointer or an interface.`
var Analyzer = &analysis.Analyzer{
Name: "unmarshal",
Doc: doc,
Doc: Doc,
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
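Illustrative only: the kind of call the unmarshal analyzer reports, because the second argument is a value rather than a pointer or interface:

package main

import "encoding/json"

type point struct{ X, Y int }

func main() {
	var p point
	// Reported by the unmarshal check: p is passed by value, so the decoded
	// fields are silently discarded. Passing &p would be correct.
	_ = json.Unmarshal([]byte(`{"X":1,"Y":2}`), p)
}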

View File

@ -189,7 +189,7 @@ func (d *deadState) findDead(stmt ast.Stmt) {
case *ast.EmptyStmt:
// do not warn about unreachable empty statements
default:
d.pass.Reportf(stmt.Pos(), "unreachable code")
d.pass.ReportRangef(stmt, "unreachable code")
d.reachable = true // silence error about next statement
}
}

View File

@ -45,7 +45,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
if hasBasicType(pass.TypesInfo, x.Fun, types.UnsafePointer) &&
hasBasicType(pass.TypesInfo, x.Args[0], types.Uintptr) &&
!isSafeUintptr(pass.TypesInfo, x.Args[0]) {
pass.Reportf(x.Pos(), "possible misuse of unsafe.Pointer")
pass.ReportRangef(x, "possible misuse of unsafe.Pointer")
}
})
return nil, nil
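Illustrative only: a pattern the unsafeptr check flags, where a uintptr held in a variable is converted back to unsafe.Pointer (the object may have moved or been freed in between); the diagnostic is now reported on the whole conversion via ReportRangef:

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	x := int64(42)
	u := uintptr(unsafe.Pointer(&x)) // a plain uintptr is not tracked by the GC
	p := unsafe.Pointer(u)           // reported: possible misuse of unsafe.Pointer
	fmt.Println(*(*int64)(p))
}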

View File

@ -9,13 +9,10 @@ import (
// Validate reports an error if any of the analyzers are misconfigured.
// Checks include:
// that the name is a valid identifier;
// that analyzer names are unique;
// that the Requires graph is acyclic;
// that analyzer fact types are unique;
// that each fact type is a pointer.
func Validate(analyzers []*Analyzer) error {
names := make(map[string]bool)
// Map each fact type to its sole generating analyzer.
factTypes := make(map[reflect.Type]*Analyzer)
@ -39,10 +36,6 @@ func Validate(analyzers []*Analyzer) error {
if !validIdent(a.Name) {
return fmt.Errorf("invalid analyzer name %q", a)
}
if names[a.Name] {
return fmt.Errorf("duplicate analyzer name %q", a)
}
names[a.Name] = true
if a.Doc == "" {
return fmt.Errorf("analyzer %q is undocumented", a)

View File

@ -275,9 +275,10 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
if line-lastLine > 1 {
if line-lastLine > 1 || !gen.Rparen.IsValid() {
// There was a blank line immediately preceding the deleted import,
// so there's no need to close the hole.
// so there's no need to close the hole. The right parenthesis is
// invalid after AddImport to an import statement without parentheses.
// Do nothing.
} else if line != fset.File(gen.Rparen).LineCount() {
// There was no blank line. Close the hole.

View File

@ -90,7 +90,7 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
// The types argument, if non-empty, enables type-based filtering of
// events. The function f is called only for nodes whose type
// matches an element of the types slice.
func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proceed bool)) {
mask := maskOf(types)
for i := 0; i < len(in.events); {
ev := in.events[i]
@ -114,7 +114,7 @@ func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prun
// supplies each call to f an additional argument, the current
// traversal stack. The stack's first element is the outermost node,
// an *ast.File; its last is the innermost, n.
func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) {
func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (proceed bool)) {
mask := maskOf(types)
var stack []ast.Node
for i := 0; i < len(in.events); {
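A hedged sketch of the Nodes callback whose result is renamed from prune to proceed in this change; returning false stops descent into the matched node's subtree but still visits its siblings:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	fset := token.NewFileSet()
	f, _ := parser.ParseFile(fset, "p.go", "package p\nfunc A() {}\nfunc B() {}", 0)
	in := inspector.New([]*ast.File{f})
	in.Nodes([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node, push bool) (proceed bool) {
		if push {
			fmt.Println("func", n.(*ast.FuncDecl).Name.Name)
		}
		return false // do not descend into the function body
	})
}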

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This package constructs a simple control-flow graph (CFG) of the
// Package cfg constructs a simple control-flow graph (CFG) of the
// statements and expressions within a single function.
//
// Use cfg.New to construct the CFG for a function body.

View File

@ -100,7 +100,7 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
// Write writes encoded type information for the specified package to out.
// The FileSet provides file position information for named objects.
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
b, err := gcimporter.BExportData(fset, pkg)
b, err := gcimporter.IExportData(fset, pkg)
if err != nil {
return err
}
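A minimal round-trip sketch, assuming this Write is the one exposed as golang.org/x/tools/go/gcexportdata.Write; after this change it emits the indexed (iexport) format that the importer reads back:

package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	fset := token.NewFileSet()
	f, _ := parser.ParseFile(fset, "p.go", "package p\nconst Answer = 42", 0)
	pkg, err := (&types.Config{}).Check("example.org/p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	var buf bytes.Buffer
	if err := gcexportdata.Write(&buf, fset, pkg); err != nil {
		panic(err)
	}

	imports := make(map[string]*types.Package)
	back, err := gcexportdata.Read(&buf, token.NewFileSet(), imports, "example.org/p")
	if err != nil {
		panic(err)
	}
	fmt.Println(back.Path(), back.Scope().Lookup("Answer") != nil) // example.org/p true
}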

View File

@ -2,9 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cgo
// This file handles cgo preprocessing of files containing `import "C"`.
// Package cgo handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
@ -51,6 +49,8 @@ package cgo
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.
package cgo
import (
"fmt"
"go/ast"

View File

@ -332,7 +332,7 @@ func (p *importer) pos() token.Pos {
p.prevFile = file
p.prevLine = line
return p.fake.pos(file, line)
return p.fake.pos(file, line, 0)
}
// Synthesize a token.Pos
@ -341,7 +341,9 @@ type fakeFileSet struct {
files map[string]*token.File
}
func (s *fakeFileSet) pos(file string, line int) token.Pos {
func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
// TODO(mdempsky): Make use of column.
// Since we don't know the set of needed file positions, we
// reserve maxlines positions per file.
const maxlines = 64 * 1024

View File

@ -344,7 +344,7 @@ func (p *parser) expectKeyword(keyword string) {
// PackageId = string_lit .
//
func (p *parser) parsePackageId() string {
func (p *parser) parsePackageID() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
p.error(err)
@ -384,7 +384,7 @@ func (p *parser) parseDotIdent() string {
//
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
id = p.parsePackageId()
id = p.parsePackageID()
p.expect('.')
// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
if p.tok == '?' {
@ -696,7 +696,7 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
// Complete requires the type's embedded interfaces to be fully defined,
// but we do not define any
return types.NewInterface(methods, nil).Complete()
return newInterface(methods, nil).Complete()
}
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
@ -785,7 +785,7 @@ func (p *parser) parseType(parent *types.Package) types.Type {
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
p.getPkg(p.parsePackageId(), name)
p.getPkg(p.parsePackageID(), name)
}
// int_lit = [ "+" | "-" ] { "0" ... "9" } .

View File

@ -6,8 +6,6 @@
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
// see that file for specification of the format.
// +build go1.11
package gcimporter
import (
@ -28,7 +26,10 @@ import (
const iexportVersion = 0
// IExportData returns the binary export data for pkg.
//
// If no file set is provided, position info will be missing.
// The package path of the top-level package will not be recorded,
// so that calls to IImportData can override with a provided package path.
func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
defer func() {
if e := recover(); e != nil {
@ -48,6 +49,7 @@ func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error)
stringIndex: map[string]uint64{},
declIndex: map[types.Object]uint64{},
typIndex: map[types.Type]uint64{},
localpkg: pkg,
}
for i, pt := range predeclared() {
@ -73,7 +75,7 @@ func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error)
// Append indices to data0 section.
dataLen := uint64(p.data0.Len())
w := p.newWriter()
w.writeIndex(p.declIndex, pkg)
w.writeIndex(p.declIndex)
w.flush()
// Assemble header.
@ -95,14 +97,14 @@ func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error)
// we're writing out the main index, which is also read by
// non-compiler tools and includes a complete package description
// (i.e., name and height).
func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types.Package) {
func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
// Build a map from packages to objects from that package.
pkgObjs := map[*types.Package][]types.Object{}
// For the main index, make sure to include every package that
// we reference, even if we're not exporting (or reexporting)
// any symbols from it.
pkgObjs[localpkg] = nil
pkgObjs[w.p.localpkg] = nil
for pkg := range w.p.allPkgs {
pkgObjs[pkg] = nil
}
@ -121,12 +123,12 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types
}
sort.Slice(pkgs, func(i, j int) bool {
return pkgs[i].Path() < pkgs[j].Path()
return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j])
})
w.uint64(uint64(len(pkgs)))
for _, pkg := range pkgs {
w.string(pkg.Path())
w.string(w.exportPath(pkg))
w.string(pkg.Name())
w.uint64(uint64(0)) // package height is not needed for go/types
@ -143,6 +145,8 @@ type iexporter struct {
fset *token.FileSet
out *bytes.Buffer
localpkg *types.Package
// allPkgs tracks all packages that have been referenced by
// the export data, so we can ensure to include them in the
// main index.
@ -195,6 +199,13 @@ type exportWriter struct {
prevLine int64
}
func (w *exportWriter) exportPath(pkg *types.Package) string {
if pkg == w.p.localpkg {
return ""
}
return pkg.Path()
}
func (p *iexporter) doDecl(obj types.Object) {
w := p.newWriter()
w.setPkg(obj.Pkg(), false)
@ -267,6 +278,11 @@ func (w *exportWriter) tag(tag byte) {
}
func (w *exportWriter) pos(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
return
}
p := w.p.fset.Position(pos)
file := p.Filename
line := int64(p.Line)
@ -299,7 +315,7 @@ func (w *exportWriter) pkg(pkg *types.Package) {
// Ensure any referenced packages are declared in the main index.
w.p.allPkgs[pkg] = true
w.string(pkg.Path())
w.string(w.exportPath(pkg))
}
func (w *exportWriter) qualifiedIdent(obj types.Object) {
@ -394,7 +410,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
w.pos(f.Pos())
w.string(f.Name())
w.typ(f.Type(), pkg)
w.bool(f.Embedded())
w.bool(f.Anonymous())
w.string(t.Tag(i)) // note (or tag)
}

View File

@ -63,8 +63,8 @@ const (
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
const currentVersion = 0
version := -1
const currentVersion = 1
version := int64(-1)
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
@ -77,9 +77,9 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []
r := &intReader{bytes.NewReader(data), path}
version = int(r.uint64())
version = int64(r.uint64())
switch version {
case currentVersion:
case currentVersion, 0:
default:
errorf("unknown iexport format version %d", version)
}
@ -94,6 +94,7 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []
p := iimporter{
ipath: path,
version: int(version),
stringData: stringData,
stringCache: make(map[uint64]string),
@ -142,20 +143,18 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []
p.pkgIndex[pkg] = nameIndex
pkgList[i] = pkg
}
var localpkg *types.Package
for _, pkg := range pkgList {
if pkg.Path() == path {
localpkg = pkg
if len(pkgList) == 0 {
errorf("no packages found for %s", path)
panic("unreachable")
}
}
names := make([]string, 0, len(p.pkgIndex[localpkg]))
for name := range p.pkgIndex[localpkg] {
p.ipkg = pkgList[0]
names := make([]string, 0, len(p.pkgIndex[p.ipkg]))
for name := range p.pkgIndex[p.ipkg] {
names = append(names, name)
}
sort.Strings(names)
for _, name := range names {
p.doDecl(localpkg, name)
p.doDecl(p.ipkg, name)
}
for _, typ := range p.interfaceList {
@ -165,17 +164,19 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []
// record all referenced packages as imports
list := append(([]*types.Package)(nil), pkgList[1:]...)
sort.Sort(byPath(list))
localpkg.SetImports(list)
p.ipkg.SetImports(list)
// package was imported completely and without errors
localpkg.MarkComplete()
p.ipkg.MarkComplete()
consumed, _ := r.Seek(0, io.SeekCurrent)
return int(consumed), localpkg, nil
return int(consumed), p.ipkg, nil
}
type iimporter struct {
ipath string
ipkg *types.Package
version int
stringData []byte
stringCache map[uint64]string
@ -226,6 +227,9 @@ func (p *iimporter) pkgAt(off uint64) *types.Package {
return pkg
}
path := p.stringAt(off)
if path == p.ipath {
return p.ipkg
}
errorf("missing package %q in %q", path, p.ipath)
return nil
}
@ -255,6 +259,7 @@ type importReader struct {
currPkg *types.Package
prevFile string
prevLine int64
prevColumn int64
}
func (r *importReader) obj(name string) {
@ -448,6 +453,19 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) {
}
func (r *importReader) pos() token.Pos {
if r.p.version >= 1 {
r.posv1()
} else {
r.posv0()
}
if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 {
return token.NoPos
}
return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn))
}
func (r *importReader) posv0() {
delta := r.int64()
if delta != deltaNewFile {
r.prevLine += delta
@ -457,12 +475,18 @@ func (r *importReader) pos() token.Pos {
r.prevFile = r.string()
r.prevLine = l
}
}
if r.prevFile == "" && r.prevLine == 0 {
return token.NoPos
func (r *importReader) posv1() {
delta := r.int64()
r.prevColumn += delta >> 1
if delta&1 != 0 {
delta = r.int64()
r.prevLine += delta >> 1
if delta&1 != 0 {
r.prevFile = r.string()
}
}
return r.p.fake.pos(r.prevFile, int(r.prevLine))
}
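Hypothetical, self-contained illustration of the version-1 position encoding that posv1 decodes: the first signed delta carries the column change in its high bits, and its low bit says a line delta (and possibly a new file name) follows. The values below are made up:

package main

import "fmt"

type posState struct {
	file         string
	line, column int64
}

// applyV1 mirrors posv1 above: the first delta updates the column; if its low
// bit is set, a second delta updates the line; if that low bit is set too,
// the file name changes.
func applyV1(s *posState, deltas []int64, newFile string) {
	d := deltas[0]
	s.column += d >> 1
	if d&1 != 0 {
		d = deltas[1]
		s.line += d >> 1
		if d&1 != 0 {
			s.file = newFile
		}
	}
}

func main() {
	s := posState{file: "a.go", line: 10, column: 5}

	// Same file and line, column advances by 3: encoded as 3<<1 = 6.
	applyV1(&s, []int64{6}, "")
	fmt.Println(s) // {a.go 10 8}

	// Line advances by 2, column moves by -8: the first delta's low bit is set.
	applyV1(&s, []int64{-8<<1 | 1, 2 << 1}, "")
	fmt.Println(s) // {a.go 12 0}
}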
func (r *importReader) typ() types.Type {

View File

@ -81,13 +81,13 @@ func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, u
args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"}
args = append(args, buildFlags...)
args = append(args, "--", "unsafe")
stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...)
stdout, stderr, err := invokeGo(ctx, env, dir, usesExportData, args...)
var goarch, compiler string
if err != nil {
if strings.Contains(err.Error(), "cannot find main module") {
// User's running outside of a module. All bets are off. Get GOARCH and guess compiler is gc.
// TODO(matloob): Is this a problem in practice?
envout, enverr := InvokeGo(ctx, env, dir, usesExportData, "env", "GOARCH")
envout, _, enverr := invokeGo(ctx, env, dir, usesExportData, "env", "GOARCH")
if enverr != nil {
return nil, err
}
@ -99,7 +99,8 @@ func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, u
} else {
fields := strings.Fields(stdout.String())
if len(fields) < 2 {
return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
return nil, fmt.Errorf("could not parse GOARCH and Go compiler in format \"<GOARCH> <compiler>\" from stdout of go command:\n%s\ndir: %s\nstdout: <<%s>>\nstderr: <<%s>>",
cmdDebugStr(env, args...), dir, stdout.String(), stderr.String())
}
goarch = fields[0]
compiler = fields[1]
@ -107,8 +108,8 @@ func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, u
return types.SizesFor(compiler, goarch), nil
}
// InvokeGo returns the stdout of a go command invocation.
func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, error) {
// invokeGo returns the stdout and stderr of a go command invocation.
func invokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, *bytes.Buffer, error) {
if debug {
defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now())
}
@ -131,7 +132,7 @@ func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool
// Catastrophic error:
// - executable not found
// - context cancellation
return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
return nil, nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
}
// Export mode entails a build.
@ -139,7 +140,7 @@ func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool
// (despite the -e flag) and the Export field is blank.
// Do not fail in that case.
if !usesExportData {
return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
return nil, nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
}
}
@ -158,7 +159,7 @@ func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool
fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout)
}
return stdout, nil
return stdout, stderr, nil
}
func cmdDebugStr(envlist []string, args ...string) string {

View File

@ -60,8 +60,7 @@ causes Load to run in LoadFiles mode, collecting minimal information.
See the documentation for type Config for details.
As noted earlier, the Config.Mode controls the amount of detail
reported about the loaded packages, with each mode returning all the data of the
previous mode with some extra added. See the documentation for type LoadMode
reported about the loaded packages. See the documentation for type LoadMode
for details.
Most tools should pass their command-line arguments (after any flags)
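A hedged example of the Config.Mode / LoadMode interaction described above; the bit set and the "./..." pattern are illustrative only:

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedTypes,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		panic(err)
	}
	for _, p := range pkgs {
		fmt.Println(p.PkgPath, len(p.GoFiles), "files")
	}
}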

View File

@ -12,6 +12,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"os"
"os/exec"
"strings"
)
@ -76,15 +77,21 @@ func findExternalDriver(cfg *Config) driver {
}
buf := new(bytes.Buffer)
stderr := new(bytes.Buffer)
cmd := exec.CommandContext(cfg.Context, tool, words...)
cmd.Dir = cfg.Dir
cmd.Env = cfg.Env
cmd.Stdin = bytes.NewReader(req)
cmd.Stdout = buf
cmd.Stderr = new(bytes.Buffer)
cmd.Stderr = stderr
if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
}
if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" {
fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, words...), stderr)
}
var response driverResponse
if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
return nil, err

View File

@ -6,17 +6,17 @@ package packages
import (
"bytes"
"context"
"encoding/json"
"fmt"
"go/types"
"io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"reflect"
"regexp"
"sort"
"strconv"
"strings"
"sync"
@ -24,8 +24,6 @@ import (
"unicode"
"golang.org/x/tools/go/internal/packagesdriver"
"golang.org/x/tools/internal/gopathwalk"
"golang.org/x/tools/internal/semver"
)
// debug controls verbose logging.
@ -44,16 +42,21 @@ type responseDeduper struct {
dr *driverResponse
}
// init fills in r with a driverResponse.
func (r *responseDeduper) init(dr *driverResponse) {
r.dr = dr
r.seenRoots = map[string]bool{}
r.seenPackages = map[string]*Package{}
func newDeduper() *responseDeduper {
return &responseDeduper{
dr: &driverResponse{},
seenRoots: map[string]bool{},
seenPackages: map[string]*Package{},
}
}
// addAll fills in r with a driverResponse.
func (r *responseDeduper) addAll(dr *driverResponse) {
for _, pkg := range dr.Packages {
r.seenPackages[pkg.ID] = pkg
r.addPackage(pkg)
}
for _, root := range dr.Roots {
r.seenRoots[root] = true
r.addRoot(root)
}
}
@ -73,25 +76,47 @@ func (r *responseDeduper) addRoot(id string) {
r.dr.Roots = append(r.dr.Roots, id)
}
// goInfo contains global information from the go tool.
type goInfo struct {
type golistState struct {
cfg *Config
ctx context.Context
envOnce sync.Once
goEnvError error
goEnv map[string]string
rootsOnce sync.Once
rootDirsError error
rootDirs map[string]string
env goEnv
// vendorDirs caches the (non)existence of vendor directories.
vendorDirs map[string]bool
}
type goEnv struct {
modulesOn bool
}
func determineEnv(cfg *Config) goEnv {
buf, err := invokeGo(cfg, "env", "GOMOD")
if err != nil {
return goEnv{}
// getEnv returns Go environment variables. Only specific variables are
// populated -- computing all of them is slow.
func (state *golistState) getEnv() (map[string]string, error) {
state.envOnce.Do(func() {
var b *bytes.Buffer
b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH")
if state.goEnvError != nil {
return
}
gomod := bytes.TrimSpace(buf.Bytes())
env := goEnv{}
env.modulesOn = len(gomod) > 0
state.goEnv = make(map[string]string)
decoder := json.NewDecoder(b)
if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil {
return
}
})
return state.goEnv, state.goEnvError
}
// mustGetEnv is a convenience function that can be used if getEnv has already succeeded.
func (state *golistState) mustGetEnv() map[string]string {
env, err := state.getEnv()
if err != nil {
panic(fmt.Sprintf("mustGetEnv: %v", err))
}
return env
}
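A standalone sketch of the same idea as getEnv above: ask the go command for a few variables in JSON form and decode them into a map (error handling reduced to panics for brevity):

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

func main() {
	out, err := exec.Command("go", "env", "-json", "GOMOD", "GOPATH").Output()
	if err != nil {
		panic(err)
	}
	env := map[string]string{}
	if err := json.Unmarshal(out, &env); err != nil {
		panic(err)
	}
	fmt.Println("module file:", env["GOMOD"])
	fmt.Println("GOPATH:", env["GOPATH"])
}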
@ -99,56 +124,38 @@ func determineEnv(cfg *Config) goEnv {
// the build system package structure.
// See driver for more details.
func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
var sizes types.Sizes
// Make sure that any asynchronous go commands are killed when we return.
parentCtx := cfg.Context
if parentCtx == nil {
parentCtx = context.Background()
}
ctx, cancel := context.WithCancel(parentCtx)
defer cancel()
response := newDeduper()
// Fill in response.Sizes asynchronously if necessary.
var sizeserr error
var sizeswg sync.WaitGroup
if cfg.Mode&NeedTypesSizes != 0 {
if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
sizeswg.Add(1)
go func() {
sizes, sizeserr = getSizes(cfg)
var sizes types.Sizes
sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
// types.SizesFor always returns nil or a *types.StdSizes.
response.dr.Sizes, _ = sizes.(*types.StdSizes)
sizeswg.Done()
}()
}
// start fetching rootDirs
var info goInfo
var getGoInfo func() *goInfo
if len(cfg.Overlay) != 0 {
// Both of determineEnv and determineRootDirs calls take 100-200ms on MacBook Pro.
// Optimize: right now they are needed in most cases only for overlay processing.
var rootDirsReady, envReady = make(chan struct{}), make(chan struct{})
go func() {
info.rootDirs = determineRootDirs(cfg)
close(rootDirsReady)
}()
go func() {
info.env = determineEnv(cfg)
close(envReady)
}()
getGoInfo = func() *goInfo {
<-rootDirsReady
<-envReady
return &info
}
} else {
var determineRootDirsOnce sync.Once
getGoInfo = func() *goInfo {
determineRootDirsOnce.Do(func() {
info.rootDirs = determineRootDirs(cfg)
})
return &info
}
}
// always pass getGoInfo to golistDriver
golistDriver := func(cfg *Config, patterns ...string) (*driverResponse, error) {
return golistDriver(cfg, getGoInfo, patterns...)
state := &golistState{
cfg: cfg,
ctx: ctx,
vendorDirs: map[string]bool{},
}
// Determine files requested in contains patterns
var containFiles []string
var packagesNamed []string
restPatterns := make([]string, 0, len(patterns))
// Extract file= and other [querytype]= patterns. Report an error if querytype
// doesn't exist.
@ -164,8 +171,6 @@ extractQueries:
containFiles = append(containFiles, value)
case "pattern":
restPatterns = append(restPatterns, value)
case "iamashamedtousethedisabledqueryname":
packagesNamed = append(packagesNamed, value)
case "": // not a reserved query
restPatterns = append(restPatterns, pattern)
default:
@ -181,52 +186,34 @@ extractQueries:
}
}
response := &responseDeduper{}
var err error
// See if we have any patterns to pass through to go list. Zero initial
// patterns also requires a go list call, since it's the equivalent of
// ".".
if len(restPatterns) > 0 || len(patterns) == 0 {
dr, err := golistDriver(cfg, restPatterns...)
dr, err := state.createDriverResponse(restPatterns...)
if err != nil {
return nil, err
}
response.init(dr)
} else {
response.init(&driverResponse{})
response.addAll(dr)
}
sizeswg.Wait()
if sizeserr != nil {
return nil, sizeserr
}
// types.SizesFor always returns nil or a *types.StdSizes
response.dr.Sizes, _ = sizes.(*types.StdSizes)
var containsCandidates []string
if len(containFiles) != 0 {
if err := runContainsQueries(cfg, golistDriver, response, containFiles, getGoInfo); err != nil {
if err := state.runContainsQueries(response, containFiles); err != nil {
return nil, err
}
}
if len(packagesNamed) != 0 {
if err := runNamedQueries(cfg, golistDriver, response, packagesNamed); err != nil {
return nil, err
}
}
modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
modifiedPkgs, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return nil, err
}
var containsCandidates []string
if len(containFiles) > 0 {
containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...)
}
if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs, getGoInfo); err != nil {
if err := state.addNeededOverlayPackages(response, needPkgs); err != nil {
return nil, err
}
// Check candidate packages for containFiles.
@ -255,36 +242,32 @@ extractQueries:
}
}
sizeswg.Wait()
if sizeserr != nil {
return nil, sizeserr
}
return response.dr, nil
}
func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string, getGoInfo func() *goInfo) error {
func (state *golistState) addNeededOverlayPackages(response *responseDeduper, pkgs []string) error {
if len(pkgs) == 0 {
return nil
}
drivercfg := *cfg
if getGoInfo().env.modulesOn {
drivercfg.BuildFlags = append(drivercfg.BuildFlags, "-mod=readonly")
}
dr, err := driver(&drivercfg, pkgs...)
dr, err := state.createDriverResponse(pkgs...)
if err != nil {
return err
}
for _, pkg := range dr.Packages {
response.addPackage(pkg)
}
_, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
_, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return err
}
if err := addNeededOverlayPackages(cfg, driver, response, needPkgs, getGoInfo); err != nil {
return err
}
return nil
return state.addNeededOverlayPackages(response, needPkgs)
}
func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string, goInfo func() *goInfo) error {
func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error {
for _, query := range queries {
// TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query)
@ -294,31 +277,17 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
if err != nil {
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
}
dirResponse, err := driver(cfg, pattern)
if err != nil || (len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].Errors) == 1) {
// There was an error loading the package. Try to load the file as an ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're in modules mode
// and the ad-hoc is located outside a module.
dirResponse, err := state.createDriverResponse(pattern)
// If there was an error loading the package, or the package is returned
// with errors, try to load the file as an ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're
// in module mode and the ad-hoc is located outside a module.
if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
dirResponse, queryErr = driver(cfg, query)
if queryErr != nil {
// Return the original error if the attempt to fall back failed.
return err
}
// Special case to handle issue #33482:
// If this is a file= query for ad-hoc packages where the file only exists on an overlay,
// and exists outside of a module, add the file in for the package.
if len(dirResponse.Packages) == 1 && len(dirResponse.Packages) == 1 &&
dirResponse.Packages[0].ID == "command-line-arguments" && len(dirResponse.Packages[0].GoFiles) == 0 {
filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
// TODO(matloob): check if the file is outside of a root dir?
for path := range cfg.Overlay {
if path == filename {
dirResponse.Packages[0].Errors = nil
dirResponse.Packages[0].GoFiles = []string{path}
dirResponse.Packages[0].CompiledGoFiles = []string{path}
}
}
if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
return err // return the original error
}
}
isRoot := make(map[string]bool, len(dirResponse.Roots))
@ -346,260 +315,47 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
return nil
}
// modCacheRegexp splits a path in a module cache into module, module version, and package.
var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
// calling `go env` isn't free; bail out if there's nothing to do.
if len(queries) == 0 {
return nil
}
// Determine which directories are relevant to scan.
roots, modRoot, err := roots(cfg)
// adhocPackage attempts to load or construct an ad-hoc package for a given
// query, if the original call to the driver produced inadequate results.
func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) {
response, err := state.createDriverResponse(query)
if err != nil {
return err
return nil, err
}
// Scan the selected directories. Simple matches, from GOPATH/GOROOT
// or the local module, can simply be "go list"ed. Matches from the
// module cache need special treatment.
var matchesMu sync.Mutex
var simpleMatches, modCacheMatches []string
add := func(root gopathwalk.Root, dir string) {
// Walk calls this concurrently; protect the result slices.
matchesMu.Lock()
defer matchesMu.Unlock()
path := dir
if dir != root.Path {
path = dir[len(root.Path)+1:]
// If we get nothing back from `go list`,
// try to make this file into its own ad-hoc package.
// TODO(rstambler): Should this check against the original response?
if len(response.Packages) == 0 {
response.Packages = append(response.Packages, &Package{
ID: "command-line-arguments",
PkgPath: query,
GoFiles: []string{query},
CompiledGoFiles: []string{query},
Imports: make(map[string]*Package),
})
response.Roots = append(response.Roots, "command-line-arguments")
}
if pathMatchesQueries(path, queries) {
switch root.Type {
case gopathwalk.RootModuleCache:
modCacheMatches = append(modCacheMatches, path)
case gopathwalk.RootCurrentModule:
// We'd need to read go.mod to find the full
// import path. Relative's easier.
rel, err := filepath.Rel(cfg.Dir, dir)
if err != nil {
// This ought to be impossible, since
// we found dir in the current module.
panic(err)
}
simpleMatches = append(simpleMatches, "./"+rel)
case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
simpleMatches = append(simpleMatches, path)
}
}
}
startWalk := time.Now()
gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
cfg.Logf("%v for walk", time.Since(startWalk))
// Weird special case: the top-level package in a module will be in
// whatever directory the user checked the repository out into. It's
// more reasonable for that to not match the package name. So, if there
// are any Go files in the mod root, query it just to be safe.
if modRoot != "" {
rel, err := filepath.Rel(cfg.Dir, modRoot)
if err != nil {
panic(err) // See above.
}
files, err := ioutil.ReadDir(modRoot)
for _, f := range files {
if strings.HasSuffix(f.Name(), ".go") {
simpleMatches = append(simpleMatches, rel)
break
}
}
}
addResponse := func(r *driverResponse) {
for _, pkg := range r.Packages {
response.addPackage(pkg)
for _, name := range queries {
if pkg.Name == name {
response.addRoot(pkg.ID)
break
// Handle special cases.
if len(response.Packages) == 1 {
// golang/go#33482: If this is a file= query for ad-hoc packages where
// the file only exists on an overlay, and exists outside of a module,
// add the file to the package and remove the errors.
if response.Packages[0].ID == "command-line-arguments" ||
filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) {
if len(response.Packages[0].GoFiles) == 0 {
filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
// TODO(matloob): check if the file is outside of a root dir?
for path := range state.cfg.Overlay {
if path == filename {
response.Packages[0].Errors = nil
response.Packages[0].GoFiles = []string{path}
response.Packages[0].CompiledGoFiles = []string{path}
}
}
}
}
if len(simpleMatches) != 0 {
resp, err := driver(cfg, simpleMatches...)
if err != nil {
return err
}
addResponse(resp)
}
// Module cache matches are tricky. We want to avoid downloading new
// versions of things, so we need to use the ones present in the cache.
// go list doesn't accept version specifiers, so we have to write out a
// temporary module, and do the list in that module.
if len(modCacheMatches) != 0 {
// Collect all the matches, deduplicating by major version
// and preferring the newest.
type modInfo struct {
mod string
major string
}
mods := make(map[modInfo]string)
var imports []string
for _, modPath := range modCacheMatches {
matches := modCacheRegexp.FindStringSubmatch(modPath)
mod, ver := filepath.ToSlash(matches[1]), matches[2]
importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
major := semver.Major(ver)
if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
mods[modInfo{mod, major}] = ver
}
imports = append(imports, importPath)
}
// Build the temporary module.
var gomod bytes.Buffer
gomod.WriteString("module modquery\nrequire (\n")
for mod, version := range mods {
gomod.WriteString("\t" + mod.mod + " " + version + "\n")
}
gomod.WriteString(")\n")
tmpCfg := *cfg
// We're only trying to look at stuff in the module cache, so
// disable the network. This should speed things up, and has
// prevented errors in at least one case, #28518.
tmpCfg.Env = append(append([]string{"GOPROXY=off"}, cfg.Env...))
var err error
tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
if err != nil {
return err
}
defer os.RemoveAll(tmpCfg.Dir)
if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
return fmt.Errorf("writing go.mod for module cache query: %v", err)
}
// Run the query, using the import paths calculated from the matches above.
resp, err := driver(&tmpCfg, imports...)
if err != nil {
return fmt.Errorf("querying module cache matches: %v", err)
}
addResponse(resp)
}
return nil
}
func getSizes(cfg *Config) (types.Sizes, error) {
return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
}
// roots selects the appropriate paths to walk based on the passed-in configuration,
// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
if err != nil {
return nil, "", err
}
fields := strings.Split(stdout.String(), "\n")
if len(fields) != 4 || len(fields[3]) != 0 {
return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
}
goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
var modDir string
if gomod != "" {
modDir = filepath.Dir(gomod)
}
var roots []gopathwalk.Root
// Always add GOROOT.
roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT})
// If modules are enabled, scan the module dir.
if modDir != "" {
roots = append(roots, gopathwalk.Root{modDir, gopathwalk.RootCurrentModule})
}
// Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
for _, p := range gopath {
if modDir != "" {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
} else {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH})
}
}
return roots, modDir, nil
}
// These functions were copied from goimports. See further documentation there.
// pathMatchesQueries is adapted from pkgIsCandidate.
// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
func pathMatchesQueries(path string, queries []string) bool {
lastTwo := lastTwoComponents(path)
for _, query := range queries {
if strings.Contains(lastTwo, query) {
return true
}
if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
if strings.Contains(lastTwo, query) {
return true
}
}
}
return false
}
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator.
func lastTwoComponents(v string) string {
nslash := 0
for i := len(v) - 1; i >= 0; i-- {
if v[i] == '/' || v[i] == '\\' {
nslash++
if nslash == 2 {
return v[i:]
}
}
}
return v
}
func hasHyphenOrUpperASCII(s string) bool {
for i := 0; i < len(s); i++ {
b := s[i]
if b == '-' || ('A' <= b && b <= 'Z') {
return true
}
}
return false
}
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
buf := make([]byte, 0, len(s))
for i := 0; i < len(s); i++ {
b := s[i]
switch {
case b == '-':
continue
case 'A' <= b && b <= 'Z':
buf = append(buf, b+('a'-'A'))
default:
buf = append(buf, b)
}
}
return string(buf)
return response, nil
}
// Fields must match go list;
@ -644,10 +400,9 @@ func otherFiles(p *jsonPackage) [][]string {
return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
}
// golistDriver uses the "go list" command to expand the pattern
// words and return metadata for the specified packages. dir may be
// "" and env may be nil, as per os/exec.Command.
func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driverResponse, error) {
// createDriverResponse uses the "go list" command to expand the pattern
// words and return a response for the specified packages.
func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) {
// go list uses the following identifiers in ImportPath and Imports:
//
// "p" -- importable package or main (command)
@ -661,11 +416,13 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// Run "go list" for complete
// information on the specified packages.
buf, err := invokeGo(cfg, golistargs(cfg, words)...)
buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
if err != nil {
return nil, err
}
seen := make(map[string]*jsonPackage)
pkgs := make(map[string]*Package)
additionalErrors := make(map[string][]Error)
// Decode the JSON and convert it to Package form.
var response driverResponse
for dec := json.NewDecoder(buf); dec.More(); {
@ -696,19 +453,73 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// contained in a known module or GOPATH entry. This will allow the package to be
// properly "reclaimed" when overlays are processed.
if filepath.IsAbs(p.ImportPath) && p.Error != nil {
pkgPath, ok := getPkgPath(p.ImportPath, rootsDirs)
pkgPath, ok, err := state.getPkgPath(p.ImportPath)
if err != nil {
return nil, err
}
if ok {
p.ImportPath = pkgPath
}
}
if old, found := seen[p.ImportPath]; found {
// If one version of the package has an error, and the other doesn't, assume
// that this is a case where go list is reporting a fake dependency variant
// of the imported package: When a package tries to invalidly import another
// package, go list emits a variant of the imported package (with the same
// import path, but with an error on it, and the package will have a
// DepError set on it). An example of when this can happen is for imports of
// main packages: main packages can not be imported, but they may be
// separately matched and listed by another pattern.
// See golang.org/issue/36188 for more details.
// The plan is that eventually, hopefully in Go 1.15, the error will be
// reported on the importing package rather than the duplicate "fake"
// version of the imported package. Once all supported versions of Go
// have the new behavior this logic can be deleted.
// TODO(matloob): delete the workaround logic once all supported versions of
// Go return the errors on the proper package.
// There should be exactly one version of a package that doesn't have an
// error.
if old.Error == nil && p.Error == nil {
if !reflect.DeepEqual(p, old) {
return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath)
}
// skip the duplicate
continue
}
// Determine if this package's error needs to be bubbled up.
// This is a hack, and we expect for go list to eventually set the error
// on the package.
if old.Error != nil {
var errkind string
if strings.Contains(old.Error.Err, "not an importable package") {
errkind = "not an importable package"
} else if strings.Contains(old.Error.Err, "use of internal package") && strings.Contains(old.Error.Err, "not allowed") {
errkind = "use of internal package not allowed"
}
if errkind != "" {
if len(old.Error.ImportStack) < 2 {
return nil, fmt.Errorf(`internal error: go list gave a %q error with an import stack with fewer than two elements`, errkind)
}
importingPkg := old.Error.ImportStack[len(old.Error.ImportStack)-2]
additionalErrors[importingPkg] = append(additionalErrors[importingPkg], Error{
Pos: old.Error.Pos,
Msg: old.Error.Err,
Kind: ListError,
})
}
}
// Make sure that if there's a version of the package without an error,
// that's the one reported to the user.
if old.Error == nil {
continue
}
// This package will replace the old one at the end of the loop.
}
seen[p.ImportPath] = p
pkg := &Package{
@ -717,6 +528,7 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
forTest: p.ForTest,
}
// Work around https://golang.org/issue/28749:
@ -793,37 +605,68 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
}
if p.Error != nil {
msg := strings.TrimSpace(p.Error.Err) // Trim to work around golang.org/issue/32363.
// Address golang.org/issue/35964 by appending import stack to error message.
if msg == "import cycle not allowed" && len(p.Error.ImportStack) != 0 {
msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack)
}
pkg.Errors = append(pkg.Errors, Error{
Pos: p.Error.Pos,
Msg: strings.TrimSpace(p.Error.Err), // Trim to work around golang.org/issue/32363.
Msg: msg,
Kind: ListError,
})
}
pkgs[pkg.ID] = pkg
}
for id, errs := range additionalErrors {
if p, ok := pkgs[id]; ok {
p.Errors = append(p.Errors, errs...)
}
}
for _, pkg := range pkgs {
response.Packages = append(response.Packages, pkg)
}
sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID })
return &response, nil
}
// getPkgPath finds the package path of a directory if it's relative to a root directory.
func getPkgPath(dir string, goInfo func() *goInfo) (string, bool) {
for rdir, rpath := range goInfo().rootDirs {
func (state *golistState) getPkgPath(dir string) (string, bool, error) {
absDir, err := filepath.Abs(dir)
if err != nil {
return "", false, err
}
roots, err := state.determineRootDirs()
if err != nil {
return "", false, err
}
for rdir, rpath := range roots {
// Make sure that the directory is in the module,
// to avoid creating a path relative to another module.
if !strings.HasPrefix(absDir, rdir) {
continue
}
// TODO(matloob): This doesn't properly handle symlinks.
r, err := filepath.Rel(rdir, dir)
if err != nil {
continue
}
if rpath != "" {
// We choose only ore root even though the directory even it can belong in multiple modules
// We choose only one root even though the directory can belong to multiple modules
// or GOPATH entries. This is okay because we only need to work with absolute dirs when a
// file is missing from disk, for instance when gopls calls go/packages in an overlay.
// Once the file is saved, gopls, or the next invocation of the tool will get the correct
// result straight from golist.
// TODO(matloob): Implement module tiebreaking?
return path.Join(rpath, filepath.ToSlash(r)), true
return path.Join(rpath, filepath.ToSlash(r)), true, nil
}
return filepath.ToSlash(r), true, nil
}
return "", false
return "", false, nil
}
// absJoin absolutizes and flattens the lists of files.
@ -840,16 +683,13 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
}
func golistargs(cfg *Config, words []string) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
"list", "-e", "-json",
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0),
"-e", "-json",
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
// Obtain package information about each dependency if needed
fmt.Sprintf("-deps=%t", loadsDeps(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
// go list doesn't let you pass -test and -find together,
// probably because you'd just get the TestMain.
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
@ -861,10 +701,17 @@ func golistargs(cfg *Config, words []string) []string {
}
// invokeGo returns the stdout of a go command invocation.
func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) {
cfg := state.cfg
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
cmd := exec.CommandContext(cfg.Context, "go", args...)
goArgs := []string{verb}
if verb != "env" {
goArgs = append(goArgs, cfg.BuildFlags...)
}
goArgs = append(goArgs, args...)
cmd := exec.CommandContext(state.ctx, "go", goArgs...)
// On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the
// go command when dealing with modules.
@ -876,7 +723,7 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
cmd.Stdout = stdout
cmd.Stderr = stderr
defer func(start time.Time) {
cfg.Logf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr)
cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, goArgs...), stderr, stdout)
}(time.Now())
if err := cmd.Run(); err != nil {
@ -913,10 +760,15 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
// (the Graphic characters without spaces) and may also exclude the
// characters !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character U+FFFD.
return unicode.IsOneOf([]*unicode.RangeTable{unicode.L, unicode.M, unicode.N, unicode.P, unicode.S}, r) &&
strings.IndexRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r) == -1
!strings.ContainsRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r)
}
if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# ") {
if strings.HasPrefix(strings.TrimLeftFunc(stderr.String()[len("# "):], isPkgPathRune), "\n") {
msg := stderr.String()[len("# "):]
if strings.HasPrefix(strings.TrimLeftFunc(msg, isPkgPathRune), "\n") {
return stdout, nil
}
// Treat pkg-config errors as a special case (golang.org/issue/36770).
if strings.HasPrefix(msg, "pkg-config") {
return stdout, nil
}
}
@ -964,13 +816,24 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
// TODO(matloob): command-line-arguments isn't correct here.
"command-line-arguments", strings.Trim(stderr.String(), "\n"))
return bytes.NewBufferString(output), nil
}
// Another variation of the previous error
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside module root") {
output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
// TODO(matloob): command-line-arguments isn't correct here.
"command-line-arguments", strings.Trim(stderr.String(), "\n"))
return bytes.NewBufferString(output), nil
}
// Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit
// status if there's a dependency on a package that doesn't exist. But it should return
// a zero exit status and set an error on that package.
if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") {
// Don't clobber stdout if `go list` actually returned something.
if len(stdout.String()) > 0 {
return stdout, nil
}
// try to extract package name from string
stderrStr := stderr.String()
var importPath string
@ -1004,12 +867,6 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, args...), stderr)
}
// debugging
if false {
fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(cmd, args...), stdout)
}
return stdout, nil
}

View File

@ -1,13 +1,13 @@
package packages
import (
"bytes"
"encoding/json"
"fmt"
"go/parser"
"go/token"
"path"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
)
@ -17,7 +17,7 @@ import (
// sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following:
// - determining the correct package to add given a new import path
func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func() *goInfo) (modifiedPkgs, needPkgs []string, err error) {
func (state *golistState) processGolistOverlay(response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool)
@ -35,13 +35,29 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
// potentially modifying the transitive set of dependencies).
var overlayAddsImports bool
for opath, contents := range cfg.Overlay {
// If both a package and its test package are created by the overlay, we
// need the real package first. Process all non-test files before test
// files, and make the whole process deterministic while we're at it.
var overlayFiles []string
for opath := range state.cfg.Overlay {
overlayFiles = append(overlayFiles, opath)
}
sort.Slice(overlayFiles, func(i, j int) bool {
iTest := strings.HasSuffix(overlayFiles[i], "_test.go")
jTest := strings.HasSuffix(overlayFiles[j], "_test.go")
if iTest != jTest {
return !iTest // non-tests are before tests.
}
return overlayFiles[i] < overlayFiles[j]
})
for _, opath := range overlayFiles {
contents := state.cfg.Overlay[opath]
base := filepath.Base(opath)
dir := filepath.Dir(opath)
var pkg *Package
var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant
var testVariantOf *Package // if opath is a test file, this is the package it is testing
var fileExists bool
isTest := strings.HasSuffix(opath, "_test.go")
isTestFile := strings.HasSuffix(opath, "_test.go")
pkgName, ok := extractPackageName(opath, contents)
if !ok {
// Don't bother adding a file that doesn't even have a parsable package statement
@ -57,13 +73,20 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if !sameFile(filepath.Dir(f), dir) {
continue
}
if isTest && !hasTestFiles(p) {
// Make sure to capture information on the package's test variant, if needed.
if isTestFile && !hasTestFiles(p) {
// TODO(matloob): Are there packages other than the 'production' variant
// of a package that this can match? This shouldn't match the test main package
// because the file is generated in another directory.
testVariantOf = p
continue nextPackage
}
// We must have already seen the package of which this is a test variant.
if pkg != nil && p != pkg && pkg.PkgPath == p.PkgPath {
if hasTestFiles(p) {
testVariantOf = pkg
}
}
pkg = p
if filepath.Base(f) == base {
fileExists = true
@ -74,32 +97,19 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if pkg == nil {
// Try to find the module or gopath dir the file is contained in.
// Then for modules, add the module opath to the beginning.
var pkgPath string
for rdir, rpath := range rootDirs().rootDirs {
// TODO(matloob): This doesn't properly handle symlinks.
r, err := filepath.Rel(rdir, dir)
pkgPath, ok, err := state.getPkgPath(dir)
if err != nil {
continue
return nil, nil, err
}
pkgPath = filepath.ToSlash(r)
if rpath != "" {
pkgPath = path.Join(rpath, pkgPath)
}
// We only create one new package even it can belong in multiple modules or GOPATH entries.
// This is okay because tools (such as the LSP) that use overlays will recompute the overlay
// once the file is saved, and golist will do the right thing.
// TODO(matloob): Implement module tiebreaking?
if !ok {
break
}
if pkgPath == "" {
continue
}
isXTest := strings.HasSuffix(pkgName, "_test")
if isXTest {
pkgPath += "_test"
}
id := pkgPath
if isTest && !isXTest {
if isTestFile && !isXTest {
id = fmt.Sprintf("%s [%s.test]", pkgPath, pkgPath)
}
// Try to reclaim a package with the same id if it exists in the response.
@ -115,9 +125,14 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
response.addPackage(pkg)
havePkgs[pkg.PkgPath] = id
// Add the production package's sources for a test variant.
if isTest && !isXTest && testVariantOf != nil {
if isTestFile && !isXTest && testVariantOf != nil {
pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...)
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...)
// Add the package under test and its imports to the test variant.
pkg.forTest = testVariantOf.PkgPath
for k, v := range testVariantOf.Imports {
pkg.Imports[k] = &Package{ID: v.ID}
}
}
}
}
@ -134,38 +149,45 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
continue
}
for _, imp := range imports {
_, found := pkg.Imports[imp]
if !found {
overlayAddsImports = true
// TODO(matloob): Handle cases when the following block isn't correct.
// These include imports of test variants, imports of vendored packages, etc.
id, ok := havePkgs[imp]
if !ok {
id = imp
}
pkg.Imports[imp] = &Package{ID: id}
}
}
if _, found := pkg.Imports[imp]; found {
continue
}
// toPkgPath tries to guess the package path given the id.
// This isn't always correct -- it's certainly wrong for
// vendored packages' paths.
toPkgPath := func(id string) string {
// TODO(matloob): Handle vendor paths.
i := strings.IndexByte(id, ' ')
if i >= 0 {
return id[:i]
overlayAddsImports = true
id, ok := havePkgs[imp]
if !ok {
var err error
id, err = state.resolveImport(dir, imp)
if err != nil {
return nil, nil, err
}
}
pkg.Imports[imp] = &Package{ID: id}
// Add dependencies to the non-test variant version of this package as well.
if testVariantOf != nil {
testVariantOf.Imports[imp] = &Package{ID: id}
}
}
return id
}
// Do another pass now that new packages have been created to determine the
// set of missing packages.
// toPkgPath guesses the package path given the id.
toPkgPath := func(sourceDir, id string) (string, error) {
if i := strings.IndexByte(id, ' '); i >= 0 {
return state.resolveImport(sourceDir, id[:i])
}
return state.resolveImport(sourceDir, id)
}
// Now that new packages have been created, do another pass to determine
// the new set of missing packages.
for _, pkg := range response.dr.Packages {
for _, imp := range pkg.Imports {
pkgPath := toPkgPath(imp.ID)
if len(pkg.GoFiles) == 0 {
return nil, nil, fmt.Errorf("cannot resolve imports for package %q with no Go files", pkg.PkgPath)
}
pkgPath, err := toPkgPath(filepath.Dir(pkg.GoFiles[0]), imp.ID)
if err != nil {
return nil, nil, err
}
if _, ok := havePkgs[pkgPath]; !ok {
needPkgsSet[pkgPath] = true
}
@ -185,6 +207,52 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
return modifiedPkgs, needPkgs, err
}
// resolveImport finds the ID of a package given its import path.
// In particular, it will find the right vendored copy when in GOPATH mode.
func (state *golistState) resolveImport(sourceDir, importPath string) (string, error) {
env, err := state.getEnv()
if err != nil {
return "", err
}
if env["GOMOD"] != "" {
return importPath, nil
}
searchDir := sourceDir
for {
vendorDir := filepath.Join(searchDir, "vendor")
exists, ok := state.vendorDirs[vendorDir]
if !ok {
info, err := os.Stat(vendorDir)
exists = err == nil && info.IsDir()
state.vendorDirs[vendorDir] = exists
}
if exists {
vendoredPath := filepath.Join(vendorDir, importPath)
if info, err := os.Stat(vendoredPath); err == nil && info.IsDir() {
// We should probably check for .go files here, but shame on anyone who fools us.
path, ok, err := state.getPkgPath(vendoredPath)
if err != nil {
return "", err
}
if ok {
return path, nil
}
}
}
// We know we've hit the top of the filesystem when we Dir / and get /,
// or C:\ and get C:\, etc.
next := filepath.Dir(searchDir)
if next == searchDir {
break
}
searchDir = next
}
return importPath, nil
}
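As an aside, the vendor lookup above boils down to walking parent directories until a matching vendor/<importPath> directory is found. The following is a minimal, self-contained sketch of that idea only; the directory names are hypothetical, and the real method additionally consults the go environment and its getPkgPath cache.
package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// resolveVendored is a simplified stand-in for the vendor lookup in
// resolveImport: it returns the vendored directory for importPath if one
// exists between sourceDir and the filesystem root, or "" otherwise.
func resolveVendored(sourceDir, importPath string) string {
    dir := sourceDir
    for {
        candidate := filepath.Join(dir, "vendor", importPath)
        if info, err := os.Stat(candidate); err == nil && info.IsDir() {
            return candidate
        }
        next := filepath.Dir(dir)
        if next == dir { // hit the filesystem root
            return ""
        }
        dir = next
    }
}

func main() {
    // Hypothetical GOPATH layout, for illustration only.
    fmt.Println(resolveVendored("/home/me/go/src/example.com/app", "github.com/pkg/errors"))
}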
func hasTestFiles(p *Package) bool {
for _, f := range p.GoFiles {
if strings.HasSuffix(f, "_test.go") {
@ -194,44 +262,59 @@ func hasTestFiles(p *Package) bool {
return false
}
// determineRootDirs returns a mapping from directories code can be contained in to the
// corresponding import path prefixes of those directories.
// Its result is used to try to determine the import path for a package containing
// an overlay file.
func determineRootDirs(cfg *Config) map[string]string {
// Assume modules first:
out, err := invokeGo(cfg, "list", "-m", "-json", "all")
// determineRootDirs returns a mapping from absolute directories that could
// contain code to their corresponding import path prefixes.
func (state *golistState) determineRootDirs() (map[string]string, error) {
env, err := state.getEnv()
if err != nil {
return determineRootDirsGOPATH(cfg)
return nil, err
}
if env["GOMOD"] != "" {
state.rootsOnce.Do(func() {
state.rootDirs, state.rootDirsError = state.determineRootDirsModules()
})
} else {
state.rootsOnce.Do(func() {
state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH()
})
}
return state.rootDirs, state.rootDirsError
}
func (state *golistState) determineRootDirsModules() (map[string]string, error) {
out, err := state.invokeGo("list", "-m", "-json", "all")
if err != nil {
return nil, err
}
m := map[string]string{}
type jsonMod struct{ Path, Dir string }
for dec := json.NewDecoder(out); dec.More(); {
mod := new(jsonMod)
if err := dec.Decode(mod); err != nil {
return m // Give up and return an empty map. Package won't be found for overlay.
return nil, err
}
if mod.Dir != "" && mod.Path != "" {
// This is a valid module; add it to the map.
m[mod.Dir] = mod.Path
absDir, err := filepath.Abs(mod.Dir)
if err != nil {
return nil, err
}
m[absDir] = mod.Path
}
}
return m
return m, nil
}
func determineRootDirsGOPATH(cfg *Config) map[string]string {
func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) {
m := map[string]string{}
out, err := invokeGo(cfg, "env", "GOPATH")
for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) {
absDir, err := filepath.Abs(dir)
if err != nil {
// Could not determine root dir mapping. Everything is best-effort, so just return an empty map.
// When we try to find the import path for a directory, there will be no root-dir match and
// we'll give up.
return m
return nil, err
}
for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) {
m[filepath.Join(p, "src")] = ""
m[filepath.Join(absDir, "src")] = ""
}
return m
return m, nil
}
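The root-dir mapping produced here is what lets an overlay file be assigned an import path: find the root that contains the file's directory and join the root's prefix with the relative path. A rough sketch of that lookup follows, with made-up paths and a helper name (importPathFor) that does not exist in the vendored code; in module mode the prefix would be the module path rather than "".
package main

import (
    "fmt"
    "path"
    "path/filepath"
    "strings"
)

// importPathFor mimics applying a root-dir mapping to an overlay file:
// find a root that contains the file and join its import-path prefix with
// the file's directory relative to that root.
func importPathFor(rootDirs map[string]string, file string) string {
    dir := filepath.Dir(file)
    for root, prefix := range rootDirs {
        if rel, err := filepath.Rel(root, dir); err == nil && !strings.HasPrefix(rel, "..") {
            return path.Join(prefix, filepath.ToSlash(rel))
        }
    }
    return ""
}

func main() {
    // GOPATH mode: the prefix for $GOPATH/src is the empty string.
    roots := map[string]string{"/home/me/go/src": ""}
    fmt.Println(importPathFor(roots, "/home/me/go/src/example.com/app/main.go"))
    // Prints: example.com/app (paths above are hypothetical)
}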
func extractImports(filename string, contents []byte) ([]string, error) {

View File

@ -0,0 +1,57 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package packages
import (
"fmt"
"strings"
)
var allModes = []LoadMode{
NeedName,
NeedFiles,
NeedCompiledGoFiles,
NeedImports,
NeedDeps,
NeedExportsFile,
NeedTypes,
NeedSyntax,
NeedTypesInfo,
NeedTypesSizes,
}
var modeStrings = []string{
"NeedName",
"NeedFiles",
"NeedCompiledGoFiles",
"NeedImports",
"NeedDeps",
"NeedExportsFile",
"NeedTypes",
"NeedSyntax",
"NeedTypesInfo",
"NeedTypesSizes",
}
func (mod LoadMode) String() string {
m := mod
if m == 0 {
return fmt.Sprintf("LoadMode(0)")
}
var out []string
for i, x := range allModes {
if x > m {
break
}
if (m & x) != 0 {
out = append(out, modeStrings[i])
m = m ^ x
}
}
if m != 0 {
out = append(out, "Unknown")
}
return fmt.Sprintf("LoadMode(%s)", strings.Join(out, "|"))
}
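For illustration, the new Stringer makes composed load modes readable when logged. A small example using the exported go/packages constants; the output comment assumes the bit order defined above.
package main

import (
    "fmt"

    "golang.org/x/tools/go/packages"
)

func main() {
    mode := packages.NeedName | packages.NeedImports | packages.NeedSyntax
    // With the String method added above, this prints
    // "LoadMode(NeedName|NeedImports|NeedSyntax)".
    fmt.Println(mode)
}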

View File

@ -23,6 +23,7 @@ import (
"sync"
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/internal/packagesinternal"
)
// A LoadMode controls the amount of detail to return when loading.
@ -34,6 +35,9 @@ import (
// Load may return more information than requested.
type LoadMode int
// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
// NeedExportFile to make it consistent with the Package field it's adding.
const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
@ -51,7 +55,7 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps
// NeedExportsFile adds ExportsFile.
// NeedExportsFile adds ExportFile.
NeedExportsFile
// NeedTypes adds Types, Fset, and IllTyped.
@ -292,6 +296,15 @@ type Package struct {
// TypesSizes provides the effective size function for types in TypesInfo.
TypesSizes types.Sizes
// forTest is the package under test, if any.
forTest string
}
func init() {
packagesinternal.GetForTest = func(p interface{}) string {
return p.(*Package).forTest
}
}
// An Error describes a problem with a package's metadata, syntax, or types.
@ -401,9 +414,8 @@ type loaderPackage struct {
importErrors map[string]error // maps each bad import to its error
loadOnce sync.Once
color uint8 // for cycle detection
needsyntax bool // fill syntax trees
needtypes bool // basic type information is either requested or depended on (export data is enough)
needtypesinfo bool // full type information is either requested or depended on (need to load from source)
needsrc bool // load from source (Mode >= LoadTypes)
needtypes bool // type information is either requested or depended on
initial bool // package was matched by a pattern
}
@ -468,7 +480,7 @@ func newLoader(cfg *Config) *loader {
ld.requestedMode = ld.Mode
ld.Mode = impliedLoadMode(ld.Mode)
if ld.Mode&NeedTypes != 0 {
if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
if ld.Fset == nil {
ld.Fset = token.NewFileSet()
}
@ -502,25 +514,22 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
rootIndex = i
}
// For root packages (rootIndex >= 0) load types if they were requested by NeedTypes.
// For all other packages (dependencies) load types only if types were requested (NeedTypes) for dependencies (NeedDeps).
explicitlyNeedTypes := ld.Mode&NeedTypes != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)
explicitlyNeedTypesInfo := ld.Mode&NeedTypesInfo != 0 && (rootIndex >= 0 || // load from source all root packages
ld.Mode&NeedDeps != 0) // load from source all dependencies if needed
hasValidExportData := (pkg.ExportFile != "" || pkg.PkgPath == "unsafe") &&
// overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
len(ld.Overlay) == 0
needTypesInfo := explicitlyNeedTypesInfo || (ld.Mode&NeedTypes != 0 && !hasValidExportData)
explicitlyNeedSyntax := ld.Mode&NeedSyntax != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)
needSyntax := explicitlyNeedSyntax || needTypesInfo // types info loading requires syntax trees building
// Overlays can invalidate export data.
// TODO(matloob): make this check fine-grained based on dependencies on overlaid files
exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
// This package needs type information if the caller requested types and the package is
// either a root, or it's a non-root and the user requested dependencies ...
needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
// This package needs source if the call requested source (or types info, which implies source)
// and the package is either a root, or it's a non-root and the user requested dependencies ...
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
// typechecking packages from source if they fail to compile.
(ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{
Package: pkg,
needtypes: explicitlyNeedTypes,
needtypesinfo: needTypesInfo,
needsyntax: needSyntax,
needtypes: needtypes,
needsrc: needsrc,
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
@ -534,19 +543,6 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
}
}
// Build loader packages for imported packages when no deps are needed
if ld.Mode&NeedDeps == 0 {
for _, pkg := range list {
for _, ipkg := range pkg.Imports {
if imp := ld.pkgs[ipkg.ID]; imp == nil {
ld.pkgs[ipkg.ID] = &loaderPackage{
Package: ipkg,
}
}
}
}
}
// Materialize the import graph.
const (
@ -562,16 +558,16 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
// Invalid imports (cycles and missing nodes) are saved in the importErrors map.
// Thus, even in the presence of both kinds of errors, the Import graph remains a DAG.
//
// visit returns whether the package needs types info or has a transitive
// visit returns whether the package needs src or has a transitive
// dependency on a package that does. These are the only packages
// for which we load source code.
var stack []*loaderPackage
var visit func(lpkg *loaderPackage) bool
var typesInfoPkgs []*loaderPackage
var srcPkgs []*loaderPackage
visit = func(lpkg *loaderPackage) bool {
switch lpkg.color {
case black:
return lpkg.needtypesinfo
return lpkg.needsrc
case grey:
panic("internal error: grey node")
}
@ -598,18 +594,14 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
continue
}
// If we don't need deps, just fill Imports for the root. No need to recurse further.
if loadsDeps(&ld.Config) {
if visit(imp) {
lpkg.needtypesinfo = true
lpkg.needsyntax = true // types info loading (needtypesinfo) requires syntax trees building
lpkg.needsrc = true
}
lpkg.Imports[importPath] = imp.Package
}
}
lpkg.Imports[importPath] = imp.Package // deduplicate imported package
}
}
if lpkg.needtypesinfo {
typesInfoPkgs = append(typesInfoPkgs, lpkg)
if lpkg.needsrc {
srcPkgs = append(srcPkgs, lpkg)
}
if ld.Mode&NeedTypesSizes != 0 {
lpkg.TypesSizes = ld.sizes
@ -617,7 +609,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
stack = stack[:len(stack)-1] // pop
lpkg.color = black
return lpkg.needtypesinfo
return lpkg.needsrc
}
if ld.Mode&NeedImports == 0 {
@ -631,21 +623,19 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
visit(lpkg)
}
}
// Set needtypes for immediate dependencies if we need types info
for _, lpkg := range typesInfoPkgs {
if ld.Mode&NeedImports != 0 && ld.Mode&NeedTypes != 0 {
for _, lpkg := range srcPkgs {
// Complete type information is required for the
// immediate dependencies of packages for which
// we need types info.
// immediate dependencies of each source package.
for _, ipkg := range lpkg.Imports {
imp := ld.pkgs[ipkg.ID]
imp.needtypes = true
}
}
// Load type data if needed, starting at
}
// Load type data and syntax if needed, starting at
// the initial packages (roots of the import DAG).
if ld.Mode&NeedTypes != 0 {
if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
var wg sync.WaitGroup
for _, lpkg := range initial {
wg.Add(1)
@ -747,6 +737,13 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
// The Diamond test exercises this case.
if !lpkg.needtypes && !lpkg.needsrc {
return
}
if !lpkg.needsrc {
ld.loadFromExportData(lpkg)
return // not a source package, don't get syntax trees
}
appendError := func(err error) {
// Convert various error types into the one true Error.
@ -797,44 +794,24 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
lpkg.Errors = append(lpkg.Errors, errs...)
}
if lpkg.needsyntax {
if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" {
// The config requested loading sources and types, but sources are missing.
// Add an error to the package and fall back to loading from export data.
appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
ld.loadFromExportData(lpkg)
return // can't get syntax trees for this package
}
files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
for _, err := range errs {
appendError(err)
}
lpkg.Syntax = files
} else if lpkg.needtypesinfo {
log.Fatalf("Internal error: can't load package %s types info without loading syntax trees", lpkg.ID)
}
if !lpkg.needtypesinfo {
if !lpkg.needtypes {
// Need just syntax trees
if ld.Config.Mode&NeedTypes == 0 {
return
}
_, err := ld.loadFromExportData(lpkg)
if err == nil {
// Successfully types loaded from export data
return
}
log.Fatalf("Failed to load package %s from export data: %s", lpkg.ID, err)
}
if !lpkg.needtypes {
log.Fatal("Internal error: types will be loaded with types info")
}
if len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" {
// The config requested loading sources and types, but sources are missing.
// Add an error to the package and fall back to loading from export data.
appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
ld.loadFromExportData(lpkg)
return // can't get syntax trees for this package
}
lpkg.TypesInfo = &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
@ -874,7 +851,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
tc := &types.Config{
Importer: importer,
// Type-check bodies of functions only in initial packages.
// Type-check bodies of functions only in non-initial packages.
// Example: for import graph A->B->C and initial packages {A,C},
// we can ignore function bodies in B.
IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
@ -1139,49 +1116,16 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
return tpkg, nil
}
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportsFile != 0 ||
// If NeedTypes but not NeedTypesInfo we won't typecheck using sources, so we need export data.
(cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedTypesInfo == 0) ||
// If NeedTypesInfo but not NeedDeps, we're typechecking a package using its sources plus its dependencies' export data
(cfg.Mode&NeedTypesInfo != 0 && cfg.Mode&NeedDeps == 0)
}
func loadsDeps(cfg *Config) bool {
return cfg.Mode&NeedDeps != 0 ||
// Immediate dependencies information (at least, export data) is required to do typechecking
// on sources, which is required for the TypesInfo. In such cases we could load packages
// without deps and then call go list again for immediate dependencies, but it's typically
// much slower than running go list -deps=true once.
cfg.Mode&NeedTypesInfo != 0
}
// impliedLoadMode returns loadMode with it's dependencies
// impliedLoadMode returns loadMode with its dependencies.
func impliedLoadMode(loadMode LoadMode) LoadMode {
if loadMode&NeedTypesInfo != 0 && loadMode&NeedSyntax == 0 {
// When NeedTypesInfo is set we load types info from source code.
// For parsing the source code we need NeedSyntax.
loadMode |= NeedSyntax
}
if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 {
// When NeedTypesInfo is set we load types info from source code.
// We need immediate dependencies types information for that.
// NeedImports handles processing of immediate dependencies.
// If NeedTypesInfo, go/packages needs to do typechecking itself so it can
// associate type info with the AST. To do so, we need the export data
// for dependencies, which means we need to ask for the direct dependencies.
// NeedImports is used to ask for the direct dependencies.
loadMode |= NeedImports
}
if loadMode&NeedTypesInfo != 0 && loadMode&NeedTypes == 0 {
// When NeedTypesInfo is set we load types info from source code,
// this procedure also fills types.
loadMode |= NeedTypes
}
if loadMode&NeedTypesInfo != 0 && loadMode&NeedTypesSizes == 0 {
// Types loading requires types sizes (set in types.Config).
loadMode |= NeedTypesSizes
}
if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 {
// With NeedDeps we need to load at least direct dependencies.
// NeedImports is used to ask for the direct dependencies.
@ -1190,3 +1134,7 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
return loadMode
}
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}
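Reading the remaining hunks, impliedLoadMode now keeps only two implications: NeedTypesInfo pulls in NeedImports (so dependency export data is available for type checking) and NeedDeps pulls in NeedImports; usesExportData reduces to "export file requested, or types wanted without deps". The snippet below mirrors that reading with its own local LoadMode type; it is a sketch of the rule, not the vendored function.
package main

import "fmt"

type LoadMode int

const (
    NeedName LoadMode = 1 << iota
    NeedFiles
    NeedCompiledGoFiles
    NeedImports
    NeedDeps
    NeedExportsFile
    NeedTypes
    NeedSyntax
    NeedTypesInfo
    NeedTypesSizes
)

// implied mirrors the two rules left in impliedLoadMode after this change.
func implied(m LoadMode) LoadMode {
    if m&NeedTypesInfo != 0 {
        m |= NeedImports // typechecking from source needs direct deps' export data
    }
    if m&NeedDeps != 0 {
        m |= NeedImports // deps are reached through the import graph
    }
    return m
}

func main() {
    fmt.Println(implied(NeedTypesInfo)&NeedImports != 0) // true
    fmt.Println(implied(NeedDeps)&NeedImports != 0)      // true
}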

View File

@ -635,7 +635,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
// The type of x==y may be UntypedBool.
return emitConv(fn, cmp, DefaultType(tv.Type))
return emitConv(fn, cmp, types.Default(tv.Type))
default:
panic("illegal op in BinaryExpr: " + e.Op.String())
}
@ -1746,6 +1746,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
Index: k,
}
instr.setType(t.Elem())
instr.setPos(x.Pos())
v = fn.emit(instr)
case *types.Pointer: // *array
@ -1754,6 +1755,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
Index: k,
}
instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
instr.setPos(x.Pos())
v = emitLoad(fn, fn.emit(instr))
case *types.Slice:
@ -1762,6 +1764,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
Index: k,
}
instr.setType(types.NewPointer(t.Elem()))
instr.setPos(x.Pos())
v = emitLoad(fn, fn.emit(instr))
default:

View File

@ -53,7 +53,7 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre
//
func (f *Function) DomPreorder() []*BasicBlock {
n := len(f.Blocks)
order := make(byDomPreorder, n, n)
order := make(byDomPreorder, n)
copy(order, f.Blocks)
sort.Sort(order)
return order
@ -123,7 +123,7 @@ func buildDomTree(f *Function) {
n := len(f.Blocks)
// Allocate space for 5 contiguous [n]*BasicBlock arrays:
// sdom, parent, ancestor, preorder, buckets.
space := make([]*BasicBlock, 5*n, 5*n)
space := make([]*BasicBlock, 5*n)
lt := ltState{
sdom: space[0:n],
parent: space[n : 2*n],

View File

@ -204,7 +204,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
// Convert (non-nil) "untyped" literals to their default type.
if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
val = emitConv(f, val, DefaultType(ut_src))
val = emitConv(f, val, types.Default(ut_src))
}
f.Pkg.Prog.needMethodsOf(val.Type())

View File

@ -52,36 +52,6 @@ func recvType(obj *types.Func) types.Type {
return obj.Type().(*types.Signature).Recv().Type()
}
// DefaultType returns the default "typed" type for an "untyped" type;
// it returns the incoming type for all other types. The default type
// for untyped nil is untyped nil.
//
// Exported to ssa/interp.
//
// TODO(adonovan): use go/types.DefaultType after 1.8.
//
func DefaultType(typ types.Type) types.Type {
if t, ok := typ.(*types.Basic); ok {
k := t.Kind()
switch k {
case types.UntypedBool:
k = types.Bool
case types.UntypedInt:
k = types.Int
case types.UntypedRune:
k = types.Rune
case types.UntypedFloat:
k = types.Float64
case types.UntypedComplex:
k = types.Complex128
case types.UntypedString:
k = types.String
}
typ = types.Typ[k]
}
return typ
}
// logStack prints the formatted "start" message to stderr and
// returns a closure that prints the corresponding "end" message.
// Call using 'defer logStack(...)()' to show builder stack on panic.

View File

@ -4,6 +4,7 @@ package imports // import "golang.org/x/tools/imports"
import (
"go/build"
"os"
intimp "golang.org/x/tools/internal/imports"
)
@ -42,6 +43,10 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
Env: &intimp.ProcessEnv{
GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT,
GOFLAGS: os.Getenv("GOFLAGS"),
GO111MODULE: os.Getenv("GO111MODULE"),
GOPROXY: os.Getenv("GOPROXY"),
GOSUMDB: os.Getenv("GOSUMDB"),
Debug: Debug,
LocalPrefix: LocalPrefix,
},
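The facade above simply forwards GOFLAGS/GO111MODULE/GOPROXY/GOSUMDB into the internal ProcessEnv. For context, a typical use of the public golang.org/x/tools/imports API looks like the following; the source text is made up.
package main

import (
    "fmt"
    "log"

    "golang.org/x/tools/imports"
)

func main() {
    src := []byte("package demo\n\nfunc Hello() { fmt.Println(\"hi\") }\n")
    // goimports-style processing: should add the missing "fmt" import
    // and gofmt the result.
    out, err := imports.Process("demo.go", src, &imports.Options{Comments: true, TabIndent: true, TabWidth: 8})
    if err != nil {
        log.Fatal(err)
    }
    fmt.Print(string(out))
}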

View File

@ -14,14 +14,14 @@ import (
"sync"
)
// TraverseLink is used as a return value from WalkFuncs to indicate that the
// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
// symlink named in the call may be traversed.
var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
// SkipFiles is used as a return value from WalkFuncs to indicate that the
// ErrSkipFiles is used as a return value from WalkFuncs to indicate that the
// callback should not be called for any other files in the current directory.
// Child directories will still be traversed.
var SkipFiles = errors.New("fastwalk: skip remaining files in directory")
var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
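A hedged sketch of how the renamed sentinels are returned from a fastwalk callback. fastwalk is an internal package, so this only compiles from inside x/tools; the root path and the .pb.go filter are arbitrary choices for the example.
package main

import (
    "log"
    "os"
    "strings"

    "golang.org/x/tools/internal/fastwalk"
)

func main() {
    err := fastwalk.Walk("/some/root", func(path string, typ os.FileMode) error {
        if typ == os.ModeSymlink {
            // Treat the symlink target as a directory and descend into it.
            return fastwalk.ErrTraverseLink
        }
        if strings.HasSuffix(path, ".pb.go") {
            // Assume a directory of generated files is uninteresting:
            // skip the remaining files in this directory.
            return fastwalk.ErrSkipFiles
        }
        return nil
    })
    if err != nil {
        log.Fatal(err)
    }
}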
// Walk is a faster implementation of filepath.Walk.
//
@ -167,7 +167,7 @@ func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
err := w.fn(joined, typ)
if typ == os.ModeSymlink {
if err == TraverseLink {
if err == ErrTraverseLink {
// Set callbackDone so we don't call it twice for both the
// symlink-as-symlink and the symlink-as-directory later:
w.enqueue(walkItem{dir: joined, callbackDone: true})

View File

@ -26,7 +26,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue
}
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
if err == SkipFiles {
if err == ErrSkipFiles {
skipFiles = true
continue
}

View File

@ -66,7 +66,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue
}
if err := fn(dirName, name, typ); err != nil {
if err == SkipFiles {
if err == ErrSkipFiles {
skipFiles = true
continue
}

View File

@ -16,6 +16,7 @@ import (
"os"
"path/filepath"
"strings"
"time"
"golang.org/x/tools/internal/fastwalk"
)
@ -76,6 +77,7 @@ func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root
}
}
// walkDir creates a walker and starts fastwalk with this walker.
func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
if _, err := os.Stat(root.Path); os.IsNotExist(err) {
if opts.Debug {
@ -83,8 +85,9 @@ func walkDir(root Root, add func(Root, string), skip func(root Root, dir string)
}
return
}
start := time.Now()
if opts.Debug {
log.Printf("scanning %s", root.Path)
log.Printf("gopathwalk: scanning %s", root.Path)
}
w := &walker{
root: root,
@ -98,7 +101,7 @@ func walkDir(root Root, add func(Root, string), skip func(root Root, dir string)
}
if opts.Debug {
log.Printf("scanned %s", root.Path)
log.Printf("gopathwalk: scanned %s in %v", root.Path, time.Since(start))
}
}
@ -112,7 +115,7 @@ type walker struct {
ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
}
// init initializes the walker based on its Options.
// init initializes the walker based on its Options
func (w *walker) init() {
var ignoredPaths []string
if w.root.Type == RootModuleCache {
@ -165,6 +168,7 @@ func (w *walker) getIgnoredDirs(path string) []string {
return ignoredDirs
}
// shouldSkipDir reports whether the file should be skipped or not.
func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
for _, ignoredDir := range w.ignoredDirs {
if os.SameFile(fi, ignoredDir) {
@ -178,20 +182,21 @@ func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
return false
}
// walk walks through the given path.
func (w *walker) walk(path string, typ os.FileMode) error {
dir := filepath.Dir(path)
if typ.IsRegular() {
if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
// Doesn't make sense to have regular files
// directly in your $GOPATH/src or $GOROOT/src.
return fastwalk.SkipFiles
return fastwalk.ErrSkipFiles
}
if !strings.HasSuffix(path, ".go") {
return nil
}
w.add(w.root, dir)
return fastwalk.SkipFiles
return fastwalk.ErrSkipFiles
}
if typ == os.ModeDir {
base := filepath.Base(path)
@ -219,7 +224,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
return nil
}
if w.shouldTraverse(dir, fi) {
return fastwalk.TraverseLink
return fastwalk.ErrTraverseLink
}
}
return nil
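For reference, this is roughly how the resolvers elsewhere in this change drive gopathwalk: build the roots from a build.Context and pass an add callback that records candidate package directories. gopathwalk is internal to x/tools, so the snippet is illustrative only.
package main

import (
    "fmt"
    "go/build"

    "golang.org/x/tools/internal/gopathwalk"
)

func main() {
    roots := gopathwalk.SrcDirsRoots(&build.Default)
    add := func(root gopathwalk.Root, dir string) {
        // dir is a directory that may contain a Go package.
        fmt.Println(root.Type, dir)
    }
    gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: false, ModulesEnabled: false})
}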

View File

@ -17,6 +17,7 @@ import (
"os/exec"
"path"
"path/filepath"
"reflect"
"sort"
"strconv"
"strings"
@ -26,7 +27,6 @@ import (
"unicode/utf8"
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/gopathwalk"
)
@ -82,6 +82,7 @@ type ImportFix struct {
IdentName string
// FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
FixType ImportFixType
Relevance int // see pkg
}
// An ImportInfo represents a single import statement.
@ -301,7 +302,7 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
if known != nil && known.name != "" {
return known.name
}
return importPathToAssumedName(imp.ImportPath)
return ImportPathToAssumedName(imp.ImportPath)
}
// load reads in everything necessary to run a pass, and reports whether the
@ -434,7 +435,7 @@ func (p *pass) importSpecName(imp *ImportInfo) string {
}
ident := p.importIdentifier(imp)
if ident == importPathToAssumedName(imp.ImportPath) {
if ident == ImportPathToAssumedName(imp.ImportPath) {
return "" // ident not needed since the assumed and real names are the same.
}
return ident
@ -475,9 +476,9 @@ func (p *pass) assumeSiblingImportsValid() {
}
for left, rights := range refs {
if imp, ok := importsByName[left]; ok {
if _, ok := stdlib[imp.ImportPath]; ok {
if m, ok := stdlib[imp.ImportPath]; ok {
// We have the stdlib in memory; no need to guess.
rights = stdlib[imp.ImportPath]
rights = copyExports(m)
}
p.addCandidate(imp, &packageInfo{
// no name; we already know it.
@ -584,29 +585,163 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
return fixes, nil
}
// getAllCandidates gets all of the candidates to be imported, regardless of whether they are needed.
func getAllCandidates(filename string, env *ProcessEnv) ([]ImportFix, error) {
// TODO(suzmue): scan for additional candidates and filter out
// current package.
// Highest relevance, used for the standard library. Chosen arbitrarily to
// match pre-existing gopls code.
const MaxRelevance = 7
// Get the stdlib candidates and sort by import path.
var paths []string
for importPath := range stdlib {
paths = append(paths, importPath)
// getCandidatePkgs works with the passed callback to find all acceptable packages.
// It deduplicates by import path, and uses a cached stdlib rather than reading
// from disk.
func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
notSelf := func(p *pkg) bool {
return p.packageName != filePkg || p.dir != filepath.Dir(filename)
}
// Start off with the standard library.
for importPath, exports := range stdlib {
p := &pkg{
dir: filepath.Join(env.GOROOT, "src", importPath),
importPathShort: importPath,
packageName: path.Base(importPath),
relevance: MaxRelevance,
}
if notSelf(p) && wrappedCallback.packageNameLoaded(p) {
wrappedCallback.exportsLoaded(p, exports)
}
}
sort.Strings(paths)
var imports []ImportFix
for _, importPath := range paths {
imports = append(imports, ImportFix{
StmtInfo: ImportInfo{
ImportPath: importPath,
var mu sync.Mutex
dupCheck := map[string]struct{}{}
scanFilter := &scanCallback{
rootFound: func(root gopathwalk.Root) bool {
// Exclude goroot results -- getting them is relatively expensive, not cached,
// and generally redundant with the in-memory version.
return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
},
IdentName: path.Base(importPath),
FixType: AddImport,
})
dirFound: wrappedCallback.dirFound,
packageNameLoaded: func(pkg *pkg) bool {
mu.Lock()
defer mu.Unlock()
if _, ok := dupCheck[pkg.importPathShort]; ok {
return false
}
return imports, nil
dupCheck[pkg.importPathShort] = struct{}{}
return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
},
exportsLoaded: func(pkg *pkg, exports []string) {
// If we're an x_test, load the package under test's test variant.
if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
var err error
_, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
if err != nil {
return
}
}
wrappedCallback.exportsLoaded(pkg, exports)
},
}
return env.GetResolver().scan(ctx, scanFilter)
}
func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) map[string]int {
result := make(map[string]int)
for _, path := range paths {
result[path] = env.GetResolver().scoreImportPath(ctx, path)
}
return result
}
func PrimeCache(ctx context.Context, env *ProcessEnv) error {
// Fully scan the disk for directories, but don't actually read any Go files.
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true
},
dirFound: func(pkg *pkg) bool {
return false
},
packageNameLoaded: func(pkg *pkg) bool {
return false
},
}
return getCandidatePkgs(ctx, callback, "", "", env)
}
func candidateImportName(pkg *pkg) string {
if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName {
return pkg.packageName
}
return ""
}
// getAllCandidates gets all of the candidates to be imported, regardless of whether they are needed.
func getAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true
},
dirFound: func(pkg *pkg) bool {
if !canUse(filename, pkg.dir) {
return false
}
// Try the assumed package name first, then a simpler path match
// in case of packages named vN, which are not uncommon.
return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
},
packageNameLoaded: func(pkg *pkg) bool {
if !strings.HasPrefix(pkg.packageName, searchPrefix) {
return false
}
wrapped(ImportFix{
StmtInfo: ImportInfo{
ImportPath: pkg.importPathShort,
Name: candidateImportName(pkg),
},
IdentName: pkg.packageName,
FixType: AddImport,
Relevance: pkg.relevance,
})
return false
},
}
return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// A PackageExport is a package and its exports.
type PackageExport struct {
Fix *ImportFix
Exports []string
}
func getPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true
},
dirFound: func(pkg *pkg) bool {
return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
},
packageNameLoaded: func(pkg *pkg) bool {
return pkg.packageName == searchPkg
},
exportsLoaded: func(pkg *pkg, exports []string) {
sort.Strings(exports)
wrapped(PackageExport{
Fix: &ImportFix{
StmtInfo: ImportInfo{
ImportPath: pkg.importPathShort,
Name: candidateImportName(pkg),
},
IdentName: pkg.packageName,
FixType: AddImport,
Relevance: pkg.relevance,
},
Exports: exports,
})
},
}
return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// ProcessEnv contains environment variables and settings that affect the use of
@ -620,15 +755,19 @@ type ProcessEnv struct {
GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string
WorkingDir string
// If true, use go/packages regardless of the environment.
ForceGoPackages bool
// Logf is the default logger for the ProcessEnv.
Logf func(format string, args ...interface{})
resolver Resolver
}
// CopyConfig copies the env's configuration into a new env.
func (e *ProcessEnv) CopyConfig() *ProcessEnv {
copy := *e
copy.resolver = nil
return &copy
}
func (e *ProcessEnv) env() []string {
env := os.Environ()
add := func(k, v string) {
@ -652,32 +791,34 @@ func (e *ProcessEnv) GetResolver() Resolver {
if e.resolver != nil {
return e.resolver
}
if e.ForceGoPackages {
e.resolver = &goPackagesResolver{env: e}
return e.resolver
}
out, err := e.invokeGo("env", "GOMOD")
if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
e.resolver = &gopathResolver{env: e}
e.resolver = newGopathResolver(e)
return e.resolver
}
e.resolver = &ModuleResolver{env: e}
e.resolver = newModuleResolver(e)
return e.resolver
}
func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
return &packages.Config{
Mode: mode,
Dir: e.WorkingDir,
Env: e.env(),
}
}
func (e *ProcessEnv) buildContext() *build.Context {
ctx := build.Default
ctx.GOROOT = e.GOROOT
ctx.GOPATH = e.GOPATH
// As of Go 1.14, build.Context has a Dir field
// (see golang.org/issue/34860).
// Populate it only if present.
rc := reflect.ValueOf(&ctx).Elem()
dir := rc.FieldByName("Dir")
if !dir.IsValid() {
// Working drafts of Go 1.14 named the field "WorkingDir" instead.
// TODO(bcmills): Remove this case after the Go 1.14 beta has been released.
dir = rc.FieldByName("WorkingDir")
}
if dir.IsValid() && dir.Kind() == reflect.String {
dir.SetString(e.WorkingDir)
}
return &ctx
}
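The reflection above exists because build.Context only gained a Dir field in Go 1.14 (briefly named WorkingDir in drafts). The same "set a string field by name only if it exists" pattern, reduced to a standalone sketch with a made-up struct:
package main

import (
    "fmt"
    "reflect"
)

type config struct {
    Dir string // pretend this field may or may not exist across versions
}

// setStringField sets the named string field on *v if such a field exists.
func setStringField(v interface{}, name, value string) bool {
    f := reflect.ValueOf(v).Elem().FieldByName(name)
    if f.IsValid() && f.Kind() == reflect.String && f.CanSet() {
        f.SetString(value)
        return true
    }
    return false
}

func main() {
    c := config{}
    fmt.Println(setStringField(&c, "Dir", "/tmp/work"), c.Dir)        // true /tmp/work
    fmt.Println(setStringField(&c, "WorkingDir", "/tmp/work"), c.Dir) // false /tmp/work
}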
@ -712,9 +853,10 @@ func cmdDebugStr(cmd *exec.Cmd) string {
func addStdlibCandidates(pass *pass, refs references) {
add := func(pkg string) {
exports := copyExports(stdlib[pkg])
pass.addCandidate(
&ImportInfo{ImportPath: pkg},
&packageInfo{name: path.Base(pkg), exports: stdlib[pkg]})
&packageInfo{name: path.Base(pkg), exports: exports})
}
for left := range refs {
if left == "rand" {
@ -735,88 +877,65 @@ func addStdlibCandidates(pass *pass, refs references) {
type Resolver interface {
// loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
// scan finds (at least) the packages satisfying refs. The returned slice is unordered.
scan(refs references) ([]*pkg, error)
// scan works with callback to search for packages. See scanCallback for details.
scan(ctx context.Context, callback *scanCallback) error
// loadExports returns the set of exported symbols in the package at dir.
// It returns an error if the package name in dir does not match expectPackage.
// loadExports may be called concurrently.
loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error)
loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
// scoreImportPath returns the relevance for an import path.
scoreImportPath(ctx context.Context, path string) int
ClearForNewScan()
}
// gopackagesResolver implements resolver for GOPATH and module workspaces using go/packages.
type goPackagesResolver struct {
env *ProcessEnv
}
func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
if len(importPaths) == 0 {
return nil, nil
}
cfg := r.env.newPackagesConfig(packages.LoadFiles)
pkgs, err := packages.Load(cfg, importPaths...)
if err != nil {
return nil, err
}
names := map[string]string{}
for _, pkg := range pkgs {
names[VendorlessPath(pkg.PkgPath)] = pkg.Name
}
// We may not have found all the packages. Guess the rest.
for _, path := range importPaths {
if _, ok := names[path]; ok {
continue
}
names[path] = importPathToAssumedName(path)
}
return names, nil
}
func (r *goPackagesResolver) scan(refs references) ([]*pkg, error) {
var loadQueries []string
for pkgName := range refs {
loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName)
}
sort.Strings(loadQueries)
cfg := r.env.newPackagesConfig(packages.LoadFiles)
goPackages, err := packages.Load(cfg, loadQueries...)
if err != nil {
return nil, err
}
var scan []*pkg
for _, goPackage := range goPackages {
scan = append(scan, &pkg{
dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
importPathShort: VendorlessPath(goPackage.PkgPath),
goPackage: goPackage,
})
}
return scan, nil
}
func (r *goPackagesResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
if pkg.goPackage == nil {
return nil, fmt.Errorf("goPackage not set")
}
exports := map[string]bool{}
fset := token.NewFileSet()
for _, fname := range pkg.goPackage.CompiledGoFiles {
f, err := parser.ParseFile(fset, fname, nil, 0)
if err != nil {
return nil, fmt.Errorf("parsing %s: %v", fname, err)
}
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
}
}
}
return exports, nil
// A scanCallback controls a call to scan and receives its results.
// In general, minor errors will be silently discarded; a user should not
// expect to receive a full series of calls for everything.
type scanCallback struct {
// rootFound is called before scanning a new root dir. If it returns true,
// the root will be scanned. Returning false will not necessarily prevent
// directories from that root making it to dirFound.
rootFound func(gopathwalk.Root) bool
// dirFound is called when a directory is found that is possibly a Go package.
// pkg will be populated with everything except packageName.
// If it returns true, the package's name will be loaded.
dirFound func(pkg *pkg) bool
// packageNameLoaded is called when a package is found and its name is loaded.
// If it returns true, the package's exports will be loaded.
packageNameLoaded func(pkg *pkg) bool
// exportsLoaded is called when a package's exports have been loaded.
exportsLoaded func(pkg *pkg, exports []string)
}
func addExternalCandidates(pass *pass, refs references, filename string) error {
dirScan, err := pass.env.GetResolver().scan(refs)
var mu sync.Mutex
found := make(map[string][]pkgDistance)
callback := &scanCallback{
rootFound: func(gopathwalk.Root) bool {
return true // We want everything.
},
dirFound: func(pkg *pkg) bool {
return pkgIsCandidate(filename, refs, pkg)
},
packageNameLoaded: func(pkg *pkg) bool {
if _, want := refs[pkg.packageName]; !want {
return false
}
if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
// The candidate is in the same directory and has the
// same package name. Don't try to import ourselves.
return false
}
if !canUse(filename, pkg.dir) {
return false
}
mu.Lock()
defer mu.Unlock()
found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
return false // We'll do our own loading after we sort.
},
}
err := pass.env.GetResolver().scan(context.Background(), callback)
if err != nil {
return err
}
@ -843,7 +962,7 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
go func(pkgName string, symbols map[string]bool) {
defer wg.Done()
found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename)
found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
if err != nil {
firstErrOnce.Do(func() {
@ -887,7 +1006,7 @@ func notIdentifier(ch rune) bool {
ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch)))
}
// importPathToAssumedName returns the assumed package name of an import path.
// ImportPathToAssumedName returns the assumed package name of an import path.
// It does this using only string parsing of the import path.
// It picks the last element of the path that does not look like a major
// version, and then picks the valid identifier off the start of that element.
@ -895,7 +1014,7 @@ func notIdentifier(ch rune) bool {
// clarity.
// This function could be moved to a standard package and exported if we want
// for use in other tools.
func importPathToAssumedName(importPath string) string {
func ImportPathToAssumedName(importPath string) string {
base := path.Base(importPath)
if strings.HasPrefix(base, "v") {
if _, err := strconv.Atoi(base[1:]); err == nil {
@ -915,6 +1034,32 @@ func importPathToAssumedName(importPath string) string {
// gopathResolver implements resolver for GOPATH workspaces.
type gopathResolver struct {
env *ProcessEnv
walked bool
cache *dirInfoCache
scanSema chan struct{} // scanSema prevents concurrent scans.
}
func newGopathResolver(env *ProcessEnv) *gopathResolver {
r := &gopathResolver{
env: env,
cache: &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
},
scanSema: make(chan struct{}, 1),
}
r.scanSema <- struct{}{}
return r
}
func (r *gopathResolver) ClearForNewScan() {
<-r.scanSema
r.cache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
}
r.walked = false
r.scanSema <- struct{}{}
}
func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
@ -997,9 +1142,10 @@ func packageDirToName(dir string) (packageName string, err error) {
}
type pkg struct {
goPackage *packages.Package
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
importPathShort string // vendorless import path ("net/http", "a/b")
packageName string // package name loaded from source if requested
relevance int // a weakly-defined score of how relevant a package is. 0 is most relevant.
}
type pkgDistance struct {
@ -1043,32 +1189,101 @@ func distance(basepath, targetpath string) int {
return strings.Count(p, string(filepath.Separator)) + 1
}
func (r *gopathResolver) scan(_ references) ([]*pkg, error) {
dupCheck := make(map[string]bool)
var result []*pkg
var mu sync.Mutex
func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
add := func(root gopathwalk.Root, dir string) {
mu.Lock()
defer mu.Unlock()
if _, dup := dupCheck[dir]; dup {
// We assume cached directories have not changed. We can skip them and their
// children.
if _, ok := r.cache.Load(dir); ok {
return
}
dupCheck[dir] = true
importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
result = append(result, &pkg{
importPathShort: VendorlessPath(importpath),
info := directoryPackageInfo{
status: directoryScanned,
dir: dir,
})
rootType: root.Type,
nonCanonicalImportPath: VendorlessPath(importpath),
}
gopathwalk.Walk(gopathwalk.SrcDirsRoots(r.env.buildContext()), add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
return result, nil
r.cache.Store(dir, info)
}
processDir := func(info directoryPackageInfo) {
// Skip this directory if we were not able to get the package information successfully.
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
return
}
p := &pkg{
importPathShort: info.nonCanonicalImportPath,
dir: info.dir,
relevance: MaxRelevance - 1,
}
if info.rootType == gopathwalk.RootGOROOT {
p.relevance = MaxRelevance
}
if !callback.dirFound(p) {
return
}
var err error
p.packageName, err = r.cache.CachePackageName(info)
if err != nil {
return
}
if !callback.packageNameLoaded(p) {
return
}
if _, exports, err := r.loadExports(ctx, p, false); err == nil {
callback.exportsLoaded(p, exports)
}
}
stop := r.cache.ScanAndListen(ctx, processDir)
defer stop()
// The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), callback.rootFound)
// We can't cancel walks, because we need them to finish to have a usable
// cache. Instead, run them in a separate goroutine and detach.
scanDone := make(chan struct{})
go func() {
select {
case <-ctx.Done():
return
case <-r.scanSema:
}
defer func() { r.scanSema <- struct{}{} }()
gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
close(scanDone)
}()
select {
case <-ctx.Done():
case <-scanDone:
}
return nil
}
func (r *gopathResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
return loadExportsFromFiles(ctx, r.env, expectPackage, pkg.dir)
func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) int {
if _, ok := stdlib[path]; ok {
return MaxRelevance
}
return MaxRelevance - 1
}
func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
var result []gopathwalk.Root
for _, root := range roots {
if !include(root) {
continue
}
result = append(result, root)
}
return result
}
func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
return r.cache.CacheExports(ctx, r.env, info)
}
return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
// VendorlessPath returns the devendorized version of the import path ipath.
@ -1084,18 +1299,18 @@ func VendorlessPath(ipath string) string {
return ipath
}
func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, expectPackage string, dir string) (map[string]bool, error) {
exports := make(map[string]bool)
func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
var exports []string
// Look for non-test, buildable .go files which could provide exports.
all, err := ioutil.ReadDir(dir)
if err != nil {
return nil, err
return "", nil, err
}
var files []os.FileInfo
for _, fi := range all {
name := fi.Name()
if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
continue
}
match, err := env.buildContext().MatchFile(dir, fi.Name())
@ -1106,74 +1321,51 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, expectPackage st
}
if len(files) == 0 {
return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
}
var pkgName string
fset := token.NewFileSet()
for _, fi := range files {
select {
case <-ctx.Done():
return nil, ctx.Err()
return "", nil, ctx.Err()
default:
}
fullFile := filepath.Join(dir, fi.Name())
f, err := parser.ParseFile(fset, fullFile, nil, 0)
if err != nil {
return nil, fmt.Errorf("parsing %s: %v", fullFile, err)
return "", nil, fmt.Errorf("parsing %s: %v", fullFile, err)
}
pkgName := f.Name.Name
if pkgName == "documentation" {
if f.Name.Name == "documentation" {
// Special case from go/build.ImportDir, not
// handled by MatchFile above.
continue
}
if pkgName != expectPackage {
return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName)
if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
// x_test package. We want internal test files only.
continue
}
pkgName = f.Name.Name
for name := range f.Scope.Objects {
if ast.IsExported(name) {
exports[name] = true
exports = append(exports, name)
}
}
}
if env.Debug {
exportList := make([]string, 0, len(exports))
for k := range exports {
exportList = append(exportList, k)
sortedExports := append([]string(nil), exports...)
sort.Strings(sortedExports)
env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", "))
}
sort.Strings(exportList)
env.Logf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", "))
}
return exports, nil
return pkgName, exports, nil
}
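The heart of loadExportsFromFiles is parsing each file and collecting exported top-level names from its file scope. A self-contained sketch of just that step, with invented source text:
package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
    "sort"
)

func main() {
    src := `package demo

func Exported()   {}
func unexported() {}

const Version = "1.0"
`
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "demo.go", src, 0)
    if err != nil {
        panic(err)
    }
    var exports []string
    for name := range f.Scope.Objects { // top-level declarations
        if ast.IsExported(name) {
            exports = append(exports, name)
        }
    }
    sort.Strings(exports)
    fmt.Println(exports) // [Exported Version]
}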
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil)
func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
pkgDir, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
pkgDir = filepath.Dir(pkgDir)
// Find candidate packages, looking only at their directory names first.
var candidates []pkgDistance
for _, pkg := range dirScan {
if pkg.dir == pkgDir && pass.f.Name.Name == pkgName {
// The candidate is in the same directory and has the
// same package name. Don't try to import ourselves.
continue
}
if pkgIsCandidate(filename, pkgName, pkg) {
candidates = append(candidates, pkgDistance{
pkg: pkg,
distance: distance(pkgDir, pkg.dir),
})
}
}
func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
// Sort the candidates by their import package length,
// assuming that shorter package names are better than long
// ones. Note that this sorts by the de-vendored name, so
@ -1186,7 +1378,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
}
// Collect exports for packages with matching names.
rescv := make([]chan *pkg, len(candidates))
for i := range candidates {
rescv[i] = make(chan *pkg, 1)
@ -1221,7 +1412,9 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
}
exports, err := pass.env.GetResolver().loadExports(ctx, pkgName, c.pkg)
// If we're an x_test, load the package under test's test variant.
includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
_, exports, err := pass.env.GetResolver().loadExports(ctx, c.pkg, includeTest)
if err != nil {
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
@ -1230,10 +1423,15 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
return
}
exportsMap := make(map[string]bool, len(exports))
for _, sym := range exports {
exportsMap[sym] = true
}
// If it doesn't have the right
// symbols, send nil to mean no match.
for symbol := range symbols {
if !exports[symbol] {
if !exportsMap[symbol] {
resc <- nil
return
}
@ -1265,7 +1463,7 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
// Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) {
return false
@ -1283,6 +1481,7 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
// "bar", which is strongly discouraged
// anyway. There's no reason goimports needs
// to be slow just to accommodate that.
for pkgIdent := range refs {
lastTwo := lastTwoComponents(pkg.importPathShort)
if strings.Contains(lastTwo, pkgIdent) {
return true
@ -1293,7 +1492,7 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
return true
}
}
}
return false
}
@ -1383,3 +1582,11 @@ type visitFn func(node ast.Node) ast.Visitor
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
return fn(node)
}
func copyExports(pkg []string) map[string]bool {
m := make(map[string]bool, len(pkg))
for _, v := range pkg {
m[v] = true
}
return m
}

View File

@ -11,6 +11,7 @@ package imports
import (
"bufio"
"bytes"
"context"
"fmt"
"go/ast"
"go/build"
@ -21,6 +22,7 @@ import (
"io"
"io/ioutil"
"log"
"os"
"regexp"
"strconv"
"strings"
@ -83,33 +85,54 @@ func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix,
return getFixes(fileSet, file, filename, opt.Env)
}
// ApplyFix will apply all of the fixes to the file and format it.
func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options) (formatted []byte, err error) {
// ApplyFixes applies all of the fixes to the file and formats it. extraMode
// is added in when parsing the file.
func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) {
src, opt, err = initialize(filename, src, opt)
if err != nil {
return nil, err
}
// Don't use parse() -- we don't care about fragments or statement lists
// here, and we need to work with unparseable files.
fileSet := token.NewFileSet()
file, adjust, err := parse(fileSet, filename, src, opt)
if err != nil {
parserMode := parser.Mode(0)
if opt.Comments {
parserMode |= parser.ParseComments
}
if opt.AllErrors {
parserMode |= parser.AllErrors
}
parserMode |= extraMode
file, err := parser.ParseFile(fileSet, filename, src, parserMode)
if file == nil {
return nil, err
}
// Apply the fixes to the file.
apply(fileSet, file, fixes)
return formatFile(fileSet, file, src, adjust, opt)
return formatFile(fileSet, file, src, nil, opt)
}
// GetAllCandidates gets all of the standard library candidate packages to import in
// sorted order on import path.
func GetAllCandidates(filename string, opt *Options) (pkgs []ImportFix, err error) {
_, opt, err = initialize(filename, []byte{}, opt)
// GetAllCandidates gets all of the packages starting with prefix that can be
// imported by filename, sorted by import path.
func GetAllCandidates(ctx context.Context, callback func(ImportFix), searchPrefix, filename, filePkg string, opt *Options) error {
_, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
return nil, err
return err
}
return getAllCandidates(filename, opt.Env)
return getAllCandidates(ctx, callback, searchPrefix, filename, filePkg, opt.Env)
}
// GetPackageExports returns all known packages with name pkg and their exports.
func GetPackageExports(ctx context.Context, callback func(PackageExport), searchPkg, filename, filePkg string, opt *Options) error {
_, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
return err
}
return getPackageExports(ctx, callback, searchPkg, filename, filePkg, opt.Env)
}
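A sketch of calling the new callback-based API. Since internal/imports cannot be imported from outside x/tools, and the file name, search prefix, and package name below are invented, treat this as an illustration of the signatures only.
package main

import (
    "context"
    "fmt"

    "golang.org/x/tools/internal/imports"
)

func main() {
    opt := &imports.Options{Comments: true, TabIndent: true, TabWidth: 8}
    err := imports.GetAllCandidates(context.Background(), func(fix imports.ImportFix) {
        // Each candidate arrives through the callback as it is found.
        fmt.Println(fix.StmtInfo.ImportPath, fix.IdentName, fix.Relevance)
    }, "fm", "/home/me/proj/main.go", "main", opt)
    if err != nil {
        fmt.Println(err)
    }
}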
// initialize sets the values for opt and src.
@ -126,6 +149,10 @@ func initialize(filename string, src []byte, opt *Options) ([]byte, *Options, er
opt.Env = &ProcessEnv{
GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT,
GOFLAGS: os.Getenv("GOFLAGS"),
GO111MODULE: os.Getenv("GO111MODULE"),
GOPROXY: os.Getenv("GOPROXY"),
GOSUMDB: os.Getenv("GOSUMDB"),
}
}

View File

@ -13,11 +13,10 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"
"golang.org/x/tools/internal/gopathwalk"
"golang.org/x/tools/internal/module"
"golang.org/x/tools/internal/semver"
)
// ModuleResolver implements resolver for modules using the go command as little
@ -25,38 +24,128 @@ import (
type ModuleResolver struct {
env *ProcessEnv
moduleCacheDir string
dummyVendorMod *ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
roots []gopathwalk.Root
scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
scannedRoots map[gopathwalk.Root]bool
Initialized bool
Main *ModuleJSON
ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
ModsByDir []*ModuleJSON // ...or Dir.
initialized bool
main *ModuleJSON
modsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
modsByDir []*ModuleJSON // ...or Dir.
// moduleCacheInfo stores information about the module cache.
moduleCacheInfo *moduleCacheInfo
// moduleCacheCache stores information about the module cache.
moduleCacheCache *dirInfoCache
otherCache *dirInfoCache
}
type ModuleJSON struct {
Path string // module path
Version string // module version
Versions []string // available module versions (with -versions)
Replace *ModuleJSON // replaced by this module
Time *time.Time // time version was created
Update *ModuleJSON // available update, if any (with -u)
Main bool // is this the main module?
Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any
Error *ModuleErrorJSON // error loading module
GoVersion string // go version used in module
}
type ModuleErrorJSON struct {
Err string // the error itself
func newModuleResolver(e *ProcessEnv) *ModuleResolver {
r := &ModuleResolver{
env: e,
scanSema: make(chan struct{}, 1),
}
r.scanSema <- struct{}{}
return r
}
func (r *ModuleResolver) init() error {
if r.Initialized {
if r.initialized {
return nil
}
mainMod, vendorEnabled, err := vendorEnabled(r.env)
if err != nil {
return err
}
if mainMod != nil && vendorEnabled {
// Vendor mode is on, so all the non-Main modules are irrelevant,
// and we need to search /vendor for everything.
r.main = mainMod
r.dummyVendorMod = &ModuleJSON{
Path: "",
Dir: filepath.Join(mainMod.Dir, "vendor"),
}
r.modsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
r.modsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
} else {
// Vendor mode is off, so run go list -m ... to find everything.
r.initAllMods()
}
r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
sort.Slice(r.modsByModPath, func(i, j int) bool {
count := func(x int) int {
return strings.Count(r.modsByModPath[x].Path, "/")
}
return count(j) < count(i) // descending order
})
sort.Slice(r.modsByDir, func(i, j int) bool {
count := func(x int) int {
return strings.Count(r.modsByDir[x].Dir, "/")
}
return count(j) < count(i) // descending order
})
r.roots = []gopathwalk.Root{
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
}
if r.main != nil {
r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
}
if vendorEnabled {
r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
} else {
addDep := func(mod *ModuleJSON) {
if mod.Replace == nil {
// This is redundant with the cache, but we'll skip it cheaply enough.
r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache})
} else {
r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
}
}
// Walk dependent modules before scanning the full mod cache, direct deps first.
for _, mod := range r.modsByModPath {
if !mod.Indirect && !mod.Main {
addDep(mod)
}
}
for _, mod := range r.modsByModPath {
if mod.Indirect && !mod.Main {
addDep(mod)
}
}
r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
}
r.scannedRoots = map[gopathwalk.Root]bool{}
if r.moduleCacheCache == nil {
r.moduleCacheCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
}
}
if r.otherCache == nil {
r.otherCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
}
}
r.initialized = true
return nil
}
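
The two sort.Slice calls above put the deepest module path (and directory) first, so the prefix matches in findPackage and findModuleByDir pick the most specific module. A small self-contained sketch of the same ordering on plain strings:

package main

import (
    "fmt"
    "sort"
    "strings"
)

func main() {
    paths := []string{"example.com/m", "example.com/m/sub/nested", "example.com/m/sub"}
    // Descending by number of path separators, as in init(): deeper module
    // paths sort first.
    sort.Slice(paths, func(i, j int) bool {
        return strings.Count(paths[j], "/") < strings.Count(paths[i], "/")
    })
    fmt.Println(paths)
    // [example.com/m/sub/nested example.com/m/sub example.com/m]
}
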
func (r *ModuleResolver) initAllMods() error {
stdout, err := r.env.invokeGo("list", "-m", "-json", "...")
if err != nil {
return err
@ -73,42 +162,43 @@ func (r *ModuleResolver) init() error {
// Can't do anything with a module that's not downloaded.
continue
}
r.ModsByModPath = append(r.ModsByModPath, mod)
r.ModsByDir = append(r.ModsByDir, mod)
r.modsByModPath = append(r.modsByModPath, mod)
r.modsByDir = append(r.modsByDir, mod)
if mod.Main {
r.Main = mod
r.main = mod
}
}
sort.Slice(r.ModsByModPath, func(i, j int) bool {
count := func(x int) int {
return strings.Count(r.ModsByModPath[x].Path, "/")
}
return count(j) < count(i) // descending order
})
sort.Slice(r.ModsByDir, func(i, j int) bool {
count := func(x int) int {
return strings.Count(r.ModsByDir[x].Dir, "/")
}
return count(j) < count(i) // descending order
})
if r.moduleCacheInfo == nil {
r.moduleCacheInfo = &moduleCacheInfo{
modCacheDirInfo: make(map[string]*directoryPackageInfo),
}
}
r.Initialized = true
return nil
}
func (r *ModuleResolver) ClearForNewScan() {
<-r.scanSema
r.scannedRoots = map[gopathwalk.Root]bool{}
r.otherCache = &dirInfoCache{
dirs: map[string]*directoryPackageInfo{},
listeners: map[*int]cacheListener{},
}
r.scanSema <- struct{}{}
}
func (r *ModuleResolver) ClearForNewMod() {
<-r.scanSema
*r = ModuleResolver{
env: r.env,
moduleCacheCache: r.moduleCacheCache,
otherCache: r.otherCache,
scanSema: r.scanSema,
}
r.init()
r.scanSema <- struct{}{}
}
// findPackage returns the module and directory that contains the package at
// the given import path, or returns nil, "" if no module is in scope.
func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
// This can't find packages in the stdlib, but that's harmless for all
// the existing code paths.
for _, m := range r.ModsByModPath {
for _, m := range r.modsByModPath {
if !strings.HasPrefix(importPath, m.Path) {
continue
}
@ -118,22 +208,31 @@ func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
continue
}
if info, ok := r.moduleCacheInfo.Load(pkgDir); ok {
if packageScanned, err := info.reachedStatus(directoryScanned); packageScanned {
if info, ok := r.cacheLoad(pkgDir); ok {
if loaded, err := info.reachedStatus(nameLoaded); loaded {
if err != nil {
// There was some error with scanning this directory.
// It does not contain a valid package.
continue
continue // No package in this dir.
}
return m, pkgDir
}
if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil {
continue // Dir is unreadable, etc.
}
// This is slightly wrong: a directory doesn't have to have an
// importable package to count as a package for package-to-module
// resolution. package main or _test files should count but
// don't.
// TODO(heschi): fix this.
if _, err := r.cachePackageName(info); err == nil {
return m, pkgDir
}
}
// Not cached. Read the filesystem.
pkgFiles, err := ioutil.ReadDir(pkgDir)
if err != nil {
continue
}
// A module only contains a package if it has buildable go
// files in that directory. If not, it could be provided by an
// outer module. See #29736.
@ -146,6 +245,40 @@ func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
return nil, ""
}
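
Because modsByModPath is ordered deepest-first, the first module whose Path prefixes the import path is the most specific candidate, and the remainder of the import path names a directory under that module. A stripped-down sketch of that lookup (modStub stands in for the two ModuleJSON fields used; the real findPackage also consults its caches and rejects nested modules):

package sketch

import (
    "path/filepath"
    "strings"
)

// modStub is an illustrative stand-in for ModuleJSON's Path and Dir fields.
type modStub struct{ Path, Dir string }

// findCandidateDir mirrors the prefix match at the top of findPackage.
func findCandidateDir(mods []modStub, importPath string) (modStub, string, bool) {
    for _, m := range mods {
        if !strings.HasPrefix(importPath, m.Path) {
            continue
        }
        // e.g. Path "example.com/m" and import path "example.com/m/internal/x"
        // yield the candidate directory Dir/internal/x.
        suffix := strings.TrimPrefix(importPath, m.Path)
        return m, filepath.Join(m.Dir, filepath.FromSlash(suffix)), true
    }
    return modStub{}, "", false
}
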
func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) {
if info, ok := r.moduleCacheCache.Load(dir); ok {
return info, ok
}
return r.otherCache.Load(dir)
}
func (r *ModuleResolver) cacheStore(info directoryPackageInfo) {
if info.rootType == gopathwalk.RootModuleCache {
r.moduleCacheCache.Store(info.dir, info)
} else {
r.otherCache.Store(info.dir, info)
}
}
func (r *ModuleResolver) cacheKeys() []string {
return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...)
}
// cachePackageName caches the package name for a dir already in the cache.
func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
if info.rootType == gopathwalk.RootModuleCache {
return r.moduleCacheCache.CachePackageName(info)
}
return r.otherCache.CachePackageName(info)
}
func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
if info.rootType == gopathwalk.RootModuleCache {
return r.moduleCacheCache.CacheExports(ctx, env, info)
}
return r.otherCache.CacheExports(ctx, env, info)
}
// findModuleByDir returns the module that contains dir, or nil if no such
// module is in scope.
func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
@ -159,7 +292,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets.
for _, m := range r.ModsByDir {
for _, m := range r.modsByDir {
if !strings.HasPrefix(dir, m.Dir) {
continue
}
@ -184,28 +317,45 @@ func (r *ModuleResolver) dirIsNestedModule(dir string, mod *ModuleJSON) bool {
// so it cannot be a nested module.
return false
}
mf := r.findModFile(dir)
if mf == "" {
if mod != nil && mod == r.dummyVendorMod {
// The /vendor pseudomodule is flattened and doesn't actually count.
return false
}
return filepath.Dir(mf) != mod.Dir
modDir, _ := r.modInfo(dir)
if modDir == "" {
return false
}
return modDir != mod.Dir
}
func (r *ModuleResolver) findModFile(dir string) string {
func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) {
readModName := func(modFile string) string {
modBytes, err := ioutil.ReadFile(modFile)
if err != nil {
return ""
}
return modulePath(modBytes)
}
if r.dirInModuleCache(dir) {
matches := modCacheRegexp.FindStringSubmatch(dir)
index := strings.Index(dir, matches[1]+"@"+matches[2])
return filepath.Join(dir[:index], matches[1]+"@"+matches[2], "go.mod")
modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2])
return modDir, readModName(filepath.Join(modDir, "go.mod"))
}
for {
if info, ok := r.cacheLoad(dir); ok {
return info.moduleDir, info.moduleName
}
f := filepath.Join(dir, "go.mod")
info, err := os.Stat(f)
if err == nil && !info.IsDir() {
return f
return dir, readModName(f)
}
d := filepath.Dir(dir)
if len(d) >= len(dir) {
return "" // reached top of file system, no go.mod
return "", "" // reached top of file system, no go.mod
}
dir = d
}
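
Outside the module cache, modInfo walks up from the directory until it finds a go.mod file or reaches the filesystem root. The same walk in isolation (illustrative helper, standard library only):

package sketch

import (
    "os"
    "path/filepath"
)

// findGoMod walks parent directories until it finds a go.mod file,
// mirroring the loop in modInfo.
func findGoMod(dir string) (string, bool) {
    for {
        gm := filepath.Join(dir, "go.mod")
        if fi, err := os.Stat(gm); err == nil && !fi.IsDir() {
            return gm, true
        }
        parent := filepath.Dir(dir)
        if len(parent) >= len(dir) {
            return "", false // reached the top of the filesystem, no go.mod
        }
        dir = parent
    }
}
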
@ -237,47 +387,50 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil
}
func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
if err := r.init(); err != nil {
return nil, err
return err
}
// Walk GOROOT, GOPATH/pkg/mod, and the main module.
roots := []gopathwalk.Root{
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
}
if r.Main != nil {
roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule})
}
if r.moduleCacheDir == "" {
r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
}
roots = append(roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
// Walk replace targets, just in case they're not in any of the above.
for _, mod := range r.ModsByModPath {
if mod.Replace != nil {
roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
processDir := func(info directoryPackageInfo) {
// Skip this directory if we were not able to get the package information successfully.
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
return
}
pkg, err := r.canonicalize(info)
if err != nil {
return
}
var result []*pkg
dupCheck := make(map[string]bool)
var mu sync.Mutex
if !callback.dirFound(pkg) {
return
}
pkg.packageName, err = r.cachePackageName(info)
if err != nil {
return
}
// Packages in the module cache are immutable. If we have
// already seen this package on a previous scan of the module
// cache, return that result.
if !callback.packageNameLoaded(pkg) {
return
}
_, exports, err := r.loadExports(ctx, pkg, false)
if err != nil {
return
}
callback.exportsLoaded(pkg, exports)
}
// Start processing everything in the cache, and listen for the new stuff
// we discover in the walk below.
stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
defer stop1()
stop2 := r.otherCache.ScanAndListen(ctx, processDir)
defer stop2()
// We assume cached directories are fully cached, including all their
// children, and have not changed. We can skip them.
skip := func(root gopathwalk.Root, dir string) bool {
mu.Lock()
defer mu.Unlock()
// If we have already processed this directory on this walk, skip it.
if _, dup := dupCheck[dir]; dup {
return true
}
// If we have saved this directory information, skip it.
info, ok := r.moduleCacheInfo.Load(dir)
info, ok := r.cacheLoad(dir)
if !ok {
return false
}
@ -288,122 +441,143 @@ func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
return packageScanned
}
// Add anything new to the cache, and process it if we're still listening.
add := func(root gopathwalk.Root, dir string) {
mu.Lock()
defer mu.Unlock()
if _, dup := dupCheck[dir]; dup {
r.cacheStore(r.scanDirForPackage(root, dir))
}
// r.roots and the callback are not necessarily safe to use in the
// goroutine below. Process them eagerly.
roots := filterRoots(r.roots, callback.rootFound)
// We can't cancel walks, because we need them to finish to have a usable
// cache. Instead, run them in a separate goroutine and detach.
scanDone := make(chan struct{})
go func() {
select {
case <-ctx.Done():
return
case <-r.scanSema:
}
defer func() { r.scanSema <- struct{}{} }()
// We have the lock on r.scannedRoots, and no other scans can run.
for _, root := range roots {
if ctx.Err() != nil {
return
}
info, err := r.scanDirForPackage(root, dir)
if err != nil {
return
}
if root.Type == gopathwalk.RootModuleCache {
// Save this package information in the cache and return.
// Packages from the module cache are added after Walk.
r.moduleCacheInfo.Store(dir, info)
return
}
// Skip this package if there was an error loading package info.
if info.err != nil {
return
}
// The rest of this function canonicalizes the packages using the results
// of initializing the resolver from 'go list -m'.
res, err := r.canonicalize(info.nonCanonicalImportPath, info.dir, info.needsReplace)
if err != nil {
return
}
result = append(result, res)
}
gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
// Add the packages from the modules in the mod cache that were skipped.
for _, dir := range r.moduleCacheInfo.Keys() {
info, ok := r.moduleCacheInfo.Load(dir)
if !ok {
if r.scannedRoots[root] {
continue
}
// Skip this directory if we were not able to get the package information successfully.
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
continue
gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
r.scannedRoots[root] = true
}
res, err := r.canonicalize(info.nonCanonicalImportPath, info.dir, info.needsReplace)
if err != nil {
continue
}
result = append(result, res)
close(scanDone)
}()
select {
case <-ctx.Done():
case <-scanDone:
}
return nil
}
return result, nil
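
Walks cannot be cancelled halfway without leaving the cache unusable, so scan detaches them: a goroutine acquires scanSema (or gives up if the context is already done), walks any roots not yet recorded in scannedRoots, and closes a channel when finished, while the caller returns as soon as either the walk completes or its own context is cancelled. A self-contained sketch of that detach-and-wait pattern (names are illustrative):

package sketch

import "context"

func detachedScan(ctx context.Context, sema chan struct{}, walk func()) {
    done := make(chan struct{})
    go func() {
        // Acquire the scan semaphore, unless the caller has already gone away.
        select {
        case <-ctx.Done():
            return
        case <-sema:
        }
        defer func() { sema <- struct{}{} }()
        walk() // runs to completion even if the caller stops waiting
        close(done)
    }()
    // Return on whichever happens first; the walk keeps running either way.
    select {
    case <-ctx.Done():
    case <-done:
    }
}
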
func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) int {
if _, ok := stdlib[path]; ok {
return MaxRelevance
}
mod, _ := r.findPackage(path)
return modRelevance(mod)
}
func modRelevance(mod *ModuleJSON) int {
switch {
case mod == nil: // out of scope
return MaxRelevance - 4
case mod.Indirect:
return MaxRelevance - 3
case !mod.Main:
return MaxRelevance - 2
default:
return MaxRelevance - 1 // main module ties with stdlib
}
}
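
The switch above yields a fixed preference order: stdlib, then the main module, then other modules in the build list, then indirect dependencies, and finally anything out of scope. A hedged sketch of how such scores could order candidate packages (the candidate type and sort are illustrative, not the package's actual ranking code):

package sketch

import "sort"

type candidate struct {
    importPath string
    relevance  int // e.g. the value returned by scoreImportPath
}

func sortByRelevance(cands []candidate) {
    sort.Slice(cands, func(i, j int) bool {
        if cands[i].relevance != cands[j].relevance {
            return cands[i].relevance > cands[j].relevance // higher score first
        }
        return cands[i].importPath < cands[j].importPath // stable tie-break
    })
}
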
// canonicalize gets the result of canonicalizing the packages using the results
// of initializing the resolver from 'go list -m'.
func (r *ModuleResolver) canonicalize(importPath, dir string, needsReplace bool) (res *pkg, err error) {
// Check if the directory is underneath a module that's in scope.
if mod := r.findModuleByDir(dir); mod != nil {
// It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one.
if mod.Dir == dir {
importPath = mod.Path
} else {
dirInMod := dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
} else if needsReplace {
return nil, fmt.Errorf("needed this package to be in scope: %s", dir)
func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
// Packages in GOROOT are already canonical, regardless of the std/cmd modules.
if info.rootType == gopathwalk.RootGOROOT {
return &pkg{
importPathShort: info.nonCanonicalImportPath,
dir: info.dir,
packageName: path.Base(info.nonCanonicalImportPath),
relevance: MaxRelevance,
}, nil
}
importPath := info.nonCanonicalImportPath
mod := r.findModuleByDir(info.dir)
// Check if the directory is underneath a module that's in scope.
if mod != nil {
// It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one.
if mod.Dir == info.dir {
importPath = mod.Path
} else {
dirInMod := info.dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
} else if !strings.HasPrefix(importPath, info.moduleName) {
// The module's name doesn't match the package's import path. It
// probably needs a replace directive we don't have.
return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
}
res := &pkg{
importPathShort: importPath,
dir: info.dir,
relevance: modRelevance(mod),
}
// We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible.
if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
dir = canonicalDir
res.dir = canonicalDir
}
return &pkg{
importPathShort: VendorlessPath(importPath),
dir: dir,
}, nil
return res, nil
}
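
For a directory under an in-scope module, the canonical import path is the module path joined with the directory's path relative to the module root. A worked example with illustrative paths:

package main

import (
    "fmt"
    "path"
    "path/filepath"
    "strings"
)

func main() {
    modPath := "example.com/m"                                  // mod.Path
    modDir := filepath.FromSlash("/home/user/src/m")            // mod.Dir
    dir := filepath.FromSlash("/home/user/src/m/internal/util") // info.dir
    dirInMod := strings.TrimPrefix(dir, modDir+string(filepath.Separator))
    fmt.Println(path.Join(modPath, filepath.ToSlash(dirInMod)))
    // example.com/m/internal/util
}
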
func (r *ModuleResolver) loadExports(ctx context.Context, expectPackage string, pkg *pkg) (map[string]bool, error) {
func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
if err := r.init(); err != nil {
return nil, err
return "", nil, err
}
return loadExportsFromFiles(ctx, r.env, expectPackage, pkg.dir)
if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
return r.cacheExports(ctx, r.env, info)
}
return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) (directoryPackageInfo, error) {
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
subdir := ""
if dir != root.Path {
subdir = dir[len(root.Path)+len("/"):]
}
importPath := filepath.ToSlash(subdir)
if strings.HasPrefix(importPath, "vendor/") {
// Ignore vendor dirs. If -mod=vendor is on, then things
// should mostly just work, but when it's not vendor/
// is a mess. There's no easy way to tell if it's on.
// We can still find things in the mod cache and
// map them into /vendor when -mod=vendor is on.
return directoryPackageInfo{}, fmt.Errorf("vendor directory")
// Only enter vendor directories if they're explicitly requested as a root.
return directoryPackageInfo{
status: directoryScanned,
err: fmt.Errorf("unwanted vendor directory"),
}
}
switch root.Type {
case gopathwalk.RootCurrentModule:
importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir))
importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 {
return directoryPackageInfo{
status: directoryScanned,
err: fmt.Errorf("invalid module cache path: %v", subdir),
}, nil
}
}
modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
if err != nil {
@ -413,35 +587,25 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) (di
return directoryPackageInfo{
status: directoryScanned,
err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
}, nil
}
}
importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
case gopathwalk.RootGOROOT:
importPath = subdir
}
modDir, modName := r.modInfo(dir)
result := directoryPackageInfo{
status: directoryScanned,
dir: dir,
rootType: root.Type,
nonCanonicalImportPath: importPath,
needsReplace: false,
moduleDir: modDir,
moduleName: modName,
}
if root.Type == gopathwalk.RootGOROOT {
// stdlib packages are always in scope, despite the confusing go.mod
return result, nil
return result
}
// Check that this package is not obviously impossible to import.
modFile := r.findModFile(dir)
modBytes, err := ioutil.ReadFile(modFile)
if err == nil && !strings.HasPrefix(importPath, modulePath(modBytes)) {
// The module's declared path does not match
// its expected path. It probably needs a
// replace directive we don't have.
result.needsReplace = true
}
return result, nil
return result
}
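
Under GOPATH/pkg/mod, the walk recovers the import path by splitting the directory into module path, version, and sub-package (the three groups modCacheRegexp captures) and then decoding the escaped module path with module.DecodePath. A hedged sketch of the same split using plain string operations instead of the real regexp:

package sketch

import "strings"

// splitModCachePath splits a path relative to the module cache root,
// e.g. "github.com/some/mod@v1.2.3/internal/pkg", into its parts.
// Illustrative only; the resolver uses modCacheRegexp and module.DecodePath.
func splitModCachePath(subdir string) (modPath, version, pkgSuffix string, ok bool) {
    at := strings.Index(subdir, "@")
    if at < 0 {
        return "", "", "", false
    }
    rest := subdir[at+1:]
    if slash := strings.Index(rest, "/"); slash >= 0 {
        return subdir[:at], rest[:slash], rest[slash+1:], true
    }
    return subdir[:at], rest, "", true
}
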
// modCacheRegexp splits a path in a module cache into module, module version, and package.
@ -490,3 +654,63 @@ func modulePath(mod []byte) string {
}
return "" // missing module path
}
var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)
// vendorEnabled indicates if vendoring is enabled.
// Inspired by setDefaultBuildMod in modload/init.go
func vendorEnabled(env *ProcessEnv) (*ModuleJSON, bool, error) {
mainMod, go114, err := getMainModuleAnd114(env)
if err != nil {
return nil, false, err
}
matches := modFlagRegexp.FindStringSubmatch(env.GOFLAGS)
var modFlag string
if len(matches) != 0 {
modFlag = matches[1]
}
if modFlag != "" {
// Don't override an explicit '-mod=' argument.
return mainMod, modFlag == "vendor", nil
}
if mainMod == nil || !go114 {
return mainMod, false, nil
}
// Check 1.14's automatic vendor mode.
if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
// The Go version is at least 1.14, and a vendor directory exists.
// Set -mod=vendor by default.
return mainMod, true, nil
}
}
return mainMod, false, nil
}
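
An explicit -mod flag in GOFLAGS always wins over the automatic vendor detection. A small runnable sketch of the same extraction using the modFlagRegexp declared above:

package main

import (
    "fmt"
    "regexp"
)

var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)

func main() {
    for _, goflags := range []string{"", "-trimpath -mod=vendor", "-mod mod"} {
        var modFlag string
        if m := modFlagRegexp.FindStringSubmatch(goflags); len(m) != 0 {
            modFlag = m[1]
        }
        fmt.Printf("%q -> %q\n", goflags, modFlag)
    }
    // "" -> "", "-trimpath -mod=vendor" -> "vendor", "-mod mod" -> "mod"
}
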
// getMainModuleAnd114 gets the main module's information and whether the
// go command in use is 1.14+. This is the information needed to figure out
// if vendoring should be enabled.
func getMainModuleAnd114(env *ProcessEnv) (*ModuleJSON, bool, error) {
const format = `{{.Path}}
{{.Dir}}
{{.GoMod}}
{{.GoVersion}}
{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}
`
stdout, err := env.invokeGo("list", "-m", "-f", format)
if err != nil {
return nil, false, nil
}
lines := strings.Split(stdout.String(), "\n")
if len(lines) < 5 {
return nil, false, fmt.Errorf("unexpected stdout: %q", stdout)
}
mod := &ModuleJSON{
Path: lines[0],
Dir: lines[1],
GoMod: lines[2],
GoVersion: lines[3],
Main: true,
}
return mod, lines[4] == "go1.14", nil
}
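
The same five-line query can be run with the go command directly; a sketch using os/exec (the output layout matches the template above, and go list -m fails outside a module, which getMainModuleAnd114 treats as having no main module):

package main

import (
    "fmt"
    "os/exec"
)

func main() {
    const format = "{{.Path}}\n{{.Dir}}\n{{.GoMod}}\n{{.GoVersion}}\n" +
        `{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}`
    out, err := exec.Command("go", "list", "-m", "-f", format).Output()
    if err != nil {
        fmt.Println("no main module (GOPATH mode) or go list failed:", err)
        return
    }
    fmt.Printf("%s", out)
}
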

View File

@ -1,12 +1,13 @@
package imports
import (
"context"
"fmt"
"sync"
"golang.org/x/tools/internal/gopathwalk"
)
// ModuleResolver implements Resolver for modules using the go command as little
// as feasible.
//
// To find packages to import, the resolver needs to know about all of the
// packages that could be imported. This includes packages that are
// already in modules that are in (1) the current module, (2) replace targets,
@ -30,6 +31,8 @@ type directoryPackageStatus int
const (
_ directoryPackageStatus = iota
directoryScanned
nameLoaded
exportsLoaded
)
type directoryPackageInfo struct {
@ -38,17 +41,26 @@ type directoryPackageInfo struct {
// err is non-nil when there was an error trying to reach status.
err error
// Set when status > directoryScanned.
// Set when status >= directoryScanned.
// dir is the absolute directory of this package.
dir string
// nonCanonicalImportPath is the expected import path for this package.
// This may not be an import path that can be used to import this package.
rootType gopathwalk.RootType
// nonCanonicalImportPath is the package's expected import path. It may
// not actually be importable at that path.
nonCanonicalImportPath string
// needsReplace is true if the nonCanonicalImportPath does not match the
// the modules declared path, making it impossible to import without a
// replace directive.
needsReplace bool
// Module-related information.
moduleDir string // The directory that is the module root of this dir.
moduleName string // The module name that contains this dir.
// Set when status >= nameLoaded.
packageName string // the package name, as declared in the source.
// Set when status >= exportsLoaded.
exports []string
}
// reachedStatus returns true when info has a status at least target and any error associated with
@ -63,8 +75,8 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (
return true, nil
}
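
Callers treat these statuses as a ladder: a cached entry is usable at a given level only if it has reached that status without recording an error. The check used throughout the resolver, as in scan's processDir callback earlier in this diff, looks like:

if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
    return // not scanned yet, or the scan itself failed
}
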
// moduleCacheInfo is a concurrency safe map for storing information about
// the directories in the module cache.
// dirInfoCache is a concurrency safe map for storing information about
// directories that may contain packages.
//
// The information in this cache is built incrementally. Entries are initialized in scan.
// No new keys should be added in any other functions, as all directories containing
@ -73,37 +85,101 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (
// Other functions, including loadExports and findPackage, may update entries in this cache
// as they discover new things about the directory.
//
// We do not need to protect the data in the cache for multiple writes, because it only stores
// module cache directories, which do not change. If two competing stores take place, there will be
// one store that wins. Although this could result in a loss of information it will not be incorrect
// and may just result in recomputing the same result later.
// The information in the cache is not expected to change for the cache's
// lifetime, so there is no protection against competing writes. Users should
// take care not to hold the cache across changes to the underlying files.
//
// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc)
type moduleCacheInfo struct {
type dirInfoCache struct {
mu sync.Mutex
// modCacheDirInfo stores information about packages in
// module cache directories. Keyed by absolute directory.
modCacheDirInfo map[string]*directoryPackageInfo
// dirs stores information about packages in directories, keyed by absolute path.
dirs map[string]*directoryPackageInfo
listeners map[*int]cacheListener
}
type cacheListener func(directoryPackageInfo)
// ScanAndListen calls listener on all the items in the cache, and on anything
// newly added. The returned stop function waits for all in-flight callbacks to
// finish and blocks new ones.
func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
ctx, cancel := context.WithCancel(ctx)
// Flushing out all the callbacks is tricky without knowing how many there
// are going to be. Setting an arbitrary limit makes it much easier.
const maxInFlight = 10
sema := make(chan struct{}, maxInFlight)
for i := 0; i < maxInFlight; i++ {
sema <- struct{}{}
}
cookie := new(int) // A unique ID we can use for the listener.
// We can't hold mu while calling the listener.
d.mu.Lock()
var keys []string
for key := range d.dirs {
keys = append(keys, key)
}
d.listeners[cookie] = func(info directoryPackageInfo) {
select {
case <-ctx.Done():
return
case <-sema:
}
listener(info)
sema <- struct{}{}
}
d.mu.Unlock()
stop := func() {
cancel()
d.mu.Lock()
delete(d.listeners, cookie)
d.mu.Unlock()
for i := 0; i < maxInFlight; i++ {
<-sema
}
}
// Process the pre-existing keys.
for _, k := range keys {
select {
case <-ctx.Done():
return stop
default:
}
if v, ok := d.Load(k); ok {
listener(v)
}
}
return stop
}
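
scan wires this up by registering processDir on both r.moduleCacheCache and r.otherCache and deferring the returned stop functions; the usage pattern (cache and process are placeholders) is:

stop := cache.ScanAndListen(ctx, func(info directoryPackageInfo) {
    // Runs for every directory already in the cache and for each new
    // directory stored while the listener is registered.
    process(info)
})
defer stop() // waits for in-flight callbacks, then blocks new ones
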
// Store stores the package info for dir.
func (d *moduleCacheInfo) Store(dir string, info directoryPackageInfo) {
func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
d.mu.Lock()
defer d.mu.Unlock()
d.modCacheDirInfo[dir] = &directoryPackageInfo{
status: info.status,
err: info.err,
dir: info.dir,
nonCanonicalImportPath: info.nonCanonicalImportPath,
needsReplace: info.needsReplace,
_, old := d.dirs[dir]
d.dirs[dir] = &info
var listeners []cacheListener
for _, l := range d.listeners {
listeners = append(listeners, l)
}
d.mu.Unlock()
if !old {
for _, l := range listeners {
l(info)
}
}
}
// Load returns a copy of the directoryPackageInfo for absolute directory dir.
func (d *moduleCacheInfo) Load(dir string) (directoryPackageInfo, bool) {
func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) {
d.mu.Lock()
defer d.mu.Unlock()
info, ok := d.modCacheDirInfo[dir]
info, ok := d.dirs[dir]
if !ok {
return directoryPackageInfo{}, false
}
@ -111,11 +187,46 @@ func (d *moduleCacheInfo) Load(dir string) (directoryPackageInfo, bool) {
}
// Keys returns the keys currently present in d.
func (d *moduleCacheInfo) Keys() (keys []string) {
func (d *dirInfoCache) Keys() (keys []string) {
d.mu.Lock()
defer d.mu.Unlock()
for key := range d.modCacheDirInfo {
for key := range d.dirs {
keys = append(keys, key)
}
return keys
}
func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
if loaded, err := info.reachedStatus(nameLoaded); loaded {
return info.packageName, err
}
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
return "", fmt.Errorf("cannot read package name, scan error: %v", err)
}
info.packageName, info.err = packageDirToName(info.dir)
info.status = nameLoaded
d.Store(info.dir, info)
return info.packageName, info.err
}
func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
if reached, _ := info.reachedStatus(exportsLoaded); reached {
return info.packageName, info.exports, info.err
}
if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
return "", nil, err
}
info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
if info.err == context.Canceled || info.err == context.DeadlineExceeded {
return info.packageName, info.exports, info.err
}
// The cache structure wants things to proceed linearly. We can skip a
// step here, but only if we succeed.
if info.status == nameLoaded || info.err == nil {
info.status = exportsLoaded
} else {
info.status = nameLoaded
}
d.Store(info.dir, info)
return info.packageName, info.exports, info.err
}

File diff suppressed because it is too large

View File

@ -0,0 +1,4 @@
// Package packagesinternal exposes internal-only fields from go/packages.
package packagesinternal
var GetForTest = func(p interface{}) string { return "" }

4
vendor/modules.txt vendored
View File

@ -196,7 +196,7 @@ golang.org/x/sys/windows
golang.org/x/text/transform
golang.org/x/text/unicode/norm
golang.org/x/text/width
# golang.org/x/tools v0.0.0-20200102140908-9497f49d5709 => github.com/golangci/tools v0.0.0-20190915081525-6aa350649b1c
# golang.org/x/tools v0.0.0-20200204192400-7124308813f3
golang.org/x/tools/go/analysis
golang.org/x/tools/go/analysis/passes/asmdecl
golang.org/x/tools/go/analysis/passes/assign
@ -226,6 +226,7 @@ golang.org/x/tools/go/analysis/passes/shift
golang.org/x/tools/go/analysis/passes/sortslice
golang.org/x/tools/go/analysis/passes/stdmethods
golang.org/x/tools/go/analysis/passes/structtag
golang.org/x/tools/go/analysis/passes/testinggoroutine
golang.org/x/tools/go/analysis/passes/tests
golang.org/x/tools/go/analysis/passes/unmarshal
golang.org/x/tools/go/analysis/passes/unreachable
@ -250,6 +251,7 @@ golang.org/x/tools/internal/fastwalk
golang.org/x/tools/internal/gopathwalk
golang.org/x/tools/internal/imports
golang.org/x/tools/internal/module
golang.org/x/tools/internal/packagesinternal
golang.org/x/tools/internal/semver
# gopkg.in/ini.v1 v1.51.0
gopkg.in/ini.v1