Reduce memory usage by 1.5x on large repos on repeated runs (#764)

Get rid of the AST cache: load ASTs on demand instead. Optimize memory
allocations for go/analysis actions.

Relates: #337
Isaev Denis authored 2019-10-01 14:52:00 +03:00, committed by GitHub
parent ea417ffa0b
commit df4f6766ba
15 changed files with 265 additions and 348 deletions
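
The "optimize memory allocations" part boils down to a bulk-allocation pattern visible in the go/analysis runner diff below: count the action-graph nodes first, then allocate them all in one backing slice and hand out pointers, instead of heap-allocating every node separately. A minimal, runnable Go sketch of that idea (node and nodeAllocator are illustrative names, not the actual golangci-lint types):

package main

import "fmt"

// node stands in for the go/analysis "action" struct; illustrative only.
type node struct {
    name string
    deps []*node
}

// nodeAllocator hands out pointers into one pre-allocated slice instead of
// allocating each node separately, so a graph with 100k+ nodes costs a
// single allocation and far less GC bookkeeping.
type nodeAllocator struct {
    backing []node
    next    int
}

func newNodeAllocator(maxCount int) *nodeAllocator {
    return &nodeAllocator{backing: make([]node, maxCount)}
}

func (na *nodeAllocator) alloc() *node {
    if na.next == len(na.backing) {
        panic(fmt.Sprintf("too many allocations: %d allowed", len(na.backing)))
    }
    n := &na.backing[na.next]
    na.next++
    return n
}

func main() {
    // Count nodes first (the commit does a separate "mark" pass), then allocate once.
    alloc := newNodeAllocator(2)
    a := alloc.alloc()
    a.name = "analyzer@pkg"
    b := alloc.alloc()
    b.name = "dep-analyzer@pkg"
    a.deps = append(a.deps, b)
    fmt.Println(a.name, "->", a.deps[0].name)
}

The pointers stay valid because the backing slice is sized up front and never grows.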


@@ -290,8 +290,8 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Iss
 	}
 	lintCtx.Log = e.log.Child("linters context")
 
-	runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"),
-		e.goenv, e.lineCache, e.DBManager)
+	runner, err := lint.NewRunner(e.cfg, e.log.Child("runner"),
+		e.goenv, e.lineCache, e.DBManager, lintCtx.Packages)
 	if err != nil {
 		return nil, err
 	}


@@ -189,7 +189,7 @@ func buildIssuesFromErrorsForTypecheckMode(errs []error, lintCtx *linter.Context
 		if !ok {
 			return nil, err
 		}
-		for _, err := range libpackages.ExtractErrors(itErr.Pkg, lintCtx.ASTCache) {
+		for _, err := range libpackages.ExtractErrors(itErr.Pkg) {
 			i, perr := parseError(err)
 			if perr != nil { // failed to parse
 				if uniqReportedIssues[err.Msg] {


@@ -108,59 +108,83 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages
 	return extractDiagnostics(roots)
 }
 
-//nolint:gocritic
-func (r *runner) prepareAnalysis(pkgs []*packages.Package,
-	analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) {
-	// Construct the action graph.
-	// Each graph node (action) is one unit of analysis.
-	// Edges express package-to-package (vertical) dependencies,
-	// and analysis-to-analysis (horizontal) dependencies.
-	type key struct {
-		*analysis.Analyzer
-		*packages.Package
-	}
-	actions := make(map[key]*action)
-
-	initialPkgs := map[*packages.Package]bool{}
-	for _, pkg := range pkgs {
-		initialPkgs[pkg] = true
-	}
-
-	var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
-	mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
-		k := key{a, pkg}
-		act, ok := actions[k]
-		if !ok {
-			act = &action{
-				a:                 a,
-				pkg:               pkg,
-				log:               r.log,
-				prefix:            r.prefix,
-				pkgCache:          r.pkgCache,
-				isInitialPkg:      initialPkgs[pkg],
-				needAnalyzeSource: initialPkgs[pkg],
-				analysisDoneCh:    make(chan struct{}),
-				objectFacts:       make(map[objectFactKey]analysis.Fact),
-				packageFacts:      make(map[packageFactKey]analysis.Fact),
-				loadMode:          r.loadMode,
-			}
-
-			// Add a dependency on each required analyzers.
-			for _, req := range a.Requires {
-				act.deps = append(act.deps, mkAction(req, pkg))
-			}
-
-			// An analysis that consumes/produces facts
-			// must run on the package's dependencies too.
-			if len(a.FactTypes) > 0 {
-				paths := make([]string, 0, len(pkg.Imports))
-				for path := range pkg.Imports {
-					paths = append(paths, path)
-				}
-				sort.Strings(paths) // for determinism
-				for _, path := range paths {
-					dep := mkAction(a, pkg.Imports[path])
-					act.deps = append(act.deps, dep)
-				}
-			}
+type actKey struct {
+	*analysis.Analyzer
+	*packages.Package
+}
+
+func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, markedActions map[actKey]struct{}) {
+	k := actKey{a, pkg}
+	if _, ok := markedActions[k]; ok {
+		return
+	}
+
+	for _, req := range a.Requires {
+		r.markAllActions(req, pkg, markedActions)
+	}
+
+	if len(a.FactTypes) != 0 {
+		for path := range pkg.Imports {
+			r.markAllActions(a, pkg.Imports[path], markedActions)
+		}
+	}
+
+	markedActions[k] = struct{}{}
+}
+
+func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
+	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action {
+	k := actKey{a, pkg}
+	act, ok := actions[k]
+	if ok {
+		return act
+	}
+
+	act = actAlloc.alloc()
+	act.a = a
+	act.pkg = pkg
+	act.log = r.log
+	act.prefix = r.prefix
+	act.pkgCache = r.pkgCache
+	act.isInitialPkg = initialPkgs[pkg]
+	act.needAnalyzeSource = initialPkgs[pkg]
+	act.analysisDoneCh = make(chan struct{})
+
+	depsCount := len(a.Requires)
+	if len(a.FactTypes) > 0 {
+		depsCount += len(pkg.Imports)
+	}
+	act.deps = make([]*action, 0, depsCount)
+
+	// Add a dependency on each required analyzers.
+	for _, req := range a.Requires {
+		act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc))
+	}
+
+	r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc)
+
+	actions[k] = act
+	return act
+}
+
+func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package,
+	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) {
+	// An analysis that consumes/produces facts
+	// must run on the package's dependencies too.
+	if len(a.FactTypes) == 0 {
+		return
+	}
+
+	act.objectFacts = make(map[objectFactKey]analysis.Fact)
+	act.packageFacts = make(map[packageFactKey]analysis.Fact)
+
+	paths := make([]string, 0, len(pkg.Imports))
+	for path := range pkg.Imports {
+		paths = append(paths, path)
+	}
+	sort.Strings(paths) // for determinism
+	for _, path := range paths {
+		dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc)
+		act.deps = append(act.deps, dep)
+	}
+}
@@ -170,16 +194,59 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
-			actions[k] = act
-		}
-		return act
-	}
-
-	// Build nodes for initial packages.
-	var roots []*action
-	for _, a := range analyzers {
-		for _, pkg := range pkgs {
-			root := mkAction(a, pkg)
+type actionAllocator struct {
+	allocatedActions []action
+	nextFreeIndex    int
+}
+
+func newActionAllocator(maxCount int) *actionAllocator {
+	return &actionAllocator{
+		allocatedActions: make([]action, maxCount),
+		nextFreeIndex:    0,
+	}
+}
+
+func (actAlloc *actionAllocator) alloc() *action {
+	if actAlloc.nextFreeIndex == len(actAlloc.allocatedActions) {
+		panic(fmt.Sprintf("Made too many allocations of actions: %d allowed", len(actAlloc.allocatedActions)))
+	}
+	act := &actAlloc.allocatedActions[actAlloc.nextFreeIndex]
+	actAlloc.nextFreeIndex++
+	return act
+}
+
+//nolint:gocritic
+func (r *runner) prepareAnalysis(pkgs []*packages.Package,
+	analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) {
+	// Construct the action graph.
+	// Each graph node (action) is one unit of analysis.
+	// Edges express package-to-package (vertical) dependencies,
+	// and analysis-to-analysis (horizontal) dependencies.
+
+	// This place is memory-intensive: e.g. Istio project has 120k total actions.
+	// Therefore optimize it carefully.
+	markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs))
+	for _, a := range analyzers {
+		for _, pkg := range pkgs {
+			r.markAllActions(a, pkg, markedActions)
+		}
+	}
+	totalActionsCount := len(markedActions)
+
+	actions := make(map[actKey]*action, totalActionsCount)
+	actAlloc := newActionAllocator(totalActionsCount)
+
+	initialPkgs := make(map[*packages.Package]bool, len(pkgs))
+	for _, pkg := range pkgs {
+		initialPkgs[pkg] = true
+	}
+
+	// Build nodes for initial packages.
+	roots := make([]*action, 0, len(pkgs)*len(analyzers))
+	for _, a := range analyzers {
+		for _, pkg := range pkgs {
+			root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc)
 			root.isroot = true
 			roots = append(roots, root)
 		}
@@ -190,6 +257,8 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
 		allActions = append(allActions, act)
 	}
 
+	debugf("Built %d actions", len(actions))
+
 	return initialPkgs, allActions, roots
 }
@@ -334,9 +403,6 @@ type action struct {
 	a    *analysis.Analyzer
 	pkg  *packages.Package
 	pass *analysis.Pass
-	isroot            bool
-	isInitialPkg      bool
-	needAnalyzeSource bool
 	deps         []*action
 	objectFacts  map[objectFactKey]analysis.Fact
 	packageFacts map[packageFactKey]analysis.Fact
@@ -349,7 +415,9 @@ type action struct {
 	analysisDoneCh      chan struct{}
 	loadCachedFactsDone bool
 	loadCachedFactsOk   bool
-	loadMode            LoadMode
+	isroot              bool
+	isInitialPkg        bool
+	needAnalyzeSource   bool
 }
 
 type objectFactKey struct {


@@ -1,163 +0,0 @@
-package astcache
-
-import (
-	"go/ast"
-	"go/parser"
-	"go/token"
-	"path/filepath"
-
-	"golang.org/x/tools/go/packages"
-
-	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-)
-
-type File struct {
-	F    *ast.File
-	Fset *token.FileSet
-	Name string
-	Err  error
-}
-
-type Cache struct {
-	m   map[string]*File // map from absolute file path to file data
-	s   []*File
-	log logutils.Log
-}
-
-func NewCache(log logutils.Log) *Cache {
-	return &Cache{
-		m:   map[string]*File{},
-		log: log,
-	}
-}
-
-func (c Cache) ParsedFilenames() []string {
-	var keys []string
-	for k := range c.m {
-		keys = append(keys, k)
-	}
-	return keys
-}
-
-func (c Cache) normalizeFilename(filename string) string {
-	absPath := func() string {
-		if filepath.IsAbs(filename) {
-			return filepath.Clean(filename)
-		}
-
-		absFilename, err := filepath.Abs(filename)
-		if err != nil {
-			c.log.Warnf("Can't abs-ify filename %s: %s", filename, err)
-			return filename
-		}
-
-		return absFilename
-	}()
-
-	ret, err := fsutils.EvalSymlinks(absPath)
-	if err != nil {
-		c.log.Warnf("Failed to eval symlinks for %s: %s", absPath, err)
-		return absPath
-	}
-
-	return ret
-}
-
-func (c Cache) Get(filename string) *File {
-	return c.m[c.normalizeFilename(filename)]
-}
-
-func (c Cache) GetAllValidFiles() []*File {
-	return c.s
-}
-
-func (c *Cache) prepareValidFiles() {
-	files := make([]*File, 0, len(c.m))
-	for _, f := range c.m {
-		if f.Err != nil || f.F == nil {
-			continue
-		}
-		files = append(files, f)
-	}
-	c.s = files
-}
-
-func LoadFromFilenames(log logutils.Log, filenames ...string) *Cache {
-	c := NewCache(log)
-
-	fset := token.NewFileSet()
-	for _, filename := range filenames {
-		c.parseFile(filename, fset)
-	}
-
-	c.prepareValidFiles()
-	return c
-}
-
-func LoadFromPackages(pkgs []*packages.Package, log logutils.Log) (*Cache, error) {
-	c := NewCache(log)
-
-	for _, pkg := range pkgs {
-		c.loadFromPackage(pkg)
-	}
-
-	c.prepareValidFiles()
-	return c, nil
-}
-
-func (c *Cache) extractFilenamesForAstFile(fset *token.FileSet, f *ast.File) []string {
-	var ret []string
-
-	// false ignores //line comments: name can be incorrect for generated files with //line directives
-	// mapping e.g. from .rl to .go files.
-	pos := fset.PositionFor(f.Pos(), false)
-	if pos.Filename != "" {
-		ret = append(ret, pos.Filename)
-	}
-
-	return ret
-}
-
-func (c *Cache) loadFromPackage(pkg *packages.Package) {
-	for _, f := range pkg.Syntax {
-		for _, filename := range c.extractFilenamesForAstFile(pkg.Fset, f) {
-			filePath := c.normalizeFilename(filename)
-			c.m[filePath] = &File{
-				F:    f,
-				Fset: pkg.Fset,
-				Name: filePath,
-			}
-		}
-	}
-
-	// some Go files sometimes aren't present in pkg.Syntax
-	fset := token.NewFileSet() // can't use pkg.Fset: it will overwrite offsets by preprocessed files
-	for _, filePath := range pkg.GoFiles {
-		filePath = c.normalizeFilename(filePath)
-		if c.m[filePath] == nil {
-			c.parseFile(filePath, fset)
-		}
-	}
-}
-
-func (c *Cache) parseFile(filePath string, fset *token.FileSet) {
-	if fset == nil {
-		fset = token.NewFileSet()
-	}
-
-	filePath = c.normalizeFilename(filePath)
-
-	// comments needed by e.g. golint
-	f, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)
-	c.m[filePath] = &File{
-		F:    f,
-		Fset: fset,
-		Err:  err,
-		Name: filePath,
-	}
-	if err != nil {
-		c.log.Infof("Can't parse AST of %s: %s", filePath, err)
-		// Info level because it will be reported by typecheck linter or go/analysis.
-	}
-}
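
In place of this deleted cache, processors that still need an AST now parse the file on demand with a short-lived FileSet, so the parsed tree can be garbage-collected as soon as the processor is done (see the nolint and autogenerated-exclude diffs later in this commit). A minimal sketch of that on-demand pattern, with an illustrative file path:

package main

import (
    "fmt"
    "go/parser"
    "go/token"
)

// parseOnDemand loads a single file's AST only when a processor needs it,
// instead of keeping every parsed file in a process-wide cache.
func parseOnDemand(path string) error {
    fset := token.NewFileSet() // short-lived; freed together with the AST
    f, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
    if err != nil {
        // Parse errors are left for typecheck/go-analysis to report.
        return err
    }
    fmt.Printf("%s: %d comment groups\n", path, len(f.Comments))
    return nil
}

func main() {
    // "main.go" is just an example path.
    if err := parseOnDemand("main.go"); err != nil {
        fmt.Println("parse failed:", err)
    }
}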


@@ -3,15 +3,10 @@ package linter
 import (
 	"golang.org/x/tools/go/packages"
 
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
-
 	"github.com/golangci/golangci-lint/internal/pkgcache"
-	"github.com/golangci/golangci-lint/pkg/fsutils"
 	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
@@ -30,7 +25,6 @@ type Context struct {
 	PkgCache  *pkgcache.Cache
 	LoadGuard *load.Guard
-	ASTCache  *astcache.Cache
 }
 
 func (c *Context) Settings() *config.LintersSettings {


@@ -23,7 +23,6 @@ import (
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/exitcodes"
 	"github.com/golangci/golangci-lint/pkg/goutil"
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
@@ -293,14 +292,6 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l
 		return nil, exitcodes.ErrNoGoFiles
 	}
 
-	astLog := cl.log.Child("astcache")
-	startedLoadingASTAt := time.Now()
-	astCache, err := astcache.LoadFromPackages(deduplicatedPkgs, astLog)
-	if err != nil {
-		return nil, err
-	}
-	cl.log.Infof("Loaded %d AST files in %s", len(astCache.ParsedFilenames()), time.Since(startedLoadingASTAt))
-
 	ret := &linter.Context{
 		Packages: deduplicatedPkgs,
@@ -309,7 +300,6 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l
 		OriginalPackages: pkgs,
 
 		Cfg: cl.cfg,
-		ASTCache: astCache,
 		Log: cl.log,
 		FileCache: cl.fileCache,
 		LineCache: cl.lineCache,


@@ -13,13 +13,14 @@ import (
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/goutil"
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/packages"
 	"github.com/golangci/golangci-lint/pkg/result"
 	"github.com/golangci/golangci-lint/pkg/result/processors"
 	"github.com/golangci/golangci-lint/pkg/timeutils"
+	gopackages "golang.org/x/tools/go/packages"
 )
 
 type Runner struct {
@@ -27,8 +28,8 @@ type Runner struct {
 	Log logutils.Log
 }
 
-func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, goenv *goutil.Env,
-	lineCache *fsutils.LineCache, dbManager *lintersdb.Manager) (*Runner, error) {
+func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env,
+	lineCache *fsutils.LineCache, dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) {
 	icfg := cfg.Issues
 	excludePatterns := icfg.ExcludePatterns
 	if icfg.UseDefaultExcludes {
@@ -67,16 +68,23 @@ func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, g
 	return &Runner{
 		Processors: []processors.Processor{
 			processors.NewCgo(goenv),
-			processors.NewFilenameUnadjuster(astCache, log.Child("filename_unadjuster")), // must go after Cgo
-			processors.NewPathPrettifier(), // must be before diff, nolint and exclude autogenerated processor at least
+			// Must go after Cgo.
+			processors.NewFilenameUnadjuster(pkgs, log.Child("filename_unadjuster")),
+
+			// Must be before diff, nolint and exclude autogenerated processor at least.
+			processors.NewPathPrettifier(),
+
 			skipFilesProcessor,
 			skipDirsProcessor, // must be after path prettifier
-			processors.NewAutogeneratedExclude(astCache),
-			processors.NewIdentifierMarker(), // must be before exclude because users see already marked output and configure excluding by it
+			processors.NewAutogeneratedExclude(),
+
+			// Must be before exclude because users see already marked output and configure excluding by it.
+			processors.NewIdentifierMarker(),
+
 			processors.NewExclude(excludeTotalPattern),
 			processors.NewExcludeRules(excludeRules, lineCache, log.Child("exclude_rules")),
-			processors.NewNolint(astCache, log.Child("nolint"), dbManager),
+			processors.NewNolint(log.Child("nolint"), dbManager),
 			processors.NewUniqByLine(cfg),
 			processors.NewDiff(icfg.Diff, icfg.DiffFromRevision, icfg.DiffPatchFilePath),


@@ -65,7 +65,8 @@ func (sl StderrLog) Fatalf(format string, args ...interface{}) {
 }
 
 func (sl StderrLog) Panicf(format string, args ...interface{}) {
-	sl.logger.Panicf("%s%s", sl.prefix(), fmt.Sprintf(format, args...))
+	v := fmt.Sprintf("%s%s", sl.prefix(), fmt.Sprintf(format, args...))
+	panic(v)
 }
 
 func (sl StderrLog) Errorf(format string, args ...interface{}) {


@@ -3,13 +3,11 @@ package packages
 import (
 	"fmt"
 
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
-
 	"golang.org/x/tools/go/packages"
 )
 
 //nolint:gocyclo
-func ExtractErrors(pkg *packages.Package, astCache *astcache.Cache) []packages.Error {
+func ExtractErrors(pkg *packages.Package) []packages.Error {
 	errors := extractErrorsImpl(pkg, map[*packages.Package]bool{})
 	if len(errors) == 0 {
 		return errors
@@ -28,8 +26,8 @@ func ExtractErrors(pkg *packages.Package, astCache *astcache.Cache) []packages.E
 	if len(pkg.GoFiles) != 0 {
 		// errors were extracted from deps and have at leat one file in package
 		for i := range uniqErrors {
-			errPos, parseErr := ParseErrorPosition(uniqErrors[i].Pos)
-			if parseErr != nil || astCache.Get(errPos.Filename) == nil {
+			_, parseErr := ParseErrorPosition(uniqErrors[i].Pos)
+			if parseErr != nil {
 				// change pos to local file to properly process it by processors (properly read line etc)
 				uniqErrors[i].Msg = fmt.Sprintf("%s: %s", uniqErrors[i].Pos, uniqErrors[i].Msg)
 				uniqErrors[i].Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0])


@@ -1,13 +1,14 @@
 package processors
 
 import (
+	"bufio"
 	"fmt"
-	"go/ast"
-	"go/token"
+	"os"
 	"path/filepath"
 	"strings"
 
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
+	"github.com/pkg/errors"
+
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
@@ -22,13 +23,11 @@ type ageFileSummaryCache map[string]*ageFileSummary
 
 type AutogeneratedExclude struct {
 	fileSummaryCache ageFileSummaryCache
-	astCache         *astcache.Cache
 }
 
-func NewAutogeneratedExclude(astCache *astcache.Cache) *AutogeneratedExclude {
+func NewAutogeneratedExclude() *AutogeneratedExclude {
 	return &AutogeneratedExclude{
 		fileSummaryCache: ageFileSummaryCache{},
-		astCache:         astCache,
 	}
 }
@@ -103,60 +102,42 @@ func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFile
 		return nil, fmt.Errorf("no file path for issue")
 	}
 
-	f := p.astCache.Get(i.FilePath())
-	if f == nil || f.Err != nil {
-		return nil, fmt.Errorf("can't parse file %s: %v", i.FilePath(), f)
+	doc, err := getDoc(i.FilePath())
+	if err != nil {
+		return nil, errors.Wrapf(err, "failed to get doc of file %s", i.FilePath())
 	}
 
-	autogenDebugf("file %q: astcache file is %+v", i.FilePath(), *f)
-
-	doc := getDoc(f.F, f.Fset, i.FilePath())
-
 	fs.isGenerated = isGeneratedFileByComment(doc)
 	autogenDebugf("file %q is generated: %t", i.FilePath(), fs.isGenerated)
 	return fs, nil
 }
 
-func getDoc(f *ast.File, fset *token.FileSet, filePath string) string {
-	// don't use just f.Doc: e.g. mockgen leaves extra line between comment and package name
-
-	var importPos token.Pos
-	if len(f.Imports) != 0 {
-		importPos = f.Imports[0].Pos()
-		autogenDebugf("file %q: search comments until first import pos %d (%s)",
-			filePath, importPos, fset.Position(importPos))
-	} else {
-		importPos = f.End()
-		autogenDebugf("file %q: search comments until EOF pos %d (%s)",
-			filePath, importPos, fset.Position(importPos))
-	}
-
-	var neededComments []string
-	for _, g := range f.Comments {
-		pos := g.Pos()
-		filePos := fset.Position(pos)
-		text := g.Text()
-
-		// files using cgo have implicitly added comment "Created by cgo - DO NOT EDIT" for go <= 1.10
-		// and "Code generated by cmd/cgo" for go >= 1.11
-		isCgoGenerated := strings.Contains(text, "Created by cgo") || strings.Contains(text, "Code generated by cmd/cgo")
-
-		isAllowed := pos < importPos && filePos.Column == 1 && !isCgoGenerated
-		if isAllowed {
-			autogenDebugf("file %q: pos=%d, filePos=%s: comment %q: it's allowed", filePath, pos, filePos, text)
-			neededComments = append(neededComments, text)
-		} else {
-			autogenDebugf("file %q: pos=%d, filePos=%s: comment %q: it's NOT allowed", filePath, pos, filePos, text)
-		}
-	}
-
-	autogenDebugf("file %q: got %d allowed comments", filePath, len(neededComments))
-
-	if len(neededComments) == 0 {
-		return ""
-	}
-
-	return strings.Join(neededComments, "\n")
+func getDoc(filePath string) (string, error) {
+	file, err := os.Open(filePath)
+	if err != nil {
+		return "", errors.Wrap(err, "failed to open file")
+	}
+	defer file.Close()
+
+	scanner := bufio.NewScanner(file)
+	var docLines []string
+	for scanner.Scan() {
+		line := strings.TrimSpace(scanner.Text())
+		if strings.HasPrefix(line, "//") { //nolint:gocritic
+			text := strings.TrimSpace(strings.TrimPrefix(line, "//"))
+			docLines = append(docLines, text)
+		} else if line == "" {
+			// go to next line
+		} else {
+			break
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		return "", errors.Wrap(err, "failed to scan file")
+	}
+
+	return strings.Join(docLines, "\n"), nil
 }
 
 func (p AutogeneratedExclude) Finish() {}


@@ -1,6 +1,7 @@
 package processors
 
 import (
+	"path/filepath"
 	"strings"
 	"testing"
@@ -66,3 +67,12 @@ func TestIsAutogeneratedDetection(t *testing.T) {
 		assert.False(t, isGenerated)
 	}
 }
+
+func TestGetDoc(t *testing.T) {
+	const expectedDoc = `first line
+second line
+third line`
+	doc, err := getDoc(filepath.Join("testdata", "autogen_exclude.go"))
+	assert.NoError(t, err)
+	assert.Equal(t, expectedDoc, doc)
+}


@@ -1,14 +1,16 @@
 package processors
 
 import (
+	"go/parser"
 	"go/token"
 	"path/filepath"
 	"strings"
+	"sync"
+	"time"
 
+	"golang.org/x/tools/go/packages"
+
 	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
@@ -25,32 +27,61 @@ type FilenameUnadjuster struct {
 
 var _ Processor = &FilenameUnadjuster{}
 
-func NewFilenameUnadjuster(cache *astcache.Cache, log logutils.Log) *FilenameUnadjuster {
-	m := map[string]posMapper{}
-	for _, f := range cache.GetAllValidFiles() {
-		adjustedFilename := f.Fset.PositionFor(f.F.Pos(), true).Filename
-		if adjustedFilename == "" {
-			continue
-		}
-		unadjustedFilename := f.Fset.PositionFor(f.F.Pos(), false).Filename
-		if unadjustedFilename == "" || unadjustedFilename == adjustedFilename {
-			continue
-		}
-		if !strings.HasSuffix(unadjustedFilename, ".go") {
-			continue // file.go -> /caches/cgo-xxx
-		}
-
-		f := f
-		m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
-			tokenFile := f.Fset.File(f.F.Pos())
-			if tokenFile == nil {
-				log.Warnf("Failed to get token file for %s", adjustedFilename)
-				return adjustedPos
-			}
-			return f.Fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
-		}
-	}
+func processUnadjusterPkg(m map[string]posMapper, pkg *packages.Package, log logutils.Log) {
+	fset := token.NewFileSet() // it's more memory efficient to not store all in one fset
+
+	for _, filename := range pkg.CompiledGoFiles {
+		// It's important to call func here to run GC
+		processUnadjusterFile(filename, m, log, fset)
+	}
+}
+
+func processUnadjusterFile(filename string, m map[string]posMapper, log logutils.Log, fset *token.FileSet) {
+	syntax, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
+	if err != nil {
+		// Error will be reported by typecheck
+		return
+	}
+
+	adjustedFilename := fset.PositionFor(syntax.Pos(), true).Filename
+	if adjustedFilename == "" {
+		return
+	}
+
+	unadjustedFilename := fset.PositionFor(syntax.Pos(), false).Filename
+	if unadjustedFilename == "" || unadjustedFilename == adjustedFilename {
+		return
+	}
+
+	if !strings.HasSuffix(unadjustedFilename, ".go") {
+		return // file.go -> /caches/cgo-xxx
+	}
+
+	m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
+		tokenFile := fset.File(syntax.Pos())
+		if tokenFile == nil {
+			log.Warnf("Failed to get token file for %s", adjustedFilename)
+			return adjustedPos
+		}
+		return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
+	}
+}
+
+func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
+	m := map[string]posMapper{}
+	startedAt := time.Now()
+
+	var wg sync.WaitGroup
+	wg.Add(len(pkgs))
+	for _, pkg := range pkgs {
+		go func(pkg *packages.Package) {
+			// It's important to call func here to run GC
+			processUnadjusterPkg(m, pkg, log)
+			wg.Done()
+		}(pkg)
+	}
+	wg.Wait()
+	log.Infof("Pre-built %d adjustments in %s", len(m), time.Since(startedAt))
 
 	return &FilenameUnadjuster{
 		m:   m,
 		log: log,


@@ -3,11 +3,11 @@ package processors
 import (
 	"fmt"
 	"go/ast"
+	"go/parser"
 	"go/token"
 	"sort"
 	"strings"
 
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
 	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
@@ -47,17 +47,15 @@ type filesCache map[string]*fileData
 
 type Nolint struct {
 	cache             filesCache
-	astCache          *astcache.Cache
 	dbManager         *lintersdb.Manager
 	log               logutils.Log
 	unknownLintersSet map[string]bool
 }
 
-func NewNolint(astCache *astcache.Cache, log logutils.Log, dbManager *lintersdb.Manager) *Nolint {
+func NewNolint(log logutils.Log, dbManager *lintersdb.Manager) *Nolint {
 	return &Nolint{
 		cache:             filesCache{},
-		astCache:          astCache,
 		dbManager:         dbManager,
 		log:               log,
 		unknownLintersSet: map[string]bool{},
@@ -87,17 +85,18 @@ func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) {
 		return nil, fmt.Errorf("no file path for issue")
 	}
 
-	file := p.astCache.Get(i.FilePath())
-	if file == nil {
-		return nil, fmt.Errorf("no file %s in ast cache %v",
-			i.FilePath(), p.astCache.ParsedFilenames())
-	}
-	if file.Err != nil {
+	// TODO: migrate this parsing to go/analysis facts
+	// or cache them somehow per file.
+
+	// Don't use cached AST because they consume a lot of memory on large projects.
+	fset := token.NewFileSet()
+	f, err := parser.ParseFile(fset, i.FilePath(), nil, parser.ParseComments)
+	if err != nil {
 		// Don't report error because it's already must be reporter by typecheck or go/analysis.
 		return fd, nil
 	}
 
-	fd.ignoredRanges = p.buildIgnoredRangesForFile(file.F, file.Fset, i.FilePath())
+	fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, i.FilePath())
 	nolintDebugf("file %s: built nolint ranges are %+v", i.FilePath(), fd.ignoredRanges)
 	return fd, nil
 }


@@ -9,7 +9,6 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
 
-	"github.com/golangci/golangci-lint/pkg/lint/astcache"
 	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
@@ -32,13 +31,7 @@ func newNolint2FileIssue(line int) result.Issue {
 }
 
 func newTestNolintProcessor(log logutils.Log) *Nolint {
-	cache := astcache.LoadFromFilenames(log,
-		filepath.Join("testdata", "nolint.go"),
-		filepath.Join("testdata", "nolint2.go"),
-		filepath.Join("testdata", "nolint_bad_names.go"),
-		filepath.Join("testdata", "nolint_whole_file.go"),
-	)
-	return NewNolint(cache, log, lintersdb.NewManager(nil))
+	return NewNolint(log, lintersdb.NewManager(nil))
 }
 
 func getMockLog() *logutils.MockLog {


@@ -0,0 +1,7 @@
+// first line
+//second line
+// third line
+
+package testdata // no this text
+
+// and no this text too