reduce 1.5x memory usage on large repos on repeated runs (#764)

Get rid of AST cache: load AST when needed. Optimize memory allocations
for go/analysis actions.

Relates: #337
This commit is contained in:
Isaev Denis 2019-10-01 14:52:00 +03:00 committed by GitHub
parent ea417ffa0b
commit df4f6766ba
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 265 additions and 348 deletions

View File

@ -290,8 +290,8 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Iss
}
lintCtx.Log = e.log.Child("linters context")
runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"),
e.goenv, e.lineCache, e.DBManager)
runner, err := lint.NewRunner(e.cfg, e.log.Child("runner"),
e.goenv, e.lineCache, e.DBManager, lintCtx.Packages)
if err != nil {
return nil, err
}

View File

@ -189,7 +189,7 @@ func buildIssuesFromErrorsForTypecheckMode(errs []error, lintCtx *linter.Context
if !ok {
return nil, err
}
for _, err := range libpackages.ExtractErrors(itErr.Pkg, lintCtx.ASTCache) {
for _, err := range libpackages.ExtractErrors(itErr.Pkg) {
i, perr := parseError(err)
if perr != nil { // failed to parse
if uniqReportedIssues[err.Msg] {

View File

@ -108,6 +108,113 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages
return extractDiagnostics(roots)
}
// actKey identifies a single analysis action: one analyzer applied to one
// package. The embedded pointers make the struct a comparable map key.
type actKey struct {
	*analysis.Analyzer
	*packages.Package
}
// markAllActions records into markedActions the (analyzer, package) key of
// every action that running a on pkg would entail: the analyzer's required
// analyzers on the same package and, when the analyzer uses facts, the same
// analyzer on every imported package. The resulting set size is used to
// pre-allocate action storage exactly.
func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, markedActions map[actKey]struct{}) {
	key := actKey{a, pkg}
	if _, alreadyMarked := markedActions[key]; alreadyMarked {
		return
	}

	// Horizontal dependencies: required analyzers on this same package.
	for _, required := range a.Requires {
		r.markAllActions(required, pkg, markedActions)
	}

	// Vertical dependencies: a fact-based analyzer must also visit imports.
	if len(a.FactTypes) > 0 {
		for _, imported := range pkg.Imports {
			r.markAllActions(a, imported, markedActions)
		}
	}

	markedActions[key] = struct{}{}
}
// makeAction returns the memoized action for (a, pkg), creating it — and,
// recursively, all of its dependency actions — on first request. Action
// structs come from actAlloc, which was pre-sized to the exact total count
// by markAllActions, avoiding one heap allocation per action on large repos.
func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action {
	k := actKey{a, pkg}
	act, ok := actions[k]
	if ok {
		return act
	}

	act = actAlloc.alloc()
	act.a = a
	act.pkg = pkg
	act.log = r.log
	act.prefix = r.prefix
	act.pkgCache = r.pkgCache
	act.isInitialPkg = initialPkgs[pkg]
	act.needAnalyzeSource = initialPkgs[pkg]
	act.analysisDoneCh = make(chan struct{})

	// Pre-size deps: one per required analyzer, plus one per import when the
	// analyzer uses facts (it must then run on the package's dependencies too).
	depsCount := len(a.Requires)
	if len(a.FactTypes) > 0 {
		depsCount += len(pkg.Imports)
	}
	act.deps = make([]*action, 0, depsCount)

	// Add a dependency on each required analyzers.
	for _, req := range a.Requires {
		act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc))
	}

	r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc)

	actions[k] = act
	return act
}
// buildActionFactDeps extends act.deps for a fact-consuming/producing
// analyzer, which must also run on every package that pkg imports.
// It is a no-op for analyzers without fact types, and only then are the
// fact maps allocated — saving memory on the common fact-free case.
func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package,
	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) {
	// An analysis that consumes/produces facts
	// must run on the package's dependencies too.
	if len(a.FactTypes) == 0 {
		return
	}

	act.objectFacts = make(map[objectFactKey]analysis.Fact)
	act.packageFacts = make(map[packageFactKey]analysis.Fact)

	paths := make([]string, 0, len(pkg.Imports))
	for path := range pkg.Imports {
		paths = append(paths, path)
	}
	sort.Strings(paths) // for determinism

	for _, path := range paths {
		dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc)
		act.deps = append(act.deps, dep)
	}

	// Register the fact types so the package cache can gob-encode them properly.
	for _, f := range a.FactTypes {
		gob.Register(f)
	}
}
// actionAllocator hands out action structs from one big pre-allocated slice.
// Building actions is memory-intensive (e.g. the Istio project has ~120k
// total actions), so a single backing allocation beats one heap object per
// action.
type actionAllocator struct {
	allocatedActions []action // fixed-size backing storage, sized by markAllActions
	nextFreeIndex    int      // index of the next unused element
}
// newActionAllocator returns an allocator backed by a single slice of
// maxCount zero-valued actions, handed out one at a time by alloc.
func newActionAllocator(maxCount int) *actionAllocator {
	alloc := new(actionAllocator)
	alloc.allocatedActions = make([]action, maxCount)
	return alloc
}
// alloc hands out a pointer to the next pre-allocated action. It panics if
// more actions are requested than markAllActions counted, since that would
// invalidate previously returned pointers if the slice were grown.
func (actAlloc *actionAllocator) alloc() *action {
	capacity := len(actAlloc.allocatedActions)
	if actAlloc.nextFreeIndex == capacity {
		panic(fmt.Sprintf("Made too many allocations of actions: %d allowed", capacity))
	}

	idx := actAlloc.nextFreeIndex
	actAlloc.nextFreeIndex = idx + 1
	return &actAlloc.allocatedActions[idx]
}
//nolint:gocritic
func (r *runner) prepareAnalysis(pkgs []*packages.Package,
analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) {
@ -116,70 +223,30 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
// Each graph node (action) is one unit of analysis.
// Edges express package-to-package (vertical) dependencies,
// and analysis-to-analysis (horizontal) dependencies.
type key struct {
*analysis.Analyzer
*packages.Package
}
actions := make(map[key]*action)
initialPkgs := map[*packages.Package]bool{}
// This place is memory-intensive: e.g. Istio project has 120k total actions.
// Therefore optimize it carefully.
markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs))
for _, a := range analyzers {
for _, pkg := range pkgs {
r.markAllActions(a, pkg, markedActions)
}
}
totalActionsCount := len(markedActions)
actions := make(map[actKey]*action, totalActionsCount)
actAlloc := newActionAllocator(totalActionsCount)
initialPkgs := make(map[*packages.Package]bool, len(pkgs))
for _, pkg := range pkgs {
initialPkgs[pkg] = true
}
var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
k := key{a, pkg}
act, ok := actions[k]
if !ok {
act = &action{
a: a,
pkg: pkg,
log: r.log,
prefix: r.prefix,
pkgCache: r.pkgCache,
isInitialPkg: initialPkgs[pkg],
needAnalyzeSource: initialPkgs[pkg],
analysisDoneCh: make(chan struct{}),
objectFacts: make(map[objectFactKey]analysis.Fact),
packageFacts: make(map[packageFactKey]analysis.Fact),
loadMode: r.loadMode,
}
// Add a dependency on each required analyzers.
for _, req := range a.Requires {
act.deps = append(act.deps, mkAction(req, pkg))
}
// An analysis that consumes/produces facts
// must run on the package's dependencies too.
if len(a.FactTypes) > 0 {
paths := make([]string, 0, len(pkg.Imports))
for path := range pkg.Imports {
paths = append(paths, path)
}
sort.Strings(paths) // for determinism
for _, path := range paths {
dep := mkAction(a, pkg.Imports[path])
act.deps = append(act.deps, dep)
}
// Need to register fact types for pkgcache proper gob encoding.
for _, f := range a.FactTypes {
gob.Register(f)
}
}
actions[k] = act
}
return act
}
// Build nodes for initial packages.
var roots []*action
roots := make([]*action, 0, len(pkgs)*len(analyzers))
for _, a := range analyzers {
for _, pkg := range pkgs {
root := mkAction(a, pkg)
root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc)
root.isroot = true
roots = append(roots, root)
}
@ -190,6 +257,8 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
allActions = append(allActions, act)
}
debugf("Built %d actions", len(actions))
return initialPkgs, allActions, roots
}
@ -334,9 +403,6 @@ type action struct {
a *analysis.Analyzer
pkg *packages.Package
pass *analysis.Pass
isroot bool
isInitialPkg bool
needAnalyzeSource bool
deps []*action
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
@ -349,7 +415,9 @@ type action struct {
analysisDoneCh chan struct{}
loadCachedFactsDone bool
loadCachedFactsOk bool
loadMode LoadMode
isroot bool
isInitialPkg bool
needAnalyzeSource bool
}
type objectFactKey struct {

View File

@ -1,163 +0,0 @@
package astcache
import (
"go/ast"
"go/parser"
"go/token"
"path/filepath"
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/logutils"
)
// File is one parsed Go source file stored in the cache.
type File struct {
	F    *ast.File      // parsed AST; nil when parsing failed
	Fset *token.FileSet // file set resolving positions in F
	Name string         // normalized (absolute, symlink-resolved) path
	Err  error          // parse error, if any
}
// Cache holds parsed ASTs keyed by normalized file path so that each file
// is parsed at most once per run.
type Cache struct {
	m   map[string]*File // map from absolute file path to file data
	s   []*File          // successfully parsed files, built by prepareValidFiles
	log logutils.Log
}
// NewCache returns an empty cache that reports problems through log.
func NewCache(log logutils.Log) *Cache {
	cache := &Cache{log: log}
	cache.m = make(map[string]*File)
	return cache
}
// ParsedFilenames returns the normalized paths of all cached files, in
// unspecified (map-iteration) order.
func (c Cache) ParsedFilenames() []string {
	// Pre-size the slice: the final length is exactly the map size, so
	// repeated append growth/copies are avoidable.
	keys := make([]string, 0, len(c.m))
	for k := range c.m {
		keys = append(keys, k)
	}
	return keys
}
// normalizeFilename converts filename into a canonical cache key: absolute,
// cleaned, and with symlinks resolved. On any failure it logs a warning and
// falls back to the best path available rather than returning an error.
func (c Cache) normalizeFilename(filename string) string {
	absPath := func() string {
		if filepath.IsAbs(filename) {
			return filepath.Clean(filename)
		}

		absFilename, err := filepath.Abs(filename)
		if err != nil {
			c.log.Warnf("Can't abs-ify filename %s: %s", filename, err)
			return filename
		}
		return absFilename
	}()

	ret, err := fsutils.EvalSymlinks(absPath)
	if err != nil {
		c.log.Warnf("Failed to eval symlinks for %s: %s", absPath, err)
		return absPath
	}

	return ret
}
// Get returns the cached file for filename (after path normalization),
// or nil when the file is not in the cache.
func (c Cache) Get(filename string) *File {
	return c.m[c.normalizeFilename(filename)]
}
// GetAllValidFiles returns every file that parsed successfully, as
// collected by prepareValidFiles.
func (c Cache) GetAllValidFiles() []*File {
	return c.s
}
// prepareValidFiles rebuilds the slice served by GetAllValidFiles,
// keeping only files with a non-nil AST and no parse error.
func (c *Cache) prepareValidFiles() {
	valid := make([]*File, 0, len(c.m))
	for _, file := range c.m {
		if file.Err == nil && file.F != nil {
			valid = append(valid, file)
		}
	}
	c.s = valid
}
// LoadFromFilenames parses the given files into a fresh cache, sharing one
// FileSet across all of them. Parse failures are recorded per file rather
// than returned.
func LoadFromFilenames(log logutils.Log, filenames ...string) *Cache {
	cache := NewCache(log)

	sharedFset := token.NewFileSet()
	for _, name := range filenames {
		cache.parseFile(name, sharedFset)
	}

	cache.prepareValidFiles()
	return cache
}
// LoadFromPackages builds a cache from already-loaded packages, reusing
// their ASTs where available. The error result is currently always nil and
// is kept for signature stability.
func LoadFromPackages(pkgs []*packages.Package, log logutils.Log) (*Cache, error) {
	cache := NewCache(log)
	for i := range pkgs {
		cache.loadFromPackage(pkgs[i])
	}
	cache.prepareValidFiles()
	return cache, nil
}
// extractFilenamesForAstFile returns the on-disk filename recorded for the
// AST file f, or an empty result when no filename is available.
func (c *Cache) extractFilenamesForAstFile(fset *token.FileSet, f *ast.File) []string {
	// false ignores //line comments: name can be incorrect for generated files with //line directives
	// mapping e.g. from .rl to .go files.
	position := fset.PositionFor(f.Pos(), false)
	if position.Filename == "" {
		return nil
	}
	return []string{position.Filename}
}
// loadFromPackage caches the ASTs already present in pkg.Syntax, then
// parses any file listed in pkg.GoFiles that pkg.Syntax did not cover.
func (c *Cache) loadFromPackage(pkg *packages.Package) {
	for _, f := range pkg.Syntax {
		for _, filename := range c.extractFilenamesForAstFile(pkg.Fset, f) {
			filePath := c.normalizeFilename(filename)
			c.m[filePath] = &File{
				F:    f,
				Fset: pkg.Fset,
				Name: filePath,
			}
		}
	}

	// some Go files sometimes aren't present in pkg.Syntax
	fset := token.NewFileSet() // can't use pkg.Fset: it will overwrite offsets by preprocessed files
	for _, filePath := range pkg.GoFiles {
		filePath = c.normalizeFilename(filePath)
		if c.m[filePath] == nil {
			c.parseFile(filePath, fset)
		}
	}
}
// parseFile parses filePath — with comments, since linters such as golint
// need them — and stores the result in the cache, including any parse
// error. A nil fset gets a fresh FileSet.
func (c *Cache) parseFile(filePath string, fset *token.FileSet) {
	if fset == nil {
		fset = token.NewFileSet()
	}

	normalized := c.normalizeFilename(filePath)

	parsed, parseErr := parser.ParseFile(fset, normalized, nil, parser.ParseComments)
	if parseErr != nil {
		// Info level only: the failure will be reported by the typecheck
		// linter or go/analysis anyway.
		c.log.Infof("Can't parse AST of %s: %s", normalized, parseErr)
	}

	c.m[normalized] = &File{
		F:    parsed,
		Fset: fset,
		Err:  parseErr,
		Name: normalized,
	}
}

View File

@ -3,15 +3,10 @@ package linter
import (
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
"github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
"github.com/golangci/golangci-lint/pkg/logutils"
)
@ -30,7 +25,6 @@ type Context struct {
PkgCache *pkgcache.Cache
LoadGuard *load.Guard
ASTCache *astcache.Cache
}
func (c *Context) Settings() *config.LintersSettings {

View File

@ -23,7 +23,6 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/exitcodes"
"github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
)
@ -293,14 +292,6 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l
return nil, exitcodes.ErrNoGoFiles
}
astLog := cl.log.Child("astcache")
startedLoadingASTAt := time.Now()
astCache, err := astcache.LoadFromPackages(deduplicatedPkgs, astLog)
if err != nil {
return nil, err
}
cl.log.Infof("Loaded %d AST files in %s", len(astCache.ParsedFilenames()), time.Since(startedLoadingASTAt))
ret := &linter.Context{
Packages: deduplicatedPkgs,
@ -309,7 +300,6 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l
OriginalPackages: pkgs,
Cfg: cl.cfg,
ASTCache: astCache,
Log: cl.log,
FileCache: cl.fileCache,
LineCache: cl.lineCache,

View File

@ -13,13 +13,14 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/packages"
"github.com/golangci/golangci-lint/pkg/result"
"github.com/golangci/golangci-lint/pkg/result/processors"
"github.com/golangci/golangci-lint/pkg/timeutils"
gopackages "golang.org/x/tools/go/packages"
)
type Runner struct {
@ -27,8 +28,8 @@ type Runner struct {
Log logutils.Log
}
func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, goenv *goutil.Env,
lineCache *fsutils.LineCache, dbManager *lintersdb.Manager) (*Runner, error) {
func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env,
lineCache *fsutils.LineCache, dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) {
icfg := cfg.Issues
excludePatterns := icfg.ExcludePatterns
if icfg.UseDefaultExcludes {
@ -67,16 +68,23 @@ func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, g
return &Runner{
Processors: []processors.Processor{
processors.NewCgo(goenv),
processors.NewFilenameUnadjuster(astCache, log.Child("filename_unadjuster")), // must go after Cgo
processors.NewPathPrettifier(), // must be before diff, nolint and exclude autogenerated processor at least
// Must go after Cgo.
processors.NewFilenameUnadjuster(pkgs, log.Child("filename_unadjuster")),
// Must be before diff, nolint and exclude autogenerated processor at least.
processors.NewPathPrettifier(),
skipFilesProcessor,
skipDirsProcessor, // must be after path prettifier
processors.NewAutogeneratedExclude(astCache),
processors.NewIdentifierMarker(), // must be before exclude because users see already marked output and configure excluding by it
processors.NewAutogeneratedExclude(),
// Must be before exclude because users see already marked output and configure excluding by it.
processors.NewIdentifierMarker(),
processors.NewExclude(excludeTotalPattern),
processors.NewExcludeRules(excludeRules, lineCache, log.Child("exclude_rules")),
processors.NewNolint(astCache, log.Child("nolint"), dbManager),
processors.NewNolint(log.Child("nolint"), dbManager),
processors.NewUniqByLine(cfg),
processors.NewDiff(icfg.Diff, icfg.DiffFromRevision, icfg.DiffPatchFilePath),

View File

@ -65,7 +65,8 @@ func (sl StderrLog) Fatalf(format string, args ...interface{}) {
}
func (sl StderrLog) Panicf(format string, args ...interface{}) {
sl.logger.Panicf("%s%s", sl.prefix(), fmt.Sprintf(format, args...))
v := fmt.Sprintf("%s%s", sl.prefix(), fmt.Sprintf(format, args...))
panic(v)
}
func (sl StderrLog) Errorf(format string, args ...interface{}) {

View File

@ -3,13 +3,11 @@ package packages
import (
"fmt"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"golang.org/x/tools/go/packages"
)
//nolint:gocyclo
func ExtractErrors(pkg *packages.Package, astCache *astcache.Cache) []packages.Error {
func ExtractErrors(pkg *packages.Package) []packages.Error {
errors := extractErrorsImpl(pkg, map[*packages.Package]bool{})
if len(errors) == 0 {
return errors
@ -28,8 +26,8 @@ func ExtractErrors(pkg *packages.Package, astCache *astcache.Cache) []packages.E
if len(pkg.GoFiles) != 0 {
// errors were extracted from deps and have at least one file in the package
for i := range uniqErrors {
errPos, parseErr := ParseErrorPosition(uniqErrors[i].Pos)
if parseErr != nil || astCache.Get(errPos.Filename) == nil {
_, parseErr := ParseErrorPosition(uniqErrors[i].Pos)
if parseErr != nil {
// change pos to local file to properly process it by processors (properly read line etc)
uniqErrors[i].Msg = fmt.Sprintf("%s: %s", uniqErrors[i].Pos, uniqErrors[i].Msg)
uniqErrors[i].Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0])

View File

@ -1,13 +1,14 @@
package processors
import (
"bufio"
"fmt"
"go/ast"
"go/token"
"os"
"path/filepath"
"strings"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/pkg/errors"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
@ -22,13 +23,11 @@ type ageFileSummaryCache map[string]*ageFileSummary
type AutogeneratedExclude struct {
fileSummaryCache ageFileSummaryCache
astCache *astcache.Cache
}
func NewAutogeneratedExclude(astCache *astcache.Cache) *AutogeneratedExclude {
func NewAutogeneratedExclude() *AutogeneratedExclude {
return &AutogeneratedExclude{
fileSummaryCache: ageFileSummaryCache{},
astCache: astCache,
}
}
@ -103,60 +102,42 @@ func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFile
return nil, fmt.Errorf("no file path for issue")
}
f := p.astCache.Get(i.FilePath())
if f == nil || f.Err != nil {
return nil, fmt.Errorf("can't parse file %s: %v", i.FilePath(), f)
doc, err := getDoc(i.FilePath())
if err != nil {
return nil, errors.Wrapf(err, "failed to get doc of file %s", i.FilePath())
}
autogenDebugf("file %q: astcache file is %+v", i.FilePath(), *f)
doc := getDoc(f.F, f.Fset, i.FilePath())
fs.isGenerated = isGeneratedFileByComment(doc)
autogenDebugf("file %q is generated: %t", i.FilePath(), fs.isGenerated)
return fs, nil
}
func getDoc(f *ast.File, fset *token.FileSet, filePath string) string {
// don't use just f.Doc: e.g. mockgen leaves extra line between comment and package name
var importPos token.Pos
if len(f.Imports) != 0 {
importPos = f.Imports[0].Pos()
autogenDebugf("file %q: search comments until first import pos %d (%s)",
filePath, importPos, fset.Position(importPos))
} else {
importPos = f.End()
autogenDebugf("file %q: search comments until EOF pos %d (%s)",
filePath, importPos, fset.Position(importPos))
func getDoc(filePath string) (string, error) {
file, err := os.Open(filePath)
if err != nil {
return "", errors.Wrap(err, "failed to open file")
}
defer file.Close()
var neededComments []string
for _, g := range f.Comments {
pos := g.Pos()
filePos := fset.Position(pos)
text := g.Text()
// files using cgo have implicitly added comment "Created by cgo - DO NOT EDIT" for go <= 1.10
// and "Code generated by cmd/cgo" for go >= 1.11
isCgoGenerated := strings.Contains(text, "Created by cgo") || strings.Contains(text, "Code generated by cmd/cgo")
isAllowed := pos < importPos && filePos.Column == 1 && !isCgoGenerated
if isAllowed {
autogenDebugf("file %q: pos=%d, filePos=%s: comment %q: it's allowed", filePath, pos, filePos, text)
neededComments = append(neededComments, text)
scanner := bufio.NewScanner(file)
var docLines []string
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if strings.HasPrefix(line, "//") { //nolint:gocritic
text := strings.TrimSpace(strings.TrimPrefix(line, "//"))
docLines = append(docLines, text)
} else if line == "" {
// go to next line
} else {
autogenDebugf("file %q: pos=%d, filePos=%s: comment %q: it's NOT allowed", filePath, pos, filePos, text)
break
}
}
autogenDebugf("file %q: got %d allowed comments", filePath, len(neededComments))
if len(neededComments) == 0 {
return ""
if err := scanner.Err(); err != nil {
return "", errors.Wrap(err, "failed to scan file")
}
return strings.Join(neededComments, "\n")
return strings.Join(docLines, "\n"), nil
}
func (p AutogeneratedExclude) Finish() {}

View File

@ -1,6 +1,7 @@
package processors
import (
"path/filepath"
"strings"
"testing"
@ -66,3 +67,12 @@ func TestIsAutogeneratedDetection(t *testing.T) {
assert.False(t, isGenerated)
}
}
func TestGetDoc(t *testing.T) {
const expectedDoc = `first line
second line
third line`
doc, err := getDoc(filepath.Join("testdata", "autogen_exclude.go"))
assert.NoError(t, err)
assert.Equal(t, expectedDoc, doc)
}

View File

@ -1,14 +1,16 @@
package processors
import (
"go/parser"
"go/token"
"path/filepath"
"strings"
"sync"
"time"
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/result"
)
@ -25,31 +27,60 @@ type FilenameUnadjuster struct {
var _ Processor = &FilenameUnadjuster{}
func NewFilenameUnadjuster(cache *astcache.Cache, log logutils.Log) *FilenameUnadjuster {
m := map[string]posMapper{}
for _, f := range cache.GetAllValidFiles() {
adjustedFilename := f.Fset.PositionFor(f.F.Pos(), true).Filename
if adjustedFilename == "" {
continue
}
unadjustedFilename := f.Fset.PositionFor(f.F.Pos(), false).Filename
if unadjustedFilename == "" || unadjustedFilename == adjustedFilename {
continue
}
if !strings.HasSuffix(unadjustedFilename, ".go") {
continue // file.go -> /caches/cgo-xxx
}
func processUnadjusterPkg(m map[string]posMapper, pkg *packages.Package, log logutils.Log) {
fset := token.NewFileSet() // it's more memory efficient to not store all in one fset
f := f
m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
tokenFile := f.Fset.File(f.F.Pos())
if tokenFile == nil {
log.Warnf("Failed to get token file for %s", adjustedFilename)
return adjustedPos
}
return f.Fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
}
for _, filename := range pkg.CompiledGoFiles {
// It's important to call func here to run GC
processUnadjusterFile(filename, m, log, fset)
}
}
// processUnadjusterFile parses one Go file and, when //line directives map
// its positions to a different ("adjusted") filename, stores in m a mapper
// from the adjusted filename back to the on-disk (unadjusted) position.
func processUnadjusterFile(filename string, m map[string]posMapper, log logutils.Log, fset *token.FileSet) {
	syntax, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		// Error will be reported by typecheck
		return
	}

	// Position with //line directives applied...
	adjustedFilename := fset.PositionFor(syntax.Pos(), true).Filename
	if adjustedFilename == "" {
		return
	}

	// ...and without them; equal names mean there is nothing to unadjust.
	unadjustedFilename := fset.PositionFor(syntax.Pos(), false).Filename
	if unadjustedFilename == "" || unadjustedFilename == adjustedFilename {
		return
	}

	if !strings.HasSuffix(unadjustedFilename, ".go") {
		return // file.go -> /caches/cgo-xxx
	}

	// The closure captures fset and syntax so positions can be translated
	// lazily, per issue, when the processor runs.
	m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
		tokenFile := fset.File(syntax.Pos())
		if tokenFile == nil {
			log.Warnf("Failed to get token file for %s", adjustedFilename)
			return adjustedPos
		}
		return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
	}
}
func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
m := map[string]posMapper{}
startedAt := time.Now()
var wg sync.WaitGroup
wg.Add(len(pkgs))
for _, pkg := range pkgs {
go func(pkg *packages.Package) {
// It's important to call func here to run GC
processUnadjusterPkg(m, pkg, log)
wg.Done()
}(pkg)
}
wg.Wait()
log.Infof("Pre-built %d adjustments in %s", len(m), time.Since(startedAt))
return &FilenameUnadjuster{
m: m,

View File

@ -3,11 +3,11 @@ package processors
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"sort"
"strings"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
@ -47,17 +47,15 @@ type filesCache map[string]*fileData
type Nolint struct {
cache filesCache
astCache *astcache.Cache
dbManager *lintersdb.Manager
log logutils.Log
unknownLintersSet map[string]bool
}
func NewNolint(astCache *astcache.Cache, log logutils.Log, dbManager *lintersdb.Manager) *Nolint {
func NewNolint(log logutils.Log, dbManager *lintersdb.Manager) *Nolint {
return &Nolint{
cache: filesCache{},
astCache: astCache,
dbManager: dbManager,
log: log,
unknownLintersSet: map[string]bool{},
@ -87,17 +85,18 @@ func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) {
return nil, fmt.Errorf("no file path for issue")
}
file := p.astCache.Get(i.FilePath())
if file == nil {
return nil, fmt.Errorf("no file %s in ast cache %v",
i.FilePath(), p.astCache.ParsedFilenames())
}
if file.Err != nil {
// TODO: migrate this parsing to go/analysis facts
// or cache them somehow per file.
// Don't use cached AST because they consume a lot of memory on large projects.
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, i.FilePath(), nil, parser.ParseComments)
if err != nil {
// Don't report the error because it must already be reported by typecheck or go/analysis.
return fd, nil
}
fd.ignoredRanges = p.buildIgnoredRangesForFile(file.F, file.Fset, i.FilePath())
fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, i.FilePath())
nolintDebugf("file %s: built nolint ranges are %+v", i.FilePath(), fd.ignoredRanges)
return fd, nil
}

View File

@ -9,7 +9,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/golangci/golangci-lint/pkg/lint/astcache"
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
@ -32,13 +31,7 @@ func newNolint2FileIssue(line int) result.Issue {
}
func newTestNolintProcessor(log logutils.Log) *Nolint {
cache := astcache.LoadFromFilenames(log,
filepath.Join("testdata", "nolint.go"),
filepath.Join("testdata", "nolint2.go"),
filepath.Join("testdata", "nolint_bad_names.go"),
filepath.Join("testdata", "nolint_whole_file.go"),
)
return NewNolint(cache, log, lintersdb.NewManager(nil))
return NewNolint(log, lintersdb.NewManager(nil))
}
func getMockLog() *logutils.MockLog {

View File

@ -0,0 +1,7 @@
// first line
//second line
// third line
package testdata // no this text
// and no this text too