dev: unifying processors code style (#4592)

Ludovic Fernandez 2024-03-29 21:00:18 +01:00 committed by GitHub
parent ec9755157e
commit f00c89f86b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 434 additions and 385 deletions

View File

@ -86,7 +86,7 @@ func NewRunner(log logutils.Log, cfg *config.Config, args []string, goenv *gouti
processors.NewNolint(log.Child(logutils.DebugKeyNolint), dbManager, enabledLinters),
processors.NewUniqByLine(cfg),
processors.NewDiff(cfg.Issues.Diff, cfg.Issues.DiffFromRevision, cfg.Issues.DiffPatchFilePath, cfg.Issues.WholeFiles),
processors.NewDiff(&cfg.Issues),
processors.NewMaxPerFileFromLinter(cfg),
processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child(logutils.DebugKeyMaxSameIssues), cfg),
processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child(logutils.DebugKeyMaxFromLinter), cfg),

View File

@ -17,7 +17,7 @@ const (
genAutoFile = "autogenerated file" // easyjson
)
var _ Processor = &AutogeneratedExclude{}
var _ Processor = (*AutogeneratedExclude)(nil)
type fileSummary struct {
generated bool
@ -41,7 +41,7 @@ func NewAutogeneratedExclude(strict bool) *AutogeneratedExclude {
}
}
func (p *AutogeneratedExclude) Name() string {
func (*AutogeneratedExclude) Name() string {
return "autogenerated_exclude"
}
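
The assertion change above is the idiom this commit standardizes on across processors. Below is a minimal, self-contained sketch of the unified style — a typed-nil interface assertion, methods ordered Name/Process/Finish, and unnamed receivers where the receiver is unused — with a simplified stand-in for the Processor interface, not the real one from the processors package.

package main

import "fmt"

type Issue struct{ Text string }

// Simplified stand-in for the processors.Processor interface.
type Processor interface {
	Name() string
	Process(issues []Issue) ([]Issue, error)
	Finish()
}

// Compile-time check that *AutogeneratedExclude satisfies Processor.
// The typed-nil form asserts the pointer type without allocating a value,
// unlike the older `var _ Processor = &AutogeneratedExclude{}` form.
var _ Processor = (*AutogeneratedExclude)(nil)

type AutogeneratedExclude struct{}

// Unnamed receivers: these methods never use the receiver.
func (*AutogeneratedExclude) Name() string { return "autogenerated_exclude" }

func (*AutogeneratedExclude) Process(issues []Issue) ([]Issue, error) {
	return issues, nil
}

func (*AutogeneratedExclude) Finish() {}

func main() {
	var p Processor = &AutogeneratedExclude{}
	fmt.Println(p.Name())
}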

View File

@ -9,49 +9,51 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*Cgo)(nil)
type Cgo struct {
goCacheDir string
}
var _ Processor = Cgo{}
func NewCgo(goenv *goutil.Env) *Cgo {
return &Cgo{
goCacheDir: goenv.Get(goutil.EnvGoCache),
}
}
func (p Cgo) Name() string {
func (Cgo) Name() string {
return "cgo"
}
func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssuesErr(issues, func(issue *result.Issue) (bool, error) {
// some linters (e.g. gosec, deadcode) return incorrect filepaths for cgo issues,
// also cgo files have strange issues looking like false positives.
// cache dir contains all preprocessed files including cgo files
issueFilePath := issue.FilePath()
if !filepath.IsAbs(issue.FilePath()) {
absPath, err := filepath.Abs(issue.FilePath())
if err != nil {
return false, fmt.Errorf("failed to build abs path for %q: %w", issue.FilePath(), err)
}
issueFilePath = absPath
}
if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) {
return false, nil
}
if filepath.Base(issue.FilePath()) == "_cgo_gotypes.go" {
// skip cgo warning for go1.10
return false, nil
}
return true, nil
})
return filterIssuesErr(issues, p.shouldPassIssue)
}
func (Cgo) Finish() {}
func (p Cgo) shouldPassIssue(issue *result.Issue) (bool, error) {
// some linters (e.g. gosec, deadcode) return incorrect filepaths for cgo issues,
// also cgo files have strange issues looking like false positives.
// cache dir contains all preprocessed files including cgo files
issueFilePath := issue.FilePath()
if !filepath.IsAbs(issue.FilePath()) {
absPath, err := filepath.Abs(issue.FilePath())
if err != nil {
return false, fmt.Errorf("failed to build abs path for %q: %w", issue.FilePath(), err)
}
issueFilePath = absPath
}
if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) {
return false, nil
}
if filepath.Base(issue.FilePath()) == "_cgo_gotypes.go" {
// skip cgo warning for go1.10
return false, nil
}
return true, nil
}
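
The Cgo rewrite above replaces the inline closure with a named shouldPassIssue method handed to filterIssuesErr. A hedged sketch of that pattern follows, with a simplified Issue type and a stand-in filter helper rather than the real ones from pkg/result.

package main

import "fmt"

type Issue struct{ FilePath string }

type Cgo struct{ goCacheDir string }

// filterIssuesErr keeps only the issues for which pass returns true.
func filterIssuesErr(issues []Issue, pass func(*Issue) (bool, error)) ([]Issue, error) {
	var out []Issue
	for i := range issues {
		ok, err := pass(&issues[i])
		if err != nil {
			return nil, err
		}
		if ok {
			out = append(out, issues[i])
		}
	}
	return out, nil
}

// Process stays a one-liner; the logic lives in a named method instead of
// an inline closure, which is the style the commit unifies on.
func (p Cgo) Process(issues []Issue) ([]Issue, error) {
	return filterIssuesErr(issues, p.shouldPassIssue)
}

func (p Cgo) shouldPassIssue(issue *Issue) (bool, error) {
	return issue.FilePath != "_cgo_gotypes.go", nil
}

func main() {
	out, _ := Cgo{}.Process([]Issue{{FilePath: "main.go"}, {FilePath: "_cgo_gotypes.go"}})
	fmt.Println(len(out)) // 1
}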

View File

@ -9,11 +9,14 @@ import (
"github.com/golangci/revgrep"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/result"
)
const envGolangciDiffProcessorPatch = "GOLANGCI_DIFF_PROCESSOR_PATCH"
var _ Processor = (*Diff)(nil)
type Diff struct {
onlyNew bool
fromRev string
@ -22,19 +25,17 @@ type Diff struct {
patch string
}
var _ Processor = Diff{}
func NewDiff(onlyNew bool, fromRev, patchFilePath string, wholeFiles bool) *Diff {
func NewDiff(cfg *config.Issues) *Diff {
return &Diff{
onlyNew: onlyNew,
fromRev: fromRev,
patchFilePath: patchFilePath,
wholeFiles: wholeFiles,
onlyNew: cfg.Diff,
fromRev: cfg.DiffFromRevision,
patchFilePath: cfg.DiffPatchFilePath,
wholeFiles: cfg.WholeFiles,
patch: os.Getenv(envGolangciDiffProcessorPatch),
}
}
func (p Diff) Name() string {
func (Diff) Name() string {
return "diff"
}
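
NewDiff now receives the whole *config.Issues instead of four positional arguments (see the call-site change in the first hunk of this commit). A minimal sketch of that constructor shape, assuming a trimmed-down Issues struct limited to the fields the processor reads; the field names and env var come from the hunk above.

package main

import (
	"fmt"
	"os"
)

type Issues struct {
	Diff              bool
	DiffFromRevision  string
	DiffPatchFilePath string
	WholeFiles        bool
}

type Diff struct {
	onlyNew       bool
	fromRev       string
	patchFilePath string
	wholeFiles    bool
	patch         string
}

// NewDiff pulls everything it needs from one config pointer instead of four
// positional parameters, so call sites stay stable when options grow.
func NewDiff(cfg *Issues) *Diff {
	return &Diff{
		onlyNew:       cfg.Diff,
		fromRev:       cfg.DiffFromRevision,
		patchFilePath: cfg.DiffPatchFilePath,
		wholeFiles:    cfg.WholeFiles,
		patch:         os.Getenv("GOLANGCI_DIFF_PROCESSOR_PATCH"),
	}
}

func main() {
	d := NewDiff(&Issues{Diff: true, DiffFromRevision: "HEAD~1"})
	fmt.Println(d.onlyNew, d.fromRev)
}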

View File

@ -6,7 +6,7 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = Exclude{}
var _ Processor = (*Exclude)(nil)
type Exclude struct {
name string
@ -49,4 +49,4 @@ func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) {
}), nil
}
func (p Exclude) Finish() {}
func (Exclude) Finish() {}

View File

@ -8,7 +8,7 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = ExcludeRules{}
var _ Processor = (*ExcludeRules)(nil)
type excludeRule struct {
baseRule
@ -50,10 +50,13 @@ func NewExcludeRules(log logutils.Log, files *fsutils.Files, opts ExcludeRulesOp
return p
}
func (p ExcludeRules) Name() string { return p.name }
func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) {
if len(p.rules) == 0 {
return issues, nil
}
return filterIssues(issues, func(issue *result.Issue) bool {
for _, rule := range p.rules {
rule := rule
@ -61,12 +64,11 @@ func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) {
return false
}
}
return true
}), nil
}
func (p ExcludeRules) Name() string { return p.name }
func (ExcludeRules) Finish() {}
func createRules(rules []ExcludeRule, prefix string) []excludeRule {

View File

@ -14,6 +14,8 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*FilenameUnadjuster)(nil)
type posMapper func(pos token.Position) token.Position
type adjustMap struct {
@ -30,7 +32,61 @@ type FilenameUnadjuster struct {
loggedUnadjustments map[string]bool
}
var _ Processor = &FilenameUnadjuster{}
func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
m := adjustMap{m: map[string]posMapper{}}
startedAt := time.Now()
var wg sync.WaitGroup
wg.Add(len(pkgs))
for _, pkg := range pkgs {
go func(pkg *packages.Package) {
// It's important to call func here to run GC
processUnadjusterPkg(&m, pkg, log)
wg.Done()
}(pkg)
}
wg.Wait()
log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt))
return &FilenameUnadjuster{
m: m.m,
log: log,
loggedUnadjustments: map[string]bool{},
}
}
func (*FilenameUnadjuster) Name() string {
return "filename_unadjuster"
}
func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
issueFilePath := issue.FilePath()
if !filepath.IsAbs(issue.FilePath()) {
absPath, err := filepath.Abs(issue.FilePath())
if err != nil {
p.log.Warnf("failed to build abs path for %q: %s", issue.FilePath(), err)
return issue
}
issueFilePath = absPath
}
mapper := p.m[issueFilePath]
if mapper == nil {
return issue
}
newIssue := *issue
newIssue.Pos = mapper(issue.Pos)
if !p.loggedUnadjustments[issue.Pos.Filename] {
p.log.Infof("Unadjusted from %v to %v", issue.Pos, newIssue.Pos)
p.loggedUnadjustments[issue.Pos.Filename] = true
}
return &newIssue
}), nil
}
func (*FilenameUnadjuster) Finish() {}
func processUnadjusterPkg(m *adjustMap, pkg *packages.Package, log logutils.Log) {
fset := token.NewFileSet() // it's more memory efficient to not store all in one fset
@ -64,68 +120,14 @@ func processUnadjusterFile(filename string, m *adjustMap, log logutils.Log, fset
m.Lock()
defer m.Unlock()
m.m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
tokenFile := fset.File(syntax.Pos())
if tokenFile == nil {
log.Warnf("Failed to get token file for %s", adjustedFilename)
return adjustedPos
}
return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
}
}
func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
m := adjustMap{m: map[string]posMapper{}}
startedAt := time.Now()
var wg sync.WaitGroup
wg.Add(len(pkgs))
for _, pkg := range pkgs {
go func(pkg *packages.Package) {
// It's important to call func here to run GC
processUnadjusterPkg(&m, pkg, log)
wg.Done()
}(pkg)
}
wg.Wait()
log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt))
return &FilenameUnadjuster{
m: m.m,
log: log,
loggedUnadjustments: map[string]bool{},
}
}
func (p *FilenameUnadjuster) Name() string {
return "filename_unadjuster"
}
func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
issueFilePath := issue.FilePath()
if !filepath.IsAbs(issue.FilePath()) {
absPath, err := filepath.Abs(issue.FilePath())
if err != nil {
p.log.Warnf("failed to build abs path for %q: %s", issue.FilePath(), err)
return issue
}
issueFilePath = absPath
}
mapper := p.m[issueFilePath]
if mapper == nil {
return issue
}
newIssue := *issue
newIssue.Pos = mapper(issue.Pos)
if !p.loggedUnadjustments[issue.Pos.Filename] {
p.log.Infof("Unadjusted from %v to %v", issue.Pos, newIssue.Pos)
p.loggedUnadjustments[issue.Pos.Filename] = true
}
return &newIssue
}), nil
}
func (p *FilenameUnadjuster) Finish() {}

View File

@ -16,7 +16,7 @@ import (
"github.com/golangci/golangci-lint/pkg/timeutils"
)
var _ Processor = Fixer{}
var _ Processor = (*Fixer)(nil)
type Fixer struct {
cfg *config.Config
@ -34,12 +34,12 @@ func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache
}
}
func (f Fixer) printStat() {
f.sw.PrintStages()
func (Fixer) Name() string {
return "fixer"
}
func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
if !f.cfg.Issues.NeedFix {
func (p Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
if !p.cfg.Issues.NeedFix {
return issues, nil
}
@ -57,37 +57,36 @@ func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
for file, issuesToFix := range issuesToFixPerFile {
var err error
f.sw.TrackStage("all", func() {
err = f.fixIssuesInFile(file, issuesToFix)
p.sw.TrackStage("all", func() {
err = p.fixIssuesInFile(file, issuesToFix)
})
if err != nil {
f.log.Errorf("Failed to fix issues in file %s: %s", file, err)
p.log.Errorf("Failed to fix issues in file %s: %s", file, err)
// show issues only if can't fix them
outIssues = append(outIssues, issuesToFix...)
}
}
f.printStat()
p.printStat()
return outIssues, nil
}
func (f Fixer) Name() string {
return "fixer"
}
func (Fixer) Finish() {}
func (f Fixer) Finish() {}
func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
func (p Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
// TODO: don't read the whole file into memory: read line by line;
// can't just use bufio.scanner: it has a line length limit
origFileData, err := f.fileCache.GetFileBytes(filePath)
origFileData, err := p.fileCache.GetFileBytes(filePath)
if err != nil {
return fmt.Errorf("failed to get file bytes for %s: %w", filePath, err)
}
origFileLines := bytes.Split(origFileData, []byte("\n"))
tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath)))
tmpOutFile, err := os.Create(tmpFileName)
if err != nil {
return fmt.Errorf("failed to make file %s: %w", tmpFileName, err)
@ -102,20 +101,21 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
issues = issues[:0] // reuse the same memory
for line, lineIssues := range issuesPerLine {
if mergedIssue := f.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil {
if mergedIssue := p.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil {
issues = append(issues, *mergedIssue)
}
}
issues = f.findNotIntersectingIssues(issues)
issues = p.findNotIntersectingIssues(issues)
if err = f.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil {
if err = p.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil {
tmpOutFile.Close()
_ = robustio.RemoveAll(tmpOutFile.Name())
return err
}
tmpOutFile.Close()
if err = robustio.Rename(tmpOutFile.Name(), filePath); err != nil {
_ = robustio.RemoveAll(tmpOutFile.Name())
return fmt.Errorf("failed to rename %s -> %s: %w", tmpOutFile.Name(), filePath, err)
@ -124,7 +124,7 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
return nil
}
func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue {
func (p Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue {
origLine := origFileLines[lineNum-1] // lineNum is 1-based
if len(lineIssues) == 1 && lineIssues[0].Replacement.Inline == nil {
@ -136,27 +136,27 @@ func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileL
li := &lineIssues[ind]
if li.LineRange != nil {
f.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
p.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
return &lineIssues[0]
}
inline := li.Replacement.Inline
if inline == nil || len(li.Replacement.NewLines) != 0 || li.Replacement.NeedOnlyDelete {
f.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues)
p.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues)
return li
}
if inline.StartCol < 0 || inline.Length <= 0 || inline.StartCol+inline.Length > len(origLine) {
f.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, li, inline)
p.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, li, inline)
return nil
}
}
return f.applyInlineFixes(lineIssues, origLine, lineNum)
return p.applyInlineFixes(lineIssues, origLine, lineNum)
}
func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue {
func (p Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue {
sort.Slice(lineIssues, func(i, j int) bool {
return lineIssues[i].Replacement.Inline.StartCol < lineIssues[j].Replacement.Inline.StartCol
})
@ -171,7 +171,7 @@ func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, line
for i := range lineIssues {
fix := lineIssues[i].Replacement.Inline
if fix.StartCol < curOrigLinePos {
f.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
p.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
return nil
}
@ -192,7 +192,7 @@ func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, line
return &mergedIssue
}
func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
func (p Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
sort.SliceStable(issues, func(i, j int) bool {
a, b := issues[i], issues[j]
return a.Line() < b.Line()
@ -204,10 +204,10 @@ func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
issue := &issues[i]
rng := issue.GetLineRange()
if rng.From <= currentEnd {
f.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
p.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
continue // skip intersecting issue
}
f.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
p.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
ret = append(ret, *issue)
currentEnd = rng.To
}
@ -215,7 +215,7 @@ func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
return ret
}
func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error {
func (p Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error {
// issues aren't intersecting
nextIssueIndex := 0
@ -234,7 +234,7 @@ func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmp
rng := nextIssue.GetLineRange()
if rng.From > rng.To {
// Maybe better decision is to skip such issues, re-evaluate if regressed.
f.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)",
p.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)",
rng.From, rng.To, nextIssue.FromLinter,
)
}
@ -255,3 +255,7 @@ func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmp
return nil
}
func (p Fixer) printStat() {
p.sw.PrintStages()
}

View File

@ -6,6 +6,8 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*IdentifierMarker)(nil)
type replacePattern struct {
re string
repl string
@ -126,16 +128,22 @@ func NewIdentifierMarker() *IdentifierMarker {
}
}
func (im IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
func (IdentifierMarker) Name() string {
return "identifier_marker"
}
func (p IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
newIssue := *issue
newIssue.Text = im.markIdentifiers(newIssue.Text)
newIssue.Text = p.markIdentifiers(newIssue.Text)
return &newIssue
}), nil
}
func (im IdentifierMarker) markIdentifiers(s string) string {
for _, rr := range im.replaceRegexps {
func (IdentifierMarker) Finish() {}
func (p IdentifierMarker) markIdentifiers(s string) string {
for _, rr := range p.replaceRegexps {
rs := rr.re.ReplaceAllString(s, rr.repl)
if rs != s {
return rs
@ -144,8 +152,3 @@ func (im IdentifierMarker) markIdentifiers(s string) string {
return s
}
func (im IdentifierMarker) Name() string {
return "identifier_marker"
}
func (im IdentifierMarker) Finish() {}

View File

@ -7,7 +7,7 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = InvalidIssue{}
var _ Processor = (*InvalidIssue)(nil)
type InvalidIssue struct {
log logutils.Log
@ -17,15 +17,15 @@ func NewInvalidIssue(log logutils.Log) *InvalidIssue {
return &InvalidIssue{log: log}
}
func (InvalidIssue) Name() string {
return "invalid_issue"
}
func (p InvalidIssue) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssuesErr(issues, p.shouldPassIssue)
}
func (p InvalidIssue) Name() string {
return "invalid_issue"
}
func (p InvalidIssue) Finish() {}
func (InvalidIssue) Finish() {}
func (p InvalidIssue) shouldPassIssue(issue *result.Issue) (bool, error) {
if issue.FromLinter == "typecheck" {

View File

@ -6,25 +6,25 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
type MaxFromLinter struct {
lc linterToCountMap
limit int
log logutils.Log
cfg *config.Config
}
var _ Processor = (*MaxFromLinter)(nil)
var _ Processor = &MaxFromLinter{}
type MaxFromLinter struct {
linterCounter map[string]int
limit int
log logutils.Log
cfg *config.Config
}
func NewMaxFromLinter(limit int, log logutils.Log, cfg *config.Config) *MaxFromLinter {
return &MaxFromLinter{
lc: linterToCountMap{},
limit: limit,
log: log,
cfg: cfg,
linterCounter: map[string]int{},
limit: limit,
log: log,
cfg: cfg,
}
}
func (p *MaxFromLinter) Name() string {
func (*MaxFromLinter) Name() string {
return "max_from_linter"
}
@ -39,13 +39,14 @@ func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) {
return true
}
p.lc[issue.FromLinter]++ // always inc for stat
return p.lc[issue.FromLinter] <= p.limit
p.linterCounter[issue.FromLinter]++ // always inc for stat
return p.linterCounter[issue.FromLinter] <= p.limit
}), nil
}
func (p *MaxFromLinter) Finish() {
walkStringToIntMapSortedByValue(p.lc, func(linter string, count int) {
walkStringToIntMapSortedByValue(p.linterCounter, func(linter string, count int) {
if count > p.limit {
p.log.Infof("%d/%d issues from linter %s were hidden, use --max-issues-per-linter",
count-p.limit, count, linter)
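
Finish reports hidden issues by walking the linter counter sorted by value. The helper's call signature is visible above; the body below is an illustrative assumption (the real ordering may differ) shown only to make the reporting flow concrete.

package main

import (
	"fmt"
	"sort"
)

// walkStringToIntMapSortedByValue visits map entries in ascending count order.
func walkStringToIntMapSortedByValue(m map[string]int, walk func(k string, v int)) {
	type kv struct {
		k string
		v int
	}
	kvs := make([]kv, 0, len(m))
	for k, v := range m {
		kvs = append(kvs, kv{k, v})
	}
	sort.Slice(kvs, func(i, j int) bool { return kvs[i].v < kvs[j].v })
	for _, e := range kvs {
		walk(e.k, e.v)
	}
}

func main() {
	counts := map[string]int{"govet": 3, "staticcheck": 1}
	walkStringToIntMapSortedByValue(counts, func(linter string, count int) {
		fmt.Printf("%s: %d\n", linter, count)
	})
}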

View File

@ -5,18 +5,13 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
type (
linterToCountMap map[string]int
fileToLinterToCountMap map[string]linterToCountMap
)
var _ Processor = (*MaxPerFileFromLinter)(nil)
type MaxPerFileFromLinter struct {
flc fileToLinterToCountMap
fileLinterCounter fileLinterCounter
maxPerFileFromLinterConfig map[string]int
}
var _ Processor = &MaxPerFileFromLinter{}
func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter {
maxPerFileFromLinterConfig := map[string]int{}
@ -28,12 +23,12 @@ func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter {
}
return &MaxPerFileFromLinter{
flc: fileToLinterToCountMap{},
fileLinterCounter: fileLinterCounter{},
maxPerFileFromLinterConfig: maxPerFileFromLinterConfig,
}
}
func (p *MaxPerFileFromLinter) Name() string {
func (*MaxPerFileFromLinter) Name() string {
return "max_per_file_from_linter"
}
@ -44,18 +39,35 @@ func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, e
return true
}
lm := p.flc[issue.FilePath()]
if lm == nil {
p.flc[issue.FilePath()] = linterToCountMap{}
}
count := p.flc[issue.FilePath()][issue.FromLinter]
if count >= limit {
if p.fileLinterCounter.GetCount(issue) >= limit {
return false
}
p.flc[issue.FilePath()][issue.FromLinter]++
p.fileLinterCounter.Increment(issue)
return true
}), nil
}
func (p *MaxPerFileFromLinter) Finish() {}
func (*MaxPerFileFromLinter) Finish() {}
type fileLinterCounter map[string]map[string]int
func (f fileLinterCounter) GetCount(issue *result.Issue) int {
return f.getCounter(issue)[issue.FromLinter]
}
func (f fileLinterCounter) Increment(issue *result.Issue) {
f.getCounter(issue)[issue.FromLinter]++
}
func (f fileLinterCounter) getCounter(issue *result.Issue) map[string]int {
lc := f[issue.FilePath()]
if lc == nil {
lc = map[string]int{}
f[issue.FilePath()] = lc
}
return lc
}
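
The new fileLinterCounter type replaces the ad-hoc nested-map bookkeeping that used to sit inside Process. A self-contained sketch with a simplified Issue (plain fields instead of the real accessor methods) shows how the lazily created per-file map behaves.

package main

import "fmt"

type Issue struct {
	FilePath   string
	FromLinter string
}

type fileLinterCounter map[string]map[string]int

func (f fileLinterCounter) GetCount(issue *Issue) int {
	return f.getCounter(issue)[issue.FromLinter]
}

func (f fileLinterCounter) Increment(issue *Issue) {
	f.getCounter(issue)[issue.FromLinter]++
}

// getCounter creates the per-file map on first use.
func (f fileLinterCounter) getCounter(issue *Issue) map[string]int {
	lc := f[issue.FilePath]
	if lc == nil {
		lc = map[string]int{}
		f[issue.FilePath] = lc
	}
	return lc
}

func main() {
	c := fileLinterCounter{}
	issue := &Issue{FilePath: "main.go", FromLinter: "govet"}
	c.Increment(issue)
	c.Increment(issue)
	fmt.Println(c.GetCount(issue)) // 2
}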

View File

@ -8,27 +8,25 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
type textToCountMap map[string]int
var _ Processor = (*MaxSameIssues)(nil)
type MaxSameIssues struct {
tc textToCountMap
limit int
log logutils.Log
cfg *config.Config
textCounter map[string]int
limit int
log logutils.Log
cfg *config.Config
}
var _ Processor = &MaxSameIssues{}
func NewMaxSameIssues(limit int, log logutils.Log, cfg *config.Config) *MaxSameIssues {
return &MaxSameIssues{
tc: textToCountMap{},
limit: limit,
log: log,
cfg: cfg,
textCounter: map[string]int{},
limit: limit,
log: log,
cfg: cfg,
}
}
func (p *MaxSameIssues) Name() string {
func (*MaxSameIssues) Name() string {
return "max_same_issues"
}
@ -43,13 +41,13 @@ func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) {
return true
}
p.tc[issue.Text]++ // always inc for stat
return p.tc[issue.Text] <= p.limit
p.textCounter[issue.Text]++ // always inc for stat
return p.textCounter[issue.Text] <= p.limit
}), nil
}
func (p *MaxSameIssues) Finish() {
walkStringToIntMapSortedByValue(p.tc, func(text string, count int) {
walkStringToIntMapSortedByValue(p.textCounter, func(text string, count int) {
if count > p.limit {
p.log.Infof("%d/%d issues with text %q were hidden, use --max-same-issues",
count-p.limit, count, text)

View File

@ -17,10 +17,9 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var (
nolintDebugf = logutils.Debug(logutils.DebugKeyNolint)
nolintRe = regexp.MustCompile(`^nolint( |:|$)`)
)
var _ Processor = (*Nolint)(nil)
var nolintDebugf = logutils.Debug(logutils.DebugKeyNolint)
type ignoredRange struct {
linters []string
@ -65,30 +64,29 @@ type fileData struct {
ignoredRanges []ignoredRange
}
type filesCache map[string]*fileData
type Nolint struct {
cache filesCache
fileCache map[string]*fileData
dbManager *lintersdb.Manager
enabledLinters map[string]*linter.Config
log logutils.Log
unknownLintersSet map[string]bool
pattern *regexp.Regexp
}
func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *Nolint {
return &Nolint{
cache: filesCache{},
fileCache: map[string]*fileData{},
dbManager: dbManager,
enabledLinters: enabledLinters,
log: log,
unknownLintersSet: map[string]bool{},
pattern: regexp.MustCompile(`^nolint( |:|$)`),
}
}
var _ Processor = &Nolint{}
func (p *Nolint) Name() string {
func (*Nolint) Name() string {
return "nolint"
}
@ -98,14 +96,60 @@ func (p *Nolint) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssuesErr(issues, p.shouldPassIssue)
}
func (p *Nolint) Finish() {
if len(p.unknownLintersSet) == 0 {
return
}
unknownLinters := maps.Keys(p.unknownLintersSet)
sort.Strings(unknownLinters)
p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", "))
}
func (p *Nolint) shouldPassIssue(issue *result.Issue) (bool, error) {
nolintDebugf("got issue: %v", *issue)
// don't expect disabled linters to cover their nolint statements
if issue.FromLinter == golinters.NoLintLintName && issue.ExpectNoLint && issue.ExpectedNoLintLinter != "" {
nolintDebugf("enabled linters: %v", p.enabledLinters)
if p.enabledLinters[issue.ExpectedNoLintLinter] == nil {
return false, nil
}
nolintDebugf("checking that lint issue was used for %s: %v", issue.ExpectedNoLintLinter, issue)
}
fd := p.getOrCreateFileData(issue)
for _, ir := range fd.ignoredRanges {
if !ir.doesMatch(issue) {
continue
}
nolintDebugf("found ignored range for issue %v: %v", issue, ir)
ir.matchedIssueFromLinter[issue.FromLinter] = true
if ir.originalRange != nil {
ir.originalRange.matchedIssueFromLinter[issue.FromLinter] = true
}
return false, nil
}
return true, nil
}
func (p *Nolint) getOrCreateFileData(issue *result.Issue) *fileData {
fd := p.cache[issue.FilePath()]
fd := p.fileCache[issue.FilePath()]
if fd != nil {
return fd
}
fd = &fileData{}
p.cache[issue.FilePath()] = fd
p.fileCache[issue.FilePath()] = fd
// TODO: migrate this parsing to go/analysis facts
// or cache them somehow per file.
@ -147,76 +191,6 @@ func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, fil
return allRanges
}
func (p *Nolint) shouldPassIssue(issue *result.Issue) (bool, error) {
nolintDebugf("got issue: %v", *issue)
if issue.FromLinter == golinters.NoLintLintName && issue.ExpectNoLint && issue.ExpectedNoLintLinter != "" {
// don't expect disabled linters to cover their nolint statements
nolintDebugf("enabled linters: %v", p.enabledLinters)
if p.enabledLinters[issue.ExpectedNoLintLinter] == nil {
return false, nil
}
nolintDebugf("checking that lint issue was used for %s: %v", issue.ExpectedNoLintLinter, issue)
}
fd := p.getOrCreateFileData(issue)
for _, ir := range fd.ignoredRanges {
if ir.doesMatch(issue) {
nolintDebugf("found ignored range for issue %v: %v", issue, ir)
ir.matchedIssueFromLinter[issue.FromLinter] = true
if ir.originalRange != nil {
ir.originalRange.matchedIssueFromLinter[issue.FromLinter] = true
}
return false, nil
}
}
return true, nil
}
type rangeExpander struct {
fset *token.FileSet
inlineRanges []ignoredRange
expandedRanges []ignoredRange
}
func (e *rangeExpander) Visit(node ast.Node) ast.Visitor {
if node == nil {
return e
}
nodeStartPos := e.fset.Position(node.Pos())
nodeStartLine := nodeStartPos.Line
nodeEndLine := e.fset.Position(node.End()).Line
var foundRange *ignoredRange
for _, r := range e.inlineRanges {
if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col {
r := r
foundRange = &r
break
}
}
if foundRange == nil {
return e
}
expandedRange := *foundRange
// store the original unexpanded range for matching nolintlint issues
if expandedRange.originalRange == nil {
expandedRange.originalRange = foundRange
}
if expandedRange.To < nodeEndLine {
expandedRange.To = nodeEndLine
}
nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v",
*foundRange, node, nodeStartLine, nodeEndLine, expandedRange)
e.expandedRanges = append(e.expandedRanges, expandedRange)
return e
}
func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange {
var ret []ignoredRange
for _, g := range comments {
@ -233,7 +207,7 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments .
func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange {
text = strings.TrimLeft(text, "/ ")
if !nolintRe.MatchString(text) {
if !p.pattern.MatchString(text) {
return nil
}
@ -282,15 +256,47 @@ func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *to
return buildRange(linters)
}
func (p *Nolint) Finish() {
if len(p.unknownLintersSet) == 0 {
return
type rangeExpander struct {
fset *token.FileSet
inlineRanges []ignoredRange
expandedRanges []ignoredRange
}
func (e *rangeExpander) Visit(node ast.Node) ast.Visitor {
if node == nil {
return e
}
unknownLinters := maps.Keys(p.unknownLintersSet)
sort.Strings(unknownLinters)
nodeStartPos := e.fset.Position(node.Pos())
nodeStartLine := nodeStartPos.Line
nodeEndLine := e.fset.Position(node.End()).Line
p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", "))
var foundRange *ignoredRange
for _, r := range e.inlineRanges {
if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col {
r := r
foundRange = &r
break
}
}
if foundRange == nil {
return e
}
expandedRange := *foundRange
// store the original unexpanded range for matching nolintlint issues
if expandedRange.originalRange == nil {
expandedRange.originalRange = foundRange
}
if expandedRange.To < nodeEndLine {
expandedRange.To = nodeEndLine
}
nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v",
*foundRange, node, nodeStartLine, nodeEndLine, expandedRange)
e.expandedRanges = append(e.expandedRanges, expandedRange)
return e
}
// put nolintlint last
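
One more unification in this file: the package-level nolintRe regexp becomes a pattern field initialized in NewNolint, so the processor owns its own state instead of sharing a package variable. A minimal sketch of that move; anything beyond what the hunks show is illustrative.

package main

import (
	"fmt"
	"regexp"
)

type Nolint struct {
	pattern *regexp.Regexp
}

// The regexp is owned by the value returned from the constructor rather than
// living as shared package state, keeping the processor self-contained.
func NewNolint() *Nolint {
	return &Nolint{pattern: regexp.MustCompile(`^nolint( |:|$)`)}
}

func main() {
	p := NewNolint()
	fmt.Println(p.pattern.MatchString("nolint:govet")) // true
}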

View File

@ -5,13 +5,13 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*PathPrefixer)(nil)
// PathPrefixer adds a customizable prefix to every output path
type PathPrefixer struct {
prefix string
}
var _ Processor = new(PathPrefixer)
// NewPathPrefixer returns a new path prefixer for the provided string
func NewPathPrefixer(prefix string) *PathPrefixer {
return &PathPrefixer{prefix: prefix}

View File

@ -8,23 +8,22 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*PathPrettifier)(nil)
type PathPrettifier struct {
root string
}
var _ Processor = PathPrettifier{}
func NewPathPrettifier() *PathPrettifier {
root, err := fsutils.Getwd()
if err != nil {
panic(fmt.Sprintf("Can't get working dir: %s", err))
}
return &PathPrettifier{
root: root,
}
return &PathPrettifier{root: root}
}
func (p PathPrettifier) Name() string {
func (PathPrettifier) Name() string {
return "path_prettifier"
}
@ -45,4 +44,4 @@ func (p PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) {
}), nil
}
func (p PathPrettifier) Finish() {}
func (PathPrettifier) Finish() {}

View File

@ -8,23 +8,22 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*PathShortener)(nil)
type PathShortener struct {
wd string
}
var _ Processor = PathShortener{}
func NewPathShortener() *PathShortener {
wd, err := fsutils.Getwd()
if err != nil {
panic(fmt.Sprintf("Can't get working dir: %s", err))
}
return &PathShortener{
wd: wd,
}
return &PathShortener{wd: wd}
}
func (p PathShortener) Name() string {
func (PathShortener) Name() string {
return "path_shortener"
}
@ -37,4 +36,4 @@ func (p PathShortener) Process(issues []result.Issue) ([]result.Issue, error) {
}), nil
}
func (p PathShortener) Finish() {}
func (PathShortener) Finish() {}

View File

@ -10,7 +10,7 @@ import (
const severityFromLinter = "@linter"
var _ Processor = &Severity{}
var _ Processor = (*Severity)(nil)
type severityRule struct {
baseRule
@ -58,6 +58,8 @@ func NewSeverity(log logutils.Log, files *fsutils.Files, opts SeverityOptions) *
return p
}
func (p *Severity) Name() string { return p.name }
func (p *Severity) Process(issues []result.Issue) ([]result.Issue, error) {
if len(p.rules) == 0 && p.defaultSeverity == "" {
return issues, nil
@ -66,6 +68,8 @@ func (p *Severity) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, p.transform), nil
}
func (*Severity) Finish() {}
func (p *Severity) transform(issue *result.Issue) *result.Issue {
for _, rule := range p.rules {
if rule.match(issue, p.files, p.log) {
@ -89,10 +93,6 @@ func (p *Severity) transform(issue *result.Issue) *result.Issue {
return issue
}
func (p *Severity) Name() string { return p.name }
func (*Severity) Finish() {}
func createSeverityRules(rules []SeverityRule, prefix string) []severityRule {
parsedRules := make([]severityRule, 0, len(rules))

View File

@ -10,6 +10,8 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*SkipDirs)(nil)
type skipStat struct {
pattern string
count int
@ -24,8 +26,6 @@ type SkipDirs struct {
pathPrefix string
}
var _ Processor = (*SkipDirs)(nil)
func NewSkipDirs(patterns []string, log logutils.Log, args []string, pathPrefix string) (*SkipDirs, error) {
var patternsRe []*regexp.Regexp
for _, p := range patterns {
@ -52,7 +52,7 @@ func NewSkipDirs(patterns []string, log logutils.Log, args []string, pathPrefix
}, nil
}
func (p *SkipDirs) Name() string {
func (*SkipDirs) Name() string {
return "skip_dirs"
}
@ -64,6 +64,12 @@ func (p *SkipDirs) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssues(issues, p.shouldPassIssue), nil
}
func (p *SkipDirs) Finish() {
for dir, stat := range p.skippedDirs {
p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern)
}
}
func (p *SkipDirs) shouldPassIssue(issue *result.Issue) bool {
if filepath.IsAbs(issue.FilePath()) {
if isGoFile(issue.FilePath()) {
@ -124,12 +130,6 @@ func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool {
return true
}
func (p *SkipDirs) Finish() {
for dir, stat := range p.skippedDirs {
p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern)
}
}
func absDirs(args []string) ([]string, error) {
if len(args) == 0 {
args = append(args, "./...")

View File

@ -8,21 +8,23 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*SkipFiles)(nil)
type SkipFiles struct {
patterns []*regexp.Regexp
pathPrefix string
}
var _ Processor = (*SkipFiles)(nil)
func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) {
var patternsRe []*regexp.Regexp
for _, p := range patterns {
p = fsutils.NormalizePathInRegex(p)
patternRe, err := regexp.Compile(p)
if err != nil {
return nil, fmt.Errorf("can't compile regexp %q: %w", p, err)
}
patternsRe = append(patternsRe, patternRe)
}
@ -32,7 +34,7 @@ func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) {
}, nil
}
func (p SkipFiles) Name() string {
func (SkipFiles) Name() string {
return "skip_files"
}
@ -43,6 +45,7 @@ func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssues(issues, func(issue *result.Issue) bool {
path := fsutils.WithPathPrefix(p.pathPrefix, issue.FilePath())
for _, pattern := range p.patterns {
if pattern.MatchString(path) {
return false
@ -53,4 +56,4 @@ func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
}), nil
}
func (p SkipFiles) Finish() {}
func (SkipFiles) Finish() {}

View File

@ -45,23 +45,26 @@ func NewSortResults(cfg *config.Config) *SortResults {
}
}
func (SortResults) Name() string { return "sort_results" }
// Process is performing sorting of the result issues.
func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
if !sr.cfg.SortResults {
func (p SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
if !p.cfg.SortResults {
return issues, nil
}
if len(sr.cfg.SortOrder) == 0 {
sr.cfg.SortOrder = []string{orderNameFile}
if len(p.cfg.SortOrder) == 0 {
p.cfg.SortOrder = []string{orderNameFile}
}
var cmps []*comparator
for _, name := range sr.cfg.SortOrder {
if c, ok := sr.cmps[name]; ok {
cmps = append(cmps, c)
} else {
for _, name := range p.cfg.SortOrder {
c, ok := p.cmps[name]
if !ok {
return nil, fmt.Errorf("unsupported sort-order name %q", name)
}
cmps = append(cmps, c)
}
cmp, err := mergeComparators(cmps)
@ -76,9 +79,7 @@ func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
return issues, nil
}
func (sr SortResults) Name() string { return "sort_results" }
func (sr SortResults) Finish() {}
func (SortResults) Finish() {}
type compareResult int
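
The SortResults change above drops the if/else around the comparator lookup in favor of an early return when a sort-order name is unknown. A small sketch of that lookup style, using a stand-in comparator type rather than the real one.

package main

import "fmt"

type comparator struct{ name string }

// pickComparators resolves the configured sort order, failing fast on the
// first unsupported name instead of branching with an else.
func pickComparators(cmpsByName map[string]*comparator, order []string) ([]*comparator, error) {
	var cmps []*comparator
	for _, name := range order {
		c, ok := cmpsByName[name]
		if !ok {
			return nil, fmt.Errorf("unsupported sort-order name %q", name)
		}
		cmps = append(cmps, c)
	}
	return cmps, nil
}

func main() {
	byName := map[string]*comparator{"file": {name: "file"}}
	cmps, err := pickComparators(byName, []string{"file"})
	fmt.Println(len(cmps), err)
}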

View File

@ -6,13 +6,13 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*SourceCode)(nil)
type SourceCode struct {
lineCache *fsutils.LineCache
log logutils.Log
}
var _ Processor = SourceCode{}
func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode {
return &SourceCode{
lineCache: lc,
@ -20,28 +20,31 @@ func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode {
}
}
func (p SourceCode) Name() string {
func (SourceCode) Name() string {
return "source_code"
}
func (p SourceCode) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
newIssue := *issue
lineRange := issue.GetLineRange()
for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ {
line, err := p.lineCache.GetLine(issue.FilePath(), lineNumber)
if err != nil {
p.log.Warnf("Failed to get line %d for file %s: %s",
lineNumber, issue.FilePath(), err)
return issue
}
newIssue.SourceLines = append(newIssue.SourceLines, line)
}
return &newIssue
}), nil
return transformIssues(issues, p.transform), nil
}
func (p SourceCode) Finish() {}
func (SourceCode) Finish() {}
func (p SourceCode) transform(issue *result.Issue) *result.Issue {
newIssue := *issue
lineRange := issue.GetLineRange()
for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ {
line, err := p.lineCache.GetLine(issue.FilePath(), lineNumber)
if err != nil {
p.log.Warnf("Failed to get line %d for file %s: %s",
lineNumber, issue.FilePath(), err)
return issue
}
newIssue.SourceLines = append(newIssue.SourceLines, line)
}
return &newIssue
}

View File

@ -5,26 +5,23 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
type (
lineToCount map[int]int
fileToLineToCount map[string]lineToCount
)
const uniqByLineLimit = 1
var _ Processor = (*UniqByLine)(nil)
type UniqByLine struct {
flc fileToLineToCount
cfg *config.Config
fileLineCounter fileLineCounter
cfg *config.Config
}
func NewUniqByLine(cfg *config.Config) *UniqByLine {
return &UniqByLine{
flc: fileToLineToCount{},
cfg: cfg,
fileLineCounter: fileLineCounter{},
cfg: cfg,
}
}
var _ Processor = &UniqByLine{}
func (p *UniqByLine) Name() string {
func (*UniqByLine) Name() string {
return "uniq_by_line"
}
@ -33,28 +30,44 @@ func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) {
return issues, nil
}
return filterIssues(issues, func(issue *result.Issue) bool {
if issue.Replacement != nil && p.cfg.Issues.NeedFix {
// if issue will be auto-fixed we shouldn't collapse issues:
// e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them.
return true
}
lc := p.flc[issue.FilePath()]
if lc == nil {
lc = lineToCount{}
p.flc[issue.FilePath()] = lc
}
const limit = 1
count := lc[issue.Line()]
if count == limit {
return false
}
lc[issue.Line()]++
return true
}), nil
return filterIssues(issues, p.shouldPassIssue), nil
}
func (p *UniqByLine) Finish() {}
func (*UniqByLine) Finish() {}
func (p *UniqByLine) shouldPassIssue(issue *result.Issue) bool {
if issue.Replacement != nil && p.cfg.Issues.NeedFix {
// if issue will be auto-fixed we shouldn't collapse issues:
// e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them.
return true
}
if p.fileLineCounter.GetCount(issue) == uniqByLineLimit {
return false
}
p.fileLineCounter.Increment(issue)
return true
}
type fileLineCounter map[string]map[int]int
func (f fileLineCounter) GetCount(issue *result.Issue) int {
return f.getCounter(issue)[issue.Line()]
}
func (f fileLineCounter) Increment(issue *result.Issue) {
f.getCounter(issue)[issue.Line()]++
}
func (f fileLineCounter) getCounter(issue *result.Issue) map[int]int {
lc := f[issue.FilePath()]
if lc == nil {
lc = map[int]int{}
f[issue.FilePath()] = lc
}
return lc
}