fix: nolintlint comment analysis. (#1571)

Ludovic Fernandez 2020-12-22 01:24:28 +01:00 committed by GitHub
parent a893212f02
commit be0297933a
3 changed files with 181 additions and 141 deletions

View File

@@ -47,7 +47,7 @@ type NotSpecific struct {
 }
 
 func (i NotSpecific) Details() string {
-    return fmt.Sprintf("directive `%s` should mention specific linter such as `//%s:my-linter`",
+    return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`",
         i.fullDirective, i.directiveWithOptionalLeadingSpace)
 }
@@ -58,7 +58,7 @@ type ParseError struct {
 }
 
 func (i ParseError) Details() string {
-    return fmt.Sprintf("directive `%s` should match `//%s[:<comma-separated-linters>] [// <explanation>]`",
+    return fmt.Sprintf("directive `%s` should match `%s[:<comma-separated-linters>] [// <explanation>]`",
         i.fullDirective,
         i.directiveWithOptionalLeadingSpace)
 }
@@ -112,8 +112,7 @@ const (
     NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation
 )
 
-// matches lines starting with the nolint directive
-var directiveOnlyPattern = regexp.MustCompile(`^\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)
+var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)
 
 // matches a complete nolint directive
 var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\s*(//.*)?\s*\n?$`)
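
The core of the fix is visible in this hunk: the old directiveOnlyPattern was matched against c.Text(), which is the text of a whole comment group with the "//" markers stripped, while the new commentPattern is matched against each raw comment.Text, so it anchors on the leading "//" and sees every comment line individually. A minimal standalone sketch of how the new pattern behaves on raw comment text (the sample strings are illustrative, not taken from the repository):

package main

import (
    "fmt"
    "regexp"
)

// Same regexp as the commentPattern introduced by this commit: it is applied to the
// raw text of each comment (comment.Text), so it must account for the leading "//".
var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)

func main() {
    samples := []string{
        "//nolint",
        "// nolint:gocyclo, funlen",
        "//nolintother", // no word boundary after "nolint", so this is not a directive
        "// something else",
    }
    for _, text := range samples {
        fmt.Printf("%-28q matches=%v\n", text, commentPattern.MatchString(text))
    }
}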
@@ -142,50 +141,50 @@ var trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`)
 func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
     var issues []Issue
 
     for _, node := range nodes {
         if file, ok := node.(*ast.File); ok {
             for _, c := range file.Comments {
-                text := c.Text()
-                matches := directiveOnlyPattern.FindStringSubmatch(text)
-                if len(matches) == 0 {
-                    continue
-                }
-
-                directive := matches[1]
-
-                // check for a space between the "//" and the directive
-                leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(c.List[0].Text) // c.Text() doesn't have all leading space
-                if len(leadingSpaceMatches) == 0 {
-                    continue
-                }
-                leadingSpace := leadingSpaceMatches[1]
-
-                directiveWithOptionalLeadingSpace := directive
-                if len(leadingSpace) > 0 {
-                    directiveWithOptionalLeadingSpace = " " + directive
-                }
-
-                base := BaseIssue{
-                    fullDirective:                     c.List[0].Text,
-                    directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace,
-                    position:                          fset.Position(c.Pos()),
-                }
-
-                // check for, report and eliminate leading spaces so we can check for other issues
-                if leadingSpace != "" && leadingSpace != " " {
-                    issues = append(issues, ExtraLeadingSpace{
-                        BaseIssue: base,
-                    })
-                }
-
-                if (l.needs&NeedsMachineOnly) != 0 && strings.HasPrefix(directiveWithOptionalLeadingSpace, " ") {
-                    issues = append(issues, NotMachine{BaseIssue: base})
-                }
-
-                fullMatches := fullDirectivePattern.FindStringSubmatch(c.List[0].Text)
-                if len(fullMatches) == 0 {
-                    issues = append(issues, ParseError{BaseIssue: base})
-                    continue
-                }
-
-                lintersText, explanation := fullMatches[1], fullMatches[2]
-                var linters []string
-                if len(lintersText) > 0 {
+                for _, comment := range c.List {
+                    if !commentPattern.MatchString(comment.Text) {
+                        continue
+                    }
+
+                    // check for a space between the "//" and the directive
+                    leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text)
+
+                    var leadingSpace string
+                    if len(leadingSpaceMatches) > 0 {
+                        leadingSpace = leadingSpaceMatches[1]
+                    }
+
+                    directiveWithOptionalLeadingSpace := comment.Text
+                    if len(leadingSpace) > 0 {
+                        split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//")
+                        directiveWithOptionalLeadingSpace = "// " + strings.TrimSpace(split[1])
+                    }
+
+                    base := BaseIssue{
+                        fullDirective:                     comment.Text,
+                        directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace,
+                        position:                          fset.Position(comment.Pos()),
+                    }
+
+                    // check for, report and eliminate leading spaces so we can check for other issues
+                    if len(leadingSpace) > 1 {
+                        issues = append(issues, ExtraLeadingSpace{BaseIssue: base})
+                    }
+
+                    if (l.needs&NeedsMachineOnly) != 0 && len(leadingSpace) > 0 {
+                        issues = append(issues, NotMachine{BaseIssue: base})
+                    }
+
+                    fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text)
+                    if len(fullMatches) == 0 {
+                        issues = append(issues, ParseError{BaseIssue: base})
+                        continue
+                    }
+
+                    lintersText, explanation := fullMatches[1], fullMatches[2]
+                    var linters []string
+                    if len(lintersText) > 0 {
@@ -198,6 +197,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
                             }
                         }
                     }
+
                     if (l.needs & NeedsSpecific) != 0 {
                         if len(linters) == 0 {
                             issues = append(issues, NotSpecific{BaseIssue: base})
@@ -205,7 +205,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
                     }
 
                     // when detecting unused directives, we send all the directives through and filter them out in the nolint processor
-                    if l.needs&NeedsUnused != 0 {
+                    if (l.needs & NeedsUnused) != 0 {
                         if len(linters) == 0 {
                             issues = append(issues, UnusedCandidate{BaseIssue: base})
                         } else {
@@ -224,8 +224,9 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
                                 break
                             }
                         }
+
                         if needsExplanation {
-                            fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(c.List[0].Text, "")
+                            fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "")
                             issues = append(issues, NoExplanation{
                                 BaseIssue:                       base,
                                 fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation,
@@ -235,5 +236,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
                 }
             }
         }
+    }
+
     return issues, nil
 }
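
The other behavioural change in Run is how the quoted directive is rebuilt when there is whitespace after the "//": instead of prefixing the bare word "nolint" with a single space, the new code reconstructs a "// nolint"-style prefix from whatever precedes the first colon. A hedged, standalone sketch of that step in isolation (normalizeDirective is a made-up name; in the linter the logic is inlined and only reached when a leading space was detected):

package main

import (
    "fmt"
    "strings"
)

// normalizeDirective mirrors the normalization added to Run: take the part of the
// comment before the first ":", split off the "//" marker, and rebuild the directive
// as "// <word>" so the suggestion messages can quote it cleanly.
func normalizeDirective(commentText string) string {
    split := strings.Split(strings.SplitN(commentText, ":", 2)[0], "//")
    return "// " + strings.TrimSpace(split[1])
}

func main() {
    for _, text := range []string{
        "//  nolint",
        "// nolint:gocyclo // explained",
        "// nolint // because",
    } {
        fmt.Printf("%-32q -> %q\n", text, normalizeDirective(text))
    }
}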

View File

@@ -6,14 +6,26 @@ import (
     "testing"
 
     "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
 )
 
+//nolint:funlen
 func TestNoLintLint(t *testing.T) {
-    t.Run("when no explanation is provided", func(t *testing.T) {
-        linter, _ := NewLinter(NeedsExplanation, nil)
-        expectIssues(t, linter, `
+    testCases := []struct {
+        desc     string
+        needs    Needs
+        excludes []string
+        contents string
+        expected []string
+    }{
+        {
+            desc:  "when no explanation is provided",
+            needs: NeedsExplanation,
+            contents: `
 package bar
 
+// example
+//nolint
 func foo() {
   bad() //nolint
   bad() //nolint //
@@ -21,25 +33,42 @@ func foo() {
   good() //nolint // this is ok
   other() //nolintother
 }`,
-            "directive `//nolint` should provide explanation such as `//nolint // this is why` at testing.go:5:9",
-            "directive `//nolint //` should provide explanation such as `//nolint // this is why` at testing.go:6:9",
-            "directive `//nolint // ` should provide explanation such as `//nolint // this is why` at testing.go:7:9",
-        )
-    })
-
-    t.Run("when no explanation is needed for a specific linter", func(t *testing.T) {
-        linter, _ := NewLinter(NeedsExplanation, []string{"lll"})
-        expectIssues(t, linter, `
+            expected: []string{
+                "directive `//nolint` should provide explanation such as `//nolint // this is why` at testing.go:5:1",
+                "directive `//nolint` should provide explanation such as `//nolint // this is why` at testing.go:7:9",
+                "directive `//nolint //` should provide explanation such as `//nolint // this is why` at testing.go:8:9",
+                "directive `//nolint // ` should provide explanation such as `//nolint // this is why` at testing.go:9:9",
+            },
+        },
+        {
+            desc:  "when multiple directives on multiple lines",
+            needs: NeedsExplanation,
+            contents: `
+package bar
+
+// example
+//nolint // this is ok
+//nolint:dupl
+func foo() {}`,
+            expected: []string{
+                "directive `//nolint:dupl` should provide explanation such as `//nolint:dupl // this is why` at testing.go:6:1",
+            },
+        },
+        {
+            desc:     "when no explanation is needed for a specific linter",
+            needs:    NeedsExplanation,
+            excludes: []string{"lll"},
+            contents: `
 package bar
 
 func foo() {
   thisIsAReallyLongLine() //nolint:lll
-}`)
-    })
-
-    t.Run("when no specific linter is mentioned", func(t *testing.T) {
-        linter, _ := NewLinter(NeedsSpecific, nil)
-        expectIssues(t, linter, `
+}`,
+        },
+        {
+            desc:  "when no specific linter is mentioned",
+            needs: NeedsSpecific,
+            contents: `
 package bar
 
 func foo() {
@@ -47,35 +76,41 @@ func foo() {
   bad() //nolint
   bad() // nolint // because
 }`,
+            expected: []string{
                 "directive `//nolint` should mention specific linter such as `//nolint:my-linter` at testing.go:6:9",
-            "directive `// nolint // because` should mention specific linter such as `// nolint:my-linter` at testing.go:7:9")
-    })
-
-    t.Run("when machine-readable style isn't used", func(t *testing.T) {
-        linter, _ := NewLinter(NeedsMachineOnly, nil)
-        expectIssues(t, linter, `
+                "directive `// nolint // because` should mention specific linter such as `// nolint:my-linter` at testing.go:7:9",
+            },
+        },
+        {
+            desc:  "when machine-readable style isn't used",
+            needs: NeedsMachineOnly,
+            contents: `
 package bar
 
 func foo() {
   bad() // nolint
   good() //nolint
-}`, "directive `// nolint` should be written without leading space as `//nolint` at testing.go:5:9")
-    })
-
-    t.Run("extra spaces in front of directive are reported", func(t *testing.T) {
-        linter, _ := NewLinter(0, nil)
-        expectIssues(t, linter, `
+}`,
+            expected: []string{
+                "directive `// nolint` should be written without leading space as `//nolint` at testing.go:5:9",
+            },
+        },
+        {
+            desc:     "extra spaces in front of directive are reported",
+            contents: `
 package bar
 
 func foo() {
   bad() //  nolint
   good() // nolint
-}`, "directive `//  nolint` should not have more than one leading space at testing.go:5:9")
-    })
-
-    t.Run("spaces are allowed in comma-separated list of linters", func(t *testing.T) {
-        linter, _ := NewLinter(0, nil)
-        expectIssues(t, linter, `
+}`,
+            expected: []string{
+                "directive `//  nolint` should not have more than one leading space at testing.go:5:9",
+            },
+        },
+        {
+            desc:     "spaces are allowed in comma-separated list of linters",
+            contents: `
 package bar
 
 func foo() {
@@ -84,40 +119,42 @@ func foo() {
   good() // nolint: linter1,linter2
   good() // nolint: linter1, linter2
 }`,
+            expected: []string{
                 "directive `// nolint:linter1 linter2` should match `// nolint[:<comma-separated-linters>] [// <explanation>]` at testing.go:6:9", //nolint:lll // this is a string
-        )
-    })
-
-    t.Run("multi-line comments don't confuse parser", func(t *testing.T) {
-        linter, _ := NewLinter(0, nil)
-        expectIssues(t, linter, `
+            },
+        },
+        {
+            desc:     "multi-line comments don't confuse parser",
+            contents: `
 package bar
 
 func foo() {
   //nolint:test
   // something else
-}`)
-    })
+}`,
+        },
     }
 
-func expectIssues(t *testing.T, linter *Linter, contents string, issues ...string) {
-    actualIssues := parseFile(t, linter, contents)
+    for _, test := range testCases {
+        test := test
+        t.Run(test.desc, func(t *testing.T) {
+            t.Parallel()
+
+            linter, _ := NewLinter(test.needs, test.excludes)
+
+            fset := token.NewFileSet()
+            expr, err := parser.ParseFile(fset, "testing.go", test.contents, parser.ParseComments)
+            require.NoError(t, err)
+
+            actualIssues, err := linter.Run(fset, expr)
+            require.NoError(t, err)
+
             actualIssueStrs := make([]string, 0, len(actualIssues))
             for _, i := range actualIssues {
                 actualIssueStrs = append(actualIssueStrs, i.String())
             }
-    assert.ElementsMatch(t, issues, actualIssueStrs, "expected %s but got %s", issues, actualIssues)
-}
-
-func parseFile(t *testing.T, linter *Linter, contents string) []Issue {
-    fset := token.NewFileSet()
-    expr, err := parser.ParseFile(fset, "testing.go", contents, parser.ParseComments)
-    if err != nil {
-        t.Fatalf("unable to parse file contents: %s", err)
-    }
-    issues, err := linter.Run(fset, expr)
-    if err != nil {
-        t.Fatalf("unable to parse file: %s", err)
-    }
-    return issues
-}
+
+            assert.ElementsMatch(t, test.expected, actualIssueStrs, "expected %s \nbut got %s", test.expected, actualIssues)
+        })
+    }
 }
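
One detail in the rewritten table-driven test worth calling out: test := test re-declares the loop variable before the closure passed to t.Run, which matters once t.Parallel() is in play (under pre-Go 1.22 loop-variable semantics). A minimal illustration of the capture problem it avoids, with made-up names:

package main

import "fmt"

func main() {
    cases := []string{"a", "b", "c"}

    var fns []func() string
    for _, tc := range cases {
        tc := tc // shadow the loop variable, as the test does with `test := test`
        fns = append(fns, func() string { return tc })
    }

    for _, f := range fns {
        fmt.Println(f()) // prints a, b, c; without the shadowing line (before Go 1.22) each closure would see "c"
    }
}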

View File

@@ -22,7 +22,7 @@ import (
 //
 // Sources files are supplied as fullshort slice.
 // It consists of pairs: full path to source file and its base name.
-//nolint:gocyclo,funlen
+//nolint:gocyclo
 func errorCheck(outStr string, wantAuto bool, fullshort ...string) (err error) {
     var errs []error
     out := splitOutput(outStr, wantAuto)
@@ -160,7 +160,7 @@ var (
 )
 
 // wantedErrors parses expected errors from comments in a file.
-//nolint:nakedret,gocyclo,funlen
+//nolint:nakedret
 func wantedErrors(file, short string) (errs []wantedError) {
     cache := make(map[string]*regexp.Regexp)