enabled linters: gocritic, nilerr (#1853)

mmetc 2022-11-07 10:36:50 +01:00 committed by GitHub
parent 59fc403e32
commit 895691dad1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 97 additions and 104 deletions

View file

@ -71,6 +71,7 @@ linters:
# - exportloopref # checks for pointers to enclosing loop variables
# - funlen # Tool for detection of long functions
# - gochecknoinits # Checks that no init functions are present in Go code
+ # - gocritic # Provides diagnostics that check for bugs, performance and style issues.
# - goheader # Checks is file header matches to pattern
# - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
# - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
@ -84,6 +85,7 @@ linters:
# - logrlint # Check logr arguments.
# - makezero # Finds slice declarations with non-zero initial length
# - misspell # Finds commonly misspelled English words in comments
+ # - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
# - nolintlint # Reports ill-formed or insufficient nolint directives
# - predeclared # find code that shadows one of Go's predeclared identifiers
# - reassign # Checks that package variables are not reassigned
@ -107,14 +109,12 @@ linters:
- errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
- exhaustive # check exhaustiveness of enum switch statements
- gci # Gci control golang package import order and make it always deterministic.
- - gocritic # Provides diagnostics that check for bugs, performance and style issues.
- godot # Check if comments end in a period
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
- goimports # In addition to fixing imports, goimports also formats your code in the same style as gofmt.
- gosec # (gas): Inspects source code for security problems
- lll # Reports long lines
- nakedret # Finds naked returns in functions greater than a specified function length
- - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
- nonamedreturns # Reports all named returns
- nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL.
- promlinter # Check Prometheus metrics naming via promlint
@ -199,3 +199,23 @@ issues:
- linters:
- errcheck
text: "Error return value of `.*` is not checked"
+ #
+ # gocritic
+ #
+ - linters:
+ - gocritic
+ text: "ifElseChain: rewrite if-else to switch statement"
+ - linters:
+ - gocritic
+ text: "captLocal: `.*' should not be capitalized"
+ - linters:
+ - gocritic
+ text: "appendAssign: append result not assigned to the same slice"
+ - linters:
+ - gocritic
+ text: "commentFormatting: put a space between `//` and comment text"

View file

@ -189,31 +189,27 @@ cscli alerts list --type ban`,
if *alertListFilter.Until == "" {
alertListFilter.Until = nil
- } else {
+ } else if strings.HasSuffix(*alertListFilter.Until, "d") {
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
- if strings.HasSuffix(*alertListFilter.Until, "d") {
- realDuration := strings.TrimSuffix(*alertListFilter.Until, "d")
- days, err := strconv.Atoi(realDuration)
- if err != nil {
- printHelp(cmd)
- log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Until)
- }
- *alertListFilter.Until = fmt.Sprintf("%d%s", days*24, "h")
+ realDuration := strings.TrimSuffix(*alertListFilter.Until, "d")
+ days, err := strconv.Atoi(realDuration)
+ if err != nil {
+ printHelp(cmd)
+ log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Until)
+ }
+ *alertListFilter.Until = fmt.Sprintf("%d%s", days*24, "h")
}
if *alertListFilter.Since == "" {
alertListFilter.Since = nil
- } else {
+ } else if strings.HasSuffix(*alertListFilter.Since, "d") {
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
- if strings.HasSuffix(*alertListFilter.Since, "d") {
- realDuration := strings.TrimSuffix(*alertListFilter.Since, "d")
- days, err := strconv.Atoi(realDuration)
- if err != nil {
- printHelp(cmd)
- log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Since)
- }
- *alertListFilter.Since = fmt.Sprintf("%d%s", days*24, "h")
+ realDuration := strings.TrimSuffix(*alertListFilter.Since, "d")
+ days, err := strconv.Atoi(realDuration)
+ if err != nil {
+ printHelp(cmd)
+ log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Since)
+ }
+ *alertListFilter.Since = fmt.Sprintf("%d%s", days*24, "h")
}
if *alertListFilter.IncludeCAPI {

View file

@ -179,31 +179,28 @@ cscli decisions list -t ban
/* nullify the empty entries to avoid bad filter */
if *filter.Until == "" {
filter.Until = nil
- } else {
+ } else if strings.HasSuffix(*filter.Until, "d") {
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
- if strings.HasSuffix(*filter.Until, "d") {
- realDuration := strings.TrimSuffix(*filter.Until, "d")
- days, err := strconv.Atoi(realDuration)
- if err != nil {
- printHelp(cmd)
- log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
- }
- *filter.Until = fmt.Sprintf("%d%s", days*24, "h")
+ realDuration := strings.TrimSuffix(*filter.Until, "d")
+ days, err := strconv.Atoi(realDuration)
+ if err != nil {
+ printHelp(cmd)
+ log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
+ }
+ *filter.Until = fmt.Sprintf("%d%s", days*24, "h")
}
if *filter.Since == "" {
filter.Since = nil
- } else {
+ } else if strings.HasSuffix(*filter.Since, "d") {
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
- if strings.HasSuffix(*filter.Since, "d") {
- realDuration := strings.TrimSuffix(*filter.Since, "d")
- days, err := strconv.Atoi(realDuration)
- if err != nil {
- printHelp(cmd)
- log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
- }
- *filter.Since = fmt.Sprintf("%d%s", days*24, "h")
+ realDuration := strings.TrimSuffix(*filter.Since, "d")
+ days, err := strconv.Atoi(realDuration)
+ if err != nil {
+ printHelp(cmd)
+ log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
+ }
+ *filter.Since = fmt.Sprintf("%d%s", days*24, "h")
}
if *filter.IncludeCAPI {
*filter.Limit = 0

View file

@ -113,7 +113,7 @@ title: %s
`
name := filepath.Base(filename)
base := strings.TrimSuffix(name, path.Ext(name))
- return fmt.Sprintf(header, base, strings.Replace(base, "_", " ", -1))
+ return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
}
func linkHandler(name string) string {
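The Replace to ReplaceAll rewrites in this commit match gocritic's wrapperFunc diagnostic (the check name is an assumption here). A minimal sketch of the pattern, with a made-up function name:

package example

import "strings"

// strings.Replace with n == -1 replaces every occurrence, so gocritic suggests
// the dedicated wrapper, which reads more clearly and behaves identically.
func titleFromBase(base string) string {
    // before: strings.Replace(base, "_", " ", -1)
    return strings.ReplaceAll(base, "_", " ")
}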

View file

@ -292,7 +292,7 @@ func (cw *CloudwatchSource) WatchLogGroupForStreams(out chan LogStreamTailConfig
return false
}
cw.logger.Tracef("stream %s is elligible for monitoring", *event.LogStreamName)
- //the stream has been update recently, check if we should monitor it
+ //the stream has been updated recently, check if we should monitor it
var expectMode int
if !cw.Config.UseTimeMachine {
expectMode = leaky.LIVE
@ -329,7 +329,7 @@ func (cw *CloudwatchSource) WatchLogGroupForStreams(out chan LogStreamTailConfig
}
}
- //LogStreamManager receives the potential streams to monitor, and start a go routine when needed
+ //LogStreamManager receives the potential streams to monitor, and starts a go routine when needed
func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outChan chan types.Event) error {
cw.logger.Debugf("starting to monitor streams for %s", cw.Config.GroupName)
@ -350,11 +350,9 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
match, err := regexp.Match(*cw.Config.StreamRegexp, []byte(newStream.StreamName))
if err != nil {
cw.logger.Warningf("invalid regexp : %s", err)
- } else {
- if !match {
- cw.logger.Tracef("stream %s doesn't match %s", newStream.StreamName, *cw.Config.StreamRegexp)
- continue
- }
+ } else if !match {
+ cw.logger.Tracef("stream %s doesn't match %s", newStream.StreamName, *cw.Config.StreamRegexp)
+ continue
}
}
@ -414,7 +412,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
func (cw *CloudwatchSource) TailLogStream(cfg *LogStreamTailConfig, outChan chan types.Event) error {
var startFrom *string
- var lastReadMessage time.Time = time.Now().UTC()
+ lastReadMessage := time.Now().UTC()
ticker := time.NewTicker(cfg.PollStreamInterval)
//resume at existing index if we already had
streamIndexMutex.Lock()

View file

@ -54,7 +54,8 @@ func readLine(scanner *bufio.Scanner, out chan string, errChan chan error) error
if errChan != nil && scanner.Err() != nil {
errChan <- scanner.Err()
close(errChan)
- return nil //error is already consumed by runJournalCtl
+ // the error is already consumed by runJournalCtl
+ return nil //nolint:nilerr
}
if errChan != nil {
close(errChan)
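The //nolint:nilerr directive added above marks this return as intentional. As a sketch (not code from this commit, names invented), this is the pattern nilerr normally reports:

package example

import "os"

// nilerr flags functions that check err != nil and then return a nil error,
// because the caller silently loses the failure.
func openQuietly(path string) (*os.File, error) {
    f, err := os.Open(path)
    if err != nil {
        return nil, nil // nilerr reports this line unless it carries a //nolint:nilerr directive
    }
    return f, nil
}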

View file

@ -38,10 +38,8 @@ func TestPri(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.PRI != test.expected {
- t.Errorf("expected %d, got %d", test.expected, r.PRI)
- }
+ } else if r.PRI != test.expected {
+ t.Errorf("expected %d, got %d", test.expected, r.PRI)
}
}
})
@ -85,10 +83,8 @@ func TestTimestamp(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.Timestamp.Format(time.RFC3339) != test.expected {
- t.Errorf("expected %s, got %s", test.expected, r.Timestamp.Format(time.RFC3339))
- }
+ } else if r.Timestamp.Format(time.RFC3339) != test.expected {
+ t.Errorf("expected %s, got %s", test.expected, r.Timestamp.Format(time.RFC3339))
}
}
})
@ -141,10 +137,8 @@ func TestHostname(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.Hostname != test.expected {
- t.Errorf("expected %s, got %s", test.expected, r.Hostname)
- }
+ } else if r.Hostname != test.expected {
+ t.Errorf("expected %s, got %s", test.expected, r.Hostname)
}
}
})
@ -228,10 +222,8 @@ func TestMessage(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.Message != test.expected {
- t.Errorf("expected message %s, got %s", test.expected, r.Tag)
- }
+ } else if r.Message != test.expected {
+ t.Errorf("expected message %s, got %s", test.expected, r.Tag)
}
}
})

View file

@ -38,10 +38,8 @@ func TestPri(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.PRI != test.expected {
- t.Errorf("expected %d, got %d", test.expected, r.PRI)
- }
+ } else if r.PRI != test.expected {
+ t.Errorf("expected %d, got %d", test.expected, r.PRI)
}
}
})
@ -94,10 +92,8 @@ func TestHostname(t *testing.T) {
} else {
if test.expectedErr != "" {
t.Errorf("expected error %s, got no error", test.expectedErr)
- } else {
- if r.Hostname != test.expected {
- t.Errorf("expected %s, got %s", test.expected, r.Hostname)
- }
+ } else if r.Hostname != test.expected {
+ t.Errorf("expected %s, got %s", test.expected, r.Hostname)
}
}
})

View file

@ -115,17 +115,15 @@ func (a *APIKey) MiddlewareFunc() gin.HandlerFunc {
c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"})
c.Abort()
return
- } else {
+ } else if bouncer.AuthType != types.TlsAuthType {
//bouncer was found in DB
- if bouncer.AuthType != types.TlsAuthType {
- log.WithFields(log.Fields{
- "ip": c.ClientIP(),
- "cn": extractedCN,
- }).Errorf("bouncer isn't allowed to auth by TLS")
- c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"})
- c.Abort()
- return
- }
+ log.WithFields(log.Fields{
+ "ip": c.ClientIP(),
+ "cn": extractedCN,
+ }).Errorf("bouncer isn't allowed to auth by TLS")
+ c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"})
+ c.Abort()
+ return
}
} else {
//API Key Authentication

View file

@ -77,7 +77,7 @@ func (l *LocalApiClientCfg) Load() error {
if l.Credentials != nil && l.Credentials.URL != "" {
if !strings.HasSuffix(l.Credentials.URL, "/") {
- l.Credentials.URL = l.Credentials.URL + "/"
+ l.Credentials.URL += "/"
}
}

View file

@ -329,7 +329,7 @@ func (pb *PluginBroker) pushNotificationsToPlugin(pluginName string, alerts []*m
},
)
if err == nil {
- return err
+ return nil
}
log.WithField("plugin", pluginName).Errorf("%s error, retry num %d", err, i)
time.Sleep(backoffDuration)

View file

@ -169,12 +169,10 @@ func parser_visit(path string, f os.DirEntry, err error) error {
log.Tracef("marking %s as downloaded", v.Name)
v.Downloaded = true
}
- } else {
+ } else if CheckSuffix(hubpath, v.RemotePath) {
//wrong file
//<type>/<stage>/<author>/<name>.yaml
- if CheckSuffix(hubpath, v.RemotePath) {
- continue
- }
+ continue
}
sha, err := getSHA256(path)
if err != nil {

View file

@ -52,7 +52,7 @@ func JsonExtractUnescape(jsblob string, target ...string) string {
func JsonExtract(jsblob string, target string) string {
if !strings.HasPrefix(target, "[") {
- target = strings.Replace(target, "[", ".[", -1)
+ target = strings.ReplaceAll(target, "[", ".[")
}
fullpath := strings.Split(target, ".")
@ -62,7 +62,7 @@ func JsonExtract(jsblob string, target string) string {
func jsonExtractType(jsblob string, target string, t jsonparser.ValueType) ([]byte, error) {
if !strings.HasPrefix(target, "[") {
- target = strings.Replace(target, "[", ".[", -1)
+ target = strings.ReplaceAll(target, "[", ".[")
}
fullpath := strings.Split(target, ".")

View file

@ -152,8 +152,8 @@ func (h *HubTest) GetScenariosCoverage() ([]ScenarioCoverage, error) {
coverage[idx].PresentIn[assert] = true
continue
}
- fixedProbingWord := strings.Replace(pcover.Scenario, "probbing", "probing", -1)
- fixedProbingAssert := strings.Replace(scanner_name, "probbing", "probing", -1)
+ fixedProbingWord := strings.ReplaceAll(pcover.Scenario, "probbing", "probing")
+ fixedProbingAssert := strings.ReplaceAll(scanner_name, "probbing", "probing")
if fixedProbingWord == fixedProbingAssert {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true

View file

@ -131,7 +131,7 @@ func DumpBucketsStateAt(deadline time.Time, outputdir string, buckets *Buckets)
})
bbuckets, err := json.MarshalIndent(serialized, "", " ")
if err != nil {
- log.Fatalf("Failed to unmarshal buckets : %s", err)
+ return "", fmt.Errorf("Failed to unmarshal buckets : %s", err)
}
size, err := tmpFd.Write(bbuckets)
if err != nil {

View file

@ -19,7 +19,7 @@ func reverse_dns(field string, p *types.Event, ctx interface{}, plog *log.Entry)
rets, err := net.LookupAddr(field)
if err != nil {
plog.Debugf("failed to resolve '%s'", field)
- return nil, nil
+ return nil, nil //nolint:nilerr
}
//When using the host C library resolver, at most one result will be returned. To bypass the host resolver, use a custom Resolver.
ret["reverse_dns"] = rets[0]

View file

@ -51,7 +51,7 @@ func GeoIpASN(field string, p *types.Event, ctx interface{}, plog *log.Entry) (m
record, err := ctx.(*geoip2.Reader).ASN(ip)
if err != nil {
plog.Errorf("Unable to enrich ip '%s'", field)
- return nil, nil
+ return nil, nil //nolint:nilerr
}
ret["ASNNumber"] = fmt.Sprintf("%d", record.AutonomousSystemNumber)
ret["ASNumber"] = fmt.Sprintf("%d", record.AutonomousSystemNumber)
@ -75,7 +75,7 @@ func GeoIpCity(field string, p *types.Event, ctx interface{}, plog *log.Entry) (
record, err := ctx.(*geoip2.Reader).City(ip)
if err != nil {
plog.Debugf("Unable to enrich ip '%s'", ip)
- return nil, nil
+ return nil, nil //nolint:nilerr
}
if record.Country.IsoCode != "" {
ret["IsoCode"] = record.Country.IsoCode

View file

@ -302,14 +302,12 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
clog.Debugf("child is success, OnSuccess=next_stage, skip")
break
}
- } else {
+ } else if !NodeHasOKGrok {
/*
If the parent node has a successful grok pattern, it's state will stay successful even if one or more chil fails.
If the parent node is a skeleton node (no grok pattern), then at least one child must be successful for it to be a success.
*/
- if !NodeHasOKGrok {
- NodeState = false
- }
+ NodeState = false
}
}
}

View file

@ -375,7 +375,7 @@ func TestGeneratePatternsDoc(t *testing.T) {
i := 0
for key, val := range pctx.Grok {
p[i] = Pair{key, val}
- p[i].Value = strings.Replace(p[i].Value, "{%{", "\\{\\%\\{", -1)
+ p[i].Value = strings.ReplaceAll(p[i].Value, "{%{", "\\{\\%\\{")
i++
}
sort.Sort(p)

View file

@ -210,10 +210,9 @@ func CopyFile(sourceSymLink, destinationFile string) (err error) {
return
}
}
- if err = os.Link(sourceFile, destinationFile); err == nil {
- return
+ if err = os.Link(sourceFile, destinationFile); err != nil {
+ err = copyFileContents(sourceFile, destinationFile)
}
- err = copyFileContents(sourceFile, destinationFile)
return
}