Merge branch 'master' into file_notification_plugin

Laurence Jones 2024-04-25 13:23:10 +01:00 committed by GitHub
commit c9331d55a8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
53 changed files with 494 additions and 314 deletions


@ -42,7 +42,7 @@ issue:
3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version. 3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version.
- prefix: kind - prefix: kind
list: ['feature', 'bug', 'packaging', 'enhancement'] list: ['feature', 'bug', 'packaging', 'enhancement', 'refactoring']
multiple: false multiple: false
author_association: author_association:
author: true author: true
@ -54,6 +54,7 @@ issue:
@$AUTHOR: There are no 'kind' label on this issue. You need a 'kind' label to start the triage process. @$AUTHOR: There are no 'kind' label on this issue. You need a 'kind' label to start the triage process.
* `/kind feature` * `/kind feature`
* `/kind enhancement` * `/kind enhancement`
* `/kind refactoring`
* `/kind bug` * `/kind bug`
* `/kind packaging` * `/kind packaging`
@ -65,12 +66,13 @@ pull_request:
labels: labels:
- prefix: kind - prefix: kind
multiple: false multiple: false
list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies'] list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies', 'refactoring']
needs: needs:
comment: | comment: |
@$AUTHOR: There are no 'kind' label on this PR. You need a 'kind' label to generate the release automatically. @$AUTHOR: There are no 'kind' label on this PR. You need a 'kind' label to generate the release automatically.
* `/kind feature` * `/kind feature`
* `/kind enhancement` * `/kind enhancement`
* `/kind refactoring`
* `/kind fix` * `/kind fix`
* `/kind chore` * `/kind chore`
* `/kind dependencies` * `/kind dependencies`


@ -81,3 +81,4 @@ jobs:
with: with:
files: ./coverage-bats.out files: ./coverage-bats.out
flags: bats flags: bats
token: ${{ secrets.CODECOV_TOKEN }}


@ -59,15 +59,15 @@ jobs:
cd docker/test cd docker/test
python -m pip install --upgrade pipenv wheel python -m pip install --upgrade pipenv wheel
- name: "Cache virtualenvs" #- name: "Cache virtualenvs"
id: cache-pipenv # id: cache-pipenv
uses: actions/cache@v4 # uses: actions/cache@v4
with: # with:
path: ~/.local/share/virtualenvs # path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }} # key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: "Install dependencies" - name: "Install dependencies"
if: steps.cache-pipenv.outputs.cache-hit != 'true' #if: steps.cache-pipenv.outputs.cache-hit != 'true'
run: | run: |
cd docker/test cd docker/test
pipenv install --deploy pipenv install --deploy


@ -52,6 +52,7 @@ jobs:
with: with:
files: coverage.out files: coverage.out
flags: unit-windows flags: unit-windows
token: ${{ secrets.CODECOV_TOKEN }}
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v4


@ -153,6 +153,7 @@ jobs:
with: with:
files: coverage.out files: coverage.out
flags: unit-linux flags: unit-linux
token: ${{ secrets.CODECOV_TOKEN }}
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v4


@ -37,17 +37,10 @@ linters-settings:
statements: 122 statements: 122
govet: govet:
enable: enable-all: true
- atomicalign disable:
- deepequalerrors - reflectvaluecompare
# TODO: - fieldalignment - fieldalignment
- findcall
- nilness
# TODO: - reflectvaluecompare
- shadow
- sortslice
- timeformat
- unusedwrite
lll: lll:
# lower this after refactoring # lower this after refactoring
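
The govet settings above switch from an explicit enable list to enable-all: true with a short disable list (reflectvaluecompare, fieldalignment). Among the analyzers this keeps enabled is shadow, which was already in the old list. A minimal, illustrative Go sketch of the kind of report it produces; none of this code is from the repository:

package main

import "fmt"

// Illustrative only: govet's shadow analyzer flags the inner err below,
// which shadows the outer declaration and silently drops its value.
func main() {
	var err error

	if true {
		err := fmt.Errorf("inner failure") // reported: declaration of "err" shadows declaration above
		_ = err
	}

	fmt.Println(err) // still nil, because the inner assignment never reached this variable
}
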
@ -65,7 +58,7 @@ linters-settings:
min-complexity: 28 min-complexity: 28
nlreturn: nlreturn:
block-size: 4 block-size: 5
nolintlint: nolintlint:
allow-unused: false # report any unused nolint directives allow-unused: false # report any unused nolint directives
@ -147,37 +140,40 @@ linters:
# #
# DEPRECATED by golangi-lint # DEPRECATED by golangi-lint
# #
- deadcode # The owner seems to have abandoned the linter. Replaced by unused. - deadcode
- exhaustivestruct # The owner seems to have abandoned the linter. Replaced by exhaustruct. - exhaustivestruct
- golint # Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes - golint
- ifshort # Checks that your code uses short syntax for if-statements whenever possible - ifshort
- interfacer # Linter that suggests narrower interface types - interfacer
- maligned # Tool to detect Go structs that would take less memory if their fields were sorted - maligned
- nosnakecase # nosnakecase is a linter that detects snake case of variable naming and function name. - nosnakecase
- scopelint # Scopelint checks for unpinned variables in go programs - scopelint
- structcheck # The owner seems to have abandoned the linter. Replaced by unused. - structcheck
- varcheck # The owner seems to have abandoned the linter. Replaced by unused. - varcheck
# #
# Enabled # Enabled
# #
# - asasalint # check for pass []any as any in variadic func(...any) # - asasalint # check for pass []any as any in variadic func(...any)
# - asciicheck # Simple linter to check that your code does not contain non-ASCII identifiers # - asciicheck # checks that all code identifiers does not have non-ASCII symbols in the name
# - bidichk # Checks for dangerous unicode character sequences # - bidichk # Checks for dangerous unicode character sequences
# - bodyclose # checks whether HTTP response body is closed successfully # - bodyclose # checks whether HTTP response body is closed successfully
# - copyloopvar # copyloopvar is a linter detects places where loop variables are copied
# - cyclop # checks function and package cyclomatic complexity # - cyclop # checks function and package cyclomatic complexity
# - decorder # check declaration order and count of types, constants, variables and functions # - decorder # check declaration order and count of types, constants, variables and functions
# - depguard # Go linter that checks if package imports are in a list of acceptable packages # - depguard # Go linter that checks if package imports are in a list of acceptable packages
# - dupword # checks for duplicate words in the source code # - dupword # checks for duplicate words in the source code
# - durationcheck # check for two durations multiplied together # - durationcheck # check for two durations multiplied together
# - errcheck # Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases # - errcheck # errcheck is a program for checking for unchecked errors in Go code. These unchecked errors can be critical bugs in some cases
# - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13. # - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
# - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds # - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds
# - exportloopref # checks for pointers to enclosing loop variables # - exportloopref # checks for pointers to enclosing loop variables
# - funlen # Tool for detection of long functions # - funlen # Tool for detection of long functions
# - ginkgolinter # enforces standards of using ginkgo and gomega # - ginkgolinter # enforces standards of using ginkgo and gomega
# - gocheckcompilerdirectives # Checks that go compiler directive comments (//go:) are valid.
# - gochecknoinits # Checks that no init functions are present in Go code # - gochecknoinits # Checks that no init functions are present in Go code
# - gochecksumtype # Run exhaustiveness checks on Go "sum types"
# - gocognit # Computes and checks the cognitive complexity of functions # - gocognit # Computes and checks the cognitive complexity of functions
# - gocritic # Provides diagnostics that check for bugs, performance and style issues. # - gocritic # Provides diagnostics that check for bugs, performance and style issues.
# - gocyclo # Computes and checks the cyclomatic complexity of functions # - gocyclo # Computes and checks the cyclomatic complexity of functions
@ -185,48 +181,56 @@ linters:
# - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod. # - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
# - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations. # - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
# - goprintffuncname # Checks that printf-like functions are named with `f` at the end # - goprintffuncname # Checks that printf-like functions are named with `f` at the end
# - gosimple # (megacheck): Linter for Go source code that specializes in simplifying a code # - gosimple # (megacheck): Linter for Go source code that specializes in simplifying code
# - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string # - gosmopolitan # Report certain i18n/l10n anti-patterns in your Go codebase
# - grouper # An analyzer to analyze expression groups. # - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs. It is roughly the same as 'go vet' and uses its passes.
# - grouper # Analyze expression groups.
# - importas # Enforces consistent import aliases # - importas # Enforces consistent import aliases
# - ineffassign # Detects when assignments to existing variables are not used # - ineffassign # Detects when assignments to existing variables are not used
# - interfacebloat # A linter that checks the number of methods inside an interface. # - interfacebloat # A linter that checks the number of methods inside an interface.
# - intrange # intrange is a linter to find places where for loops could make use of an integer range.
# - lll # Reports long lines # - lll # Reports long lines
# - loggercheck # (logrlint): Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).
# - logrlint # Check logr arguments. # - logrlint # Check logr arguments.
# - maintidx # maintidx measures the maintainability index of each function. # - maintidx # maintidx measures the maintainability index of each function.
# - makezero # Finds slice declarations with non-zero initial length # - makezero # Finds slice declarations with non-zero initial length
# - misspell # Finds commonly misspelled English words in comments # - mirror # reports wrong mirror patterns of bytes/strings usage
# - nakedret # Finds naked returns in functions greater than a specified function length # - misspell # Finds commonly misspelled English words
# - nakedret # Checks that functions with naked returns are not longer than a maximum size (can be zero).
# - nestif # Reports deeply nested if statements # - nestif # Reports deeply nested if statements
# - nilerr # Finds the code that returns nil even if it checks that the error is not nil. # - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
# - nolintlint # Reports ill-formed or insufficient nolint directives # - nolintlint # Reports ill-formed or insufficient nolint directives
# - nonamedreturns # Reports all named returns # - nonamedreturns # Reports all named returns
# - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL. # - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL.
# - perfsprint # Checks that fmt.Sprintf can be replaced with a faster alternative.
# - predeclared # find code that shadows one of Go's predeclared identifiers # - predeclared # find code that shadows one of Go's predeclared identifiers
# - reassign # Checks that package variables are not reassigned # - reassign # Checks that package variables are not reassigned
# - rowserrcheck # checks whether Err of rows is checked successfully # - rowserrcheck # checks whether Rows.Err of rows is checked successfully
# - sqlclosecheck # Checks that sql.Rows and sql.Stmt are closed. # - sloglint # ensure consistent code style when using log/slog
# - staticcheck # (megacheck): Staticcheck is a go vet on steroids, applying a ton of static analysis checks # - spancheck # Checks for mistakes with OpenTelemetry/Census spans.
# - testableexamples # linter checks if examples are testable (have an expected output) # - sqlclosecheck # Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.
# - staticcheck # (megacheck): It's a set of rules from staticcheck. It's not the same thing as the staticcheck binary. The author of staticcheck doesn't support or approve the use of staticcheck as a library inside golangci-lint.
# - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17 # - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
# - testableexamples # linter checks if examples are testable (have an expected output)
# - testifylint # Checks usage of github.com/stretchr/testify.
# - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes # - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
# - typecheck # Like the front-end of a Go compiler, parses and type-checks Go code
# - unconvert # Remove unnecessary type conversions # - unconvert # Remove unnecessary type conversions
# - unused # (megacheck): Checks Go code for unused constants, variables, functions and types # - unused # (megacheck): Checks Go code for unused constants, variables, functions and types
# - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library. # - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library.
# - wastedassign # wastedassign finds wasted assignment statements. # - wastedassign # Finds wasted assignment statements
# - zerologlint # Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`
# #
# Recommended? (easy) # Recommended? (easy)
# #
- dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f()) - dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occations, where the check for the returned error can be omitted. - errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.
- exhaustive # check exhaustiveness of enum switch statements - exhaustive # check exhaustiveness of enum switch statements
- gci # Gci control golang package import order and make it always deterministic. - gci # Gci control golang package import order and make it always deterministic.
- godot # Check if comments end in a period - godot # Check if comments end in a period
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification - gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
- goimports # In addition to fixing imports, goimports also formats your code in the same style as gofmt. - goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode.
- gosec # (gas): Inspects source code for security problems - gosec # (gas): Inspects source code for security problems
- inamedparam # reports interfaces with unnamed method parameters - inamedparam # reports interfaces with unnamed method parameters
- musttag # enforce field tags in (un)marshaled structs - musttag # enforce field tags in (un)marshaled structs
@ -234,7 +238,7 @@ linters:
- protogetter # Reports direct reads from proto message fields when getters should be used - protogetter # Reports direct reads from proto message fields when getters should be used
- revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint. - revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
- tagalign # check that struct tags are well aligned - tagalign # check that struct tags are well aligned
- thelper # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers - thelper # thelper detects tests helpers which is not start with t.Helper() method.
- wrapcheck # Checks that errors returned from external packages are wrapped - wrapcheck # Checks that errors returned from external packages are wrapped
# #
@ -242,12 +246,12 @@ linters:
# #
- containedctx # containedctx is a linter that detects struct contained context.Context field - containedctx # containedctx is a linter that detects struct contained context.Context field
- contextcheck # check the function whether use a non-inherited context - contextcheck # check whether the function uses a non-inherited context
- errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`. - errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.
- gomnd # An analyzer to detect magic numbers. - gomnd # An analyzer to detect magic numbers.
- ireturn # Accept Interfaces, Return Concrete Types - ireturn # Accept Interfaces, Return Concrete Types
- nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value. - nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value.
- noctx # noctx finds sending http request without context.Context - noctx # Finds sending http request without context.Context
- unparam # Reports unused function parameters - unparam # Reports unused function parameters
# #
@ -256,8 +260,8 @@ linters:
- gofumpt # Gofumpt checks whether code was gofumpt-ed. - gofumpt # Gofumpt checks whether code was gofumpt-ed.
- nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity - nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
- whitespace # Tool for detection of leading and trailing whitespace - whitespace # Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc.
- wsl # Whitespace Linter - Forces you to use empty lines! - wsl # add or remove empty lines
# #
# Well intended, but not ready for this # Well intended, but not ready for this
@ -265,8 +269,8 @@ linters:
- dupl # Tool for code clone detection - dupl # Tool for code clone detection
- forcetypeassert # finds forced type assertions - forcetypeassert # finds forced type assertions
- godox # Tool for detection of FIXME, TODO and other comment keywords - godox # Tool for detection of FIXME, TODO and other comment keywords
- goerr113 # Golang linter to check the errors handling expressions - goerr113 # Go linter to check the errors handling expressions
- paralleltest # paralleltest detects missing usage of t.Parallel() method in your Go test - paralleltest # Detects missing usage of t.Parallel() method in your Go test
- testpackage # linter that makes you use a separate _test package - testpackage # linter that makes you use a separate _test package
# #
@ -274,7 +278,7 @@ linters:
# #
- exhaustruct # Checks if all structure fields are initialized - exhaustruct # Checks if all structure fields are initialized
- forbidigo # Forbids identifiers - forbidigo # Forbids identifiers
- gochecknoglobals # check that no global variables exist - gochecknoglobals # Check that no global variables exist.
- goconst # Finds repeated strings that could be replaced by a constant - goconst # Finds repeated strings that could be replaced by a constant
- stylecheck # Stylecheck is a replacement for golint - stylecheck # Stylecheck is a replacement for golint
- tagliatelle # Checks the struct tags. - tagliatelle # Checks the struct tags.


@ -16,7 +16,7 @@ RUN apk add --no-cache git g++ gcc libc-dev make bash gettext binutils-gold core
cd re2-${RE2_VERSION} && \ cd re2-${RE2_VERSION} && \
make install && \ make install && \
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
go install github.com/mikefarah/yq/v4@v4.40.4 go install github.com/mikefarah/yq/v4@v4.43.1
COPY . . COPY . .


@ -21,7 +21,7 @@ RUN apt-get update && \
make && \ make && \
make install && \ make install && \
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
go install github.com/mikefarah/yq/v4@v4.40.4 go install github.com/mikefarah/yq/v4@v4.43.1
COPY . . COPY . .


@ -4,6 +4,7 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
@ -204,6 +205,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
if err != nil { if err != nil {
return fmt.Errorf("parsing api url %s: %w", apiURL, err) return fmt.Errorf("parsing api url %s: %w", apiURL, err)
} }
cli.client, err = apiclient.NewClient(&apiclient.Config{ cli.client, err = apiclient.NewClient(&apiclient.Config{
MachineID: cfg.API.Client.Credentials.Login, MachineID: cfg.API.Client.Credentials.Login,
Password: strfmt.Password(cfg.API.Client.Credentials.Password), Password: strfmt.Password(cfg.API.Client.Credentials.Password),
@ -211,7 +213,6 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
URL: apiURL, URL: apiURL,
VersionPrefix: "v1", VersionPrefix: "v1",
}) })
if err != nil { if err != nil {
return fmt.Errorf("new api client: %w", err) return fmt.Errorf("new api client: %w", err)
} }
@ -229,7 +230,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
} }
func (cli *cliAlerts) NewListCmd() *cobra.Command { func (cli *cliAlerts) NewListCmd() *cobra.Command {
var alertListFilter = apiclient.AlertsListOpts{ alertListFilter := apiclient.AlertsListOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
ScenarioEquals: new(string), ScenarioEquals: new(string),
@ -363,7 +364,7 @@ func (cli *cliAlerts) NewDeleteCmd() *cobra.Command {
delAlertByID string delAlertByID string
) )
var alertDeleteFilter = apiclient.AlertsDeleteOpts{ alertDeleteFilter := apiclient.AlertsDeleteOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
ScenarioEquals: new(string), ScenarioEquals: new(string),
@ -391,7 +392,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
*alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" && *alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" &&
*alertDeleteFilter.RangeEquals == "" && delAlertByID == "" { *alertDeleteFilter.RangeEquals == "" && delAlertByID == "" {
_ = cmd.Usage() _ = cmd.Usage()
return fmt.Errorf("at least one filter or --all must be specified") return errors.New("at least one filter or --all must be specified")
} }
return nil return nil
@ -478,7 +479,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
cfg := cli.cfg() cfg := cli.cfg()
if len(args) == 0 { if len(args) == 0 {
printHelp(cmd) printHelp(cmd)
return fmt.Errorf("missing alert_id") return errors.New("missing alert_id")
} }
for _, alertID := range args { for _, alertID := range args {
id, err := strconv.Atoi(alertID) id, err := strconv.Atoi(alertID)
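
Several hunks in this file (and the ones that follow) replace fmt.Errorf with errors.New wherever the message is a constant string with no format verbs, while keeping fmt.Errorf with %w for wrapping. A self-contained sketch of that rule of thumb; the function names are hypothetical:

package main

import (
	"errors"
	"fmt"
)

// validateFilter uses errors.New: the message is constant, nothing to format.
func validateFilter(scope, value string) error {
	if scope == "" && value == "" {
		return errors.New("at least one filter or --all must be specified")
	}
	return nil
}

// wrapParse keeps fmt.Errorf because it formats a value and wraps an error with %w.
func wrapParse(raw string, err error) error {
	if err != nil {
		return fmt.Errorf("parsing api url %s: %w", raw, err)
	}
	return nil
}

func main() {
	fmt.Println(validateFilter("", ""))
	fmt.Println(wrapParse("http://bad host", errors.New("invalid character in host")))
}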


@ -175,7 +175,7 @@ func (cli *cliCapi) status() error {
return err return err
} }
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get scenarios: %w", err) return fmt.Errorf("failed to get scenarios: %w", err)
} }


@ -10,13 +10,15 @@ import (
"github.com/sanity-io/litter" "github.com/sanity-io/litter"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v2" "gopkg.in/yaml.v3"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
) )
func showConfigKey(key string) error { func (cli *cliConfig) showKey(key string) error {
cfg := cli.cfg()
type Env struct { type Env struct {
Config *csconfig.Config Config *csconfig.Config
} }
@ -30,15 +32,15 @@ func showConfigKey(key string) error {
return err return err
} }
output, err := expr.Run(program, Env{Config: csConfig}) output, err := expr.Run(program, Env{Config: cfg})
if err != nil { if err != nil {
return err return err
} }
switch csConfig.Cscli.Output { switch cfg.Cscli.Output {
case "human", "raw": case "human", "raw":
// Don't use litter for strings, it adds quotes // Don't use litter for strings, it adds quotes
// that we didn't have before // that would break compatibility with previous versions
switch output.(type) { switch output.(type) {
case string: case string:
fmt.Println(output) fmt.Println(output)
@ -51,13 +53,14 @@ func showConfigKey(key string) error {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Printf("%s\n", string(data)) fmt.Println(string(data))
} }
return nil return nil
} }
var configShowTemplate = `Global: func (cli *cliConfig) template() string {
return `Global:
{{- if .ConfigPaths }} {{- if .ConfigPaths }}
- Configuration Folder : {{.ConfigPaths.ConfigDir}} - Configuration Folder : {{.ConfigPaths.ConfigDir}}
@ -182,19 +185,11 @@ Central API:
{{- end }} {{- end }}
{{- end }} {{- end }}
` `
}
func (cli *cliConfig) show(key string) error { func (cli *cliConfig) show() error {
cfg := cli.cfg() cfg := cli.cfg()
if err := cfg.LoadAPIClient(); err != nil {
log.Errorf("failed to load API client configuration: %s", err)
// don't return, we can still show the configuration
}
if key != "" {
return showConfigKey(key)
}
switch cfg.Cscli.Output { switch cfg.Cscli.Output {
case "human": case "human":
// The tests on .Enable look funny because the option has a true default which has // The tests on .Enable look funny because the option has a true default which has
@ -205,7 +200,7 @@ func (cli *cliConfig) show(key string) error {
"ValueBool": func(b *bool) bool { return b != nil && *b }, "ValueBool": func(b *bool) bool { return b != nil && *b },
} }
tmp, err := template.New("config").Funcs(funcs).Parse(configShowTemplate) tmp, err := template.New("config").Funcs(funcs).Parse(cli.template())
if err != nil { if err != nil {
return err return err
} }
@ -220,14 +215,14 @@ func (cli *cliConfig) show(key string) error {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Printf("%s\n", string(data)) fmt.Println(string(data))
case "raw": case "raw":
data, err := yaml.Marshal(cfg) data, err := yaml.Marshal(cfg)
if err != nil { if err != nil {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Printf("%s\n", string(data)) fmt.Println(string(data))
} }
return nil return nil
@ -243,7 +238,16 @@ func (cli *cliConfig) newShowCmd() *cobra.Command {
Args: cobra.ExactArgs(0), Args: cobra.ExactArgs(0),
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(_ *cobra.Command, _ []string) error {
return cli.show(key) if err := cli.cfg().LoadAPIClient(); err != nil {
log.Errorf("failed to load API client configuration: %s", err)
// don't return, we can still show the configuration
}
if key != "" {
return cli.showKey(key)
}
return cli.show()
}, },
} }
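
The refactored show() renders its human output through text/template with a custom ValueBool helper, as visible in the hunk above. A standalone sketch of that pattern; the template text and field name are illustrative, not the real configuration template:

package main

import (
	"os"
	"text/template"
)

func main() {
	// ValueBool dereferences *bool options whose nil value should read as false.
	funcs := template.FuncMap{
		"ValueBool": func(b *bool) bool { return b != nil && *b },
	}

	tmpl := template.Must(template.New("config").Funcs(funcs).Parse(
		"Prometheus enabled: {{ValueBool .Enabled}}\n"))

	enabled := true
	_ = tmpl.Execute(os.Stdout, struct{ Enabled *bool }{Enabled: &enabled})
}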


@ -4,9 +4,11 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
"strconv"
"strings" "strings"
"github.com/fatih/color" "github.com/fatih/color"
@ -36,7 +38,7 @@ func NewCLIConsole(cfg configGetter) *cliConsole {
} }
func (cli *cliConsole) NewCommand() *cobra.Command { func (cli *cliConsole) NewCommand() *cobra.Command {
var cmd = &cobra.Command{ cmd := &cobra.Command{
Use: "console [action]", Use: "console [action]",
Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)", Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)",
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
@ -101,7 +103,7 @@ After running this command your will need to validate the enrollment in the weba
return err return err
} }
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get installed scenarios: %w", err) return fmt.Errorf("failed to get installed scenarios: %w", err)
} }
@ -203,7 +205,7 @@ Enable given information push to the central API. Allows to empower the console`
log.Infof("All features have been enabled successfully") log.Infof("All features have been enabled successfully")
} else { } else {
if len(args) == 0 { if len(args) == 0 {
return fmt.Errorf("you must specify at least one feature to enable") return errors.New("you must specify at least one feature to enable")
} }
if err := cli.setConsoleOpts(args, true); err != nil { if err := cli.setConsoleOpts(args, true); err != nil {
return err return err
@ -288,11 +290,11 @@ func (cli *cliConsole) newStatusCmd() *cobra.Command {
} }
rows := [][]string{ rows := [][]string{
{csconfig.SEND_MANUAL_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareManualDecisions)}, {csconfig.SEND_MANUAL_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareManualDecisions)},
{csconfig.SEND_CUSTOM_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareCustomScenarios)}, {csconfig.SEND_CUSTOM_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareCustomScenarios)},
{csconfig.SEND_TAINTED_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareTaintedScenarios)}, {csconfig.SEND_TAINTED_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareTaintedScenarios)},
{csconfig.SEND_CONTEXT, fmt.Sprintf("%t", *consoleCfg.ShareContext)}, {csconfig.SEND_CONTEXT, strconv.FormatBool(*consoleCfg.ShareContext)},
{csconfig.CONSOLE_MANAGEMENT, fmt.Sprintf("%t", *consoleCfg.ConsoleManagement)}, {csconfig.CONSOLE_MANAGEMENT, strconv.FormatBool(*consoleCfg.ConsoleManagement)},
} }
for _, row := range rows { for _, row := range rows {
err = csvwriter.Write(row) err = csvwriter.Write(row)
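
The status rows above now call strconv.FormatBool instead of formatting each bool through fmt.Sprintf with %t. Both produce the same "true"/"false" string; a tiny sketch of the equivalence:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	enabled := true

	before := fmt.Sprintf("%t", enabled) // formatting round-trip
	after := strconv.FormatBool(enabled) // direct conversion

	fmt.Println(before, after, before == after) // true true true
}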


@ -9,7 +9,6 @@ import (
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
) )
/*help to copy the file, ioutil doesn't offer the feature*/ /*help to copy the file, ioutil doesn't offer the feature*/
func copyFileContents(src, dst string) (err error) { func copyFileContents(src, dst string) (err error) {
@ -69,6 +68,7 @@ func CopyFile(sourceSymLink, destinationFile string) error {
if !(destinationFileStat.Mode().IsRegular()) { if !(destinationFileStat.Mode().IsRegular()) {
return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String()) return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String())
} }
if os.SameFile(sourceFileStat, destinationFileStat) { if os.SameFile(sourceFileStat, destinationFileStat) {
return err return err
} }
@ -80,4 +80,3 @@ func CopyFile(sourceSymLink, destinationFile string) error {
return err return err
} }


@ -4,6 +4,7 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
@ -346,7 +347,7 @@ cscli decisions add --scope username --value foobar
addScope = types.Range addScope = types.Range
} else if addValue == "" { } else if addValue == "" {
printHelp(cmd) printHelp(cmd)
return fmt.Errorf("missing arguments, a value is required (--ip, --range or --scope and --value)") return errors.New("missing arguments, a value is required (--ip, --range or --scope and --value)")
} }
if addReason == "" { if addReason == "" {
@ -371,7 +372,7 @@ cscli decisions add --scope username --value foobar
Scenario: &addReason, Scenario: &addReason,
ScenarioVersion: &empty, ScenarioVersion: &empty,
Simulated: &simulated, Simulated: &simulated,
//setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes // setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
Source: &models.Source{ Source: &models.Source{
AsName: empty, AsName: empty,
AsNumber: empty, AsNumber: empty,
@ -411,7 +412,7 @@ cscli decisions add --scope username --value foobar
} }
func (cli *cliDecisions) newDeleteCmd() *cobra.Command { func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
var delFilter = apiclient.DecisionsDeleteOpts{ delFilter := apiclient.DecisionsDeleteOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
TypeEquals: new(string), TypeEquals: new(string),
@ -448,7 +449,7 @@ cscli decisions delete --origin lists --scenario list_name
*delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" && *delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
*delFilter.OriginEquals == "" && delDecisionID == "" { *delFilter.OriginEquals == "" && delDecisionID == "" {
cmd.Usage() cmd.Usage()
return fmt.Errorf("at least one filter or --all must be specified") return errors.New("at least one filter or --all must be specified")
} }
return nil return nil


@ -5,6 +5,7 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io" "io"
"os" "os"
@ -81,7 +82,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultDuration == "" { if defaultDuration == "" {
return fmt.Errorf("--duration cannot be empty") return errors.New("--duration cannot be empty")
} }
defaultScope, err := flags.GetString("scope") defaultScope, err := flags.GetString("scope")
@ -90,7 +91,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultScope == "" { if defaultScope == "" {
return fmt.Errorf("--scope cannot be empty") return errors.New("--scope cannot be empty")
} }
defaultReason, err := flags.GetString("reason") defaultReason, err := flags.GetString("reason")
@ -99,7 +100,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultReason == "" { if defaultReason == "" {
return fmt.Errorf("--reason cannot be empty") return errors.New("--reason cannot be empty")
} }
defaultType, err := flags.GetString("type") defaultType, err := flags.GetString("type")
@ -108,7 +109,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultType == "" { if defaultType == "" {
return fmt.Errorf("--type cannot be empty") return errors.New("--type cannot be empty")
} }
batchSize, err := flags.GetInt("batch") batchSize, err := flags.GetInt("batch")
@ -136,7 +137,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if format == "" { if format == "" {
return fmt.Errorf("unable to guess format from file extension, please provide a format with --format flag") return errors.New("unable to guess format from file extension, please provide a format with --format flag")
} }
if input == "-" { if input == "-" {
@ -235,7 +236,6 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
return nil return nil
} }
func (cli *cliDecisions) newImportCmd() *cobra.Command { func (cli *cliDecisions) newImportCmd() *cobra.Command {
cmd := &cobra.Command{ cmd := &cobra.Command{
Use: "import [options]", Use: "import [options]",


@ -39,8 +39,10 @@ id: %s
title: %s title: %s
--- ---
` `
name := filepath.Base(filename) name := filepath.Base(filename)
base := strings.TrimSuffix(name, filepath.Ext(name)) base := strings.TrimSuffix(name, filepath.Ext(name))
return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " ")) return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
} }


@ -83,7 +83,7 @@ tail -n 5 myfile.log | cscli explain --type nginx -f -
PersistentPreRunE: func(_ *cobra.Command, _ []string) error { PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
fileInfo, _ := os.Stdin.Stat() fileInfo, _ := os.Stdin.Stat()
if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) { if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) {
return fmt.Errorf("the option -f - is intended to work with pipes") return errors.New("the option -f - is intended to work with pipes")
} }
return nil return nil
@ -160,18 +160,22 @@ func (cli *cliExplain) run() error {
} else if logFile == "-" { } else if logFile == "-" {
reader := bufio.NewReader(os.Stdin) reader := bufio.NewReader(os.Stdin)
errCount := 0 errCount := 0
for { for {
input, err := reader.ReadBytes('\n') input, err := reader.ReadBytes('\n')
if err != nil && errors.Is(err, io.EOF) { if err != nil && errors.Is(err, io.EOF) {
break break
} }
if len(input) > 1 { if len(input) > 1 {
_, err = f.Write(input) _, err = f.Write(input)
} }
if err != nil || len(input) <= 1 { if err != nil || len(input) <= 1 {
errCount++ errCount++
} }
} }
if errCount > 0 { if errCount > 0 {
log.Warnf("Failed to write %d lines to %s", errCount, tmpFile) log.Warnf("Failed to write %d lines to %s", errCount, tmpFile)
} }
@ -207,7 +211,7 @@ func (cli *cliExplain) run() error {
} }
if dsn == "" { if dsn == "" {
return fmt.Errorf("no acquisition (--file or --dsn) provided, can't run cscli test") return errors.New("no acquisition (--file or --dsn) provided, can't run cscli test")
} }
cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"} cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"}
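
The explain hunk above only adds blank lines around the loop that drains stdin when the log file argument is "-". For reference, a self-contained sketch of the same loop, writing to stdout instead of the temporary file used by cscli:

package main

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"os"
)

func main() {
	reader := bufio.NewReader(os.Stdin)
	errCount := 0

	for {
		input, err := reader.ReadBytes('\n')
		if err != nil && errors.Is(err, io.EOF) {
			break
		}

		// only copy non-empty lines; count empty lines and write failures
		if len(input) > 1 {
			_, err = os.Stdout.Write(input)
		}

		if err != nil || len(input) <= 1 {
			errCount++
		}
	}

	if errCount > 0 {
		fmt.Fprintf(os.Stderr, "failed to handle %d lines\n", errCount)
	}
}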


@ -13,7 +13,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
type cliHub struct { type cliHub struct{
cfg configGetter cfg configGetter
} }
@ -137,7 +137,7 @@ func (cli *cliHub) upgrade(force bool) error {
} }
for _, itemType := range cwhub.ItemTypes { for _, itemType := range cwhub.ItemTypes {
items, err := hub.GetInstalledItems(itemType) items, err := hub.GetInstalledItemsByType(itemType)
if err != nil { if err != nil {
return err return err
} }


@ -13,8 +13,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIAppsecConfig() *cliItem { func NewCLIAppsecConfig(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.APPSEC_CONFIGS, name: cwhub.APPSEC_CONFIGS,
singular: "appsec-config", singular: "appsec-config",
oneOrMore: "appsec-config(s)", oneOrMore: "appsec-config(s)",
@ -46,7 +47,7 @@ cscli appsec-configs list crowdsecurity/vpatch`,
} }
} }
func NewCLIAppsecRule() *cliItem { func NewCLIAppsecRule(cfg configGetter) *cliItem {
inspectDetail := func(item *cwhub.Item) error { inspectDetail := func(item *cwhub.Item) error {
// Only show the converted rules in human mode // Only show the converted rules in human mode
if csConfig.Cscli.Output != "human" { if csConfig.Cscli.Output != "human" {
@ -57,11 +58,11 @@ func NewCLIAppsecRule() *cliItem {
yamlContent, err := os.ReadFile(item.State.LocalPath) yamlContent, err := os.ReadFile(item.State.LocalPath)
if err != nil { if err != nil {
return fmt.Errorf("unable to read file %s : %s", item.State.LocalPath, err) return fmt.Errorf("unable to read file %s: %w", item.State.LocalPath, err)
} }
if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil { if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil {
return fmt.Errorf("unable to unmarshal yaml file %s : %s", item.State.LocalPath, err) return fmt.Errorf("unable to unmarshal yaml file %s: %w", item.State.LocalPath, err)
} }
for _, ruleType := range appsec_rule.SupportedTypes() { for _, ruleType := range appsec_rule.SupportedTypes() {
@ -70,7 +71,7 @@ func NewCLIAppsecRule() *cliItem {
for _, rule := range appsecRule.Rules { for _, rule := range appsecRule.Rules {
convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name) convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name)
if err != nil { if err != nil {
return fmt.Errorf("unable to convert rule %s : %s", rule.Name, err) return fmt.Errorf("unable to convert rule %s: %w", rule.Name, err)
} }
fmt.Println(convertedRule) fmt.Println(convertedRule)
@ -88,6 +89,7 @@ func NewCLIAppsecRule() *cliItem {
} }
return &cliItem{ return &cliItem{
cfg: cfg,
name: "appsec-rules", name: "appsec-rules",
singular: "appsec-rule", singular: "appsec-rule",
oneOrMore: "appsec-rule(s)", oneOrMore: "appsec-rule(s)",


@ -4,8 +4,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLICollection() *cliItem { func NewCLICollection(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.COLLECTIONS, name: cwhub.COLLECTIONS,
singular: "collection", singular: "collection",
oneOrMore: "collection(s)", oneOrMore: "collection(s)",


@ -4,8 +4,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIContext() *cliItem { func NewCLIContext(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.CONTEXTS, name: cwhub.CONTEXTS,
singular: "context", singular: "context",
oneOrMore: "context(s)", oneOrMore: "context(s)",


@ -4,8 +4,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIParser() *cliItem { func NewCLIParser(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.PARSERS, name: cwhub.PARSERS,
singular: "parser", singular: "parser",
oneOrMore: "parser(s)", oneOrMore: "parser(s)",


@ -4,8 +4,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIPostOverflow() *cliItem { func NewCLIPostOverflow(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.POSTOVERFLOWS, name: cwhub.POSTOVERFLOWS,
singular: "postoverflow", singular: "postoverflow",
oneOrMore: "postoverflow(s)", oneOrMore: "postoverflow(s)",


@ -4,8 +4,9 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIScenario() *cliItem { func NewCLIScenario(cfg configGetter) *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.SCENARIOS, name: cwhub.SCENARIOS,
singular: "scenario", singular: "scenario",
oneOrMore: "scenario(s)", oneOrMore: "scenario(s)",


@ -135,6 +135,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
// create empty nuclei template file // create empty nuclei template file
nucleiFileName := fmt.Sprintf("%s.yaml", testName) nucleiFileName := fmt.Sprintf("%s.yaml", testName)
nucleiFilePath := filepath.Join(testPath, nucleiFileName) nucleiFilePath := filepath.Join(testPath, nucleiFileName)
nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0755) nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0755)
if err != nil { if err != nil {
return err return err
@ -405,7 +406,7 @@ func (cli *cliHubTest) NewRunCmd() *cobra.Command {
} }
func (cli *cliHubTest) NewCleanCmd() *cobra.Command { func (cli *cliHubTest) NewCleanCmd() *cobra.Command {
var cmd = &cobra.Command{ cmd := &cobra.Command{
Use: "clean", Use: "clean",
Short: "clean [test_name]", Short: "clean [test_name]",
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),


@ -37,6 +37,7 @@ func ShowMetrics(hubItem *cwhub.Item) error {
appsecMetricsTable(color.Output, hubItem.Name, metrics) appsecMetricsTable(color.Output, hubItem.Name, metrics)
default: // no metrics for this item type default: // no metrics for this item type
} }
return nil return nil
} }
@ -49,21 +50,27 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["name"] name, ok := metric.Labels["name"]
if !ok { if !ok {
log.Debugf("no name in Metric %v", metric.Labels) log.Debugf("no name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
source, ok := metric.Labels["source"] source, ok := metric.Labels["source"]
if !ok { if !ok {
log.Debugf("no source in Metric %v", metric.Labels) log.Debugf("no source in Metric %v", metric.Labels)
} else { } else {
@ -71,12 +78,15 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
source = srctype + ":" + source source = srctype + ":" + source
} }
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -119,6 +129,7 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
} }
} }
} }
return stats return stats
} }
@ -136,26 +147,34 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["name"] name, ok := metric.Labels["name"]
if !ok { if !ok {
log.Debugf("no name in Metric %v", metric.Labels) log.Debugf("no name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -174,6 +193,7 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
} }
} }
} }
return stats return stats
} }
@ -188,17 +208,22 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["rule_name"] name, ok := metric.Labels["rule_name"]
if !ok { if !ok {
log.Debugf("no rule_name in Metric %v", metric.Labels) log.Debugf("no rule_name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
@ -209,11 +234,13 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -231,6 +258,7 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
} }
} }
} }
return stats return stats
} }
@ -247,6 +275,7 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
go func() { go func() {
defer trace.CatchPanic("crowdsec/GetPrometheusMetric") defer trace.CatchPanic("crowdsec/GetPrometheusMetric")
err := prom2json.FetchMetricFamilies(url, mfChan, transport) err := prom2json.FetchMetricFamilies(url, mfChan, transport)
if err != nil { if err != nil {
log.Fatalf("failed to fetch prometheus metrics : %v", err) log.Fatalf("failed to fetch prometheus metrics : %v", err)
@ -257,6 +286,7 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
for mf := range mfChan { for mf := range mfChan {
result = append(result, prom2json.NewFamily(mf)) result = append(result, prom2json.NewFamily(mf))
} }
log.Debugf("Finished reading prometheus output, %d entries", len(result)) log.Debugf("Finished reading prometheus output, %d entries", len(result))
return result return result


@ -61,7 +61,7 @@ func compInstalledItems(itemType string, args []string, toComplete string) ([]st
return nil, cobra.ShellCompDirectiveDefault return nil, cobra.ShellCompDirectiveDefault
} }
items, err := hub.GetInstalledItemNames(itemType) items, err := hub.GetInstalledNamesByType(itemType)
if err != nil { if err != nil {
cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true) cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true)
return nil, cobra.ShellCompDirectiveDefault return nil, cobra.ShellCompDirectiveDefault


@ -1,6 +1,7 @@
package main package main
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"strings" "strings"
@ -28,6 +29,7 @@ type cliHelp struct {
} }
type cliItem struct { type cliItem struct {
cfg configGetter
name string // plural, as used in the hub index name string // plural, as used in the hub index
singular string singular string
oneOrMore string // parenthetical pluralizaion: "parser(s)" oneOrMore string // parenthetical pluralizaion: "parser(s)"
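
The cliItem struct gains a cfg configGetter field, matching the constructors earlier in this commit (NewCLICollection, NewCLIParser, ...) that now receive one instead of reading the global csConfig. A minimal sketch of the injection pattern; the Config type and the getter signature are assumptions, since their real definitions are outside this diff:

package main

import "fmt"

// Config stands in for the real csconfig.Config.
type Config struct {
	Output string
}

// configGetter is assumed to be a plain function type returning the shared config.
type configGetter func() *Config

type cliItem struct {
	cfg  configGetter
	name string
}

// NewCLICollection mirrors the constructors in this commit: it stores the getter
// and resolves the configuration lazily, when a command actually runs.
func NewCLICollection(cfg configGetter) *cliItem {
	return &cliItem{cfg: cfg, name: "collections"}
}

func main() {
	shared := &Config{Output: "human"}
	cli := NewCLICollection(func() *Config { return shared })
	fmt.Println(cli.cfg().Output, cli.name)
}
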
@ -61,7 +63,9 @@ func (cli cliItem) NewCommand() *cobra.Command {
} }
func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error { func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error {
hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger()) cfg := cli.cfg()
hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -71,7 +75,7 @@ func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreE
if item == nil { if item == nil {
msg := suggestNearestMessage(hub, cli.name, name) msg := suggestNearestMessage(hub, cli.name, name)
if !ignoreError { if !ignoreError {
return fmt.Errorf(msg) return errors.New(msg)
} }
log.Errorf(msg) log.Errorf(msg)
@ -107,10 +111,10 @@ func (cli cliItem) newInstallCmd() *cobra.Command {
Example: cli.installHelp.example, Example: cli.installHelp.example,
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compAllItems(cli.name, args, toComplete) return compAllItems(cli.name, args, toComplete)
}, },
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(_ *cobra.Command, args []string) error {
return cli.install(args, downloadOnly, force, ignoreError) return cli.install(args, downloadOnly, force, ignoreError)
}, },
} }
@ -137,15 +141,15 @@ func istalledParentNames(item *cwhub.Item) []string {
} }
func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error { func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error {
hub, err := require.Hub(csConfig, nil, log.StandardLogger()) hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
if all { if all {
getter := hub.GetInstalledItems getter := hub.GetInstalledItemsByType
if purge { if purge {
getter = hub.GetAllItems getter = hub.GetItemsByType
} }
items, err := getter(cli.name) items, err := getter(cli.name)
@ -163,6 +167,7 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error
if didRemove { if didRemove {
log.Infof("Removed %s", item.Name) log.Infof("Removed %s", item.Name)
removed++ removed++
} }
} }
@ -204,6 +209,7 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error
if didRemove { if didRemove {
log.Infof("Removed %s", item.Name) log.Infof("Removed %s", item.Name)
removed++ removed++
} }
} }
@ -231,10 +237,10 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
Example: cli.removeHelp.example, Example: cli.removeHelp.example,
Aliases: []string{"delete"}, Aliases: []string{"delete"},
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(_ *cobra.Command, args []string) error {
return cli.remove(args, purge, force, all) return cli.remove(args, purge, force, all)
}, },
} }
@ -248,13 +254,15 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
} }
func (cli cliItem) upgrade(args []string, force bool, all bool) error { func (cli cliItem) upgrade(args []string, force bool, all bool) error {
hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger()) cfg := cli.cfg()
hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
if all { if all {
items, err := hub.GetInstalledItems(cli.name) items, err := hub.GetInstalledItemsByType(cli.name)
if err != nil { if err != nil {
return err return err
} }
@ -300,6 +308,7 @@ func (cli cliItem) upgrade(args []string, force bool, all bool) error {
if didUpdate { if didUpdate {
log.Infof("Updated %s", item.Name) log.Infof("Updated %s", item.Name)
updated++ updated++
} }
} }
@ -323,10 +332,10 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
Long: coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)), Long: coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)),
Example: cli.upgradeHelp.example, Example: cli.upgradeHelp.example,
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(_ *cobra.Command, args []string) error {
return cli.upgrade(args, force, all) return cli.upgrade(args, force, all)
}, },
} }
@ -339,21 +348,23 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
} }
func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error { func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error {
cfg := cli.cfg()
if rev && !diff { if rev && !diff {
return fmt.Errorf("--rev can only be used with --diff") return errors.New("--rev can only be used with --diff")
} }
if url != "" { if url != "" {
csConfig.Cscli.PrometheusUrl = url cfg.Cscli.PrometheusUrl = url
} }
remote := (*cwhub.RemoteHubCfg)(nil) remote := (*cwhub.RemoteHubCfg)(nil)
if diff { if diff {
remote = require.RemoteHub(csConfig) remote = require.RemoteHub(cfg)
} }
hub, err := require.Hub(csConfig, remote, log.StandardLogger()) hub, err := require.Hub(cfg, remote, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -399,10 +410,10 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
Example: cli.inspectHelp.example, Example: cli.inspectHelp.example,
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(_ *cobra.Command, args []string) error {
return cli.inspect(args, url, diff, rev, noMetrics) return cli.inspect(args, url, diff, rev, noMetrics)
}, },
} }
@ -417,7 +428,7 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
} }
func (cli cliItem) list(args []string, all bool) error { func (cli cliItem) list(args []string, all bool) error {
hub, err := require.Hub(csConfig, nil, log.StandardLogger()) hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -526,6 +537,7 @@ func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) st
// hack: avoid message "item is tainted by itself" // hack: avoid message "item is tainted by itself"
continue continue
} }
ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList)) ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
} }
} }
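Note: the remove path above now picks between the renamed hub getters at runtime (installed items by default, every known item when purging). A minimal sketch of that selection, assuming only the method names and signatures visible in this diff; the helper name is hypothetical:

package hubsketch

import (
	"fmt"

	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)

// itemsForRemoval mirrors the getter selection in the remove command:
// installed items by default, all known items when purging.
func itemsForRemoval(hub *cwhub.Hub, itemType string, purge bool) ([]*cwhub.Item, error) {
	getter := hub.GetInstalledItemsByType
	if purge {
		getter = hub.GetItemsByType
	}

	items, err := getter(itemType)
	if err != nil {
		return nil, fmt.Errorf("listing %s: %w", itemType, err)
	}

	return items, nil
}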


@ -17,7 +17,7 @@ import (
// selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name // selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name
func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) { func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) {
itemNames := hub.GetItemNames(itemType) itemNames := hub.GetNamesByType(itemType)
notExist := []string{} notExist := []string{}
@ -116,7 +116,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
} }
if err := csvwriter.Write(header); err != nil { if err := csvwriter.Write(header); err != nil {
return fmt.Errorf("failed to write header: %s", err) return fmt.Errorf("failed to write header: %w", err)
} }
for _, itemType := range itemTypes { for _, itemType := range itemTypes {
@ -132,7 +132,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
} }
if err := csvwriter.Write(row); err != nil { if err := csvwriter.Write(row); err != nil {
return fmt.Errorf("failed to write raw output: %s", err) return fmt.Errorf("failed to write raw output: %w", err)
} }
} }
} }
@ -150,12 +150,12 @@ func inspectItem(item *cwhub.Item, showMetrics bool) error {
enc.SetIndent(2) enc.SetIndent(2)
if err := enc.Encode(item); err != nil { if err := enc.Encode(item); err != nil {
return fmt.Errorf("unable to encode item: %s", err) return fmt.Errorf("unable to encode item: %w", err)
} }
case "json": case "json":
b, err := json.MarshalIndent(*item, "", " ") b, err := json.MarshalIndent(*item, "", " ")
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal item: %s", err) return fmt.Errorf("unable to marshal item: %w", err)
} }
fmt.Print(string(b)) fmt.Print(string(b))


@ -56,7 +56,7 @@ func (cli *cliLapi) status() error {
return err return err
} }
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get scenarios: %w", err) return fmt.Errorf("failed to get scenarios: %w", err)
} }
@ -116,7 +116,6 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
URL: apiurl, URL: apiurl,
VersionPrefix: LAPIURLPrefix, VersionPrefix: LAPIURLPrefix,
}, nil) }, nil)
if err != nil { if err != nil {
return fmt.Errorf("api client register: %w", err) return fmt.Errorf("api client register: %w", err)
} }
@ -585,7 +584,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
} }
func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
var ret = make([]string, 0) ret := make([]string, 0)
for _, subnode := range node.LeavesNodes { for _, subnode := range node.LeavesNodes {
if subnode.Grok.RunTimeRegexp != nil { if subnode.Grok.RunTimeRegexp != nil {


@ -1,7 +1,9 @@
package main package main
import ( import (
"fmt"
"os" "os"
"path/filepath"
"slices" "slices"
"time" "time"
@ -10,14 +12,18 @@ import (
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/database"
"github.com/crowdsecurity/crowdsec/pkg/fflag" "github.com/crowdsecurity/crowdsec/pkg/fflag"
) )
var ConfigFilePath string var (
var csConfig *csconfig.Config ConfigFilePath string
var dbClient *database.Client csConfig *csconfig.Config
dbClient *database.Client
)
type configGetter func() *csconfig.Config type configGetter func() *csconfig.Config
@ -82,6 +88,11 @@ func loadConfigFor(command string) (*csconfig.Config, string, error) {
return nil, "", err return nil, "", err
} }
// set up directory for trace files
if err := trace.Init(filepath.Join(config.ConfigPaths.DataDir, "trace")); err != nil {
return nil, "", fmt.Errorf("while setting up trace directory: %w", err)
}
return config, merged, nil return config, merged, nil
} }
@ -249,13 +260,13 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand()) cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
cmd.AddCommand(NewCLISupport().NewCommand()) cmd.AddCommand(NewCLISupport().NewCommand())
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
cmd.AddCommand(NewCLICollection().NewCommand()) cmd.AddCommand(NewCLICollection(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIParser().NewCommand()) cmd.AddCommand(NewCLIParser(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIScenario().NewCommand()) cmd.AddCommand(NewCLIScenario(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIPostOverflow().NewCommand()) cmd.AddCommand(NewCLIPostOverflow(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIContext().NewCommand()) cmd.AddCommand(NewCLIContext(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIAppsecConfig().NewCommand()) cmd.AddCommand(NewCLIAppsecConfig(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIAppsecRule().NewCommand()) cmd.AddCommand(NewCLIAppsecRule(cli.cfg).NewCommand())
if fflag.CscliSetup.IsEnabled() { if fflag.CscliSetup.IsEnabled() {
cmd.AddCommand(NewSetupCmd()) cmd.AddCommand(NewSetupCmd())
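Note: the constructors above now receive cli.cfg, a configGetter, instead of reading the csConfig global. A hedged sketch of the shape this implies for one command struct; only the configGetter type and the NewCLICollection(cli.cfg) call are taken from the diff, the field name and constructor body are illustrative:

package clisketch

import "github.com/crowdsecurity/crowdsec/pkg/csconfig"

// configGetter is the accessor type declared above; commands resolve the
// configuration lazily instead of capturing the global at construction time.
type configGetter func() *csconfig.Config

// cliCollection is an illustrative command struct; the real definition is
// not part of this excerpt.
type cliCollection struct {
	cfg configGetter
}

func NewCLICollection(cfg configGetter) *cliCollection {
	return &cliCollection{cfg: cfg}
}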


@ -4,6 +4,7 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io/fs" "io/fs"
"net/url" "net/url"
@ -88,7 +89,7 @@ func (cli *cliNotifications) getPluginConfigs() (map[string]csplugin.PluginConfi
return fmt.Errorf("error while traversing directory %s: %w", path, err) return fmt.Errorf("error while traversing directory %s: %w", path, err)
} }
name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) //Avoid calling info.Name() twice name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) // Avoid calling info.Name() twice
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) { if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
ts, err := csplugin.ParsePluginConfigFile(name) ts, err := csplugin.ParsePluginConfigFile(name)
if err != nil { if err != nil {
@ -266,7 +267,7 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
if !ok { if !ok {
return fmt.Errorf("plugin name: '%s' does not exist", args[0]) return fmt.Errorf("plugin name: '%s' does not exist", args[0])
} }
//Create a single profile with plugin name as notification name // Create a single profile with plugin name as notification name
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{ return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
{ {
Notifications: []string{ Notifications: []string{
@ -320,8 +321,8 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
Alert: alert, Alert: alert,
} }
//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent // time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
pluginTomb.Kill(fmt.Errorf("terminating")) pluginTomb.Kill(errors.New("terminating"))
pluginTomb.Wait() pluginTomb.Wait()
return nil return nil
@ -416,8 +417,8 @@ cscli notifications reinject <alert_id> -a '{"remediation": true,"scenario":"not
break break
} }
} }
//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent // time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
pluginTomb.Kill(fmt.Errorf("terminating")) pluginTomb.Kill(errors.New("terminating"))
pluginTomb.Wait() pluginTomb.Wait()
return nil return nil
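Note: this file (and the require changes below) systematically replace fmt.Errorf with errors.New for constant messages and switch the %s verb to %w when wrapping. A small self-contained example of why %w matters to callers; this is standard library behaviour, not code from the commit:

package main

import (
	"errors"
	"fmt"
)

var errTerminating = errors.New("terminating")

func stop() error {
	// %w keeps errTerminating in the error chain; %s would flatten it to text.
	return fmt.Errorf("plugin broker: %w", errTerminating)
}

func main() {
	fmt.Println(errors.Is(stop(), errTerminating)) // true, thanks to %w
}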


@ -64,25 +64,22 @@ func (cli *cliPapi) NewStatusCmd() *cobra.Command {
cfg := cli.cfg() cfg := cli.cfg()
dbClient, err = database.NewClient(cfg.DbConfig) dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize database client: %s", err) return fmt.Errorf("unable to initialize database client: %w", err)
} }
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists) apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize API client: %s", err) return fmt.Errorf("unable to initialize API client: %w", err)
} }
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel()) papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize PAPI client: %s", err) return fmt.Errorf("unable to initialize PAPI client: %w", err)
} }
perms, err := papi.GetPermissions() perms, err := papi.GetPermissions()
if err != nil { if err != nil {
return fmt.Errorf("unable to get PAPI permissions: %s", err) return fmt.Errorf("unable to get PAPI permissions: %w", err)
} }
var lastTimestampStr *string var lastTimestampStr *string
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey) lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)
@ -118,27 +115,26 @@ func (cli *cliPapi) NewSyncCmd() *cobra.Command {
dbClient, err = database.NewClient(cfg.DbConfig) dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize database client: %s", err) return fmt.Errorf("unable to initialize database client: %w", err)
} }
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists) apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize API client: %s", err) return fmt.Errorf("unable to initialize API client: %w", err)
} }
t.Go(apic.Push) t.Go(apic.Push)
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel()) papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize PAPI client: %s", err) return fmt.Errorf("unable to initialize PAPI client: %w", err)
} }
t.Go(papi.SyncDecisions) t.Go(papi.SyncDecisions)
err = papi.PullOnce(time.Time{}, true) err = papi.PullOnce(time.Time{}, true)
if err != nil { if err != nil {
return fmt.Errorf("unable to sync decisions: %s", err) return fmt.Errorf("unable to sync decisions: %w", err)
} }
log.Infof("Sending acknowledgements to CAPI") log.Infof("Sending acknowledgements to CAPI")


@ -1,6 +1,7 @@
package require package require
import ( import (
"errors"
"fmt" "fmt"
"io" "io"
@ -16,7 +17,7 @@ func LAPI(c *csconfig.Config) error {
} }
if c.DisableAPI { if c.DisableAPI {
return fmt.Errorf("local API is disabled -- this command must be run on the local API machine") return errors.New("local API is disabled -- this command must be run on the local API machine")
} }
return nil return nil
@ -32,7 +33,7 @@ func CAPI(c *csconfig.Config) error {
func PAPI(c *csconfig.Config) error { func PAPI(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials.PapiURL == "" { if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
return fmt.Errorf("no PAPI URL in configuration") return errors.New("no PAPI URL in configuration")
} }
return nil return nil
@ -40,7 +41,7 @@ func PAPI(c *csconfig.Config) error {
func CAPIRegistered(c *csconfig.Config) error { func CAPIRegistered(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials == nil { if c.API.Server.OnlineClient.Credentials == nil {
return fmt.Errorf("the Central API (CAPI) must be configured with 'cscli capi register'") return errors.New("the Central API (CAPI) must be configured with 'cscli capi register'")
} }
return nil return nil
@ -56,7 +57,7 @@ func DB(c *csconfig.Config) error {
func Notifications(c *csconfig.Config) error { func Notifications(c *csconfig.Config) error {
if c.ConfigPaths.NotificationDir == "" { if c.ConfigPaths.NotificationDir == "" {
return fmt.Errorf("config_paths.notification_dir is not set in crowdsec config") return errors.New("config_paths.notification_dir is not set in crowdsec config")
} }
return nil return nil
@ -82,7 +83,7 @@ func Hub(c *csconfig.Config, remote *cwhub.RemoteHubCfg, logger *logrus.Logger)
local := c.Hub local := c.Hub
if local == nil { if local == nil {
return nil, fmt.Errorf("you must configure cli before interacting with hub") return nil, errors.New("you must configure cli before interacting with hub")
} }
if logger == nil { if logger == nil {


@ -2,6 +2,7 @@ package main
import ( import (
"bytes" "bytes"
"errors"
"fmt" "fmt"
"os" "os"
"os/exec" "os/exec"
@ -118,9 +119,11 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
switch detectConfigFile { switch detectConfigFile {
case "-": case "-":
log.Tracef("Reading detection rules from stdin") log.Tracef("Reading detection rules from stdin")
detectReader = os.Stdin detectReader = os.Stdin
default: default:
log.Tracef("Reading detection rules: %s", detectConfigFile) log.Tracef("Reading detection rules: %s", detectConfigFile)
detectReader, err = os.Open(detectConfigFile) detectReader, err = os.Open(detectConfigFile)
if err != nil { if err != nil {
return err return err
@ -171,6 +174,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
_, err := exec.LookPath("systemctl") _, err := exec.LookPath("systemctl")
if err != nil { if err != nil {
log.Debug("systemctl not available: snubbing systemd") log.Debug("systemctl not available: snubbing systemd")
snubSystemd = true snubSystemd = true
} }
} }
@ -182,6 +186,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
if forcedOSFamily == "" && forcedOSID != "" { if forcedOSFamily == "" && forcedOSID != "" {
log.Debug("force-os-id is set: force-os-family defaults to 'linux'") log.Debug("force-os-id is set: force-os-family defaults to 'linux'")
forcedOSFamily = "linux" forcedOSFamily = "linux"
} }
@ -219,6 +224,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
if err != nil { if err != nil {
return err return err
} }
fmt.Println(setup) fmt.Println(setup)
return nil return nil
@ -318,6 +324,7 @@ func runSetupInstallHub(cmd *cobra.Command, args []string) error {
func runSetupValidate(cmd *cobra.Command, args []string) error { func runSetupValidate(cmd *cobra.Command, args []string) error {
fromFile := args[0] fromFile := args[0]
input, err := os.ReadFile(fromFile) input, err := os.ReadFile(fromFile)
if err != nil { if err != nil {
return fmt.Errorf("while reading stdin: %w", err) return fmt.Errorf("while reading stdin: %w", err)
@ -325,7 +332,7 @@ func runSetupValidate(cmd *cobra.Command, args []string) error {
if err = setup.Validate(input); err != nil { if err = setup.Validate(input); err != nil {
fmt.Printf("%v\n", err) fmt.Printf("%v\n", err)
return fmt.Errorf("invalid setup file") return errors.New("invalid setup file")
} }
return nil return nil


@ -1,6 +1,7 @@
package main package main
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"slices" "slices"
@ -36,7 +37,7 @@ cscli simulation disable crowdsecurity/ssh-bf`,
return err return err
} }
if cli.cfg().Cscli.SimulationConfig == nil { if cli.cfg().Cscli.SimulationConfig == nil {
return fmt.Errorf("no simulation configured") return errors.New("no simulation configured")
} }
return nil return nil
@ -99,11 +100,11 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' enabled", scenario) log.Printf("simulation mode for '%s' enabled", scenario)
} }
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("simulation enable: %s", err) return fmt.Errorf("simulation enable: %w", err)
} }
} else if forceGlobalSimulation { } else if forceGlobalSimulation {
if err := cli.enableGlobalSimulation(); err != nil { if err := cli.enableGlobalSimulation(); err != nil {
return fmt.Errorf("unable to enable global simulation mode: %s", err) return fmt.Errorf("unable to enable global simulation mode: %w", err)
} }
} else { } else {
printHelp(cmd) printHelp(cmd)
@ -146,11 +147,11 @@ func (cli *cliSimulation) NewDisableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' disabled", scenario) log.Printf("simulation mode for '%s' disabled", scenario)
} }
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("simulation disable: %s", err) return fmt.Errorf("simulation disable: %w", err)
} }
} else if forceGlobalSimulation { } else if forceGlobalSimulation {
if err := cli.disableGlobalSimulation(); err != nil { if err := cli.disableGlobalSimulation(); err != nil {
return fmt.Errorf("unable to disable global simulation mode: %s", err) return fmt.Errorf("unable to disable global simulation mode: %w", err)
} }
} else { } else {
printHelp(cmd) printHelp(cmd)
@ -202,7 +203,7 @@ func (cli *cliSimulation) enableGlobalSimulation() error {
cfg.Cscli.SimulationConfig.Exclusions = []string{} cfg.Cscli.SimulationConfig.Exclusions = []string{}
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("unable to dump simulation file: %s", err) return fmt.Errorf("unable to dump simulation file: %w", err)
} }
log.Printf("global simulation: enabled") log.Printf("global simulation: enabled")
@ -215,12 +216,12 @@ func (cli *cliSimulation) dumpSimulationFile() error {
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig) newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal simulation configuration: %s", err) return fmt.Errorf("unable to marshal simulation configuration: %w", err)
} }
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644) err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil { if err != nil {
return fmt.Errorf("write simulation config in '%s' failed: %s", cfg.ConfigPaths.SimulationFilePath, err) return fmt.Errorf("write simulation config in '%s' failed: %w", cfg.ConfigPaths.SimulationFilePath, err)
} }
log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath) log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath)
@ -237,12 +238,12 @@ func (cli *cliSimulation) disableGlobalSimulation() error {
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig) newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal new simulation configuration: %s", err) return fmt.Errorf("unable to marshal new simulation configuration: %w", err)
} }
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644) err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil { if err != nil {
return fmt.Errorf("unable to write new simulation config in '%s': %s", cfg.ConfigPaths.SimulationFilePath, err) return fmt.Errorf("unable to write new simulation config in '%s': %w", cfg.ConfigPaths.SimulationFilePath, err)
} }
log.Printf("global simulation: disabled") log.Printf("global simulation: disabled")
@ -269,8 +270,10 @@ func (cli *cliSimulation) status() {
} }
} else { } else {
log.Println("global simulation: disabled") log.Println("global simulation: disabled")
if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 { if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 {
log.Println("Scenarios in simulation mode :") log.Println("Scenarios in simulation mode :")
for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions { for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions {
log.Printf(" - %s", scenario) log.Printf(" - %s", scenario)
} }


@ -4,6 +4,7 @@ import (
"archive/zip" "archive/zip"
"bytes" "bytes"
"context" "context"
"errors"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
@ -12,12 +13,14 @@ import (
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings" "strings"
"time"
"github.com/blackfireio/osinfo" "github.com/blackfireio/osinfo"
"github.com/go-openapi/strfmt" "github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
@ -47,6 +50,7 @@ const (
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt" SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/" SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml" SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
SUPPORT_CRASH_PATH = "crash/"
) )
// from https://github.com/acarl005/stripansi // from https://github.com/acarl005/stripansi
@ -62,7 +66,7 @@ func collectMetrics() ([]byte, []byte, error) {
if csConfig.Cscli.PrometheusUrl == "" { if csConfig.Cscli.PrometheusUrl == "" {
log.Warn("No Prometheus URL configured, metrics will not be collected") log.Warn("No Prometheus URL configured, metrics will not be collected")
return nil, nil, fmt.Errorf("prometheus_uri is not set") return nil, nil, errors.New("prometheus_uri is not set")
} }
humanMetrics := bytes.NewBuffer(nil) humanMetrics := bytes.NewBuffer(nil)
@ -70,7 +74,7 @@ func collectMetrics() ([]byte, []byte, error) {
ms := NewMetricStore() ms := NewMetricStore()
if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil { if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %s", err) return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %w", err)
} }
if err := ms.Format(humanMetrics, nil, "human", false); err != nil { if err := ms.Format(humanMetrics, nil, "human", false); err != nil {
@ -79,21 +83,21 @@ func collectMetrics() ([]byte, []byte, error) {
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil) req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %s", err) return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %w", err)
} }
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %s", err) return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %w", err)
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %s", err) return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %w", err)
} }
return humanMetrics.Bytes(), body, nil return humanMetrics.Bytes(), body, nil
@ -121,19 +125,18 @@ func collectOSInfo() ([]byte, error) {
log.Info("Collecting OS info") log.Info("Collecting OS info")
info, err := osinfo.GetOSInfo() info, err := osinfo.GetOSInfo()
if err != nil { if err != nil {
return nil, err return nil, err
} }
w := bytes.NewBuffer(nil) w := bytes.NewBuffer(nil)
w.WriteString(fmt.Sprintf("Architecture: %s\n", info.Architecture)) fmt.Fprintf(w, "Architecture: %s\n", info.Architecture)
w.WriteString(fmt.Sprintf("Family: %s\n", info.Family)) fmt.Fprintf(w, "Family: %s\n", info.Family)
w.WriteString(fmt.Sprintf("ID: %s\n", info.ID)) fmt.Fprintf(w, "ID: %s\n", info.ID)
w.WriteString(fmt.Sprintf("Name: %s\n", info.Name)) fmt.Fprintf(w, "Name: %s\n", info.Name)
w.WriteString(fmt.Sprintf("Codename: %s\n", info.Codename)) fmt.Fprintf(w, "Codename: %s\n", info.Codename)
w.WriteString(fmt.Sprintf("Version: %s\n", info.Version)) fmt.Fprintf(w, "Version: %s\n", info.Version)
w.WriteString(fmt.Sprintf("Build: %s\n", info.Build)) fmt.Fprintf(w, "Build: %s\n", info.Build)
return w.Bytes(), nil return w.Bytes(), nil
} }
@ -163,7 +166,7 @@ func collectBouncers(dbClient *database.Client) ([]byte, error) {
bouncers, err := dbClient.ListBouncers() bouncers, err := dbClient.ListBouncers()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to list bouncers: %s", err) return nil, fmt.Errorf("unable to list bouncers: %w", err)
} }
getBouncersTable(out, bouncers) getBouncersTable(out, bouncers)
@ -176,7 +179,7 @@ func collectAgents(dbClient *database.Client) ([]byte, error) {
machines, err := dbClient.ListMachines() machines, err := dbClient.ListMachines()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to list machines: %s", err) return nil, fmt.Errorf("unable to list machines: %w", err)
} }
getAgentsTable(out, machines) getAgentsTable(out, machines)
@ -196,7 +199,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
return []byte(fmt.Sprintf("cannot parse API URL: %s", err)) return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
} }
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return []byte(fmt.Sprintf("could not collect scenarios: %s", err)) return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
} }
@ -264,6 +267,11 @@ func collectAcquisitionConfig() map[string][]byte {
return ret return ret
} }
func collectCrash() ([]string, error) {
log.Info("Collecting crash dumps")
return trace.List()
}
type cliSupport struct{} type cliSupport struct{}
func NewCLISupport() *cliSupport { func NewCLISupport() *cliSupport {
@ -431,11 +439,31 @@ cscli support dump -f /tmp/crowdsec-support.zip
} }
} }
crash, err := collectCrash()
if err != nil {
log.Errorf("could not collect crash dumps: %s", err)
}
for _, filename := range crash {
content, err := os.ReadFile(filename)
if err != nil {
log.Errorf("could not read crash dump %s: %s", filename, err)
}
infos[SUPPORT_CRASH_PATH+filepath.Base(filename)] = content
}
w := bytes.NewBuffer(nil) w := bytes.NewBuffer(nil)
zipWriter := zip.NewWriter(w) zipWriter := zip.NewWriter(w)
for filename, data := range infos { for filename, data := range infos {
fw, err := zipWriter.Create(filename) header := &zip.FileHeader{
Name: filename,
Method: zip.Deflate,
// TODO: retain mtime where possible (esp. trace)
Modified: time.Now(),
}
fw, err := zipWriter.CreateHeader(header)
if err != nil { if err != nil {
log.Errorf("Could not add zip entry for %s: %s", filename, err) log.Errorf("Could not add zip entry for %s: %s", filename, err)
continue continue
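Note: the dump above now goes through zip.CreateHeader so each entry gets an explicit compression method and Modified time, and crash reports collected via trace.List() are added under crash/. A hedged sketch of the zip-writing side using only the standard archive/zip API; the infos map and output path are placeholders:

package main

import (
	"archive/zip"
	"bytes"
	"log"
	"os"
	"time"
)

func writeSupportZip(infos map[string][]byte, outPath string) error {
	w := bytes.NewBuffer(nil)
	zipWriter := zip.NewWriter(w)

	for filename, data := range infos {
		header := &zip.FileHeader{
			Name:     filename,
			Method:   zip.Deflate,
			Modified: time.Now(), // same caveat as above: the source mtime is not retained
		}

		fw, err := zipWriter.CreateHeader(header)
		if err != nil {
			log.Printf("could not add zip entry for %s: %s", filename, err)
			continue
		}

		if _, err := fw.Write(data); err != nil {
			log.Printf("could not write zip entry for %s: %s", filename, err)
		}
	}

	if err := zipWriter.Close(); err != nil {
		return err
	}

	return os.WriteFile(outPath, w.Bytes(), 0o600)
}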


@ -17,12 +17,12 @@ import (
) )
func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) { func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) {
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err) return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err)
} }
appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES) appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
if err != nil { if err != nil {
return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err) return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err)
} }
@ -52,11 +52,11 @@ func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.
PapiURL: papiURL, PapiURL: papiURL,
VersionPrefix: "v1", VersionPrefix: "v1",
UpdateScenario: func() ([]string, error) { UpdateScenario: func() ([]string, error) {
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil { if err != nil {
return nil, err return nil, err
} }
appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES) appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
if err != nil { if err != nil {
return nil, err return nil, err
} }


@ -6,6 +6,7 @@ import (
"fmt" "fmt"
_ "net/http/pprof" _ "net/http/pprof"
"os" "os"
"path/filepath"
"runtime" "runtime"
"runtime/pprof" "runtime/pprof"
"strings" "strings"
@ -14,6 +15,8 @@ import (
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"gopkg.in/tomb.v2" "gopkg.in/tomb.v2"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/acquisition" "github.com/crowdsecurity/crowdsec/pkg/acquisition"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/csplugin" "github.com/crowdsecurity/crowdsec/pkg/csplugin"
@ -96,8 +99,8 @@ func LoadBuckets(cConfig *csconfig.Config, hub *cwhub.Hub) error {
buckets = leakybucket.NewBuckets() buckets = leakybucket.NewBuckets()
log.Infof("Loading %d scenario files", len(files)) log.Infof("Loading %d scenario files", len(files))
holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)
holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)
if err != nil { if err != nil {
return fmt.Errorf("scenario loading failed: %w", err) return fmt.Errorf("scenario loading failed: %w", err)
} }
@ -230,6 +233,10 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
return nil, fmt.Errorf("while loading configuration file: %w", err) return nil, fmt.Errorf("while loading configuration file: %w", err)
} }
if err := trace.Init(filepath.Join(cConfig.ConfigPaths.DataDir, "trace")); err != nil {
return nil, fmt.Errorf("while setting up trace directory: %w", err)
}
cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags) cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags)
if dumpFolder != "" { if dumpFolder != "" {
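Note: both binaries now call trace.Init at startup so panic/crash reports land under <data_dir>/trace, which is what `cscli support dump` later reads back. A sketch assuming only the go-cs-lib/trace calls that appear in this diff (Init and List); their exact semantics beyond what is shown here are not documented in this excerpt:

package tracesketch

import (
	"fmt"
	"path/filepath"

	"github.com/crowdsecurity/go-cs-lib/trace"
)

// setupTrace mirrors the startup call added above: crash dumps go to
// <dataDir>/trace.
func setupTrace(dataDir string) error {
	if err := trace.Init(filepath.Join(dataDir, "trace")); err != nil {
		return fmt.Errorf("while setting up trace directory: %w", err)
	}

	return nil
}

// listCrashes returns the crash report file names, as collectCrash does in
// the support command.
func listCrashes() ([]string, error) {
	return trace.List()
}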


@ -3,7 +3,6 @@ package main
import ( import (
"fmt" "fmt"
"net/http" "net/http"
"time"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/client_golang/prometheus/promhttp"
@ -22,7 +21,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/parser" "github.com/crowdsecurity/crowdsec/pkg/parser"
) )
/*prometheus*/ // Prometheus
var globalParserHits = prometheus.NewCounterVec( var globalParserHits = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_total", Name: "cs_parser_hits_total",
@ -30,6 +30,7 @@ var globalParserHits = prometheus.NewCounterVec(
}, },
[]string{"source", "type"}, []string{"source", "type"},
) )
var globalParserHitsOk = prometheus.NewCounterVec( var globalParserHitsOk = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_ok_total", Name: "cs_parser_hits_ok_total",
@ -37,6 +38,7 @@ var globalParserHitsOk = prometheus.NewCounterVec(
}, },
[]string{"source", "type"}, []string{"source", "type"},
) )
var globalParserHitsKo = prometheus.NewCounterVec( var globalParserHitsKo = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_ko_total", Name: "cs_parser_hits_ko_total",
@ -116,9 +118,7 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
return return
} }
decisionsFilters := make(map[string][]string, 0) decisions, err := dbClient.QueryDecisionCountByScenario()
decisions, err := dbClient.QueryDecisionCountByScenario(decisionsFilters)
if err != nil { if err != nil {
log.Errorf("Error querying decisions for metrics: %v", err) log.Errorf("Error querying decisions for metrics: %v", err)
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -139,7 +139,6 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
} }
alerts, err := dbClient.AlertsCountPerScenario(alertsFilter) alerts, err := dbClient.AlertsCountPerScenario(alertsFilter)
if err != nil { if err != nil {
log.Errorf("Error querying alerts for metrics: %v", err) log.Errorf("Error querying alerts for metrics: %v", err)
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -194,7 +193,6 @@ func servePrometheus(config *csconfig.PrometheusCfg, dbClient *database.Client,
defer trace.CatchPanic("crowdsec/servePrometheus") defer trace.CatchPanic("crowdsec/servePrometheus")
http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient)) http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient))
log.Debugf("serving metrics after %s ms", time.Since(crowdsecT0))
if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil { if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil {
// in time machine, we most likely have the LAPI using the port // in time machine, we most likely have the LAPI using the port
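Note: for context on the counters touched above, this is the usual client_golang pattern for declaring, registering and incrementing a labelled counter. Generic example with placeholder names, not crowdsec's registration code:

package metricsketch

import "github.com/prometheus/client_golang/prometheus"

var parserHits = prometheus.NewCounterVec(
	prometheus.CounterOpts{
		Name: "example_parser_hits_total",
		Help: "Total events entering the parsers, by source and type.",
	},
	[]string{"source", "type"},
)

func init() {
	// A collector must be registered exactly once before it is scraped.
	prometheus.MustRegister(parserHits)
}

func recordHit(source, logType string) {
	parserHits.WithLabelValues(source, logType).Inc()
}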


@ -391,7 +391,7 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
} }
if cConfig.Common != nil && cConfig.Common.Daemonize { if cConfig.Common != nil && cConfig.Common.Daemonize {
csdaemon.NotifySystemd(log.StandardLogger()) csdaemon.Notify(csdaemon.Ready, log.StandardLogger())
// wait for signals // wait for signals
return HandleSignals(cConfig) return HandleSignals(cConfig)
} }

go.mod

@ -27,7 +27,7 @@ require (
github.com/corazawaf/libinjection-go v0.1.2 github.com/corazawaf/libinjection-go v0.1.2
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26
github.com/crowdsecurity/go-cs-lib v0.0.6 github.com/crowdsecurity/go-cs-lib v0.0.10
github.com/crowdsecurity/grokky v0.2.1 github.com/crowdsecurity/grokky v0.2.1
github.com/crowdsecurity/machineid v1.0.2 github.com/crowdsecurity/machineid v1.0.2
github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew v1.1.1

go.sum

@ -102,8 +102,8 @@ github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 h1:hyrYw3h
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA= github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk=
github.com/crowdsecurity/go-cs-lib v0.0.6 h1:Ef6MylXe0GaJE9vrfvxEdbHb31+JUP1os+murPz7Pos= github.com/crowdsecurity/go-cs-lib v0.0.10 h1:Twt/y/rYCUspGY1zxDnGurL2svRSREAz+2+puLepd9c=
github.com/crowdsecurity/go-cs-lib v0.0.6/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k= github.com/crowdsecurity/go-cs-lib v0.0.10/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k=
github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4= github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4=
github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM= github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM=
github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc= github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc=


@ -104,7 +104,7 @@ func LoadConsoleContext(c *csconfig.Config, hub *cwhub.Hub) error {
c.Crowdsec.ContextToSend = make(map[string][]string, 0) c.Crowdsec.ContextToSend = make(map[string][]string, 0)
if hub != nil { if hub != nil {
items, err := hub.GetInstalledItems(cwhub.CONTEXTS) items, err := hub.GetInstalledItemsByType(cwhub.CONTEXTS)
if err != nil { if err != nil {
return err return err
} }


@ -84,11 +84,16 @@ func recoverFromPanic(c *gin.Context) {
} }
if brokenPipe { if brokenPipe {
log.Warningf("client %s disconnected : %s", c.ClientIP(), err) log.Warningf("client %s disconnected: %s", c.ClientIP(), err)
c.Abort() c.Abort()
} else { } else {
filename := trace.WriteStackTrace(err) log.Warningf("client %s error: %s", c.ClientIP(), err)
log.Warningf("client %s error : %s", c.ClientIP(), err)
filename, err := trace.WriteStackTrace(err)
if err != nil {
log.Errorf("also while writing stacktrace: %s", err)
}
log.Warningf("stacktrace written to %s, please join to your issue", filename) log.Warningf("stacktrace written to %s, please join to your issue", filename)
c.AbortWithStatus(http.StatusInternalServerError) c.AbortWithStatus(http.StatusInternalServerError)
} }


@ -76,26 +76,24 @@ func (c *Config) LoadDBConfig(inCli bool) error {
if c.DbConfig.UseWal == nil { if c.DbConfig.UseWal == nil {
dbDir := filepath.Dir(c.DbConfig.DbPath) dbDir := filepath.Dir(c.DbConfig.DbPath)
isNetwork, fsType, err := types.IsNetworkFS(dbDir) isNetwork, fsType, err := types.IsNetworkFS(dbDir)
if err != nil { switch {
case err != nil:
log.Warnf("unable to determine if database is on network filesystem: %s", err) log.Warnf("unable to determine if database is on network filesystem: %s", err)
log.Warning("You are using sqlite without WAL, this can have a performance impact. If you do not store the database in a network share, set db_config.use_wal to true. Set explicitly to false to disable this warning.") log.Warning("You are using sqlite without WAL, this can have a performance impact. If you do not store the database in a network share, set db_config.use_wal to true. Set explicitly to false to disable this warning.")
return nil case isNetwork:
}
if isNetwork {
log.Debugf("database is on network filesystem (%s), setting useWal to false", fsType) log.Debugf("database is on network filesystem (%s), setting useWal to false", fsType)
c.DbConfig.UseWal = ptr.Of(false) c.DbConfig.UseWal = ptr.Of(false)
} else { default:
log.Debugf("database is on local filesystem (%s), setting useWal to true", fsType) log.Debugf("database is on local filesystem (%s), setting useWal to true", fsType)
c.DbConfig.UseWal = ptr.Of(true) c.DbConfig.UseWal = ptr.Of(true)
} }
} else if *c.DbConfig.UseWal { } else if *c.DbConfig.UseWal {
dbDir := filepath.Dir(c.DbConfig.DbPath) dbDir := filepath.Dir(c.DbConfig.DbPath)
isNetwork, fsType, err := types.IsNetworkFS(dbDir) isNetwork, fsType, err := types.IsNetworkFS(dbDir)
if err != nil { switch {
case err != nil:
log.Warnf("unable to determine if database is on network filesystem: %s", err) log.Warnf("unable to determine if database is on network filesystem: %s", err)
return nil case isNetwork:
}
if isNetwork {
log.Warnf("database seems to be stored on a network share (%s), but useWal is set to true. Proceed at your own risk.", fsType) log.Warnf("database seems to be stored on a network share (%s), but useWal is set to true. Proceed at your own risk.", fsType)
} }
} }
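Note: the rewrite above folds the error check and the network-share check into one switch. Since a bare switch evaluates its cases top to bottom, keeping `case err != nil` first preserves the old order: a detection failure is reported before isNetwork is ever consulted. A stripped-down illustration:

package main

import "fmt"

func describeFS(isNetwork bool, fsType string, err error) string {
	switch {
	case err != nil:
		// evaluated first, so a detection failure never reaches the branches below
		return fmt.Sprintf("unknown filesystem: %v", err)
	case isNetwork:
		return fmt.Sprintf("network filesystem (%s)", fsType)
	default:
		return fmt.Sprintf("local filesystem (%s)", fsType)
	}
}

func main() {
	fmt.Println(describeFS(false, "ext4", nil)) // local filesystem (ext4)
}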


@ -214,9 +214,9 @@ func (h *Hub) GetItemFQ(itemFQName string) (*Item, error) {
return i, nil return i, nil
} }
// GetItemNames returns a slice of (full) item names for a given type // GetNamesByType returns a slice of (full) item names for a given type
// (eg. for collections: crowdsecurity/apache2 crowdsecurity/nginx). // (eg. for collections: crowdsecurity/apache2 crowdsecurity/nginx).
func (h *Hub) GetItemNames(itemType string) []string { func (h *Hub) GetNamesByType(itemType string) []string {
m := h.GetItemMap(itemType) m := h.GetItemMap(itemType)
if m == nil { if m == nil {
return nil return nil
@ -230,8 +230,8 @@ func (h *Hub) GetItemNames(itemType string) []string {
return names return names
} }
// GetAllItems returns a slice of all the items of a given type, installed or not. // GetItemsByType returns a slice of all the items of a given type, installed or not.
func (h *Hub) GetAllItems(itemType string) ([]*Item, error) { func (h *Hub) GetItemsByType(itemType string) ([]*Item, error) {
if !slices.Contains(ItemTypes, itemType) { if !slices.Contains(ItemTypes, itemType) {
return nil, fmt.Errorf("invalid item type %s", itemType) return nil, fmt.Errorf("invalid item type %s", itemType)
} }
@ -250,8 +250,8 @@ func (h *Hub) GetAllItems(itemType string) ([]*Item, error) {
return ret, nil return ret, nil
} }
// GetInstalledItems returns a slice of the installed items of a given type. // GetInstalledItemsByType returns a slice of the installed items of a given type.
func (h *Hub) GetInstalledItems(itemType string) ([]*Item, error) { func (h *Hub) GetInstalledItemsByType(itemType string) ([]*Item, error) {
if !slices.Contains(ItemTypes, itemType) { if !slices.Contains(ItemTypes, itemType) {
return nil, fmt.Errorf("invalid item type %s", itemType) return nil, fmt.Errorf("invalid item type %s", itemType)
} }
@ -269,9 +269,9 @@ func (h *Hub) GetInstalledItems(itemType string) ([]*Item, error) {
return retItems, nil return retItems, nil
} }
// GetInstalledItemNames returns the names of the installed items of a given type. // GetInstalledNamesByType returns the names of the installed items of a given type.
func (h *Hub) GetInstalledItemNames(itemType string) ([]string, error) { func (h *Hub) GetInstalledNamesByType(itemType string) ([]string, error) {
items, err := h.GetInstalledItems(itemType) items, err := h.GetInstalledItemsByType(itemType)
if err != nil { if err != nil {
return nil, err return nil, err
} }


@ -636,14 +636,24 @@ func (c *Client) createAlertChunk(machineID string, owner *ent.Machine, alerts [
if len(alertItem.Meta) > 0 { if len(alertItem.Meta) > 0 {
metaBulk := make([]*ent.MetaCreate, len(alertItem.Meta)) metaBulk := make([]*ent.MetaCreate, len(alertItem.Meta))
for i, metaItem := range alertItem.Meta { for i, metaItem := range alertItem.Meta {
key := metaItem.Key
value := metaItem.Value
if len(metaItem.Value) > 4095 {
c.Log.Warningf("truncated meta %s : value too long", metaItem.Key)
value = value[:4095]
}
if len(metaItem.Key) > 255 {
c.Log.Warningf("truncated meta %s : key too long", metaItem.Key)
key = key[:255]
}
metaBulk[i] = c.Ent.Meta.Create(). metaBulk[i] = c.Ent.Meta.Create().
SetKey(metaItem.Key). SetKey(key).
SetValue(metaItem.Value) SetValue(value)
} }
metas, err = c.Ent.Meta.CreateBulk(metaBulk...).Save(c.CTX) metas, err = c.Ent.Meta.CreateBulk(metaBulk...).Save(c.CTX)
if err != nil { if err != nil {
return nil, errors.Wrapf(BulkError, "creating alert meta: %s", err) c.Log.Warningf("error creating alert meta: %s", err)
} }
} }
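Note: the clamping added above keeps meta keys under 255 bytes and values under 4095 bytes before the bulk insert. The same logic as a small helper, shown only as a hypothetical refactor (not part of the commit); like the diff, it counts bytes, not runes:

package main

import "fmt"

// clamp returns s cut to at most n bytes, mirroring the truncation applied
// to alert meta keys (255) and values (4095) above.
func clamp(s string, n int) string {
	if len(s) > n {
		return s[:n]
	}

	return s
}

func main() {
	fmt.Println(clamp("abcdef", 3)) // abc
}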


@ -37,6 +37,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
if v[0] == "false" { if v[0] == "false" {
query = query.Where(decision.SimulatedEQ(false)) query = query.Where(decision.SimulatedEQ(false))
} }
delete(filter, "simulated") delete(filter, "simulated")
} else { } else {
query = query.Where(decision.SimulatedEQ(false)) query = query.Where(decision.SimulatedEQ(false))
@ -49,7 +50,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
if err != nil { if err != nil {
return nil, errors.Wrapf(InvalidFilter, "invalid contains value : %s", err) return nil, errors.Wrapf(InvalidFilter, "invalid contains value : %s", err)
} }
case "scopes", "scope": //Swagger mentions both of them, let's just support both to make sure we don't break anything case "scopes", "scope": // Swagger mentions both of them, let's just support both to make sure we don't break anything
scopes := strings.Split(value[0], ",") scopes := strings.Split(value[0], ",")
for i, scope := range scopes { for i, scope := range scopes {
switch strings.ToLower(scope) { switch strings.ToLower(scope) {
@ -63,6 +64,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
scopes[i] = types.AS scopes[i] = types.AS
} }
} }
query = query.Where(decision.ScopeIn(scopes...)) query = query.Where(decision.ScopeIn(scopes...))
case "value": case "value":
query = query.Where(decision.ValueEQ(value[0])) query = query.Where(decision.ValueEQ(value[0]))
@ -164,11 +166,11 @@ func (c *Client) QueryExpiredDecisionsWithFilters(filters map[string][]string) (
return data, nil return data, nil
} }
func (c *Client) QueryDecisionCountByScenario(filters map[string][]string) ([]*DecisionsByScenario, error) { func (c *Client) QueryDecisionCountByScenario() ([]*DecisionsByScenario, error) {
query := c.Ent.Decision.Query().Where( query := c.Ent.Decision.Query().Where(
decision.UntilGT(time.Now().UTC()), decision.UntilGT(time.Now().UTC()),
) )
query, err := BuildDecisionRequestWithFilter(query, filters) query, err := BuildDecisionRequestWithFilter(query, make(map[string][]string))
if err != nil { if err != nil {
c.Log.Warningf("QueryDecisionCountByScenario : %s", err) c.Log.Warningf("QueryDecisionCountByScenario : %s", err)
@ -277,10 +279,12 @@ func (c *Client) QueryNewDecisionsSinceWithFilters(since time.Time, filters map[
decision.CreatedAtGT(since), decision.CreatedAtGT(since),
decision.UntilGT(time.Now().UTC()), decision.UntilGT(time.Now().UTC()),
) )
//Allow a bouncer to ask for non-deduplicated results
// Allow a bouncer to ask for non-deduplicated results
if v, ok := filters["dedup"]; !ok || v[0] != "false" { if v, ok := filters["dedup"]; !ok || v[0] != "false" {
query = query.Where(longestDecisionForScopeTypeValue) query = query.Where(longestDecisionForScopeTypeValue)
} }
query, err := BuildDecisionRequestWithFilter(query, filters) query, err := BuildDecisionRequestWithFilter(query, filters)
if err != nil { if err != nil {
c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err) c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err)
@ -294,17 +298,20 @@ func (c *Client) QueryNewDecisionsSinceWithFilters(since time.Time, filters map[
c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err) c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err)
return []*ent.Decision{}, errors.Wrapf(QueryFail, "new decisions since '%s'", since.String()) return []*ent.Decision{}, errors.Wrapf(QueryFail, "new decisions since '%s'", since.String())
} }
return data, nil return data, nil
} }
func (c *Client) DeleteDecisionById(decisionId int) ([]*ent.Decision, error) { func (c *Client) DeleteDecisionById(decisionID int) ([]*ent.Decision, error) {
toDelete, err := c.Ent.Decision.Query().Where(decision.IDEQ(decisionId)).All(c.CTX) toDelete, err := c.Ent.Decision.Query().Where(decision.IDEQ(decisionID)).All(c.CTX)
if err != nil { if err != nil {
c.Log.Warningf("DeleteDecisionById : %s", err) c.Log.Warningf("DeleteDecisionById : %s", err)
return nil, errors.Wrapf(DeleteFail, "decision with id '%d' doesn't exist", decisionId) return nil, errors.Wrapf(DeleteFail, "decision with id '%d' doesn't exist", decisionID)
} }
count, err := c.BulkDeleteDecisions(toDelete, false) count, err := c.BulkDeleteDecisions(toDelete, false)
c.Log.Debugf("deleted %d decisions", count) c.Log.Debugf("deleted %d decisions", count)
return toDelete, err return toDelete, err
} }
@ -317,6 +324,7 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
else, return bans that are *contained* by the given value (value is the outer) */ else, return bans that are *contained* by the given value (value is the outer) */
decisions := c.Ent.Decision.Query() decisions := c.Ent.Decision.Query()
for param, value := range filter { for param, value := range filter {
switch param { switch param {
case "contains": case "contains":
@ -359,48 +367,48 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
} else if ip_sz == 16 { } else if ip_sz == 16 {
if contains { /*decision contains {start_ip,end_ip}*/ if contains { /*decision contains {start_ip,end_ip}*/
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip < query.start_ip // decision.start_ip < query.start_ip
decision.StartIPLT(start_ip), decision.StartIPLT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix <= query.start_suffix // decision.start_suffix <= query.start_suffix
decision.StartSuffixLTE(start_sfx), decision.StartSuffixLTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip > query.end_ip // decision.end_ip > query.end_ip
decision.EndIPGT(end_ip), decision.EndIPGT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix >= query.end_suffix // decision.end_suffix >= query.end_suffix
decision.EndSuffixGTE(end_sfx), decision.EndSuffixGTE(end_sfx),
), ),
), ),
)) ))
} else { } else {
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip > query.start_ip // decision.start_ip > query.start_ip
decision.StartIPGT(start_ip), decision.StartIPGT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix >= query.start_suffix // decision.start_suffix >= query.start_suffix
decision.StartSuffixGTE(start_sfx), decision.StartSuffixGTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip < query.end_ip // decision.end_ip < query.end_ip
decision.EndIPLT(end_ip), decision.EndIPLT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix <= query.end_suffix // decision.end_suffix <= query.end_suffix
decision.EndSuffixLTE(end_sfx), decision.EndSuffixLTE(end_sfx),
), ),
), ),
@ -415,11 +423,13 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
c.Log.Warningf("DeleteDecisionsWithFilter : %s", err) c.Log.Warningf("DeleteDecisionsWithFilter : %s", err)
return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter") return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter")
} }
count, err := c.BulkDeleteDecisions(toDelete, false) count, err := c.BulkDeleteDecisions(toDelete, false)
if err != nil { if err != nil {
c.Log.Warningf("While deleting decisions : %s", err) c.Log.Warningf("While deleting decisions : %s", err)
return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter") return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter")
} }
return strconv.Itoa(count), toDelete, nil return strconv.Itoa(count), toDelete, nil
} }
@ -432,6 +442,7 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
/*if contains is true, return bans that *contains* the given value (value is the inner) /*if contains is true, return bans that *contains* the given value (value is the inner)
else, return bans that are *contained* by the given value (value is the outer)*/ else, return bans that are *contained* by the given value (value is the outer)*/
decisions := c.Ent.Decision.Query().Where(decision.UntilGT(time.Now().UTC())) decisions := c.Ent.Decision.Query().Where(decision.UntilGT(time.Now().UTC()))
for param, value := range filter { for param, value := range filter {
switch param { switch param {
case "contains": case "contains":
@ -480,24 +491,24 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
/*decision contains {start_ip,end_ip}*/ /*decision contains {start_ip,end_ip}*/
if contains { if contains {
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip < query.start_ip // decision.start_ip < query.start_ip
decision.StartIPLT(start_ip), decision.StartIPLT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix <= query.start_suffix // decision.start_suffix <= query.start_suffix
decision.StartSuffixLTE(start_sfx), decision.StartSuffixLTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip > query.end_ip // decision.end_ip > query.end_ip
decision.EndIPGT(end_ip), decision.EndIPGT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix >= query.end_suffix // decision.end_suffix >= query.end_suffix
decision.EndSuffixGTE(end_sfx), decision.EndSuffixGTE(end_sfx),
), ),
), ),
@ -505,24 +516,24 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
} else { } else {
/*decision is contained within {start_ip,end_ip}*/ /*decision is contained within {start_ip,end_ip}*/
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip > query.start_ip // decision.start_ip > query.start_ip
decision.StartIPGT(start_ip), decision.StartIPGT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix >= query.start_suffix // decision.start_suffix >= query.start_suffix
decision.StartSuffixGTE(start_sfx), decision.StartSuffixGTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip < query.end_ip // decision.end_ip < query.end_ip
decision.EndIPLT(end_ip), decision.EndIPLT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix <= query.end_suffix // decision.end_suffix <= query.end_suffix
decision.EndSuffixLTE(end_sfx), decision.EndSuffixLTE(end_sfx),
), ),
), ),
@ -531,6 +542,7 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
} else if ip_sz != 0 { } else if ip_sz != 0 {
return "0", nil, errors.Wrapf(InvalidFilter, "Unknown ip size %d", ip_sz) return "0", nil, errors.Wrapf(InvalidFilter, "Unknown ip size %d", ip_sz)
} }
DecisionsToDelete, err := decisions.All(c.CTX) DecisionsToDelete, err := decisions.All(c.CTX)
if err != nil { if err != nil {
c.Log.Warningf("SoftDeleteDecisionsWithFilter : %s", err) c.Log.Warningf("SoftDeleteDecisionsWithFilter : %s", err)
@ -541,13 +553,14 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
if err != nil { if err != nil {
return "0", nil, errors.Wrapf(DeleteFail, "soft delete decisions with provided filter : %s", err) return "0", nil, errors.Wrapf(DeleteFail, "soft delete decisions with provided filter : %s", err)
} }
return strconv.Itoa(count), DecisionsToDelete, err return strconv.Itoa(count), DecisionsToDelete, err
} }
// BulkDeleteDecisions set the expiration of a bulk of decisions to now() or hard deletes them. // BulkDeleteDecisions sets the expiration of a bulk of decisions to now() or hard deletes them.
// We are doing it this way so we can return impacted decisions for sync with CAPI/PAPI // We are doing it this way so we can return impacted decisions for sync with CAPI/PAPI
func (c *Client) BulkDeleteDecisions(decisionsToDelete []*ent.Decision, softDelete bool) (int, error) { func (c *Client) BulkDeleteDecisions(decisionsToDelete []*ent.Decision, softDelete bool) (int, error) {
const bulkSize = 256 //scientifically proven to be the best value for bulk delete const bulkSize = 256 // scientifically proven to be the best value for bulk delete
var ( var (
nbUpdates int nbUpdates int
@ -576,6 +589,7 @@ func (c *Client) BulkDeleteDecisions(decisionsToDelete []*ent.Decision, softDele
return totalUpdates, fmt.Errorf("hard delete decisions with provided filter: %w", err) return totalUpdates, fmt.Errorf("hard delete decisions with provided filter: %w", err)
} }
} }
totalUpdates += nbUpdates totalUpdates += nbUpdates
} }
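The bulkSize constant above caps how many decision IDs a single UPDATE/DELETE statement has to carry; the loop whose tail appears in this hunk presumably walks decisionsToDelete in slices of that size. A generic sketch of the same batching pattern, illustrative only and not taken from the diff (the helper name is made up):

func processInBatches[T any](items []T, size int, fn func(batch []T) error) error {
    for start := 0; start < len(items); start += size {
        end := start + size
        if end > len(items) {
            end = len(items)
        }
        // each call sees at most `size` items, e.g. one soft-delete or hard-delete statement
        if err := fn(items[start:end]); err != nil {
            return err
        }
    }
    return nil
}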
@ -612,6 +626,7 @@ func (c *Client) CountDecisionsByValue(decisionValue string) (int, error) {
contains := true contains := true
decisions := c.Ent.Decision.Query() decisions := c.Ent.Decision.Query()
decisions, err = applyStartIpEndIpFilter(decisions, contains, ip_sz, start_ip, start_sfx, end_ip, end_sfx) decisions, err = applyStartIpEndIpFilter(decisions, contains, ip_sz, start_ip, start_sfx, end_ip, end_sfx)
if err != nil { if err != nil {
return 0, errors.Wrapf(err, "fail to apply StartIpEndIpFilter") return 0, errors.Wrapf(err, "fail to apply StartIpEndIpFilter")
@ -667,6 +682,7 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
)) ))
} }
return decisions, nil return decisions, nil
} }
@ -674,24 +690,24 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
/*decision contains {start_ip,end_ip}*/ /*decision contains {start_ip,end_ip}*/
if contains { if contains {
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip < query.start_ip // decision.start_ip < query.start_ip
decision.StartIPLT(start_ip), decision.StartIPLT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix <= query.start_suffix // decision.start_suffix <= query.start_suffix
decision.StartSuffixLTE(start_sfx), decision.StartSuffixLTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip > query.end_ip // decision.end_ip > query.end_ip
decision.EndIPGT(end_ip), decision.EndIPGT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix >= query.end_suffix // decision.end_suffix >= query.end_suffix
decision.EndSuffixGTE(end_sfx), decision.EndSuffixGTE(end_sfx),
), ),
), ),
@ -699,29 +715,30 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
} else { } else {
/*decision is contained within {start_ip,end_ip}*/ /*decision is contained within {start_ip,end_ip}*/
decisions = decisions.Where(decision.And( decisions = decisions.Where(decision.And(
//matching addr size // matching addr size
decision.IPSizeEQ(int64(ip_sz)), decision.IPSizeEQ(int64(ip_sz)),
decision.Or( decision.Or(
//decision.start_ip > query.start_ip // decision.start_ip > query.start_ip
decision.StartIPGT(start_ip), decision.StartIPGT(start_ip),
decision.And( decision.And(
//decision.start_ip == query.start_ip // decision.start_ip == query.start_ip
decision.StartIPEQ(start_ip), decision.StartIPEQ(start_ip),
//decision.start_suffix >= query.start_suffix // decision.start_suffix >= query.start_suffix
decision.StartSuffixGTE(start_sfx), decision.StartSuffixGTE(start_sfx),
)), )),
decision.Or( decision.Or(
//decision.end_ip < query.end_ip // decision.end_ip < query.end_ip
decision.EndIPLT(end_ip), decision.EndIPLT(end_ip),
decision.And( decision.And(
//decision.end_ip == query.end_ip // decision.end_ip == query.end_ip
decision.EndIPEQ(end_ip), decision.EndIPEQ(end_ip),
//decision.end_suffix <= query.end_suffix // decision.end_suffix <= query.end_suffix
decision.EndSuffixLTE(end_sfx), decision.EndSuffixLTE(end_sfx),
), ),
), ),
)) ))
} }
return decisions, nil return decisions, nil
} }
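The nested Or/And trees in applyStartIpEndIpFilter (and in the soft-delete paths above) all encode one idea: range bounds are compared lexicographically as (ip, suffix) pairs on the int64 halves stored in the decision row. A minimal standalone sketch of both tests, with illustrative names only (not part of the diff):

type ipBound struct {
    ip  int64 // high half of the address, as stored in start_ip / end_ip
    sfx int64 // low half / suffix, as stored in start_suffix / end_suffix
}

// le reports a <= b on (ip, suffix) pairs, mirroring the *IPLT / *IPEQ + *SuffixLTE predicates.
func le(a, b ipBound) bool {
    return a.ip < b.ip || (a.ip == b.ip && a.sfx <= b.sfx)
}

// rangeContains: the decision range [ds, de] contains the query range [qs, qe] (the "contains" branch).
func rangeContains(ds, de, qs, qe ipBound) bool {
    return le(ds, qs) && le(qe, de)
}

// rangeContainedBy: the decision range [ds, de] sits inside the query range [qs, qe] (the else branch).
func rangeContainedBy(ds, de, qs, qe ipBound) bool {
    return le(qs, ds) && le(de, qe)
}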
@ -735,8 +752,10 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
func decisionPredicatesFromStr(s string, predicateFunc func(string) predicate.Decision) []predicate.Decision { func decisionPredicatesFromStr(s string, predicateFunc func(string) predicate.Decision) []predicate.Decision {
words := strings.Split(s, ",") words := strings.Split(s, ",")
predicates := make([]predicate.Decision, len(words)) predicates := make([]predicate.Decision, len(words))
for i, word := range words { for i, word := range words {
predicates[i] = predicateFunc(word) predicates[i] = predicateFunc(word)
} }
return predicates return predicates
} }
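decisionPredicatesFromStr turns one comma-separated filter value into a slice of predicates, one per item, for the caller to combine. A hypothetical caller, illustrative only and not in the diff (it assumes the ent-generated ScenarioContainsFold predicate that string fields get):

func filterByScenarios(q *ent.DecisionQuery, csv string) *ent.DecisionQuery {
    // "crowdsecurity/ssh-bf,crowdsecurity/http-probing" becomes one predicate per scenario,
    // OR'ed together so a decision matching any of them is kept.
    return q.Where(decision.Or(decisionPredicatesFromStr(csv, decision.ScenarioContainsFold)...))
}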

View file

@ -26,10 +26,10 @@ produces:
paths: paths:
/decisions/stream: /decisions/stream:
get: get:
description: Returns a list of new/expired decisions. Intended for bouncers that need to "stream" decisions description: Returns a list of new/expired decisions. Intended for remediation components that need to "stream" decisions
summary: getDecisionsStream summary: getDecisionsStream
tags: tags:
- bouncers - Remediation component
operationId: getDecisionsStream operationId: getDecisionsStream
deprecated: false deprecated: false
produces: produces:
@ -39,7 +39,7 @@ paths:
in: query in: query
required: false required: false
type: boolean type: boolean
description: 'If true, means that the bouncers is starting and a full list must be provided' description: 'If true, means that the remediation component is starting and a full list must be provided'
- name: scopes - name: scopes
in: query in: query
required: false required: false
@ -73,10 +73,10 @@ paths:
security: security:
- APIKeyAuthorizer: [] - APIKeyAuthorizer: []
head: head:
description: Returns a list of new/expired decisions. Intended for bouncers that need to "stream" decisions description: Returns a list of new/expired decisions. Intended for remediation components that need to "stream" decisions
summary: GetDecisionsStream summary: GetDecisionsStream
tags: tags:
- bouncers - Remediation component
operationId: headDecisionsStream operationId: headDecisionsStream
deprecated: false deprecated: false
produces: produces:
@ -100,7 +100,7 @@ paths:
description: Returns information about existing decisions description: Returns information about existing decisions
summary: getDecisions summary: getDecisions
tags: tags:
- bouncers - Remediation component
operationId: getDecisions operationId: getDecisions
deprecated: false deprecated: false
produces: produces:
@ -164,7 +164,7 @@ paths:
description: Returns information about existing decisions description: Returns information about existing decisions
summary: GetDecisions summary: GetDecisions
tags: tags:
- bouncers - Remediation component
operationId: headDecisions operationId: headDecisions
deprecated: false deprecated: false
produces: produces:
@ -1008,7 +1008,7 @@ definitions:
title: "error response" title: "error response"
description: "error response return by the API" description: "error response return by the API"
tags: tags:
- name: bouncers - name: Remediation component
description: 'Operations about decisions : bans, captcha, rate-limit etc.' description: 'Operations about decisions : bans, captcha, rate-limit etc.'
- name: watchers - name: watchers
description: 'Operations about watchers : cscli & crowdsec' description: 'Operations about watchers : cscli & crowdsec'
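For context, a remediation component consuming the stream endpoint documented above would typically authenticate with an API key header and pass startup=true on its first pull. A minimal client sketch, assuming the usual /v1 base path, the default listen address, and a placeholder key (none of which are stated in this diff):

package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    req, err := http.NewRequest(http.MethodGet,
        "http://127.0.0.1:8080/v1/decisions/stream?startup=true", nil)
    if err != nil {
        panic(err)
    }
    req.Header.Set("X-Api-Key", "<bouncer_api_key>") // placeholder key

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println(resp.Status)
    fmt.Println(string(body)) // expected shape: {"new": [...], "deleted": [...]}
}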

View file

@ -1,4 +1,4 @@
//go:build !windows //go:build !windows && !freebsd
package types package types

View file

@ -0,0 +1,25 @@
//go:build freebsd
package types
import (
"fmt"
"syscall"
)
func GetFSType(path string) (string, error) {
var fsStat syscall.Statfs_t
if err := syscall.Statfs(path, &fsStat); err != nil {
return "", fmt.Errorf("failed to get filesystem type: %w", err)
}
// Fstypename is a fixed-size, NUL-padded array; keep bytes only up to the first
// NUL so the returned name compares cleanly against values like "ufs" or "zfs".
b := make([]byte, 0, len(fsStat.Fstypename))
for _, v := range fsStat.Fstypename {
if v == 0 {
break
}
b = append(b, byte(v))
}
return string(b), nil
}
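A possible usage sketch for the new FreeBSD helper, illustrative only (the import path is inferred from the repository layout and the log path is arbitrary):

package main

import (
    "fmt"

    "github.com/crowdsecurity/crowdsec/pkg/types"
)

func main() {
    fsType, err := types.GetFSType("/var/log")
    if err != nil {
        fmt.Println("error:", err)
        return
    }
    fmt.Println("filesystem type:", fsType) // e.g. "ufs" or "zfs"
}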

View file

@ -66,11 +66,11 @@ bats-check-requirements: ## Check dependencies for functional tests
@$(TEST_DIR)/bin/check-requirements @$(TEST_DIR)/bin/check-requirements
bats-update-tools: ## Install/update tools required for functional tests bats-update-tools: ## Install/update tools required for functional tests
# yq v4.40.4 # yq v4.43.1
GOBIN=$(TEST_DIR)/tools go install github.com/mikefarah/yq/v4@1c3d55106075bd37df197b4bc03cb4a413fdb903 GOBIN=$(TEST_DIR)/tools go install github.com/mikefarah/yq/v4@c35ec752e38ea0c096d3c44e13cfc0797ac394d8
# cfssl v1.6.4 # cfssl v1.6.5
GOBIN=$(TEST_DIR)/tools go install github.com/cloudflare/cfssl/cmd/cfssl@b4d0d877cac528f63db39dfb62d5c96cd3a32a0b GOBIN=$(TEST_DIR)/tools go install github.com/cloudflare/cfssl/cmd/cfssl@96259aa29c9cc9b2f4e04bad7d4bc152e5405dda
GOBIN=$(TEST_DIR)/tools go install github.com/cloudflare/cfssl/cmd/cfssljson@b4d0d877cac528f63db39dfb62d5c96cd3a32a0b GOBIN=$(TEST_DIR)/tools go install github.com/cloudflare/cfssl/cmd/cfssljson@96259aa29c9cc9b2f4e04bad7d4bc152e5405dda
# Build and installs crowdsec in a local directory. Rebuilds if already exists. # Build and installs crowdsec in a local directory. Rebuilds if already exists.
bats-build: bats-environment ## Build binaries for functional tests bats-build: bats-environment ## Build binaries for functional tests