diff --git a/.github/workflows/bats-hub.yml b/.github/workflows/bats-hub.yml
index 32d53a3a6..61ccb8f67 100644
--- a/.github/workflows/bats-hub.yml
+++ b/.github/workflows/bats-hub.yml
@@ -15,7 +15,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: ubuntu-latest
@@ -37,7 +37,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: "Install bats dependencies"
         env:
diff --git a/.github/workflows/bats-mysql.yml b/.github/workflows/bats-mysql.yml
index 897122f63..e39737af0 100644
--- a/.github/workflows/bats-mysql.yml
+++ b/.github/workflows/bats-mysql.yml
@@ -14,7 +14,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: ubuntu-latest
@@ -44,7 +44,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: "Install bats dependencies"
         env:
diff --git a/.github/workflows/bats-postgres.yml b/.github/workflows/bats-postgres.yml
index c2aefef04..ce983f178 100644
--- a/.github/workflows/bats-postgres.yml
+++ b/.github/workflows/bats-postgres.yml
@@ -10,14 +10,14 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: ubuntu-latest
     timeout-minutes: 30
 
     services:
       database:
-        image: postgres:15
+        image: postgres:16
         env:
           POSTGRES_PASSWORD: "secret"
         ports:
@@ -30,13 +30,13 @@ jobs:
     steps:
-      - name: "Install pg_dump v15"
+      - name: "Install pg_dump v16"
         # we can remove this when it's released on ubuntu-latest
         run: |
           sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
           wget -qO- https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo tee /etc/apt/trusted.gpg.d/pgdg.asc &>/dev/null
           sudo apt update
-          sudo apt -qq -y -o=Dpkg::Use-Pty=0 install postgresql-client-15
+          sudo apt -qq -y -o=Dpkg::Use-Pty=0 install postgresql-client-16
 
       - name: "Force machineid"
         run: |
@@ -53,7 +53,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: "Install bats dependencies"
         env:
diff --git a/.github/workflows/bats-sqlite-coverage.yml b/.github/workflows/bats-sqlite-coverage.yml
index 93f85b72e..dc31488a1 100644
--- a/.github/workflows/bats-sqlite-coverage.yml
+++ b/.github/workflows/bats-sqlite-coverage.yml
@@ -11,7 +11,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: ubuntu-latest
@@ -34,7 +34,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: "Install bats dependencies"
         env:
diff --git a/.github/workflows/ci-windows-build-msi.yml b/.github/workflows/ci-windows-build-msi.yml
index 47f5b905d..869363f25 100644
--- a/.github/workflows/ci-windows-build-msi.yml
+++ b/.github/workflows/ci-windows-build-msi.yml
@@ -23,7 +23,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: Build
     runs-on: windows-2019
@@ -40,7 +40,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: Build
         run: make windows_installer BUILD_RE2_WASM=1
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index c1995cd8d..ce9482274 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -45,6 +45,9 @@ jobs:
     steps:
     - name: Checkout repository
       uses: actions/checkout@v3
+      with:
+        # required to pick up tags for BUILD_VERSION
+        fetch-depth: 0
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -58,8 +61,8 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+    # - name: Autobuild
+    #   uses: github/codeql-action/autobuild@v2
 
     # ℹī¸ Command-line programs to run using the OS shell.
     # 📚 https://git.io/JvXDl
@@ -68,9 +71,14 @@ jobs:
     #   and modify them (or add more) to build your code if your project
     #   uses a compiled language
 
-    #- run: |
-    #   make bootstrap
-    #   make release
+    - name: "Set up Go"
+      uses: actions/setup-go@v4
+      with:
+        go-version: "1.21.0"
+        cache-dependency-path: "**/go.sum"
+
+    - run: |
+        make clean build BUILD_RE2_WASM=1
 
     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v2
diff --git a/.github/workflows/go-tests-windows.yml b/.github/workflows/go-tests-windows.yml
index 772c574ab..aa2116b1c 100644
--- a/.github/workflows/go-tests-windows.yml
+++ b/.github/workflows/go-tests-windows.yml
@@ -22,7 +22,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: windows-2022
@@ -39,7 +39,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: Build
         run: |
@@ -61,7 +60,7 @@ jobs:
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v3
         with:
-          version: v1.51
+          version: v1.54
           args: --issues-exit-code=1 --timeout 10m
           only-new-issues: false
           # the cache is already managed above, enabling it here
diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml
index 2dff8af21..2a760d149 100644
--- a/.github/workflows/go-tests.yml
+++ b/.github/workflows/go-tests.yml
@@ -34,7 +34,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: "Build + tests"
     runs-on: ubuntu-latest
@@ -120,7 +120,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: Build and run tests, static
        run: |
@@ -145,7 +144,7 @@ jobs:
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v3
         with:
-          version: v1.51
+          version: v1.54
           args: --issues-exit-code=1 --timeout 10m
           only-new-issues: false
           # the cache is already managed above, enabling it here
diff --git a/.github/workflows/release_publish-package.yml b/.github/workflows/release_publish-package.yml
index c38e0812c..54fb62147 100644
--- a/.github/workflows/release_publish-package.yml
+++ b/.github/workflows/release_publish-package.yml
@@ -14,7 +14,7 @@ jobs:
   build:
     strategy:
       matrix:
-        go-version: ["1.20.6"]
+        go-version: ["1.21.3"]
 
     name: Build and upload binary package
     runs-on: ubuntu-latest
@@ -30,7 +30,6 @@ jobs:
         uses: actions/setup-go@v4
         with:
           go-version: ${{ matrix.go-version }}
-          cache-dependency-path: "**/go.sum"
 
       - name: Build the binaries
         run: |
@@ -42,4 +41,4 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           tag_name="${GITHUB_REF##*/}"
-          hub release edit -a crowdsec-release.tgz -a vendor.tgz -m "" "$tag_name"
+          hub release edit -a crowdsec-release.tgz -a vendor.tgz -a *-vendor.tar.xz -m "" "$tag_name"
diff --git a/.gitignore b/.gitignore
index d2804f0c4..3054e9eb3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,12 +41,7 @@ vendor.tgz
 # crowdsec binaries
 cmd/crowdsec-cli/cscli
 cmd/crowdsec/crowdsec
-plugins/notifications/http/notification-http
-plugins/notifications/slack/notification-slack
-plugins/notifications/splunk/notification-splunk
-plugins/notifications/email/notification-email
-plugins/notifications/dummy/notification-dummy
-plugins/notifications/sentinel/notification-sentinel
+cmd/notification-*/notification-*
 
 # Test cache (downloaded files)
 .cache
diff --git a/.golangci.yml b/.golangci.yml
index faa67c4bb..7aabdc4b1 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -105,6 +105,7 @@ linters:
     # Recommended? (easy)
     #
+    - depguard              # Go linter that checks if package imports are in a list of acceptable packages
     - dogsled               # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
     - errchkjson            # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occations, where the check for the returned error can be omitted.
     - errorlint             # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
@@ -121,10 +122,10 @@ linters:
     - nosprintfhostport     # Checks for misuse of Sprintf to construct a host with port in a URL.
     - promlinter            # Check Prometheus metrics naming via promlint
     - revive                # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
+    - tagalign              # check that struct tags are well aligned [fast: true, auto-fix: true]
     - thelper               # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
     - wastedassign          # wastedassign finds wasted assignment statements.
     - wrapcheck             # Checks that errors returned from external packages are wrapped
-    - depguard              # Go linter that checks if package imports are in a list of acceptable packages
 
     #
     # Recommended? (requires some work)
@@ -198,6 +199,18 @@ issues:
         - govet
       text: "shadow: declaration of \"err\" shadows declaration"
 
+    #
+    # typecheck
+    #
+
+    - linters:
+        - typecheck
+      text: "undefined: min"
+
+    - linters:
+        - typecheck
+      text: "undefined: max"
+
     #
     # errcheck
     #
diff --git a/Dockerfile b/Dockerfile
index e2da0a106..32d723efc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-ARG GOVERSION=1.20.6
+ARG GOVERSION=1.21.3
 
 FROM golang:${GOVERSION}-alpine AS build
 
@@ -52,11 +52,11 @@ FROM slim as plugins
 # Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
 # The files are here for reference, as users will need to mount a new version to be actually able to use notifications
-COPY --from=build /go/src/crowdsec/plugins/notifications/email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/sentinel/sentinel.yaml /staging/etc/crowdsec/notifications/sentinel.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-sentinel/sentinel.yaml /staging/etc/crowdsec/notifications/sentinel.yaml
 COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
 
 FROM slim as geoip
diff --git a/Dockerfile.debian b/Dockerfile.debian
index e5dcee695..42a156ae1 100644
--- a/Dockerfile.debian
+++ b/Dockerfile.debian
@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-ARG GOVERSION=1.20.6
+ARG GOVERSION=1.21.3
 
 FROM golang:${GOVERSION}-bookworm AS build
 
@@ -68,11 +68,11 @@ FROM slim as plugins
 # Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
 # The files are here for reference, as users will need to mount a new version to be actually able to use notifications
-COPY --from=build /go/src/crowdsec/plugins/notifications/email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
-COPY --from=build /go/src/crowdsec/plugins/notifications/sentinel/sentinel.yaml /staging/etc/crowdsec/notifications/sentinel.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
+COPY --from=build /go/src/crowdsec/cmd/notification-sentinel/sentinel.yaml /staging/etc/crowdsec/notifications/sentinel.yaml
 COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
 
 FROM slim as geoip
diff --git a/Makefile b/Makefile
index 9b837cd58..c54f647cd 100644
--- a/Makefile
+++ b/Makefile
@@ -23,11 +23,11 @@ BUILD_RE2_WASM ?= 0
 BUILD_STATIC ?= 0
 
 # List of plugins to build
-PLUGINS ?= $(patsubst ./plugins/notifications/%,%,$(wildcard ./plugins/notifications/*))
+PLUGINS ?= $(patsubst ./cmd/notification-%,%,$(wildcard ./cmd/notification-*))
 
 # Can be overriden, if you can deal with the consequences
 BUILD_REQUIRE_GO_MAJOR ?= 1
-BUILD_REQUIRE_GO_MINOR ?= 20
+BUILD_REQUIRE_GO_MINOR ?= 21
 
 #--------------------------------------
@@ -38,7 +38,7 @@ BUILD_CODENAME ?= alphaga
 CROWDSEC_FOLDER = ./cmd/crowdsec
 CSCLI_FOLDER = ./cmd/crowdsec-cli/
-PLUGINS_DIR = ./plugins/notifications
+PLUGINS_DIR_PREFIX = ./cmd/notification-
 
 CROWDSEC_BIN = crowdsec$(EXT)
 CSCLI_BIN = cscli$(EXT)
@@ -64,7 +64,7 @@ bool = $(if $(filter $(call lc, $1),1 yes true),1,0)
 #--------------------------------------
 #
-# Define MAKE_FLAGS and LD_OPTS for the sub-makefiles in cmd/ and plugins/
+# Define MAKE_FLAGS and LD_OPTS for the sub-makefiles in cmd/
 #
 
 MAKE_FLAGS = --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)"
@@ -92,7 +92,6 @@ ifeq ($(PKG_CONFIG),)
 endif
 
 ifeq ($(RE2_CHECK),)
-# we could detect the platform and suggest the command to install
 RE2_FAIL := "libre2-dev is not installed, please install it or set BUILD_RE2_WASM=1 to use the WebAssembly version"
 else
 # += adds a space that we don't want
@@ -101,6 +100,7 @@ LD_OPTS_VARS += -X '$(GO_MODULE_NAME)/pkg/cwversion.Libre2=C++'
 endif
 endif
 
+# Build static to avoid the runtime dependency on libre2.so
 ifeq ($(call bool,$(BUILD_STATIC)),1)
 BUILD_TYPE = static
 EXTLDFLAGS := -extldflags '-static'
@@ -109,10 +109,19 @@ BUILD_TYPE = dynamic
 EXTLDFLAGS :=
 endif
 
-export LD_OPTS=-ldflags "-s -w $(EXTLDFLAGS) $(LD_OPTS_VARS)" \
-	-trimpath -tags $(GO_TAGS)
+# Build with debug symbols, and disable optimizations + inlining, to use Delve
+ifeq ($(call bool,$(DEBUG)),1)
+STRIP_SYMBOLS :=
+DISABLE_OPTIMIZATION := -gcflags "-N -l"
+else
+STRIP_SYMBOLS := -s -w
+DISABLE_OPTIMIZATION :=
+endif
 
-ifneq (,$(TEST_COVERAGE))
+export LD_OPTS=-ldflags "$(STRIP_SYMBOLS) $(EXTLDFLAGS) $(LD_OPTS_VARS)" \
+	-trimpath -tags $(GO_TAGS) $(DISABLE_OPTIMIZATION)
+
+ifeq ($(call bool,$(TEST_COVERAGE)),1)
 LD_OPTS += -cover
 endif
 
@@ -135,19 +144,47 @@ ifneq (,$(RE2_CHECK))
 else
 $(info Fallback to WebAssembly regexp library. To use the C++ version, make sure you have installed libre2-dev and pkg-config.)
 endif
+
+ifeq ($(call bool,$(DEBUG)),1)
+$(info Building with debug symbols and disabled optimizations)
+endif
+
+ifeq ($(call bool,$(TEST_COVERAGE)),1)
+$(info Test coverage collection enabled)
+endif
+
 $(info )
 
 .PHONY: all
 all: clean test build
 
 .PHONY: plugins
 plugins:
 	@$(foreach plugin,$(PLUGINS), \
-		$(MAKE) -C $(PLUGINS_DIR)/$(plugin) build $(MAKE_FLAGS); \
+		$(MAKE) -C $(PLUGINS_DIR_PREFIX)$(plugin) build $(MAKE_FLAGS); \
 	)
 
+# same as "$(MAKE) -f debian/rules clean" but without the dependency on debhelper
+.PHONY: clean-debian
+clean-debian:
+	@$(RM) -r debian/crowdsec
+	@$(RM) -r debian/crowdsec
+	@$(RM) -r debian/files
+	@$(RM) -r debian/.debhelper
+	@$(RM) -r debian/*.substvars
+	@$(RM) -r debian/*-stamp
+
+.PHONY: clean-rpm
+clean-rpm:
+	@$(RM) -r rpm/BUILD
+	@$(RM) -r rpm/BUILDROOT
+	@$(RM) -r rpm/RPMS
+	@$(RM) -r rpm/SOURCES/*.tar.gz
+	@$(RM) -r rpm/SRPMS
+
 .PHONY: clean
-clean: testclean
+clean: clean-debian clean-rpm testclean
 	@$(MAKE) -C $(CROWDSEC_FOLDER) clean $(MAKE_FLAGS)
 	@$(MAKE) -C $(CSCLI_FOLDER) clean $(MAKE_FLAGS)
 	@$(RM) $(CROWDSEC_BIN) $(WIN_IGNORE_ERR)
@@ -155,7 +192,7 @@ clean: testclean
 	@$(RM) *.log $(WIN_IGNORE_ERR)
 	@$(RM) crowdsec-release.tgz $(WIN_IGNORE_ERR)
 	@$(foreach plugin,$(PLUGINS), \
-		$(MAKE) -C $(PLUGINS_DIR)/$(plugin) clean $(MAKE_FLAGS); \
+		$(MAKE) -C $(PLUGINS_DIR_PREFIX)$(plugin) clean $(MAKE_FLAGS); \
 	)
 
 .PHONY: cscli
@@ -166,6 +203,12 @@ cscli: goversion
 crowdsec: goversion
 	@$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS)
 
+.PHONY: notification-email
+notification-email: goversion
+	@$(MAKE) -C cmd/notification-email build $(MAKE_FLAGS)
+
+
+
 .PHONY: testclean
 testclean: bats-clean
 	@$(RM) pkg/apiserver/ent $(WIN_IGNORE_ERR)
@@ -201,37 +244,17 @@ localstack:
 localstack-stop:
 	docker-compose -f test/localstack/docker-compose.yml down
 
-# list of plugins that contain go.mod
-PLUGIN_VENDOR = $(foreach plugin,$(PLUGINS),$(shell if [ -f $(PLUGINS_DIR)/$(plugin)/go.mod ]; then echo $(PLUGINS_DIR)/$(plugin); fi))
-
 # build vendor.tgz to be distributed with the release
 .PHONY: vendor
-vendor:
-	$(foreach plugin_dir,$(PLUGIN_VENDOR), \
-		cd $(plugin_dir) >/dev/null && \
-		$(GO) mod vendor && \
-		cd - >/dev/null; \
-	)
+vendor: vendor-remove
 	$(GO) mod vendor
-	tar -czf vendor.tgz vendor $(foreach plugin_dir,$(PLUGIN_VENDOR),$(plugin_dir)/vendor)
-
-.PHONY: tidy
-tidy:
-	$(GO) mod tidy
-	$(foreach plugin_dir,$(PLUGIN_VENDOR), \
-		cd $(plugin_dir) >/dev/null && \
-		$(GO) mod tidy && \
-		cd - >/dev/null; \
-	)
+	tar czf vendor.tgz vendor
+	tar --create --auto-compress --file=$(RELDIR)-vendor.tar.xz vendor
 
 # remove vendor directories and vendor.tgz
 .PHONY: vendor-remove
 vendor-remove:
-	$(foreach plugin_dir,$(PLUGIN_VENDOR), \
-		$(RM) $(plugin_dir)/vendor; \
-	)
-	$(RM) vendor vendor.tgz
-
+	$(RM) vendor vendor.tgz *-vendor.tar.xz
 
 .PHONY: package
 package:
@@ -242,9 +265,9 @@ package:
 	@$(CP) $(CSCLI_FOLDER)/$(CSCLI_BIN) $(RELDIR)/cmd/crowdsec-cli
 
 	@$(foreach plugin,$(PLUGINS), \
-		$(MKDIR) $(RELDIR)/$(PLUGINS_DIR)/$(plugin); \
-		$(CP) $(PLUGINS_DIR)/$(plugin)/notification-$(plugin)$(EXT) $(RELDIR)/$(PLUGINS_DIR)/$(plugin); \
-		$(CP) $(PLUGINS_DIR)/$(plugin)/$(plugin).yaml $(RELDIR)/$(PLUGINS_DIR)/$(plugin)/; \
+		$(MKDIR) $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin); \
+		$(CP) $(PLUGINS_DIR_PREFIX)$(plugin)/notification-$(plugin)$(EXT) $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin); \
+		$(CP) $(PLUGINS_DIR_PREFIX)$(plugin)/$(plugin).yaml $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin)/; \
 	)
 
 	@$(CPR) ./config $(RELDIR)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index d4a2f5b21..d3d848369 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -27,7 +27,7 @@ stages: - task: GoTool@0 displayName: "Install Go 1.20" inputs: - version: '1.20.6' + version: '1.21.3' - pwsh: | choco install -y make diff --git a/cmd/crowdsec-cli/Makefile b/cmd/crowdsec-cli/Makefile index cd7eedff6..392361ef8 100644 --- a/cmd/crowdsec-cli/Makefile +++ b/cmd/crowdsec-cli/Makefile @@ -4,10 +4,8 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters GO = go GOBUILD = $(GO) build -GOTEST = $(GO) test BINARY_NAME = cscli$(EXT) PREFIX ?= "/" @@ -17,7 +15,7 @@ BIN_PREFIX = $(PREFIX)"/usr/local/bin/" all: clean build build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) .PHONY: install install: install-conf install-bin diff --git a/cmd/crowdsec-cli/alerts.go b/cmd/crowdsec-cli/alerts.go index c37d09b1e..8cee46ba3 100644 --- a/cmd/crowdsec-cli/alerts.go +++ b/cmd/crowdsec-cli/alerts.go @@ -126,6 +126,12 @@ func AlertsToTable(alerts *models.GetAlertsResponse, printMachine bool) error { } csvwriter.Flush() } else if csConfig.Cscli.Output == "json" { + if *alerts == nil { + // avoid returning "null" in json + // could be cleaner if we used slice of alerts directly + fmt.Println("[]") + return nil + } x, _ := json.MarshalIndent(alerts, "", " ") fmt.Printf("%s", string(x)) } else if csConfig.Cscli.Output == "human" { @@ -208,6 +214,7 @@ func NewAlertsCmd() *cobra.Command { Short: "Manage alerts", Args: cobra.MinimumNArgs(1), DisableAutoGenTag: true, + Aliases: []string{"alert"}, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { var err error if err := csConfig.LoadAPIClient(); err != nil { diff --git a/cmd/crowdsec-cli/bouncers.go b/cmd/crowdsec-cli/bouncers.go index 3c92d22b8..7f5c4b597 100644 --- a/cmd/crowdsec-cli/bouncers.go +++ b/cmd/crowdsec-cli/bouncers.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" "io" + "slices" "strings" "time" @@ -12,7 +13,6 @@ import ( "github.com/fatih/color" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "golang.org/x/exp/slices" middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1" "github.com/crowdsecurity/crowdsec/pkg/database" @@ -160,7 +160,7 @@ func runBouncersDelete(cmd *cobra.Command, args []string) error { func NewBouncersDeleteCmd() *cobra.Command { cmdBouncersDelete := &cobra.Command{ Use: "delete MyBouncerName", - Short: "delete a single bouncer from the database", + Short: "delete bouncer(s) from the database", Args: cobra.MinimumNArgs(1), Aliases: []string{"remove"}, DisableAutoGenTag: true, diff --git a/cmd/crowdsec-cli/capi.go b/cmd/crowdsec-cli/capi.go index 334899ce9..0e0f217aa 100644 --- a/cmd/crowdsec-cli/capi.go +++ b/cmd/crowdsec-cli/capi.go @@ -60,16 +60,16 @@ func NewCapiRegisterCmd() *cobra.Command { Short: "Register to Central API (CAPI)", Args: cobra.MinimumNArgs(0), DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { var err error capiUser, err := generateID(capiUserPrefix) if err != nil { - log.Fatalf("unable to generate machine id: %s", err) + return fmt.Errorf("unable to generate machine id: %s", err) } password := strfmt.Password(generatePassword(passwordLength)) apiurl, err := url.Parse(types.CAPIBaseURL) if err != nil { - log.Fatalf("unable to parse api url %s : %s", types.CAPIBaseURL, err) + return fmt.Errorf("unable to parse api url %s: %w", types.CAPIBaseURL, err) } _, err = 
apiclient.RegisterClient(&apiclient.Config{ MachineID: capiUser, @@ -80,7 +80,7 @@ func NewCapiRegisterCmd() *cobra.Command { }, nil) if err != nil { - log.Fatalf("api client register ('%s'): %s", types.CAPIBaseURL, err) + return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err) } log.Printf("Successfully registered to Central API (CAPI)") @@ -103,12 +103,12 @@ func NewCapiRegisterCmd() *cobra.Command { } apiConfigDump, err := yaml.Marshal(apiCfg) if err != nil { - log.Fatalf("unable to marshal api credentials: %s", err) + return fmt.Errorf("unable to marshal api credentials: %w", err) } if dumpFile != "" { err = os.WriteFile(dumpFile, apiConfigDump, 0600) if err != nil { - log.Fatalf("write api credentials in '%s' failed: %s", dumpFile, err) + return fmt.Errorf("write api credentials in '%s' failed: %w", dumpFile, err) } log.Printf("Central API credentials dumped to '%s'", dumpFile) } else { @@ -116,6 +116,8 @@ func NewCapiRegisterCmd() *cobra.Command { } log.Warning(ReloadMessage()) + + return nil }, } cmdCapiRegister.Flags().StringVarP(&outputFile, "file", "f", "", "output file destination") @@ -133,53 +135,56 @@ func NewCapiStatusCmd() *cobra.Command { Short: "Check status with the Central API (CAPI)", Args: cobra.MinimumNArgs(0), DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if csConfig.API.Server.OnlineClient == nil { - log.Fatalf("Please provide credentials for the Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath) + return fmt.Errorf("please provide credentials for the Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath) } if csConfig.API.Server.OnlineClient.Credentials == nil { - log.Fatalf("no credentials for Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath) + return fmt.Errorf("no credentials for Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath) } password := strfmt.Password(csConfig.API.Server.OnlineClient.Credentials.Password) + apiurl, err := url.Parse(csConfig.API.Server.OnlineClient.Credentials.URL) if err != nil { - log.Fatalf("parsing api url ('%s'): %s", csConfig.API.Server.OnlineClient.Credentials.URL, err) + return fmt.Errorf("parsing api url ('%s'): %w", csConfig.API.Server.OnlineClient.Credentials.URL, err) } - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) + if err := require.Hub(csConfig); err != nil { + return err } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to load hub index : %s", err) - } - scenarios, err := cwhub.GetInstalledScenariosAsString() + scenarios, err := cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS) if err != nil { - log.Fatalf("failed to get scenarios : %s", err) + return fmt.Errorf("failed to get scenarios: %w", err) } + if len(scenarios) == 0 { - log.Fatalf("no scenarios installed, abort") + return fmt.Errorf("no scenarios installed, abort") } Client, err = apiclient.NewDefaultClient(apiurl, CAPIURLPrefix, fmt.Sprintf("crowdsec/%s", version.String()), nil) if err != nil { - log.Fatalf("init default client: %s", err) + return fmt.Errorf("init default client: %w", err) } + t := models.WatcherAuthRequest{ MachineID: &csConfig.API.Server.OnlineClient.Credentials.Login, Password: &password, Scenarios: scenarios, } + log.Infof("Loaded credentials from %s", csConfig.API.Server.OnlineClient.CredentialsFilePath) 
log.Infof("Trying to authenticate with username %s on %s", csConfig.API.Server.OnlineClient.Credentials.Login, apiurl) + _, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t) if err != nil { - log.Fatalf("Failed to authenticate to Central API (CAPI) : %s", err) + return fmt.Errorf("failed to authenticate to Central API (CAPI): %w", err) } log.Infof("You can successfully interact with Central API (CAPI)") + + return nil }, } diff --git a/cmd/crowdsec-cli/collections.go b/cmd/crowdsec-cli/collections.go index 3e24a5860..6806d39a7 100644 --- a/cmd/crowdsec-cli/collections.go +++ b/cmd/crowdsec-cli/collections.go @@ -7,6 +7,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) @@ -20,20 +21,8 @@ func NewCollectionsCmd() *cobra.Command { Aliases: []string{"collection"}, DisableAutoGenTag: true, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if csConfig.Hub == nil { - return fmt.Errorf("you must configure cli before interacting with hub") - } - - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) + if err := require.Hub(csConfig); err != nil { + return err } return nil @@ -47,6 +36,7 @@ func NewCollectionsCmd() *cobra.Command { } var ignoreError bool + var cmdCollectionsInstall = &cobra.Command{ Use: "install collection", Short: "Install given collection(s)", @@ -57,7 +47,7 @@ func NewCollectionsCmd() *cobra.Command { return compAllItems(cwhub.COLLECTIONS, args, toComplete) }, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { for _, name := range args { t := cwhub.GetItem(cwhub.COLLECTIONS, name) if t == nil { @@ -67,11 +57,12 @@ func NewCollectionsCmd() *cobra.Command { } if err := cwhub.InstallItem(csConfig, name, cwhub.COLLECTIONS, forceAction, downloadOnly); err != nil { if !ignoreError { - log.Fatalf("Error while installing '%s': %s", name, err) + return fmt.Errorf("error while installing '%s': %w", name, err) } log.Errorf("Error while installing '%s': %s", name, err) } } + return nil }, } cmdCollectionsInstall.PersistentFlags().BoolVarP(&downloadOnly, "download-only", "d", false, "Only download packages, don't enable") @@ -89,21 +80,21 @@ func NewCollectionsCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.COLLECTIONS, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.RemoveMany(csConfig, cwhub.COLLECTIONS, "", all, purge, forceAction) - return + return nil } if len(args) == 0 { - log.Fatal("Specify at least one collection to remove or '--all' flag.") + return fmt.Errorf("specify at least one collection to remove or '--all'") } for _, name := range args { if !forceAction { item := cwhub.GetItem(cwhub.COLLECTIONS, name) if item == nil { - log.Fatalf("unable to retrieve: %s\n", name) + return fmt.Errorf("unable to retrieve: %s", name) } if len(item.BelongsToCollections) > 0 { log.Warningf("%s belongs to other collections :\n%s\n", name, 
item.BelongsToCollections) @@ -113,6 +104,7 @@ func NewCollectionsCmd() *cobra.Command { } cwhub.RemoveMany(csConfig, cwhub.COLLECTIONS, name, all, purge, forceAction) } + return nil }, } cmdCollectionsRemove.PersistentFlags().BoolVar(&purge, "purge", false, "Delete source file too") @@ -129,17 +121,18 @@ func NewCollectionsCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.COLLECTIONS, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.UpgradeConfig(csConfig, cwhub.COLLECTIONS, "", forceAction) } else { if len(args) == 0 { - log.Fatalf("no target collection to upgrade") + return fmt.Errorf("specify at least one collection to upgrade or '--all'") } for _, name := range args { cwhub.UpgradeConfig(csConfig, cwhub.COLLECTIONS, name, forceAction) } } + return nil }, } cmdCollectionsUpgrade.PersistentFlags().BoolVarP(&all, "all", "a", false, "Upgrade all the collections") diff --git a/cmd/crowdsec-cli/config_backup.go b/cmd/crowdsec-cli/config_backup.go index 717fc990b..436eff8aa 100644 --- a/cmd/crowdsec-cli/config_backup.go +++ b/cmd/crowdsec-cli/config_backup.go @@ -1,6 +1,7 @@ package main import ( + "encoding/json" "fmt" "os" "path/filepath" @@ -9,9 +10,76 @@ import ( "github.com/spf13/cobra" "github.com/crowdsecurity/crowdsec/pkg/cwhub" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" ) -/* Backup crowdsec configurations to directory : +func backupHub(dirPath string) error { + var err error + var itemDirectory string + var upstreamParsers []string + + for _, itemType := range cwhub.ItemTypes { + clog := log.WithFields(log.Fields{ + "type": itemType, + }) + itemMap := cwhub.GetItemMap(itemType) + if itemMap == nil { + clog.Infof("No %s to backup.", itemType) + continue + } + itemDirectory = fmt.Sprintf("%s/%s/", dirPath, itemType) + if err := os.MkdirAll(itemDirectory, os.ModePerm); err != nil { + return fmt.Errorf("error while creating %s : %s", itemDirectory, err) + } + upstreamParsers = []string{} + for k, v := range itemMap { + clog = clog.WithFields(log.Fields{ + "file": v.Name, + }) + if !v.Installed { //only backup installed ones + clog.Debugf("[%s] : not installed", k) + continue + } + + //for the local/tainted ones, we backup the full file + if v.Tainted || v.Local || !v.UpToDate { + //we need to backup stages for parsers + if itemType == cwhub.PARSERS || itemType == cwhub.PARSERS_OVFLW { + fstagedir := fmt.Sprintf("%s%s", itemDirectory, v.Stage) + if err := os.MkdirAll(fstagedir, os.ModePerm); err != nil { + return fmt.Errorf("error while creating stage dir %s : %s", fstagedir, err) + } + } + clog.Debugf("[%s] : backuping file (tainted:%t local:%t up-to-date:%t)", k, v.Tainted, v.Local, v.UpToDate) + tfile := fmt.Sprintf("%s%s/%s", itemDirectory, v.Stage, v.FileName) + if err = CopyFile(v.LocalPath, tfile); err != nil { + return fmt.Errorf("failed copy %s %s to %s : %s", itemType, v.LocalPath, tfile, err) + } + clog.Infof("local/tainted saved %s to %s", v.LocalPath, tfile) + continue + } + clog.Debugf("[%s] : from hub, just backup name (up-to-date:%t)", k, v.UpToDate) + clog.Infof("saving, version:%s, up-to-date:%t", v.Version, v.UpToDate) + upstreamParsers = append(upstreamParsers, v.Name) + } + //write the upstream items + upstreamParsersFname := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itemType) + upstreamParsersContent, err := 
json.MarshalIndent(upstreamParsers, "", " ") + if err != nil { + return fmt.Errorf("failed marshaling upstream parsers : %s", err) + } + err = os.WriteFile(upstreamParsersFname, upstreamParsersContent, 0644) + if err != nil { + return fmt.Errorf("unable to write to %s %s : %s", itemType, upstreamParsersFname, err) + } + clog.Infof("Wrote %d entries for %s to %s", len(upstreamParsers), itemType, upstreamParsersFname) + } + + return nil +} + +/* + Backup crowdsec configurations to directory : - Main config (config.yaml) - Profiles config (profiles.yaml) @@ -19,6 +87,7 @@ import ( - Backup of API credentials (local API and online API) - List of scenarios, parsers, postoverflows and collections that are up-to-date - Tainted/local/out-of-date scenarios, parsers, postoverflows and collections +- Acquisition files (acquis.yaml, acquis.d/*.yaml) */ func backupConfigToDirectory(dirPath string) error { var err error @@ -120,7 +189,7 @@ func backupConfigToDirectory(dirPath string) error { log.Infof("Saved profiles to %s", backupProfiles) } - if err = BackupHub(dirPath); err != nil { + if err = backupHub(dirPath); err != nil { return fmt.Errorf("failed to backup hub config: %s", err) } @@ -128,15 +197,10 @@ func backupConfigToDirectory(dirPath string) error { } func runConfigBackup(cmd *cobra.Command, args []string) error { - if err := csConfig.LoadHub(); err != nil { + if err := require.Hub(csConfig); err != nil { return err } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - return fmt.Errorf("failed to get Hub index: %w", err) - } - if err := backupConfigToDirectory(args[0]); err != nil { return fmt.Errorf("failed to backup config: %w", err) } diff --git a/cmd/crowdsec-cli/config_feature_flags.go b/cmd/crowdsec-cli/config_feature_flags.go index ed672711f..838d8a0c1 100644 --- a/cmd/crowdsec-cli/config_feature_flags.go +++ b/cmd/crowdsec-cli/config_feature_flags.go @@ -2,10 +2,12 @@ package main import ( "fmt" + "path/filepath" "github.com/fatih/color" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/fflag" ) @@ -87,7 +89,14 @@ func runConfigFeatureFlags(cmd *cobra.Command, args []string) error { fmt.Println("To enable a feature you can: ") fmt.Println(" - set the environment variable CROWDSEC_FEATURE_ to true") - fmt.Printf(" - add the line '- ' to the file %s/feature.yaml\n", csConfig.ConfigPaths.ConfigDir) + + featurePath, err := filepath.Abs(csconfig.GetFeatureFilePath(ConfigFilePath)) + if err != nil { + // we already read the file, shouldn't happen + return err + } + + fmt.Printf(" - add the line '- ' to the file %s\n", featurePath) fmt.Println() if len(enabled) == 0 && len(disabled) == 0 { diff --git a/cmd/crowdsec-cli/config_restore.go b/cmd/crowdsec-cli/config_restore.go index 55ab7aa9b..395e943bc 100644 --- a/cmd/crowdsec-cli/config_restore.go +++ b/cmd/crowdsec-cli/config_restore.go @@ -11,6 +11,7 @@ import ( "github.com/spf13/cobra" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) @@ -20,7 +21,126 @@ type OldAPICfg struct { Password string `json:"password"` } -/* Restore crowdsec configurations to directory : +// it's a rip of the cli version, but in silent-mode +func silentInstallItem(name string, obtype string) (string, error) { + var item = cwhub.GetItem(obtype, name) + if item == nil { + return "", fmt.Errorf("error 
retrieving item") + } + if downloadOnly && item.Downloaded && item.UpToDate { + return fmt.Sprintf("%s is already downloaded and up-to-date", item.Name), nil + } + err := cwhub.DownloadLatest(csConfig.Hub, item, forceAction, false) + if err != nil { + return "", fmt.Errorf("error while downloading %s : %v", item.Name, err) + } + if err := cwhub.AddItem(obtype, *item); err != nil { + return "", err + } + + if downloadOnly { + return fmt.Sprintf("Downloaded %s to %s", item.Name, csConfig.Cscli.HubDir+"/"+item.RemotePath), nil + } + err = cwhub.EnableItem(csConfig.Hub, item) + if err != nil { + return "", fmt.Errorf("error while enabling %s : %v", item.Name, err) + } + if err := cwhub.AddItem(obtype, *item); err != nil { + return "", err + } + return fmt.Sprintf("Enabled %s", item.Name), nil +} + +func restoreHub(dirPath string) error { + var err error + + if err := csConfig.LoadHub(); err != nil { + return err + } + + cwhub.SetHubBranch() + + for _, itype := range cwhub.ItemTypes { + itemDirectory := fmt.Sprintf("%s/%s/", dirPath, itype) + if _, err = os.Stat(itemDirectory); err != nil { + log.Infof("no %s in backup", itype) + continue + } + /*restore the upstream items*/ + upstreamListFN := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itype) + file, err := os.ReadFile(upstreamListFN) + if err != nil { + return fmt.Errorf("error while opening %s : %s", upstreamListFN, err) + } + var upstreamList []string + err = json.Unmarshal(file, &upstreamList) + if err != nil { + return fmt.Errorf("error unmarshaling %s : %s", upstreamListFN, err) + } + for _, toinstall := range upstreamList { + label, err := silentInstallItem(toinstall, itype) + if err != nil { + log.Errorf("Error while installing %s : %s", toinstall, err) + } else if label != "" { + log.Infof("Installed %s : %s", toinstall, label) + } else { + log.Printf("Installed %s : ok", toinstall) + } + } + + /*restore the local and tainted items*/ + files, err := os.ReadDir(itemDirectory) + if err != nil { + return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory, err) + } + for _, file := range files { + //this was the upstream data + if file.Name() == fmt.Sprintf("upstream-%s.json", itype) { + continue + } + if itype == cwhub.PARSERS || itype == cwhub.PARSERS_OVFLW { + //we expect a stage here + if !file.IsDir() { + continue + } + stage := file.Name() + stagedir := fmt.Sprintf("%s/%s/%s/", csConfig.ConfigPaths.ConfigDir, itype, stage) + log.Debugf("Found stage %s in %s, target directory : %s", stage, itype, stagedir) + if err = os.MkdirAll(stagedir, os.ModePerm); err != nil { + return fmt.Errorf("error while creating stage directory %s : %s", stagedir, err) + } + /*find items*/ + ifiles, err := os.ReadDir(itemDirectory + "/" + stage + "/") + if err != nil { + return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory+"/"+stage, err) + } + //finally copy item + for _, tfile := range ifiles { + log.Infof("Going to restore local/tainted [%s]", tfile.Name()) + sourceFile := fmt.Sprintf("%s/%s/%s", itemDirectory, stage, tfile.Name()) + destinationFile := fmt.Sprintf("%s%s", stagedir, tfile.Name()) + if err = CopyFile(sourceFile, destinationFile); err != nil { + return fmt.Errorf("failed copy %s %s to %s : %s", itype, sourceFile, destinationFile, err) + } + log.Infof("restored %s to %s", sourceFile, destinationFile) + } + } else { + log.Infof("Going to restore local/tainted [%s]", file.Name()) + sourceFile := fmt.Sprintf("%s/%s", itemDirectory, file.Name()) + destinationFile := fmt.Sprintf("%s/%s/%s", 
csConfig.ConfigPaths.ConfigDir, itype, file.Name()) + if err = CopyFile(sourceFile, destinationFile); err != nil { + return fmt.Errorf("failed copy %s %s to %s : %s", itype, sourceFile, destinationFile, err) + } + log.Infof("restored %s to %s", sourceFile, destinationFile) + } + + } + } + return nil +} + +/* + Restore crowdsec configurations to directory : - Main config (config.yaml) - Profiles config (profiles.yaml) @@ -28,6 +148,7 @@ type OldAPICfg struct { - Backup of API credentials (local API and online API) - List of scenarios, parsers, postoverflows and collections that are up-to-date - Tainted/local/out-of-date scenarios, parsers, postoverflows and collections +- Acquisition files (acquis.yaml, acquis.d/*.yaml) */ func restoreConfigFromDirectory(dirPath string, oldBackup bool) error { var err error @@ -111,7 +232,7 @@ func restoreConfigFromDirectory(dirPath string, oldBackup bool) error { /*if there is a acquisition dir, restore its content*/ if csConfig.Crowdsec.AcquisitionDirPath != "" { - if err = os.Mkdir(csConfig.Crowdsec.AcquisitionDirPath, 0o700); err != nil { + if err = os.MkdirAll(csConfig.Crowdsec.AcquisitionDirPath, 0o700); err != nil { return fmt.Errorf("error while creating %s : %s", csConfig.Crowdsec.AcquisitionDirPath, err) } } @@ -166,7 +287,7 @@ func restoreConfigFromDirectory(dirPath string, oldBackup bool) error { } } - if err = RestoreHub(dirPath); err != nil { + if err = restoreHub(dirPath); err != nil { return fmt.Errorf("failed to restore hub config : %s", err) } @@ -181,15 +302,10 @@ func runConfigRestore(cmd *cobra.Command, args []string) error { return err } - if err := csConfig.LoadHub(); err != nil { + if err := require.Hub(csConfig); err != nil { return err } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - return fmt.Errorf("failed to get Hub index: %w", err) - } - if err := restoreConfigFromDirectory(args[0], oldBackup); err != nil { return fmt.Errorf("failed to restore config from %s: %w", args[0], err) } diff --git a/cmd/crowdsec-cli/config_show.go b/cmd/crowdsec-cli/config_show.go index b7675faff..9f5b11fc1 100644 --- a/cmd/crowdsec-cli/config_show.go +++ b/cmd/crowdsec-cli/config_show.go @@ -57,7 +57,6 @@ func showConfigKey(key string) error { var configShowTemplate = `Global: {{- if .ConfigPaths }} - - Configuration Folder : {{.ConfigPaths.ConfigDir}} - Configuration Folder : {{.ConfigPaths.ConfigDir}} - Data Folder : {{.ConfigPaths.DataDir}} - Hub Folder : {{.ConfigPaths.HubDir}} @@ -164,6 +163,12 @@ Central API: - User : {{.DbConfig.User}} - DB Name : {{.DbConfig.DbName}} {{- end }} +{{- if .DbConfig.MaxOpenConns }} + - Max Open Conns : {{.DbConfig.MaxOpenConns}} +{{- end }} +{{- if ne .DbConfig.DecisionBulkSize 0 }} + - Decision Bulk Size : {{.DbConfig.DecisionBulkSize}} +{{- end }} {{- if .DbConfig.Flush }} {{- if .DbConfig.Flush.MaxAge }} - Flush age : {{.DbConfig.Flush.MaxAge}} diff --git a/cmd/crowdsec-cli/console.go b/cmd/crowdsec-cli/console.go index ab01cd986..439a8143b 100644 --- a/cmd/crowdsec-cli/console.go +++ b/cmd/crowdsec-cli/console.go @@ -39,7 +39,7 @@ func NewConsoleCmd() *cobra.Command { if err := require.CAPI(csConfig); err != nil { return err } - if err := require.Enrolled(csConfig); err != nil { + if err := require.CAPIRegistered(csConfig); err != nil { return err } return nil @@ -64,25 +64,20 @@ After running this command your will need to validate the enrollment in the weba `, Args: cobra.ExactArgs(1), DisableAutoGenTag: true, - Run: func(cmd 
*cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { password := strfmt.Password(csConfig.API.Server.OnlineClient.Credentials.Password) apiURL, err := url.Parse(csConfig.API.Server.OnlineClient.Credentials.URL) if err != nil { - log.Fatalf("Could not parse CAPI URL : %s", err) + return fmt.Errorf("could not parse CAPI URL: %s", err) } - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) + if err := require.Hub(csConfig); err != nil { + return err } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Fatalf("Failed to load hub index : %s", err) - log.Info("Run 'sudo cscli hub update' to get the hub index") - } - - scenarios, err := cwhub.GetInstalledScenariosAsString() + scenarios, err := cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS) if err != nil { - log.Fatalf("failed to get scenarios : %s", err) + return fmt.Errorf("failed to get installed scenarios: %s", err) } if len(scenarios) == 0 { @@ -99,20 +94,21 @@ After running this command your will need to validate the enrollment in the weba }) resp, err := c.Auth.EnrollWatcher(context.Background(), args[0], name, tags, overwrite) if err != nil { - log.Fatalf("Could not enroll instance: %s", err) + return fmt.Errorf("could not enroll instance: %s", err) } if resp.Response.StatusCode == 200 && !overwrite { log.Warning("Instance already enrolled. You can use '--overwrite' to force enroll") - return + return nil } - SetConsoleOpts(csconfig.CONSOLE_CONFIGS, true) - if err := csConfig.API.Server.DumpConsoleConfig(); err != nil { - log.Fatalf("failed writing console config : %s", err) + if err := SetConsoleOpts([]string{csconfig.SEND_MANUAL_SCENARIOS, csconfig.SEND_TAINTED_SCENARIOS}, true); err != nil { + return err } - log.Infof("Enabled tainted&manual alerts sharing, see 'cscli console status'.") - log.Infof("Watcher successfully enrolled. Visit https://app.crowdsec.net to accept it.") - log.Infof("Please restart crowdsec after accepting the enrollment.") + + log.Info("Enabled tainted&manual alerts sharing, see 'cscli console status'.") + log.Info("Watcher successfully enrolled. Visit https://app.crowdsec.net to accept it.") + log.Info("Please restart crowdsec after accepting the enrollment.") + return nil }, } cmdEnroll.Flags().StringVarP(&name, "name", "n", "", "Name to display in the console") @@ -130,21 +126,23 @@ After running this command your will need to validate the enrollment in the weba Enable given information push to the central API. Allows to empower the console`, ValidArgs: csconfig.CONSOLE_CONFIGS, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if enableAll { - SetConsoleOpts(csconfig.CONSOLE_CONFIGS, true) + if err := SetConsoleOpts(csconfig.CONSOLE_CONFIGS, true); err != nil { + return err + } log.Infof("All features have been enabled successfully") } else { if len(args) == 0 { - log.Fatalf("You must specify at least one feature to enable") + return fmt.Errorf("you must specify at least one feature to enable") + } + if err := SetConsoleOpts(args, true); err != nil { + return err } - SetConsoleOpts(args, true) log.Infof("%v have been enabled", args) } - if err := csConfig.API.Server.DumpConsoleConfig(); err != nil { - log.Fatalf("failed writing console config : %s", err) - } log.Infof(ReloadMessage()) + return nil }, } cmdEnable.Flags().BoolVarP(&enableAll, "all", "a", false, "Enable all console options") @@ -157,49 +155,55 @@ Enable given information push to the central API. 
Allows to empower the console` Long: ` Disable given information push to the central API.`, ValidArgs: csconfig.CONSOLE_CONFIGS, - Args: cobra.MinimumNArgs(1), DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { - if disableAll { - SetConsoleOpts(csconfig.CONSOLE_CONFIGS, false) - } else { - SetConsoleOpts(args, false) - } - - if err := csConfig.API.Server.DumpConsoleConfig(); err != nil { - log.Fatalf("failed writing console config : %s", err) - } + RunE: func(cmd *cobra.Command, args []string) error { if disableAll { + if err := SetConsoleOpts(csconfig.CONSOLE_CONFIGS, false); err != nil { + return err + } log.Infof("All features have been disabled") } else { + if err := SetConsoleOpts(args, false); err != nil { + return err + } log.Infof("%v have been disabled", args) } + log.Infof(ReloadMessage()) + return nil }, } cmdDisable.Flags().BoolVarP(&disableAll, "all", "a", false, "Disable all console options") cmdConsole.AddCommand(cmdDisable) cmdConsoleStatus := &cobra.Command{ - Use: "status [option]", - Short: "Shows status of one or all console options", + Use: "status", + Short: "Shows status of the console options", Example: `sudo cscli console status`, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { switch csConfig.Cscli.Output { case "human": cmdConsoleStatusTable(color.Output, *csConfig) case "json": - data, err := json.MarshalIndent(csConfig.API.Server.ConsoleConfig, "", " ") - if err != nil { - log.Fatalf("failed to marshal configuration: %s", err) + c := csConfig.API.Server.ConsoleConfig + out := map[string](*bool){ + csconfig.SEND_MANUAL_SCENARIOS: c.ShareManualDecisions, + csconfig.SEND_CUSTOM_SCENARIOS: c.ShareCustomScenarios, + csconfig.SEND_TAINTED_SCENARIOS: c.ShareTaintedScenarios, + csconfig.SEND_CONTEXT: c.ShareContext, + csconfig.CONSOLE_MANAGEMENT: c.ConsoleManagement, } - fmt.Printf("%s\n", string(data)) + data, err := json.MarshalIndent(out, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal configuration: %s", err) + } + fmt.Println(string(data)) case "raw": csvwriter := csv.NewWriter(os.Stdout) err := csvwriter.Write([]string{"option", "enabled"}) if err != nil { - log.Fatal(err) + return err } rows := [][]string{ @@ -212,11 +216,12 @@ Disable given information push to the central API.`, for _, row := range rows { err = csvwriter.Write(row) if err != nil { - log.Fatal(err) + return err } } csvwriter.Flush() } + return nil }, } cmdConsole.AddCommand(cmdConsoleStatus) @@ -224,7 +229,25 @@ Disable given information push to the central API.`, return cmdConsole } -func SetConsoleOpts(args []string, wanted bool) { +func dumpConsoleConfig(c *csconfig.LocalApiServerCfg) error { + out, err := yaml.Marshal(c.ConsoleConfig) + if err != nil { + return fmt.Errorf("while marshaling ConsoleConfig (for %s): %w", c.ConsoleConfigPath, err) + } + + if c.ConsoleConfigPath == "" { + c.ConsoleConfigPath = csconfig.DefaultConsoleConfigFilePath + log.Debugf("Empty console_path, defaulting to %s", c.ConsoleConfigPath) + } + + if err := os.WriteFile(c.ConsoleConfigPath, out, 0600); err != nil { + return fmt.Errorf("while dumping console config to %s: %w", c.ConsoleConfigPath, err) + } + + return nil +} + +func SetConsoleOpts(args []string, wanted bool) error { for _, arg := range args { switch arg { case csconfig.CONSOLE_MANAGEMENT: @@ -255,12 +278,12 @@ func SetConsoleOpts(args []string, wanted bool) { if changed { fileContent, err := 
yaml.Marshal(csConfig.API.Server.OnlineClient.Credentials) if err != nil { - log.Fatalf("Cannot marshal credentials: %s", err) + return fmt.Errorf("cannot marshal credentials: %s", err) } log.Infof("Updating credentials file: %s", csConfig.API.Server.OnlineClient.CredentialsFilePath) err = os.WriteFile(csConfig.API.Server.OnlineClient.CredentialsFilePath, fileContent, 0600) if err != nil { - log.Fatalf("Cannot write credentials file: %s", err) + return fmt.Errorf("cannot write credentials file: %s", err) } } } @@ -317,8 +340,13 @@ func SetConsoleOpts(args []string, wanted bool) { csConfig.API.Server.ConsoleConfig.ShareContext = ptr.Of(wanted) } default: - log.Fatalf("unknown flag %s", arg) + return fmt.Errorf("unknown flag %s", arg) } } + if err := dumpConsoleConfig(csConfig.API.Server); err != nil { + return fmt.Errorf("failed writing console config: %s", err) + } + + return nil } diff --git a/cmd/crowdsec-cli/decisions.go b/cmd/crowdsec-cli/decisions.go index 49d6ddd60..985b4d2d8 100644 --- a/cmd/crowdsec-cli/decisions.go +++ b/cmd/crowdsec-cli/decisions.go @@ -81,6 +81,12 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error } csvwriter.Flush() } else if csConfig.Cscli.Output == "json" { + if *alerts == nil { + // avoid returning "null" in `json" + // could be cleaner if we used slice of alerts directly + fmt.Println("[]") + return nil + } x, _ := json.MarshalIndent(alerts, "", " ") fmt.Printf("%s", string(x)) } else if csConfig.Cscli.Output == "human" { diff --git a/cmd/crowdsec-cli/decisions_import.go b/cmd/crowdsec-cli/decisions_import.go index e7ba1d83f..56fc37c87 100644 --- a/cmd/crowdsec-cli/decisions_import.go +++ b/cmd/crowdsec-cli/decisions_import.go @@ -188,7 +188,9 @@ func runDecisionsImport(cmd *cobra.Command, args []string) error { } } - alerts := models.AddAlertsRequest{} + if len(decisions) > 1000 { + log.Infof("You are about to add %d decisions, this may take a while", len(decisions)) + } for _, chunk := range slicetools.Chunks(decisions, batchSize) { log.Debugf("Processing chunk of %d decisions", len(chunk)) @@ -212,16 +214,11 @@ func runDecisionsImport(cmd *cobra.Command, args []string) error { ScenarioVersion: ptr.Of(""), Decisions: chunk, } - alerts = append(alerts, &importAlert) - } - if len(decisions) > 1000 { - log.Infof("You are about to add %d decisions, this may take a while", len(decisions)) - } - - _, _, err = Client.Alerts.Add(context.Background(), alerts) - if err != nil { - return err + _, _, err = Client.Alerts.Add(context.Background(), models.AddAlertsRequest{&importAlert}) + if err != nil { + return err + } } log.Infof("Imported %d decisions", len(decisions)) diff --git a/cmd/crowdsec-cli/explain.go b/cmd/crowdsec-cli/explain.go index d9b1ae31d..0c33a845e 100644 --- a/cmd/crowdsec-cli/explain.go +++ b/cmd/crowdsec-cli/explain.go @@ -12,9 +12,22 @@ import ( "github.com/spf13/cobra" "github.com/crowdsecurity/crowdsec/pkg/hubtest" - "github.com/crowdsecurity/crowdsec/pkg/types" ) +func GetLineCountForFile(filepath string) (int, error) { + f, err := os.Open(filepath) + if err != nil { + return 0, err + } + defer f.Close() + lc := 0 + fs := bufio.NewScanner(f) + for fs.Scan() { + lc++ + } + return lc, nil +} + func runExplain(cmd *cobra.Command, args []string) error { flags := cmd.Flags() @@ -61,6 +74,11 @@ func runExplain(cmd *cobra.Command, args []string) error { return err } + labels, err := flags.GetString("labels") + if err != nil { + return err + } + fileInfo, _ := os.Stdin.Stat() if logType == "" || (logLine == "" && 
logFile == "" && dsn == "") { @@ -123,9 +141,12 @@ func runExplain(cmd *cobra.Command, args []string) error { return fmt.Errorf("unable to get absolute path of '%s', exiting", logFile) } dsn = fmt.Sprintf("file://%s", absolutePath) - lineCount := types.GetLineCountForFile(absolutePath) + lineCount, err := GetLineCountForFile(absolutePath) + if err != nil { + return err + } if lineCount > 100 { - log.Warnf("log file contains %d lines. This may take lot of resources.", lineCount) + log.Warnf("The log file contains %d lines. This may take a lot of resources.", lineCount) } } @@ -134,6 +155,10 @@ func runExplain(cmd *cobra.Command, args []string) error { } cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"} + if labels != "" { + log.Debugf("adding labels %s", labels) + cmdArgs = append(cmdArgs, "-label", labels) + } crowdsecCmd := exec.Command(crowdsec, cmdArgs...) output, err := crowdsecCmd.CombinedOutput() if err != nil { @@ -193,6 +218,7 @@ tail -n 5 myfile.log | cscli explain --type nginx -f - flags.StringP("dsn", "d", "", "DSN to test") flags.StringP("log", "l", "", "Log line to test") flags.StringP("type", "t", "", "Type of the acquisition to test") + flags.String("labels", "", "Additional labels to add to the acquisition format (key:value,key2:value2)") flags.BoolP("verbose", "v", false, "Display individual changes") flags.Bool("failures", false, "Only show failed lines") flags.Bool("only-successful-parsers", false, "Only show successful parsers") diff --git a/cmd/crowdsec-cli/hub.go b/cmd/crowdsec-cli/hub.go index 4fec8fc8d..7bdfd5162 100644 --- a/cmd/crowdsec-cli/hub.go +++ b/cmd/crowdsec-cli/hub.go @@ -8,6 +8,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) @@ -50,17 +51,13 @@ func NewHubListCmd() *cobra.Command { Short: "List installed configs", Args: cobra.ExactArgs(0), DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { + if err := require.Hub(csConfig); err != nil { + return err + } - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) - } - //use LocalSync to get warnings about tainted / outdated items - _, warn := cwhub.LocalSync(csConfig.Hub) + // use LocalSync to get warnings about tainted / outdated items + warn, _ := cwhub.LocalSync(csConfig.Hub) for _, v := range warn { log.Info(v) } @@ -68,6 +65,8 @@ func NewHubListCmd() *cobra.Command { ListItems(color.Output, []string{ cwhub.COLLECTIONS, cwhub.PARSERS, cwhub.SCENARIOS, cwhub.PARSERS_OVFLW, }, args, true, false, all) + + return nil }, } cmdHubList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well") @@ -89,31 +88,31 @@ Fetches the [.index.json](https://github.com/crowdsecurity/hub/blob/master/.inde return fmt.Errorf("you must configure cli before interacting with hub") } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } + cwhub.SetHubBranch() + return nil }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) + return err } if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil { - 
if errors.Is(err, cwhub.ErrIndexNotFound) { - log.Warnf("Could not find index file for branch '%s', using 'master'", cwhub.HubBranch) - cwhub.HubBranch = "master" - if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil { - log.Fatalf("Failed to get Hub index after retry : %v", err) - } - } else { - log.Fatalf("Failed to get Hub index : %v", err) + if !errors.Is(err, cwhub.ErrIndexNotFound) { + return fmt.Errorf("failed to get Hub index : %w", err) + } + log.Warnf("Could not find index file for branch '%s', using 'master'", cwhub.HubBranch) + cwhub.HubBranch = "master" + if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil { + return fmt.Errorf("failed to get Hub index after retry: %w", err) } } - //use LocalSync to get warnings about tainted / outdated items - _, warn := cwhub.LocalSync(csConfig.Hub) + // use LocalSync to get warnings about tainted / outdated items + warn, _ := cwhub.LocalSync(csConfig.Hub) for _, v := range warn { log.Info(v) } + + return nil }, } @@ -134,18 +133,13 @@ Upgrade all configs installed from Crowdsec Hub. Run 'sudo cscli hub update' if return fmt.Errorf("you must configure cli before interacting with hub") } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } + cwhub.SetHubBranch() + return nil }, - Run: func(cmd *cobra.Command, args []string) { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) + RunE: func(cmd *cobra.Command, args []string) error { + if err := require.Hub(csConfig); err != nil { + return err } log.Infof("Upgrading collections") @@ -156,6 +150,8 @@ Upgrade all configs installed from Crowdsec Hub. 
Run 'sudo cscli hub update' if cwhub.UpgradeConfig(csConfig, cwhub.SCENARIOS, "", forceAction) log.Infof("Upgrading postoverflows") cwhub.UpgradeConfig(csConfig, cwhub.PARSERS_OVFLW, "", forceAction) + + return nil }, } cmdHubUpgrade.PersistentFlags().BoolVar(&forceAction, "force", false, "Force upgrade : Overwrite tainted and outdated files") diff --git a/cmd/crowdsec-cli/lapi.go b/cmd/crowdsec-cli/lapi.go index 049f40a0d..37ee0088c 100644 --- a/cmd/crowdsec-cli/lapi.go +++ b/cmd/crowdsec-cli/lapi.go @@ -5,17 +5,18 @@ import ( "fmt" "net/url" "os" + "slices" "sort" "strings" "github.com/go-openapi/strfmt" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "golang.org/x/exp/slices" "gopkg.in/yaml.v2" "github.com/crowdsecurity/go-cs-lib/version" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/alertcontext" "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" @@ -36,15 +37,12 @@ func runLapiStatus(cmd *cobra.Command, args []string) error { if err != nil { log.Fatalf("parsing api url ('%s'): %s", apiurl, err) } - if err := csConfig.LoadHub(); err != nil { + + if err := require.Hub(csConfig); err != nil { log.Fatal(err) } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to load hub index : %s", err) - } - scenarios, err := cwhub.GetInstalledScenariosAsString() + scenarios, err := cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS) if err != nil { log.Fatalf("failed to get scenarios : %s", err) } @@ -216,6 +214,29 @@ func NewLapiCmd() *cobra.Command { return cmdLapi } +func AddContext(key string, values []string) error { + if err := alertcontext.ValidateContextExpr(key, values); err != nil { + return fmt.Errorf("invalid context configuration :%s", err) + } + if _, ok := csConfig.Crowdsec.ContextToSend[key]; !ok { + csConfig.Crowdsec.ContextToSend[key] = make([]string, 0) + log.Infof("key '%s' added", key) + } + data := csConfig.Crowdsec.ContextToSend[key] + for _, val := range values { + if !slices.Contains(data, val) { + log.Infof("value '%s' added to key '%s'", val, key) + data = append(data, val) + } + csConfig.Crowdsec.ContextToSend[key] = data + } + if err := csConfig.Crowdsec.DumpContextConfigFile(); err != nil { + return err + } + + return nil +} + func NewLapiContextCmd() *cobra.Command { cmdContext := &cobra.Command{ Use: "context [command]", @@ -246,32 +267,29 @@ func NewLapiContextCmd() *cobra.Command { Short: "Add context to send with alerts. 
You must specify the output key with the expr value you want", Example: `cscli lapi context add --key source_ip --value evt.Meta.source_ip cscli lapi context add --key file_source --value evt.Line.Src +cscli lapi context add --value evt.Meta.source_ip --value evt.Meta.target_user `, DisableAutoGenTag: true, Run: func(cmd *cobra.Command, args []string) { - if err := alertcontext.ValidateContextExpr(keyToAdd, valuesToAdd); err != nil { - log.Fatalf("invalid context configuration :%s", err) - } - if _, ok := csConfig.Crowdsec.ContextToSend[keyToAdd]; !ok { - csConfig.Crowdsec.ContextToSend[keyToAdd] = make([]string, 0) - log.Infof("key '%s' added", keyToAdd) - } - data := csConfig.Crowdsec.ContextToSend[keyToAdd] - for _, val := range valuesToAdd { - if !slices.Contains(data, val) { - log.Infof("value '%s' added to key '%s'", val, keyToAdd) - data = append(data, val) + if keyToAdd != "" { + if err := AddContext(keyToAdd, valuesToAdd); err != nil { + log.Fatalf(err.Error()) } - csConfig.Crowdsec.ContextToSend[keyToAdd] = data + return } - if err := csConfig.Crowdsec.DumpContextConfigFile(); err != nil { - log.Fatalf(err.Error()) + + for _, v := range valuesToAdd { + keySlice := strings.Split(v, ".") + key := keySlice[len(keySlice)-1] + value := []string{v} + if err := AddContext(key, value); err != nil { + log.Fatalf(err.Error()) + } } }, } cmdContextAdd.Flags().StringVarP(&keyToAdd, "key", "k", "", "The key of the different values to send") cmdContextAdd.Flags().StringSliceVar(&valuesToAdd, "value", []string{}, "The expr fields to associate with the key") - cmdContextAdd.MarkFlagRequired("key") cmdContextAdd.MarkFlagRequired("value") cmdContext.AddCommand(cmdContextAdd) diff --git a/cmd/crowdsec-cli/machines.go b/cmd/crowdsec-cli/machines.go index 5b4daa540..6c97c0109 100644 --- a/cmd/crowdsec-cli/machines.go +++ b/cmd/crowdsec-cli/machines.go @@ -8,6 +8,7 @@ import ( "io" "math/big" "os" + "slices" "strings" "time" @@ -17,7 +18,6 @@ import ( "github.com/google/uuid" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "golang.org/x/exp/slices" "gopkg.in/yaml.v2" "github.com/crowdsecurity/machineid" diff --git a/cmd/crowdsec-cli/main.go b/cmd/crowdsec-cli/main.go index 35dcb9fad..b7c5da886 100644 --- a/cmd/crowdsec-cli/main.go +++ b/cmd/crowdsec-cli/main.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "slices" "strings" "github.com/fatih/color" @@ -11,7 +12,6 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/cobra/doc" - "golang.org/x/exp/slices" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" @@ -53,11 +53,11 @@ func initConfig() { } if !slices.Contains(NoNeedConfig, os.Args[1]) { + log.Debugf("Using %s as configuration file", ConfigFilePath) csConfig, mergedConfig, err = csconfig.NewConfig(ConfigFilePath, false, false, true) if err != nil { log.Fatal(err) } - log.Debugf("Using %s as configuration file", ConfigFilePath) if err := csConfig.LoadCSCLI(); err != nil { log.Fatal(err) } @@ -188,7 +188,7 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall /*usage*/ var cmdVersion = &cobra.Command{ Use: "version", - Short: "Display version and exit.", + Short: "Display version", Args: cobra.ExactArgs(0), DisableAutoGenTag: true, Run: func(cmd *cobra.Command, args []string) { diff --git a/cmd/crowdsec-cli/parsers.go b/cmd/crowdsec-cli/parsers.go index 9b810238b..d97b070db 100644 --- a/cmd/crowdsec-cli/parsers.go +++ b/cmd/crowdsec-cli/parsers.go @@ -7,10 +7,10 @@ import 
( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) - func NewParsersCmd() *cobra.Command { var cmdParsers = &cobra.Command{ Use: "parsers [action] [config]", @@ -25,21 +25,10 @@ cscli parsers remove crowdsecurity/sshd-logs Aliases: []string{"parser"}, DisableAutoGenTag: true, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if csConfig.Hub == nil { - return fmt.Errorf("you must configure cli before interacting with hub") + if err := require.Hub(csConfig); err != nil { + return err } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) - } return nil }, PersistentPostRun: func(cmd *cobra.Command, args []string) { @@ -59,7 +48,6 @@ cscli parsers remove crowdsecurity/sshd-logs return cmdParsers } - func NewParsersInstallCmd() *cobra.Command { var ignoreError bool @@ -73,7 +61,7 @@ func NewParsersInstallCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compAllItems(cwhub.PARSERS, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { for _, name := range args { t := cwhub.GetItem(cwhub.PARSERS, name) if t == nil { @@ -82,15 +70,16 @@ func NewParsersInstallCmd() *cobra.Command { continue } if err := cwhub.InstallItem(csConfig, name, cwhub.PARSERS, forceAction, downloadOnly); err != nil { - if ignoreError { - log.Errorf("Error while installing '%s': %s", name, err) - } else { - log.Fatalf("Error while installing '%s': %s", name, err) + if !ignoreError { + return fmt.Errorf("error while installing '%s': %w", name, err) } + log.Errorf("Error while installing '%s': %s", name, err) } } + return nil }, } + cmdParsersInstall.PersistentFlags().BoolVarP(&downloadOnly, "download-only", "d", false, "Only download packages, don't enable") cmdParsersInstall.PersistentFlags().BoolVar(&forceAction, "force", false, "Force install : Overwrite tainted and outdated files") cmdParsersInstall.PersistentFlags().BoolVar(&ignoreError, "ignore", false, "Ignore errors when installing multiple parsers") @@ -98,33 +87,35 @@ func NewParsersInstallCmd() *cobra.Command { return cmdParsersInstall } - func NewParsersRemoveCmd() *cobra.Command { - var cmdParsersRemove = &cobra.Command{ + cmdParsersRemove := &cobra.Command{ Use: "remove [config]", Short: "Remove given parser(s)", Long: `Remove given parse(s) from hub`, - Aliases: []string{"delete"}, Example: `cscli parsers remove crowdsec/xxx crowdsec/xyz`, + Aliases: []string{"delete"}, DisableAutoGenTag: true, ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.PARSERS, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.RemoveMany(csConfig, cwhub.PARSERS, "", all, purge, forceAction) - return + return nil } if len(args) == 0 { - log.Fatalf("Specify at least one parser to remove or '--all' flag.") + return fmt.Errorf("specify at least one parser to remove or '--all'") } for _, name := range args { 
cwhub.RemoveMany(csConfig, cwhub.PARSERS, name, all, purge, forceAction) } + + return nil }, } + cmdParsersRemove.PersistentFlags().BoolVar(&purge, "purge", false, "Delete source file too") cmdParsersRemove.PersistentFlags().BoolVar(&forceAction, "force", false, "Force remove : Remove tainted and outdated files") cmdParsersRemove.PersistentFlags().BoolVar(&all, "all", false, "Delete all the parsers") @@ -132,9 +123,8 @@ func NewParsersRemoveCmd() *cobra.Command { return cmdParsersRemove } - func NewParsersUpgradeCmd() *cobra.Command { - var cmdParsersUpgrade = &cobra.Command{ + cmdParsersUpgrade := &cobra.Command{ Use: "upgrade [config]", Short: "Upgrade given parser(s)", Long: `Fetch and upgrade given parser(s) from hub`, @@ -143,26 +133,27 @@ func NewParsersUpgradeCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.PARSERS, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.UpgradeConfig(csConfig, cwhub.PARSERS, "", forceAction) } else { if len(args) == 0 { - log.Fatalf("no target parser to upgrade") + return fmt.Errorf("specify at least one parser to upgrade or '--all'") } for _, name := range args { cwhub.UpgradeConfig(csConfig, cwhub.PARSERS, name, forceAction) } } + return nil }, } + cmdParsersUpgrade.PersistentFlags().BoolVar(&all, "all", false, "Upgrade all the parsers") cmdParsersUpgrade.PersistentFlags().BoolVar(&forceAction, "force", false, "Force upgrade : Overwrite tainted and outdated files") return cmdParsersUpgrade } - func NewParsersInspectCmd() *cobra.Command { var cmdParsersInspect = &cobra.Command{ Use: "inspect [name]", @@ -178,12 +169,12 @@ func NewParsersInspectCmd() *cobra.Command { InspectItem(args[0], cwhub.PARSERS) }, } + cmdParsersInspect.PersistentFlags().StringVarP(&prometheusURL, "url", "u", "", "Prometheus url") return cmdParsersInspect } - func NewParsersListCmd() *cobra.Command { var cmdParsersList = &cobra.Command{ Use: "list [name]", @@ -196,6 +187,7 @@ cscli parser list crowdsecurity/xxx`, ListItems(color.Output, []string{cwhub.PARSERS}, args, false, true, all) }, } + cmdParsersList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well") return cmdParsersList diff --git a/cmd/crowdsec-cli/postoverflows.go b/cmd/crowdsec-cli/postoverflows.go index 19cffccd2..f4db0a79e 100644 --- a/cmd/crowdsec-cli/postoverflows.go +++ b/cmd/crowdsec-cli/postoverflows.go @@ -7,9 +7,46 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) +func NewPostOverflowsCmd() *cobra.Command { + cmdPostOverflows := &cobra.Command{ + Use: "postoverflows [action] [config]", + Short: "Install/Remove/Upgrade/Inspect postoverflow(s) from hub", + Example: `cscli postoverflows install crowdsecurity/cdn-whitelist + cscli postoverflows inspect crowdsecurity/cdn-whitelist + cscli postoverflows upgrade crowdsecurity/cdn-whitelist + cscli postoverflows list + cscli postoverflows remove crowdsecurity/cdn-whitelist`, + Args: cobra.MinimumNArgs(1), + Aliases: []string{"postoverflow"}, + DisableAutoGenTag: true, + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + if err := require.Hub(csConfig); err != nil { + return err + } + + return nil + }, + PersistentPostRun: func(cmd *cobra.Command, args []string) { + if cmd.Name() 
== "inspect" || cmd.Name() == "list" { + return + } + log.Infof(ReloadMessage()) + }, + } + + cmdPostOverflows.AddCommand(NewPostOverflowsInstallCmd()) + cmdPostOverflows.AddCommand(NewPostOverflowsRemoveCmd()) + cmdPostOverflows.AddCommand(NewPostOverflowsUpgradeCmd()) + cmdPostOverflows.AddCommand(NewPostOverflowsInspectCmd()) + cmdPostOverflows.AddCommand(NewPostOverflowsListCmd()) + + return cmdPostOverflows +} + func NewPostOverflowsInstallCmd() *cobra.Command { var ignoreError bool @@ -23,7 +60,7 @@ func NewPostOverflowsInstallCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compAllItems(cwhub.PARSERS_OVFLW, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { for _, name := range args { t := cwhub.GetItem(cwhub.PARSERS_OVFLW, name) if t == nil { @@ -32,13 +69,13 @@ func NewPostOverflowsInstallCmd() *cobra.Command { continue } if err := cwhub.InstallItem(csConfig, name, cwhub.PARSERS_OVFLW, forceAction, downloadOnly); err != nil { - if ignoreError { - log.Errorf("Error while installing '%s': %s", name, err) - } else { - log.Fatalf("Error while installing '%s': %s", name, err) + if !ignoreError { + return fmt.Errorf("error while installing '%s': %w", name, err) } + log.Errorf("Error while installing '%s': %s", name, err) } } + return nil }, } @@ -55,24 +92,26 @@ func NewPostOverflowsRemoveCmd() *cobra.Command { Short: "Remove given postoverflow(s)", Long: `remove given postoverflow(s)`, Example: `cscli postoverflows remove crowdsec/xxx crowdsec/xyz`, - DisableAutoGenTag: true, Aliases: []string{"delete"}, + DisableAutoGenTag: true, ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.PARSERS_OVFLW, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.RemoveMany(csConfig, cwhub.PARSERS_OVFLW, "", all, purge, forceAction) - return + return nil } if len(args) == 0 { - log.Fatalf("Specify at least one postoverflow to remove or '--all' flag.") + return fmt.Errorf("specify at least one postoverflow to remove or '--all'") } for _, name := range args { cwhub.RemoveMany(csConfig, cwhub.PARSERS_OVFLW, name, all, purge, forceAction) } + + return nil }, } @@ -93,17 +132,18 @@ func NewPostOverflowsUpgradeCmd() *cobra.Command { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.PARSERS_OVFLW, args, toComplete) }, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.UpgradeConfig(csConfig, cwhub.PARSERS_OVFLW, "", forceAction) } else { if len(args) == 0 { - log.Fatalf("no target postoverflow to upgrade") + return fmt.Errorf("specify at least one postoverflow to upgrade or '--all'") } for _, name := range args { cwhub.UpgradeConfig(csConfig, cwhub.PARSERS_OVFLW, name, forceAction) } } + return nil }, } @@ -120,10 +160,10 @@ func NewPostOverflowsInspectCmd() *cobra.Command { Long: `Inspect given postoverflow`, Example: `cscli postoverflows inspect crowdsec/xxx crowdsec/xyz`, DisableAutoGenTag: true, + Args: cobra.MinimumNArgs(1), ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { return compInstalledItems(cwhub.PARSERS_OVFLW, 
args, toComplete) }, - Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { InspectItem(args[0], cwhub.PARSERS_OVFLW) }, @@ -149,52 +189,3 @@ cscli postoverflows list crowdsecurity/xxx`, return cmdPostOverflowsList } - - - -func NewPostOverflowsCmd() *cobra.Command { - cmdPostOverflows := &cobra.Command{ - Use: "postoverflows [action] [config]", - Short: "Install/Remove/Upgrade/Inspect postoverflow(s) from hub", - Example: `cscli postoverflows install crowdsecurity/cdn-whitelist - cscli postoverflows inspect crowdsecurity/cdn-whitelist - cscli postoverflows upgrade crowdsecurity/cdn-whitelist - cscli postoverflows list - cscli postoverflows remove crowdsecurity/cdn-whitelist`, - Args: cobra.MinimumNArgs(1), - Aliases: []string{"postoverflow"}, - DisableAutoGenTag: true, - PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if csConfig.Hub == nil { - return fmt.Errorf("you must configure cli before interacting with hub") - } - - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) - } - return nil - }, - PersistentPostRun: func(cmd *cobra.Command, args []string) { - if cmd.Name() == "inspect" || cmd.Name() == "list" { - return - } - log.Infof(ReloadMessage()) - }, - } - - cmdPostOverflows.AddCommand(NewPostOverflowsInstallCmd()) - cmdPostOverflows.AddCommand(NewPostOverflowsRemoveCmd()) - cmdPostOverflows.AddCommand(NewPostOverflowsUpgradeCmd()) - cmdPostOverflows.AddCommand(NewPostOverflowsInspectCmd()) - cmdPostOverflows.AddCommand(NewPostOverflowsListCmd()) - - return cmdPostOverflows -} diff --git a/cmd/crowdsec-cli/require/require.go b/cmd/crowdsec-cli/require/require.go index 7a29ccd36..f4129a44f 100644 --- a/cmd/crowdsec-cli/require/require.go +++ b/cmd/crowdsec-cli/require/require.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/crowdsecurity/crowdsec/pkg/csconfig" + "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) func LAPI(c *csconfig.Config) error { @@ -32,7 +33,7 @@ func PAPI(c *csconfig.Config) error { return nil } -func Enrolled(c *csconfig.Config) error { +func CAPIRegistered(c *csconfig.Config) error { if c.API.Server.OnlineClient.Credentials == nil { return fmt.Errorf("the Central API (CAPI) must be configured with 'cscli capi register'") } @@ -63,3 +64,20 @@ func Notifications(c *csconfig.Config) error { return nil } +func Hub (c *csconfig.Config) error { + if err := c.LoadHub(); err != nil { + return err + } + + if c.Hub == nil { + return fmt.Errorf("you must configure cli before interacting with hub") + } + + cwhub.SetHubBranch() + + if err := cwhub.GetHubIdx(c.Hub); err != nil { + return fmt.Errorf("failed to read Hub index: '%w'. 
Run 'sudo cscli hub update' to download the index again", err) + } + + return nil +} diff --git a/cmd/crowdsec-cli/scenarios.go b/cmd/crowdsec-cli/scenarios.go index de52dcb48..01e0b02dc 100644 --- a/cmd/crowdsec-cli/scenarios.go +++ b/cmd/crowdsec-cli/scenarios.go @@ -7,6 +7,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) @@ -24,20 +25,8 @@ cscli scenarios remove crowdsecurity/ssh-bf Aliases: []string{"scenario"}, DisableAutoGenTag: true, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if csConfig.Hub == nil { - return fmt.Errorf("you must configure cli before interacting with hub") - } - - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("while setting hub branch: %w", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) + if err := require.Hub(csConfig); err != nil { + return err } return nil @@ -72,7 +61,7 @@ func NewCmdScenariosInstall() *cobra.Command { return compAllItems(cwhub.SCENARIOS, args, toComplete) }, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { for _, name := range args { t := cwhub.GetItem(cwhub.SCENARIOS, name) if t == nil { @@ -81,13 +70,13 @@ func NewCmdScenariosInstall() *cobra.Command { continue } if err := cwhub.InstallItem(csConfig, name, cwhub.SCENARIOS, forceAction, downloadOnly); err != nil { - if ignoreError { - log.Errorf("Error while installing '%s': %s", name, err) - } else { - log.Fatalf("Error while installing '%s': %s", name, err) + if !ignoreError { + return fmt.Errorf("error while installing '%s': %w", name, err) } + log.Errorf("Error while installing '%s': %s", name, err) } } + return nil }, } cmdScenariosInstall.PersistentFlags().BoolVarP(&downloadOnly, "download-only", "d", false, "Only download packages, don't enable") @@ -108,19 +97,20 @@ func NewCmdScenariosRemove() *cobra.Command { return compInstalledItems(cwhub.SCENARIOS, args, toComplete) }, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.RemoveMany(csConfig, cwhub.SCENARIOS, "", all, purge, forceAction) - return + return nil } if len(args) == 0 { - log.Fatalf("Specify at least one scenario to remove or '--all' flag.") + return fmt.Errorf("specify at least one scenario to remove or '--all'") } for _, name := range args { cwhub.RemoveMany(csConfig, cwhub.SCENARIOS, name, all, purge, forceAction) } + return nil }, } cmdScenariosRemove.PersistentFlags().BoolVar(&purge, "purge", false, "Delete source file too") @@ -140,17 +130,18 @@ func NewCmdScenariosUpgrade() *cobra.Command { return compInstalledItems(cwhub.SCENARIOS, args, toComplete) }, DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if all { cwhub.UpgradeConfig(csConfig, cwhub.SCENARIOS, "", forceAction) } else { if len(args) == 0 { - log.Fatalf("no target scenario to upgrade") + return fmt.Errorf("specify at least one scenario to upgrade or '--all'") } for _, name := range args { cwhub.UpgradeConfig(csConfig, cwhub.SCENARIOS, name, forceAction) } } + return nil }, } cmdScenariosUpgrade.PersistentFlags().BoolVarP(&all, "all", 
"a", false, "Upgrade all the scenarios") diff --git a/cmd/crowdsec-cli/setup.go b/cmd/crowdsec-cli/setup.go index 7f1da4c44..cc0a9a35d 100644 --- a/cmd/crowdsec-cli/setup.go +++ b/cmd/crowdsec-cli/setup.go @@ -112,6 +112,20 @@ func runSetupDetect(cmd *cobra.Command, args []string) error { return err } + var detectReader *os.File + + switch detectConfigFile { + case "-": + log.Tracef("Reading detection rules from stdin") + detectReader = os.Stdin + default: + log.Tracef("Reading detection rules: %s", detectConfigFile) + detectReader, err = os.Open(detectConfigFile) + if err != nil { + return err + } + } + listSupportedServices, err := flags.GetBool("list-supported-services") if err != nil { return err @@ -171,7 +185,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error { } if listSupportedServices { - supported, err := setup.ListSupported(detectConfigFile) + supported, err := setup.ListSupported(detectReader) if err != nil { return err } @@ -195,7 +209,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error { SnubSystemd: snubSystemd, } - hubSetup, err := setup.Detect(detectConfigFile, opts) + hubSetup, err := setup.Detect(detectReader, opts) if err != nil { return fmt.Errorf("detecting services: %w", err) } diff --git a/cmd/crowdsec-cli/simulation.go b/cmd/crowdsec-cli/simulation.go index db499e380..890785a2d 100644 --- a/cmd/crowdsec-cli/simulation.go +++ b/cmd/crowdsec-cli/simulation.go @@ -3,12 +3,13 @@ package main import ( "fmt" "os" + "slices" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "golang.org/x/exp/slices" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" ) @@ -18,7 +19,7 @@ func addToExclusion(name string) error { } func removeFromExclusion(name string) error { - index := indexOf(name, csConfig.Cscli.SimulationConfig.Exclusions) + index := slices.Index(csConfig.Cscli.SimulationConfig.Exclusions, name) // Remove element from the slice csConfig.Cscli.SimulationConfig.Exclusions[index] = csConfig.Cscli.SimulationConfig.Exclusions[len(csConfig.Cscli.SimulationConfig.Exclusions)-1] @@ -144,13 +145,9 @@ func NewSimulationEnableCmd() *cobra.Command { Example: `cscli simulation enable`, DisableAutoGenTag: true, Run: func(cmd *cobra.Command, args []string) { - if err := csConfig.LoadHub(); err != nil { + if err := require.Hub(csConfig); err != nil { log.Fatal(err) } - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - log.Info("Run 'sudo cscli hub update' to get the hub index") - log.Fatalf("Failed to get Hub index : %v", err) - } if len(args) > 0 { for _, scenario := range args { diff --git a/cmd/crowdsec-cli/support.go b/cmd/crowdsec-cli/support.go index cc5775d9a..e5a4c36ab 100644 --- a/cmd/crowdsec-cli/support.go +++ b/cmd/crowdsec-cli/support.go @@ -20,6 +20,7 @@ import ( "github.com/crowdsecurity/go-cs-lib/version" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwversion" @@ -131,24 +132,6 @@ func collectOSInfo() ([]byte, error) { return w.Bytes(), nil } -func initHub() error { - if err := csConfig.LoadHub(); err != nil { - return fmt.Errorf("cannot load hub: %s", err) - } - if csConfig.Hub == nil { - return fmt.Errorf("hub not configured") - } - - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("cannot set hub branch: %s", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - 
return fmt.Errorf("no hub index found: %s", err) - } - return nil -} - func collectHubItems(itemType string) []byte { out := bytes.NewBuffer(nil) log.Infof("Collecting %s list", itemType) @@ -184,7 +167,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str if err != nil { return []byte(fmt.Sprintf("cannot parse API URL: %s", err)) } - scenarios, err := cwhub.GetInstalledScenariosAsString() + scenarios, err := cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS) if err != nil { return []byte(fmt.Sprintf("could not collect scenarios: %s", err)) } @@ -312,8 +295,7 @@ cscli support dump -f /tmp/crowdsec-support.zip skipAgent = true } - err = initHub() - if err != nil { + if err := require.Hub(csConfig); err != nil { log.Warn("Could not init hub, running on LAPI ? Hub related information will not be collected") skipHub = true infos[SUPPORT_PARSERS_PATH] = []byte(err.Error()) diff --git a/cmd/crowdsec-cli/utils.go b/cmd/crowdsec-cli/utils.go index 8f8047b56..1101235b6 100644 --- a/cmd/crowdsec-cli/utils.go +++ b/cmd/crowdsec-cli/utils.go @@ -8,7 +8,7 @@ import ( "math" "net" "net/http" - "os" + "slices" "strconv" "strings" "time" @@ -19,11 +19,11 @@ import ( "github.com/prometheus/prom2json" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "golang.org/x/exp/slices" "gopkg.in/yaml.v2" "github.com/crowdsecurity/go-cs-lib/trace" + "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -38,34 +38,6 @@ func printHelp(cmd *cobra.Command) { } } -func indexOf(s string, slice []string) int { - for i, elem := range slice { - if s == elem { - return i - } - } - return -1 -} - -func LoadHub() error { - if err := csConfig.LoadHub(); err != nil { - log.Fatal(err) - } - if csConfig.Hub == nil { - return fmt.Errorf("unable to load hub") - } - - if err := cwhub.SetHubBranch(); err != nil { - log.Warningf("unable to set hub branch (%s), default to master", err) - } - - if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { - return fmt.Errorf("Failed to get Hub index : '%w'. 
Run 'sudo cscli hub update' to get the hub index", err) - } - - return nil -} - func Suggest(itemType string, baseItem string, suggestItem string, score int, ignoreErr bool) { errMsg := "" if score < MaxDistance { @@ -100,7 +72,7 @@ func GetDistance(itemType string, itemName string) (*cwhub.Item, int) { } func compAllItems(itemType string, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - if err := LoadHub(); err != nil { + if err := require.Hub(csConfig); err != nil { return nil, cobra.ShellCompDirectiveDefault } @@ -116,31 +88,16 @@ func compAllItems(itemType string, args []string, toComplete string) ([]string, } func compInstalledItems(itemType string, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - if err := LoadHub(); err != nil { - return nil, cobra.ShellCompDirectiveDefault - } - - var items []string - var err error - switch itemType { - case cwhub.PARSERS: - items, err = cwhub.GetInstalledParsersAsString() - case cwhub.SCENARIOS: - items, err = cwhub.GetInstalledScenariosAsString() - case cwhub.PARSERS_OVFLW: - items, err = cwhub.GetInstalledPostOverflowsAsString() - case cwhub.COLLECTIONS: - items, err = cwhub.GetInstalledCollectionsAsString() - case cwhub.WAAP_RULES: - items, err = cwhub.GetInstalledWafRulesAsString() - default: + if err := require.Hub(csConfig); err != nil { return nil, cobra.ShellCompDirectiveDefault } + items, err := cwhub.GetInstalledItemsAsString(itemType) if err != nil { cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true) return nil, cobra.ShellCompDirectiveDefault } + comp := make([]string, 0) if toComplete != "" { @@ -470,37 +427,6 @@ func GetScenarioMetric(url string, itemName string) map[string]int { return stats } -// it's a rip of the cli version, but in silent-mode -func silenceInstallItem(name string, obtype string) (string, error) { - var item = cwhub.GetItem(obtype, name) - if item == nil { - return "", fmt.Errorf("error retrieving item") - } - it := *item - if downloadOnly && it.Downloaded && it.UpToDate { - return fmt.Sprintf("%s is already downloaded and up-to-date", it.Name), nil - } - it, err := cwhub.DownloadLatest(csConfig.Hub, it, forceAction, false) - if err != nil { - return "", fmt.Errorf("error while downloading %s : %v", it.Name, err) - } - if err := cwhub.AddItem(obtype, it); err != nil { - return "", err - } - - if downloadOnly { - return fmt.Sprintf("Downloaded %s to %s", it.Name, csConfig.Cscli.HubDir+"/"+it.RemotePath), nil - } - it, err = cwhub.EnableItem(csConfig.Hub, it) - if err != nil { - return "", fmt.Errorf("error while enabling %s : %v", it.Name, err) - } - if err := cwhub.AddItem(obtype, it); err != nil { - return "", err - } - return fmt.Sprintf("Enabled %s", it.Name), nil -} - func GetPrometheusMetric(url string) []*prom2json.Family { mfChan := make(chan *dto.MetricFamily, 1024) @@ -529,160 +455,6 @@ func GetPrometheusMetric(url string) []*prom2json.Family { return result } -func RestoreHub(dirPath string) error { - var err error - - if err := csConfig.LoadHub(); err != nil { - return err - } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } - - for _, itype := range cwhub.ItemTypes { - itemDirectory := fmt.Sprintf("%s/%s/", dirPath, itype) - if _, err = os.Stat(itemDirectory); err != nil { - log.Infof("no %s in backup", itype) - continue - } - /*restore the upstream items*/ - upstreamListFN := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itype) - file, err := 
os.ReadFile(upstreamListFN) - if err != nil { - return fmt.Errorf("error while opening %s : %s", upstreamListFN, err) - } - var upstreamList []string - err = json.Unmarshal(file, &upstreamList) - if err != nil { - return fmt.Errorf("error unmarshaling %s : %s", upstreamListFN, err) - } - for _, toinstall := range upstreamList { - label, err := silenceInstallItem(toinstall, itype) - if err != nil { - log.Errorf("Error while installing %s : %s", toinstall, err) - } else if label != "" { - log.Infof("Installed %s : %s", toinstall, label) - } else { - log.Printf("Installed %s : ok", toinstall) - } - } - - /*restore the local and tainted items*/ - files, err := os.ReadDir(itemDirectory) - if err != nil { - return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory, err) - } - for _, file := range files { - //this was the upstream data - if file.Name() == fmt.Sprintf("upstream-%s.json", itype) { - continue - } - if itype == cwhub.PARSERS || itype == cwhub.PARSERS_OVFLW { - //we expect a stage here - if !file.IsDir() { - continue - } - stage := file.Name() - stagedir := fmt.Sprintf("%s/%s/%s/", csConfig.ConfigPaths.ConfigDir, itype, stage) - log.Debugf("Found stage %s in %s, target directory : %s", stage, itype, stagedir) - if err = os.MkdirAll(stagedir, os.ModePerm); err != nil { - return fmt.Errorf("error while creating stage directory %s : %s", stagedir, err) - } - /*find items*/ - ifiles, err := os.ReadDir(itemDirectory + "/" + stage + "/") - if err != nil { - return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory+"/"+stage, err) - } - //finally copy item - for _, tfile := range ifiles { - log.Infof("Going to restore local/tainted [%s]", tfile.Name()) - sourceFile := fmt.Sprintf("%s/%s/%s", itemDirectory, stage, tfile.Name()) - destinationFile := fmt.Sprintf("%s%s", stagedir, tfile.Name()) - if err = CopyFile(sourceFile, destinationFile); err != nil { - return fmt.Errorf("failed copy %s %s to %s : %s", itype, sourceFile, destinationFile, err) - } - log.Infof("restored %s to %s", sourceFile, destinationFile) - } - } else { - log.Infof("Going to restore local/tainted [%s]", file.Name()) - sourceFile := fmt.Sprintf("%s/%s", itemDirectory, file.Name()) - destinationFile := fmt.Sprintf("%s/%s/%s", csConfig.ConfigPaths.ConfigDir, itype, file.Name()) - if err = CopyFile(sourceFile, destinationFile); err != nil { - return fmt.Errorf("failed copy %s %s to %s : %s", itype, sourceFile, destinationFile, err) - } - log.Infof("restored %s to %s", sourceFile, destinationFile) - } - - } - } - return nil -} - -func BackupHub(dirPath string) error { - var err error - var itemDirectory string - var upstreamParsers []string - - for _, itemType := range cwhub.ItemTypes { - clog := log.WithFields(log.Fields{ - "type": itemType, - }) - itemMap := cwhub.GetItemMap(itemType) - if itemMap == nil { - clog.Infof("No %s to backup.", itemType) - continue - } - itemDirectory = fmt.Sprintf("%s/%s/", dirPath, itemType) - if err := os.MkdirAll(itemDirectory, os.ModePerm); err != nil { - return fmt.Errorf("error while creating %s : %s", itemDirectory, err) - } - upstreamParsers = []string{} - for k, v := range itemMap { - clog = clog.WithFields(log.Fields{ - "file": v.Name, - }) - if !v.Installed { //only backup installed ones - clog.Debugf("[%s] : not installed", k) - continue - } - - //for the local/tainted ones, we backup the full file - if v.Tainted || v.Local || !v.UpToDate { - //we need to backup stages for parsers - if itemType == cwhub.PARSERS || itemType == cwhub.PARSERS_OVFLW { - fstagedir 
:= fmt.Sprintf("%s%s", itemDirectory, v.Stage) - if err := os.MkdirAll(fstagedir, os.ModePerm); err != nil { - return fmt.Errorf("error while creating stage dir %s : %s", fstagedir, err) - } - } - clog.Debugf("[%s] : backuping file (tainted:%t local:%t up-to-date:%t)", k, v.Tainted, v.Local, v.UpToDate) - tfile := fmt.Sprintf("%s%s/%s", itemDirectory, v.Stage, v.FileName) - if err = CopyFile(v.LocalPath, tfile); err != nil { - return fmt.Errorf("failed copy %s %s to %s : %s", itemType, v.LocalPath, tfile, err) - } - clog.Infof("local/tainted saved %s to %s", v.LocalPath, tfile) - continue - } - clog.Debugf("[%s] : from hub, just backup name (up-to-date:%t)", k, v.UpToDate) - clog.Infof("saving, version:%s, up-to-date:%t", v.Version, v.UpToDate) - upstreamParsers = append(upstreamParsers, v.Name) - } - //write the upstream items - upstreamParsersFname := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itemType) - upstreamParsersContent, err := json.MarshalIndent(upstreamParsers, "", " ") - if err != nil { - return fmt.Errorf("failed marshaling upstream parsers : %s", err) - } - err = os.WriteFile(upstreamParsersFname, upstreamParsersContent, 0644) - if err != nil { - return fmt.Errorf("unable to write to %s %s : %s", itemType, upstreamParsersFname, err) - } - clog.Infof("Wrote %d entries for %s to %s", len(upstreamParsers), itemType, upstreamParsersFname) - } - - return nil -} - type unit struct { value int64 symbol string diff --git a/cmd/crowdsec-cli/utils_table.go b/cmd/crowdsec-cli/utils_table.go index aef1e94f7..16f42d72a 100644 --- a/cmd/crowdsec-cli/utils_table.go +++ b/cmd/crowdsec-cli/utils_table.go @@ -17,7 +17,7 @@ func listHubItemTable(out io.Writer, title string, statuses []cwhub.ItemHubStatu t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft) for _, status := range statuses { - t.AddRow(status.Name, status.UTF8_Status, status.LocalVersion, status.LocalPath) + t.AddRow(status.Name, status.UTF8Status, status.LocalVersion, status.LocalPath) } renderTableTitle(out, title) t.Render() diff --git a/cmd/crowdsec-cli/waap_rules.go b/cmd/crowdsec-cli/waap_rules.go index ad448b7ab..548921836 100644 --- a/cmd/crowdsec-cli/waap_rules.go +++ b/cmd/crowdsec-cli/waap_rules.go @@ -31,9 +31,7 @@ cscli waap-rules remove crowdsecurity/core-rule-set return fmt.Errorf("you must configure cli before interacting with hub") } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("error while setting hub branch: %s", err) - } + cwhub.SetHubBranch() if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { log.Info("Run 'sudo cscli hub update' to get the hub index") diff --git a/cmd/crowdsec/Makefile b/cmd/crowdsec/Makefile index 2513873e8..7425d970a 100644 --- a/cmd/crowdsec/Makefile +++ b/cmd/crowdsec/Makefile @@ -4,7 +4,6 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters GO = go GOBUILD = $(GO) build GOTEST = $(GO) test @@ -23,7 +22,7 @@ SYSTEMD_PATH_FILE = "/etc/systemd/system/crowdsec.service" all: clean test build build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(CROWDSEC_BIN) + $(GOBUILD) $(LD_OPTS) -o $(CROWDSEC_BIN) test: $(GOTEST) $(LD_OPTS) -v ./... 
diff --git a/cmd/crowdsec/main.go b/cmd/crowdsec/main.go index 273ef6fb8..c604e670a 100644 --- a/cmd/crowdsec/main.go +++ b/cmd/crowdsec/main.go @@ -138,11 +138,13 @@ func (l *labelsMap) String() string { } func (l labelsMap) Set(label string) error { - split := strings.Split(label, ":") - if len(split) != 2 { - return errors.Wrapf(errors.New("Bad Format"), "for Label '%s'", label) + for _, pair := range strings.Split(label, ",") { + split := strings.Split(pair, ":") + if len(split) != 2 { + return fmt.Errorf("invalid format for label '%s', must be key:value", pair) + } + l[split[0]] = split[1] } - l[split[0]] = split[1] return nil } diff --git a/cmd/crowdsec/output.go b/cmd/crowdsec/output.go index 348504034..95642bbf3 100644 --- a/cmd/crowdsec/output.go +++ b/cmd/crowdsec/output.go @@ -70,7 +70,7 @@ func runOutput(input chan types.Event, overflow chan types.Event, buckets *leaky var cache []types.RuntimeAlert var cacheMutex sync.Mutex - scenarios, err := cwhub.GetInstalledScenariosAsString() + scenarios, err := cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS) if err != nil { return fmt.Errorf("loading list of installed hub scenarios: %w", err) } @@ -93,7 +93,7 @@ func runOutput(input chan types.Event, overflow chan types.Event, buckets *leaky URL: apiURL, PapiURL: papiURL, VersionPrefix: "v1", - UpdateScenario: cwhub.GetInstalledScenariosAsString, + UpdateScenario: func() ([]string, error) {return cwhub.GetInstalledItemsAsString(cwhub.SCENARIOS)}, }) if err != nil { return fmt.Errorf("new client api: %w", err) diff --git a/cmd/crowdsec/serve.go b/cmd/crowdsec/serve.go index ad61d781e..8513e0046 100644 --- a/cmd/crowdsec/serve.go +++ b/cmd/crowdsec/serve.go @@ -141,12 +141,24 @@ func ShutdownCrowdsecRoutines() error { time.Sleep(1 * time.Second) // ugly workaround for now outputsTomb.Kill(nil) - if err := outputsTomb.Wait(); err != nil { - log.Warningf("Ouputs returned error : %s", err) - reterr = err + done := make(chan error, 1) + go func() { + done <- outputsTomb.Wait() + }() + + // wait for outputs to finish, max 3 seconds + select { + case err := <-done: + if err != nil { + log.Warningf("Outputs returned error : %s", err) + reterr = err + } + log.Debugf("outputs are done") + case <-time.After(3 * time.Second): + // this can happen if outputs are stuck in a http retry loop + log.Warningf("Outputs didn't finish in time, some events may have not been flushed") } - log.Debugf("outputs are done") // He's dead, Jim. 
crowdsecTomb.Kill(nil) diff --git a/plugins/notifications/http/Makefile b/cmd/notification-dummy/Makefile similarity index 63% rename from plugins/notifications/http/Makefile rename to cmd/notification-dummy/Makefile index 4545b6910..251abe19d 100644 --- a/plugins/notifications/http/Makefile +++ b/cmd/notification-dummy/Makefile @@ -4,14 +4,13 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -PLUGIN=http -BINARY_NAME = notification-$(PLUGIN)$(EXT) - GO = go GOBUILD = $(GO) build +BINARY_NAME = notification-dummy$(EXT) + build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) .PHONY: clean clean: diff --git a/plugins/notifications/dummy/dummy.yaml b/cmd/notification-dummy/dummy.yaml similarity index 100% rename from plugins/notifications/dummy/dummy.yaml rename to cmd/notification-dummy/dummy.yaml diff --git a/plugins/notifications/dummy/main.go b/cmd/notification-dummy/main.go similarity index 100% rename from plugins/notifications/dummy/main.go rename to cmd/notification-dummy/main.go diff --git a/plugins/notifications/slack/Makefile b/cmd/notification-email/Makefile similarity index 63% rename from plugins/notifications/slack/Makefile rename to cmd/notification-email/Makefile index bd7ea8146..7a782cc9d 100644 --- a/plugins/notifications/slack/Makefile +++ b/cmd/notification-email/Makefile @@ -4,14 +4,13 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -PLUGIN=slack -BINARY_NAME = notification-$(PLUGIN)$(EXT) - GO = go GOBUILD = $(GO) build +BINARY_NAME = notification-email$(EXT) + build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) .PHONY: clean clean: diff --git a/plugins/notifications/email/email.yaml b/cmd/notification-email/email.yaml similarity index 72% rename from plugins/notifications/email/email.yaml rename to cmd/notification-email/email.yaml index 37ce2a982..512633c63 100644 --- a/plugins/notifications/email/email.yaml +++ b/cmd/notification-email/email.yaml @@ -15,12 +15,14 @@ timeout: 20s # Time to wait for response from the plugin before conside # The following template receives a list of models.Alert objects # The output goes in the email message body format: | + {{range . -}} {{$alert := . -}} {{range .Decisions -}} -

{{.Value}} will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine {{$alert.MachineID}}. CrowdSec CTI
+ {{.Value}} will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine {{$alert.MachineID}}. CrowdSec CTI
{{end -}} {{end -}} + smtp_host: # example: smtp.gmail.com smtp_username: # Replace with your actual username @@ -35,7 +37,15 @@ receiver_emails: # - email2@gmail.com # One of "ssltls", "starttls", "none" -encryption_type: ssltls +encryption_type: "ssltls" + +# If you need to set the HELO hostname: +# helo_host: "localhost" + +# If the email server is hitting the default timeouts (10 seconds), you can increase them here +# +# connect_timeout: 10s +# send_timeout: 10s --- diff --git a/plugins/notifications/email/main.go b/cmd/notification-email/main.go similarity index 85% rename from plugins/notifications/email/main.go rename to cmd/notification-email/main.go index ac09c1eef..789740156 100644 --- a/plugins/notifications/email/main.go +++ b/cmd/notification-email/main.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "os" + "time" "github.com/crowdsecurity/crowdsec/pkg/protobufs" "github.com/hashicorp/go-hclog" @@ -47,6 +48,8 @@ type PluginConfig struct { EncryptionType string `yaml:"encryption_type"` AuthType string `yaml:"auth_type"` HeloHost string `yaml:"helo_host"` + ConnectTimeout string `yaml:"connect_timeout"` + SendTimeout string `yaml:"send_timeout"` } type EmailPlugin struct { @@ -77,7 +80,7 @@ func (n *EmailPlugin) Configure(ctx context.Context, config *protobufs.Config) ( } if d.ReceiverEmails == nil || len(d.ReceiverEmails) == 0 { - return nil, fmt.Errorf("Receiver emails are not set") + return nil, fmt.Errorf("receiver emails are not set") } n.ConfigByName[d.Name] = d @@ -108,6 +111,24 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi server.Authentication = AuthStringToType[cfg.AuthType] server.Helo = cfg.HeloHost + var err error + + if cfg.ConnectTimeout != "" { + server.ConnectTimeout, err = time.ParseDuration(cfg.ConnectTimeout) + if err != nil { + logger.Warn(fmt.Sprintf("invalid connect timeout '%s', using default '10s'", cfg.ConnectTimeout)) + server.ConnectTimeout = 10 * time.Second + } + } + + if cfg.SendTimeout != "" { + server.SendTimeout, err = time.ParseDuration(cfg.SendTimeout) + if err != nil { + logger.Warn(fmt.Sprintf("invalid send timeout '%s', using default '10s'", cfg.SendTimeout)) + server.SendTimeout = 10 * time.Second + } + } + logger.Debug("making smtp connection") smtpClient, err := server.Connect() if err != nil { diff --git a/plugins/notifications/splunk/Makefile b/cmd/notification-http/Makefile similarity index 63% rename from plugins/notifications/splunk/Makefile rename to cmd/notification-http/Makefile index 669779ff3..30ed43a69 100644 --- a/plugins/notifications/splunk/Makefile +++ b/cmd/notification-http/Makefile @@ -4,14 +4,13 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -PLUGIN=splunk -BINARY_NAME = notification-$(PLUGIN)$(EXT) - GO = go GOBUILD = $(GO) build +BINARY_NAME = notification-http$(EXT) + build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) .PHONY: clean clean: diff --git a/plugins/notifications/http/http.yaml b/cmd/notification-http/http.yaml similarity index 100% rename from plugins/notifications/http/http.yaml rename to cmd/notification-http/http.yaml diff --git a/plugins/notifications/http/main.go b/cmd/notification-http/main.go similarity index 98% rename from plugins/notifications/http/main.go rename to cmd/notification-http/main.go index 7e15fccae..f7908ddda 100644 --- a/plugins/notifications/http/main.go +++ b/cmd/notification-http/main.go @@ -63,7 +63,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification 
*protobufs.Notific logger.Debug(fmt.Sprintf("adding header %s: %s", headerName, headerValue)) request.Header.Add(headerName, headerValue) } - logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, string(notification.Text))) + logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, notification.Text)) resp, err := client.Do(request) if err != nil { logger.Error(fmt.Sprintf("Failed to make HTTP request : %s", err)) diff --git a/plugins/notifications/sentinel/Makefile b/cmd/notification-sentinel/Makefile similarity index 74% rename from plugins/notifications/sentinel/Makefile rename to cmd/notification-sentinel/Makefile index df2a23fe6..21d176a90 100644 --- a/plugins/notifications/sentinel/Makefile +++ b/cmd/notification-sentinel/Makefile @@ -4,14 +4,14 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters GO = go GOBUILD = $(GO) build BINARY_NAME = notification-sentinel$(EXT) build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) diff --git a/plugins/notifications/sentinel/main.go b/cmd/notification-sentinel/main.go similarity index 97% rename from plugins/notifications/sentinel/main.go rename to cmd/notification-sentinel/main.go index 2a37be5bf..18eff1b05 100644 --- a/plugins/notifications/sentinel/main.go +++ b/cmd/notification-sentinel/main.go @@ -78,7 +78,7 @@ func (s *SentinelPlugin) Notify(ctx context.Context, notification *protobufs.Not return &protobufs.Empty{}, err } - req, err := http.NewRequest("POST", url, body) + req, err := http.NewRequest(http.MethodPost, url, body) if err != nil { logger.Error("failed to create request", "error", err) return &protobufs.Empty{}, err @@ -98,7 +98,7 @@ func (s *SentinelPlugin) Notify(ctx context.Context, notification *protobufs.Not defer resp.Body.Close() logger.Debug("sent notification to sentinel", "status", resp.Status) - if resp.StatusCode != 200 { + if resp.StatusCode != http.StatusOK { return &protobufs.Empty{}, fmt.Errorf("failed to send notification to sentinel: %s", resp.Status) } diff --git a/plugins/notifications/sentinel/sentinel.yaml b/cmd/notification-sentinel/sentinel.yaml similarity index 100% rename from plugins/notifications/sentinel/sentinel.yaml rename to cmd/notification-sentinel/sentinel.yaml diff --git a/plugins/notifications/dummy/Makefile b/cmd/notification-slack/Makefile similarity index 63% rename from plugins/notifications/dummy/Makefile rename to cmd/notification-slack/Makefile index e6081de49..06c9ccc3f 100644 --- a/plugins/notifications/dummy/Makefile +++ b/cmd/notification-slack/Makefile @@ -4,14 +4,13 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -PLUGIN = dummy -BINARY_NAME = notification-$(PLUGIN)$(EXT) - GO = go GOBUILD = $(GO) build +BINARY_NAME = notification-slack$(EXT) + build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) .PHONY: clean clean: diff --git a/plugins/notifications/slack/main.go b/cmd/notification-slack/main.go similarity index 100% rename from plugins/notifications/slack/main.go rename to cmd/notification-slack/main.go diff --git a/plugins/notifications/slack/slack.yaml b/cmd/notification-slack/slack.yaml similarity index 100% rename from plugins/notifications/slack/slack.yaml rename to cmd/notification-slack/slack.yaml diff --git a/cmd/notification-splunk/Makefile b/cmd/notification-splunk/Makefile new file mode 100644 index 
000000000..aa15ecac9 --- /dev/null +++ b/cmd/notification-splunk/Makefile @@ -0,0 +1,17 @@ +ifeq ($(OS), Windows_NT) + SHELL := pwsh.exe + .SHELLFLAGS := -NoProfile -Command + EXT = .exe +endif + +GO = go +GOBUILD = $(GO) build + +BINARY_NAME = notification-splunk$(EXT) + +build: clean + $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) + +.PHONY: clean +clean: + @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) diff --git a/plugins/notifications/splunk/main.go b/cmd/notification-splunk/main.go similarity index 96% rename from plugins/notifications/splunk/main.go rename to cmd/notification-splunk/main.go index a9b4be50a..826986877 100644 --- a/plugins/notifications/splunk/main.go +++ b/cmd/notification-splunk/main.go @@ -58,7 +58,7 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio return &protobufs.Empty{}, err } - req, err := http.NewRequest("POST", cfg.URL, strings.NewReader(string(data))) + req, err := http.NewRequest(http.MethodPost, cfg.URL, strings.NewReader(string(data))) if err != nil { return &protobufs.Empty{}, err } @@ -70,7 +70,7 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio return &protobufs.Empty{}, err } - if resp.StatusCode != 200 { + if resp.StatusCode != http.StatusOK { content, err := io.ReadAll(resp.Body) if err != nil { return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %s", err) diff --git a/plugins/notifications/splunk/splunk.yaml b/cmd/notification-splunk/splunk.yaml similarity index 100% rename from plugins/notifications/splunk/splunk.yaml rename to cmd/notification-splunk/splunk.yaml diff --git a/debian/install b/debian/install index 78bf71bee..3153244b8 100644 --- a/debian/install +++ b/debian/install @@ -6,8 +6,8 @@ config/patterns/* etc/crowdsec/patterns config/crowdsec.service lib/systemd/system # Referenced configs: -plugins/notifications/slack/slack.yaml etc/crowdsec/notifications/ -plugins/notifications/http/http.yaml etc/crowdsec/notifications/ -plugins/notifications/splunk/splunk.yaml etc/crowdsec/notifications/ -plugins/notifications/email/email.yaml etc/crowdsec/notifications/ -plugins/notifications/sentinel/sentinel.yaml etc/crowdsec/notifications/ +cmd/notification-slack/slack.yaml etc/crowdsec/notifications/ +cmd/notification-http/http.yaml etc/crowdsec/notifications/ +cmd/notification-splunk/splunk.yaml etc/crowdsec/notifications/ +cmd/notification-email/email.yaml etc/crowdsec/notifications/ +cmd/notification-sentinel/sentinel.yaml etc/crowdsec/notifications/ diff --git a/debian/rules b/debian/rules index a734bfa8c..655af3dfe 100755 --- a/debian/rules +++ b/debian/rules @@ -25,11 +25,11 @@ override_dh_auto_install: mkdir -p debian/crowdsec/usr/lib/crowdsec/plugins/ mkdir -p debian/crowdsec/etc/crowdsec/notifications/ - install -m 551 plugins/notifications/slack/notification-slack debian/crowdsec/usr/lib/crowdsec/plugins/ - install -m 551 plugins/notifications/http/notification-http debian/crowdsec/usr/lib/crowdsec/plugins/ - install -m 551 plugins/notifications/splunk/notification-splunk debian/crowdsec/usr/lib/crowdsec/plugins/ - install -m 551 plugins/notifications/email/notification-email debian/crowdsec/usr/lib/crowdsec/plugins/ - install -m 551 plugins/notifications/sentinel/notification-sentinel debian/crowdsec/usr/lib/crowdsec/plugins/ + install -m 551 cmd/notification-slack/notification-slack debian/crowdsec/usr/lib/crowdsec/plugins/ + install -m 551 cmd/notification-http/notification-http debian/crowdsec/usr/lib/crowdsec/plugins/ + install -m 551 
cmd/notification-splunk/notification-splunk debian/crowdsec/usr/lib/crowdsec/plugins/ + install -m 551 cmd/notification-email/notification-email debian/crowdsec/usr/lib/crowdsec/plugins/ + install -m 551 cmd/notification-sentinel/notification-sentinel debian/crowdsec/usr/lib/crowdsec/plugins/ cp cmd/crowdsec/crowdsec debian/crowdsec/usr/bin cp cmd/crowdsec-cli/cscli debian/crowdsec/usr/bin diff --git a/docker/README.md b/docker/README.md index e1c7b517e..4d1182fa4 100644 --- a/docker/README.md +++ b/docker/README.md @@ -19,11 +19,7 @@ All the following images are available on Docker Hub for the architectures - `crowdsecurity/crowdsec:{version}` -Recommended for production usage. Also available on GitHub (ghcr.io). - - - `crowdsecurity/crowdsec:dev` - -The latest stable release. +Latest stable release recommended for production usage. Also available on GitHub (ghcr.io). - `crowdsecurity/crowdsec:dev` @@ -190,6 +186,14 @@ It is not recommended anymore to bind-mount the full config.yaml file and you sh If you want to use the [notification system](https://docs.crowdsec.net/docs/notification_plugins/intro), you have to use the full image (not slim) and mount at least a custom `profiles.yaml` and a notification configuration to `/etc/crowdsec/notifications` +```shell +docker run -d \ + -v ./profiles.yaml:/etc/crowdsec/profiles.yaml \ + -v ./http_notification.yaml:/etc/crowdsec/notifications/http_notification.yaml \ + -p 8080:8080 -p 6060:6060 \ + --name crowdsec crowdsecurity/crowdsec +``` + # Deployment use cases Crowdsec is composed of an `agent` that parses logs and creates `alerts`, and a diff --git a/docker/test/Pipfile b/docker/test/Pipfile index bffd8f2cc..e929790e8 100644 --- a/docker/test/Pipfile +++ b/docker/test/Pipfile @@ -1,7 +1,7 @@ [packages] pytest-dotenv = "0.5.2" pytest-xdist = "3.3.1" -pytest-cs = {ref = "0.7.16", git = "https://github.com/crowdsecurity/pytest-cs.git"} +pytest-cs = {ref = "0.7.18", git = "https://github.com/crowdsecurity/pytest-cs.git"} [dev-packages] gnureadline = "8.1.2" diff --git a/docker/test/Pipfile.lock b/docker/test/Pipfile.lock index f83da8cc8..b9edce194 100644 --- a/docker/test/Pipfile.lock +++ b/docker/test/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "78f693678e411b7bdb5dd0280b7d6f8d9880069b331d44d96d32ba697275e30d" + "sha256": "64085783c9fec3a9eda976b7700b5bad7abd2b7a0f0670fa2209c52f3647be7f" }, "pipfile-spec": 6, "requires": { @@ -176,32 +176,32 @@ }, "cryptography": { "hashes": [ - "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711", - "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7", - "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd", - "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e", - "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58", - "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0", - "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d", - "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83", - "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831", - "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766", - "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b", - "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c", - "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182", - 
"sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f", - "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa", - "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4", - "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a", - "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2", - "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76", - "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5", - "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee", - "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f", - "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14" + "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67", + "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311", + "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8", + "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13", + "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143", + "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f", + "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829", + "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd", + "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397", + "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac", + "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d", + "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a", + "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839", + "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e", + "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6", + "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9", + "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860", + "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca", + "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91", + "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d", + "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714", + "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb", + "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f" ], "markers": "python_version >= '3.7'", - "version": "==41.0.2" + "version": "==41.0.4" }, "docker": { "hashes": [ @@ -245,11 +245,11 @@ }, "pluggy": { "hashes": [ - "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", - "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3" + "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", + "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" ], - "markers": "python_version >= '3.7'", - "version": "==1.2.0" + "markers": "python_version >= '3.8'", + "version": "==1.3.0" }, "psutil": { "hashes": [ @@ -280,15 +280,15 @@ }, "pytest": { "hashes": [ - "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32", - "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a" + "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002", + "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069" ], "markers": "python_version >= 
'3.7'", - "version": "==7.4.0" + "version": "==7.4.2" }, "pytest-cs": { "git": "https://github.com/crowdsecurity/pytest-cs.git", - "ref": "4a3451084215053af8a48ff37507b4f86bf75c10" + "ref": "df835beabc539be7f7f627b21caa0d6ad333daae" }, "pytest-datadir": { "hashes": [ @@ -324,7 +324,9 @@ }, "pyyaml": { "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", @@ -332,7 +334,10 @@ "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", @@ -340,9 +345,12 @@ "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", @@ -357,7 +365,9 @@ "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", @@ -386,28 +396,28 @@ }, "urllib3": { "hashes": [ - "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11", - "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4" + "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594", + "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e" ], "markers": 
"python_version >= '3.7'", - "version": "==2.0.4" + "version": "==2.0.5" }, "websocket-client": { "hashes": [ - "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd", - "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d" + "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f", + "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03" ], - "markers": "python_version >= '3.7'", - "version": "==1.6.1" + "markers": "python_version >= '3.8'", + "version": "==1.6.3" } }, "develop": { "asttokens": { "hashes": [ - "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3", - "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c" + "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e", + "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69" ], - "version": "==2.2.1" + "version": "==2.4.0" }, "backcall": { "hashes": [ @@ -474,19 +484,19 @@ }, "ipython": { "hashes": [ - "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1", - "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf" + "sha256:2baeb5be6949eeebf532150f81746f8333e2ccce02de1c7eedde3f23ed5e9f1e", + "sha256:45a2c3a529296870a97b7de34eda4a31bee16bc7bf954e07d39abe49caf8f887" ], "markers": "python_version >= '3.11'", - "version": "==8.14.0" + "version": "==8.15.0" }, "jedi": { "hashes": [ - "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e", - "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612" + "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4", + "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e" ], "markers": "python_version >= '3.6'", - "version": "==0.18.2" + "version": "==0.19.0" }, "matplotlib-inline": { "hashes": [ @@ -543,11 +553,11 @@ }, "pygments": { "hashes": [ - "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c", - "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1" + "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692", + "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29" ], "markers": "python_version >= '3.7'", - "version": "==2.15.1" + "version": "==2.16.1" }, "six": { "hashes": [ @@ -566,11 +576,11 @@ }, "traitlets": { "hashes": [ - "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8", - "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9" + "sha256:417745a96681fbb358e723d5346a547521f36e9bd0d50ba7ab368fff5d67aa54", + "sha256:f584ea209240466e66e91f3c81aa7d004ba4cf794990b0c775938a1544217cd1" ], - "markers": "python_version >= '3.7'", - "version": "==5.9.0" + "markers": "python_version >= '3.8'", + "version": "==5.10.0" }, "wcwidth": { "hashes": [ diff --git a/docker/test/tests/test_tls.py b/docker/test/tests/test_tls.py index cea29b9fc..eeda18f56 100644 --- a/docker/test/tests/test_tls.py +++ b/docker/test/tests/test_tls.py @@ -4,7 +4,7 @@ Test agent-lapi and cscli-lapi communication via TLS, on the same container. 
""" -import random +import uuid from pytest_cs import Status @@ -140,7 +140,7 @@ def test_tls_lapi_var(crowdsec, flavor, certs_dir): def test_tls_split_lapi_agent(crowdsec, flavor, certs_dir): """Server-only certificate, split containers""" - rand = random.randint(0, 10000) + rand = uuid.uuid1() lapiname = 'lapi-' + str(rand) agentname = 'agent-' + str(rand) @@ -193,7 +193,7 @@ def test_tls_split_lapi_agent(crowdsec, flavor, certs_dir): def test_tls_mutual_split_lapi_agent(crowdsec, flavor, certs_dir): """Server and client certificates, split containers""" - rand = random.randint(0, 10000) + rand = uuid.uuid1() lapiname = 'lapi-' + str(rand) agentname = 'agent-' + str(rand) @@ -244,7 +244,7 @@ def test_tls_mutual_split_lapi_agent(crowdsec, flavor, certs_dir): def test_tls_client_ou(crowdsec, certs_dir): """Check behavior of client certificate vs AGENTS_ALLOWED_OU""" - rand = random.randint(0, 10000) + rand = uuid.uuid1() lapiname = 'lapi-' + str(rand) agentname = 'agent-' + str(rand) @@ -287,6 +287,19 @@ def test_tls_client_ou(crowdsec, certs_dir): lapi_env['AGENTS_ALLOWED_OU'] = 'custom-client-ou' + # change container names to avoid conflict + # recreate certificates because they need the new hostname + + rand = uuid.uuid1() + lapiname = 'lapi-' + str(rand) + agentname = 'agent-' + str(rand) + + agent_env['LOCAL_API_URL'] = f'https://{lapiname}:8080' + + volumes = { + certs_dir(lapi_hostname=lapiname, agent_ou='custom-client-ou'): {'bind': '/etc/ssl/crowdsec', 'mode': 'ro'}, + } + cs_lapi = crowdsec(name=lapiname, environment=lapi_env, volumes=volumes) cs_agent = crowdsec(name=agentname, environment=agent_env, volumes=volumes) diff --git a/go.mod b/go.mod index 8237e171f..2113bece3 100644 --- a/go.mod +++ b/go.mod @@ -1,9 +1,13 @@ module github.com/crowdsecurity/crowdsec -go 1.20 +go 1.21 + +// Don't use the toolchain directive to avoid uncontrolled downloads during +// a build, especially in sandboxed environments (freebsd, gentoo...). 
+// toolchain go1.21.3 require ( - entgo.io/ent v0.11.3 + entgo.io/ent v0.12.4 github.com/AlecAivazis/survey/v2 v2.2.7 github.com/Masterminds/semver/v3 v3.1.1 github.com/Masterminds/sprig/v3 v3.2.2 @@ -21,7 +25,7 @@ require ( github.com/c-robinson/iplib v1.0.3 github.com/cespare/xxhash/v2 v2.2.0 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 - github.com/crowdsecurity/go-cs-lib v0.0.3 + github.com/crowdsecurity/go-cs-lib v0.0.4 github.com/crowdsecurity/grokky v0.2.1 github.com/crowdsecurity/machineid v1.0.2 github.com/davecgh/go-spew v1.1.1 @@ -68,12 +72,14 @@ require ( github.com/segmentio/kafka-go v0.4.34 github.com/shirou/gopsutil/v3 v3.23.5 github.com/sirupsen/logrus v1.9.3 + github.com/slack-go/slack v0.12.2 github.com/spf13/cobra v1.7.0 github.com/stretchr/testify v1.8.4 github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26 github.com/wasilibs/go-re2 v1.3.0 golang.org/x/crypto v0.10.0 golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 + github.com/xhit/go-simple-mail/v2 v2.16.0 golang.org/x/mod v0.11.0 golang.org/x/sys v0.10.0 google.golang.org/grpc v1.56.1 @@ -90,7 +96,7 @@ require ( ) require ( - ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a // indirect + ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect github.com/ahmetalpbalkan/dlog v0.0.0-20170105205344-4fb5f8204f26 // indirect @@ -126,6 +132,7 @@ require ( github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.5.9 // indirect github.com/google/gofuzz v1.2.0 // indirect + github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/hcl/v2 v2.13.0 // indirect github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect github.com/huandu/xstrings v1.3.2 // indirect @@ -184,6 +191,7 @@ require ( github.com/tidwall/pretty v1.2.1 // indirect github.com/tklauser/go-sysconf v0.3.11 // indirect github.com/tklauser/numcpus v0.6.0 // indirect + github.com/toorop/go-dkim v0.0.0-20201103131630-e1cd1a0a5208 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.2.11 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect @@ -196,7 +204,7 @@ require ( golang.org/x/term v0.10.0 // indirect golang.org/x/text v0.11.0 // indirect golang.org/x/time v0.2.0 // indirect - golang.org/x/tools v0.7.0 // indirect + golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect diff --git a/go.sum b/go.sum index b218a4273..25445199e 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,7 @@ -ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a h1:6/nt4DODfgxzHTTg3tYy7YkVzruGQGZ/kRvXpA45KUo= -ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= +ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935 h1:JnYs/y8RJ3+MiIUp+3RgyyeO48VHLAZimqiaZYnMKk8= +ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935/go.mod h1:isZrlzJ5cpoCoKFoY9knZug7Lq4pP1cm8g3XciLZ0Pw= bitbucket.org/creachadair/stringset v0.0.9 h1:L4vld9nzPt90UZNrXjNelTshD74ps4P5NGs3Iq6yN3o= +bitbucket.org/creachadair/stringset v0.0.9/go.mod h1:t+4WcQ4+PXTa8aQdNKe40ZP6iwesoMFWAxPGd3UGjyY= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod 
h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -34,14 +35,16 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -entgo.io/ent v0.11.3 h1:F5FBGAWiDCGder7YT+lqMnyzXl6d0xU3xMBM/SO3CMc= -entgo.io/ent v0.11.3/go.mod h1:mvDhvynOzAsOe7anH7ynPPtMjA/eeXP96kAfweevyxc= +entgo.io/ent v0.12.4 h1:LddPnAyxls/O7DTXZvUGDj0NZIdGSu317+aoNLJWbD8= +entgo.io/ent v0.12.4/go.mod h1:Y3JVAjtlIk8xVZYSn3t3mf8xlZIn5SAOXZQxD6kKI+Q= github.com/AlecAivazis/survey/v2 v2.2.7 h1:5NbxkF4RSKmpywYdcRgUmos1o+roJY8duCLZXbVjoig= github.com/AlecAivazis/survey/v2 v2.2.7/go.mod h1:9DYvHgXtiXm6nCn+jXnOXLKbH+Yo9u8fAS/SduGdoPk= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= @@ -138,8 +141,8 @@ github.com/crowdsecurity/coraza/v3 v3.0.0-20230727080316-2348f4b3045f h1:7MgSs0r github.com/crowdsecurity/coraza/v3 v3.0.0-20230727080316-2348f4b3045f/go.mod h1:YwM+m6iBdUn6P1eQKu+F+83bzkP0AzSEBCcVL//zh9c= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk= -github.com/crowdsecurity/go-cs-lib v0.0.3 h1:NPSHTLS83A3wFyzV5R9Py8fBbTrVHu/1PQeaD7id4+I= -github.com/crowdsecurity/go-cs-lib v0.0.3/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k= +github.com/crowdsecurity/go-cs-lib v0.0.4 h1:mH3iqz8H8iH9YpldqCdojyKHy9z3JDhas/k6I8M0ims= +github.com/crowdsecurity/go-cs-lib v0.0.4/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k= github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4= github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM= github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc= @@ -288,6 +291,7 @@ github.com/go-openapi/validate v0.20.0 h1:pzutNCCBZGZlE+u8HD3JZyWdc/TVbtVwlWUp8/ github.com/go-openapi/validate v0.20.0/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0/go.mod 
h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= @@ -302,7 +306,8 @@ github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfC github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.4 h1:u2CU3YKy9I2pmu9pX0eq50wCgjfGIt539SqR7FbHiho= +github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= @@ -385,6 +390,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= @@ -412,6 +418,9 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e h1:XmA6L9IPRdUr28a+SK/oMchGgQy159wvzXA5tJ7l+40= github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e/go.mod h1:AFIo+02s+12CEg8Gzz9kzhCbmbq6JcKNrhHffCGA9z4= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= @@ -488,6 +497,7 @@ github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dv github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= +github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 
h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= @@ -529,6 +539,7 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= @@ -538,14 +549,15 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY= github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= @@ -693,6 +705,7 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= @@ -719,6 +732,8 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/slack-go/slack v0.12.2 h1:x3OppyMyGIbbiyFhsBmpf9pwkUzMhthJMRNmNlA4LaQ= +github.com/slack-go/slack v0.12.2/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= 
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= @@ -764,6 +779,8 @@ github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+Kd github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= +github.com/toorop/go-dkim v0.0.0-20201103131630-e1cd1a0a5208 h1:PM5hJF7HVfNWmCjMdEfbuOBNXSVF2cMFGgQTPdKCbwM= +github.com/toorop/go-dkim v0.0.0-20201103131630-e1cd1a0a5208/go.mod h1:BzWtXXrXzZUvMacR0oF/fbDDgUPO8L36tDMmRAf14ns= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= @@ -774,6 +791,7 @@ github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26 h1:UFHFmFfixpmf github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26/go.mod h1:IGhd0qMDsUa9acVjsbsT7bu3ktadtGOHI79+idTew/M= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= github.com/vjeantet/grok v1.0.1 h1:2rhIR7J4gThTgcZ1m2JY4TrJZNgjn985U28kT2wQrJ4= +github.com/vjeantet/grok v1.0.1/go.mod h1:ax1aAchzC6/QMXMcyzHQGZWaW1l195+uMYIkCWPCNIo= github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= @@ -781,6 +799,7 @@ github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgq github.com/wasilibs/go-re2 v1.3.0 h1:LFhBNzoStM3wMie6rN2slD1cuYH2CGiHpvNL3UtcsMw= github.com/wasilibs/go-re2 v1.3.0/go.mod h1:AafrCXVvGRJJOImMajgJ2M7rVmWyisVK7sFshbxnVrg= github.com/wasilibs/nottinygc v0.4.0 h1:h1TJMihMC4neN6Zq+WKpLxgd9xCFMw7O9ETLwY2exJQ= +github.com/wasilibs/nottinygc v0.4.0/go.mod h1:oDcIotskuYNMpqMF23l7Z8uzD4TC0WXHK8jetlB3HIo= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= @@ -790,6 +809,8 @@ github.com/xdg/scram v1.0.5/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49 github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= github.com/xdg/stringprep v1.0.3/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xhit/go-simple-mail/v2 v2.16.0 h1:ouGy/Ww4kuaqu2E2UrDw7SvLaziWTB60ICLkIkNVccA= +github.com/xhit/go-simple-mail/v2 v2.16.0/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -858,8 +879,6 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp 
v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 h1:k/i9J1pBpvlfR+9QsetwPyERsqu1GIbi967PQMq3Ivc= -golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1090,8 +1109,8 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4= -golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 h1:0wxTF6pSjIIhNt7mo9GvjDfzyCOiWhmICgtO/Ah948s= +golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1241,3 +1260,4 @@ sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h6 sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/pkg/acquisition/modules/cloudwatch/cloudwatch.go b/pkg/acquisition/modules/cloudwatch/cloudwatch.go index 48bbe4217..89887bef0 100644 --- a/pkg/acquisition/modules/cloudwatch/cloudwatch.go +++ b/pkg/acquisition/modules/cloudwatch/cloudwatch.go @@ -370,7 +370,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha } if cw.Config.StreamRegexp != nil { - match, err := regexp.Match(*cw.Config.StreamRegexp, []byte(newStream.StreamName)) + match, err := regexp.MatchString(*cw.Config.StreamRegexp, newStream.StreamName) if err != nil { cw.logger.Warningf("invalid regexp : %s", err) } else if !match { diff --git a/pkg/acquisition/modules/docker/docker.go b/pkg/acquisition/modules/docker/docker.go index 929626974..60f1100b3 100644 --- a/pkg/acquisition/modules/docker/docker.go +++ b/pkg/acquisition/modules/docker/docker.go @@ -392,14 +392,14 @@ func (d *DockerSource) EvalContainer(container dockerTypes.Container) (*Containe } for _, cont := range d.compiledContainerID { - if matched := cont.Match([]byte(container.ID)); matched { + if matched := cont.MatchString(container.ID); 
matched { return &ContainerConfig{ID: container.ID, Name: container.Names[0], Labels: d.Config.Labels, Tty: d.getContainerTTY(container.ID)}, true } } for _, cont := range d.compiledContainerName { for _, name := range container.Names { - if matched := cont.Match([]byte(name)); matched { + if matched := cont.MatchString(name); matched { return &ContainerConfig{ID: container.ID, Name: name, Labels: d.Config.Labels, Tty: d.getContainerTTY(container.ID)}, true } } diff --git a/pkg/acquisition/modules/docker/docker_test.go b/pkg/acquisition/modules/docker/docker_test.go index 7d4d938d1..3c3eeefe6 100644 --- a/pkg/acquisition/modules/docker/docker_test.go +++ b/pkg/acquisition/modules/docker/docker_test.go @@ -193,7 +193,7 @@ container_name_regexp: actualLines++ ticker.Reset(1 * time.Second) case <-ticker.C: - log.Infof("no more line to read") + log.Infof("no more lines to read") dockerSource.t.Kill(nil) return nil } diff --git a/pkg/acquisition/modules/file/file_test.go b/pkg/acquisition/modules/file/file_test.go index 3b39cf6bd..410beb4bc 100644 --- a/pkg/acquisition/modules/file/file_test.go +++ b/pkg/acquisition/modules/file/file_test.go @@ -410,9 +410,7 @@ force_inotify: true`, testPattern), if tc.expectedLines != 0 { fd, err := os.Create("test_files/stream.log") - if err != nil { - t.Fatalf("could not create test file : %s", err) - } + require.NoError(t, err, "could not create test file") for i := 0; i < 5; i++ { _, err = fmt.Fprintf(fd, "%d\n", i) @@ -424,7 +422,7 @@ force_inotify: true`, testPattern), fd.Close() // we sleep to make sure we detect the new file - time.Sleep(1 * time.Second) + time.Sleep(3 * time.Second) os.Remove("test_files/stream.log") assert.Equal(t, tc.expectedLines, actualLines) } diff --git a/pkg/acquisition/modules/kafka/kafka.go b/pkg/acquisition/modules/kafka/kafka.go index 28ed8cd16..f825a924c 100644 --- a/pkg/acquisition/modules/kafka/kafka.go +++ b/pkg/acquisition/modules/kafka/kafka.go @@ -149,7 +149,9 @@ func (k *KafkaSource) ReadMessage(out chan types.Event) error { return nil } k.logger.Errorln(fmt.Errorf("while reading %s message: %w", dataSourceName, err)) + continue } + k.logger.Tracef("got message: %s", string(m.Value)) l := types.Line{ Raw: string(m.Value), Labels: k.Config.Labels, @@ -223,7 +225,6 @@ func (kc *KafkaConfiguration) NewTLSConfig() (*tls.Config, error) { caCertPool.AppendCertsFromPEM(caCert) tlsConfig.RootCAs = caCertPool - tlsConfig.BuildNameToCertificate() return &tlsConfig, err } diff --git a/pkg/alertcontext/alertcontext.go b/pkg/alertcontext/alertcontext.go index 29c13e3f3..8d305302c 100644 --- a/pkg/alertcontext/alertcontext.go +++ b/pkg/alertcontext/alertcontext.go @@ -3,12 +3,12 @@ package alertcontext import ( "encoding/json" "fmt" + "slices" "strconv" "github.com/antonmedv/expr" "github.com/antonmedv/expr/vm" log "github.com/sirupsen/logrus" - "golang.org/x/exp/slices" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" "github.com/crowdsecurity/crowdsec/pkg/models" diff --git a/pkg/apiclient/auth.go b/pkg/apiclient/auth.go index 84df74456..96230b910 100644 --- a/pkg/apiclient/auth.go +++ b/pkg/apiclient/auth.go @@ -96,10 +96,16 @@ func (r retryRoundTripper) ShouldRetry(statusCode int) bool { func (r retryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { var resp *http.Response var err error + backoff := 0 - for i := 0; i < r.maxAttempts; i++ { + maxAttempts := r.maxAttempts + if fflag.DisableHttpRetryBackoff.IsEnabled() { + maxAttempts = 1 + } + + for i := 0; i < maxAttempts; i++ { if i > 0 { - if 
r.withBackOff && !fflag.DisableHttpRetryBackoff.IsEnabled() { + if r.withBackOff { backoff += 10 + rand.Intn(20) } log.Infof("retrying in %d seconds (attempt %d of %d)", backoff, i+1, r.maxAttempts) @@ -115,7 +121,10 @@ func (r retryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) clonedReq := cloneRequest(req) resp, err = r.next.RoundTrip(clonedReq) if err != nil { - log.Errorf("error while performing request: %s; %d retries left", err, r.maxAttempts-i-1) + left := maxAttempts - i - 1 + if left > 0 { + log.Errorf("error while performing request: %s; %d retries left", err, left) + } continue } if !r.ShouldRetry(resp.StatusCode) { @@ -264,7 +273,9 @@ func (t *JWTTransport) RoundTrip(req *http.Request) (*http.Response, error) { return resp, fmt.Errorf("performing jwt auth: %w", err) } - log.Debugf("resp-jwt: %d", resp.StatusCode) + if resp != nil { + log.Debugf("resp-jwt: %d", resp.StatusCode) + } return resp, nil } diff --git a/pkg/apiserver/apic.go b/pkg/apiserver/apic.go index ff45e2a8b..52849dc09 100644 --- a/pkg/apiserver/apic.go +++ b/pkg/apiserver/apic.go @@ -7,6 +7,7 @@ import ( "net" "net/http" "net/url" + "slices" "strconv" "strings" "sync" @@ -15,7 +16,6 @@ import ( "github.com/go-openapi/strfmt" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "golang.org/x/exp/slices" "gopkg.in/tomb.v2" "github.com/crowdsecurity/go-cs-lib/ptr" @@ -43,8 +43,6 @@ const ( metricsIntervalDelta = time.Minute * 15 ) -var SCOPE_CAPI_ALIAS_ALIAS string = "crowdsecurity/community-blocklist" //we don't use "CAPI" directly, to make it less confusing for the user - type apic struct { // when changing the intervals in tests, always set *First too // or they can be negative @@ -620,59 +618,57 @@ func (a *apic) PullTop(forcePull bool) error { return nil } +// if decisions is whitelisted: return representation of the whitelist ip or cidr +// if not whitelisted: empty string +func (a *apic) whitelistedBy(decision *models.Decision) string { + if decision.Value == nil { + return "" + } + ipval := net.ParseIP(*decision.Value) + for _, cidr := range a.whitelists.Cidrs { + if cidr.Contains(ipval) { + return cidr.String() + } + } + for _, ip := range a.whitelists.Ips { + if ip != nil && ip.Equal(ipval) { + return ip.String() + } + } + return "" +} + func (a *apic) ApplyApicWhitelists(decisions []*models.Decision) []*models.Decision { - if a.whitelists == nil { + if a.whitelists == nil || len(a.whitelists.Cidrs) == 0 && len(a.whitelists.Ips) == 0 { return decisions } //deal with CAPI whitelists for fire. 
We want to avoid having a second list, so we shrink in place outIdx := 0 for _, decision := range decisions { - if decision.Value == nil { + whitelister := a.whitelistedBy(decision) + if whitelister != "" { + log.Infof("%s from %s is whitelisted by %s", *decision.Value, *decision.Scenario, whitelister) continue } - skip := false - ipval := net.ParseIP(*decision.Value) - for _, cidr := range a.whitelists.Cidrs { - if skip { - break - } - if cidr.Contains(ipval) { - log.Infof("%s from %s is whitelisted by %s", *decision.Value, *decision.Scenario, cidr.String()) - skip = true - } - } - for _, ip := range a.whitelists.Ips { - if skip { - break - } - if ip != nil && ip.Equal(ipval) { - log.Infof("%s from %s is whitelisted by %s", *decision.Value, *decision.Scenario, ip.String()) - skip = true - } - } - if !skip { - decisions[outIdx] = decision - outIdx++ - } - + decisions[outIdx] = decision + outIdx++ } //shrink the list, those are deleted items - decisions = decisions[:outIdx] - return decisions + return decisions[:outIdx] } func (a *apic) SaveAlerts(alertsFromCapi []*models.Alert, add_counters map[string]map[string]int, delete_counters map[string]map[string]int) error { - for idx, alert := range alertsFromCapi { - alertsFromCapi[idx] = setAlertScenario(add_counters, delete_counters, alert) - log.Debugf("%s has %d decisions", *alertsFromCapi[idx].Source.Scope, len(alertsFromCapi[idx].Decisions)) + for _, alert := range alertsFromCapi { + setAlertScenario(alert, add_counters, delete_counters) + log.Debugf("%s has %d decisions", *alert.Source.Scope, len(alert.Decisions)) if a.dbClient.Type == "sqlite" && (a.dbClient.WalMode == nil || !*a.dbClient.WalMode) { log.Warningf("sqlite is not using WAL mode, LAPI might become unresponsive when inserting the community blocklist") } - alertID, inserted, deleted, err := a.dbClient.UpdateCommunityBlocklist(alertsFromCapi[idx]) + alertID, inserted, deleted, err := a.dbClient.UpdateCommunityBlocklist(alert) if err != nil { - return fmt.Errorf("while saving alert from %s: %w", *alertsFromCapi[idx].Source.Scope, err) + return fmt.Errorf("while saving alert from %s: %w", *alert.Source.Scope, err) } - log.Printf("%s : added %d entries, deleted %d entries (alert:%d)", *alertsFromCapi[idx].Source.Scope, inserted, deleted, alertID) + log.Printf("%s : added %d entries, deleted %d entries (alert:%d)", *alert.Source.Scope, inserted, deleted, alertID) } return nil @@ -708,6 +704,60 @@ func (a *apic) ShouldForcePullBlocklist(blocklist *modelscapi.BlocklistLink) (bo return false, nil } +func (a *apic) updateBlocklist(client *apiclient.ApiClient, blocklist *modelscapi.BlocklistLink, add_counters map[string]map[string]int) error { + if blocklist.Scope == nil { + log.Warningf("blocklist has no scope") + return nil + } + if blocklist.Duration == nil { + log.Warningf("blocklist has no duration") + return nil + } + forcePull, err := a.ShouldForcePullBlocklist(blocklist) + if err != nil { + return fmt.Errorf("while checking if we should force pull blocklist %s: %w", *blocklist.Name, err) + } + blocklistConfigItemName := fmt.Sprintf("blocklist:%s:last_pull", *blocklist.Name) + var lastPullTimestamp *string + if !forcePull { + lastPullTimestamp, err = a.dbClient.GetConfigItem(blocklistConfigItemName) + if err != nil { + return fmt.Errorf("while getting last pull timestamp for blocklist %s: %w", *blocklist.Name, err) + } + } + decisions, hasChanged, err := client.Decisions.GetDecisionsFromBlocklist(context.Background(), blocklist, lastPullTimestamp) + if err != nil { + return 
fmt.Errorf("while getting decisions from blocklist %s: %w", *blocklist.Name, err) + } + if !hasChanged { + if lastPullTimestamp == nil { + log.Infof("blocklist %s hasn't been modified or there was an error reading it, skipping", *blocklist.Name) + } else { + log.Infof("blocklist %s hasn't been modified since %s, skipping", *blocklist.Name, *lastPullTimestamp) + } + return nil + } + err = a.dbClient.SetConfigItem(blocklistConfigItemName, time.Now().UTC().Format(http.TimeFormat)) + if err != nil { + return fmt.Errorf("while setting last pull timestamp for blocklist %s: %w", *blocklist.Name, err) + } + if len(decisions) == 0 { + log.Infof("blocklist %s has no decisions", *blocklist.Name) + return nil + } + //apply APIC specific whitelists + decisions = a.ApplyApicWhitelists(decisions) + alert := createAlertForDecision(decisions[0]) + alertsFromCapi := []*models.Alert{alert} + alertsFromCapi = fillAlertsWithDecisions(alertsFromCapi, decisions, add_counters) + + err = a.SaveAlerts(alertsFromCapi, add_counters, nil) + if err != nil { + return fmt.Errorf("while saving alert from blocklist %s: %w", *blocklist.Name, err) + } + return nil +} + func (a *apic) UpdateBlocklists(links *modelscapi.GetDecisionsStreamResponseLinks, add_counters map[string]map[string]int) error { if links == nil { return nil @@ -722,69 +772,21 @@ func (a *apic) UpdateBlocklists(links *modelscapi.GetDecisionsStreamResponseLink return fmt.Errorf("while creating default client: %w", err) } for _, blocklist := range links.Blocklists { - if blocklist.Scope == nil { - log.Warningf("blocklist has no scope") - continue - } - if blocklist.Duration == nil { - log.Warningf("blocklist has no duration") - continue - } - forcePull, err := a.ShouldForcePullBlocklist(blocklist) - if err != nil { - return fmt.Errorf("while checking if we should force pull blocklist %s: %w", *blocklist.Name, err) - } - blocklistConfigItemName := fmt.Sprintf("blocklist:%s:last_pull", *blocklist.Name) - var lastPullTimestamp *string - if !forcePull { - lastPullTimestamp, err = a.dbClient.GetConfigItem(blocklistConfigItemName) - if err != nil { - return fmt.Errorf("while getting last pull timestamp for blocklist %s: %w", *blocklist.Name, err) - } - } - decisions, has_changed, err := defaultClient.Decisions.GetDecisionsFromBlocklist(context.Background(), blocklist, lastPullTimestamp) - if err != nil { - return fmt.Errorf("while getting decisions from blocklist %s: %w", *blocklist.Name, err) - } - if !has_changed { - if lastPullTimestamp == nil { - log.Infof("blocklist %s hasn't been modified or there was an error reading it, skipping", *blocklist.Name) - } else { - log.Infof("blocklist %s hasn't been modified since %s, skipping", *blocklist.Name, *lastPullTimestamp) - } - continue - } - err = a.dbClient.SetConfigItem(blocklistConfigItemName, time.Now().UTC().Format(http.TimeFormat)) - if err != nil { - return fmt.Errorf("while setting last pull timestamp for blocklist %s: %w", *blocklist.Name, err) - } - if len(decisions) == 0 { - log.Infof("blocklist %s has no decisions", *blocklist.Name) - continue - } - //apply APIC specific whitelists - decisions = a.ApplyApicWhitelists(decisions) - alert := createAlertForDecision(decisions[0]) - alertsFromCapi := []*models.Alert{alert} - alertsFromCapi = fillAlertsWithDecisions(alertsFromCapi, decisions, add_counters) - - err = a.SaveAlerts(alertsFromCapi, add_counters, nil) - if err != nil { - return fmt.Errorf("while saving alert from blocklist %s: %w", *blocklist.Name, err) + if err := a.updateBlocklist(defaultClient, 
blocklist, add_counters); err != nil { + return err } } return nil } -func setAlertScenario(add_counters map[string]map[string]int, delete_counters map[string]map[string]int, alert *models.Alert) *models.Alert { +func setAlertScenario(alert *models.Alert, add_counters map[string]map[string]int, delete_counters map[string]map[string]int) { if *alert.Source.Scope == types.CAPIOrigin { - *alert.Source.Scope = SCOPE_CAPI_ALIAS_ALIAS + *alert.Source.Scope = types.CommunityBlocklistPullSourceScope alert.Scenario = ptr.Of(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.CAPIOrigin]["all"], delete_counters[types.CAPIOrigin]["all"])) } else if *alert.Source.Scope == types.ListOrigin { *alert.Source.Scope = fmt.Sprintf("%s:%s", types.ListOrigin, *alert.Scenario) alert.Scenario = ptr.Of(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.ListOrigin][*alert.Scenario], delete_counters[types.ListOrigin][*alert.Scenario])) } - return alert } func (a *apic) Pull() error { diff --git a/pkg/apiserver/apic_metrics.go b/pkg/apiserver/apic_metrics.go index cfaf79147..1e85cb06a 100644 --- a/pkg/apiserver/apic_metrics.go +++ b/pkg/apiserver/apic_metrics.go @@ -2,10 +2,10 @@ package apiserver import ( "context" + "slices" "time" log "github.com/sirupsen/logrus" - "golang.org/x/exp/slices" "github.com/crowdsecurity/go-cs-lib/ptr" "github.com/crowdsecurity/go-cs-lib/trace" @@ -83,7 +83,7 @@ func (a *apic) SendMetrics(stop chan (bool)) { // intervals must always be > 0 metInts := []time.Duration{1*time.Millisecond, a.metricsIntervalFirst, a.metricsInterval} - log.Infof("Start send metrics to CrowdSec Central API (interval: %s once, then %s)", + log.Infof("Start sending metrics to CrowdSec Central API (interval: %s once, then %s)", metInts[1].Round(time.Second), metInts[2]) count := -1 @@ -129,12 +129,15 @@ func (a *apic) SendMetrics(stop chan (bool)) { metTicker.Stop() metrics, err := a.GetMetrics() if err != nil { - log.Errorf("unable to get metrics (%s), will retry", err) + log.Errorf("unable to get metrics (%s)", err) } - log.Info("capi metrics: sending") - _, _, err = a.apiClient.Metrics.Add(context.Background(), metrics) - if err != nil { - log.Errorf("capi metrics: failed: %s", err) + // metrics are nil if they could not be retrieved + if metrics != nil { + log.Info("capi metrics: sending") + _, _, err = a.apiClient.Metrics.Add(context.Background(), metrics) + if err != nil { + log.Errorf("capi metrics: failed: %s", err) + } } metTicker.Reset(nextMetInt()) case <-a.metricsTomb.Dying(): // if one apic routine is dying, do we kill the others? 
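The `ApplyApicWhitelists` refactor in the apic.go hunks above moves the membership test into a `whitelistedBy` helper and filters the decision slice in place with a write index, so no second list is allocated. Below is a minimal standalone sketch of that shrink-in-place pattern; it uses plain string addresses and hypothetical `whitelistedBy`/`filterInPlace` helpers rather than the real `models.Decision` plumbing, so treat it as an illustration of the technique, not the actual CrowdSec code.

```go
package main

import (
	"fmt"
	"net"
)

// whitelistedBy returns the matching CIDR as a string, or "" when the
// address is not covered by any whitelist entry (mirrors the helper above).
func whitelistedBy(addr string, cidrs []*net.IPNet) string {
	ip := net.ParseIP(addr)
	for _, cidr := range cidrs {
		if cidr.Contains(ip) {
			return cidr.String()
		}
	}
	return ""
}

// filterInPlace keeps only non-whitelisted addresses, shrinking the slice
// in place so no second list is allocated.
func filterInPlace(addrs []string, cidrs []*net.IPNet) []string {
	outIdx := 0
	for _, a := range addrs {
		if by := whitelistedBy(a, cidrs); by != "" {
			fmt.Printf("%s is whitelisted by %s\n", a, by)
			continue
		}
		addrs[outIdx] = a
		outIdx++
	}
	return addrs[:outIdx]
}

func main() {
	_, private, _ := net.ParseCIDR("10.0.0.0/8")
	fmt.Println(filterInPlace([]string{"10.1.2.3", "192.0.2.1"}, []*net.IPNet{private}))
}
```

Reusing the backing array this way keeps ordering and avoids an extra allocation, at the cost of mutating the input slice; that trade-off is acceptable in the refactor above because the caller only uses the returned, truncated slice.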
diff --git a/pkg/apiserver/apic_test.go b/pkg/apiserver/apic_test.go index b7163a342..4aa23ffb6 100644 --- a/pkg/apiserver/apic_test.go +++ b/pkg/apiserver/apic_test.go @@ -689,7 +689,7 @@ func TestAPICWhitelists(t *testing.T) { alertScenario[alert.SourceScope]++ } assert.Equal(t, 3, len(alertScenario)) - assert.Equal(t, 1, alertScenario[SCOPE_CAPI_ALIAS_ALIAS]) + assert.Equal(t, 1, alertScenario[types.CommunityBlocklistPullSourceScope]) assert.Equal(t, 1, alertScenario["lists:blocklist1"]) assert.Equal(t, 1, alertScenario["lists:blocklist2"]) @@ -818,7 +818,7 @@ func TestAPICPullTop(t *testing.T) { alertScenario[alert.SourceScope]++ } assert.Equal(t, 3, len(alertScenario)) - assert.Equal(t, 1, alertScenario[SCOPE_CAPI_ALIAS_ALIAS]) + assert.Equal(t, 1, alertScenario[types.CommunityBlocklistPullSourceScope]) assert.Equal(t, 1, alertScenario["lists:blocklist1"]) assert.Equal(t, 1, alertScenario["lists:blocklist2"]) diff --git a/pkg/apiserver/jwt_test.go b/pkg/apiserver/jwt_test.go index ebca91252..e5c3529cc 100644 --- a/pkg/apiserver/jwt_test.go +++ b/pkg/apiserver/jwt_test.go @@ -55,7 +55,7 @@ func TestLogin(t *testing.T) { router.ServeHTTP(w, req) assert.Equal(t, 401, w.Code) - assert.Equal(t, "{\"code\":401,\"message\":\"input format error\"}", w.Body.String()) + assert.Equal(t, "{\"code\":401,\"message\":\"validation failure list:\\npassword in body is required\"}", w.Body.String()) //Validate machine err = ValidateMachine("test", config.API.Server.DbConfig) diff --git a/pkg/apiserver/middlewares/v1/api_key.go b/pkg/apiserver/middlewares/v1/api_key.go index ce1bc8eee..207f35fc4 100644 --- a/pkg/apiserver/middlewares/v1/api_key.go +++ b/pkg/apiserver/middlewares/v1/api_key.go @@ -33,7 +33,9 @@ func GenerateAPIKey(n int) (string, error) { if _, err := rand.Read(bytes); err != nil { return "", err } - return base64.StdEncoding.EncodeToString(bytes), nil + encoded := base64.StdEncoding.EncodeToString(bytes) + // the '=' can cause issues on some bouncers + return strings.TrimRight(encoded, "="), nil } func NewAPIKey(dbClient *database.Client) *APIKey { diff --git a/pkg/apiserver/middlewares/v1/jwt.go b/pkg/apiserver/middlewares/v1/jwt.go index bbd33c544..22c171c63 100644 --- a/pkg/apiserver/middlewares/v1/jwt.go +++ b/pkg/apiserver/middlewares/v1/jwt.go @@ -2,6 +2,7 @@ package v1 import ( "crypto/rand" + "errors" "fmt" "net/http" "os" @@ -16,7 +17,6 @@ import ( "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/gin-gonic/gin" "github.com/go-openapi/strfmt" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "golang.org/x/crypto/bcrypt" ) @@ -46,142 +46,176 @@ func IdentityHandler(c *gin.Context) interface{} { } } -func (j *JWT) Authenticator(c *gin.Context) (interface{}, error) { - var loginInput models.WatcherAuthRequest - var scenarios string - var err error - var scenariosInput []string - var clientMachine *ent.Machine - var machineID string - if c.Request.TLS != nil && len(c.Request.TLS.PeerCertificates) > 0 { - if j.TlsAuth == nil { - c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) - c.Abort() - return nil, errors.New("TLS auth is not configured") - } - validCert, extractedCN, err := j.TlsAuth.ValidateCert(c) - if err != nil { - log.Error(err) - c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) - c.Abort() - return nil, errors.Wrap(err, "while trying to validate client cert") - } - if !validCert { - c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) - c.Abort() - return nil, fmt.Errorf("failed cert authentication") - } - 
machineID = fmt.Sprintf("%s@%s", extractedCN, c.ClientIP()) - clientMachine, err = j.DbClient.Ent.Machine.Query(). - Where(machine.MachineId(machineID)). - First(j.DbClient.CTX) - if ent.IsNotFound(err) { - //Machine was not found, let's create it - log.Printf("machine %s not found, create it", machineID) - //let's use an apikey as the password, doesn't matter in this case (generatePassword is only available in cscli) - pwd, err := GenerateAPIKey(dummyAPIKeySize) - if err != nil { - log.WithFields(log.Fields{ - "ip": c.ClientIP(), - "cn": extractedCN, - }).Errorf("error generating password: %s", err) - return nil, fmt.Errorf("error generating password") - } - password := strfmt.Password(pwd) - clientMachine, err = j.DbClient.CreateMachine(&machineID, &password, "", true, true, types.TlsAuthType) - if err != nil { - return "", errors.Wrapf(err, "while creating machine entry for %s", machineID) - } - } else if err != nil { - return "", errors.Wrapf(err, "while selecting machine entry for %s", machineID) - } else { - if clientMachine.AuthType != types.TlsAuthType { - return "", errors.Errorf("machine %s attempted to auth with TLS cert but it is configured to use %s", machineID, clientMachine.AuthType) - } - machineID = clientMachine.MachineId - loginInput := struct { - Scenarios []string `json:"scenarios"` - }{ - Scenarios: []string{}, - } - err := c.ShouldBindJSON(&loginInput) - if err != nil { - return "", errors.Wrap(err, "missing scenarios list in login request for TLS auth") - } - scenariosInput = loginInput.Scenarios - } +type authInput struct { + machineID string + clientMachine *ent.Machine + scenariosInput []string +} - } else { - //normal auth - if err := c.ShouldBindJSON(&loginInput); err != nil { - return "", errors.Wrap(err, "missing") - } - if err := loginInput.Validate(strfmt.Default); err != nil { - return "", errors.New("input format error") - } - machineID = *loginInput.MachineID - password := *loginInput.Password - scenariosInput = loginInput.Scenarios - clientMachine, err = j.DbClient.Ent.Machine.Query(). - Where(machine.MachineId(machineID)). 
- First(j.DbClient.CTX) - if err != nil { - log.Printf("Error machine login for %s : %+v ", machineID, err) - return nil, err - } +func (j *JWT) authTLS(c *gin.Context) (*authInput, error) { + ret := authInput{} - if clientMachine == nil { - log.Errorf("Nothing for '%s'", machineID) - return nil, jwt.ErrFailedAuthentication - } - - if clientMachine.AuthType != types.PasswordAuthType { - return nil, errors.Errorf("machine %s attempted to auth with password but it is configured to use %s", machineID, clientMachine.AuthType) - } - - if !clientMachine.IsValidated { - return nil, fmt.Errorf("machine %s not validated", machineID) - } - - if err = bcrypt.CompareHashAndPassword([]byte(clientMachine.Password), []byte(password)); err != nil { - return nil, jwt.ErrFailedAuthentication - } - - //end of normal auth + if j.TlsAuth == nil { + c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) + c.Abort() + return nil, errors.New("TLS auth is not configured") } - if len(scenariosInput) > 0 { - for _, scenario := range scenariosInput { + validCert, extractedCN, err := j.TlsAuth.ValidateCert(c) + if err != nil { + log.Error(err) + c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) + c.Abort() + return nil, fmt.Errorf("while trying to validate client cert: %w", err) + } + + if !validCert { + c.JSON(http.StatusForbidden, gin.H{"message": "access forbidden"}) + c.Abort() + return nil, fmt.Errorf("failed cert authentication") + } + + ret.machineID = fmt.Sprintf("%s@%s", extractedCN, c.ClientIP()) + ret.clientMachine, err = j.DbClient.Ent.Machine.Query(). + Where(machine.MachineId(ret.machineID)). + First(j.DbClient.CTX) + if ent.IsNotFound(err) { + //Machine was not found, let's create it + log.Infof("machine %s not found, create it", ret.machineID) + //let's use an apikey as the password, doesn't matter in this case (generatePassword is only available in cscli) + pwd, err := GenerateAPIKey(dummyAPIKeySize) + if err != nil { + log.WithFields(log.Fields{ + "ip": c.ClientIP(), + "cn": extractedCN, + }).Errorf("error generating password: %s", err) + return nil, fmt.Errorf("error generating password") + } + password := strfmt.Password(pwd) + ret.clientMachine, err = j.DbClient.CreateMachine(&ret.machineID, &password, "", true, true, types.TlsAuthType) + if err != nil { + return nil, fmt.Errorf("while creating machine entry for %s: %w", ret.machineID, err) + } + } else if err != nil { + return nil, fmt.Errorf("while selecting machine entry for %s: %w", ret.machineID, err) + } else { + if ret.clientMachine.AuthType != types.TlsAuthType { + return nil, fmt.Errorf("machine %s attempted to auth with TLS cert but it is configured to use %s", ret.machineID, ret.clientMachine.AuthType) + } + ret.machineID = ret.clientMachine.MachineId + } + + loginInput := struct { + Scenarios []string `json:"scenarios"` + }{ + Scenarios: []string{}, + } + err = c.ShouldBindJSON(&loginInput) + if err != nil { + return nil, fmt.Errorf("missing scenarios list in login request for TLS auth: %w", err) + } + ret.scenariosInput = loginInput.Scenarios + + return &ret, nil +} + + + +func (j *JWT) authPlain(c *gin.Context) (*authInput, error) { + var loginInput models.WatcherAuthRequest + var err error + + ret := authInput{} + + if err = c.ShouldBindJSON(&loginInput); err != nil { + return nil, fmt.Errorf("missing: %w", err) + } + if err = loginInput.Validate(strfmt.Default); err != nil { + return nil, err + } + ret.machineID = *loginInput.MachineID + password := *loginInput.Password + ret.scenariosInput = 
loginInput.Scenarios + + ret.clientMachine, err = j.DbClient.Ent.Machine.Query(). + Where(machine.MachineId(ret.machineID)). + First(j.DbClient.CTX) + if err != nil { + log.Infof("Error machine login for %s : %+v ", ret.machineID, err) + return nil, err + } + + if ret.clientMachine == nil { + log.Errorf("Nothing for '%s'", ret.machineID) + return nil, jwt.ErrFailedAuthentication + } + + if ret.clientMachine.AuthType != types.PasswordAuthType { + return nil, fmt.Errorf("machine %s attempted to auth with password but it is configured to use %s", ret.machineID, ret.clientMachine.AuthType) + } + + if !ret.clientMachine.IsValidated { + return nil, fmt.Errorf("machine %s not validated", ret.machineID) + } + + if err := bcrypt.CompareHashAndPassword([]byte(ret.clientMachine.Password), []byte(password)); err != nil { + return nil, jwt.ErrFailedAuthentication + } + + return &ret, nil +} + + +func (j *JWT) Authenticator(c *gin.Context) (interface{}, error) { + var err error + var auth *authInput + + if c.Request.TLS != nil && len(c.Request.TLS.PeerCertificates) > 0 { + auth, err = j.authTLS(c) + if err != nil { + return nil, err + } + } else { + auth, err = j.authPlain(c) + if err != nil { + return nil, err + } + } + + var scenarios string + + if len(auth.scenariosInput) > 0 { + for _, scenario := range auth.scenariosInput { if scenarios == "" { scenarios = scenario } else { scenarios += "," + scenario } } - err = j.DbClient.UpdateMachineScenarios(scenarios, clientMachine.ID) + err = j.DbClient.UpdateMachineScenarios(scenarios, auth.clientMachine.ID) if err != nil { - log.Errorf("Failed to update scenarios list for '%s': %s\n", machineID, err) + log.Errorf("Failed to update scenarios list for '%s': %s\n", auth.machineID, err) return nil, jwt.ErrFailedAuthentication } } - if clientMachine.IpAddress == "" { - err = j.DbClient.UpdateMachineIP(c.ClientIP(), clientMachine.ID) + if auth.clientMachine.IpAddress == "" { + err = j.DbClient.UpdateMachineIP(c.ClientIP(), auth.clientMachine.ID) if err != nil { - log.Errorf("Failed to update ip address for '%s': %s\n", machineID, err) + log.Errorf("Failed to update ip address for '%s': %s\n", auth.machineID, err) return nil, jwt.ErrFailedAuthentication } } - if clientMachine.IpAddress != c.ClientIP() && clientMachine.IpAddress != "" { - log.Warningf("new IP address detected for machine '%s': %s (old: %s)", clientMachine.MachineId, c.ClientIP(), clientMachine.IpAddress) - err = j.DbClient.UpdateMachineIP(c.ClientIP(), clientMachine.ID) + if auth.clientMachine.IpAddress != c.ClientIP() && auth.clientMachine.IpAddress != "" { + log.Warningf("new IP address detected for machine '%s': %s (old: %s)", auth.clientMachine.MachineId, c.ClientIP(), auth.clientMachine.IpAddress) + err = j.DbClient.UpdateMachineIP(c.ClientIP(), auth.clientMachine.ID) if err != nil { - log.Errorf("Failed to update ip address for '%s': %s\n", clientMachine.MachineId, err) + log.Errorf("Failed to update ip address for '%s': %s\n", auth.clientMachine.MachineId, err) return nil, jwt.ErrFailedAuthentication } } @@ -192,13 +226,13 @@ func (j *JWT) Authenticator(c *gin.Context) (interface{}, error) { return nil, jwt.ErrFailedAuthentication } - if err := j.DbClient.UpdateMachineVersion(useragent[1], clientMachine.ID); err != nil { - log.Errorf("unable to update machine '%s' version '%s': %s", clientMachine.MachineId, useragent[1], err) + if err := j.DbClient.UpdateMachineVersion(useragent[1], auth.clientMachine.ID); err != nil { + log.Errorf("unable to update machine '%s' version '%s': %s", 
auth.clientMachine.MachineId, useragent[1], err) log.Errorf("bad user agent from : %s", c.ClientIP()) return nil, jwt.ErrFailedAuthentication } return &models.WatcherAuthRequest{ - MachineID: &machineID, + MachineID: &auth.machineID, }, nil } diff --git a/pkg/csconfig/api.go b/pkg/csconfig/api.go index f8447a137..bbe2e1622 100644 --- a/pkg/csconfig/api.go +++ b/pkg/csconfig/api.go @@ -3,7 +3,9 @@ package csconfig import ( "crypto/tls" "crypto/x509" + "errors" "fmt" + "io" "net" "os" "strings" @@ -56,7 +58,6 @@ type CTICfg struct { } func (a *CTICfg) Load() error { - if a.Key == nil { *a.Enabled = false } @@ -331,40 +332,59 @@ type capiWhitelists struct { Cidrs []string `yaml:"cidrs"` } +func parseCapiWhitelists(fd io.Reader) (*CapiWhitelist, error) { + fromCfg := capiWhitelists{} + + decoder := yaml.NewDecoder(fd) + if err := decoder.Decode(&fromCfg); err != nil { + if errors.Is(err, io.EOF) { + return nil, fmt.Errorf("empty file") + } + return nil, err + } + ret := &CapiWhitelist{ + Ips: make([]net.IP, len(fromCfg.Ips)), + Cidrs: make([]*net.IPNet, len(fromCfg.Cidrs)), + } + for idx, v := range fromCfg.Ips { + ip := net.ParseIP(v) + if ip == nil { + return nil, fmt.Errorf("invalid IP address: %s", v) + } + ret.Ips[idx] = ip + } + for idx, v := range fromCfg.Cidrs { + _, tnet, err := net.ParseCIDR(v) + if err != nil { + return nil, err + } + ret.Cidrs[idx] = tnet + } + + return ret, nil +} + func (s *LocalApiServerCfg) LoadCapiWhitelists() error { if s.CapiWhitelistsPath == "" { return nil } + if _, err := os.Stat(s.CapiWhitelistsPath); os.IsNotExist(err) { return fmt.Errorf("capi whitelist file '%s' does not exist", s.CapiWhitelistsPath) } + fd, err := os.Open(s.CapiWhitelistsPath) if err != nil { - return fmt.Errorf("unable to open capi whitelist file '%s': %s", s.CapiWhitelistsPath, err) + return fmt.Errorf("while opening capi whitelist file: %s", err) } - var fromCfg capiWhitelists - s.CapiWhitelists = &CapiWhitelist{} - defer fd.Close() - decoder := yaml.NewDecoder(fd) - if err := decoder.Decode(&fromCfg); err != nil { - return fmt.Errorf("while parsing capi whitelist file '%s': %s", s.CapiWhitelistsPath, err) - } - for _, v := range fromCfg.Ips { - ip := net.ParseIP(v) - if ip == nil { - return fmt.Errorf("unable to parse ip whitelist '%s'", v) - } - s.CapiWhitelists.Ips = append(s.CapiWhitelists.Ips, ip) - } - for _, v := range fromCfg.Cidrs { - _, tnet, err := net.ParseCIDR(v) - if err != nil { - return fmt.Errorf("unable to parse cidr whitelist '%s' : %v", v, err) - } - s.CapiWhitelists.Cidrs = append(s.CapiWhitelists.Cidrs, tnet) + + s.CapiWhitelists, err = parseCapiWhitelists(fd) + if err != nil { + return fmt.Errorf("while parsing capi whitelist file '%s': %w", s.CapiWhitelistsPath, err) } + return nil } diff --git a/pkg/csconfig/api_test.go b/pkg/csconfig/api_test.go index de196123f..4338de9c1 100644 --- a/pkg/csconfig/api_test.go +++ b/pkg/csconfig/api_test.go @@ -1,7 +1,7 @@ package csconfig import ( - "fmt" + "net" "os" "path/filepath" "strings" @@ -9,6 +9,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" "github.com/crowdsecurity/go-cs-lib/cstest" @@ -25,7 +26,7 @@ func TestLoadLocalApiClientCfg(t *testing.T) { { name: "basic valid configuration", input: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", }, expected: &ApiCredentialsCfg{ URL: "http://localhost:8080/", @@ -36,7 +37,7 @@ func 
TestLoadLocalApiClientCfg(t *testing.T) { { name: "invalid configuration", input: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/bad_lapi-secrets.yaml", + CredentialsFilePath: "./testdata/bad_lapi-secrets.yaml", }, expected: &ApiCredentialsCfg{}, expectedErr: "field unknown_key not found in type csconfig.ApiCredentialsCfg", @@ -44,15 +45,15 @@ func TestLoadLocalApiClientCfg(t *testing.T) { { name: "invalid configuration filepath", input: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/nonexist_lapi-secrets.yaml", + CredentialsFilePath: "./testdata/nonexist_lapi-secrets.yaml", }, expected: nil, - expectedErr: "open ./tests/nonexist_lapi-secrets.yaml: " + cstest.FileNotFoundMessage, + expectedErr: "open ./testdata/nonexist_lapi-secrets.yaml: " + cstest.FileNotFoundMessage, }, { name: "valid configuration with insecure skip verify", input: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", InsecureSkipVerify: ptr.Of(false), }, expected: &ApiCredentialsCfg{ @@ -87,7 +88,7 @@ func TestLoadOnlineApiClientCfg(t *testing.T) { { name: "basic valid configuration", input: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/online-api-secrets.yaml", + CredentialsFilePath: "./testdata/online-api-secrets.yaml", }, expected: &ApiCredentialsCfg{ URL: "http://crowdsec.api", @@ -98,7 +99,7 @@ func TestLoadOnlineApiClientCfg(t *testing.T) { { name: "invalid configuration", input: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/bad_lapi-secrets.yaml", + CredentialsFilePath: "./testdata/bad_lapi-secrets.yaml", }, expected: &ApiCredentialsCfg{}, expectedErr: "failed unmarshaling api server credentials", @@ -106,14 +107,14 @@ func TestLoadOnlineApiClientCfg(t *testing.T) { { name: "missing field configuration", input: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/bad_online-api-secrets.yaml", + CredentialsFilePath: "./testdata/bad_online-api-secrets.yaml", }, expected: nil, }, { name: "invalid configuration filepath", input: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/nonexist_online-api-secrets.yaml", + CredentialsFilePath: "./testdata/nonexist_online-api-secrets.yaml", }, expected: &ApiCredentialsCfg{}, expectedErr: "failed to read api server credentials", @@ -136,27 +137,23 @@ func TestLoadOnlineApiClientCfg(t *testing.T) { func TestLoadAPIServer(t *testing.T) { tmpLAPI := &LocalApiServerCfg{ - ProfilesPath: "./tests/profiles.yaml", - } - if err := tmpLAPI.LoadProfiles(); err != nil { - t.Fatalf("loading tmp profiles: %+v", err) + ProfilesPath: "./testdata/profiles.yaml", } + err := tmpLAPI.LoadProfiles() + require.NoError(t, err) + + LogDirFullPath, err := filepath.Abs("./testdata") + require.NoError(t, err) - LogDirFullPath, err := filepath.Abs("./tests") - if err != nil { - t.Fatal(err) - } logLevel := log.InfoLevel config := &Config{} - fcontent, err := os.ReadFile("./tests/config.yaml") - if err != nil { - t.Fatal(err) - } + fcontent, err := os.ReadFile("./testdata/config.yaml") + require.NoError(t, err) + configData := os.ExpandEnv(string(fcontent)) err = yaml.UnmarshalStrict([]byte(configData), &config) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) + tests := []struct { name string input *Config @@ -171,18 +168,18 @@ func TestLoadAPIServer(t *testing.T) { Server: &LocalApiServerCfg{ ListenURI: "http://crowdsec.api", OnlineClient: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/online-api-secrets.yaml", + CredentialsFilePath: "./testdata/online-api-secrets.yaml", }, - 
ProfilesPath: "./tests/profiles.yaml", + ProfilesPath: "./testdata/profiles.yaml", PapiLogLevel: &logLevel, }, }, DbConfig: &DatabaseCfg{ Type: "sqlite", - DbPath: "./tests/test.db", + DbPath: "./testdata/test.db", }, Common: &CommonCfg{ - LogDir: "./tests/", + LogDir: "./testdata/", LogMedia: "stdout", }, DisableAPI: false, @@ -192,9 +189,10 @@ func TestLoadAPIServer(t *testing.T) { ListenURI: "http://crowdsec.api", TLS: nil, DbConfig: &DatabaseCfg{ - DbPath: "./tests/test.db", - Type: "sqlite", - MaxOpenConns: ptr.Of(DEFAULT_MAX_OPEN_CONNS), + DbPath: "./testdata/test.db", + Type: "sqlite", + MaxOpenConns: ptr.Of(DEFAULT_MAX_OPEN_CONNS), + DecisionBulkSize: defaultDecisionBulkSize, }, ConsoleConfigPath: DefaultConfigPath("console.yaml"), ConsoleConfig: &ConsoleConfig{ @@ -207,7 +205,7 @@ func TestLoadAPIServer(t *testing.T) { LogDir: LogDirFullPath, LogMedia: "stdout", OnlineClient: &OnlineApiClientCfg{ - CredentialsFilePath: "./tests/online-api-secrets.yaml", + CredentialsFilePath: "./testdata/online-api-secrets.yaml", Credentials: &ApiCredentialsCfg{ URL: "http://crowdsec.api", Login: "test", @@ -215,7 +213,7 @@ func TestLoadAPIServer(t *testing.T) { }, }, Profiles: tmpLAPI.Profiles, - ProfilesPath: "./tests/profiles.yaml", + ProfilesPath: "./testdata/profiles.yaml", UseForwardedForHeaders: false, PapiLogLevel: &logLevel, }, @@ -228,34 +226,91 @@ func TestLoadAPIServer(t *testing.T) { Server: &LocalApiServerCfg{}, }, Common: &CommonCfg{ - LogDir: "./tests/", + LogDir: "./testdata/", LogMedia: "stdout", }, DisableAPI: false, }, expected: &LocalApiServerCfg{ - Enable: ptr.Of(true), + Enable: ptr.Of(true), PapiLogLevel: &logLevel, }, expectedErr: "no database configuration provided", }, } - for idx, test := range tests { - err := test.input.LoadAPIServer() - if err == nil && test.expectedErr != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("Test number %d/%d expected error, didn't get it", idx+1, len(tests)) - } else if test.expectedErr != "" { - fmt.Printf("ERR: %+v\n", err) - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.expectedErr) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.expectedErr, - fmt.Sprintf("%s", err)) + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.input.LoadAPIServer() + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return } - assert.Equal(t, test.expected, test.input.API.Server) - } + assert.Equal(t, tc.expected, tc.input.API.Server) + }) + } +} + +func mustParseCIDRNet(t *testing.T, s string) *net.IPNet { + _, ipNet, err := net.ParseCIDR(s) + require.NoError(t, err) + return ipNet +} + +func TestParseCapiWhitelists(t *testing.T) { + tests := []struct { + name string + input string + expected *CapiWhitelist + expectedErr string + }{ + { + name: "empty file", + input: "", + expected: &CapiWhitelist{ + Ips: []net.IP{}, + Cidrs: []*net.IPNet{}, + }, + expectedErr: "empty file", + }, + { + name: "empty ip and cidr", + input: `{"ips": [], "cidrs": []}`, + expected: &CapiWhitelist{ + Ips: []net.IP{}, + Cidrs: []*net.IPNet{}, + }, + }, + { + name: "some ip", + input: `{"ips": ["1.2.3.4"]}`, + expected: &CapiWhitelist{ + Ips: []net.IP{net.IPv4(1, 2, 3, 4)}, + Cidrs: []*net.IPNet{}, + }, + }, + { + name: "some cidr", + input: `{"cidrs": ["1.2.3.0/24"]}`, + expected: &CapiWhitelist{ + Ips: []net.IP{}, + Cidrs: []*net.IPNet{mustParseCIDRNet(t, "1.2.3.0/24")}, + }, + }, + } + + for _, tc := range tests { + tc := tc + 
t.Run(tc.name, func(t *testing.T) { + wl, err := parseCapiWhitelists(strings.NewReader(tc.input)) + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return + } + + assert.Equal(t, tc.expected, wl) + }) } } diff --git a/pkg/csconfig/common_test.go b/pkg/csconfig/common_test.go index 5666f2d74..2c5f798a6 100644 --- a/pkg/csconfig/common_test.go +++ b/pkg/csconfig/common_test.go @@ -1,44 +1,41 @@ package csconfig import ( - "fmt" "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" ) func TestLoadCommon(t *testing.T) { - pidDirPath := "./tests" - LogDirFullPath, err := filepath.Abs("./tests/log/") - if err != nil { - t.Fatal(err) - } + pidDirPath := "./testdata" + LogDirFullPath, err := filepath.Abs("./testdata/log/") + require.NoError(t, err) - WorkingDirFullPath, err := filepath.Abs("./tests") - if err != nil { - t.Fatal(err) - } + WorkingDirFullPath, err := filepath.Abs("./testdata") + require.NoError(t, err) tests := []struct { - name string - Input *Config - expectedResult *CommonCfg - err string + name string + input *Config + expected *CommonCfg + expectedErr string }{ { name: "basic valid configuration", - Input: &Config{ + input: &Config{ Common: &CommonCfg{ Daemonize: true, - PidDir: "./tests", + PidDir: "./testdata", LogMedia: "file", - LogDir: "./tests/log/", - WorkingDir: "./tests/", + LogDir: "./testdata/log/", + WorkingDir: "./testdata/", }, }, - expectedResult: &CommonCfg{ + expected: &CommonCfg{ Daemonize: true, PidDir: pidDirPath, LogMedia: "file", @@ -48,15 +45,15 @@ func TestLoadCommon(t *testing.T) { }, { name: "empty working dir", - Input: &Config{ + input: &Config{ Common: &CommonCfg{ Daemonize: true, - PidDir: "./tests", + PidDir: "./testdata", LogMedia: "file", - LogDir: "./tests/log/", + LogDir: "./testdata/log/", }, }, - expectedResult: &CommonCfg{ + expected: &CommonCfg{ Daemonize: true, PidDir: pidDirPath, LogMedia: "file", @@ -64,31 +61,23 @@ func TestLoadCommon(t *testing.T) { }, }, { - name: "no common", - Input: &Config{}, - expectedResult: nil, + name: "no common", + input: &Config{}, + expected: nil, + expectedErr: "no common block provided in configuration file", }, } - for idx, test := range tests { - err := test.Input.LoadCommon() - if err == nil && test.err != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) - } else if test.err != "" { - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.err, - fmt.Sprintf("%s", err)) + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.input.LoadCommon() + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return } - } - isOk := assert.Equal(t, test.expectedResult, test.Input.Common) - if !isOk { - t.Fatalf("TEST '%s': NOK", test.name) - } else { - fmt.Printf("TEST '%s': OK\n", test.name) - } + assert.Equal(t, tc.expected, tc.input.Common) + }) } } diff --git a/pkg/csconfig/config.go b/pkg/csconfig/config.go index 2aa7f4144..0fa0e1d2d 100644 --- a/pkg/csconfig/config.go +++ b/pkg/csconfig/config.go @@ -1,3 +1,5 @@ +// Package csconfig contains the configuration structures for crowdsec and cscli. 
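The config.go hunk that follows drops the Config.Dump helper in favor of having callers marshal the struct themselves, which is also what the updated TestDefaultConfig now does. A minimal sketch of that pattern, using a hypothetical stand-in struct rather than the real csconfig.Config:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// stand-in for csconfig.Config, only to keep the sketch self-contained
type config struct {
	LogMedia string `yaml:"log_media,omitempty"`
	LogDir   string `yaml:"log_dir,omitempty"`
}

func main() {
	cfg := config{LogMedia: "stdout"}

	// what the removed Dump() method did internally: marshal to YAML and print
	out, err := yaml.Marshal(cfg)
	if err != nil {
		panic(fmt.Errorf("failed marshaling config: %w", err))
	}

	fmt.Print(string(out))
	// prints: log_media: stdout
}
```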
+ package csconfig import ( @@ -37,15 +39,6 @@ type Config struct { Hub *Hub `yaml:"-"` } -func (c *Config) Dump() error { - out, err := yaml.Marshal(c) - if err != nil { - return fmt.Errorf("failed marshaling config: %w", err) - } - fmt.Printf("%s", string(out)) - return nil -} - func NewConfig(configFile string, disableAgent bool, disableAPI bool, quiet bool) (*Config, string, error) { patcher := yamlpatch.NewPatcher(configFile, ".local") patcher.SetQuiet(quiet) diff --git a/pkg/csconfig/config_test.go b/pkg/csconfig/config_test.go index 53f570ab6..9bdf2da6d 100644 --- a/pkg/csconfig/config_test.go +++ b/pkg/csconfig/config_test.go @@ -5,42 +5,43 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "gopkg.in/yaml.v2" "github.com/crowdsecurity/go-cs-lib/cstest" ) func TestNormalLoad(t *testing.T) { - _, _, err := NewConfig("./tests/config.yaml", false, false, false) + _, _, err := NewConfig("./testdata/config.yaml", false, false, false) require.NoError(t, err) - _, _, err = NewConfig("./tests/xxx.yaml", false, false, false) - assert.EqualError(t, err, "while reading yaml file: open ./tests/xxx.yaml: "+cstest.FileNotFoundMessage) + _, _, err = NewConfig("./testdata/xxx.yaml", false, false, false) + assert.EqualError(t, err, "while reading yaml file: open ./testdata/xxx.yaml: "+cstest.FileNotFoundMessage) - _, _, err = NewConfig("./tests/simulation.yaml", false, false, false) - assert.EqualError(t, err, "./tests/simulation.yaml: yaml: unmarshal errors:\n line 1: field simulation not found in type csconfig.Config") + _, _, err = NewConfig("./testdata/simulation.yaml", false, false, false) + assert.EqualError(t, err, "./testdata/simulation.yaml: yaml: unmarshal errors:\n line 1: field simulation not found in type csconfig.Config") } func TestNewCrowdSecConfig(t *testing.T) { tests := []struct { - name string - expectedResult *Config + name string + expected *Config }{ { - name: "new configuration: basic", - expectedResult: &Config{}, + name: "new configuration: basic", + expected: &Config{}, }, } for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { result := &Config{} - assert.Equal(t, tc.expectedResult, result) + assert.Equal(t, tc.expected, result) }) } } func TestDefaultConfig(t *testing.T) { x := NewDefaultConfig() - err := x.Dump() - require.NoError(t, err) + _, err := yaml.Marshal(x) + require.NoError(t, err, "failed marshaling config: %s", err) } diff --git a/pkg/csconfig/console.go b/pkg/csconfig/console.go index d1326ddb4..32c4cff88 100644 --- a/pkg/csconfig/console.go +++ b/pkg/csconfig/console.go @@ -82,23 +82,3 @@ func (c *LocalApiServerCfg) LoadConsoleConfig() error { return nil } - -func (c *LocalApiServerCfg) DumpConsoleConfig() error { - var out []byte - var err error - - if out, err = yaml.Marshal(c.ConsoleConfig); err != nil { - return fmt.Errorf("while marshaling ConsoleConfig (for %s): %w", c.ConsoleConfigPath, err) - } - if c.ConsoleConfigPath == "" { - c.ConsoleConfigPath = DefaultConsoleConfigFilePath - log.Debugf("Empty console_path, defaulting to %s", c.ConsoleConfigPath) - - } - - if err := os.WriteFile(c.ConsoleConfigPath, out, 0600); err != nil { - return fmt.Errorf("while dumping console config to %s: %w", c.ConsoleConfigPath, err) - } - - return nil -} diff --git a/pkg/csconfig/crowdsec_service_test.go b/pkg/csconfig/crowdsec_service_test.go index ded6956e5..aa1d341f5 100644 --- a/pkg/csconfig/crowdsec_service_test.go +++ b/pkg/csconfig/crowdsec_service_test.go @@ -1,24 +1,23 @@ package csconfig import ( - 
"fmt" "path/filepath" "testing" + "github.com/stretchr/testify/require" + "github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/go-cs-lib/ptr" - - "github.com/stretchr/testify/require" ) func TestLoadCrowdsec(t *testing.T) { - acquisFullPath, err := filepath.Abs("./tests/acquis.yaml") + acquisFullPath, err := filepath.Abs("./testdata/acquis.yaml") require.NoError(t, err) - acquisInDirFullPath, err := filepath.Abs("./tests/acquis/acquis.yaml") + acquisInDirFullPath, err := filepath.Abs("./testdata/acquis/acquis.yaml") require.NoError(t, err) - acquisDirFullPath, err := filepath.Abs("./tests/acquis") + acquisDirFullPath, err := filepath.Abs("./testdata/acquis") require.NoError(t, err) hubFullPath, err := filepath.Abs("./hub") @@ -27,42 +26,42 @@ func TestLoadCrowdsec(t *testing.T) { dataFullPath, err := filepath.Abs("./data") require.NoError(t, err) - configDirFullPath, err := filepath.Abs("./tests") + configDirFullPath, err := filepath.Abs("./testdata") require.NoError(t, err) hubIndexFileFullPath, err := filepath.Abs("./hub/.index.json") require.NoError(t, err) - contextFileFullPath, err := filepath.Abs("./tests/context.yaml") + contextFileFullPath, err := filepath.Abs("./testdata/context.yaml") require.NoError(t, err) tests := []struct { - name string - input *Config - expectedResult *CrowdsecServiceCfg - expectedErr string + name string + input *Config + expected *CrowdsecServiceCfg + expectedErr string }{ { name: "basic valid configuration", input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", }, API: &APICfg{ Client: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", }, }, Crowdsec: &CrowdsecServiceCfg{ - AcquisitionFilePath: "./tests/acquis.yaml", - SimulationFilePath: "./tests/simulation.yaml", - ConsoleContextPath: "./tests/context.yaml", + AcquisitionFilePath: "./testdata/acquis.yaml", + SimulationFilePath: "./testdata/simulation.yaml", + ConsoleContextPath: "./testdata/context.yaml", ConsoleContextValueLength: 2500, }, }, - expectedResult: &CrowdsecServiceCfg{ + expected: &CrowdsecServiceCfg{ Enable: ptr.Of(true), AcquisitionDirPath: "", ConsoleContextPath: contextFileFullPath, @@ -76,7 +75,7 @@ func TestLoadCrowdsec(t *testing.T) { OutputRoutinesCount: 1, ConsoleContextValueLength: 2500, AcquisitionFiles: []string{acquisFullPath}, - SimulationFilePath: "./tests/simulation.yaml", + SimulationFilePath: "./testdata/simulation.yaml", ContextToSend: map[string][]string{ "source_ip": {"evt.Parsed.source_ip"}, }, @@ -89,23 +88,23 @@ func TestLoadCrowdsec(t *testing.T) { name: "basic valid configuration with acquisition dir", input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", }, API: &APICfg{ Client: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", }, }, Crowdsec: &CrowdsecServiceCfg{ - AcquisitionFilePath: "./tests/acquis.yaml", - AcquisitionDirPath: "./tests/acquis/", - SimulationFilePath: "./tests/simulation.yaml", - ConsoleContextPath: "./tests/context.yaml", + AcquisitionFilePath: "./testdata/acquis.yaml", + AcquisitionDirPath: "./testdata/acquis/", + SimulationFilePath: "./testdata/simulation.yaml", + ConsoleContextPath: "./testdata/context.yaml", }, }, - expectedResult: &CrowdsecServiceCfg{ + expected: &CrowdsecServiceCfg{ Enable: ptr.Of(true), 
AcquisitionDirPath: acquisDirFullPath, AcquisitionFilePath: acquisFullPath, @@ -122,7 +121,7 @@ func TestLoadCrowdsec(t *testing.T) { ContextToSend: map[string][]string{ "source_ip": {"evt.Parsed.source_ip"}, }, - SimulationFilePath: "./tests/simulation.yaml", + SimulationFilePath: "./testdata/simulation.yaml", SimulationConfig: &SimulationConfig{ Simulation: ptr.Of(false), }, @@ -132,13 +131,13 @@ func TestLoadCrowdsec(t *testing.T) { name: "no acquisition file and dir", input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", }, API: &APICfg{ Client: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", }, }, Crowdsec: &CrowdsecServiceCfg{ @@ -146,7 +145,7 @@ func TestLoadCrowdsec(t *testing.T) { ConsoleContextValueLength: 10, }, }, - expectedResult: &CrowdsecServiceCfg{ + expected: &CrowdsecServiceCfg{ Enable: ptr.Of(true), AcquisitionDirPath: "", AcquisitionFilePath: "", @@ -173,18 +172,18 @@ func TestLoadCrowdsec(t *testing.T) { name: "non existing acquisition file", input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", }, API: &APICfg{ Client: &LocalApiClientCfg{ - CredentialsFilePath: "./tests/lapi-secrets.yaml", + CredentialsFilePath: "./testdata/lapi-secrets.yaml", }, }, Crowdsec: &CrowdsecServiceCfg{ ConsoleContextPath: "", - AcquisitionFilePath: "./tests/acquis_not_exist.yaml", + AcquisitionFilePath: "./testdata/acquis_not_exist.yaml", }, }, expectedErr: cstest.FileNotFoundMessage, @@ -193,26 +192,25 @@ func TestLoadCrowdsec(t *testing.T) { name: "agent disabled", input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", }, }, - expectedResult: nil, + expected: nil, }, } for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { - fmt.Printf("TEST '%s'\n", tc.name) err := tc.input.LoadCrowdsec() cstest.RequireErrorContains(t, err, tc.expectedErr) if tc.expectedErr != "" { return } - require.Equal(t, tc.expectedResult, tc.input.Crowdsec) + require.Equal(t, tc.expected, tc.input.Crowdsec) }) } } diff --git a/pkg/csconfig/cscli_test.go b/pkg/csconfig/cscli_test.go index 1f432f6e3..b3d0abc6b 100644 --- a/pkg/csconfig/cscli_test.go +++ b/pkg/csconfig/cscli_test.go @@ -1,52 +1,45 @@ package csconfig import ( - "fmt" "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" ) func TestLoadCSCLI(t *testing.T) { hubFullPath, err := filepath.Abs("./hub") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) dataFullPath, err := filepath.Abs("./data") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) - configDirFullPath, err := filepath.Abs("./tests") - if err != nil { - t.Fatal(err) - } + configDirFullPath, err := filepath.Abs("./testdata") + require.NoError(t, err) hubIndexFileFullPath, err := filepath.Abs("./hub/.index.json") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) tests := []struct { - name string - Input *Config - expectedResult *CscliCfg - err string + name string + input *Config + expected *CscliCfg + expectedErr string }{ { name: "basic valid configuration", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", 
HubIndexFile: "./hub/.index.json", }, }, - expectedResult: &CscliCfg{ + expected: &CscliCfg{ ConfigDir: configDirFullPath, DataDir: dataFullPath, HubDir: hubFullPath, @@ -54,31 +47,23 @@ func TestLoadCSCLI(t *testing.T) { }, }, { - name: "no configuration path", - Input: &Config{}, - expectedResult: &CscliCfg{}, + name: "no configuration path", + input: &Config{}, + expected: &CscliCfg{}, + expectedErr: "no configuration paths provided", }, } - for idx, test := range tests { - err := test.Input.LoadCSCLI() - if err == nil && test.err != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) - } else if test.err != "" { - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.err, - fmt.Sprintf("%s", err)) + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.input.LoadCSCLI() + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return } - } - isOk := assert.Equal(t, test.expectedResult, test.Input.Cscli) - if !isOk { - t.Fatalf("TEST '%s': NOK", test.name) - } else { - fmt.Printf("TEST '%s': OK\n", test.name) - } + assert.Equal(t, tc.expected, tc.input.Cscli) + }) } } diff --git a/pkg/csconfig/database.go b/pkg/csconfig/database.go index 354f18c11..4d041c312 100644 --- a/pkg/csconfig/database.go +++ b/pkg/csconfig/database.go @@ -10,21 +10,28 @@ import ( "github.com/crowdsecurity/go-cs-lib/ptr" ) -var DEFAULT_MAX_OPEN_CONNS = 100 +const ( + DEFAULT_MAX_OPEN_CONNS = 100 + defaultDecisionBulkSize = 1000 + // we need an upper bound due to the sqlite limit of 32k variables in a query + // we have 15 variables per decision, so 32768/15 = 2184.5333 + maxDecisionBulkSize = 2000 +) type DatabaseCfg struct { - User string `yaml:"user"` - Password string `yaml:"password"` - DbName string `yaml:"db_name"` - Sslmode string `yaml:"sslmode"` - Host string `yaml:"host"` - Port int `yaml:"port"` - DbPath string `yaml:"db_path"` - Type string `yaml:"type"` - Flush *FlushDBCfg `yaml:"flush"` - LogLevel *log.Level `yaml:"log_level"` - MaxOpenConns *int `yaml:"max_open_conns,omitempty"` - UseWal *bool `yaml:"use_wal,omitempty"` + User string `yaml:"user"` + Password string `yaml:"password"` + DbName string `yaml:"db_name"` + Sslmode string `yaml:"sslmode"` + Host string `yaml:"host"` + Port int `yaml:"port"` + DbPath string `yaml:"db_path"` + Type string `yaml:"type"` + Flush *FlushDBCfg `yaml:"flush"` + LogLevel *log.Level `yaml:"log_level"` + MaxOpenConns *int `yaml:"max_open_conns,omitempty"` + UseWal *bool `yaml:"use_wal,omitempty"` + DecisionBulkSize int `yaml:"decision_bulk_size,omitempty"` } type AuthGCCfg struct { @@ -60,11 +67,20 @@ func (c *Config) LoadDBConfig() error { c.DbConfig.MaxOpenConns = ptr.Of(DEFAULT_MAX_OPEN_CONNS) } + if c.DbConfig.DecisionBulkSize == 0 { + log.Tracef("No decision_bulk_size value provided, using default value of %d", defaultDecisionBulkSize) + c.DbConfig.DecisionBulkSize = defaultDecisionBulkSize + } + + if c.DbConfig.DecisionBulkSize > maxDecisionBulkSize { + log.Warningf("decision_bulk_size too high (%d), setting to the maximum value of %d", c.DbConfig.DecisionBulkSize, maxDecisionBulkSize) + c.DbConfig.DecisionBulkSize = maxDecisionBulkSize + } + if c.DbConfig.Type == "sqlite" { if c.DbConfig.UseWal == nil { log.Warning("You are using sqlite without WAL, this can have a performance impact. 
If you do not store the database in a network share, set db_config.use_wal to true. Set explicitly to false to disable this warning.") - } return nil diff --git a/pkg/csconfig/database_test.go b/pkg/csconfig/database_test.go index 017014b04..631e63ae2 100644 --- a/pkg/csconfig/database_test.go +++ b/pkg/csconfig/database_test.go @@ -1,28 +1,27 @@ package csconfig import ( - "fmt" - "strings" "testing" "github.com/stretchr/testify/assert" + "github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/go-cs-lib/ptr" ) func TestLoadDBConfig(t *testing.T) { tests := []struct { - name string - Input *Config - expectedResult *DatabaseCfg - err string + name string + input *Config + expected *DatabaseCfg + expectedErr string }{ { name: "basic valid configuration", - Input: &Config{ + input: &Config{ DbConfig: &DatabaseCfg{ Type: "sqlite", - DbPath: "./tests/test.db", + DbPath: "./testdata/test.db", MaxOpenConns: ptr.Of(10), }, Cscli: &CscliCfg{}, @@ -30,37 +29,31 @@ func TestLoadDBConfig(t *testing.T) { Server: &LocalApiServerCfg{}, }, }, - expectedResult: &DatabaseCfg{ - Type: "sqlite", - DbPath: "./tests/test.db", - MaxOpenConns: ptr.Of(10), + expected: &DatabaseCfg{ + Type: "sqlite", + DbPath: "./testdata/test.db", + MaxOpenConns: ptr.Of(10), + DecisionBulkSize: defaultDecisionBulkSize, }, }, { - name: "no configuration path", - Input: &Config{}, - expectedResult: nil, + name: "no configuration path", + input: &Config{}, + expected: nil, + expectedErr: "no database configuration provided", }, } - for idx, test := range tests { - err := test.Input.LoadDBConfig() - if err == nil && test.err != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) - } else if test.err != "" { - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.err, - fmt.Sprintf("%s", err)) + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.input.LoadDBConfig() + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return } - } - isOk := assert.Equal(t, test.expectedResult, test.Input.DbConfig) - if !isOk { - t.Fatalf("TEST '%s': NOK", test.name) - } else { - fmt.Printf("TEST '%s': OK\n", test.name) - } + + assert.Equal(t, tc.expected, tc.input.DbConfig) + }) } } diff --git a/pkg/csconfig/fflag.go b/pkg/csconfig/fflag.go index e9110649d..7311f9e75 100644 --- a/pkg/csconfig/fflag.go +++ b/pkg/csconfig/fflag.go @@ -10,7 +10,6 @@ import ( "github.com/crowdsecurity/crowdsec/pkg/fflag" ) - // LoadFeatureFlagsEnv parses the environment variables to enable feature flags. func LoadFeatureFlagsEnv(logger *log.Logger) error { if err := fflag.Crowdsec.SetFromEnv(logger); err != nil { @@ -19,13 +18,18 @@ func LoadFeatureFlagsEnv(logger *log.Logger) error { return nil } - -// LoadFeatureFlags parses feature.yaml to enable feature flags. +// GetFeatureFilePath returns the path to the feature.yaml file. // The file is in the same directory as config.yaml, which is provided // as the first parameter. This can be different than ConfigPaths.ConfigDir -func LoadFeatureFlagsFile(configPath string, logger *log.Logger) error { +// because we have not read config.yaml yet so we don't know the value of ConfigDir.
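The helper introduced just below derives the feature file location from the config file location. A small sketch of that behavior, where the config path is a made-up example and the local function merely mirrors the one-line logic from the patch:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// featureFilePath mirrors GetFeatureFilePath: feature.yaml is expected to sit
// next to config.yaml, whatever directory that turns out to be.
func featureFilePath(configPath string) string {
	return filepath.Join(filepath.Dir(configPath), "feature.yaml")
}

func main() {
	// hypothetical config location, for illustration only
	fmt.Println(featureFilePath("/etc/crowdsec/config.yaml"))
	// typically prints: /etc/crowdsec/feature.yaml
}
```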
+func GetFeatureFilePath(configPath string) string { dir := filepath.Dir(configPath) - featurePath := filepath.Join(dir, "feature.yaml") + return filepath.Join(dir, "feature.yaml") +} + +// LoadFeatureFlags parses feature.yaml to enable feature flags. +func LoadFeatureFlagsFile(configPath string, logger *log.Logger) error { + featurePath := GetFeatureFilePath(configPath) if err := fflag.Crowdsec.SetFromYamlFile(featurePath, logger); err != nil { return fmt.Errorf("file %s: %s", featurePath, err) @@ -33,7 +37,6 @@ func LoadFeatureFlagsFile(configPath string, logger *log.Logger) error { return nil } - // ListFeatureFlags returns a list of the enabled feature flags. func ListFeatureFlags() string { enabledFeatures := fflag.Crowdsec.GetEnabledFeatures() diff --git a/pkg/csconfig/hub.go b/pkg/csconfig/hub.go index eb3bd7c42..4c3c610aa 100644 --- a/pkg/csconfig/hub.go +++ b/pkg/csconfig/hub.go @@ -2,10 +2,10 @@ package csconfig /*cscli specific config, such as hub directory*/ type Hub struct { - HubDir string `yaml:"-"` - ConfigDir string `yaml:"-"` - HubIndexFile string `yaml:"-"` - DataDir string `yaml:"-"` + HubIndexFile string + HubDir string + InstallDir string + InstallDataDir string } func (c *Config) LoadHub() error { @@ -14,10 +14,10 @@ func (c *Config) LoadHub() error { } c.Hub = &Hub{ - HubIndexFile: c.ConfigPaths.HubIndexFile, - ConfigDir: c.ConfigPaths.ConfigDir, - HubDir: c.ConfigPaths.HubDir, - DataDir: c.ConfigPaths.DataDir, + HubIndexFile: c.ConfigPaths.HubIndexFile, + HubDir: c.ConfigPaths.HubDir, + InstallDir: c.ConfigPaths.ConfigDir, + InstallDataDir: c.ConfigPaths.DataDir, } return nil diff --git a/pkg/csconfig/hub_test.go b/pkg/csconfig/hub_test.go index 136790d5f..d573e4690 100644 --- a/pkg/csconfig/hub_test.go +++ b/pkg/csconfig/hub_test.go @@ -1,94 +1,79 @@ package csconfig import ( - "fmt" "path/filepath" - "strings" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" ) func TestLoadHub(t *testing.T) { hubFullPath, err := filepath.Abs("./hub") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) dataFullPath, err := filepath.Abs("./data") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) - configDirFullPath, err := filepath.Abs("./tests") - if err != nil { - t.Fatal(err) - } + configDirFullPath, err := filepath.Abs("./testdata") + require.NoError(t, err) hubIndexFileFullPath, err := filepath.Abs("./hub/.index.json") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) tests := []struct { - name string - Input *Config - expectedResult *Hub - err string + name string + input *Config + expected *Hub + expectedErr string }{ { name: "basic valid configuration", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", DataDir: "./data", HubDir: "./hub", HubIndexFile: "./hub/.index.json", }, }, - expectedResult: &Hub{ - ConfigDir: configDirFullPath, - DataDir: dataFullPath, - HubDir: hubFullPath, - HubIndexFile: hubIndexFileFullPath, + expected: &Hub{ + HubDir: hubFullPath, + HubIndexFile: hubIndexFileFullPath, + InstallDir: configDirFullPath, + InstallDataDir: dataFullPath, }, }, { name: "no data dir", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - ConfigDir: "./tests", + ConfigDir: "./testdata", HubDir: "./hub", HubIndexFile: "./hub/.index.json", }, }, - expectedResult: nil, + expectedErr: "please provide a data directory with the 'data_dir' directive in the 'config_paths' 
section", }, { - name: "no configuration path", - Input: &Config{}, - expectedResult: nil, + name: "no configuration path", + input: &Config{}, + expectedErr: "no configuration paths provided", }, } - for idx, test := range tests { - err := test.Input.LoadHub() - if err == nil && test.err != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) - } else if test.err != "" { - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.err, - fmt.Sprintf("%s", err)) + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.input.LoadHub() + cstest.RequireErrorContains(t, err, tc.expectedErr) + if tc.expectedErr != "" { + return } - } - isOk := assert.Equal(t, test.expectedResult, test.Input.Hub) - if !isOk { - t.Fatalf("TEST '%s': NOK", test.name) - } else { - fmt.Printf("TEST '%s': OK\n", test.name) - } + + assert.Equal(t, tc.expected, tc.input.Hub) + }) } } diff --git a/pkg/csconfig/profiles.go b/pkg/csconfig/profiles.go index 7071b28cd..ad3779ed1 100644 --- a/pkg/csconfig/profiles.go +++ b/pkg/csconfig/profiles.go @@ -6,10 +6,11 @@ import ( "fmt" "io" + "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/yamlpatch" "github.com/crowdsecurity/crowdsec/pkg/models" - "gopkg.in/yaml.v2" ) // var OnErrorDefault = OnErrorIgnore @@ -43,7 +44,6 @@ func (c *LocalApiServerCfg) LoadProfiles() error { } reader := bytes.NewReader(fcontent) - //process the yaml dec := yaml.NewDecoder(reader) dec.SetStrict(true) for { diff --git a/pkg/csconfig/prometheus_test.go b/pkg/csconfig/prometheus_test.go index 9f14d1149..79c9ec58f 100644 --- a/pkg/csconfig/prometheus_test.go +++ b/pkg/csconfig/prometheus_test.go @@ -3,21 +3,21 @@ package csconfig import ( "testing" - "github.com/crowdsecurity/go-cs-lib/cstest" - "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" ) func TestLoadPrometheus(t *testing.T) { tests := []struct { name string - Input *Config + input *Config expectedURL string expectedErr string }{ { name: "basic valid configuration", - Input: &Config{ + input: &Config{ Prometheus: &PrometheusCfg{ Enabled: true, Level: "full", @@ -33,10 +33,10 @@ func TestLoadPrometheus(t *testing.T) { for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { - err := tc.Input.LoadPrometheus() + err := tc.input.LoadPrometheus() cstest.RequireErrorContains(t, err, tc.expectedErr) - require.Equal(t, tc.expectedURL, tc.Input.Cscli.PrometheusUrl) + require.Equal(t, tc.expectedURL, tc.input.Cscli.PrometheusUrl) }) } } diff --git a/pkg/csconfig/simulation.go b/pkg/csconfig/simulation.go index 93c823924..184708f0d 100644 --- a/pkg/csconfig/simulation.go +++ b/pkg/csconfig/simulation.go @@ -4,8 +4,9 @@ import ( "fmt" "path/filepath" - "github.com/crowdsecurity/go-cs-lib/yamlpatch" "gopkg.in/yaml.v2" + + "github.com/crowdsecurity/go-cs-lib/yamlpatch" ) type SimulationConfig struct { diff --git a/pkg/csconfig/simulation_test.go b/pkg/csconfig/simulation_test.go index 3e9a42847..44b8909a2 100644 --- a/pkg/csconfig/simulation_test.go +++ b/pkg/csconfig/simulation_test.go @@ -12,46 +12,46 @@ import ( ) func TestSimulationLoading(t *testing.T) { - testXXFullPath, err := filepath.Abs("./tests/xxx.yaml") + testXXFullPath, err := filepath.Abs("./testdata/xxx.yaml") require.NoError(t, err) - badYamlFullPath, err := filepath.Abs("./tests/config.yaml") + badYamlFullPath, err := 
filepath.Abs("./testdata/config.yaml") require.NoError(t, err) tests := []struct { - name string - Input *Config - expectedResult *SimulationConfig - expectedErr string + name string + input *Config + expected *SimulationConfig + expectedErr string }{ { name: "basic valid simulation", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - SimulationFilePath: "./tests/simulation.yaml", + SimulationFilePath: "./testdata/simulation.yaml", DataDir: "./data", }, Crowdsec: &CrowdsecServiceCfg{}, Cscli: &CscliCfg{}, }, - expectedResult: &SimulationConfig{Simulation: new(bool)}, + expected: &SimulationConfig{Simulation: new(bool)}, }, { name: "basic nil config", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ SimulationFilePath: "", DataDir: "./data", }, Crowdsec: &CrowdsecServiceCfg{}, }, - expectedErr: "simulation.yaml: "+cstest.FileNotFoundMessage, + expectedErr: "simulation.yaml: " + cstest.FileNotFoundMessage, }, { name: "basic bad file name", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - SimulationFilePath: "./tests/xxx.yaml", + SimulationFilePath: "./testdata/xxx.yaml", DataDir: "./data", }, Crowdsec: &CrowdsecServiceCfg{}, @@ -60,9 +60,9 @@ func TestSimulationLoading(t *testing.T) { }, { name: "basic bad file content", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - SimulationFilePath: "./tests/config.yaml", + SimulationFilePath: "./testdata/config.yaml", DataDir: "./data", }, Crowdsec: &CrowdsecServiceCfg{}, @@ -71,9 +71,9 @@ func TestSimulationLoading(t *testing.T) { }, { name: "basic bad file content", - Input: &Config{ + input: &Config{ ConfigPaths: &ConfigurationPaths{ - SimulationFilePath: "./tests/config.yaml", + SimulationFilePath: "./testdata/config.yaml", DataDir: "./data", }, Crowdsec: &CrowdsecServiceCfg{}, @@ -85,10 +85,10 @@ func TestSimulationLoading(t *testing.T) { for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { - err := tc.Input.LoadSimulation() + err := tc.input.LoadSimulation() cstest.RequireErrorContains(t, err, tc.expectedErr) - assert.Equal(t, tc.expectedResult, tc.Input.Crowdsec.SimulationConfig) + assert.Equal(t, tc.expected, tc.input.Crowdsec.SimulationConfig) }) } } @@ -109,32 +109,32 @@ func TestIsSimulated(t *testing.T) { name string SimulationConfig *SimulationConfig Input string - expectedResult bool + expected bool }{ { name: "No simulation except (in exclusion)", SimulationConfig: simCfgOff, Input: "test", - expectedResult: true, + expected: true, }, { name: "All simulation (not in exclusion)", SimulationConfig: simCfgOn, Input: "toto", - expectedResult: true, + expected: true, }, { name: "All simulation (in exclusion)", SimulationConfig: simCfgOn, Input: "test", - expectedResult: false, + expected: false, }, } for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { - IsSimulated := tc.SimulationConfig.IsSimulated(tc.Input) - require.Equal(t, tc.expectedResult, IsSimulated) + isSimulated := tc.SimulationConfig.IsSimulated(tc.Input) + require.Equal(t, tc.expected, isSimulated) }) } } diff --git a/pkg/csconfig/tests/acquis.yaml b/pkg/csconfig/testdata/acquis.yaml similarity index 100% rename from pkg/csconfig/tests/acquis.yaml rename to pkg/csconfig/testdata/acquis.yaml diff --git a/pkg/csconfig/tests/acquis/acquis.yaml b/pkg/csconfig/testdata/acquis/acquis.yaml similarity index 100% rename from pkg/csconfig/tests/acquis/acquis.yaml rename to pkg/csconfig/testdata/acquis/acquis.yaml diff --git 
a/pkg/csconfig/tests/bad_lapi-secrets.yaml b/pkg/csconfig/testdata/bad_lapi-secrets.yaml similarity index 100% rename from pkg/csconfig/tests/bad_lapi-secrets.yaml rename to pkg/csconfig/testdata/bad_lapi-secrets.yaml diff --git a/pkg/csconfig/tests/bad_online-api-secrets.yaml b/pkg/csconfig/testdata/bad_online-api-secrets.yaml similarity index 100% rename from pkg/csconfig/tests/bad_online-api-secrets.yaml rename to pkg/csconfig/testdata/bad_online-api-secrets.yaml diff --git a/pkg/csconfig/tests/config.yaml b/pkg/csconfig/testdata/config.yaml similarity index 55% rename from pkg/csconfig/tests/config.yaml rename to pkg/csconfig/testdata/config.yaml index 3148659e3..288c09b84 100644 --- a/pkg/csconfig/tests/config.yaml +++ b/pkg/csconfig/testdata/config.yaml @@ -7,7 +7,7 @@ prometheus: enabled: true level: full crowdsec_service: - acquisition_path: ./tests/acquis.yaml + acquisition_path: ./testdata/acquis.yaml parser_routines: 1 cscli: output: human @@ -21,17 +21,17 @@ db_config: type: sqlite api: client: - credentials_path: ./tests/lapi-secrets.yaml + credentials_path: ./testdata/lapi-secrets.yaml server: - profiles_path: ./tests/profiles.yaml + profiles_path: ./testdata/profiles.yaml listen_uri: 127.0.0.1:8080 tls: null online_client: - credentials_path: ./tests/online-api-secrets.yaml + credentials_path: ./testdata/online-api-secrets.yaml config_paths: - config_dir: ./tests + config_dir: ./testdata data_dir: . - simulation_path: ./tests/simulation.yaml - index_path: ./tests/hub/.index.json - hub_dir: ./tests/hub + simulation_path: ./testdata/simulation.yaml + index_path: ./testdata/hub/.index.json + hub_dir: ./testdata/hub diff --git a/pkg/csconfig/tests/context.yaml b/pkg/csconfig/testdata/context.yaml similarity index 100% rename from pkg/csconfig/tests/context.yaml rename to pkg/csconfig/testdata/context.yaml diff --git a/pkg/csconfig/tests/lapi-secrets.yaml b/pkg/csconfig/testdata/lapi-secrets.yaml similarity index 100% rename from pkg/csconfig/tests/lapi-secrets.yaml rename to pkg/csconfig/testdata/lapi-secrets.yaml diff --git a/pkg/csconfig/tests/online-api-secrets.yaml b/pkg/csconfig/testdata/online-api-secrets.yaml similarity index 100% rename from pkg/csconfig/tests/online-api-secrets.yaml rename to pkg/csconfig/testdata/online-api-secrets.yaml diff --git a/pkg/csconfig/tests/profiles.yaml b/pkg/csconfig/testdata/profiles.yaml similarity index 100% rename from pkg/csconfig/tests/profiles.yaml rename to pkg/csconfig/testdata/profiles.yaml diff --git a/pkg/csconfig/tests/simulation.yaml b/pkg/csconfig/testdata/simulation.yaml similarity index 100% rename from pkg/csconfig/tests/simulation.yaml rename to pkg/csconfig/testdata/simulation.yaml diff --git a/pkg/csplugin/broker_suite_test.go b/pkg/csplugin/broker_suite_test.go index 9fb41394e..778bb2dfe 100644 --- a/pkg/csplugin/broker_suite_test.go +++ b/pkg/csplugin/broker_suite_test.go @@ -49,7 +49,7 @@ func (s *PluginSuite) SetupSuite() { s.builtBinary += ".exe" } - cmd := exec.Command("go", "build", "-o", s.builtBinary, "../../plugins/notifications/dummy/") + cmd := exec.Command("go", "build", "-o", s.builtBinary, "../../cmd/notification-dummy/") err = cmd.Run() require.NoError(t, err, "while building dummy plugin") } diff --git a/pkg/csplugin/helpers.go b/pkg/csplugin/helpers.go index 3d4163a1a..75ee773b8 100644 --- a/pkg/csplugin/helpers.go +++ b/pkg/csplugin/helpers.go @@ -1,6 +1,7 @@ package csplugin import ( + "html" "os" "text/template" @@ -28,7 +29,8 @@ var helpers = template.FuncMap{ } return ret }, - "Hostname": 
os.Hostname, + "Hostname": os.Hostname, + "HTMLEscape": html.EscapeString, } func funcMap() template.FuncMap { diff --git a/pkg/csprofiles/csprofiles.go b/pkg/csprofiles/csprofiles.go index 4394a01db..6d217bda2 100644 --- a/pkg/csprofiles/csprofiles.go +++ b/pkg/csprofiles/csprofiles.go @@ -86,8 +86,15 @@ func NewProfile(profilesCfg []*csconfig.ProfileCfg) ([]*Runtime, error) { for _, decision := range profile.Decisions { if runtime.RuntimeDurationExpr == nil { - if _, err := time.ParseDuration(*decision.Duration); err != nil { - return []*Runtime{}, errors.Wrapf(err, "error parsing duration '%s' of %s", *decision.Duration, profile.Name) + var duration string + if decision.Duration != nil { + duration = *decision.Duration + } else { + runtime.Logger.Warningf("No duration specified for %s, using default duration %s", profile.Name, defaultDuration) + duration = defaultDuration + } + if _, err := time.ParseDuration(duration); err != nil { + return []*Runtime{}, errors.Wrapf(err, "error parsing duration '%s' of %s", duration, profile.Name) } } } diff --git a/pkg/csprofiles/csprofiles_test.go b/pkg/csprofiles/csprofiles_test.go index 8adf68291..be1d0178e 100644 --- a/pkg/csprofiles/csprofiles_test.go +++ b/pkg/csprofiles/csprofiles_test.go @@ -86,6 +86,19 @@ func TestNewProfile(t *testing.T) { }, expectedNbProfile: 1, }, + { + name: "filter ok and no duration", + profileCfg: &csconfig.ProfileCfg{ + Filters: []string{ + "1==1", + }, + Debug: &boolTrue, + Decisions: []models.Decision{ + {Type: &typ, Scope: &scope, Simulated: &boolFalse}, + }, + }, + expectedNbProfile: 1, + }, } for _, test := range tests { diff --git a/pkg/cwhub/cwhub.go b/pkg/cwhub/cwhub.go index 40c11058b..e9d3fb68f 100644 --- a/pkg/cwhub/cwhub.go +++ b/pkg/cwhub/cwhub.go @@ -1,9 +1,7 @@ package cwhub import ( - "crypto/sha256" "fmt" - "io" "os" "path/filepath" "sort" @@ -15,22 +13,32 @@ import ( "golang.org/x/mod/semver" ) -/*managed configuration types*/ -var PARSERS = "parsers" -var PARSERS_OVFLW = "postoverflows" -var SCENARIOS = "scenarios" -var COLLECTIONS = "collections" -var WAAP_RULES = "waap-rules" -var ItemTypes = []string{PARSERS, PARSERS_OVFLW, SCENARIOS, COLLECTIONS, WAAP_RULES} +const ( + HubIndexFile = ".index.json" -var hubIdx map[string]map[string]Item + // managed item types + PARSERS = "parsers" + PARSERS_OVFLW = "postoverflows" + SCENARIOS = "scenarios" + COLLECTIONS = "collections" + WAAP_RULES = "waap-rules" +) -var RawFileURLTemplate = "https://hub-cdn.crowdsec.net/%s/%s" -var HubBranch = "master" -var HubIndexFile = ".index.json" +var ( + ItemTypes = []string{PARSERS, PARSERS_OVFLW, SCENARIOS, COLLECTIONS, WAAP_RULES} + + ErrMissingReference = errors.New("Reference(s) missing in collection") + + // XXX: can we remove these globals? + skippedLocal = 0 + skippedTainted = 0 + RawFileURLTemplate = "https://hub-cdn.crowdsec.net/%s/%s" + HubBranch = "master" + hubIdx map[string]map[string]Item +) type ItemVersion struct { - Digest string `json:"digest,omitempty"` + Digest string `json:"digest,omitempty"` // meow Deprecated bool `json:"deprecated,omitempty"` } @@ -39,339 +47,220 @@ type ItemHubStatus struct { LocalVersion string `json:"local_version"` LocalPath string `json:"local_path"` Description string `json:"description"` - UTF8_Status string `json:"utf8_status"` + UTF8Status string `json:"utf8_status"` Status string `json:"status"` } -// Item can be : parsed, scenario, collection +// Item can be: parser, scenario, collection.. 
type Item struct { - /*descriptive info*/ - Type string `yaml:"type,omitempty" json:"type,omitempty"` //parser|postoverflows|scenario|collection(|enrich) - Stage string `json:"stage,omitempty" yaml:"stage,omitempty,omitempty"` //Stage for parser|postoverflow : s00-raw/s01-... - Name string `json:"name,omitempty"` //as seen in .config.json, usually "author/name" - FileName string `json:"file_name,omitempty"` //the filename, ie. apache2-logs.yaml - Description string `yaml:"description,omitempty" json:"description,omitempty"` //as seen in .config.json - Author string `json:"author,omitempty"` //as seen in .config.json - References []string `yaml:"references,omitempty" json:"references,omitempty"` //as seen in .config.json - BelongsToCollections []string `yaml:"belongs_to_collections,omitempty" json:"belongs_to_collections,omitempty"` /*if it's part of collections, track name here*/ + // descriptive info + Type string `json:"type,omitempty" yaml:"type,omitempty"` // parser|postoverflows|scenario|collection(|enrich) + Stage string `json:"stage,omitempty" yaml:"stage,omitempty"` // Stage for parser|postoverflow: s00-raw/s01-... + Name string `json:"name,omitempty"` // as seen in .config.json, usually "author/name" + FileName string `json:"file_name,omitempty"` // the filename, ie. apache2-logs.yaml + Description string `json:"description,omitempty" yaml:"description,omitempty"` // as seen in .config.json + Author string `json:"author,omitempty"` // as seen in .config.json + References []string `json:"references,omitempty" yaml:"references,omitempty"` // as seen in .config.json + BelongsToCollections []string `json:"belongs_to_collections,omitempty" yaml:"belongs_to_collections,omitempty"` // parent collection if any - /*remote (hub) infos*/ - RemoteURL string `yaml:"remoteURL,omitempty" json:"remoteURL,omitempty"` //the full remote uri of file in http - RemotePath string `json:"path,omitempty" yaml:"remote_path,omitempty"` //the path relative to git ie. /parsers/stage/author/file.yaml - RemoteHash string `yaml:"hash,omitempty" json:"hash,omitempty"` //the meow - Version string `json:"version,omitempty"` //the last version - Versions map[string]ItemVersion `json:"versions,omitempty" yaml:"-"` //the list of existing versions + // remote (hub) info + RemotePath string `json:"path,omitempty" yaml:"remote_path,omitempty"` // the path relative to (git | hub API) ie. 
/parsers/stage/author/file.yaml + Version string `json:"version,omitempty"` // the last version + Versions map[string]ItemVersion `json:"versions,omitempty" yaml:"-"` // the list of existing versions - /*local (deployed) infos*/ - LocalPath string `yaml:"local_path,omitempty" json:"local_path,omitempty"` //the local path relative to ${CFG_DIR} - //LocalHubPath string + // local (deployed) info + LocalPath string `json:"local_path,omitempty" yaml:"local_path,omitempty"` // the local path relative to ${CFG_DIR} LocalVersion string `json:"local_version,omitempty"` - LocalHash string `json:"local_hash,omitempty"` //the local meow + LocalHash string `json:"local_hash,omitempty"` // the local meow Installed bool `json:"installed,omitempty"` Downloaded bool `json:"downloaded,omitempty"` UpToDate bool `json:"up_to_date,omitempty"` - Tainted bool `json:"tainted,omitempty"` //has it been locally modified - Local bool `json:"local,omitempty"` //if it's a non versioned control one + Tainted bool `json:"tainted,omitempty"` // has it been locally modified + Local bool `json:"local,omitempty"` // if it's a non versioned control one - /*if it's a collection, it not a single file*/ - Parsers []string `yaml:"parsers,omitempty" json:"parsers,omitempty"` - PostOverflows []string `yaml:"postoverflows,omitempty" json:"postoverflows,omitempty"` - Scenarios []string `yaml:"scenarios,omitempty" json:"scenarios,omitempty"` - Collections []string `yaml:"collections,omitempty" json:"collections,omitempty"` - WafRules []string `yaml:"waap-rules,omitempty" json:"waap-rules,omitempty"` + // if it's a collection, it's not a single file + Parsers []string `json:"parsers,omitempty" yaml:"parsers,omitempty"` + PostOverflows []string `json:"postoverflows,omitempty" yaml:"postoverflows,omitempty"` + Scenarios []string `json:"scenarios,omitempty" yaml:"scenarios,omitempty"` + Collections []string `json:"collections,omitempty" yaml:"collections,omitempty"` + WafRules []string `json:"waap-rules,omitempty" yaml:"waap-rules,omitempty"` } -func (i *Item) toHubStatus() ItemHubStatus { - hubStatus := ItemHubStatus{} - hubStatus.Name = i.Name - hubStatus.LocalVersion = i.LocalVersion - hubStatus.LocalPath = i.LocalPath - hubStatus.Description = i.Description +func (i *Item) status() (string, emoji.Emoji) { + status := "disabled" + ok := false - status, ok, warning, managed := ItemStatus(*i) - hubStatus.Status = status - if !managed { - hubStatus.UTF8_Status = fmt.Sprintf("%v %s", emoji.House, status) - } else if !i.Installed { - hubStatus.UTF8_Status = fmt.Sprintf("%v %s", emoji.Prohibited, status) - } else if warning { - hubStatus.UTF8_Status = fmt.Sprintf("%v %s", emoji.Warning, status) - } else if ok { - hubStatus.UTF8_Status = fmt.Sprintf("%v %s", emoji.CheckMark, status) + if i.Installed { + ok = true + status = "enabled" } - return hubStatus + + managed := true + if i.Local { + managed = false + status += ",local" + } + + warning := false + if i.Tainted { + warning = true + status += ",tainted" + } else if !i.UpToDate && !i.Local { + warning = true + status += ",update-available" + } + + emo := emoji.QuestionMark + + switch { + case !managed: + emo = emoji.House + case !i.Installed: + emo = emoji.Prohibited + case warning: + emo = emoji.Warning + case ok: + emo = emoji.CheckMark + } + + return status, emo } -var skippedLocal = 0 -var skippedTainted = 0 +func (i *Item) hubStatus() ItemHubStatus { + status, emo := i.status() -/*To be used when reference(s) (is/are) missing in a collection*/ -var ReferenceMissingError = 
errors.New("Reference(s) missing in collection") -var MissingHubIndex = errors.New("hub index can't be found") - -// GetVersionStatus : semver requires 'v' prefix -func GetVersionStatus(v *Item) int { - return semver.Compare("v"+v.Version, "v"+v.LocalVersion) + return ItemHubStatus{ + Name: i.Name, + LocalVersion: i.LocalVersion, + LocalPath: i.LocalPath, + Description: i.Description, + Status: status, + UTF8Status: fmt.Sprintf("%v %s", emo, status), + } } -// calculate sha256 of a file -func getSHA256(filepath string) (string, error) { - /* Digest of file */ - f, err := os.Open(filepath) - if err != nil { - return "", fmt.Errorf("unable to open '%s' : %s", filepath, err) - } - - defer f.Close() - - h := sha256.New() - if _, err := io.Copy(h, f); err != nil { - return "", fmt.Errorf("unable to calculate sha256 of '%s': %s", filepath, err) - } - - return fmt.Sprintf("%x", h.Sum(nil)), nil +// versionStatus: semver requires 'v' prefix +func (i *Item) versionStatus() int { + return semver.Compare("v"+i.Version, "v"+i.LocalVersion) } func GetItemMap(itemType string) map[string]Item { - var m map[string]Item - var ok bool - - if m, ok = hubIdx[itemType]; !ok { + m, ok := hubIdx[itemType] + if !ok { return nil } + return m } +// Given a FileInfo, extract the map key. Follow a symlink if necessary +func itemKey(itemPath string) (string, error) { + f, err := os.Lstat(itemPath) + if err != nil { + return "", fmt.Errorf("while performing lstat on %s: %w", itemPath, err) + } + + if f.Mode()&os.ModeSymlink == 0 { + // it's not a symlink, so the filename itsef should be the key + return filepath.Base(itemPath), nil + } + + // resolve the symlink to hub file + pathInHub, err := os.Readlink(itemPath) + if err != nil { + return "", fmt.Errorf("while reading symlink of %s: %w", itemPath, err) + } + + author := filepath.Base(filepath.Dir(pathInHub)) + + fname := filepath.Base(pathInHub) + fname = strings.TrimSuffix(fname, ".yaml") + fname = strings.TrimSuffix(fname, ".yml") + + return fmt.Sprintf("%s/%s", author, fname), nil +} + // GetItemByPath retrieves the item from hubIdx based on the path. To achieve this it will resolve symlink to find associated hub item. 
func GetItemByPath(itemType string, itemPath string) (*Item, error) { - /*try to resolve symlink*/ - finalName := "" - f, err := os.Lstat(itemPath) + itemKey, err := itemKey(itemPath) if err != nil { - return nil, fmt.Errorf("while performing lstat on %s: %w", itemPath, err) + return nil, err } - if f.Mode()&os.ModeSymlink == 0 { - /*it's not a symlink, it should be the filename itsef the key*/ - finalName = filepath.Base(itemPath) - } else { - /*resolve the symlink to hub file*/ - pathInHub, err := os.Readlink(itemPath) - if err != nil { - return nil, fmt.Errorf("while reading symlink of %s: %w", itemPath, err) - } - //extract author from path - fname := filepath.Base(pathInHub) - author := filepath.Base(filepath.Dir(pathInHub)) - //trim yaml suffix - fname = strings.TrimSuffix(fname, ".yaml") - fname = strings.TrimSuffix(fname, ".yml") - finalName = fmt.Sprintf("%s/%s", author, fname) + m := GetItemMap(itemType) + if m == nil { + return nil, fmt.Errorf("item type %s doesn't exist", itemType) } - /*it's not a symlink, it should be the filename itsef the key*/ - if m := GetItemMap(itemType); m != nil { - if v, ok := m[finalName]; ok { - return &v, nil - } - return nil, fmt.Errorf("%s not found in %s", finalName, itemType) + v, ok := m[itemKey] + if !ok { + return nil, fmt.Errorf("%s not found in %s", itemKey, itemType) } - return nil, fmt.Errorf("item type %s doesn't exist", itemType) + + return &v, nil } func GetItem(itemType string, itemName string) *Item { if m, ok := GetItemMap(itemType)[itemName]; ok { return &m } + return nil } func AddItem(itemType string, item Item) error { - in := false for _, itype := range ItemTypes { if itype == itemType { - in = true + hubIdx[itemType][item.Name] = item + return nil } } - if !in { - return fmt.Errorf("ItemType %s is unknown", itemType) - } - hubIdx[itemType][item.Name] = item - return nil + + return fmt.Errorf("ItemType %s is unknown", itemType) } func DisplaySummary() { - log.Printf("Loaded %d collecs, %d parsers, %d scenarios, %d post-overflow parsers, %d waf rules", len(hubIdx[COLLECTIONS]), + log.Infof("Loaded %d collecs, %d parsers, %d scenarios, %d post-overflow parsers,%d waf rules", len(hubIdx[COLLECTIONS]), len(hubIdx[PARSERS]), len(hubIdx[SCENARIOS]), len(hubIdx[PARSERS_OVFLW]), len(hubIdx[WAAP_RULES])) + if skippedLocal > 0 || skippedTainted > 0 { - log.Printf("unmanaged items : %d local, %d tainted", skippedLocal, skippedTainted) + log.Infof("unmanaged items: %d local, %d tainted", skippedLocal, skippedTainted) } } -// returns: human-text, Enabled, Warning, Unmanaged -func ItemStatus(v Item) (string, bool, bool, bool) { - strret := "disabled" - Ok := false - if v.Installed { - Ok = true - strret = "enabled" +func GetInstalledItems(itemType string) ([]Item, error) { + items, ok := hubIdx[itemType] + if !ok { + return nil, fmt.Errorf("no %s in hubIdx", itemType) } - Managed := true - if v.Local { - Managed = false - strret += ",local" - } + retItems := make([]Item, 0) - //tainted or out of date - Warning := false - if v.Tainted { - Warning = true - strret += ",tainted" - } else if !v.UpToDate && !v.Local { - strret += ",update-available" - Warning = true - } - return strret, Ok, Warning, Managed -} - -func GetInstalledScenariosAsString() ([]string, error) { - var retStr []string - - items, err := GetInstalledScenarios() - if err != nil { - return nil, fmt.Errorf("while fetching scenarios: %w", err) - } - for _, it := range items { - retStr = append(retStr, it.Name) - } - return retStr, nil -} - -func GetInstalledScenarios() ([]Item, 
error) { - var retItems []Item - - if _, ok := hubIdx[SCENARIOS]; !ok { - return nil, fmt.Errorf("no scenarios in hubIdx") - } - for _, item := range hubIdx[SCENARIOS] { + for _, item := range items { if item.Installed { retItems = append(retItems, item) } } + return retItems, nil } -func GetInstalledParsers() ([]Item, error) { - var retItems []Item - - if _, ok := hubIdx[PARSERS]; !ok { - return nil, fmt.Errorf("no parsers in hubIdx") - } - for _, item := range hubIdx[PARSERS] { - if item.Installed { - retItems = append(retItems, item) - } - } - return retItems, nil -} - -func GetInstalledParsersAsString() ([]string, error) { - var retStr []string - - items, err := GetInstalledParsers() +func GetInstalledItemsAsString(itemType string) ([]string, error) { + items, err := GetInstalledItems(itemType) if err != nil { - return nil, fmt.Errorf("while fetching parsers: %w", err) + return nil, err } - for _, it := range items { - retStr = append(retStr, it.Name) + + retStr := make([]string, len(items)) + + for i, it := range items { + retStr[i] = it.Name } + return retStr, nil } -func GetInstalledPostOverflows() ([]Item, error) { - var retItems []Item - - if _, ok := hubIdx[PARSERS_OVFLW]; !ok { - return nil, fmt.Errorf("no post overflows in hubIdx") - } - for _, item := range hubIdx[PARSERS_OVFLW] { - if item.Installed { - retItems = append(retItems, item) - } - } - return retItems, nil -} - -func GetInstalledPostOverflowsAsString() ([]string, error) { - var retStr []string - - items, err := GetInstalledPostOverflows() - if err != nil { - return nil, fmt.Errorf("while fetching post overflows: %w", err) - } - for _, it := range items { - retStr = append(retStr, it.Name) - } - return retStr, nil -} - -func GetInstalledCollectionsAsString() ([]string, error) { - var retStr []string - - items, err := GetInstalledCollections() - if err != nil { - return nil, fmt.Errorf("while fetching collections: %w", err) - } - - for _, it := range items { - retStr = append(retStr, it.Name) - } - return retStr, nil -} - -func GetInstalledCollections() ([]Item, error) { - var retItems []Item - - if _, ok := hubIdx[COLLECTIONS]; !ok { - return nil, fmt.Errorf("no collection in hubIdx") - } - for _, item := range hubIdx[COLLECTIONS] { - if item.Installed { - retItems = append(retItems, item) - } - } - return retItems, nil -} - -func GetInstalledWafRules() ([]Item, error) { - var retItems []Item - - if _, ok := hubIdx[WAAP_RULES]; !ok { - return nil, fmt.Errorf("no waf rules in hubIdx") - } - for _, item := range hubIdx[WAAP_RULES] { - if item.Installed { - retItems = append(retItems, item) - } - } - return retItems, nil -} - -func GetInstalledWafRulesAsString() ([]string, error) { - var retStr []string - - items, err := GetInstalledWafRules() - if err != nil { - return nil, errors.Wrap(err, "while fetching waf rules") - } - for _, it := range items { - retStr = append(retStr, it.Name) - } - return retStr, nil -} - -// Returns a list of entries for packages : name, status, local_path, local_version, utf8_status (fancy) +// Returns a slice of entries for packages: name, status, local_path, local_version, utf8_status (fancy) func GetHubStatusForItemType(itemType string, name string, all bool) []ItemHubStatus { if _, ok := hubIdx[itemType]; !ok { log.Errorf("type %s doesn't exist", itemType) @@ -379,20 +268,23 @@ func GetHubStatusForItemType(itemType string, name string, all bool) []ItemHubSt return nil } - var ret = make([]ItemHubStatus, 0) - /*remember, you do it for the user :)*/ + ret := make([]ItemHubStatus, 0) + + 
// remember, you do it for the user :) for _, item := range hubIdx[itemType] { if name != "" && name != item.Name { - //user has requested a specific name + // user has requested a specific name continue } - //Only enabled items ? + // Only enabled items ? if !all && !item.Installed { continue } - //Check the item status - ret = append(ret, item.toHubStatus()) + // Check the item status + ret = append(ret, item.hubStatus()) } + sort.Slice(ret, func(i, j int) bool { return ret[i].Name < ret[j].Name }) + return ret } diff --git a/pkg/cwhub/cwhub_test.go b/pkg/cwhub/cwhub_test.go index f91b0dced..2fef828c2 100644 --- a/pkg/cwhub/cwhub_test.go +++ b/pkg/cwhub/cwhub_test.go @@ -1,7 +1,6 @@ package cwhub import ( - "fmt" "io" "net/http" "os" @@ -9,8 +8,13 @@ import ( "strings" "testing" - "github.com/crowdsecurity/crowdsec/pkg/csconfig" log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" + + "github.com/crowdsecurity/crowdsec/pkg/csconfig" ) /* @@ -28,47 +32,39 @@ func TestItemStatus(t *testing.T) { cfg := envSetup(t) defer envTearDown(cfg) + // DownloadHubIdx() err := UpdateHubIdx(cfg.Hub) - //DownloadHubIdx() - if err != nil { - t.Fatalf("failed to download index : %s", err) - } - if err := GetHubIdx(cfg.Hub); err != nil { - t.Fatalf("failed to load hub index : %s", err) - } + require.NoError(t, err, "failed to download index") - //get existing map + err = GetHubIdx(cfg.Hub) + require.NoError(t, err, "failed to load hub index") + + // get existing map x := GetItemMap(COLLECTIONS) - if len(x) == 0 { - t.Fatalf("expected non empty result") - } + require.NotEmpty(t, x) - //Get item : good and bad + // Get item : good and bad for k := range x { item := GetItem(COLLECTIONS, k) - if item == nil { - t.Fatalf("expected item") - } + require.NotNil(t, item) + item.Installed = true item.UpToDate = false item.Local = false item.Tainted = false - txt, _, _, _ := ItemStatus(*item) - if txt != "enabled,update-available" { - t.Fatalf("got '%s'", txt) - } + + txt, _ := item.status() + require.Equal(t, "enabled,update-available", txt) item.Installed = false item.UpToDate = false item.Local = true item.Tainted = false - txt, _, _, _ = ItemStatus(*item) - if txt != "disabled,local" { - t.Fatalf("got '%s'", txt) - } - break + txt, _ = item.status() + require.Equal(t, "disabled,local", txt) } + DisplaySummary() } @@ -76,109 +72,86 @@ func TestGetters(t *testing.T) { cfg := envSetup(t) defer envTearDown(cfg) + // DownloadHubIdx() err := UpdateHubIdx(cfg.Hub) - //DownloadHubIdx() - if err != nil { - t.Fatalf("failed to download index : %s", err) - } - if err := GetHubIdx(cfg.Hub); err != nil { - t.Fatalf("failed to load hub index : %s", err) - } + require.NoError(t, err, "failed to download index") - //get non existing map + err = GetHubIdx(cfg.Hub) + require.NoError(t, err, "failed to load hub index") + + // get non existing map empty := GetItemMap("ratata") - if empty != nil { - t.Fatalf("expected nil result") - } - //get existing map - x := GetItemMap(COLLECTIONS) - if len(x) == 0 { - t.Fatalf("expected non empty result") - } + require.Nil(t, empty) - //Get item : good and bad + // get existing map + x := GetItemMap(COLLECTIONS) + require.NotEmpty(t, x) + + // Get item : good and bad for k := range x { empty := GetItem(COLLECTIONS, k+"nope") - if empty != nil { - t.Fatalf("expected empty item") - } + require.Nil(t, empty) item := GetItem(COLLECTIONS, k) - if item == nil { - t.Fatalf("expected non empty 
item") - } + require.NotNil(t, item) - //Add item and get it + // Add item and get it item.Name += "nope" - if err := AddItem(COLLECTIONS, *item); err != nil { - t.Fatalf("didn't expect error : %s", err) - } + err := AddItem(COLLECTIONS, *item) + require.NoError(t, err) newitem := GetItem(COLLECTIONS, item.Name) - if newitem == nil { - t.Fatalf("expected non empty item") - } + require.NotNil(t, newitem) - //Add bad item - if err := AddItem("ratata", *item); err != nil { - if fmt.Sprintf("%s", err) != "ItemType ratata is unknown" { - t.Fatalf("unexpected error") - } - } else { - t.Fatalf("Expected error") - } - - break + err = AddItem("ratata", *item) + cstest.RequireErrorContains(t, err, "ItemType ratata is unknown") } - } func TestIndexDownload(t *testing.T) { cfg := envSetup(t) defer envTearDown(cfg) + // DownloadHubIdx() err := UpdateHubIdx(cfg.Hub) - //DownloadHubIdx() - if err != nil { - t.Fatalf("failed to download index : %s", err) - } - if err := GetHubIdx(cfg.Hub); err != nil { - t.Fatalf("failed to load hub index : %s", err) - } + require.NoError(t, err, "failed to download index") + + err = GetHubIdx(cfg.Hub) + require.NoError(t, err, "failed to load hub index") } -func getTestCfg() (cfg *csconfig.Config) { - cfg = &csconfig.Config{Hub: &csconfig.Hub{}} - cfg.Hub.ConfigDir, _ = filepath.Abs("./install") +func getTestCfg() *csconfig.Config { + cfg := &csconfig.Config{Hub: &csconfig.Hub{}} + cfg.Hub.InstallDir, _ = filepath.Abs("./install") cfg.Hub.HubDir, _ = filepath.Abs("./hubdir") cfg.Hub.HubIndexFile = filepath.Clean("./hubdir/.index.json") - return + + return cfg } func envSetup(t *testing.T) *csconfig.Config { resetResponseByPath() log.SetLevel(log.DebugLevel) + cfg := getTestCfg() defaultTransport := http.DefaultClient.Transport + t.Cleanup(func() { http.DefaultClient.Transport = defaultTransport }) - //Mock the http client + // Mock the http client http.DefaultClient.Transport = newMockTransport() - if err := os.MkdirAll(cfg.Hub.ConfigDir, 0700); err != nil { - log.Fatalf("mkdir : %s", err) - } + err := os.MkdirAll(cfg.Hub.InstallDir, 0700) + require.NoError(t, err) - if err := os.MkdirAll(cfg.Hub.HubDir, 0700); err != nil { - log.Fatalf("failed to mkdir %s : %s", cfg.Hub.HubDir, err) - } + err = os.MkdirAll(cfg.Hub.HubDir, 0700) + require.NoError(t, err) - if err := UpdateHubIdx(cfg.Hub); err != nil { - log.Fatalf("failed to download index : %s", err) - } + err = UpdateHubIdx(cfg.Hub) + require.NoError(t, err) // if err := os.RemoveAll(cfg.Hub.InstallDir); err != nil { // log.Fatalf("failed to remove %s : %s", cfg.Hub.InstallDir, err) @@ -189,10 +162,9 @@ func envSetup(t *testing.T) *csconfig.Config { return cfg } - func envTearDown(cfg *csconfig.Config) { - if err := os.RemoveAll(cfg.Hub.ConfigDir); err != nil { - log.Fatalf("failed to remove %s : %s", cfg.Hub.ConfigDir, err) + if err := os.RemoveAll(cfg.Hub.InstallDir); err != nil { + log.Fatalf("failed to remove %s : %s", cfg.Hub.InstallDir, err) } if err := os.RemoveAll(cfg.Hub.HubDir); err != nil { @@ -200,124 +172,90 @@ func envTearDown(cfg *csconfig.Config) { } } - func testInstallItem(cfg *csconfig.Hub, t *testing.T, item Item) { + // Install the parser + err := DownloadLatest(cfg, &item, false, false) + require.NoError(t, err, "failed to download %s", item.Name) - //Install the parser - item, err := DownloadLatest(cfg, item, false, false) - if err != nil { - t.Fatalf("error while downloading %s : %v", item.Name, err) - } - if err, _ := LocalSync(cfg); err != nil { - t.Fatalf("taint: failed to run localSync : 
%s", err) - } - if !hubIdx[item.Type][item.Name].UpToDate { - t.Fatalf("download: %s should be up-to-date", item.Name) - } - if hubIdx[item.Type][item.Name].Installed { - t.Fatalf("download: %s should not be installed", item.Name) - } - if hubIdx[item.Type][item.Name].Tainted { - t.Fatalf("download: %s should not be tainted", item.Name) - } + _, err = LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") - item, err = EnableItem(cfg, item) - if err != nil { - t.Fatalf("error while enabling %s : %v.", item.Name, err) - } - if err, _ := LocalSync(cfg); err != nil { - t.Fatalf("taint: failed to run localSync : %s", err) - } - if !hubIdx[item.Type][item.Name].Installed { - t.Fatalf("install: %s should be installed", item.Name) - } + assert.True(t, hubIdx[item.Type][item.Name].UpToDate, "%s should be up-to-date", item.Name) + assert.False(t, hubIdx[item.Type][item.Name].Installed, "%s should not be installed", item.Name) + assert.False(t, hubIdx[item.Type][item.Name].Tainted, "%s should not be tainted", item.Name) + + err = EnableItem(cfg, &item) + require.NoError(t, err, "failed to enable %s", item.Name) + + _, err = LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") + + assert.True(t, hubIdx[item.Type][item.Name].Installed, "%s should be installed", item.Name) } func testTaintItem(cfg *csconfig.Hub, t *testing.T, item Item) { - if hubIdx[item.Type][item.Name].Tainted { - t.Fatalf("pre-taint: %s should not be tainted", item.Name) - } + assert.False(t, hubIdx[item.Type][item.Name].Tainted, "%s should not be tainted", item.Name) + f, err := os.OpenFile(item.LocalPath, os.O_APPEND|os.O_WRONLY, 0600) - if err != nil { - t.Fatalf("(taint) opening %s (%s) : %s", item.LocalPath, item.Name, err) - } + require.NoError(t, err, "failed to open %s (%s)", item.LocalPath, item.Name) + defer f.Close() - if _, err = f.WriteString("tainted"); err != nil { - t.Fatalf("tainting %s : %s", item.Name, err) - } - //Local sync and check status - if err, _ := LocalSync(cfg); err != nil { - t.Fatalf("taint: failed to run localSync : %s", err) - } - if !hubIdx[item.Type][item.Name].Tainted { - t.Fatalf("taint: %s should be tainted", item.Name) - } + _, err = f.WriteString("tainted") + require.NoError(t, err, "failed to write to %s (%s)", item.LocalPath, item.Name) + + // Local sync and check status + _, err = LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") + + assert.True(t, hubIdx[item.Type][item.Name].Tainted, "%s should be tainted", item.Name) } func testUpdateItem(cfg *csconfig.Hub, t *testing.T, item Item) { + assert.False(t, hubIdx[item.Type][item.Name].UpToDate, "%s should not be up-to-date", item.Name) - if hubIdx[item.Type][item.Name].UpToDate { - t.Fatalf("update: %s should NOT be up-to-date", item.Name) - } - //Update it + check status - item, err := DownloadLatest(cfg, item, true, true) - if err != nil { - t.Fatalf("failed to update %s : %s", item.Name, err) - } - //Local sync and check status - if err, _ := LocalSync(cfg); err != nil { - t.Fatalf("failed to run localSync : %s", err) - } - if !hubIdx[item.Type][item.Name].UpToDate { - t.Fatalf("update: %s should be up-to-date", item.Name) - } - if hubIdx[item.Type][item.Name].Tainted { - t.Fatalf("update: %s should not be tainted anymore", item.Name) - } + // Update it + check status + err := DownloadLatest(cfg, &item, true, true) + require.NoError(t, err, "failed to update %s", item.Name) + + // Local sync and check status + _, err = LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") + + 
assert.True(t, hubIdx[item.Type][item.Name].UpToDate, "%s should be up-to-date", item.Name) + assert.False(t, hubIdx[item.Type][item.Name].Tainted, "%s should not be tainted anymore", item.Name) } func testDisableItem(cfg *csconfig.Hub, t *testing.T, item Item) { - if !item.Installed { - t.Fatalf("disable: %s should be installed", item.Name) - } - //Remove - item, err := DisableItem(cfg, item, false, false) - if err != nil { - t.Fatalf("failed to disable item : %v", err) - } - //Local sync and check status - if err, warns := LocalSync(cfg); err != nil || len(warns) > 0 { - t.Fatalf("failed to run localSync : %s (%+v)", err, warns) - } - if hubIdx[item.Type][item.Name].Tainted { - t.Fatalf("disable: %s should not be tainted anymore", item.Name) - } - if hubIdx[item.Type][item.Name].Installed { - t.Fatalf("disable: %s should not be installed anymore", item.Name) - } - if !hubIdx[item.Type][item.Name].Downloaded { - t.Fatalf("disable: %s should still be downloaded", item.Name) - } - //Purge - item, err = DisableItem(cfg, item, true, false) - if err != nil { - t.Fatalf("failed to purge item : %v", err) - } - //Local sync and check status - if err, warns := LocalSync(cfg); err != nil || len(warns) > 0 { - t.Fatalf("failed to run localSync : %s (%+v)", err, warns) - } - if hubIdx[item.Type][item.Name].Installed { - t.Fatalf("disable: %s should not be installed anymore", item.Name) - } - if hubIdx[item.Type][item.Name].Downloaded { - t.Fatalf("disable: %s should not be downloaded", item.Name) - } + assert.True(t, hubIdx[item.Type][item.Name].Installed, "%s should be installed", item.Name) + + // Remove + err := DisableItem(cfg, &item, false, false) + require.NoError(t, err, "failed to disable %s", item.Name) + + // Local sync and check status + warns, err := LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") + require.Empty(t, warns, "unexpected warnings : %+v", warns) + + assert.False(t, hubIdx[item.Type][item.Name].Tainted, "%s should not be tainted anymore", item.Name) + assert.False(t, hubIdx[item.Type][item.Name].Installed, "%s should not be installed anymore", item.Name) + assert.True(t, hubIdx[item.Type][item.Name].Downloaded, "%s should still be downloaded", item.Name) + + // Purge + err = DisableItem(cfg, &item, true, false) + require.NoError(t, err, "failed to purge %s", item.Name) + + // Local sync and check status + warns, err = LocalSync(cfg) + require.NoError(t, err, "failed to run localSync") + require.Empty(t, warns, "unexpected warnings : %+v", warns) + + assert.False(t, hubIdx[item.Type][item.Name].Installed, "%s should not be installed anymore", item.Name) + assert.False(t, hubIdx[item.Type][item.Name].Downloaded, "%s should not be downloaded", item.Name) } func TestInstallParser(t *testing.T) { - /* - install a random parser - check its status @@ -331,7 +269,7 @@ func TestInstallParser(t *testing.T) { defer envTearDown(cfg) getHubIdxOrFail(t) - //map iteration is random by itself + // map iteration is random by itself for _, it := range hubIdx[PARSERS] { testInstallItem(cfg.Hub, t, it) it = hubIdx[PARSERS][it.Name] @@ -349,7 +287,6 @@ func TestInstallParser(t *testing.T) { } func TestInstallCollection(t *testing.T) { - /* - install a random parser - check its status @@ -363,7 +300,7 @@ func TestInstallCollection(t *testing.T) { defer envTearDown(cfg) getHubIdxOrFail(t) - //map iteration is random by itself + // map iteration is random by itself for _, it := range hubIdx[COLLECTIONS] { testInstallItem(cfg.Hub, t, it) it = hubIdx[COLLECTIONS][it.Name] @@ -375,7 
+312,8 @@ func TestInstallCollection(t *testing.T) { it = hubIdx[COLLECTIONS][it.Name] x := GetHubStatusForItemType(COLLECTIONS, it.Name, false) - log.Printf("%+v", x) + log.Infof("%+v", x) + break } } @@ -395,39 +333,41 @@ func (t *mockTransport) RoundTrip(req *http.Request) (*http.Response, error) { StatusCode: http.StatusOK, } response.Header.Set("Content-Type", "application/json") - responseBody := "" - log.Printf("---> %s", req.URL.Path) - /*FAKE PARSER*/ - if resp, ok := responseByPath[req.URL.Path]; ok { - responseBody = resp - } else { + log.Infof("---> %s", req.URL.Path) + + // FAKE PARSER + resp, ok := responseByPath[req.URL.Path] + if !ok { log.Fatalf("unexpected url :/ %s", req.URL.Path) } - response.Body = io.NopCloser(strings.NewReader(responseBody)) + response.Body = io.NopCloser(strings.NewReader(resp)) + return response, nil } func fileToStringX(path string) string { - if f, err := os.Open(path); err == nil { - defer f.Close() - if data, err := io.ReadAll(f); err == nil { - return strings.ReplaceAll(string(data), "\r\n", "\n") - } else { - panic(err) - } - } else { + f, err := os.Open(path) + if err != nil { panic(err) } + defer f.Close() + + data, err := io.ReadAll(f) + if err != nil { + panic(err) + } + + return strings.ReplaceAll(string(data), "\r\n", "\n") } func resetResponseByPath() { responseByPath = map[string]string{ - "/master/parsers/s01-parse/crowdsecurity/foobar_parser.yaml": fileToStringX("./tests/foobar_parser.yaml"), - "/master/parsers/s01-parse/crowdsecurity/foobar_subparser.yaml": fileToStringX("./tests/foobar_parser.yaml"), - "/master/collections/crowdsecurity/test_collection.yaml": fileToStringX("./tests/collection_v1.yaml"), - "/master/.index.json": fileToStringX("./tests/index1.json"), + "/master/parsers/s01-parse/crowdsecurity/foobar_parser.yaml": fileToStringX("./testdata/foobar_parser.yaml"), + "/master/parsers/s01-parse/crowdsecurity/foobar_subparser.yaml": fileToStringX("./testdata/foobar_parser.yaml"), + "/master/collections/crowdsecurity/test_collection.yaml": fileToStringX("./testdata/collection_v1.yaml"), + "/master/.index.json": fileToStringX("./testdata/index1.json"), "/master/scenarios/crowdsecurity/foobar_scenario.yaml": `filter: true name: crowdsecurity/foobar_scenario`, "/master/scenarios/crowdsecurity/barfoo_scenario.yaml": `filter: true diff --git a/pkg/cwhub/dataset.go b/pkg/cwhub/dataset.go index a6488751b..2255d40a7 100644 --- a/pkg/cwhub/dataset.go +++ b/pkg/cwhub/dataset.go @@ -18,6 +18,7 @@ type DataSet struct { func downloadFile(url string, destPath string) error { log.Debugf("downloading %s in %s", url, destPath) + req, err := http.NewRequest(http.MethodGet, url, nil) if err != nil { return err @@ -43,7 +44,7 @@ func downloadFile(url string, destPath string) error { return err } - _, err = file.WriteString(string(body)) + _, err = file.Write(body) if err != nil { return err } @@ -60,6 +61,7 @@ func GetData(data []*types.DataSource, dataDir string) error { for _, dataS := range data { destPath := filepath.Join(dataDir, dataS.DestPath) log.Infof("downloading data '%s' in '%s'", dataS.SourceURL, destPath) + err := downloadFile(dataS.SourceURL, destPath) if err != nil { return err diff --git a/pkg/cwhub/dataset_test.go b/pkg/cwhub/dataset_test.go index 106268c01..40f6ba847 100644 --- a/pkg/cwhub/dataset_test.go +++ b/pkg/cwhub/dataset_test.go @@ -4,9 +4,8 @@ import ( "os" "testing" - "github.com/stretchr/testify/assert" - "github.com/jarcoal/httpmock" + "github.com/stretchr/testify/assert" ) func TestDownloadFile(t *testing.T) { 
@@ -26,6 +25,7 @@ func TestDownloadFile(t *testing.T) { "https://example.com/x", httpmock.NewStringResponder(404, "not found"), ) + err := downloadFile("https://example.com/xx", examplePath) assert.NoError(t, err) content, err := os.ReadFile(examplePath) diff --git a/pkg/cwhub/download.go b/pkg/cwhub/download.go index 491e31dab..ef111ba62 100644 --- a/pkg/cwhub/download.go +++ b/pkg/cwhub/download.go @@ -24,36 +24,45 @@ func UpdateHubIdx(hub *csconfig.Hub) error { if err != nil { return fmt.Errorf("failed to download index: %w", err) } + ret, err := LoadPkgIndex(bidx) if err != nil { - if !errors.Is(err, ReferenceMissingError) { + if !errors.Is(err, ErrMissingReference) { return fmt.Errorf("failed to read index: %w", err) } } + hubIdx = ret - if err, _ := LocalSync(hub); err != nil { + + if _, err := LocalSync(hub); err != nil { return fmt.Errorf("failed to sync: %w", err) } + return nil } func DownloadHubIdx(hub *csconfig.Hub) ([]byte, error) { log.Debugf("fetching index from branch %s (%s)", HubBranch, fmt.Sprintf(RawFileURLTemplate, HubBranch, HubIndexFile)) + req, err := http.NewRequest(http.MethodGet, fmt.Sprintf(RawFileURLTemplate, HubBranch, HubIndexFile), nil) if err != nil { return nil, fmt.Errorf("failed to build request for hub index: %w", err) } + resp, err := http.DefaultClient.Do(req) if err != nil { return nil, fmt.Errorf("failed http request for hub index: %w", err) } defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { if resp.StatusCode == http.StatusNotFound { return nil, ErrIndexNotFound } + return nil, fmt.Errorf("bad http code %d while requesting %s", resp.StatusCode, req.URL.String()) } + body, err := io.ReadAll(resp.Body) if err != nil { return nil, fmt.Errorf("failed to read request answer for hub index: %w", err) @@ -76,24 +85,28 @@ func DownloadHubIdx(hub *csconfig.Hub) ([]byte, error) { } defer file.Close() - wsize, err := file.WriteString(string(body)) + wsize, err := file.Write(body) if err != nil { return nil, fmt.Errorf("while writing hub index file: %w", err) } + log.Infof("Wrote new %d bytes index to %s", wsize, hub.HubIndexFile) + return body, nil } // DownloadLatest will download the latest version of Item to the tdir directory -func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool, updateOnly bool) (Item, error) { +func DownloadLatest(hub *csconfig.Hub, target *Item, overwrite bool, updateOnly bool) error { var err error log.Debugf("Downloading %s %s", target.Type, target.Name) + if target.Type != COLLECTIONS { if !target.Installed && updateOnly && target.Downloaded { log.Debugf("skipping upgrade of %s : not installed", target.Name) - return target, nil + return nil } + return DownloadItem(hub, target, overwrite) } @@ -104,7 +117,7 @@ func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool, updateOnly b for _, p := range ptr { val, ok := hubIdx[ptrtype][p] if !ok { - return target, fmt.Errorf("required %s %s of %s doesn't exist, abort", ptrtype, p, target.Name) + return fmt.Errorf("required %s %s of %s doesn't exist, abort", ptrtype, p, target.Name) } if !val.Installed && updateOnly && val.Downloaded { @@ -116,94 +129,115 @@ func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool, updateOnly b //recurse as it's a collection if ptrtype == COLLECTIONS { log.Tracef("collection, recurse") - hubIdx[ptrtype][p], err = DownloadLatest(hub, val, overwrite, updateOnly) + + err = DownloadLatest(hub, &val, overwrite, updateOnly) if err != nil { - return target, fmt.Errorf("while downloading %s: %w", val.Name, err) + 
return fmt.Errorf("while downloading %s: %w", val.Name, err) } } - item, err := DownloadItem(hub, val, overwrite) + + downloaded := val.Downloaded + + err = DownloadItem(hub, &val, overwrite) if err != nil { - return target, fmt.Errorf("while downloading %s: %w", val.Name, err) + return fmt.Errorf("while downloading %s: %w", val.Name, err) } // We need to enable an item when it has been added to a collection since latest release of the collection. // We check if val.Downloaded is false because maybe the item has been disabled by the user. - if !item.Installed && !val.Downloaded { - if item, err = EnableItem(hub, item); err != nil { - return target, fmt.Errorf("enabling '%s': %w", item.Name, err) + if !val.Installed && !downloaded { + if err = EnableItem(hub, &val); err != nil { + return fmt.Errorf("enabling '%s': %w", val.Name, err) } } - hubIdx[ptrtype][p] = item + + hubIdx[ptrtype][p] = val } } - target, err = DownloadItem(hub, target, overwrite) + + err = DownloadItem(hub, target, overwrite) if err != nil { - return target, fmt.Errorf("failed to download item : %s", err) + return fmt.Errorf("failed to download item: %w", err) } - return target, nil + + return nil } -func DownloadItem(hub *csconfig.Hub, target Item, overwrite bool) (Item, error) { - var tdir = hub.HubDir - var dataFolder = hub.DataDir - /*if user didn't --force, don't overwrite local, tainted, up-to-date files*/ +func DownloadItem(hub *csconfig.Hub, target *Item, overwrite bool) error { + tdir := hub.HubDir + + // if user didn't --force, don't overwrite local, tainted, up-to-date files if !overwrite { if target.Tainted { log.Debugf("%s : tainted, not updated", target.Name) - return target, nil + return nil } + if target.UpToDate { - log.Debugf("%s : up-to-date, not updated", target.Name) // We still have to check if data files are present + log.Debugf("%s : up-to-date, not updated", target.Name) } } + req, err := http.NewRequest(http.MethodGet, fmt.Sprintf(RawFileURLTemplate, HubBranch, target.RemotePath), nil) if err != nil { - return target, fmt.Errorf("while downloading %s: %w", req.URL.String(), err) + return fmt.Errorf("while downloading %s: %w", req.URL.String(), err) } + resp, err := http.DefaultClient.Do(req) if err != nil { - return target, fmt.Errorf("while downloading %s: %w", req.URL.String(), err) + return fmt.Errorf("while downloading %s: %w", req.URL.String(), err) } + if resp.StatusCode != http.StatusOK { - return target, fmt.Errorf("bad http code %d for %s", resp.StatusCode, req.URL.String()) + return fmt.Errorf("bad http code %d for %s", resp.StatusCode, req.URL.String()) } + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) if err != nil { - return target, fmt.Errorf("while reading %s: %w", req.URL.String(), err) + return fmt.Errorf("while reading %s: %w", req.URL.String(), err) } + h := sha256.New() - if _, err := h.Write(body); err != nil { - return target, fmt.Errorf("while hashing %s: %w", target.Name, err) + if _, err = h.Write(body); err != nil { + return fmt.Errorf("while hashing %s: %w", target.Name, err) } + meow := fmt.Sprintf("%x", h.Sum(nil)) if meow != target.Versions[target.Version].Digest { log.Errorf("Downloaded version doesn't match index, please 'hub update'") log.Debugf("got %s, expected %s", meow, target.Versions[target.Version].Digest) - return target, fmt.Errorf("invalid download hash for %s", target.Name) + + return fmt.Errorf("invalid download hash for %s", target.Name) } + //all good, install //check if parent dir exists tmpdirs := 
strings.Split(tdir+"/"+target.RemotePath, "/") - parent_dir := strings.Join(tmpdirs[:len(tmpdirs)-1], "/") + parentDir := strings.Join(tmpdirs[:len(tmpdirs)-1], "/") - /*ensure that target file is within target dir*/ + // ensure that target file is within target dir finalPath, err := filepath.Abs(tdir + "/" + target.RemotePath) if err != nil { - return target, fmt.Errorf("filepath.Abs error on %s: %w", tdir+"/"+target.RemotePath, err) + return fmt.Errorf("filepath.Abs error on %s: %w", tdir+"/"+target.RemotePath, err) } + if !strings.HasPrefix(finalPath, tdir) { - return target, fmt.Errorf("path %s escapes %s, abort", target.RemotePath, tdir) + return fmt.Errorf("path %s escapes %s, abort", target.RemotePath, tdir) } - /*check dir*/ - if _, err = os.Stat(parent_dir); os.IsNotExist(err) { - log.Debugf("%s doesn't exist, create", parent_dir) - if err := os.MkdirAll(parent_dir, os.ModePerm); err != nil { - return target, fmt.Errorf("while creating parent directories: %w", err) + + // check dir + if _, err = os.Stat(parentDir); os.IsNotExist(err) { + log.Debugf("%s doesn't exist, create", parentDir) + + if err = os.MkdirAll(parentDir, os.ModePerm); err != nil { + return fmt.Errorf("while creating parent directories: %w", err) } } - /*check actual file*/ + + // check actual file if _, err = os.Stat(finalPath); !os.IsNotExist(err) { log.Warningf("%s : overwrite", target.Name) log.Debugf("target: %s/%s", tdir, target.RemotePath) @@ -213,62 +247,71 @@ func DownloadItem(hub *csconfig.Hub, target Item, overwrite bool) (Item, error) f, err := os.OpenFile(tdir+"/"+target.RemotePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) if err != nil { - return target, fmt.Errorf("while opening file: %w", err) + return fmt.Errorf("while opening file: %w", err) } + defer f.Close() - _, err = f.WriteString(string(body)) + + _, err = f.Write(body) if err != nil { - return target, fmt.Errorf("while writing file: %w", err) + return fmt.Errorf("while writing file: %w", err) } + target.Downloaded = true target.Tainted = false target.UpToDate = true - if err = downloadData(dataFolder, overwrite, bytes.NewReader(body)); err != nil { - return target, fmt.Errorf("while downloading data for %s: %w", target.FileName, err) + if err = downloadData(hub.InstallDataDir, overwrite, bytes.NewReader(body)); err != nil { + return fmt.Errorf("while downloading data for %s: %w", target.FileName, err) } - hubIdx[target.Type][target.Name] = target - return target, nil + hubIdx[target.Type][target.Name] = *target + + return nil } func DownloadDataIfNeeded(hub *csconfig.Hub, target Item, force bool) error { - var ( - dataFolder = hub.DataDir - itemFile *os.File - err error - ) - itemFilePath := fmt.Sprintf("%s/%s/%s/%s", hub.ConfigDir, target.Type, target.Stage, target.FileName) - if itemFile, err = os.Open(itemFilePath); err != nil { + itemFilePath := fmt.Sprintf("%s/%s/%s/%s", hub.InstallDir, target.Type, target.Stage, target.FileName) + + itemFile, err := os.Open(itemFilePath) + if err != nil { return fmt.Errorf("while opening %s: %w", itemFilePath, err) } + defer itemFile.Close() - if err = downloadData(dataFolder, force, itemFile); err != nil { + + if err = downloadData(hub.InstallDataDir, force, itemFile); err != nil { return fmt.Errorf("while downloading data for %s: %w", itemFilePath, err) } + return nil } func downloadData(dataFolder string, force bool, reader io.Reader) error { var err error + dec := yaml.NewDecoder(reader) for { data := &DataSet{} + err = dec.Decode(data) if err != nil { if errors.Is(err, io.EOF) { break } + 
return fmt.Errorf("while reading file: %w", err) } download := false + for _, dataS := range data.Data { - if _, err := os.Stat(filepath.Join(dataFolder, dataS.DestPath)); os.IsNotExist(err) { + if _, err = os.Stat(filepath.Join(dataFolder, dataS.DestPath)); os.IsNotExist(err) { download = true } } + if download || force { err = GetData(data.Data, dataFolder) if err != nil { @@ -276,5 +319,6 @@ func downloadData(dataFolder string, force bool, reader io.Reader) error { } } } + return nil } diff --git a/pkg/cwhub/download_test.go b/pkg/cwhub/download_test.go index 156c41322..351b08f8e 100644 --- a/pkg/cwhub/download_test.go +++ b/pkg/cwhub/download_test.go @@ -5,38 +5,48 @@ import ( "strings" "testing" - "github.com/crowdsecurity/crowdsec/pkg/csconfig" log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/crowdsec/pkg/csconfig" ) func TestDownloadHubIdx(t *testing.T) { back := RawFileURLTemplate - //bad url template + // bad url template fmt.Println("Test 'bad URL'") + RawFileURLTemplate = "x" + ret, err := DownloadHubIdx(&csconfig.Hub{}) if err == nil || !strings.HasPrefix(fmt.Sprintf("%s", err), "failed to build request for hub index: parse ") { log.Errorf("unexpected error %s", err) } + fmt.Printf("->%+v", ret) - //bad domain + // bad domain fmt.Println("Test 'bad domain'") + RawFileURLTemplate = "https://baddomain/%s/%s" + ret, err = DownloadHubIdx(&csconfig.Hub{}) if err == nil || !strings.HasPrefix(fmt.Sprintf("%s", err), "failed http request for hub index: Get") { log.Errorf("unexpected error %s", err) } + fmt.Printf("->%+v", ret) - //bad target path + // bad target path fmt.Println("Test 'bad target path'") + RawFileURLTemplate = back + ret, err = DownloadHubIdx(&csconfig.Hub{HubIndexFile: "/does/not/exist/index.json"}) if err == nil || !strings.HasPrefix(fmt.Sprintf("%s", err), "while opening hub index file: open /does/not/exist/index.json:") { log.Errorf("unexpected error %s", err) } RawFileURLTemplate = back + fmt.Printf("->%+v", ret) } diff --git a/pkg/cwhub/helpers.go b/pkg/cwhub/helpers.go index 4133e2272..c17e6758d 100644 --- a/pkg/cwhub/helpers.go +++ b/pkg/cwhub/helpers.go @@ -13,76 +13,74 @@ import ( ) // pick a hub branch corresponding to the current crowdsec version. -func chooseHubBranch() (string, error) { +func chooseHubBranch() string { latest, err := cwversion.Latest() if err != nil { log.Warningf("Unable to retrieve latest crowdsec version: %s, defaulting to master", err) - //lint:ignore nilerr reason - return "master", nil // ignore + //lint:ignore nilerr + return "master" } csVersion := cwversion.VersionStrip() if csVersion == latest { log.Debugf("current version is equal to latest (%s)", csVersion) - return "master", nil + return "master" } // if current version is greater than the latest we are in pre-release if semver.Compare(csVersion, latest) == 1 { log.Debugf("Your current crowdsec version seems to be a pre-release (%s)", csVersion) - return "master", nil + return "master" } if csVersion == "" { log.Warning("Crowdsec version is not set, using master branch for the hub") - return "master", nil + return "master" } log.Warnf("Crowdsec is not the latest version. "+ "Current version is '%s' and the latest stable version is '%s'. Please update it!", csVersion, latest) + log.Warnf("As a result, you will not be able to use parsers/scenarios/collections "+ "added to Crowdsec Hub after CrowdSec %s", latest) - return csVersion, nil + + return csVersion } // SetHubBranch sets the package variable that points to the hub branch. 
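The downloadData loop above treats each item file as a stream of YAML documents and keeps decoding until io.EOF, since one hub file can carry several documents, each with its own data: section. A standalone sketch of that pattern; gopkg.in/yaml.v3 and the source_url/dest_path keys are assumptions here (the real type is types.DataSource), but the NewDecoder/Decode/io.EOF shape is the same in yaml.v2:

package main

import (
	"errors"
	"fmt"
	"io"
	"strings"

	"gopkg.in/yaml.v3"
)

// dataSet mirrors the shape downloadData cares about; the YAML keys are
// assumptions for the sake of the example.
type dataSet struct {
	Data []struct {
		SourceURL string `yaml:"source_url"`
		DestPath  string `yaml:"dest_path"`
	} `yaml:"data"`
}

func main() {
	doc := `data:
  - source_url: https://example.com/firstfile.txt
    dest_path: firstfile.txt
---
data:
  - source_url: https://example.com/secondfile.txt
    dest_path: secondfile.txt
`
	dec := yaml.NewDecoder(strings.NewReader(doc))

	for {
		ds := dataSet{}
		if err := dec.Decode(&ds); err != nil {
			if errors.Is(err, io.EOF) {
				break // no more YAML documents in the stream
			}
			panic(err)
		}

		for _, d := range ds.Data {
			fmt.Println(d.SourceURL, "->", d.DestPath)
		}
	}
}
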
-func SetHubBranch() error { +func SetHubBranch() { // a branch is already set, or specified from the flags if HubBranch != "" { - return nil + return } // use the branch corresponding to the crowdsec version - branch, err := chooseHubBranch() - if err != nil { - return err - } - HubBranch = branch + HubBranch = chooseHubBranch() + log.Debugf("Using branch '%s' for the hub", HubBranch) - return nil } func InstallItem(csConfig *csconfig.Config, name string, obtype string, force bool, downloadOnly bool) error { - it := GetItem(obtype, name) - if it == nil { + item := GetItem(obtype, name) + if item == nil { return fmt.Errorf("unable to retrieve item: %s", name) } - item := *it if downloadOnly && item.Downloaded && item.UpToDate { log.Warningf("%s is already downloaded and up-to-date", item.Name) + if !force { return nil } } - item, err := DownloadLatest(csConfig.Hub, item, force, true) + err := DownloadLatest(csConfig.Hub, item, force, true) if err != nil { return fmt.Errorf("while downloading %s: %w", item.Name, err) } - if err := AddItem(obtype, item); err != nil { + if err = AddItem(obtype, *item); err != nil { return fmt.Errorf("while adding %s: %w", item.Name, err) } @@ -91,12 +89,12 @@ func InstallItem(csConfig *csconfig.Config, name string, obtype string, force bo return nil } - item, err = EnableItem(csConfig.Hub, item) + err = EnableItem(csConfig.Hub, item) if err != nil { return fmt.Errorf("while enabling %s: %w", item.Name, err) } - if err := AddItem(obtype, item); err != nil { + if err := AddItem(obtype, *item); err != nil { return fmt.Errorf("while adding %s: %w", item.Name, err) } @@ -107,26 +105,22 @@ func InstallItem(csConfig *csconfig.Config, name string, obtype string, force bo // XXX this must return errors instead of log.Fatal func RemoveMany(csConfig *csconfig.Config, itemType string, name string, all bool, purge bool, forceAction bool) { - var ( - err error - disabled int - ) - if name != "" { - it := GetItem(itemType, name) - if it == nil { + item := GetItem(itemType, name) + if item == nil { log.Fatalf("unable to retrieve: %s", name) } - item := *it - item, err = DisableItem(csConfig.Hub, item, purge, forceAction) + err := DisableItem(csConfig.Hub, item, purge, forceAction) + if err != nil { log.Fatalf("unable to disable %s : %v", item.Name, err) } - if err := AddItem(itemType, item); err != nil { + if err = AddItem(itemType, *item); err != nil { log.Fatalf("unable to add %s: %v", item.Name, err) } + return } @@ -134,12 +128,15 @@ func RemoveMany(csConfig *csconfig.Config, itemType string, name string, all boo log.Fatal("removing item: no item specified") } + disabled := 0 + // remove all for _, v := range GetItemMap(itemType) { if !v.Installed { continue } - v, err = DisableItem(csConfig.Hub, v, purge, forceAction) + + err := DisableItem(csConfig.Hub, &v, purge, forceAction) if err != nil { log.Fatalf("unable to disable %s : %v", v.Name, err) } @@ -149,15 +146,13 @@ func RemoveMany(csConfig *csconfig.Config, itemType string, name string, all boo } disabled++ } + log.Infof("Disabled %d items", disabled) } func UpgradeConfig(csConfig *csconfig.Config, itemType string, name string, force bool) { - var ( - err error - updated int - found bool - ) + updated := 0 + found := false for _, v := range GetItemMap(itemType) { if name != "" && name != v.Name { @@ -179,7 +174,7 @@ func UpgradeConfig(csConfig *csconfig.Config, itemType string, name string, forc if v.UpToDate { log.Infof("%s : up-to-date", v.Name) - if err = DownloadDataIfNeeded(csConfig.Hub, v, force); err != nil { 
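The signature changes running through helpers.go and install.go all follow the same idea: DownloadLatest, EnableItem and DisableItem now receive a *Item and mutate it, instead of returning a modified copy that every caller had to remember to write back into hubIdx. A tiny sketch of the difference, using a made-up type rather than the real Item:

package main

import "fmt"

type fakeItem struct{ Installed bool }

// Old style: the change only survives if the caller remembers to reassign.
func enableCopy(it fakeItem) fakeItem {
	it.Installed = true
	return it
}

// New style: the mutation is visible to the caller directly.
func enableInPlace(it *fakeItem) {
	it.Installed = true
}

func main() {
	a := fakeItem{}
	enableCopy(a) // return value dropped: a is silently left unchanged
	b := fakeItem{}
	enableInPlace(&b)

	fmt.Println(a.Installed, b.Installed) // false true
}
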
+ if err := DownloadDataIfNeeded(csConfig.Hub, v, force); err != nil { log.Fatalf("%s : download failed : %v", v.Name, err) } @@ -188,8 +183,7 @@ func UpgradeConfig(csConfig *csconfig.Config, itemType string, name string, forc } } - v, err = DownloadLatest(csConfig.Hub, v, force, true) - if err != nil { + if err := DownloadLatest(csConfig.Hub, &v, force, true); err != nil { log.Fatalf("%s : download failed : %v", v.Name, err) } diff --git a/pkg/cwhub/helpers_test.go b/pkg/cwhub/helpers_test.go index b8a15519d..c8bb28c36 100644 --- a/pkg/cwhub/helpers_test.go +++ b/pkg/cwhub/helpers_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" ) -//Download index, install collection. Add scenario to collection (hub-side), update index, upgrade collection +// Download index, install collection. Add scenario to collection (hub-side), update index, upgrade collection // We expect the new scenario to be installed func TestUpgradeConfigNewScenarioInCollection(t *testing.T) { cfg := envSetup(t) @@ -37,6 +37,7 @@ func TestUpgradeConfigNewScenarioInCollection(t *testing.T) { if err := UpdateHubIdx(cfg.Hub); err != nil { t.Fatalf("failed to download index : %s", err) } + getHubIdxOrFail(t) require.True(t, hubIdx[COLLECTIONS]["crowdsecurity/test_collection"].Downloaded) @@ -49,7 +50,6 @@ func TestUpgradeConfigNewScenarioInCollection(t *testing.T) { require.True(t, hubIdx[SCENARIOS]["crowdsecurity/barfoo_scenario"].Downloaded) require.True(t, hubIdx[SCENARIOS]["crowdsecurity/barfoo_scenario"].Installed) - } // Install a collection, disable a scenario. @@ -140,6 +140,7 @@ func TestUpgradeConfigNewScenarioIsInstalledWhenReferencedScenarioIsDisabled(t * if err := UpdateHubIdx(cfg.Hub); err != nil { t.Fatalf("failed to download index : %s", err) } + require.False(t, hubIdx[SCENARIOS]["crowdsecurity/foobar_scenario"].Installed) getHubIdxOrFail(t) @@ -151,11 +152,12 @@ func TestUpgradeConfigNewScenarioIsInstalledWhenReferencedScenarioIsDisabled(t * func assertCollectionDepsInstalled(t *testing.T, collection string) { t.Helper() + c := hubIdx[COLLECTIONS][collection] require.NoError(t, CollecDepsCheck(&c)) } func pushUpdateToCollectionInHub() { - responseByPath["/master/.index.json"] = fileToStringX("./tests/index2.json") - responseByPath["/master/collections/crowdsecurity/test_collection.yaml"] = fileToStringX("./tests/collection_v2.yaml") + responseByPath["/master/.index.json"] = fileToStringX("./testdata/index2.json") + responseByPath["/master/collections/crowdsecurity/test_collection.yaml"] = fileToStringX("./testdata/collection_v2.yaml") } diff --git a/pkg/cwhub/install.go b/pkg/cwhub/install.go index 505c36297..45e2ba419 100644 --- a/pkg/cwhub/install.go +++ b/pkg/cwhub/install.go @@ -11,68 +11,64 @@ import ( ) func purgeItem(hub *csconfig.Hub, target Item) (Item, error) { - var hdir = hub.HubDir - hubpath := hdir + "/" + target.RemotePath + itempath := hub.HubDir + "/" + target.RemotePath // disable hub file - if err := os.Remove(hubpath); err != nil { + if err := os.Remove(itempath); err != nil { return target, fmt.Errorf("while removing file: %w", err) } target.Downloaded = false - log.Infof("Removed source file [%s] : %s", target.Name, hubpath) + log.Infof("Removed source file [%s]: %s", target.Name, itempath) hubIdx[target.Type][target.Name] = target + return target, nil } -//DisableItem to disable an item managed by the hub, removes the symlink if purge is true -func DisableItem(hub *csconfig.Hub, target Item, purge bool, force bool) (Item, error) { - var tdir = hub.ConfigDir - var hdir 
= hub.HubDir +// DisableItem to disable an item managed by the hub, removes the symlink if purge is true +func DisableItem(hub *csconfig.Hub, target *Item, purge bool, force bool) error { var err error + // already disabled, noop unless purge if !target.Installed { if purge { - target, err = purgeItem(hub, target) + *target, err = purgeItem(hub, *target) if err != nil { - return target, err + return err } } - return target, nil - } - syml, err := filepath.Abs(tdir + "/" + target.Type + "/" + target.Stage + "/" + target.FileName) - if err != nil { - return Item{}, err + return nil } if target.Local { - return target, fmt.Errorf("%s isn't managed by hub. Please delete manually", target.Name) + return fmt.Errorf("%s isn't managed by hub. Please delete manually", target.Name) } if target.Tainted && !force { - return target, fmt.Errorf("%s is tainted, use '--force' to overwrite", target.Name) + return fmt.Errorf("%s is tainted, use '--force' to overwrite", target.Name) } - /*for a COLLECTIONS, disable sub-items*/ + // for a COLLECTIONS, disable sub-items if target.Type == COLLECTIONS { - var tmp = [][]string{target.Parsers, target.PostOverflows, target.Scenarios, target.Collections} - for idx, ptr := range tmp { + for idx, ptr := range [][]string{target.Parsers, target.PostOverflows, target.Scenarios, target.Collections} { ptrtype := ItemTypes[idx] for _, p := range ptr { if val, ok := hubIdx[ptrtype][p]; ok { // check if the item doesn't belong to another collection before removing it toRemove := true + for _, collection := range val.BelongsToCollections { if collection != target.Name { toRemove = false break } } + if toRemove { - hubIdx[ptrtype][p], err = DisableItem(hub, val, purge, force) + err = DisableItem(hub, &val, purge, force) if err != nil { - return target, fmt.Errorf("while disabling %s: %w", p, err) + return fmt.Errorf("while disabling %s: %w", p, err) } } else { log.Infof("%s was not removed because it belongs to another collection", val.Name) @@ -84,118 +80,135 @@ func DisableItem(hub *csconfig.Hub, target Item, purge bool, force bool) (Item, } } + syml, err := filepath.Abs(hub.InstallDir + "/" + target.Type + "/" + target.Stage + "/" + target.FileName) + if err != nil { + return err + } + stat, err := os.Lstat(syml) if os.IsNotExist(err) { - if !purge && !force { //we only accept to "delete" non existing items if it's a purge - return target, fmt.Errorf("can't delete %s : %s doesn't exist", target.Name, syml) + // we only accept to "delete" non existing items if it's a forced purge + if !purge && !force { + return fmt.Errorf("can't delete %s : %s doesn't exist", target.Name, syml) } } else { - //if it's managed by hub, it's a symlink to csconfig.GConfig.hub.HubDir / ... + // if it's managed by hub, it's a symlink to csconfig.GConfig.hub.HubDir / ... 
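The block that follows enforces a simple rule: DisableItem only removes what it can prove is hub-managed, meaning the deployed path is a symlink whose target is exactly the corresponding file under hub.HubDir; anything else is treated as local and left alone. A hypothetical standalone helper condensing that rule (not part of the patch):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func isManagedByHub(installedPath, hubDir, remotePath string) (bool, error) {
	fi, err := os.Lstat(installedPath)
	if err != nil {
		return false, err
	}
	if fi.Mode()&os.ModeSymlink == 0 {
		return false, nil // a plain file was dropped in place: local, not managed
	}
	target, err := os.Readlink(installedPath)
	if err != nil {
		return false, err
	}
	want, err := filepath.Abs(filepath.Join(hubDir, remotePath))
	if err != nil {
		return false, err
	}
	return target == want, nil
}

func main() {
	// Illustrative paths; on a machine without them the helper simply returns the Lstat error.
	ok, err := isManagedByHub(
		"/etc/crowdsec/parsers/s01-parse/foobar_parser.yaml",
		"/etc/crowdsec/hub",
		"parsers/s01-parse/crowdsecurity/foobar_parser.yaml",
	)
	fmt.Println(ok, err)
}
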
if stat.Mode()&os.ModeSymlink == 0 { log.Warningf("%s (%s) isn't a symlink, can't disable", target.Name, syml) - return target, fmt.Errorf("%s isn't managed by hub", target.Name) - } - hubpath, err := os.Readlink(syml) - if err != nil { - return target, fmt.Errorf("while reading symlink: %w", err) - } - absPath, err := filepath.Abs(hdir + "/" + target.RemotePath) - if err != nil { - return target, fmt.Errorf("while abs path: %w", err) - } - if hubpath != absPath { - log.Warningf("%s (%s) isn't a symlink to %s", target.Name, syml, absPath) - return target, fmt.Errorf("%s isn't managed by hub", target.Name) + return fmt.Errorf("%s isn't managed by hub", target.Name) } - //remove the symlink - if err = os.Remove(syml); err != nil { - return target, fmt.Errorf("while removing symlink: %w", err) + hubpath, err := os.Readlink(syml) + if err != nil { + return fmt.Errorf("while reading symlink: %w", err) } + + absPath, err := filepath.Abs(hub.HubDir + "/" + target.RemotePath) + if err != nil { + return fmt.Errorf("while abs path: %w", err) + } + + if hubpath != absPath { + log.Warningf("%s (%s) isn't a symlink to %s", target.Name, syml, absPath) + return fmt.Errorf("%s isn't managed by hub", target.Name) + } + + // remove the symlink + if err = os.Remove(syml); err != nil { + return fmt.Errorf("while removing symlink: %w", err) + } + log.Infof("Removed symlink [%s] : %s", target.Name, syml) } + target.Installed = false if purge { - target, err = purgeItem(hub, target) + *target, err = purgeItem(hub, *target) if err != nil { - return target, err + return err } } - hubIdx[target.Type][target.Name] = target - return target, nil + + hubIdx[target.Type][target.Name] = *target + + return nil } // creates symlink between actual config file at hub.HubDir and hub.ConfigDir // Handles collections recursively -func EnableItem(hub *csconfig.Hub, target Item) (Item, error) { - var tdir = hub.ConfigDir - var hdir = hub.HubDir +func EnableItem(hub *csconfig.Hub, target *Item) error { var err error - parent_dir := filepath.Clean(tdir + "/" + target.Type + "/" + target.Stage + "/") - /*create directories if needed*/ + + parentDir := filepath.Clean(hub.InstallDir + "/" + target.Type + "/" + target.Stage + "/") + + // create directories if needed if target.Installed { if target.Tainted { - return target, fmt.Errorf("%s is tainted, won't enable unless --force", target.Name) + return fmt.Errorf("%s is tainted, won't enable unless --force", target.Name) } + if target.Local { - return target, fmt.Errorf("%s is local, won't enable", target.Name) + return fmt.Errorf("%s is local, won't enable", target.Name) } - /* if it's a collection, check sub-items even if the collection file itself is up-to-date */ + + // if it's a collection, check sub-items even if the collection file itself is up-to-date if target.UpToDate && target.Type != COLLECTIONS { log.Tracef("%s is installed and up-to-date, skip.", target.Name) - return target, nil - } - } - if _, err := os.Stat(parent_dir); os.IsNotExist(err) { - log.Printf("%s doesn't exist, create", parent_dir) - if err := os.MkdirAll(parent_dir, os.ModePerm); err != nil { - return target, fmt.Errorf("while creating directory: %w", err) + return nil } } - /*install sub-items if it's a collection*/ + if _, err = os.Stat(parentDir); os.IsNotExist(err) { + log.Infof("%s doesn't exist, create", parentDir) + + if err = os.MkdirAll(parentDir, os.ModePerm); err != nil { + return fmt.Errorf("while creating directory: %w", err) + } + } + + // install sub-items if it's a collection if target.Type 
== COLLECTIONS { - var tmp = [][]string{target.Parsers, target.PostOverflows, target.Scenarios, target.Collections} - for idx, ptr := range tmp { + for idx, ptr := range [][]string{target.Parsers, target.PostOverflows, target.Scenarios, target.Collections} { ptrtype := ItemTypes[idx] for _, p := range ptr { val, ok := hubIdx[ptrtype][p] if !ok { - return target, fmt.Errorf("required %s %s of %s doesn't exist, abort", ptrtype, p, target.Name) + return fmt.Errorf("required %s %s of %s doesn't exist, abort", ptrtype, p, target.Name) } - hubIdx[ptrtype][p], err = EnableItem(hub, val) + err = EnableItem(hub, &val) if err != nil { - return target, fmt.Errorf("while installing %s: %w", p, err) + return fmt.Errorf("while installing %s: %w", p, err) } } } } // check if file already exists where it should in configdir (eg /etc/crowdsec/collections/) - if _, err := os.Lstat(parent_dir + "/" + target.FileName); !os.IsNotExist(err) { - log.Printf("%s already exists.", parent_dir+"/"+target.FileName) - return target, nil + if _, err = os.Lstat(parentDir + "/" + target.FileName); !os.IsNotExist(err) { + log.Infof("%s already exists.", parentDir+"/"+target.FileName) + return nil } - //tdir+target.RemotePath - srcPath, err := filepath.Abs(hdir + "/" + target.RemotePath) + // hub.ConfigDir + target.RemotePath + srcPath, err := filepath.Abs(hub.HubDir + "/" + target.RemotePath) if err != nil { - return target, fmt.Errorf("while getting source path: %w", err) + return fmt.Errorf("while getting source path: %w", err) } - dstPath, err := filepath.Abs(parent_dir + "/" + target.FileName) + dstPath, err := filepath.Abs(parentDir + "/" + target.FileName) if err != nil { - return target, fmt.Errorf("while getting destination path: %w", err) + return fmt.Errorf("while getting destination path: %w", err) } if err = os.Symlink(srcPath, dstPath); err != nil { - return target, fmt.Errorf("while creating symlink from %s to %s: %w", srcPath, dstPath, err) + return fmt.Errorf("while creating symlink from %s to %s: %w", srcPath, dstPath, err) } - log.Printf("Enabled %s : %s", target.Type, target.Name) + log.Infof("Enabled %s : %s", target.Type, target.Name) target.Installed = true - hubIdx[target.Type][target.Name] = target - return target, nil + hubIdx[target.Type][target.Name] = *target + + return nil } diff --git a/pkg/cwhub/loader.go b/pkg/cwhub/loader.go index e088f9338..dcc19f863 100644 --- a/pkg/cwhub/loader.go +++ b/pkg/cwhub/loader.go @@ -1,33 +1,154 @@ package cwhub import ( + "crypto/sha256" "encoding/json" "errors" "fmt" + "io" "os" "path/filepath" "sort" "strings" log "github.com/sirupsen/logrus" - "golang.org/x/mod/semver" "github.com/crowdsecurity/crowdsec/pkg/csconfig" ) -/*the walk/parser_visit function can't receive extra args*/ -var hubdir, installdir string +func isYAMLFileName(path string) bool { + return strings.HasSuffix(path, ".yaml") || strings.HasSuffix(path, ".yml") +} -func parser_visit(path string, f os.DirEntry, err error) error { +func validItemFileName(vname string, fauthor string, fname string) bool { + return (fauthor+"/"+fname == vname+".yaml") || (fauthor+"/"+fname == vname+".yml") +} - var target Item - var local bool - var hubpath string - var inhub bool - var fname string - var ftype string - var fauthor string - var stage string +func handleSymlink(path string) (string, error) { + hubpath, err := os.Readlink(path) + if err != nil { + return "", fmt.Errorf("unable to read symlink of %s", path) + } + // the symlink target doesn't exist, user might have removed ~/.hub/hub/...yaml without 
deleting /etc/crowdsec/....yaml + _, err = os.Lstat(hubpath) + if os.IsNotExist(err) { + log.Infof("%s is a symlink to %s that doesn't exist, deleting symlink", path, hubpath) + // remove the symlink + if err = os.Remove(path); err != nil { + return "", fmt.Errorf("failed to unlink %s: %w", path, err) + } + + // XXX: is this correct? + return "", nil + } + + return hubpath, nil +} + +func getSHA256(filepath string) (string, error) { + f, err := os.Open(filepath) + if err != nil { + return "", fmt.Errorf("unable to open '%s': %w", filepath, err) + } + + defer f.Close() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return "", fmt.Errorf("unable to calculate sha256 of '%s': %w", filepath, err) + } + + return fmt.Sprintf("%x", h.Sum(nil)), nil +} + +type Walker struct { + // the walk/parserVisit function can't receive extra args + hubdir string + installdir string +} + +func NewWalker(hub *csconfig.Hub) Walker { + return Walker{ + hubdir: hub.HubDir, + installdir: hub.InstallDir, + } +} + +type itemFileInfo struct { + fname string + stage string + ftype string + fauthor string +} + +func (w Walker) getItemInfo(path string) (itemFileInfo, bool, error) { + ret := itemFileInfo{} + inhub := false + + subs := strings.Split(path, string(os.PathSeparator)) + + log.Tracef("path:%s, hubdir:%s, installdir:%s", path, w.hubdir, w.installdir) + log.Tracef("subs:%v", subs) + // we're in hub (~/.hub/hub/) + if strings.HasPrefix(path, w.hubdir) { + log.Tracef("in hub dir") + + inhub = true + //.../hub/parsers/s00-raw/crowdsec/skip-pretag.yaml + //.../hub/scenarios/crowdsec/ssh_bf.yaml + //.../hub/profiles/crowdsec/linux.yaml + if len(subs) < 4 { + log.Fatalf("path is too short : %s (%d)", path, len(subs)) + } + + ret.fname = subs[len(subs)-1] + ret.fauthor = subs[len(subs)-2] + ret.stage = subs[len(subs)-3] + ret.ftype = subs[len(subs)-4] + } else if strings.HasPrefix(path, w.installdir) { // we're in install /etc/crowdsec//... 
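// Illustrative aside, not part of this change: a standalone sketch of the
// slice arithmetic the install-dir branch below relies on. The path is a
// made-up unix-style example; getItemInfo itself splits on os.PathSeparator.
package main

import (
	"fmt"
	"strings"
)

func main() {
	path := "/etc/crowdsec/parsers/s00-raw/skip-pretag.yaml"
	subs := strings.Split(path, "/")

	fname := subs[len(subs)-1] // "skip-pretag.yaml"
	stage := subs[len(subs)-2] // "s00-raw"
	ftype := subs[len(subs)-3] // "parsers"

	fmt.Printf("type=%s stage=%s file=%s\n", ftype, stage, fname)
}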
+ log.Tracef("in install dir") + if len(subs) < 3 { + log.Fatalf("path is too short : %s (%d)", path, len(subs)) + } + ///.../config/parser/stage/file.yaml + ///.../config/postoverflow/stage/file.yaml + ///.../config/scenarios/scenar.yaml + ///.../config/collections/linux.yaml //file is empty + ret.fname = subs[len(subs)-1] + ret.stage = subs[len(subs)-2] + ret.ftype = subs[len(subs)-3] + ret.fauthor = "" + } else { + return itemFileInfo{}, false, fmt.Errorf("file '%s' is not from hub '%s' nor from the configuration directory '%s'", path, w.hubdir, w.installdir) + } + + log.Tracef("stage:%s ftype:%s", ret.stage, ret.ftype) + // log.Infof("%s -> name:%s stage:%s", path, fname, stage) + + if ret.stage == SCENARIOS { + ret.ftype = SCENARIOS + ret.stage = "" + } else if ret.stage == COLLECTIONS { + ret.ftype = COLLECTIONS + ret.stage = "" + } else if ret.stage == WAAP_RULES { + ret.ftype = WAAP_RULES + ret.stage = "" + } else if ret.ftype != PARSERS && ret.ftype != PARSERS_OVFLW { + // its a PARSER / PARSER_OVFLW with a stage + return itemFileInfo{}, inhub, fmt.Errorf("unknown configuration type for file '%s'", path) + } + + log.Tracef("CORRECTED [%s] by [%s] in stage [%s] of type [%s]", ret.fname, ret.fauthor, ret.stage, ret.ftype) + + return ret, inhub, nil +} + +func (w Walker) parserVisit(path string, f os.DirEntry, err error) error { + var ( + local bool + hubpath string + ) if err != nil { log.Debugf("while syncing hub dir: %s", err) @@ -39,392 +160,395 @@ func parser_visit(path string, f os.DirEntry, err error) error { if err != nil { return err } - //we only care about files + + // we only care about files if f == nil || f.IsDir() { return nil } - //we only care about yaml files - if !strings.HasSuffix(f.Name(), ".yaml") && !strings.HasSuffix(f.Name(), ".yml") { + + if !isYAMLFileName(f.Name()) { return nil } - subs := strings.Split(path, string(os.PathSeparator)) - - log.Tracef("path:%s, hubdir:%s, installdir:%s", path, hubdir, installdir) - log.Tracef("subs:%v", subs) - /*we're in hub (~/.hub/hub/)*/ - if strings.HasPrefix(path, hubdir) { - log.Tracef("in hub dir") - inhub = true - //.../hub/parsers/s00-raw/crowdsec/skip-pretag.yaml - //.../hub/scenarios/crowdsec/ssh_bf.yaml - //.../hub/profiles/crowdsec/linux.yaml - if len(subs) < 4 { - log.Fatalf("path is too short : %s (%d)", path, len(subs)) - } - fname = subs[len(subs)-1] - fauthor = subs[len(subs)-2] - stage = subs[len(subs)-3] - ftype = subs[len(subs)-4] - } else if strings.HasPrefix(path, installdir) { /*we're in install /etc/crowdsec//... 
*/ - log.Tracef("in install dir") - if len(subs) < 3 { - log.Fatalf("path is too short : %s (%d)", path, len(subs)) - } - ///.../config/parser/stage/file.yaml - ///.../config/postoverflow/stage/file.yaml - ///.../config/scenarios/scenar.yaml - ///.../config/collections/linux.yaml //file is empty - fname = subs[len(subs)-1] - stage = subs[len(subs)-2] - ftype = subs[len(subs)-3] - fauthor = "" - } else { - return fmt.Errorf("file '%s' is not from hub '%s' nor from the configuration directory '%s'", path, hubdir, installdir) + info, inhub, err := w.getItemInfo(path) + if err != nil { + return err } - log.Tracef("stage:%s ftype:%s", stage, ftype) - //log.Printf("%s -> name:%s stage:%s", path, fname, stage) - if stage == SCENARIOS { - ftype = SCENARIOS - stage = "" - } else if stage == COLLECTIONS { - ftype = COLLECTIONS - stage = "" - } else if stage == WAAP_RULES { - ftype = WAAP_RULES - stage = "" - } else if ftype != PARSERS && ftype != PARSERS_OVFLW /*its a PARSER / PARSER_OVFLW with a stage */ { - return fmt.Errorf("unknown configuration type %s for file '%s'", ftype, path) - } - - log.Tracef("CORRECTED [%s] by [%s] in stage [%s] of type [%s]", fname, fauthor, stage, ftype) - /* we can encounter 'collections' in the form of a symlink : /etc/crowdsec/.../collections/linux.yaml -> ~/.hub/hub/collections/.../linux.yaml when the collection is installed, both files are created */ - //non symlinks are local user files or hub files + // non symlinks are local user files or hub files if f.Type()&os.ModeSymlink == 0 { local = true + log.Tracef("%s isn't a symlink", path) } else { - hubpath, err = os.Readlink(path) + hubpath, err = handleSymlink(path) if err != nil { - return fmt.Errorf("unable to read symlink of %s", path) - } - //the symlink target doesn't exist, user might have removed ~/.hub/hub/...yaml without deleting /etc/crowdsec/....yaml - _, err := os.Lstat(hubpath) - if os.IsNotExist(err) { - log.Infof("%s is a symlink to %s that doesn't exist, deleting symlink", path, hubpath) - //remove the symlink - if err = os.Remove(path); err != nil { - return fmt.Errorf("failed to unlink %s: %+v", path, err) - } - return nil + return err } log.Tracef("%s points to %s", path, hubpath) + + if hubpath == "" { + // XXX: is this correct? 
+ return nil + } } - //if it's not a symlink and not in hub, it's a local file, don't bother + // if it's not a symlink and not in hub, it's a local file, don't bother if local && !inhub { log.Tracef("%s is a local file, skip", path) skippedLocal++ - // log.Printf("local scenario, skip.") - target.Name = fname - target.Stage = stage - target.Installed = true - target.Type = ftype - target.Local = true - target.LocalPath = path - target.UpToDate = true - _, target.FileName = filepath.Split(path) + // log.Infof("local scenario, skip.") + + _, fileName := filepath.Split(path) + + hubIdx[info.ftype][info.fname] = Item{ + Name: info.fname, + Stage: info.stage, + Installed: true, + Type: info.ftype, + Local: true, + LocalPath: path, + UpToDate: true, + FileName: fileName, + } - hubIdx[ftype][fname] = target return nil } - //try to find which configuration item it is - log.Tracef("check [%s] of %s", fname, ftype) + + // try to find which configuration item it is + log.Tracef("check [%s] of %s", info.fname, info.ftype) match := false - for k, v := range hubIdx[ftype] { - log.Tracef("check [%s] vs [%s] : %s", fname, v.RemotePath, ftype+"/"+stage+"/"+fname+".yaml") - if fname != v.FileName { - log.Tracef("%s != %s (filename)", fname, v.FileName) + + for name, item := range hubIdx[info.ftype] { + log.Tracef("check [%s] vs [%s] : %s", info.fname, item.RemotePath, info.ftype+"/"+info.stage+"/"+info.fname+".yaml") + + if info.fname != item.FileName { + log.Tracef("%s != %s (filename)", info.fname, item.FileName) continue } - //wrong stage - if v.Stage != stage { + + // wrong stage + if item.Stage != info.stage { continue } - /*if we are walking hub dir, just mark present files as downloaded*/ + + // if we are walking hub dir, just mark present files as downloaded if inhub { - //wrong author - if fauthor != v.Author { - continue - } - //wrong file - if CheckName(v.Name, fauthor, fname) { + // wrong author + if info.fauthor != item.Author { continue } - if path == hubdir+"/"+v.RemotePath { - log.Tracef("marking %s as downloaded", v.Name) - v.Downloaded = true + // wrong file + if !validItemFileName(item.Name, info.fauthor, info.fname) { + continue } - } else if CheckSuffix(hubpath, v.RemotePath) { - //wrong file - /////.yaml + + if path == w.hubdir+"/"+item.RemotePath { + log.Tracef("marking %s as downloaded", item.Name) + item.Downloaded = true + } + } else if !hasPathSuffix(hubpath, item.RemotePath) { + // wrong file + // ///.yaml continue } + sha, err := getSHA256(path) if err != nil { log.Fatalf("Failed to get sha of %s : %v", path, err) } - //let's reverse sort the versions to deal with hash collisions (#154) - versions := make([]string, 0, len(v.Versions)) - for k := range v.Versions { + + // let's reverse sort the versions to deal with hash collisions (#154) + versions := make([]string, 0, len(item.Versions)) + for k := range item.Versions { versions = append(versions, k) } + sort.Sort(sort.Reverse(sort.StringSlice(versions))) for _, version := range versions { - val := v.Versions[version] + val := item.Versions[version] if sha != val.Digest { - //log.Printf("matching filenames, wrong hash %s != %s -- %s", sha, val.Digest, spew.Sdump(v)) + // log.Infof("matching filenames, wrong hash %s != %s -- %s", sha, val.Digest, spew.Sdump(v)) continue } - /*we got an exact match, update struct*/ + + // we got an exact match, update struct + + item.Downloaded = true + item.LocalHash = sha + if !inhub { - log.Tracef("found exact match for %s, version is %s, latest is %s", v.Name, version, v.Version) - 
v.LocalPath = path - v.LocalVersion = version - v.Tainted = false - v.Downloaded = true - /*if we're walking the hub, present file doesn't means installed file*/ - v.Installed = true - v.LocalHash = sha - _, target.FileName = filepath.Split(path) - } else { - v.Downloaded = true - v.LocalHash = sha + log.Tracef("found exact match for %s, version is %s, latest is %s", item.Name, version, item.Version) + item.LocalPath = path + item.LocalVersion = version + item.Tainted = false + // if we're walking the hub, present file doesn't means installed file + item.Installed = true } - if version == v.Version { - log.Tracef("%s is up-to-date", v.Name) - v.UpToDate = true + + if version == item.Version { + log.Tracef("%s is up-to-date", item.Name) + item.UpToDate = true } + match = true + break } - if !match { - log.Tracef("got tainted match for %s : %s", v.Name, path) - skippedTainted += 1 - //the file and the stage is right, but the hash is wrong, it has been tainted by user - if !inhub { - v.LocalPath = path - v.Installed = true - } - v.UpToDate = false - v.LocalVersion = "?" - v.Tainted = true - v.LocalHash = sha - _, target.FileName = filepath.Split(path) + if !match { + log.Tracef("got tainted match for %s: %s", item.Name, path) + + skippedTainted++ + // the file and the stage is right, but the hash is wrong, it has been tainted by user + if !inhub { + item.LocalPath = path + item.Installed = true + } + + item.UpToDate = false + item.LocalVersion = "?" + item.Tainted = true + item.LocalHash = sha } - //update the entry if appropriate + + // update the entry if appropriate // if _, ok := hubIdx[ftype][k]; !ok || !inhub || v.D { // fmt.Printf("Updating %s", k) // hubIdx[ftype][k] = v // } else if !inhub { // } else if - hubIdx[ftype][k] = v + hubIdx[info.ftype][name] = item + return nil } - log.Infof("Ignoring file %s of type %s", path, ftype) + + log.Infof("Ignoring file %s of type %s", path, info.ftype) + return nil } func CollecDepsCheck(v *Item) error { - - if GetVersionStatus(v) != 0 { //not up-to-date + if v.versionStatus() != 0 { // not up-to-date log.Debugf("%s dependencies not checked : not up-to-date", v.Name) return nil } - /*if it's a collection, ensure all the items are installed, or tag it as tainted*/ - if v.Type == COLLECTIONS { - log.Tracef("checking submembers of %s installed:%t", v.Name, v.Installed) - var tmp = [][]string{v.Parsers, v.PostOverflows, v.Scenarios, v.Collections} - for idx, ptr := range tmp { - ptrtype := ItemTypes[idx] - for _, p := range ptr { - val, ok := hubIdx[ptrtype][p] - if !ok { - log.Fatalf("Referred %s %s in collection %s doesn't exist.", ptrtype, p, v.Name) - } - log.Tracef("check %s installed:%t", val.Name, val.Installed) - if !v.Installed { - continue - } - if val.Type == COLLECTIONS { - log.Tracef("collec, recurse.") - if err := CollecDepsCheck(&val); err != nil { - if val.Tainted { - v.Tainted = true - } - return fmt.Errorf("sub collection %s is broken : %s", val.Name, err) + if v.Type != COLLECTIONS { + return nil + } + + // if it's a collection, ensure all the items are installed, or tag it as tainted + log.Tracef("checking submembers of %s installed:%t", v.Name, v.Installed) + + for idx, itemSlice := range [][]string{v.Parsers, v.PostOverflows, v.Scenarios, v.Collections} { + sliceType := ItemTypes[idx] + for _, subName := range itemSlice { + subItem, ok := hubIdx[sliceType][subName] + if !ok { + log.Fatalf("Referred %s %s in collection %s doesn't exist.", sliceType, subName, v.Name) + } + + log.Tracef("check %s installed:%t", subItem.Name, 
subItem.Installed) + + if !v.Installed { + continue + } + + if subItem.Type == COLLECTIONS { + log.Tracef("collec, recurse.") + + if err := CollecDepsCheck(&subItem); err != nil { + if subItem.Tainted { + v.Tainted = true } - hubIdx[ptrtype][p] = val + + return fmt.Errorf("sub collection %s is broken: %w", subItem.Name, err) } - //propagate the state of sub-items to set - if val.Tainted { - v.Tainted = true - return fmt.Errorf("tainted %s %s, tainted.", ptrtype, p) - } - if !val.Installed && v.Installed { - v.Tainted = true - return fmt.Errorf("missing %s %s, tainted.", ptrtype, p) - } - if !val.UpToDate { - v.UpToDate = false - return fmt.Errorf("outdated %s %s", ptrtype, p) - } - skip := false - for idx := range val.BelongsToCollections { - if val.BelongsToCollections[idx] == v.Name { - skip = true - } - } - if !skip { - val.BelongsToCollections = append(val.BelongsToCollections, v.Name) - } - hubIdx[ptrtype][p] = val - log.Tracef("checking for %s - tainted:%t uptodate:%t", p, v.Tainted, v.UpToDate) + hubIdx[sliceType][subName] = subItem } + + // propagate the state of sub-items to set + if subItem.Tainted { + v.Tainted = true + return fmt.Errorf("tainted %s %s, tainted", sliceType, subName) + } + + if !subItem.Installed && v.Installed { + v.Tainted = true + return fmt.Errorf("missing %s %s, tainted", sliceType, subName) + } + + if !subItem.UpToDate { + v.UpToDate = false + return fmt.Errorf("outdated %s %s", sliceType, subName) + } + + skip := false + + for idx := range subItem.BelongsToCollections { + if subItem.BelongsToCollections[idx] == v.Name { + skip = true + } + } + + if !skip { + subItem.BelongsToCollections = append(subItem.BelongsToCollections, v.Name) + } + + hubIdx[sliceType][subName] = subItem + + log.Tracef("checking for %s - tainted:%t uptodate:%t", subName, v.Tainted, v.UpToDate) } } + return nil } -func SyncDir(hub *csconfig.Hub, dir string) (error, []string) { - hubdir = hub.HubDir - installdir = hub.ConfigDir +func SyncDir(hub *csconfig.Hub, dir string) ([]string, error) { warnings := []string{} - /*For each, scan PARSERS, PARSERS_OVFLW, SCENARIOS and COLLECTIONS last*/ + // For each, scan PARSERS, PARSERS_OVFLW, SCENARIOS and COLLECTIONS last for _, scan := range ItemTypes { cpath, err := filepath.Abs(fmt.Sprintf("%s/%s", dir, scan)) if err != nil { log.Errorf("failed %s : %s", cpath, err) } - err = filepath.WalkDir(cpath, parser_visit) + + err = filepath.WalkDir(cpath, NewWalker(hub).parserVisit) if err != nil { - return err, warnings + return warnings, err } - } - for k, v := range hubIdx[COLLECTIONS] { - if v.Installed { - versStat := GetVersionStatus(&v) - if versStat == 0 { //latest - if err := CollecDepsCheck(&v); err != nil { - warnings = append(warnings, fmt.Sprintf("dependency of %s : %s", v.Name, err)) - hubIdx[COLLECTIONS][k] = v - } - } else if versStat == 1 { //not up-to-date - warnings = append(warnings, fmt.Sprintf("update for collection %s available (currently:%s, latest:%s)", v.Name, v.LocalVersion, v.Version)) - } else { //version is higher than the highest available from hub? 
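// Illustrative aside, not part of this change: a compact sketch of how the
// three versionStatus() outcomes handled around here translate into messages.
// Item and versionStatus are the package's own; the wording is made up.
func describeVersionStatus(item Item) string {
	switch item.versionStatus() {
	case 0: // local copy matches the latest hub version
		return fmt.Sprintf("%s is up-to-date", item.Name)
	case 1: // the hub index knows a newer version
		return fmt.Sprintf("update for %s available (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version)
	default: // local version is newer than anything in the hub index
		return fmt.Sprintf("%s is in the future (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version)
	}
}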
- warnings = append(warnings, fmt.Sprintf("collection %s is in the future (currently:%s, latest:%s)", v.Name, v.LocalVersion, v.Version)) + for name, item := range hubIdx[COLLECTIONS] { + if !item.Installed { + continue + } + + vs := item.versionStatus() + switch vs { + case 0: // latest + if err := CollecDepsCheck(&item); err != nil { + warnings = append(warnings, fmt.Sprintf("dependency of %s: %s", item.Name, err)) + hubIdx[COLLECTIONS][name] = item } - log.Debugf("installed (%s) - status:%d | installed:%s | latest : %s | full : %+v", v.Name, semver.Compare("v"+v.Version, "v"+v.LocalVersion), v.LocalVersion, v.Version, v.Versions) + case 1: // not up-to-date + warnings = append(warnings, fmt.Sprintf("update for collection %s available (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version)) + default: // version is higher than the highest available from hub? + warnings = append(warnings, fmt.Sprintf("collection %s is in the future (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version)) } + + log.Debugf("installed (%s) - status:%d | installed:%s | latest : %s | full : %+v", item.Name, vs, item.LocalVersion, item.Version, item.Versions) } - return nil, warnings + + return warnings, nil } -/* Updates the infos from HubInit() with the local state */ -func LocalSync(hub *csconfig.Hub) (error, []string) { +// Updates the info from HubInit() with the local state +func LocalSync(hub *csconfig.Hub) ([]string, error) { skippedLocal = 0 skippedTainted = 0 - err, warnings := SyncDir(hub, hub.ConfigDir) + warnings, err := SyncDir(hub, hub.InstallDir) if err != nil { - return fmt.Errorf("failed to scan %s : %s", hub.ConfigDir, err), warnings + return warnings, fmt.Errorf("failed to scan %s: %w", hub.InstallDir, err) } - err, _ = SyncDir(hub, hub.HubDir) + + _, err = SyncDir(hub, hub.HubDir) if err != nil { - return fmt.Errorf("failed to scan %s : %s", hub.HubDir, err), warnings + return warnings, fmt.Errorf("failed to scan %s: %w", hub.HubDir, err) } - return nil, warnings + + return warnings, nil } func GetHubIdx(hub *csconfig.Hub) error { if hub == nil { return fmt.Errorf("no configuration found for hub") } + log.Debugf("loading hub idx %s", hub.HubIndexFile) + bidx, err := os.ReadFile(hub.HubIndexFile) if err != nil { return fmt.Errorf("unable to read index file: %w", err) } + ret, err := LoadPkgIndex(bidx) if err != nil { - if !errors.Is(err, ReferenceMissingError) { - log.Fatalf("Unable to load existing index : %v.", err) + if !errors.Is(err, ErrMissingReference) { + return fmt.Errorf("unable to load existing index: %w", err) } + + // XXX: why the error check if we bail out anyway? 
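// Illustrative aside, not part of this change: SyncDir and LocalSync now
// return their warnings first and the error second, so callers destructure as
//
//	warnings, err := LocalSync(hub)
//	if err != nil {
//		return fmt.Errorf("sync failed: %w", err)
//	}
//	for _, w := range warnings {
//		log.Warning(w)
//	}
//
// (the call further below keeps only the error and drops the warnings).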
return err } + hubIdx = ret - err, _ = LocalSync(hub) + + _, err = LocalSync(hub) if err != nil { - log.Fatalf("Failed to sync Hub index with local deployment : %v", err) + return fmt.Errorf("failed to sync Hub index with local deployment : %w", err) } + return nil } -/*LoadPkgIndex loads a local .index.json file and returns the map of parsers/scenarios/collections associated*/ +// LoadPkgIndex loads a local .index.json file and returns the map of associated parsers/scenarios/collections func LoadPkgIndex(buff []byte) (map[string]map[string]Item, error) { - var err error - var RawIndex map[string]map[string]Item - var missingItems []string + var ( + RawIndex map[string]map[string]Item + missingItems []string + ) - if err = json.Unmarshal(buff, &RawIndex); err != nil { - return nil, fmt.Errorf("failed to unmarshal index : %v", err) + if err := json.Unmarshal(buff, &RawIndex); err != nil { + return nil, fmt.Errorf("failed to unmarshal index: %w", err) } log.Debugf("%d item types in hub index", len(ItemTypes)) - /*Iterate over the different types to complete struct */ + + // Iterate over the different types to complete the struct for _, itemType := range ItemTypes { - /*complete struct*/ log.Tracef("%d item", len(RawIndex[itemType])) - for idx, item := range RawIndex[itemType] { - item.Name = idx + + for name, item := range RawIndex[itemType] { + item.Name = name item.Type = itemType x := strings.Split(item.RemotePath, "/") item.FileName = x[len(x)-1] - RawIndex[itemType][idx] = item - /*if it's a collection, check its sub-items are present*/ - //XX should be done later - if itemType == COLLECTIONS { - var tmp = [][]string{item.Parsers, item.PostOverflows, item.Scenarios, item.Collections, item.WafRules} - for idx, ptr := range tmp { - ptrtype := ItemTypes[idx] - for _, p := range ptr { - if _, ok := RawIndex[ptrtype][p]; !ok { - log.Errorf("Referred %s %s in collection %s doesn't exist.", ptrtype, p, item.Name) - missingItems = append(missingItems, p) - } + RawIndex[itemType][name] = item + + if itemType != COLLECTIONS { + continue + } + + // if it's a collection, check its sub-items are present + // XXX should be done later + for idx, ptr := range [][]string{item.Parsers, item.PostOverflows, item.Scenarios, item.Collections, item.WafRules} { + ptrtype := ItemTypes[idx] + for _, p := range ptr { + if _, ok := RawIndex[ptrtype][p]; !ok { + log.Errorf("Referred %s %s in collection %s doesn't exist.", ptrtype, p, item.Name) + missingItems = append(missingItems, p) } } } } } + if len(missingItems) > 0 { - return RawIndex, fmt.Errorf("%q : %w", missingItems, ReferenceMissingError) + return RawIndex, fmt.Errorf("%q: %w", missingItems, ErrMissingReference) } return RawIndex, nil diff --git a/pkg/cwhub/path_separator_windows.go b/pkg/cwhub/path_separator_windows.go deleted file mode 100644 index 42f61aa16..000000000 --- a/pkg/cwhub/path_separator_windows.go +++ /dev/null @@ -1,23 +0,0 @@ -package cwhub - -import ( - "path/filepath" - "strings" -) - -func CheckSuffix(hubpath string, remotePath string) bool { - newPath := filepath.ToSlash(hubpath) - if !strings.HasSuffix(newPath, remotePath) { - return true - } else { - return false - } -} - -func CheckName(vname string, fauthor string, fname string) bool { - if vname+".yaml" != fauthor+"/"+fname && vname+".yml" != fauthor+"/"+fname { - return true - } else { - return false - } -} diff --git a/pkg/cwhub/pathseparator.go b/pkg/cwhub/pathseparator.go deleted file mode 100644 index 0340697ee..000000000 --- a/pkg/cwhub/pathseparator.go +++ /dev/null @@ 
-1,24 +0,0 @@ -//go:build linux || freebsd || netbsd || openbsd || solaris || !windows -// +build linux freebsd netbsd openbsd solaris !windows - -package cwhub - -import "strings" - -const PathSeparator = "/" - -func CheckSuffix(hubpath string, remotePath string) bool { - if !strings.HasSuffix(hubpath, remotePath) { - return true - } else { - return false - } -} - -func CheckName(vname string, fauthor string, fname string) bool { - if vname+".yaml" != fauthor+"/"+fname && vname+".yml" != fauthor+"/"+fname { - return true - } else { - return false - } -} diff --git a/pkg/cwhub/pathseparator_unix.go b/pkg/cwhub/pathseparator_unix.go new file mode 100644 index 000000000..9420dc11e --- /dev/null +++ b/pkg/cwhub/pathseparator_unix.go @@ -0,0 +1,9 @@ +//go:build unix + +package cwhub + +import "strings" + +func hasPathSuffix(hubpath string, remotePath string) bool { + return strings.HasSuffix(hubpath, remotePath) +} diff --git a/pkg/cwhub/pathseparator_windows.go b/pkg/cwhub/pathseparator_windows.go new file mode 100644 index 000000000..a6d1be3f8 --- /dev/null +++ b/pkg/cwhub/pathseparator_windows.go @@ -0,0 +1,11 @@ +package cwhub + +import ( + "path/filepath" + "strings" +) + +func hasPathSuffix(hubpath string, remotePath string) bool { + newPath := filepath.ToSlash(hubpath) + return strings.HasSuffix(newPath, remotePath) +} diff --git a/pkg/cwhub/tests/collection_v1.yaml b/pkg/cwhub/testdata/collection_v1.yaml similarity index 100% rename from pkg/cwhub/tests/collection_v1.yaml rename to pkg/cwhub/testdata/collection_v1.yaml diff --git a/pkg/cwhub/tests/collection_v2.yaml b/pkg/cwhub/testdata/collection_v2.yaml similarity index 100% rename from pkg/cwhub/tests/collection_v2.yaml rename to pkg/cwhub/testdata/collection_v2.yaml diff --git a/pkg/cwhub/tests/foobar_parser.yaml b/pkg/cwhub/testdata/foobar_parser.yaml similarity index 100% rename from pkg/cwhub/tests/foobar_parser.yaml rename to pkg/cwhub/testdata/foobar_parser.yaml diff --git a/pkg/cwhub/tests/index1.json b/pkg/cwhub/testdata/index1.json similarity index 100% rename from pkg/cwhub/tests/index1.json rename to pkg/cwhub/testdata/index1.json diff --git a/pkg/cwhub/tests/index2.json b/pkg/cwhub/testdata/index2.json similarity index 100% rename from pkg/cwhub/tests/index2.json rename to pkg/cwhub/testdata/index2.json diff --git a/pkg/database/alerts.go b/pkg/database/alerts.go index afdf51688..0ae63f374 100644 --- a/pkg/database/alerts.go +++ b/pkg/database/alerts.go @@ -9,17 +9,18 @@ import ( "strings" "time" + "github.com/mattn/go-sqlite3" + "github.com/davecgh/go-spew/spew" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/crowdsecurity/crowdsec/pkg/csconfig" + "github.com/crowdsecurity/go-cs-lib/slicetools" + "github.com/crowdsecurity/crowdsec/pkg/database/ent" "github.com/crowdsecurity/crowdsec/pkg/database/ent/alert" - "github.com/crowdsecurity/crowdsec/pkg/database/ent/bouncer" "github.com/crowdsecurity/crowdsec/pkg/database/ent/decision" "github.com/crowdsecurity/crowdsec/pkg/database/ent/event" - "github.com/crowdsecurity/crowdsec/pkg/database/ent/machine" "github.com/crowdsecurity/crowdsec/pkg/database/ent/meta" "github.com/crowdsecurity/crowdsec/pkg/database/ent/predicate" "github.com/crowdsecurity/crowdsec/pkg/models" @@ -27,10 +28,10 @@ import ( ) const ( - paginationSize = 100 // used to queryAlert to avoid 'too many SQL variable' - defaultLimit = 100 // default limit of element to returns when query alerts - bulkSize = 50 // bulk size when create alerts - decisionBulkSize = 50 + paginationSize = 100 
// used to queryAlert to avoid 'too many SQL variable' + defaultLimit = 100 // default limit of element to returns when query alerts + bulkSize = 50 // bulk size when create alerts + maxLockRetries = 10 // how many times to retry a bulk operation when sqlite3.ErrBusy is encountered ) func formatAlertCN(source models.Source) string { @@ -44,32 +45,36 @@ func formatAlertCN(source models.Source) string { } func formatAlertSource(alert *models.Alert) string { - if alert.Source == nil { + if alert.Source == nil || alert.Source.Scope == nil || *alert.Source.Scope == "" { return "empty source" } if *alert.Source.Scope == types.Ip { ret := "ip " + *alert.Source.Value + cn := formatAlertCN(*alert.Source) if cn != "" { ret += " (" + cn + ")" } + return ret } if *alert.Source.Scope == types.Range { ret := "range " + *alert.Source.Value + cn := formatAlertCN(*alert.Source) if cn != "" { ret += " (" + cn + ")" } + return ret } return *alert.Source.Scope + " " + *alert.Source.Value } -func formatAlertAsString(machineId string, alert *models.Alert) []string { +func formatAlertAsString(machineID string, alert *models.Alert) []string { src := formatAlertSource(alert) /**/ @@ -85,11 +90,15 @@ func formatAlertAsString(machineId string, alert *models.Alert) []string { reason := fmt.Sprintf("%s by %s", msg, src) if len(alert.Decisions) == 0 { - return []string{fmt.Sprintf("(%s) alert : %s", machineId, reason)} + return []string{fmt.Sprintf("(%s) alert : %s", machineID, reason)} } var retStr []string + if alert.Decisions[0].Origin != nil && *alert.Decisions[0].Origin == types.CscliImportOrigin { + return []string{fmt.Sprintf("(%s) alert : %s", machineID, reason)} + } + for i, decisionItem := range alert.Decisions { decision := "" if alert.Simulated != nil && *alert.Simulated { @@ -97,25 +106,29 @@ func formatAlertAsString(machineId string, alert *models.Alert) []string { } else if decisionItem.Simulated != nil && *decisionItem.Simulated { decision = "(simulated decision)" } + if log.GetLevel() >= log.DebugLevel { /*spew is expensive*/ log.Debugf("%s", spew.Sdump(decisionItem)) } + if len(alert.Decisions) > 1 { reason = fmt.Sprintf("%s for %d/%d decisions", msg, i+1, len(alert.Decisions)) } - machineIdOrigin := "" - if machineId == "" { - machineIdOrigin = *decisionItem.Origin + + machineIDOrigin := "" + if machineID == "" { + machineIDOrigin = *decisionItem.Origin } else { - machineIdOrigin = fmt.Sprintf("%s/%s", machineId, *decisionItem.Origin) + machineIDOrigin = fmt.Sprintf("%s/%s", machineID, *decisionItem.Origin) } decision += fmt.Sprintf("%s %s on %s %s", *decisionItem.Duration, *decisionItem.Type, *decisionItem.Scope, *decisionItem.Value) retStr = append(retStr, - fmt.Sprintf("(%s) %s : %s", machineIdOrigin, reason, decision)) + fmt.Sprintf("(%s) %s : %s", machineIDOrigin, reason, decision)) } + return retStr } @@ -123,7 +136,6 @@ func formatAlertAsString(machineId string, alert *models.Alert) []string { // if alert already exists, it checks it associated decisions already exists // if some associated decisions are missing (ie. 
previous insert ended up in error) it inserts them func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) (string, error) { - if alertItem.UUID == "" { return "", fmt.Errorf("alert UUID is empty") } @@ -136,11 +148,12 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) //alert wasn't found, insert it (expected hotpath) if ent.IsNotFound(err) || len(alerts) == 0 { - ret, err := c.CreateAlert(machineID, []*models.Alert{alertItem}) + alertIDs, err := c.CreateAlert(machineID, []*models.Alert{alertItem}) if err != nil { return "", fmt.Errorf("unable to create alert: %w", err) } - return ret[0], nil + + return alertIDs[0], nil } //this should never happen @@ -149,22 +162,26 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) } log.Infof("Alert %s already exists, checking associated decisions", alertItem.UUID) + //alert is found, check for any missing decisions - missingUuids := []string{} - newUuids := []string{} - for _, decItem := range alertItem.Decisions { - newUuids = append(newUuids, decItem.UUID) + + newUuids := make([]string, len(alertItem.Decisions)) + for i, decItem := range alertItem.Decisions { + newUuids[i] = decItem.UUID } foundAlert := alerts[0] - foundUuids := []string{} - for _, decItem := range foundAlert.Edges.Decisions { - foundUuids = append(foundUuids, decItem.UUID) + foundUuids := make([]string, len(foundAlert.Edges.Decisions)) + + for i, decItem := range foundAlert.Edges.Decisions { + foundUuids[i] = decItem.UUID } sort.Strings(foundUuids) sort.Strings(newUuids) + missingUuids := []string{} + for idx, uuid := range newUuids { if len(foundUuids) < idx+1 || uuid != foundUuids[idx] { log.Warningf("Decision with uuid %s not found in alert %s", uuid, foundAlert.UUID) @@ -177,9 +194,10 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) return "", nil } - //add any and all missing decisions based on their uuids - //prepare missing decisions + // add any and all missing decisions based on their uuids + // prepare missing decisions missingDecisions := []*models.Decision{} + for _, uuid := range missingUuids { for _, newDecision := range alertItem.Decisions { if newDecision.UUID == uuid { @@ -191,8 +209,7 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) //add missing decisions log.Debugf("Adding %d missing decisions to alert %s", len(missingDecisions), foundAlert.UUID) - decisions := make([]*ent.Decision, 0) - decisionBulk := make([]*ent.DecisionCreate, 0, decisionBulkSize) + decisionBuilders := make([]*ent.DecisionCreate, len(missingDecisions)) for i, decisionItem := range missingDecisions { var start_ip, start_sfx, end_ip, end_sfx int64 @@ -205,20 +222,24 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) return "", errors.Wrapf(InvalidIPOrRange, "invalid addr/range %s : %s", *decisionItem.Value, err) } } + decisionDuration, err := time.ParseDuration(*decisionItem.Duration) if err != nil { log.Warningf("invalid duration %s for decision %s", *decisionItem.Duration, decisionItem.UUID) continue } + //use the created_at from the alert instead alertTime, err := time.Parse(time.RFC3339, alertItem.CreatedAt) if err != nil { log.Errorf("unable to parse alert time %s : %s", alertItem.CreatedAt, err) + alertTime = time.Now() } + decisionUntil := alertTime.UTC().Add(decisionDuration) - decisionCreate := c.Ent.Decision.Create(). + decisionBuilder := c.Ent.Decision.Create(). SetUntil(decisionUntil). 
SetScenario(*decisionItem.Scenario). SetType(*decisionItem.Type). @@ -233,58 +254,34 @@ func (c *Client) CreateOrUpdateAlert(machineID string, alertItem *models.Alert) SetSimulated(*alertItem.Simulated). SetUUID(decisionItem.UUID) - decisionBulk = append(decisionBulk, decisionCreate) - if len(decisionBulk) == decisionBulkSize { - decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX) - if err != nil { - return "", errors.Wrapf(BulkError, "creating alert decisions: %s", err) + decisionBuilders[i] = decisionBuilder + } - } - decisions = append(decisions, decisionsCreateRet...) - if len(missingDecisions)-i <= decisionBulkSize { - decisionBulk = make([]*ent.DecisionCreate, 0, (len(missingDecisions) - i)) - } else { - decisionBulk = make([]*ent.DecisionCreate, 0, decisionBulkSize) - } + decisions := []*ent.Decision{} + + builderChunks := slicetools.Chunks(decisionBuilders, c.decisionBulkSize) + + for _, builderChunk := range builderChunks { + decisionsCreateRet, err := c.Ent.Decision.CreateBulk(builderChunk...).Save(c.CTX) + if err != nil { + return "", fmt.Errorf("creating alert decisions: %w", err) } + + decisions = append(decisions, decisionsCreateRet...) } - decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX) - if err != nil { - return "", errors.Wrapf(BulkError, "creating alert decisions: %s", err) - } - decisions = append(decisions, decisionsCreateRet...) + //now that we bulk created missing decisions, let's update the alert - err = c.Ent.Alert.Update().Where(alert.UUID(alertItem.UUID)).AddDecisions(decisions...).Exec(c.CTX) - if err != nil { - return "", fmt.Errorf("updating alert %s: %w", alertItem.UUID, err) + + decisionChunks := slicetools.Chunks(decisions, c.decisionBulkSize) + + for _, decisionChunk := range decisionChunks { + err = c.Ent.Alert.Update().Where(alert.UUID(alertItem.UUID)).AddDecisions(decisionChunk...).Exec(c.CTX) + if err != nil { + return "", fmt.Errorf("updating alert %s: %w", alertItem.UUID, err) + } } return "", nil - -} - -func (c *Client) CreateAlert(machineID string, alertList []*models.Alert) ([]string, error) { - pageStart := 0 - pageEnd := bulkSize - ret := []string{} - for { - if pageEnd >= len(alertList) { - results, err := c.CreateAlertBulk(machineID, alertList[pageStart:]) - if err != nil { - return []string{}, fmt.Errorf("unable to create alerts: %s", err) - } - ret = append(ret, results...) - break - } - results, err := c.CreateAlertBulk(machineID, alertList[pageStart:pageEnd]) - if err != nil { - return []string{}, fmt.Errorf("unable to create alerts: %s", err) - } - ret = append(ret, results...) - pageStart += bulkSize - pageEnd += bulkSize - } - return ret, nil } // UpdateCommunityBlocklist is called to update either the community blocklist (or other lists the user subscribed to) @@ -292,21 +289,23 @@ func (c *Client) CreateAlert(machineID string, alertList []*models.Alert) ([]str // 1st pull, you get decisions [1,2,3]. it inserts [1,2,3] // 2nd pull, you get decisions [1,2,3,4]. 
it inserts [1,2,3,4] and will try to delete [1,2,3,4] with a different alert ID and same origin func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, int, error) { - var err error - if alertItem == nil { return 0, 0, 0, fmt.Errorf("nil alert") } + if alertItem.StartAt == nil { return 0, 0, 0, fmt.Errorf("nil start_at") } + startAtTime, err := time.Parse(time.RFC3339, *alertItem.StartAt) if err != nil { return 0, 0, 0, errors.Wrapf(ParseTimeFail, "start_at field time '%s': %s", *alertItem.StartAt, err) } + if alertItem.StopAt == nil { return 0, 0, 0, fmt.Errorf("nil stop_at") } + stopAtTime, err := time.Parse(time.RFC3339, *alertItem.StopAt) if err != nil { return 0, 0, 0, errors.Wrapf(ParseTimeFail, "stop_at field time '%s': %s", *alertItem.StopAt, err) @@ -315,6 +314,7 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in ts, err := time.Parse(time.RFC3339, *alertItem.StopAt) if err != nil { c.Log.Errorf("While parsing StartAt of item %s : %s", *alertItem.StopAt, err) + ts = time.Now().UTC() } @@ -353,9 +353,9 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in if err != nil { return 0, 0, 0, errors.Wrapf(BulkError, "error creating transaction : %s", err) } - decisionBulk := make([]*ent.DecisionCreate, 0, decisionBulkSize) - valueList := make([]string, 0, decisionBulkSize) + DecOrigin := CapiMachineID + if *alertItem.Decisions[0].Origin == CapiMachineID || *alertItem.Decisions[0].Origin == CapiListsMachineID { DecOrigin = *alertItem.Decisions[0].Origin } else { @@ -365,25 +365,33 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in deleted := 0 inserted := 0 - for i, decisionItem := range alertItem.Decisions { + decisionBuilders := make([]*ent.DecisionCreate, 0, len(alertItem.Decisions)) + valueList := make([]string, 0, len(alertItem.Decisions)) + + for _, decisionItem := range alertItem.Decisions { var start_ip, start_sfx, end_ip, end_sfx int64 var sz int + if decisionItem.Duration == nil { log.Warning("nil duration in community decision") continue } + duration, err := time.ParseDuration(*decisionItem.Duration) if err != nil { rollbackErr := txClient.Rollback() if rollbackErr != nil { log.Errorf("rollback error: %s", rollbackErr) } + return 0, 0, 0, errors.Wrapf(ParseDurationFail, "decision duration '%+v' : %s", *decisionItem.Duration, err) } + if decisionItem.Scope == nil { log.Warning("nil scope in community decision") continue } + /*if the scope is IP or Range, convert the value to integers */ if strings.ToLower(*decisionItem.Scope) == "ip" || strings.ToLower(*decisionItem.Scope) == "range" { sz, start_ip, start_sfx, end_ip, end_sfx, err = types.Addr2Ints(*decisionItem.Value) @@ -392,11 +400,13 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in if rollbackErr != nil { log.Errorf("rollback error: %s", rollbackErr) } + return 0, 0, 0, errors.Wrapf(InvalidIPOrRange, "invalid addr/range %s : %s", *decisionItem.Value, err) } } + /*bulk insert some new decisions*/ - decisionBulk = append(decisionBulk, c.Ent.Decision.Create(). + decisionBuilder := c.Ent.Decision.Create(). SetUntil(ts.Add(duration)). SetScenario(*decisionItem.Scenario). SetType(*decisionItem.Type). @@ -409,145 +419,141 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in SetScope(*decisionItem.Scope). SetOrigin(*decisionItem.Origin). SetSimulated(*alertItem.Simulated). 
- SetOwner(alertRef)) + SetOwner(alertRef) + + decisionBuilders = append(decisionBuilders, decisionBuilder) /*for bulk delete of duplicate decisions*/ if decisionItem.Value == nil { log.Warning("nil value in community decision") continue } + valueList = append(valueList, *decisionItem.Value) - - if len(decisionBulk) == decisionBulkSize { - - insertedDecisions, err := txClient.Decision.CreateBulk(decisionBulk...).Save(c.CTX) - if err != nil { - rollbackErr := txClient.Rollback() - if rollbackErr != nil { - log.Errorf("rollback error: %s", rollbackErr) - } - return 0, 0, 0, errors.Wrapf(BulkError, "bulk creating decisions : %s", err) - } - inserted += len(insertedDecisions) - - /*Deleting older decisions from capi*/ - deletedDecisions, err := txClient.Decision.Delete(). - Where(decision.And( - decision.OriginEQ(DecOrigin), - decision.Not(decision.HasOwnerWith(alert.IDEQ(alertRef.ID))), - decision.ValueIn(valueList...), - )).Exec(c.CTX) - if err != nil { - rollbackErr := txClient.Rollback() - if rollbackErr != nil { - log.Errorf("rollback error: %s", rollbackErr) - } - return 0, 0, 0, fmt.Errorf("while deleting older community blocklist decisions: %w", err) - } - deleted += deletedDecisions - - if len(alertItem.Decisions)-i <= decisionBulkSize { - decisionBulk = make([]*ent.DecisionCreate, 0, (len(alertItem.Decisions) - i)) - valueList = make([]string, 0, (len(alertItem.Decisions) - i)) - } else { - decisionBulk = make([]*ent.DecisionCreate, 0, decisionBulkSize) - valueList = make([]string, 0, decisionBulkSize) - } - } - } - log.Debugf("deleted %d decisions for %s vs %s", deleted, DecOrigin, *alertItem.Decisions[0].Origin) - insertedDecisions, err := txClient.Decision.CreateBulk(decisionBulk...).Save(c.CTX) - if err != nil { - return 0, 0, 0, errors.Wrapf(BulkError, "creating alert decisions: %s", err) - } - inserted += len(insertedDecisions) - /*Deleting older decisions from capi*/ - if len(valueList) > 0 { + + deleteChunks := slicetools.Chunks(valueList, c.decisionBulkSize) + + for _, deleteChunk := range deleteChunks { + // Deleting older decisions from capi deletedDecisions, err := txClient.Decision.Delete(). 
Where(decision.And( decision.OriginEQ(DecOrigin), decision.Not(decision.HasOwnerWith(alert.IDEQ(alertRef.ID))), - decision.ValueIn(valueList...), + decision.ValueIn(deleteChunk...), )).Exec(c.CTX) if err != nil { rollbackErr := txClient.Rollback() if rollbackErr != nil { log.Errorf("rollback error: %s", rollbackErr) } + return 0, 0, 0, fmt.Errorf("while deleting older community blocklist decisions: %w", err) } + deleted += deletedDecisions } + + builderChunks := slicetools.Chunks(decisionBuilders, c.decisionBulkSize) + + for _, builderChunk := range builderChunks { + insertedDecisions, err := txClient.Decision.CreateBulk(builderChunk...).Save(c.CTX) + if err != nil { + rollbackErr := txClient.Rollback() + if rollbackErr != nil { + log.Errorf("rollback error: %s", rollbackErr) + } + + return 0, 0, 0, fmt.Errorf("while bulk creating decisions: %w", err) + } + + inserted += len(insertedDecisions) + } + + log.Debugf("deleted %d decisions for %s vs %s", deleted, DecOrigin, *alertItem.Decisions[0].Origin) + err = txClient.Commit() if err != nil { rollbackErr := txClient.Rollback() if rollbackErr != nil { log.Errorf("rollback error: %s", rollbackErr) } - return 0, 0, 0, errors.Wrapf(BulkError, "error committing transaction : %s", err) + + return 0, 0, 0, fmt.Errorf("error committing transaction: %w", err) } return alertRef.ID, inserted, deleted, nil } -func chunkDecisions(decisions []*ent.Decision, chunkSize int) [][]*ent.Decision { - var ret [][]*ent.Decision - var chunk []*ent.Decision +func (c *Client) createDecisionChunk(simulated bool, stopAtTime time.Time, decisions []*models.Decision) ([]*ent.Decision, error) { + decisionCreate := make([]*ent.DecisionCreate, len(decisions)) - for _, d := range decisions { - chunk = append(chunk, d) - if len(chunk) == chunkSize { - ret = append(ret, chunk) - chunk = nil + for i, decisionItem := range decisions { + var start_ip, start_sfx, end_ip, end_sfx int64 + var sz int + + duration, err := time.ParseDuration(*decisionItem.Duration) + if err != nil { + return nil, errors.Wrapf(ParseDurationFail, "decision duration '%+v' : %s", *decisionItem.Duration, err) } + + /*if the scope is IP or Range, convert the value to integers */ + if strings.ToLower(*decisionItem.Scope) == "ip" || strings.ToLower(*decisionItem.Scope) == "range" { + sz, start_ip, start_sfx, end_ip, end_sfx, err = types.Addr2Ints(*decisionItem.Value) + if err != nil { + return nil, fmt.Errorf("%s: %w", *decisionItem.Value, InvalidIPOrRange) + } + } + + newDecision := c.Ent.Decision.Create(). + SetUntil(stopAtTime.Add(duration)). + SetScenario(*decisionItem.Scenario). + SetType(*decisionItem.Type). + SetStartIP(start_ip). + SetStartSuffix(start_sfx). + SetEndIP(end_ip). + SetEndSuffix(end_sfx). + SetIPSize(int64(sz)). + SetValue(*decisionItem.Value). + SetScope(*decisionItem.Scope). + SetOrigin(*decisionItem.Origin). + SetSimulated(simulated). 
+ SetUUID(decisionItem.UUID) + + decisionCreate[i] = newDecision } - if len(chunk) > 0 { - ret = append(ret, chunk) + + ret, err := c.Ent.Decision.CreateBulk(decisionCreate...).Save(c.CTX) + if err != nil { + return nil, err } - return ret + + return ret, nil } -func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([]string, error) { - ret := []string{} - bulkSize := 20 - var owner *ent.Machine - var err error +func (c *Client) createAlertChunk(machineID string, owner *ent.Machine, alerts []*models.Alert) ([]string, error) { + alertBuilders := make([]*ent.AlertCreate, len(alerts)) + alertDecisions := make([][]*ent.Decision, len(alerts)) - if machineId != "" { - owner, err = c.QueryMachineByID(machineId) - if err != nil { - if errors.Cause(err) != UserNotExists { - return []string{}, errors.Wrapf(QueryFail, "machine '%s': %s", machineId, err) - } - c.Log.Debugf("CreateAlertBulk: Machine Id %s doesn't exist", machineId) - owner = nil - } - } else { - owner = nil - } - - c.Log.Debugf("writing %d items", len(alertList)) - bulk := make([]*ent.AlertCreate, 0, bulkSize) - alertDecisions := make([][]*ent.Decision, 0, bulkSize) - for i, alertItem := range alertList { - var decisions []*ent.Decision + for i, alertItem := range alerts { var metas []*ent.Meta var events []*ent.Event startAtTime, err := time.Parse(time.RFC3339, *alertItem.StartAt) if err != nil { c.Log.Errorf("CreateAlertBulk: Failed to parse startAtTime '%s', defaulting to now: %s", *alertItem.StartAt, err) + startAtTime = time.Now().UTC() } stopAtTime, err := time.Parse(time.RFC3339, *alertItem.StopAt) if err != nil { c.Log.Errorf("CreateAlertBulk: Failed to parse stopAtTime '%s', defaulting to now: %s", *alertItem.StopAt, err) + stopAtTime = time.Now().UTC() } /*display proper alert in logs*/ - for _, disp := range formatAlertAsString(machineId, alertItem) { + for _, disp := range formatAlertAsString(machineID, alertItem) { c.Log.Info(disp) } @@ -557,12 +563,15 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ if len(alertItem.Events) > 0 { eventBulk := make([]*ent.EventCreate, len(alertItem.Events)) + for i, eventItem := range alertItem.Events { ts, err := time.Parse(time.RFC3339, *eventItem.Timestamp) if err != nil { c.Log.Errorf("CreateAlertBulk: Failed to parse event timestamp '%s', defaulting to now: %s", *eventItem.Timestamp, err) + ts = time.Now().UTC() } + marshallMetas, err := json.Marshal(eventItem.Meta) if err != nil { return nil, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err) @@ -574,6 +583,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ valid := false stripSize := 2048 + for !valid && stripSize > 0 { for _, serializedItem := range eventItem.Meta { if len(serializedItem.Value) > stripSize*2 { @@ -585,9 +595,11 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ if err != nil { return nil, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err) } + if event.SerializedValidator(string(marshallMetas)) == nil { valid = true } + stripSize /= 2 } @@ -597,19 +609,21 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ stripped = false marshallMetas = []byte("") } - } eventBulk[i] = c.Ent.Event.Create(). SetTime(ts). 
SetSerialized(string(marshallMetas)) } + if stripped { - c.Log.Warningf("stripped 'serialized' field (machine %s / scenario %s)", machineId, *alertItem.Scenario) + c.Log.Warningf("stripped 'serialized' field (machine %s / scenario %s)", machineID, *alertItem.Scenario) } + if dropped { - c.Log.Warningf("dropped 'serialized' field (machine %s / scenario %s)", machineId, *alertItem.Scenario) + c.Log.Warningf("dropped 'serialized' field (machine %s / scenario %s)", machineID, *alertItem.Scenario) } + events, err = c.Ent.Event.CreateBulk(eventBulk...).Save(c.CTX) if err != nil { return nil, errors.Wrapf(BulkError, "creating alert events: %s", err) @@ -623,70 +637,26 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ SetKey(metaItem.Key). SetValue(metaItem.Value) } + metas, err = c.Ent.Meta.CreateBulk(metaBulk...).Save(c.CTX) if err != nil { return nil, errors.Wrapf(BulkError, "creating alert meta: %s", err) } } - decisions = make([]*ent.Decision, 0) - if len(alertItem.Decisions) > 0 { - decisionBulk := make([]*ent.DecisionCreate, 0, decisionBulkSize) - for i, decisionItem := range alertItem.Decisions { - var start_ip, start_sfx, end_ip, end_sfx int64 - var sz int + decisions := []*ent.Decision{} - duration, err := time.ParseDuration(*decisionItem.Duration) - if err != nil { - return nil, errors.Wrapf(ParseDurationFail, "decision duration '%+v' : %s", *decisionItem.Duration, err) - } - - /*if the scope is IP or Range, convert the value to integers */ - if strings.ToLower(*decisionItem.Scope) == "ip" || strings.ToLower(*decisionItem.Scope) == "range" { - sz, start_ip, start_sfx, end_ip, end_sfx, err = types.Addr2Ints(*decisionItem.Value) - if err != nil { - return nil, fmt.Errorf("%s: %w", *decisionItem.Value, InvalidIPOrRange) - } - } - - decisionCreate := c.Ent.Decision.Create(). - SetUntil(stopAtTime.Add(duration)). - SetScenario(*decisionItem.Scenario). - SetType(*decisionItem.Type). - SetStartIP(start_ip). - SetStartSuffix(start_sfx). - SetEndIP(end_ip). - SetEndSuffix(end_sfx). - SetIPSize(int64(sz)). - SetValue(*decisionItem.Value). - SetScope(*decisionItem.Scope). - SetOrigin(*decisionItem.Origin). - SetSimulated(*alertItem.Simulated). - SetUUID(decisionItem.UUID) - - decisionBulk = append(decisionBulk, decisionCreate) - if len(decisionBulk) == decisionBulkSize { - decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX) - if err != nil { - return nil, errors.Wrapf(BulkError, "creating alert decisions: %s", err) - - } - decisions = append(decisions, decisionsCreateRet...) - if len(alertItem.Decisions)-i <= decisionBulkSize { - decisionBulk = make([]*ent.DecisionCreate, 0, (len(alertItem.Decisions) - i)) - } else { - decisionBulk = make([]*ent.DecisionCreate, 0, decisionBulkSize) - } - } - } - decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX) + decisionChunks := slicetools.Chunks(alertItem.Decisions, c.decisionBulkSize) + for _, decisionChunk := range decisionChunks { + decisionRet, err := c.createDecisionChunk(*alertItem.Simulated, stopAtTime, decisionChunk) if err != nil { - return nil, errors.Wrapf(BulkError, "creating alert decisions: %s", err) + return nil, fmt.Errorf("creating alert decisions: %w", err) } - decisions = append(decisions, decisionsCreateRet...) + + decisions = append(decisions, decisionRet...) } - alertB := c.Ent.Alert. + alertBuilder := c.Ent.Alert. Create(). SetScenario(*alertItem.Scenario). SetMessage(*alertItem.Message). 
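As a side note on the batching used throughout this refactor: the sketch below is illustrative only and not taken from the codebase; it assumes, as its use above suggests, that slicetools.Chunks splits a slice into consecutive sub-slices of at most the given length.

package main

import (
	"fmt"

	"github.com/crowdsecurity/go-cs-lib/slicetools"
)

func main() {
	ids := []int{1, 2, 3, 4, 5, 6, 7}

	// process the slice in batches of at most 3 elements, the same way the
	// decision and alert builders above are fed to CreateBulk chunk by chunk
	for _, batch := range slicetools.Chunks(ids, 3) {
		fmt.Println(batch)
	}
	// expected output: [1 2 3] [4 5 6] [7]
}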
@@ -712,50 +682,52 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ AddMetas(metas...) if owner != nil { - alertB.SetOwner(owner) + alertBuilder.SetOwner(owner) } - bulk = append(bulk, alertB) - alertDecisions = append(alertDecisions, decisions) - if len(bulk) == bulkSize { - alerts, err := c.Ent.Alert.CreateBulk(bulk...).Save(c.CTX) - if err != nil { - return nil, errors.Wrapf(BulkError, "bulk creating alert : %s", err) - } - for alertIndex, a := range alerts { - ret = append(ret, strconv.Itoa(a.ID)) - d := alertDecisions[alertIndex] - decisionsChunk := chunkDecisions(d, bulkSize) - for _, d2 := range decisionsChunk { - _, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX) - if err != nil { - return nil, fmt.Errorf("error while updating decisions: %s", err) + alertBuilders[i] = alertBuilder + alertDecisions[i] = decisions + } + + alertsCreateBulk, err := c.Ent.Alert.CreateBulk(alertBuilders...).Save(c.CTX) + if err != nil { + return nil, errors.Wrapf(BulkError, "bulk creating alert : %s", err) + } + + ret := make([]string, len(alertsCreateBulk)) + for i, a := range alertsCreateBulk { + ret[i] = strconv.Itoa(a.ID) + + d := alertDecisions[i] + decisionsChunk := slicetools.Chunks(d, c.decisionBulkSize) + + for _, d2 := range decisionsChunk { + retry := 0 + + for retry < maxLockRetries { + // so much for the happy path... but sqlite3 errors work differently + _, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX) + if err == nil { + break + } + + if sqliteErr, ok := err.(sqlite3.Error); ok { + if sqliteErr.Code == sqlite3.ErrBusy { + // sqlite3.Error{ + // Code: 5, + // ExtendedCode: 5, + // SystemErrno: 0, + // err: "database is locked", + // } + retry++ + log.Warningf("while updating decisions, sqlite3.ErrBusy: %s, retry %d of %d", err, retry, maxLockRetries) + time.Sleep(1 * time.Second) + + continue } } - } - if len(alertList)-i <= bulkSize { - bulk = make([]*ent.AlertCreate, 0, (len(alertList) - i)) - alertDecisions = make([][]*ent.Decision, 0, (len(alertList) - i)) - } else { - bulk = make([]*ent.AlertCreate, 0, bulkSize) - alertDecisions = make([][]*ent.Decision, 0, bulkSize) - } - } - } - alerts, err := c.Ent.Alert.CreateBulk(bulk...).Save(c.CTX) - if err != nil { - return nil, errors.Wrapf(BulkError, "leftovers creating alert : %s", err) - } - - for alertIndex, a := range alerts { - ret = append(ret, strconv.Itoa(a.ID)) - d := alertDecisions[alertIndex] - decisionsChunk := chunkDecisions(d, bulkSize) - for _, d2 := range decisionsChunk { - _, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX) - if err != nil { - return nil, fmt.Errorf("error while updating decisions: %s", err) + return nil, fmt.Errorf("error while updating decisions: %w", err) } } } @@ -763,13 +735,49 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([ return ret, nil } +func (c *Client) CreateAlert(machineID string, alertList []*models.Alert) ([]string, error) { + var owner *ent.Machine + var err error + + if machineID != "" { + owner, err = c.QueryMachineByID(machineID) + if err != nil { + if errors.Cause(err) != UserNotExists { + return nil, fmt.Errorf("machine '%s': %w", machineID, err) + } + + c.Log.Debugf("CreateAlertBulk: Machine Id %s doesn't exist", machineID) + + owner = nil + } + } + + c.Log.Debugf("writing %d items", len(alertList)) + + alertChunks := slicetools.Chunks(alertList, bulkSize) + alertIDs := []string{} + + for _, alertChunk := 
range alertChunks { + ids, err := c.createAlertChunk(machineID, owner, alertChunk) + if err != nil { + return nil, fmt.Errorf("machine '%s': %w", machineID, err) + } + + alertIDs = append(alertIDs, ids...) + } + + return alertIDs, nil +} + func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, error) { predicates := make([]predicate.Alert, 0) + var err error var start_ip, start_sfx, end_ip, end_sfx int64 var hasActiveDecision bool var ip_sz int - var contains bool = true + var contains = true + /*if contains is true, return bans that *contains* the given value (value is the inner) else, return bans that are *contained* by the given value (value is the outer)*/ @@ -792,12 +800,13 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e return nil, errors.Wrapf(InvalidFilter, "invalid contains value : %s", err) } case "scope": - var scope string = value[0] + var scope = value[0] if strings.ToLower(scope) == "ip" { scope = types.Ip } else if strings.ToLower(scope) == "range" { scope = types.Range } + predicates = append(predicates, alert.SourceScopeEQ(scope)) case "value": predicates = append(predicates, alert.SourceValueEQ(value[0])) @@ -809,34 +818,40 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e return nil, errors.Wrapf(InvalidIPOrRange, "unable to convert '%s' to int: %s", value[0], err) } case "since": - duration, err := types.ParseDuration(value[0]) - if err != nil { - return nil, fmt.Errorf("while parsing duration: %w", err) - } - since := time.Now().UTC().Add(-duration) - if since.IsZero() { - return nil, fmt.Errorf("Empty time now() - %s", since.String()) - } - predicates = append(predicates, alert.StartedAtGTE(since)) - case "created_before": - duration, err := types.ParseDuration(value[0]) + duration, err := ParseDuration(value[0]) if err != nil { return nil, fmt.Errorf("while parsing duration: %w", err) } + since := time.Now().UTC().Add(-duration) if since.IsZero() { return nil, fmt.Errorf("empty time now() - %s", since.String()) } - predicates = append(predicates, alert.CreatedAtLTE(since)) - case "until": - duration, err := types.ParseDuration(value[0]) + + predicates = append(predicates, alert.StartedAtGTE(since)) + case "created_before": + duration, err := ParseDuration(value[0]) if err != nil { return nil, fmt.Errorf("while parsing duration: %w", err) } + + since := time.Now().UTC().Add(-duration) + if since.IsZero() { + return nil, fmt.Errorf("empty time now() - %s", since.String()) + } + + predicates = append(predicates, alert.CreatedAtLTE(since)) + case "until": + duration, err := ParseDuration(value[0]) + if err != nil { + return nil, fmt.Errorf("while parsing duration: %w", err) + } + until := time.Now().UTC().Add(-duration) if until.IsZero() { return nil, fmt.Errorf("empty time now() - %s", until.String()) } + predicates = append(predicates, alert.StartedAtLTE(until)) case "decision_type": predicates = append(predicates, alert.HasDecisionsWith(decision.TypeEQ(value[0]))) @@ -844,11 +859,25 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e predicates = append(predicates, alert.HasDecisionsWith(decision.OriginEQ(value[0]))) case "include_capi": //allows to exclude one or more specific origins if value[0] == "false" { - predicates = append(predicates, alert.HasDecisionsWith( - decision.Or(decision.OriginEQ(types.CrowdSecOrigin), - decision.OriginEQ(types.CscliOrigin), - decision.OriginEQ(types.ConsoleOrigin), - 
decision.OriginEQ(types.CscliImportOrigin)))) + predicates = append(predicates, alert.And( + //do not show alerts with active decisions having origin CAPI or lists + alert.And( + alert.Not(alert.HasDecisionsWith(decision.OriginEQ(types.CAPIOrigin))), + alert.Not(alert.HasDecisionsWith(decision.OriginEQ(types.ListOrigin))), + ), + alert.Not( + alert.And( + //do not show neither alerts with no decisions if the Source Scope is lists: or CAPI + alert.Not(alert.HasDecisions()), + alert.Or( + alert.SourceScopeHasPrefix(types.ListOrigin+":"), + alert.SourceScopeEQ(types.CommunityBlocklistPullSourceScope), + ), + ), + ), + ), + ) + } else if value[0] != "true" { log.Errorf("Invalid bool '%s' for include_capi", value[0]) } @@ -856,6 +885,7 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e if hasActiveDecision, err = strconv.ParseBool(value[0]); err != nil { return nil, errors.Wrapf(ParseType, "'%s' is not a boolean: %s", value[0], err) } + if hasActiveDecision { predicates = append(predicates, alert.HasDecisionsWith(decision.UntilGTE(time.Now().UTC()))) } else { @@ -889,7 +919,6 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e )) } } else if ip_sz == 16 { - if contains { /*decision contains {start_ip,end_ip}*/ predicates = append(predicates, alert.And( //matching addr size @@ -942,18 +971,20 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e } else if ip_sz != 0 { return nil, errors.Wrapf(InvalidFilter, "Unknown ip size %d", ip_sz) } + return predicates, nil } + func BuildAlertRequestFromFilter(alerts *ent.AlertQuery, filter map[string][]string) (*ent.AlertQuery, error) { preds, err := AlertPredicatesFromFilter(filter) if err != nil { return nil, err } + return alerts.Where(preds...), nil } func (c *Client) AlertsCountPerScenario(filters map[string][]string) (map[string]int, error) { - var res []struct { Scenario string Count int @@ -989,8 +1020,8 @@ func (c *Client) TotalAlerts() (int, error) { } func (c *Client) QueryAlertWithFilter(filter map[string][]string) ([]*ent.Alert, error) { - sort := "DESC" // we sort by desc by default + if val, ok := filter["sort"]; ok { if val[0] != "ASC" && val[0] != "DESC" { c.Log.Errorf("invalid 'sort' parameter: %s", val) @@ -998,22 +1029,27 @@ func (c *Client) QueryAlertWithFilter(filter map[string][]string) ([]*ent.Alert, sort = val[0] } } + limit := defaultLimit + if val, ok := filter["limit"]; ok { limitConv, err := strconv.Atoi(val[0]) if err != nil { - return []*ent.Alert{}, errors.Wrapf(QueryFail, "bad limit in parameters: %s", val) + return nil, errors.Wrapf(QueryFail, "bad limit in parameters: %s", val) } - limit = limitConv + limit = limitConv } + offset := 0 ret := make([]*ent.Alert, 0) + for { alerts := c.Ent.Alert.Query() + alerts, err := BuildAlertRequestFromFilter(alerts, filter) if err != nil { - return []*ent.Alert{}, err + return nil, err } //only if with_decisions is present and set to false, we exclude this @@ -1023,6 +1059,7 @@ func (c *Client) QueryAlertWithFilter(filter map[string][]string) ([]*ent.Alert, alerts = alerts. WithDecisions() } + alerts = alerts. WithEvents(). WithMetas(). 
@@ -1031,7 +1068,7 @@ func (c *Client) QueryAlertWithFilter(filter map[string][]string) ([]*ent.Alert, if limit == 0 { limit, err = alerts.Count(c.CTX) if err != nil { - return []*ent.Alert{}, fmt.Errorf("unable to count nb alerts: %s", err) + return nil, fmt.Errorf("unable to count nb alerts: %s", err) } } @@ -1043,23 +1080,27 @@ func (c *Client) QueryAlertWithFilter(filter map[string][]string) ([]*ent.Alert, result, err := alerts.Limit(paginationSize).Offset(offset).All(c.CTX) if err != nil { - return []*ent.Alert{}, errors.Wrapf(QueryFail, "pagination size: %d, offset: %d: %s", paginationSize, offset, err) + return nil, errors.Wrapf(QueryFail, "pagination size: %d, offset: %d: %s", paginationSize, offset, err) } + if diff := limit - len(ret); diff < paginationSize { if len(result) < diff { ret = append(ret, result...) c.Log.Debugf("Pagination done, %d < %d", len(result), diff) + break } - ret = append(ret, result[0:diff]...) + ret = append(ret, result[0:diff]...) } else { ret = append(ret, result...) } + if len(ret) == limit || len(ret) == 0 || len(ret) < paginationSize { c.Log.Debugf("Pagination done len(ret) = %d", len(ret)) break } + offset += paginationSize } @@ -1154,180 +1195,10 @@ func (c *Client) DeleteAlertWithFilter(filter map[string][]string) (int, error) if err != nil { return 0, err } + return c.Ent.Alert.Delete().Where(preds...).Exec(c.CTX) } -func (c *Client) FlushOrphans() { - /* While it has only been linked to some very corner-case bug : https://github.com/crowdsecurity/crowdsec/issues/778 */ - /* We want to take care of orphaned events for which the parent alert/decision has been deleted */ - - events_count, err := c.Ent.Event.Delete().Where(event.Not(event.HasOwner())).Exec(c.CTX) - if err != nil { - c.Log.Warningf("error while deleting orphan events : %s", err) - return - } - if events_count > 0 { - c.Log.Infof("%d deleted orphan events", events_count) - } - - events_count, err = c.Ent.Decision.Delete().Where( - decision.Not(decision.HasOwner())).Where(decision.UntilLTE(time.Now().UTC())).Exec(c.CTX) - - if err != nil { - c.Log.Warningf("error while deleting orphan decisions : %s", err) - return - } - if events_count > 0 { - c.Log.Infof("%d deleted orphan decisions", events_count) - } -} - -func (c *Client) FlushAgentsAndBouncers(agentsCfg *csconfig.AuthGCCfg, bouncersCfg *csconfig.AuthGCCfg) error { - log.Debug("starting FlushAgentsAndBouncers") - if bouncersCfg != nil { - if bouncersCfg.ApiDuration != nil { - log.Debug("trying to delete old bouncers from api") - deletionCount, err := c.Ent.Bouncer.Delete().Where( - bouncer.LastPullLTE(time.Now().UTC().Add(-*bouncersCfg.ApiDuration)), - ).Where( - bouncer.AuthTypeEQ(types.ApiKeyAuthType), - ).Exec(c.CTX) - if err != nil { - c.Log.Errorf("while auto-deleting expired bouncers (api key) : %s", err) - } else if deletionCount > 0 { - c.Log.Infof("deleted %d expired bouncers (api auth)", deletionCount) - } - } - if bouncersCfg.CertDuration != nil { - log.Debug("trying to delete old bouncers from cert") - - deletionCount, err := c.Ent.Bouncer.Delete().Where( - bouncer.LastPullLTE(time.Now().UTC().Add(-*bouncersCfg.CertDuration)), - ).Where( - bouncer.AuthTypeEQ(types.TlsAuthType), - ).Exec(c.CTX) - if err != nil { - c.Log.Errorf("while auto-deleting expired bouncers (api key) : %s", err) - } else if deletionCount > 0 { - c.Log.Infof("deleted %d expired bouncers (api auth)", deletionCount) - } - } - } - - if agentsCfg != nil { - if agentsCfg.CertDuration != nil { - log.Debug("trying to delete old agents from cert") - - 
deletionCount, err := c.Ent.Machine.Delete().Where( - machine.LastHeartbeatLTE(time.Now().UTC().Add(-*agentsCfg.CertDuration)), - ).Where( - machine.Not(machine.HasAlerts()), - ).Where( - machine.AuthTypeEQ(types.TlsAuthType), - ).Exec(c.CTX) - log.Debugf("deleted %d entries", deletionCount) - if err != nil { - c.Log.Errorf("while auto-deleting expired machine (cert) : %s", err) - } else if deletionCount > 0 { - c.Log.Infof("deleted %d expired machine (cert auth)", deletionCount) - } - } - if agentsCfg.LoginPasswordDuration != nil { - log.Debug("trying to delete old agents from password") - - deletionCount, err := c.Ent.Machine.Delete().Where( - machine.LastHeartbeatLTE(time.Now().UTC().Add(-*agentsCfg.LoginPasswordDuration)), - ).Where( - machine.Not(machine.HasAlerts()), - ).Where( - machine.AuthTypeEQ(types.PasswordAuthType), - ).Exec(c.CTX) - log.Debugf("deleted %d entries", deletionCount) - if err != nil { - c.Log.Errorf("while auto-deleting expired machine (password) : %s", err) - } else if deletionCount > 0 { - c.Log.Infof("deleted %d expired machine (password auth)", deletionCount) - } - } - } - return nil -} - -func (c *Client) FlushAlerts(MaxAge string, MaxItems int) error { - var deletedByAge int - var deletedByNbItem int - var totalAlerts int - var err error - - if !c.CanFlush { - c.Log.Debug("a list is being imported, flushing later") - return nil - } - - c.Log.Debug("Flushing orphan alerts") - c.FlushOrphans() - c.Log.Debug("Done flushing orphan alerts") - totalAlerts, err = c.TotalAlerts() - if err != nil { - c.Log.Warningf("FlushAlerts (max items count) : %s", err) - return fmt.Errorf("unable to get alerts count: %w", err) - } - c.Log.Debugf("FlushAlerts (Total alerts): %d", totalAlerts) - if MaxAge != "" { - filter := map[string][]string{ - "created_before": {MaxAge}, - } - nbDeleted, err := c.DeleteAlertWithFilter(filter) - if err != nil { - c.Log.Warningf("FlushAlerts (max age) : %s", err) - return fmt.Errorf("unable to flush alerts with filter until=%s: %w", MaxAge, err) - } - c.Log.Debugf("FlushAlerts (deleted max age alerts): %d", nbDeleted) - deletedByAge = nbDeleted - } - if MaxItems > 0 { - //We get the highest id for the alerts - //We subtract MaxItems to avoid deleting alerts that are not old enough - //This gives us the oldest alert that we want to keep - //We then delete all the alerts with an id lower than this one - //We can do this because the id is auto-increment, and the database won't reuse the same id twice - lastAlert, err := c.QueryAlertWithFilter(map[string][]string{ - "sort": {"DESC"}, - "limit": {"1"}, - //we do not care about fetching the edges, we just want the id - "with_decisions": {"false"}, - }) - c.Log.Debugf("FlushAlerts (last alert): %+v", lastAlert) - if err != nil { - c.Log.Errorf("FlushAlerts: could not get last alert: %s", err) - return fmt.Errorf("could not get last alert: %w", err) - } - - if len(lastAlert) != 0 { - maxid := lastAlert[0].ID - MaxItems - - c.Log.Debugf("FlushAlerts (max id): %d", maxid) - - if maxid > 0 { - //This may lead to orphan alerts (at least on MySQL), but the next time the flush job will run, they will be deleted - deletedByNbItem, err = c.Ent.Alert.Delete().Where(alert.IDLT(maxid)).Exec(c.CTX) - - if err != nil { - c.Log.Errorf("FlushAlerts: Could not delete alerts : %s", err) - return fmt.Errorf("could not delete alerts: %w", err) - } - } - } - } - if deletedByNbItem > 0 { - c.Log.Infof("flushed %d/%d alerts because max number of alerts has been reached (%d max)", deletedByNbItem, totalAlerts, MaxItems) - } 
- if deletedByAge > 0 { - c.Log.Infof("flushed %d/%d alerts because they were created %s ago or more", deletedByAge, totalAlerts, MaxAge) - } - return nil -} - func (c *Client) GetAlertByID(alertID int) (*ent.Alert, error) { alert, err := c.Ent.Alert.Query().Where(alert.IDEQ(alertID)).WithDecisions().WithEvents().WithMetas().WithOwner().First(c.CTX) if err != nil { @@ -1336,8 +1207,11 @@ func (c *Client) GetAlertByID(alertID int) (*ent.Alert, error) { log.Warningf("GetAlertByID (not found): %s", err) return &ent.Alert{}, ItemNotFound } + c.Log.Warningf("GetAlertByID : %s", err) + return &ent.Alert{}, QueryFail } + return alert, nil } diff --git a/pkg/database/database.go b/pkg/database/database.go index 2d8ddd51e..aa191d7dc 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -5,10 +5,8 @@ import ( "database/sql" "fmt" "os" - "time" entsql "entgo.io/ent/dialect/sql" - "github.com/go-co-op/gocron" _ "github.com/go-sql-driver/mysql" _ "github.com/jackc/pgx/v4/stdlib" _ "github.com/mattn/go-sqlite3" @@ -22,12 +20,13 @@ import ( ) type Client struct { - Ent *ent.Client - CTX context.Context - Log *log.Logger - CanFlush bool - Type string - WalMode *bool + Ent *ent.Client + CTX context.Context + Log *log.Logger + CanFlush bool + Type string + WalMode *bool + decisionBulkSize int } func getEntDriver(dbtype string, dbdialect string, dsn string, config *csconfig.DatabaseCfg) (*entsql.Driver, error) { @@ -93,74 +92,14 @@ func NewClient(config *csconfig.DatabaseCfg) (*Client, error) { if err = client.Schema.Create(context.Background()); err != nil { return nil, fmt.Errorf("failed creating schema resources: %v", err) } - return &Client{Ent: client, CTX: context.Background(), Log: clog, CanFlush: true, Type: config.Type, WalMode: config.UseWal}, nil -} - -func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Scheduler, error) { - maxItems := 0 - maxAge := "" - if config.MaxItems != nil && *config.MaxItems <= 0 { - return nil, fmt.Errorf("max_items can't be zero or negative number") - } - if config.MaxItems != nil { - maxItems = *config.MaxItems - } - if config.MaxAge != nil && *config.MaxAge != "" { - maxAge = *config.MaxAge - } - - // Init & Start cronjob every minute for alerts - scheduler := gocron.NewScheduler(time.UTC) - job, err := scheduler.Every(1).Minute().Do(c.FlushAlerts, maxAge, maxItems) - if err != nil { - return nil, fmt.Errorf("while starting FlushAlerts scheduler: %w", err) - } - job.SingletonMode() - // Init & Start cronjob every hour for bouncers/agents - if config.AgentsGC != nil { - if config.AgentsGC.Cert != nil { - duration, err := types.ParseDuration(*config.AgentsGC.Cert) - if err != nil { - return nil, fmt.Errorf("while parsing agents cert auto-delete duration: %w", err) - } - config.AgentsGC.CertDuration = &duration - } - if config.AgentsGC.LoginPassword != nil { - duration, err := types.ParseDuration(*config.AgentsGC.LoginPassword) - if err != nil { - return nil, fmt.Errorf("while parsing agents login/password auto-delete duration: %w", err) - } - config.AgentsGC.LoginPasswordDuration = &duration - } - if config.AgentsGC.Api != nil { - log.Warning("agents auto-delete for API auth is not supported (use cert or login_password)") - } - } - if config.BouncersGC != nil { - if config.BouncersGC.Cert != nil { - duration, err := types.ParseDuration(*config.BouncersGC.Cert) - if err != nil { - return nil, fmt.Errorf("while parsing bouncers cert auto-delete duration: %w", err) - } - config.BouncersGC.CertDuration = &duration - } - if 
config.BouncersGC.Api != nil { - duration, err := types.ParseDuration(*config.BouncersGC.Api) - if err != nil { - return nil, fmt.Errorf("while parsing bouncers api auto-delete duration: %w", err) - } - config.BouncersGC.ApiDuration = &duration - } - if config.BouncersGC.LoginPassword != nil { - log.Warning("bouncers auto-delete for login/password auth is not supported (use cert or api)") - } - } - baJob, err := scheduler.Every(1).Minute().Do(c.FlushAgentsAndBouncers, config.AgentsGC, config.BouncersGC) - if err != nil { - return nil, fmt.Errorf("while starting FlushAgentsAndBouncers scheduler: %w", err) - } - baJob.SingletonMode() - scheduler.StartAsync() - - return scheduler, nil + + return &Client{ + Ent: client, + CTX: context.Background(), + Log: clog, + CanFlush: true, + Type: config.Type, + WalMode: config.UseWal, + decisionBulkSize: config.DecisionBulkSize, + }, nil } diff --git a/pkg/database/flush.go b/pkg/database/flush.go new file mode 100644 index 000000000..a7b364fa9 --- /dev/null +++ b/pkg/database/flush.go @@ -0,0 +1,278 @@ +package database + +import ( + "fmt" + "time" + + "github.com/go-co-op/gocron" + log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/crowdsec/pkg/csconfig" + "github.com/crowdsecurity/crowdsec/pkg/database/ent/alert" + "github.com/crowdsecurity/crowdsec/pkg/database/ent/bouncer" + "github.com/crowdsecurity/crowdsec/pkg/database/ent/decision" + "github.com/crowdsecurity/crowdsec/pkg/database/ent/event" + "github.com/crowdsecurity/crowdsec/pkg/database/ent/machine" + "github.com/crowdsecurity/crowdsec/pkg/types" +) + + +func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Scheduler, error) { + maxItems := 0 + maxAge := "" + if config.MaxItems != nil && *config.MaxItems <= 0 { + return nil, fmt.Errorf("max_items can't be zero or negative number") + } + if config.MaxItems != nil { + maxItems = *config.MaxItems + } + if config.MaxAge != nil && *config.MaxAge != "" { + maxAge = *config.MaxAge + } + + // Init & Start cronjob every minute for alerts + scheduler := gocron.NewScheduler(time.UTC) + job, err := scheduler.Every(1).Minute().Do(c.FlushAlerts, maxAge, maxItems) + if err != nil { + return nil, fmt.Errorf("while starting FlushAlerts scheduler: %w", err) + } + + job.SingletonMode() + // Init & Start cronjob every hour for bouncers/agents + if config.AgentsGC != nil { + if config.AgentsGC.Cert != nil { + duration, err := ParseDuration(*config.AgentsGC.Cert) + if err != nil { + return nil, fmt.Errorf("while parsing agents cert auto-delete duration: %w", err) + } + config.AgentsGC.CertDuration = &duration + } + if config.AgentsGC.LoginPassword != nil { + duration, err := ParseDuration(*config.AgentsGC.LoginPassword) + if err != nil { + return nil, fmt.Errorf("while parsing agents login/password auto-delete duration: %w", err) + } + config.AgentsGC.LoginPasswordDuration = &duration + } + if config.AgentsGC.Api != nil { + log.Warning("agents auto-delete for API auth is not supported (use cert or login_password)") + } + } + if config.BouncersGC != nil { + if config.BouncersGC.Cert != nil { + duration, err := ParseDuration(*config.BouncersGC.Cert) + if err != nil { + return nil, fmt.Errorf("while parsing bouncers cert auto-delete duration: %w", err) + } + config.BouncersGC.CertDuration = &duration + } + if config.BouncersGC.Api != nil { + duration, err := ParseDuration(*config.BouncersGC.Api) + if err != nil { + return nil, fmt.Errorf("while parsing bouncers api auto-delete duration: %w", err) + } + config.BouncersGC.ApiDuration 
= &duration + } + if config.BouncersGC.LoginPassword != nil { + log.Warning("bouncers auto-delete for login/password auth is not supported (use cert or api)") + } + } + baJob, err := scheduler.Every(1).Minute().Do(c.FlushAgentsAndBouncers, config.AgentsGC, config.BouncersGC) + if err != nil { + return nil, fmt.Errorf("while starting FlushAgentsAndBouncers scheduler: %w", err) + } + + baJob.SingletonMode() + scheduler.StartAsync() + + return scheduler, nil +} + + +func (c *Client) FlushOrphans() { + /* While it has only been linked to some very corner-case bug : https://github.com/crowdsecurity/crowdsec/issues/778 */ + /* We want to take care of orphaned events for which the parent alert/decision has been deleted */ + eventsCount, err := c.Ent.Event.Delete().Where(event.Not(event.HasOwner())).Exec(c.CTX) + if err != nil { + c.Log.Warningf("error while deleting orphan events: %s", err) + return + } + if eventsCount > 0 { + c.Log.Infof("%d deleted orphan events", eventsCount) + } + + eventsCount, err = c.Ent.Decision.Delete().Where( + decision.Not(decision.HasOwner())).Where(decision.UntilLTE(time.Now().UTC())).Exec(c.CTX) + + if err != nil { + c.Log.Warningf("error while deleting orphan decisions: %s", err) + return + } + if eventsCount > 0 { + c.Log.Infof("%d deleted orphan decisions", eventsCount) + } +} + +func (c *Client) flushBouncers(bouncersCfg *csconfig.AuthGCCfg) { + if bouncersCfg == nil { + return + } + + if bouncersCfg.ApiDuration != nil { + log.Debug("trying to delete old bouncers from api") + + deletionCount, err := c.Ent.Bouncer.Delete().Where( + bouncer.LastPullLTE(time.Now().UTC().Add(-*bouncersCfg.ApiDuration)), + ).Where( + bouncer.AuthTypeEQ(types.ApiKeyAuthType), + ).Exec(c.CTX) + if err != nil { + c.Log.Errorf("while auto-deleting expired bouncers (api key): %s", err) + } else if deletionCount > 0 { + c.Log.Infof("deleted %d expired bouncers (api auth)", deletionCount) + } + } + + if bouncersCfg.CertDuration != nil { + log.Debug("trying to delete old bouncers from cert") + + deletionCount, err := c.Ent.Bouncer.Delete().Where( + bouncer.LastPullLTE(time.Now().UTC().Add(-*bouncersCfg.CertDuration)), + ).Where( + bouncer.AuthTypeEQ(types.TlsAuthType), + ).Exec(c.CTX) + if err != nil { + c.Log.Errorf("while auto-deleting expired bouncers (api key): %s", err) + } else if deletionCount > 0 { + c.Log.Infof("deleted %d expired bouncers (api auth)", deletionCount) + } + } +} + +func (c *Client) flushAgents(agentsCfg *csconfig.AuthGCCfg) { + if agentsCfg == nil { + return + } + + if agentsCfg.CertDuration != nil { + log.Debug("trying to delete old agents from cert") + + deletionCount, err := c.Ent.Machine.Delete().Where( + machine.LastHeartbeatLTE(time.Now().UTC().Add(-*agentsCfg.CertDuration)), + ).Where( + machine.Not(machine.HasAlerts()), + ).Where( + machine.AuthTypeEQ(types.TlsAuthType), + ).Exec(c.CTX) + log.Debugf("deleted %d entries", deletionCount) + if err != nil { + c.Log.Errorf("while auto-deleting expired machine (cert): %s", err) + } else if deletionCount > 0 { + c.Log.Infof("deleted %d expired machine (cert auth)", deletionCount) + } + } + + if agentsCfg.LoginPasswordDuration != nil { + log.Debug("trying to delete old agents from password") + + deletionCount, err := c.Ent.Machine.Delete().Where( + machine.LastHeartbeatLTE(time.Now().UTC().Add(-*agentsCfg.LoginPasswordDuration)), + ).Where( + machine.Not(machine.HasAlerts()), + ).Where( + machine.AuthTypeEQ(types.PasswordAuthType), + ).Exec(c.CTX) + log.Debugf("deleted %d entries", deletionCount) + if err != nil { + 
c.Log.Errorf("while auto-deleting expired machine (password): %s", err) + } else if deletionCount > 0 { + c.Log.Infof("deleted %d expired machine (password auth)", deletionCount) + } + } +} + +func (c *Client) FlushAgentsAndBouncers(agentsCfg *csconfig.AuthGCCfg, bouncersCfg *csconfig.AuthGCCfg) error { + log.Debug("starting FlushAgentsAndBouncers") + + c.flushBouncers(bouncersCfg) + c.flushAgents(agentsCfg) + + return nil +} + +func (c *Client) FlushAlerts(MaxAge string, MaxItems int) error { + var deletedByAge int + var deletedByNbItem int + var totalAlerts int + var err error + + if !c.CanFlush { + c.Log.Debug("a list is being imported, flushing later") + return nil + } + + c.Log.Debug("Flushing orphan alerts") + c.FlushOrphans() + c.Log.Debug("Done flushing orphan alerts") + totalAlerts, err = c.TotalAlerts() + if err != nil { + c.Log.Warningf("FlushAlerts (max items count): %s", err) + return fmt.Errorf("unable to get alerts count: %w", err) + } + + c.Log.Debugf("FlushAlerts (Total alerts): %d", totalAlerts) + if MaxAge != "" { + filter := map[string][]string{ + "created_before": {MaxAge}, + } + nbDeleted, err := c.DeleteAlertWithFilter(filter) + if err != nil { + c.Log.Warningf("FlushAlerts (max age): %s", err) + return fmt.Errorf("unable to flush alerts with filter until=%s: %w", MaxAge, err) + } + + c.Log.Debugf("FlushAlerts (deleted max age alerts): %d", nbDeleted) + deletedByAge = nbDeleted + } + if MaxItems > 0 { + //We get the highest id for the alerts + //We subtract MaxItems to avoid deleting alerts that are not old enough + //This gives us the oldest alert that we want to keep + //We then delete all the alerts with an id lower than this one + //We can do this because the id is auto-increment, and the database won't reuse the same id twice + lastAlert, err := c.QueryAlertWithFilter(map[string][]string{ + "sort": {"DESC"}, + "limit": {"1"}, + //we do not care about fetching the edges, we just want the id + "with_decisions": {"false"}, + }) + c.Log.Debugf("FlushAlerts (last alert): %+v", lastAlert) + if err != nil { + c.Log.Errorf("FlushAlerts: could not get last alert: %s", err) + return fmt.Errorf("could not get last alert: %w", err) + } + + if len(lastAlert) != 0 { + maxid := lastAlert[0].ID - MaxItems + + c.Log.Debugf("FlushAlerts (max id): %d", maxid) + + if maxid > 0 { + //This may lead to orphan alerts (at least on MySQL), but the next time the flush job will run, they will be deleted + deletedByNbItem, err = c.Ent.Alert.Delete().Where(alert.IDLT(maxid)).Exec(c.CTX) + + if err != nil { + c.Log.Errorf("FlushAlerts: Could not delete alerts: %s", err) + return fmt.Errorf("could not delete alerts: %w", err) + } + } + } + } + if deletedByNbItem > 0 { + c.Log.Infof("flushed %d/%d alerts because the max number of alerts has been reached (%d max)", deletedByNbItem, totalAlerts, MaxItems) + } + if deletedByAge > 0 { + c.Log.Infof("flushed %d/%d alerts because they were created %s ago or more", deletedByAge, totalAlerts, MaxAge) + } + return nil +} diff --git a/pkg/database/utils.go b/pkg/database/utils.go index 5d6d4a442..2414e7027 100644 --- a/pkg/database/utils.go +++ b/pkg/database/utils.go @@ -4,6 +4,9 @@ import ( "encoding/binary" "fmt" "net" + "strconv" + "strings" + "time" ) func IP2Int(ip net.IP) uint32 { @@ -63,3 +66,23 @@ func GetIpsFromIpRange(host string) (int64, int64, error) { return ipStart, ipEnd, nil } + +func ParseDuration(d string) (time.Duration, error) { + durationStr := d + if strings.HasSuffix(d, "d") { + days := strings.Split(d, "d")[0] + if len(days) == 0 
{ + return 0, fmt.Errorf("'%s' can't be parsed as duration", d) + } + daysInt, err := strconv.Atoi(days) + if err != nil { + return 0, err + } + durationStr = strconv.Itoa(daysInt*24) + "h" + } + duration, err := time.ParseDuration(durationStr) + if err != nil { + return 0, err + } + return duration, nil +} diff --git a/pkg/exprhelpers/crowdsec_cti.go b/pkg/exprhelpers/crowdsec_cti.go index 6440295c8..dc67815d8 100644 --- a/pkg/exprhelpers/crowdsec_cti.go +++ b/pkg/exprhelpers/crowdsec_cti.go @@ -16,7 +16,7 @@ var CTIUrlSuffix = "/v2/smoke/" var CTIApiKey = "" // this is set for non-recoverable errors, such as 403 when querying API or empty API key -var CTIApiEnabled = true +var CTIApiEnabled = false // when hitting quotas or auth errors, we temporarily disable the API var CTIBackOffUntil time.Time @@ -25,12 +25,11 @@ var CTIBackOffDuration time.Duration = 5 * time.Minute var ctiClient *cticlient.CrowdsecCTIClient func InitCrowdsecCTI(Key *string, TTL *time.Duration, Size *int, LogLevel *log.Level) error { - if Key != nil { - CTIApiKey = *Key - } else { - CTIApiEnabled = false - return fmt.Errorf("CTI API key not set, CTI will not be available") + if Key == nil || *Key == "" { + log.Warningf("CTI API key not set or empty, CTI will not be available") + return cticlient.ErrDisabled } + CTIApiKey = *Key if Size == nil { Size = new(int) *Size = 1000 @@ -39,7 +38,6 @@ func InitCrowdsecCTI(Key *string, TTL *time.Duration, Size *int, LogLevel *log.L TTL = new(time.Duration) *TTL = 5 * time.Minute } - //dedicated logger clog := log.New() if err := types.ConfigureLogger(clog); err != nil { return errors.Wrap(err, "while configuring datasource logger") @@ -53,6 +51,7 @@ func InitCrowdsecCTI(Key *string, TTL *time.Duration, Size *int, LogLevel *log.L subLogger := clog.WithFields(customLog) CrowdsecCTIInitCache(*Size, *TTL) ctiClient = cticlient.NewCrowdsecCTIClient(cticlient.WithAPIKey(CTIApiKey), cticlient.WithLogger(subLogger)) + CTIApiEnabled = true return nil } @@ -61,7 +60,7 @@ func ShutdownCrowdsecCTI() { CTICache.Purge() } CTIApiKey = "" - CTIApiEnabled = true + CTIApiEnabled = false } // Cache for responses @@ -75,20 +74,13 @@ func CrowdsecCTIInitCache(size int, ttl time.Duration) { // func CrowdsecCTI(ip string) (*cticlient.SmokeItem, error) { func CrowdsecCTI(params ...any) (any, error) { - ip := params[0].(string) + var ip string if !CTIApiEnabled { - ctiClient.Logger.Warningf("Crowdsec CTI API is disabled, please check your configuration") return &cticlient.SmokeItem{}, cticlient.ErrDisabled } - - if CTIApiKey == "" { - ctiClient.Logger.Warningf("CrowdsecCTI : no key provided, skipping") - return &cticlient.SmokeItem{}, cticlient.ErrDisabled - } - - if ctiClient == nil { - ctiClient.Logger.Warningf("CrowdsecCTI: no client, skipping") - return &cticlient.SmokeItem{}, cticlient.ErrDisabled + var ok bool + if ip, ok = params[0].(string); !ok { + return &cticlient.SmokeItem{}, fmt.Errorf("invalid type for ip : %T", params[0]) } if val, err := CTICache.Get(ip); err == nil && val != nil { @@ -112,15 +104,16 @@ func CrowdsecCTI(params ...any) (any, error) { ctiResp, err := ctiClient.GetIPInfo(ip) ctiClient.Logger.Debugf("request for %s took %v", ip, time.Since(before)) if err != nil { - if err == cticlient.ErrUnauthorized { + switch err { + case cticlient.ErrUnauthorized: CTIApiEnabled = false ctiClient.Logger.Errorf("Invalid API key provided, disabling CTI API") return &cticlient.SmokeItem{}, cticlient.ErrUnauthorized - } else if err == cticlient.ErrLimit { + case cticlient.ErrLimit: 
CTIBackOffUntil = time.Now().Add(CTIBackOffDuration) ctiClient.Logger.Errorf("CTI API is throttled, will try again in %s", CTIBackOffDuration) return &cticlient.SmokeItem{}, cticlient.ErrLimit - } else { + default: ctiClient.Logger.Warnf("CTI API error : %s", err) return &cticlient.SmokeItem{}, fmt.Errorf("unexpected error : %v", err) } diff --git a/pkg/exprhelpers/crowdsec_cti_test.go b/pkg/exprhelpers/crowdsec_cti_test.go index c8d1c92fd..84cd3347b 100644 --- a/pkg/exprhelpers/crowdsec_cti_test.go +++ b/pkg/exprhelpers/crowdsec_cti_test.go @@ -106,6 +106,16 @@ func smokeHandler(req *http.Request) *http.Response { } } +func TestNillClient(t *testing.T) { + defer ShutdownCrowdsecCTI() + if err := InitCrowdsecCTI(ptr.Of(""), nil, nil, nil); err != cticlient.ErrDisabled { + t.Fatalf("failed to init CTI : %s", err) + } + item, err := CrowdsecCTI("1.2.3.4") + assert.Equal(t, err, cticlient.ErrDisabled) + assert.Equal(t, item, &cticlient.SmokeItem{}) +} + func TestInvalidAuth(t *testing.T) { defer ShutdownCrowdsecCTI() if err := InitCrowdsecCTI(ptr.Of("asdasd"), nil, nil, nil); err != nil { @@ -135,7 +145,7 @@ func TestInvalidAuth(t *testing.T) { func TestNoKey(t *testing.T) { defer ShutdownCrowdsecCTI() err := InitCrowdsecCTI(nil, nil, nil, nil) - assert.ErrorContains(t, err, "CTI API key not set") + assert.ErrorIs(t, err, cticlient.ErrDisabled) //Replace the client created by InitCrowdsecCTI with one that uses a custom transport ctiClient = cticlient.NewCrowdsecCTIClient(cticlient.WithAPIKey("asdasd"), cticlient.WithHTTPClient(&http.Client{ Transport: RoundTripFunc(smokeHandler), diff --git a/pkg/exprhelpers/expr_lib.go b/pkg/exprhelpers/expr_lib.go index 1d29539cb..db191b84a 100644 --- a/pkg/exprhelpers/expr_lib.go +++ b/pkg/exprhelpers/expr_lib.go @@ -434,6 +434,13 @@ var exprFuncs = []exprCustomFunc{ new(func() (string, error)), }, }, + { + name: "FloatApproxEqual", + function: FloatApproxEqual, + signature: []interface{}{ + new(func(float64, float64) bool), + }, + }, } //go 1.20 "CutPrefix": strings.CutPrefix, diff --git a/pkg/exprhelpers/helpers.go b/pkg/exprhelpers/helpers.go index f28aef84a..a8679beec 100644 --- a/pkg/exprhelpers/helpers.go +++ b/pkg/exprhelpers/helpers.go @@ -4,6 +4,7 @@ import ( "bufio" "encoding/base64" "fmt" + "math" "net" "net/url" "os" @@ -55,6 +56,16 @@ var exprFunctionOptions []expr.Option var keyValuePattern = regexp.MustCompile(`(?P[^=\s]+)=(?:"(?P[^"\\]*(?:\\.[^"\\]*)*)"|(?P[^=\s]+)|\s*)`) func GetExprOptions(ctx map[string]interface{}) []expr.Option { + if len(exprFunctionOptions) == 0 { + exprFunctionOptions = []expr.Option{} + for _, function := range exprFuncs { + exprFunctionOptions = append(exprFunctionOptions, + expr.Function(function.name, + function.function, + function.signature..., + )) + } + } ret := []expr.Option{} ret = append(ret, exprFunctionOptions...) 
ret = append(ret, expr.Env(ctx)) @@ -67,15 +78,6 @@ func Init(databaseClient *database.Client) error { dataFileRe2 = make(map[string][]*re2.Regexp) dbClient = databaseClient - exprFunctionOptions = []expr.Option{} - for _, function := range exprFuncs { - exprFunctionOptions = append(exprFunctionOptions, - expr.Function(function.name, - function.function, - function.signature..., - )) - } - return nil } @@ -129,6 +131,19 @@ func UpdateRegexpCacheMetrics() { func FileInit(fileFolder string, filename string, fileType string) error { log.Debugf("init (folder:%s) (file:%s) (type:%s)", fileFolder, filename, fileType) + if fileType == "" { + log.Debugf("ignored file %s%s because no type specified", fileFolder, filename) + return nil + } + ok, err := existsInFileMaps(filename, fileType) + if ok { + log.Debugf("ignored file %s%s because already loaded", fileFolder, filename) + return nil + } + if err != nil { + return err + } + filepath := filepath.Join(fileFolder, filename) file, err := os.Open(filepath) if err != nil { @@ -136,13 +151,6 @@ func FileInit(fileFolder string, filename string, fileType string) error { } defer file.Close() - if fileType == "" { - log.Debugf("ignored file %s%s because no type specified", fileFolder, filename) - return nil - } - if _, ok := dataFile[filename]; !ok { - dataFile[filename] = []string{} - } scanner := bufio.NewScanner(file) for scanner.Scan() { if strings.HasPrefix(scanner.Text(), "#") { // allow comments @@ -155,13 +163,11 @@ func FileInit(fileFolder string, filename string, fileType string) error { case "regex", "regexp": if fflag.Re2RegexpInfileSupport.IsEnabled() { dataFileRe2[filename] = append(dataFileRe2[filename], re2.MustCompile(scanner.Text())) - } else { - dataFileRegex[filename] = append(dataFileRegex[filename], regexp.MustCompile(scanner.Text())) + continue } + dataFileRegex[filename] = append(dataFileRegex[filename], regexp.MustCompile(scanner.Text())) case "string": dataFile[filename] = append(dataFile[filename], scanner.Text()) - default: - return fmt.Errorf("unknown data type '%s' for : '%s'", fileType, filename) } } @@ -219,6 +225,25 @@ func flatten(args []interface{}, v reflect.Value) []interface{} { return args } +func existsInFileMaps(filename string, ftype string) (bool, error) { + ok := false + var err error + switch ftype { + case "regex", "regexp": + if fflag.Re2RegexpInfileSupport.IsEnabled() { + _, ok = dataFileRe2[filename] + } else { + _, ok = dataFileRegex[filename] + } + case "string": + _, ok = dataFile[filename] + default: + err = fmt.Errorf("unknown data type '%s' for : '%s'", ftype, filename) + } + return ok, err +} + +//Expr helpers // func Get(arr []string, index int) string { func Get(params ...any) (any, error) { @@ -637,6 +662,16 @@ func Match(params ...any) (any, error) { return matched, nil } +func FloatApproxEqual(params ...any) (any, error) { + float1 := params[0].(float64) + float2 := params[1].(float64) + + if math.Abs(float1-float2) < 1e-6 { + return true, nil + } + return false, nil +} + func B64Decode(params ...any) (any, error) { encoded := params[0].(string) decoded, err := base64.StdEncoding.DecodeString(encoded) diff --git a/pkg/exprhelpers/jsonextract.go b/pkg/exprhelpers/jsonextract.go index a616588a7..6edb34e36 100644 --- a/pkg/exprhelpers/jsonextract.go +++ b/pkg/exprhelpers/jsonextract.go @@ -174,7 +174,7 @@ func UnmarshalJSON(params ...any) (any, error) { err := json.Unmarshal([]byte(jsonBlob), &out) if err != nil { - log.Errorf("UnmarshalJSON : %s", err) + log.WithField("line", 
jsonBlob).Errorf("UnmarshalJSON : %s", err) return nil, err } target[key] = out diff --git a/pkg/fflag/features_test.go b/pkg/fflag/features_test.go index dc19c101b..57745b3c3 100644 --- a/pkg/fflag/features_test.go +++ b/pkg/fflag/features_test.go @@ -364,7 +364,7 @@ func TestSetFromYamlFile(t *testing.T) { defer os.Remove(tmpfile.Name()) // write the config file - _, err = tmpfile.Write([]byte("- experimental1")) + _, err = tmpfile.WriteString("- experimental1") require.NoError(t, err) require.NoError(t, tmpfile.Close()) diff --git a/pkg/hubtest/hubtest_item.go b/pkg/hubtest/hubtest_item.go index 475f42cf6..47a151220 100644 --- a/pkg/hubtest/hubtest_item.go +++ b/pkg/hubtest/hubtest_item.go @@ -97,7 +97,7 @@ func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) { } err = yaml.Unmarshal(yamlFile, configFileData) if err != nil { - return nil, fmt.Errorf("Unmarshal: %v", err) + return nil, fmt.Errorf("unmarshal: %v", err) } parserAssertFilePath := filepath.Join(testPath, ParserAssertFileName) @@ -122,10 +122,10 @@ func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) { ScenarioResultFile: filepath.Join(resultPath, ScenarioResultFileName), BucketPourResultFile: filepath.Join(resultPath, BucketPourResultFileName), RuntimeHubConfig: &csconfig.Hub{ - HubDir: runtimeHubFolder, - ConfigDir: runtimeFolder, - HubIndexFile: hubTest.HubIndexFile, - DataDir: filepath.Join(runtimeFolder, "data"), + HubDir: runtimeHubFolder, + HubIndexFile: hubTest.HubIndexFile, + InstallDir: runtimeFolder, + InstallDataDir: filepath.Join(runtimeFolder, "data"), }, Config: configFileData, HubPath: hubTest.HubPath, @@ -516,7 +516,7 @@ func (t *HubTestItem) Run() error { return fmt.Errorf("unable to stat log file '%s': %s", logFile, err) } if logFileStat.Size() == 0 { - return fmt.Errorf("Log file '%s' is empty, please fill it with log", logFile) + return fmt.Errorf("log file '%s' is empty, please fill it with log", logFile) } cmdArgs := []string{"-c", t.RuntimeConfigFilePath, "machines", "add", "testMachine", "--auto"} @@ -555,7 +555,7 @@ func (t *HubTestItem) Run() error { if os.IsNotExist(err) { parserAssertFile, err := os.Create(t.ParserAssert.File) if err != nil { - log.Fatal(err) + return err } parserAssertFile.Close() } @@ -591,7 +591,7 @@ func (t *HubTestItem) Run() error { if os.IsNotExist(err) { scenarioAssertFile, err := os.Create(t.ScenarioAssert.File) if err != nil { - log.Fatal(err) + return err } scenarioAssertFile.Close() } diff --git a/pkg/hubtest/parser_assert.go b/pkg/hubtest/parser_assert.go index 95400b50d..c9a183336 100644 --- a/pkg/hubtest/parser_assert.go +++ b/pkg/hubtest/parser_assert.go @@ -103,7 +103,6 @@ func (p *ParserAssert) AssertFile(testFile string) error { p.NbAssert += 1 if !ok { log.Debugf("%s is FALSE", scanner.Text()) - //fmt.SPrintf(" %s '%s'\n", emoji.RedSquare, scanner.Text()) failedAssert := &AssertFail{ File: p.File, Line: nbLine, @@ -112,10 +111,13 @@ func (p *ParserAssert) AssertFile(testFile string) error { } variableRE := regexp.MustCompile(`(?P[^ =]+) == .*`) match := variableRE.FindStringSubmatch(scanner.Text()) + variable := "" if len(match) == 0 { log.Infof("Couldn't get variable of line '%s'", scanner.Text()) + variable = scanner.Text() + } else { + variable = match[1] } - variable := match[1] result, err := p.EvalExpression(variable) if err != nil { log.Errorf("unable to evaluate variable '%s': %s", variable, err) @@ -123,6 +125,7 @@ func (p *ParserAssert) AssertFile(testFile string) error { } failedAssert.Debug[variable] = result p.Fails = 
append(p.Fails, *failedAssert) + continue } //fmt.Printf(" %s '%s'\n", emoji.GreenSquare, scanner.Text()) @@ -154,13 +157,14 @@ func (p *ParserAssert) RunExpression(expression string) (interface{}, error) { env := map[string]interface{}{"results": *p.TestData} if runtimeFilter, err = expr.Compile(expression, exprhelpers.GetExprOptions(env)...); err != nil { + log.Errorf("failed to compile '%s' : %s", expression, err) return output, err } //dump opcode in trace level log.Tracef("%s", runtimeFilter.Disassemble()) - output, err = expr.Run(runtimeFilter, map[string]interface{}{"results": *p.TestData}) + output, err = expr.Run(runtimeFilter, env) if err != nil { log.Warningf("running : %s", expression) log.Warningf("runtime error : %s", err) @@ -228,33 +232,41 @@ func (p *ParserAssert) AutoGenParserAssert() string { if !result.Success { continue } - for pkey, pval := range result.Evt.Parsed { + for _, pkey := range sortedMapKeys(result.Evt.Parsed) { + pval := result.Evt.Parsed[pkey] if pval == "" { continue } ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Parsed["%s"] == "%s"`+"\n", stage, parser, pidx, pkey, Escape(pval)) } - for mkey, mval := range result.Evt.Meta { + for _, mkey := range sortedMapKeys(result.Evt.Meta) { + mval := result.Evt.Meta[mkey] if mval == "" { continue } ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Meta["%s"] == "%s"`+"\n", stage, parser, pidx, mkey, Escape(mval)) } - for ekey, eval := range result.Evt.Enriched { + for _, ekey := range sortedMapKeys(result.Evt.Enriched) { + eval := result.Evt.Enriched[ekey] if eval == "" { continue } ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Enriched["%s"] == "%s"`+"\n", stage, parser, pidx, ekey, Escape(eval)) } - for ekey, eval := range result.Evt.Unmarshaled { - if eval == "" { + for _, ukey := range sortedMapKeys(result.Evt.Unmarshaled) { + uval := result.Evt.Unmarshaled[ukey] + if uval == "" { continue } - base := fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Unmarshaled["%s"]`, stage, parser, pidx, ekey) - for _, line := range p.buildUnmarshaledAssert("", eval) { - ret += base + line + base := fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Unmarshaled["%s"]`, stage, parser, pidx, ukey) + for _, line := range p.buildUnmarshaledAssert(base, uval) { + ret += line } } + ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Whitelisted == %t`+"\n", stage, parser, pidx, result.Evt.Whitelisted) + if result.Evt.WhitelistReason != "" { + ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.WhitelistReason == "%s"`+"\n", stage, parser, pidx, Escape(result.Evt.WhitelistReason)) + } } } } @@ -280,7 +292,8 @@ func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []s case int: ret = append(ret, fmt.Sprintf(`%s == %d`+"\n", ekey, val)) case float64: - ret = append(ret, fmt.Sprintf(`%s == %f`+"\n", ekey, val)) + ret = append(ret, fmt.Sprintf(`FloatApproxEqual(%s, %f)`+"\n", + ekey, val)) default: log.Warningf("unknown type '%T' for key '%s'", val, ekey) } @@ -312,9 +325,14 @@ func LoadParserDump(filepath string) (*ParserResults, error) { stages = append(stages, k) } sort.Strings(stages) - /*the very last one is set to 'success' which is just a bool indicating if the line was successfully parsed*/ - lastStage := stages[len(stages)-2] - + var lastStage string + //Loop over stages to find last successful one with at least one parser + for i := len(stages) - 2; i >= 0; i-- { + if len(pdump[stages[i]]) != 0 { + lastStage = stages[i] + break + } + } parsers := make([]string, 0, len(pdump[lastStage])) for k := range pdump[lastStage] { 
parsers = append(parsers, k) diff --git a/pkg/hubtest/scenario_assert.go b/pkg/hubtest/scenario_assert.go index 2e2a4e9c8..f5517c350 100644 --- a/pkg/hubtest/scenario_assert.go +++ b/pkg/hubtest/scenario_assert.go @@ -55,7 +55,6 @@ func (s *ScenarioAssert) AutoGenFromFile(filename string) (string, error) { } func (s *ScenarioAssert) LoadTest(filename string, bucketpour string) error { - var err error bucketDump, err := LoadScenarioDump(filename) if err != nil { return fmt.Errorf("loading scenario dump file '%s': %+v", filename, err) @@ -206,7 +205,7 @@ func (s *ScenarioAssert) AutoGenScenarioAssert() string { } for evtIndex, evt := range event.Overflow.Alert.Events { for _, meta := range evt.Meta { - ret += fmt.Sprintf(`results[%d].Overflow.Alert.Events[%d].GetMeta("%s") == "%s"`+"\n", eventIndex, evtIndex, meta.Key, meta.Value) + ret += fmt.Sprintf(`results[%d].Overflow.Alert.Events[%d].GetMeta("%s") == "%s"`+"\n", eventIndex, evtIndex, meta.Key, Escape(meta.Value)) } } ret += fmt.Sprintf(`results[%d].Overflow.Alert.GetScenario() == "%s"`+"\n", eventIndex, *event.Overflow.Alert.Scenario) diff --git a/pkg/hubtest/utils.go b/pkg/hubtest/utils.go index 73de3510b..5ccbbad39 100644 --- a/pkg/hubtest/utils.go +++ b/pkg/hubtest/utils.go @@ -4,18 +4,29 @@ import ( "fmt" "os" "path/filepath" + "sort" ) -func Copy(sourceFile string, destinationFile string) error { - input, err := os.ReadFile(sourceFile) +func sortedMapKeys[V any](m map[string]V) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +func Copy(src string, dst string) error { + content, err := os.ReadFile(src) if err != nil { return err } - err = os.WriteFile(destinationFile, input, 0644) + err = os.WriteFile(dst, content, 0644) if err != nil { return err } + return nil } @@ -60,6 +71,7 @@ func CopyDir(src string, dest string) error { if err != nil { return err } + if !file.IsDir() { return fmt.Errorf("Source " + file.Name() + " is not a directory!") } @@ -75,32 +87,15 @@ func CopyDir(src string, dest string) error { } for _, f := range files { - if f.IsDir() { - - err = CopyDir(src+"/"+f.Name(), dest+"/"+f.Name()) - if err != nil { + if err = CopyDir(filepath.Join(src, f.Name()), filepath.Join(dest, f.Name())); err != nil { + return err + } + } else { + if err = Copy(filepath.Join(src, f.Name()), filepath.Join(dest, f.Name())); err != nil { return err } - } - - if !f.IsDir() { - - content, err := os.ReadFile(src + "/" + f.Name()) - if err != nil { - return err - - } - - err = os.WriteFile(dest+"/"+f.Name(), content, 0755) - if err != nil { - return err - - } - - } - } return nil diff --git a/pkg/leakybucket/bucket.go b/pkg/leakybucket/bucket.go index d17006692..66780b854 100644 --- a/pkg/leakybucket/bucket.go +++ b/pkg/leakybucket/bucket.go @@ -216,7 +216,7 @@ func LeakRoutine(leaky *Leaky) error { defer BucketsCurrentCount.With(prometheus.Labels{"name": leaky.Name}).Dec() /*todo : we create a logger at runtime while we want leakroutine to be up asap, might not be a good idea*/ - leaky.logger = leaky.BucketConfig.logger.WithFields(log.Fields{"capacity": leaky.Capacity, "partition": leaky.Mapkey, "bucket_id": leaky.Uuid}) + leaky.logger = leaky.BucketConfig.logger.WithFields(log.Fields{"partition": leaky.Mapkey, "bucket_id": leaky.Uuid}) //We copy the processors, as they are coming from the BucketFactory, and thus are shared between buckets //If we don't copy, processors using local cache (such as Uniq) are subject to race conditions diff --git 
a/pkg/leakybucket/manager_load.go b/pkg/leakybucket/manager_load.go index 337f6fd3f..b384e5967 100644 --- a/pkg/leakybucket/manager_load.go +++ b/pkg/leakybucket/manager_load.go @@ -284,14 +284,12 @@ func LoadBucket(bucketFactory *BucketFactory, tomb *tomb.Tomb) error { bucketFactory.logger = clog.WithFields(log.Fields{ "cfg": bucketFactory.BucketName, "name": bucketFactory.Name, - "file": bucketFactory.Filename, }) } else { /* else bind it to the default one (might find something more elegant here)*/ bucketFactory.logger = log.WithFields(log.Fields{ "cfg": bucketFactory.BucketName, "name": bucketFactory.Name, - "file": bucketFactory.Filename, }) } diff --git a/pkg/leakybucket/manager_run.go b/pkg/leakybucket/manager_run.go index 388227a41..49c2ce141 100644 --- a/pkg/leakybucket/manager_run.go +++ b/pkg/leakybucket/manager_run.go @@ -297,7 +297,7 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc evt := deepcopy.Copy(parsed) BucketPourCache["OK"] = append(BucketPourCache["OK"], evt.(types.Event)) } - + parserEnv := map[string]interface{}{"evt": &parsed} //find the relevant holders (scenarios) for idx := 0; idx < len(holders); idx++ { //for idx, holder := range holders { @@ -305,7 +305,7 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc //evaluate bucket's condition if holders[idx].RunTimeFilter != nil { holders[idx].logger.Tracef("event against holder %d/%d", idx, len(holders)) - output, err := expr.Run(holders[idx].RunTimeFilter, map[string]interface{}{"evt": &parsed}) + output, err := expr.Run(holders[idx].RunTimeFilter, parserEnv) if err != nil { holders[idx].logger.Errorf("failed parsing : %v", err) return false, fmt.Errorf("leaky failed : %s", err) @@ -317,7 +317,7 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc } if holders[idx].Debug { - holders[idx].ExprDebugger.Run(holders[idx].logger, condition, map[string]interface{}{"evt": &parsed}) + holders[idx].ExprDebugger.Run(holders[idx].logger, condition, parserEnv) } if !condition { holders[idx].logger.Debugf("Event leaving node : ko (filter mismatch)") @@ -328,7 +328,7 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc //groupby determines the partition key for the specific bucket var groupby string if holders[idx].RunTimeGroupBy != nil { - tmpGroupBy, err := expr.Run(holders[idx].RunTimeGroupBy, map[string]interface{}{"evt": &parsed}) + tmpGroupBy, err := expr.Run(holders[idx].RunTimeGroupBy, parserEnv) if err != nil { holders[idx].logger.Errorf("failed groupby : %v", err) return false, errors.New("leaky failed :/") diff --git a/pkg/leakybucket/timemachine.go b/pkg/leakybucket/timemachine.go index 6e84797d4..266a8be7c 100644 --- a/pkg/leakybucket/timemachine.go +++ b/pkg/leakybucket/timemachine.go @@ -35,7 +35,7 @@ func TimeMachinePour(l *Leaky, msg types.Event) { } l.Last_ts = d l.mutex.Unlock() - if l.Limiter.AllowN(d, 1) { + if l.Limiter.AllowN(d, 1) || l.conditionalOverflow { l.logger.Tracef("Time-Pouring event %s (tokens:%f)", d, l.Limiter.GetTokensCount()) l.Queue.Add(msg) } else { diff --git a/pkg/parser/node.go b/pkg/parser/node.go index 036a244cc..f3a0e8c55 100644 --- a/pkg/parser/node.go +++ b/pkg/parser/node.go @@ -3,7 +3,6 @@ package parser import ( "errors" "fmt" - "net" "strings" "time" @@ -172,78 +171,24 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri if n.Name != "" { NodesHits.With(prometheus.Labels{"source": p.Line.Src, "type": p.Line.Module, 
"name": n.Name}).Inc() } - isWhitelisted := false - hasWhitelist := false - var srcs []net.IP - /*overflow and log don't hold the source ip in the same field, should be changed */ - /* perform whitelist checks for ips, cidr accordingly */ - /* TODO move whitelist elsewhere */ - if p.Type == types.LOG { - if _, ok := p.Meta["source_ip"]; ok { - srcs = append(srcs, net.ParseIP(p.Meta["source_ip"])) - } - } else if p.Type == types.OVFLW { - for k := range p.Overflow.Sources { - srcs = append(srcs, net.ParseIP(k)) - } + exprErr := error(nil) + isWhitelisted := n.CheckIPsWL(p.ParseIPSources()) + if !isWhitelisted { + isWhitelisted, exprErr = n.CheckExprWL(cachedExprEnv) } - for _, src := range srcs { - if isWhitelisted { - break - } - for _, v := range n.Whitelist.B_Ips { - if v.Equal(src) { - clog.Debugf("Event from [%s] is whitelisted by IP (%s), reason [%s]", src, v, n.Whitelist.Reason) - isWhitelisted = true - } else { - clog.Tracef("whitelist: %s is not eq [%s]", src, v) - } - hasWhitelist = true - } - for _, v := range n.Whitelist.B_Cidrs { - if v.Contains(src) { - clog.Debugf("Event from [%s] is whitelisted by CIDR (%s), reason [%s]", src, v, n.Whitelist.Reason) - isWhitelisted = true - } else { - clog.Tracef("whitelist: %s not in [%s]", src, v) - } - hasWhitelist = true - } + if exprErr != nil { + // Previous code returned nil if there was an error, so we keep this behavior + return false, nil //nolint:nilerr } - if isWhitelisted { + if isWhitelisted && !p.Whitelisted { p.Whitelisted = true - } - /* run whitelist expression tests anyway */ - for eidx, e := range n.Whitelist.B_Exprs { - output, err := expr.Run(e.Filter, cachedExprEnv) - if err != nil { - clog.Warningf("failed to run whitelist expr : %v", err) - clog.Debug("Event leaving node : ko") - return false, nil - } - switch out := output.(type) { - case bool: - if n.Debug { - e.ExprDebugger.Run(clog, out, cachedExprEnv) - } - if out { - clog.Debugf("Event is whitelisted by expr, reason [%s]", n.Whitelist.Reason) - p.Whitelisted = true - isWhitelisted = true - } - hasWhitelist = true - default: - log.Errorf("unexpected type %t (%v) while running '%s'", output, output, n.Whitelist.Exprs[eidx]) - } - } - if isWhitelisted { p.WhitelistReason = n.Whitelist.Reason /*huglily wipe the ban order if the event is whitelisted and it's an overflow */ if p.Type == types.OVFLW { /*don't do this at home kids */ ips := []string{} - for _, src := range srcs { - ips = append(ips, src.String()) + for k := range p.Overflow.Sources { + ips = append(ips, k) } clog.Infof("Ban for %s whitelisted, reason [%s]", strings.Join(ips, ","), n.Whitelist.Reason) p.Overflow.Whitelisted = true @@ -402,9 +347,10 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri } /* - This is to apply statics when the node *has* whitelists that successfully matched the node. 
+ This is to apply statics when the node either was whitelisted, or is not a whitelist (it has no expr/ips wl) + It is overconvoluted and should be simplified */ - if len(n.Statics) > 0 && (isWhitelisted || !hasWhitelist) { + if len(n.Statics) > 0 && (isWhitelisted || !n.ContainsWLs()) { clog.Debugf("+ Processing %d statics", len(n.Statics)) // if all else is good in whitelist, process node's statics err := n.ProcessStatics(n.Statics, p) @@ -617,36 +563,11 @@ func (n *Node) compile(pctx *UnixParserCtx, ectx EnricherCtx) error { } /* compile whitelists if present */ - for _, v := range n.Whitelist.Ips { - n.Whitelist.B_Ips = append(n.Whitelist.B_Ips, net.ParseIP(v)) - n.Logger.Debugf("adding ip %s to whitelists", net.ParseIP(v)) - valid = true - } - - for _, v := range n.Whitelist.Cidrs { - _, tnet, err := net.ParseCIDR(v) - if err != nil { - n.Logger.Fatalf("Unable to parse cidr whitelist '%s' : %v.", v, err) - } - n.Whitelist.B_Cidrs = append(n.Whitelist.B_Cidrs, tnet) - n.Logger.Debugf("adding cidr %s to whitelists", tnet) - valid = true - } - - for _, filter := range n.Whitelist.Exprs { - expression := &ExprWhitelist{} - expression.Filter, err = expr.Compile(filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...) - if err != nil { - n.Logger.Fatalf("Unable to compile whitelist expression '%s' : %v.", filter, err) - } - expression.ExprDebugger, err = exprhelpers.NewDebugger(filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...) - if err != nil { - log.Errorf("unable to build debug filter for '%s' : %s", filter, err) - } - n.Whitelist.B_Exprs = append(n.Whitelist.B_Exprs, expression) - n.Logger.Debugf("adding expression %s to whitelists", filter) - valid = true + whitelistValid, err := n.CompileWLs() + if err != nil { + return err } + valid = valid || whitelistValid if !valid { /* node is empty, error force return */ diff --git a/pkg/parser/runtime.go b/pkg/parser/runtime.go index e6ee07ea7..6708a70bb 100644 --- a/pkg/parser/runtime.go +++ b/pkg/parser/runtime.go @@ -154,7 +154,7 @@ func (n *Node) ProcessStatics(statics []ExtraField, event *types.Event) error { /*still way too hackish, but : inject all the results in enriched, and */ if enricherPlugin, ok := n.EnrichFunctions.Registered[static.Method]; ok { clog.Tracef("Found method '%s'", static.Method) - ret, err := enricherPlugin.EnrichFunc(value, event, enricherPlugin.Ctx, n.Logger) + ret, err := enricherPlugin.EnrichFunc(value, event, enricherPlugin.Ctx, n.Logger.WithField("method", static.Method)) if err != nil { clog.Errorf("method '%s' returned an error : %v", static.Method, err) } diff --git a/pkg/parser/stage.go b/pkg/parser/stage.go index 37d43fbfe..9f650e116 100644 --- a/pkg/parser/stage.go +++ b/pkg/parser/stage.go @@ -75,7 +75,7 @@ func LoadStages(stageFiles []Stagefile, pctx *UnixParserCtx, ectx EnricherCtx) ( //check for empty bucket if node.Name == "" && node.Description == "" && node.Author == "" { - log.Infof("Node in %s has no name,author or description. Skipping.", stageFile.Filename) + log.Infof("Node in %s has no name, author or description. 
Skipping.", stageFile.Filename) continue } //check compat diff --git a/pkg/parser/unix_parser.go b/pkg/parser/unix_parser.go index a6992befe..2e4a8035b 100644 --- a/pkg/parser/unix_parser.go +++ b/pkg/parser/unix_parser.go @@ -148,6 +148,12 @@ func LoadParsers(cConfig *csconfig.Config, parsers *Parsers) (*Parsers, error) { parsers.Ctx.Profiling = true parsers.Povfwctx.Profiling = true } - + /* + Reset CTX grok to reduce memory footprint after we compile all the patterns + */ + parsers.Ctx.Grok = grokky.Host{} + parsers.Povfwctx.Grok = grokky.Host{} + parsers.StageFiles = []Stagefile{} + parsers.PovfwStageFiles = []Stagefile{} return parsers, nil } diff --git a/pkg/parser/whitelist.go b/pkg/parser/whitelist.go index e2f179fb3..8c18e70c3 100644 --- a/pkg/parser/whitelist.go +++ b/pkg/parser/whitelist.go @@ -1,11 +1,14 @@ package parser import ( + "fmt" "net" + "github.com/antonmedv/expr" "github.com/antonmedv/expr/vm" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" + "github.com/crowdsecurity/crowdsec/pkg/types" ) type Whitelist struct { @@ -22,3 +25,111 @@ type ExprWhitelist struct { Filter *vm.Program ExprDebugger *exprhelpers.ExprDebugger // used to debug expression by printing the content of each variable of the expression } + +func (n *Node) ContainsWLs() bool { + return n.ContainsIPLists() || n.ContainsExprLists() +} + +func (n *Node) ContainsExprLists() bool { + return len(n.Whitelist.B_Exprs) > 0 +} + +func (n *Node) ContainsIPLists() bool { + return len(n.Whitelist.B_Ips) > 0 || len(n.Whitelist.B_Cidrs) > 0 +} + +func (n *Node) CheckIPsWL(srcs []net.IP) bool { + isWhitelisted := false + if !n.ContainsIPLists() { + return isWhitelisted + } + for _, src := range srcs { + if isWhitelisted { + break + } + for _, v := range n.Whitelist.B_Ips { + if v.Equal(src) { + n.Logger.Debugf("Event from [%s] is whitelisted by IP (%s), reason [%s]", src, v, n.Whitelist.Reason) + isWhitelisted = true + break + } + n.Logger.Tracef("whitelist: %s is not eq [%s]", src, v) + } + for _, v := range n.Whitelist.B_Cidrs { + if v.Contains(src) { + n.Logger.Debugf("Event from [%s] is whitelisted by CIDR (%s), reason [%s]", src, v, n.Whitelist.Reason) + isWhitelisted = true + break + } + n.Logger.Tracef("whitelist: %s not in [%s]", src, v) + } + } + return isWhitelisted +} + +func (n *Node) CheckExprWL(cachedExprEnv map[string]interface{}) (bool, error) { + isWhitelisted := false + + if !n.ContainsExprLists() { + return false, nil + } + /* run whitelist expression tests anyway */ + for eidx, e := range n.Whitelist.B_Exprs { + //if we already know the event is whitelisted, skip the rest of the expressions + if isWhitelisted { + break + } + output, err := expr.Run(e.Filter, cachedExprEnv) + if err != nil { + n.Logger.Warningf("failed to run whitelist expr : %v", err) + n.Logger.Debug("Event leaving node : ko") + return isWhitelisted, err + } + switch out := output.(type) { + case bool: + if n.Debug { + e.ExprDebugger.Run(n.Logger, out, cachedExprEnv) + } + if out { + n.Logger.Debugf("Event is whitelisted by expr, reason [%s]", n.Whitelist.Reason) + isWhitelisted = true + } + default: + n.Logger.Errorf("unexpected type %t (%v) while running '%s'", output, output, n.Whitelist.Exprs[eidx]) + } + } + return isWhitelisted, nil +} + +func (n *Node) CompileWLs() (bool, error) { + for _, v := range n.Whitelist.Ips { + n.Whitelist.B_Ips = append(n.Whitelist.B_Ips, net.ParseIP(v)) + n.Logger.Debugf("adding ip %s to whitelists", net.ParseIP(v)) + } + + for _, v := range n.Whitelist.Cidrs { + _, tnet, err := 
net.ParseCIDR(v) + if err != nil { + return false, fmt.Errorf("unable to parse cidr whitelist '%s' : %v", v, err) + } + n.Whitelist.B_Cidrs = append(n.Whitelist.B_Cidrs, tnet) + n.Logger.Debugf("adding cidr %s to whitelists", tnet) + } + + for _, filter := range n.Whitelist.Exprs { + var err error + expression := &ExprWhitelist{} + expression.Filter, err = expr.Compile(filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...) + if err != nil { + return false, fmt.Errorf("unable to compile whitelist expression '%s' : %v", filter, err) + } + expression.ExprDebugger, err = exprhelpers.NewDebugger(filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...) + if err != nil { + n.Logger.Errorf("unable to build debug filter for '%s' : %s", filter, err) + } + + n.Whitelist.B_Exprs = append(n.Whitelist.B_Exprs, expression) + n.Logger.Debugf("adding expression %s to whitelists", filter) + } + return n.ContainsWLs(), nil +} diff --git a/pkg/parser/whitelist_test.go b/pkg/parser/whitelist_test.go new file mode 100644 index 000000000..8796aaeda --- /dev/null +++ b/pkg/parser/whitelist_test.go @@ -0,0 +1,300 @@ +package parser + +import ( + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/cstest" + + "github.com/crowdsecurity/crowdsec/pkg/models" + "github.com/crowdsecurity/crowdsec/pkg/types" +) + +func TestWhitelistCompile(t *testing.T) { + node := &Node{ + Logger: log.NewEntry(log.New()), + } + tests := []struct { + name string + whitelist Whitelist + expectedErr string + }{ + { + name: "Valid CIDR whitelist", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "127.0.0.1/24", + }, + }, + }, + { + name: "Invalid CIDR whitelist", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "127.0.0.1/1000", + }, + }, + expectedErr: "invalid CIDR address", + }, + { + name: "Valid EXPR whitelist", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "1==1", + }, + }, + }, + { + name: "Invalid EXPR whitelist", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "evt.THISPROPERTYSHOULDERROR == true", + }, + }, + expectedErr: "types.Event has no field", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + node.Whitelist = tt.whitelist + _, err := node.CompileWLs() + cstest.RequireErrorContains(t, err, tt.expectedErr) + }) + } +} + +func TestWhitelistCheck(t *testing.T) { + node := &Node{ + Logger: log.NewEntry(log.New()), + } + tests := []struct { + name string + whitelist Whitelist + event *types.Event + expected bool + }{ + { + name: "IP Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Ips: []string{ + "127.0.0.1", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.1", + }, + }, + expected: true, + }, + { + name: "IP Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Ips: []string{ + "127.0.0.1", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.2", + }, + }, + }, + { + name: "CIDR Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "127.0.0.1/32", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.1", + }, + }, + expected: true, + }, + { + name: "CIDR Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "127.0.0.1/32", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.2", + }, + }, + }, + { + name: 
"EXPR Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "evt.Meta.source_ip == '127.0.0.1'", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.1", + }, + }, + expected: true, + }, + { + name: "EXPR Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "evt.Meta.source_ip == '127.0.0.1'", + }, + }, + event: &types.Event{ + Meta: map[string]string{ + "source_ip": "127.0.0.2", + }, + }, + }, + { + name: "Postoverflow IP Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Ips: []string{ + "192.168.1.1", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + Sources: map[string]models.Source{ + "192.168.1.1": {}, + }, + }, + }, + expected: true, + }, + { + name: "Postoverflow IP Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Ips: []string{ + "192.168.1.2", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + Sources: map[string]models.Source{ + "192.168.1.1": {}, + }, + }, + }, + }, + { + name: "Postoverflow CIDR Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "192.168.1.1/32", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + Sources: map[string]models.Source{ + "192.168.1.1": {}, + }, + }, + }, + expected: true, + }, + { + name: "Postoverflow CIDR Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Cidrs: []string{ + "192.168.1.2/32", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + Sources: map[string]models.Source{ + "192.168.1.1": {}, + }, + }, + }, + }, + { + name: "Postoverflow EXPR Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "evt.Overflow.APIAlerts[0].Source.Cn == 'test'", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + APIAlerts: []models.Alert{ + { + Source: &models.Source{ + Cn: "test", + }, + }, + }, + }, + }, + expected: true, + }, + { + name: "Postoverflow EXPR Not Whitelisted", + whitelist: Whitelist{ + Reason: "test", + Exprs: []string{ + "evt.Overflow.APIAlerts[0].Source.Cn == 'test2'", + }, + }, + event: &types.Event{ + Type: types.OVFLW, + Overflow: types.RuntimeAlert{ + APIAlerts: []models.Alert{ + { + Source: &models.Source{ + Cn: "test", + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + var err error + node.Whitelist = tt.whitelist + node.CompileWLs() + isWhitelisted := node.CheckIPsWL(tt.event.ParseIPSources()) + if !isWhitelisted { + isWhitelisted, err = node.CheckExprWL(map[string]interface{}{"evt": tt.event}) + } + require.NoError(t, err) + require.Equal(t, tt.expected, isWhitelisted) + }) + } +} diff --git a/pkg/setup/detect.go b/pkg/setup/detect.go index b345c0d6f..7d73092f7 100644 --- a/pkg/setup/detect.go +++ b/pkg/setup/detect.go @@ -3,6 +3,7 @@ package setup import ( "bytes" "fmt" + "io" "os" "os/exec" "sort" @@ -86,19 +87,19 @@ func validateDataSource(opaqueDS DataSourceItem) error { return nil } -func readDetectConfig(file string) (DetectConfig, error) { +func readDetectConfig(fin io.Reader) (DetectConfig, error) { var dc DetectConfig - yamlBytes, err := os.ReadFile(file) + yamlBytes, err := io.ReadAll(fin) if err != nil { - return DetectConfig{}, fmt.Errorf("while reading file: %w", err) + return DetectConfig{}, err } dec := yaml.NewDecoder(bytes.NewBuffer(yamlBytes)) dec.KnownFields(true) if err = dec.Decode(&dc); err != nil { - return 
DetectConfig{}, fmt.Errorf("while parsing %s: %w", file, err) + return DetectConfig{}, err } switch dc.Version { @@ -107,7 +108,7 @@ func readDetectConfig(file string) (DetectConfig, error) { case "1.0": // all is well default: - return DetectConfig{}, fmt.Errorf("unsupported version tag '%s' (must be 1.0)", dc.Version) + return DetectConfig{}, fmt.Errorf("invalid version tag '%s' (must be 1.0)", dc.Version) } for name, svc := range dc.Detect { @@ -457,15 +458,13 @@ type DetectOptions struct { // Detect performs the service detection from a given configuration. // It outputs a setup file that can be used as input to "cscli setup install-hub" // or "cscli setup datasources". -func Detect(serviceDetectionFile string, opts DetectOptions) (Setup, error) { +func Detect(detectReader io.Reader, opts DetectOptions) (Setup, error) { ret := Setup{} // explicitly initialize to avoid json mashaling an empty slice as "null" ret.Setup = make([]ServiceSetup, 0) - log.Tracef("Reading detection rules: %s", serviceDetectionFile) - - sc, err := readDetectConfig(serviceDetectionFile) + sc, err := readDetectConfig(detectReader) if err != nil { return ret, err } @@ -559,8 +558,8 @@ func Detect(serviceDetectionFile string, opts DetectOptions) (Setup, error) { } // ListSupported parses the configuration file and outputs a list of the supported services. -func ListSupported(serviceDetectionFile string) ([]string, error) { - dc, err := readDetectConfig(serviceDetectionFile) +func ListSupported(detectConfig io.Reader) ([]string, error) { + dc, err := readDetectConfig(detectConfig) if err != nil { return nil, err } diff --git a/pkg/setup/detect_test.go b/pkg/setup/detect_test.go index fb0535635..162df0db2 100644 --- a/pkg/setup/detect_test.go +++ b/pkg/setup/detect_test.go @@ -10,7 +10,6 @@ import ( "github.com/lithammer/dedent" "github.com/stretchr/testify/require" - "github.com/crowdsecurity/go-cs-lib/csstring" "github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/crowdsec/pkg/setup" @@ -58,7 +57,7 @@ func TestSetupHelperProcess(t *testing.T) { os.Exit(0) } -func tempYAML(t *testing.T, content string) string { +func tempYAML(t *testing.T, content string) os.File { t.Helper() require := require.New(t) file, err := os.CreateTemp("", "") @@ -70,7 +69,10 @@ func tempYAML(t *testing.T, content string) string { err = file.Close() require.NoError(err) - return file.Name() + file, err = os.Open(file.Name()) + require.NoError(err) + + return *file } func TestPathExists(t *testing.T) { @@ -239,7 +241,7 @@ func TestListSupported(t *testing.T) { "invalid yaml: bad version", "version: 2.0", nil, - "unsupported version tag '2.0' (must be 1.0)", + "invalid version tag '2.0' (must be 1.0)", }, } @@ -248,8 +250,8 @@ func TestListSupported(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() f := tempYAML(t, tc.yml) - defer os.Remove(f) - supported, err := setup.ListSupported(f) + defer os.Remove(f.Name()) + supported, err := setup.ListSupported(&f) cstest.RequireErrorContains(t, err, tc.expectedErr) require.ElementsMatch(t, tc.expected, supported) }) @@ -373,9 +375,9 @@ func TestDetectSimpleRule(t *testing.T) { - false ugly: `) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{}) + detected, err := setup.Detect(&f, setup.DetectOptions{}) require.NoError(err) expected := []setup.ServiceSetup{ @@ -420,9 +422,9 @@ detect: tc := tc t.Run(tc.name, func(t *testing.T) { f := tempYAML(t, tc.config) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, 
err := setup.Detect(f, setup.DetectOptions{}) + detected, err := setup.Detect(&f, setup.DetectOptions{}) cstest.RequireErrorContains(t, err, tc.expectedErr) require.Equal(tc.expected, detected) }) @@ -514,9 +516,9 @@ detect: tc := tc t.Run(tc.name, func(t *testing.T) { f := tempYAML(t, tc.config) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{}) + detected, err := setup.Detect(&f, setup.DetectOptions{}) cstest.RequireErrorContains(t, err, tc.expectedErr) require.Equal(tc.expected, detected) }) @@ -542,9 +544,9 @@ func TestDetectForcedUnit(t *testing.T) { journalctl_filter: - _SYSTEMD_UNIT=crowdsec-setup-forced.service `) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{ForcedUnits: []string{"crowdsec-setup-forced.service"}}) + detected, err := setup.Detect(&f, setup.DetectOptions{ForcedUnits: []string{"crowdsec-setup-forced.service"}}) require.NoError(err) expected := setup.Setup{ @@ -580,9 +582,9 @@ func TestDetectForcedProcess(t *testing.T) { when: - ProcessRunning("foobar") `) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{ForcedProcesses: []string{"foobar"}}) + detected, err := setup.Detect(&f, setup.DetectOptions{ForcedProcesses: []string{"foobar"}}) require.NoError(err) expected := setup.Setup{ @@ -610,9 +612,9 @@ func TestDetectSkipService(t *testing.T) { when: - ProcessRunning("foobar") `) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{ForcedProcesses: []string{"foobar"}, SkipServices: []string{"wizard"}}) + detected, err := setup.Detect(&f, setup.DetectOptions{ForcedProcesses: []string{"foobar"}, SkipServices: []string{"wizard"}}) require.NoError(err) expected := setup.Setup{[]setup.ServiceSetup{}} @@ -826,9 +828,9 @@ func TestDetectForcedOS(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { f := tempYAML(t, tc.config) - defer os.Remove(f) + defer os.Remove(f.Name()) - detected, err := setup.Detect(f, setup.DetectOptions{ForcedOS: tc.forced}) + detected, err := setup.Detect(&f, setup.DetectOptions{ForcedOS: tc.forced}) cstest.RequireErrorContains(t, err, tc.expectedErr) require.Equal(tc.expected, detected) }) @@ -882,7 +884,7 @@ func TestDetectDatasourceValidation(t *testing.T) { datasource: source: file`, expected: setup.Setup{Setup: []setup.ServiceSetup{}}, - expectedErr: "while parsing {{.DetectYaml}}: yaml: unmarshal errors:\n line 6: field source not found in type setup.Service", + expectedErr: "yaml: unmarshal errors:\n line 6: field source not found in type setup.Service", }, { name: "source is mismatched", config: ` @@ -1001,18 +1003,10 @@ func TestDetectDatasourceValidation(t *testing.T) { for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { - detectYaml := tempYAML(t, tc.config) - defer os.Remove(detectYaml) - - data := map[string]string{ - "DetectYaml": detectYaml, - } - - expectedErr, err := csstring.Interpolate(tc.expectedErr, data) - require.NoError(err) - - detected, err := setup.Detect(detectYaml, setup.DetectOptions{}) - cstest.RequireErrorContains(t, err, expectedErr) + f := tempYAML(t, tc.config) + defer os.Remove(f.Name()) + detected, err := setup.Detect(&f, setup.DetectOptions{}) + cstest.RequireErrorContains(t, err, tc.expectedErr) require.Equal(tc.expected, detected) }) } diff --git a/pkg/setup/install.go b/pkg/setup/install.go index 5d3bfdbc9..92a1968c8 100644 --- a/pkg/setup/install.go +++ 
b/pkg/setup/install.go @@ -56,9 +56,7 @@ func InstallHubItems(csConfig *csconfig.Config, input []byte, dryRun bool) error return fmt.Errorf("loading hub: %w", err) } - if err := cwhub.SetHubBranch(); err != nil { - return fmt.Errorf("setting hub branch: %w", err) - } + cwhub.SetHubBranch() if err := cwhub.GetHubIdx(csConfig.Hub); err != nil { return fmt.Errorf("getting hub index: %w", err) diff --git a/pkg/types/constants.go b/pkg/types/constants.go index fa50b64f3..acb5b5bfa 100644 --- a/pkg/types/constants.go +++ b/pkg/types/constants.go @@ -17,6 +17,7 @@ const ConsoleOrigin = "console" const CscliImportOrigin = "cscli-import" const ListOrigin = "lists" const CAPIOrigin = "CAPI" +const CommunityBlocklistPullSourceScope = "crowdsecurity/community-blocklist" const DecisionTypeBan = "ban" diff --git a/pkg/types/event.go b/pkg/types/event.go index d3b21c35f..b60c9a823 100644 --- a/pkg/types/event.go +++ b/pkg/types/event.go @@ -2,6 +2,7 @@ package types import ( "fmt" + "net" "regexp" "time" @@ -276,6 +277,21 @@ func (e *Event) GetMeta(key string) string { return "" } +func (e *Event) ParseIPSources() []net.IP { + var srcs []net.IP + switch e.Type { + case LOG: + if _, ok := e.Meta["source_ip"]; ok { + srcs = append(srcs, net.ParseIP(e.Meta["source_ip"])) + } + case OVFLW: + for k := range e.Overflow.Sources { + srcs = append(srcs, net.ParseIP(k)) + } + } + return srcs +} + // Move in leakybuckets const ( Undefined = "" diff --git a/pkg/types/event_test.go b/pkg/types/event_test.go new file mode 100644 index 000000000..c3261c647 --- /dev/null +++ b/pkg/types/event_test.go @@ -0,0 +1,79 @@ +package types + +import ( + "net" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/crowdsec/pkg/models" +) + +func TestParseIPSources(t *testing.T) { + tests := []struct { + name string + evt Event + expected []net.IP + }{ + { + name: "ParseIPSources: Valid Log Sources", + evt: Event{ + Type: LOG, + Meta: map[string]string{ + "source_ip": "127.0.0.1", + }, + }, + expected: []net.IP{ + net.ParseIP("127.0.0.1"), + }, + }, + { + name: "ParseIPSources: Valid Overflow Sources", + evt: Event{ + Type: OVFLW, + Overflow: RuntimeAlert{ + Sources: map[string]models.Source{ + "127.0.0.1": {}, + }, + }, + }, + expected: []net.IP{ + net.ParseIP("127.0.0.1"), + }, + }, + { + name: "ParseIPSources: Invalid Log Sources", + evt: Event{ + Type: LOG, + Meta: map[string]string{ + "source_ip": "IAMNOTANIP", + }, + }, + expected: []net.IP{ + nil, + }, + }, + { + name: "ParseIPSources: Invalid Overflow Sources", + evt: Event{ + Type: OVFLW, + Overflow: RuntimeAlert{ + Sources: map[string]models.Source{ + "IAMNOTANIP": {}, + }, + }, + }, + expected: []net.IP{ + nil, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + ips := tt.evt.ParseIPSources() + assert.Equal(t, ips, tt.expected) + }) + } +} diff --git a/pkg/types/profile.go b/pkg/types/profile.go deleted file mode 100644 index e8034210c..000000000 --- a/pkg/types/profile.go +++ /dev/null @@ -1,25 +0,0 @@ -package types - -import ( - "time" - - "github.com/antonmedv/expr/vm" -) - -/*Action profiles*/ -type RemediationProfile struct { - Apply bool - Ban bool - Slow bool - Captcha bool - Duration string - TimeDuration time.Duration -} -type Profile struct { - Profile string `yaml:"profile"` - Filter string `yaml:"filter"` - Remediation RemediationProfile `yaml:"remediation"` - RunTimeFilter *vm.Program - ApiPush *bool `yaml:"api"` - OutputConfigs []map[string]string `yaml:"outputs,omitempty"` -} diff 
--git a/pkg/types/utils.go b/pkg/types/utils.go index 0485db59e..8a49c9d32 100644 --- a/pkg/types/utils.go +++ b/pkg/types/utils.go @@ -1,12 +1,8 @@ package types import ( - "bufio" "fmt" - "os" "path/filepath" - "strconv" - "strings" "time" log "github.com/sirupsen/logrus" @@ -68,40 +64,6 @@ func ConfigureLogger(clog *log.Logger) error { return nil } -func ParseDuration(d string) (time.Duration, error) { - durationStr := d - if strings.HasSuffix(d, "d") { - days := strings.Split(d, "d")[0] - if len(days) == 0 { - return 0, fmt.Errorf("'%s' can't be parsed as duration", d) - } - daysInt, err := strconv.Atoi(days) - if err != nil { - return 0, err - } - durationStr = strconv.Itoa(daysInt*24) + "h" - } - duration, err := time.ParseDuration(durationStr) - if err != nil { - return 0, err - } - return duration, nil -} - func UtcNow() time.Time { return time.Now().UTC() } - -func GetLineCountForFile(filepath string) int { - f, err := os.Open(filepath) - if err != nil { - log.Fatalf("unable to open log file %s : %s", filepath, err) - } - defer f.Close() - lc := 0 - fs := bufio.NewScanner(f) - for fs.Scan() { - lc++ - } - return lc -} diff --git a/plugins/notifications/dummy/LICENSE b/plugins/notifications/dummy/LICENSE deleted file mode 100644 index 912563863..000000000 --- a/plugins/notifications/dummy/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Crowdsec - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/plugins/notifications/email/LICENSE b/plugins/notifications/email/LICENSE deleted file mode 100644 index 912563863..000000000 --- a/plugins/notifications/email/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Crowdsec - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/plugins/notifications/email/Makefile b/plugins/notifications/email/Makefile deleted file mode 100644 index 096b0c40c..000000000 --- a/plugins/notifications/email/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -ifeq ($(OS), Windows_NT) - SHELL := pwsh.exe - .SHELLFLAGS := -NoProfile -Command - EXT = .exe -endif - -PLUGIN = email -BINARY_NAME = notification-$(PLUGIN)$(EXT) - -GO = go -GOBUILD = $(GO) build - -build: clean - $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) - -.PHONY: clean -clean: - @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) diff --git a/plugins/notifications/email/go.mod b/plugins/notifications/email/go.mod deleted file mode 100644 index cab13fdc8..000000000 --- a/plugins/notifications/email/go.mod +++ /dev/null @@ -1,29 +0,0 @@ -module github.com/crowdsecurity/email-plugin - -go 1.20 - -replace github.com/crowdsecurity/crowdsec => ../../../ - -require ( - github.com/crowdsecurity/crowdsec v1.5.2 - github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-plugin v1.4.10 - github.com/xhit/go-simple-mail/v2 v2.10.0 - gopkg.in/yaml.v2 v2.4.0 -) - -require ( - github.com/fatih/color v1.15.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/go-testing-interface v1.0.0 // indirect - github.com/oklog/run v1.0.0 // indirect - golang.org/x/net v0.11.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect - google.golang.org/protobuf v1.30.0 // indirect -) diff --git a/plugins/notifications/email/go.sum b/plugins/notifications/email/go.sum deleted file mode 100644 index 4d8ac7bb4..000000000 --- a/plugins/notifications/email/go.sum +++ /dev/null @@ -1,66 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= -github.com/hashicorp/go-plugin v1.4.10/go.mod 
h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/xhit/go-simple-mail/v2 v2.10.0 h1:nib6RaJ4qVh5HD9UE9QJqnUZyWp3upv+Z6CFxaMj0V8= -github.com/xhit/go-simple-mail/v2 v2.10.0/go.mod h1:kA1XbQfCI4JxQ9ccSN6VFyIEkkugOm7YiPkA5hKiQn4= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= -golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 
h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/plugins/notifications/http/LICENSE b/plugins/notifications/http/LICENSE deleted file mode 100644 index 912563863..000000000 --- a/plugins/notifications/http/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Crowdsec - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/plugins/notifications/http/go.mod b/plugins/notifications/http/go.mod deleted file mode 100644 index a1b171aa5..000000000 --- a/plugins/notifications/http/go.mod +++ /dev/null @@ -1,28 +0,0 @@ -module github.com/crowdsecurity/http-plugin - -go 1.20 - -replace github.com/crowdsecurity/crowdsec => ../../../ - -require ( - github.com/crowdsecurity/crowdsec v1.5.2 - github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-plugin v1.4.10 - gopkg.in/yaml.v2 v2.4.0 -) - -require ( - github.com/fatih/color v1.15.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/go-testing-interface v1.0.0 // indirect - github.com/oklog/run v1.0.0 // indirect - golang.org/x/net v0.11.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect - google.golang.org/protobuf v1.30.0 // indirect -) diff --git a/plugins/notifications/http/go.sum b/plugins/notifications/http/go.sum deleted file mode 100644 index 7a99c33fa..000000000 --- a/plugins/notifications/http/go.sum +++ /dev/null @@ -1,64 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= -github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= 
-github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= -golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/plugins/notifications/sentinel/go.mod b/plugins/notifications/sentinel/go.mod deleted file mode 100644 index 22b831ec1..000000000 --- a/plugins/notifications/sentinel/go.mod +++ /dev/null @@ -1,28 +0,0 @@ -module github.com/crowdsecurity/sentinel-plugin - -go 1.20 - -replace github.com/crowdsecurity/crowdsec => ../../../ - -require ( - github.com/crowdsecurity/crowdsec v1.5.0 - github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-plugin v1.4.10 - gopkg.in/yaml.v3 v3.0.1 -) - -require ( - github.com/fatih/color v1.15.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/go-testing-interface v1.0.0 // indirect - github.com/oklog/run v1.0.0 // indirect - golang.org/x/net v0.11.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect - google.golang.org/protobuf v1.30.0 // indirect -) diff --git a/plugins/notifications/sentinel/go.sum b/plugins/notifications/sentinel/go.sum deleted file mode 100644 index e14b515e3..000000000 --- a/plugins/notifications/sentinel/go.sum +++ /dev/null @@ -1,62 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= -github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty 
v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= -golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/plugins/notifications/slack/LICENSE b/plugins/notifications/slack/LICENSE deleted file mode 100644 index 912563863..000000000 --- a/plugins/notifications/slack/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Crowdsec - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/plugins/notifications/slack/go.mod b/plugins/notifications/slack/go.mod deleted file mode 100644 index 35bf487ba..000000000 --- a/plugins/notifications/slack/go.mod +++ /dev/null @@ -1,31 +0,0 @@ -module github.com/crowdsecurity/slack-plugin - -go 1.20 - -replace github.com/crowdsecurity/crowdsec => ../../../ - -require ( - github.com/crowdsecurity/crowdsec v1.5.2 - github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-plugin v1.4.10 - github.com/slack-go/slack v0.9.2 - gopkg.in/yaml.v2 v2.4.0 -) - -require ( - github.com/fatih/color v1.15.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/gorilla/websocket v1.4.2 // indirect - github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/go-testing-interface v1.0.0 // indirect - github.com/oklog/run v1.0.0 // indirect - github.com/pkg/errors v0.9.1 // indirect - golang.org/x/net v0.11.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect - google.golang.org/protobuf v1.30.0 // indirect -) diff --git a/plugins/notifications/slack/go.sum b/plugins/notifications/slack/go.sum deleted file mode 100644 index 72926c7a0..000000000 --- a/plugins/notifications/slack/go.sum +++ /dev/null @@ -1,74 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/go-test/deep v1.0.4 h1:u2CU3YKy9I2pmu9pX0eq50wCgjfGIt539SqR7FbHiho= -github.com/go-test/deep 
v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= -github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/slack-go/slack v0.9.2 h1:tjIrKKYUCOmWeEAktWShKW+3UjLTH/wmgmCkAGAf8wM= -github.com/slack-go/slack v0.9.2/go.mod h1:wWL//kk0ho+FcQXcBTmEafUI5dz4qz5f4mMk8oIkioQ= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= 
-golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/plugins/notifications/splunk/LICENSE b/plugins/notifications/splunk/LICENSE deleted file mode 100644 index 912563863..000000000 --- a/plugins/notifications/splunk/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Crowdsec - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/plugins/notifications/splunk/go.mod b/plugins/notifications/splunk/go.mod deleted file mode 100644 index 62c333fcc..000000000 --- a/plugins/notifications/splunk/go.mod +++ /dev/null @@ -1,28 +0,0 @@ -module github.com/crowdsecurity/splunk-plugin - -go 1.20 - -replace github.com/crowdsecurity/crowdsec => ../../../ - -require ( - github.com/crowdsecurity/crowdsec v1.5.2 - github.com/hashicorp/go-hclog v1.5.0 - github.com/hashicorp/go-plugin v1.4.10 - gopkg.in/yaml.v2 v2.4.0 -) - -require ( - github.com/fatih/color v1.15.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/go-testing-interface v1.0.0 // indirect - github.com/oklog/run v1.0.0 // indirect - golang.org/x/net v0.11.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/grpc v1.56.1 // indirect - google.golang.org/protobuf v1.30.0 // indirect -) diff --git a/plugins/notifications/splunk/go.sum b/plugins/notifications/splunk/go.sum deleted file mode 100644 index 7a99c33fa..000000000 --- a/plugins/notifications/splunk/go.sum +++ /dev/null @@ -1,64 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= -github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= 
-github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= -golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/rpm/SPECS/crowdsec.spec b/rpm/SPECS/crowdsec.spec index 327294008..5a3980651 100644 --- a/rpm/SPECS/crowdsec.spec +++ b/rpm/SPECS/crowdsec.spec @@ -63,17 +63,17 @@ install -m 644 -D config/context.yaml %{buildroot}%{_sysconfdir}/crowdsec/consol install -m 750 -D config/%{name}.cron.daily %{buildroot}%{_sysconfdir}/cron.daily/%{name} install -m 644 -D %{SOURCE1} %{buildroot}%{_presetdir} -install -m 551 plugins/notifications/slack/notification-slack %{buildroot}%{_libdir}/%{name}/plugins/ -install -m 551 plugins/notifications/http/notification-http %{buildroot}%{_libdir}/%{name}/plugins/ -install -m 551 plugins/notifications/splunk/notification-splunk %{buildroot}%{_libdir}/%{name}/plugins/ -install -m 551 plugins/notifications/email/notification-email %{buildroot}%{_libdir}/%{name}/plugins/ -install -m 551 plugins/notifications/sentinel/notification-sentinel %{buildroot}%{_libdir}/%{name}/plugins/ +install -m 551 cmd/notification-slack/notification-slack %{buildroot}%{_libdir}/%{name}/plugins/ +install -m 551 cmd/notification-http/notification-http %{buildroot}%{_libdir}/%{name}/plugins/ +install -m 551 cmd/notification-splunk/notification-splunk %{buildroot}%{_libdir}/%{name}/plugins/ +install -m 551 cmd/notification-email/notification-email %{buildroot}%{_libdir}/%{name}/plugins/ +install -m 551 cmd/notification-sentinel/notification-sentinel %{buildroot}%{_libdir}/%{name}/plugins/ -install -m 600 plugins/notifications/slack/slack.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ -install -m 600 plugins/notifications/http/http.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ -install -m 600 plugins/notifications/splunk/splunk.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ -install -m 600 plugins/notifications/email/email.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ -install -m 600 plugins/notifications/sentinel/sentinel.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ +install -m 600 cmd/notification-slack/slack.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ +install -m 600 cmd/notification-http/http.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ +install -m 600 cmd/notification-splunk/splunk.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ +install -m 600 cmd/notification-email/email.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ +install -m 600 cmd/notification-sentinel/sentinel.yaml %{buildroot}%{_sysconfdir}/crowdsec/notifications/ %clean @@ -123,6 +123,7 @@ rm -rf %{buildroot} %config(noreplace) %{_sysconfdir}/%{name}/notifications/slack.yaml %config(noreplace) %{_sysconfdir}/%{name}/notifications/splunk.yaml %config(noreplace) %{_sysconfdir}/%{name}/notifications/email.yaml +%config(noreplace) %{_sysconfdir}/%{name}/notifications/sentinel.yaml %config(noreplace) %{_sysconfdir}/cron.daily/%{name} %{_unitdir}/%{name}.service @@ -130,6 +131,7 @@ rm -rf %{buildroot} %ghost %{_sysconfdir}/%{name}/hub/.index.json %ghost %{_localstatedir}/log/%{name}.log %dir /var/lib/%{name}/data/ +%dir %{_sysconfdir}/%{name}/hub %ghost %{_sysconfdir}/crowdsec/local_api_credentials.yaml %ghost 
%{_sysconfdir}/crowdsec/online_api_credentials.yaml diff --git a/scripts/test_env.ps1 b/scripts/test_env.ps1 index 74781396c..f81b61d5a 100644 --- a/scripts/test_env.ps1 +++ b/scripts/test_env.ps1 @@ -9,7 +9,7 @@ function show_help() { Write-Output ".\test_env.ps1 -d tests #creates test env in .\tests" } -function create_arbo() { +function create_tree() { $null = New-Item -ItemType Directory $data_dir $null = New-Item -ItemType Directory $log_dir $null = New-Item -ItemType Directory $config_dir @@ -37,8 +37,8 @@ function copy_file() { #envsubst < "./config/dev.yaml" > $BASE/dev.yaml Copy-Item .\config\dev.yaml $base\dev.yaml $plugins | ForEach-Object { - Copy-Item $plugins_dir\$notif_dir\$_\notification-$_.exe $base\$plugins_dir\notification-$_.exe - Copy-Item $plugins_dir\$notif_dir\$_\$_.yaml $config_dir\$notif_dir\$_.yaml + Copy-Item .\cmd\notification-$_\notification-$_.exe $base\$plugins_dir\notification-$_.exe + Copy-Item .\cmd\notification-$_\$_.yaml $config_dir\$notif_dir\$_.yaml } } @@ -76,9 +76,9 @@ $plugins_dir="plugins" $notif_dir="notifications" -Write-Output "Creating test arbo in $base" -create_arbo -Write-Output "Arbo created" +Write-Output "Creating test tree in $base" +create_tree +Write-Output "Tree created" Write-Output "Copying files" copy_file Write-Output "Files copied" diff --git a/scripts/test_env.sh b/scripts/test_env.sh index fc06b46ff..2e089ead0 100755 --- a/scripts/test_env.sh +++ b/scripts/test_env.sh @@ -3,10 +3,10 @@ BASE="./tests" usage() { - echo "Usage:" - echo " ./wizard.sh -h Display this help message." - echo " ./test_env.sh -d ./tests Create test environment in './tests' folder" - exit 0 + echo "Usage:" + echo " $0 -h Display this help message." + echo " $0 -d ./tests Create test environment in './tests' folder" + exit 0 } @@ -24,7 +24,7 @@ do exit 0 ;; *) # unknown option - log_err "Unknown argument ${key}." + echo "Unknown argument ${key}." 
>&2 usage exit 1 ;; @@ -57,7 +57,7 @@ log_info() { echo -e "[$date][INFO] $msg" } -create_arbo() { +create_tree() { mkdir -p "$BASE" mkdir -p "$DATA_DIR" mkdir -p "$LOG_DIR" @@ -86,8 +86,8 @@ copy_files() { envsubst < "./config/dev.yaml" > $BASE/dev.yaml for plugin in $PLUGINS do - cp $PLUGINS_DIR/$NOTIF_DIR/$plugin/notification-$plugin $BASE/$PLUGINS_DIR/notification-$plugin - cp $PLUGINS_DIR/$NOTIF_DIR/$plugin/$plugin.yaml $CONFIG_DIR/$NOTIF_DIR/$plugin.yaml + cp cmd/notification-$plugin/notification-$plugin $BASE/$PLUGINS_DIR/notification-$plugin + cp cmd/notification-$plugin/$plugin.yaml $CONFIG_DIR/$NOTIF_DIR/$plugin.yaml done } @@ -103,9 +103,9 @@ setup_api() { main() { - log_info "Creating test arboresence in $BASE" - create_arbo - log_info "Arboresence created" + log_info "Creating test tree in $BASE" + create_tree + log_info "Tree created" log_info "Copying needed files for tests environment" copy_files log_info "Files copied" diff --git a/test/ansible/debug_tools.yml b/test/ansible/debug_tools.yml index 769a973fe..d2e493f86 100644 --- a/test/ansible/debug_tools.yml +++ b/test/ansible/debug_tools.yml @@ -14,5 +14,6 @@ - zsh-autosuggestions - zsh-syntax-highlighting - zsh-theme-powerlevel9k + - silversearcher-ag when: - ansible_facts.os_family == "Debian" diff --git a/test/ansible/roles/make_fixture/tasks/main.yml b/test/ansible/roles/make_fixture/tasks/main.yml index 305cec3a6..908bcf4f1 100644 --- a/test/ansible/roles/make_fixture/tasks/main.yml +++ b/test/ansible/roles/make_fixture/tasks/main.yml @@ -52,7 +52,7 @@ # daemonize -> /usr/bin or /usr/local/sbin # pidof -> /usr/sbin # bash -> /opt/bash/bin - PATH: "/opt/bash/bin:{{ ansible_env.PATH }}:/usr/sbin:/usr/local/sbin" + PATH: "/opt/bash/bin:{{ ansible_env.PATH }}:{{ golang_install_dir }}/bin/:/usr/sbin:/usr/local/sbin" rescue: - name: "Read crowdsec.log" ansible.builtin.slurp: diff --git a/test/ansible/vagrant/common b/test/ansible/vagrant/common index 4bc237a7e..83d770675 100644 --- a/test/ansible/vagrant/common +++ b/test/ansible/vagrant/common @@ -14,14 +14,14 @@ end Vagrant.configure('2') do |config| config.vm.define 'crowdsec' - if ARGV.any? { |arg| arg == 'up' || arg == 'provision' } + if ARGV.any? { |arg| arg == 'up' || arg == 'provision' } && !ARGV.include?('--no-provision') unless ENV['DB_BACKEND'] $stderr.puts "\e[31mThe DB_BACKEND environment variable is not defined. 
Please set up the environment and try again.\e[0m" exit 1 end end - config.vm.provision 'shell', path: 'bootstrap' if File.exists?('bootstrap') + config.vm.provision 'shell', path: 'bootstrap' if File.exist?('bootstrap') config.vm.synced_folder '.', '/vagrant', disabled: true config.vm.provider :libvirt do |libvirt| diff --git a/test/ansible/vagrant/ubuntu-22.04-jammy/Vagrantfile b/test/ansible/vagrant/ubuntu-22.04-jammy/Vagrantfile index 9e17f71fb..9b399cae4 100644 --- a/test/ansible/vagrant/ubuntu-22.04-jammy/Vagrantfile +++ b/test/ansible/vagrant/ubuntu-22.04-jammy/Vagrantfile @@ -3,6 +3,7 @@ Vagrant.configure('2') do |config| config.vm.box = 'generic/ubuntu2204' config.vm.provision "shell", inline: <<-SHELL + sudo apt install -y kitty-terminfo SHELL end diff --git a/test/ansible/vagrant/ubuntu-22.10-kinetic/Vagrantfile b/test/ansible/vagrant/ubuntu-22.10-kinetic/Vagrantfile index 6c15b0a1e..e08b59568 100644 --- a/test/ansible/vagrant/ubuntu-22.10-kinetic/Vagrantfile +++ b/test/ansible/vagrant/ubuntu-22.10-kinetic/Vagrantfile @@ -3,6 +3,7 @@ Vagrant.configure('2') do |config| config.vm.box = 'generic/ubuntu2210' config.vm.provision "shell", inline: <<-SHELL + sudo apt install -y kitty-terminfo SHELL end diff --git a/test/ansible/vagrant/ubuntu-23.04-lunar/Vagrantfile b/test/ansible/vagrant/ubuntu-23.04-lunar/Vagrantfile index f40fb7bd5..367cf5279 100644 --- a/test/ansible/vagrant/ubuntu-23.04-lunar/Vagrantfile +++ b/test/ansible/vagrant/ubuntu-23.04-lunar/Vagrantfile @@ -3,6 +3,7 @@ Vagrant.configure('2') do |config| config.vm.box = 'bento/ubuntu-23.04' config.vm.provision "shell", inline: <<-SHELL + sudo apt install -y kitty-terminfo SHELL end diff --git a/test/ansible/vagrant/wizard/centos-8/Vagrantfile b/test/ansible/vagrant/wizard/centos-8/Vagrantfile index 9db09a4ce..4b469ad65 100644 --- a/test/ansible/vagrant/wizard/centos-8/Vagrantfile +++ b/test/ansible/vagrant/wizard/centos-8/Vagrantfile @@ -10,4 +10,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/common b/test/ansible/vagrant/wizard/common index be1820914..fda6d50f4 100644 --- a/test/ansible/vagrant/wizard/common +++ b/test/ansible/vagrant/wizard/common @@ -21,7 +21,7 @@ Vagrant.configure('2') do |config| end end - config.vm.provision 'shell', path: 'bootstrap' if File.exists?('bootstrap') + config.vm.provision 'shell', path: 'bootstrap' if File.exist?('bootstrap') config.vm.synced_folder '.', '/vagrant', disabled: true config.vm.provider :libvirt do |libvirt| diff --git a/test/ansible/vagrant/wizard/debian-10-buster/Vagrantfile b/test/ansible/vagrant/wizard/debian-10-buster/Vagrantfile index 3b10b312d..9602acb69 100644 --- a/test/ansible/vagrant/wizard/debian-10-buster/Vagrantfile +++ b/test/ansible/vagrant/wizard/debian-10-buster/Vagrantfile @@ -9,4 +9,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/debian-11-bullseye/Vagrantfile b/test/ansible/vagrant/wizard/debian-11-bullseye/Vagrantfile index 6dd7bb2fc..9184fb676 100644 --- a/test/ansible/vagrant/wizard/debian-11-bullseye/Vagrantfile +++ b/test/ansible/vagrant/wizard/debian-11-bullseye/Vagrantfile @@ -9,4 +9,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/debian-12-bookworm/Vagrantfile 
b/test/ansible/vagrant/wizard/debian-12-bookworm/Vagrantfile index 5ccf234eb..1a0a43eb2 100644 --- a/test/ansible/vagrant/wizard/debian-12-bookworm/Vagrantfile +++ b/test/ansible/vagrant/wizard/debian-12-bookworm/Vagrantfile @@ -9,4 +9,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/fedora-36/Vagrantfile b/test/ansible/vagrant/wizard/fedora-36/Vagrantfile index 969a8e70c..ac9a0319e 100644 --- a/test/ansible/vagrant/wizard/fedora-36/Vagrantfile +++ b/test/ansible/vagrant/wizard/fedora-36/Vagrantfile @@ -8,4 +8,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/ubuntu-22.04-jammy/Vagrantfile b/test/ansible/vagrant/wizard/ubuntu-22.04-jammy/Vagrantfile index c13d2f946..f1ebf43a0 100644 --- a/test/ansible/vagrant/wizard/ubuntu-22.04-jammy/Vagrantfile +++ b/test/ansible/vagrant/wizard/ubuntu-22.04-jammy/Vagrantfile @@ -3,9 +3,9 @@ Vagrant.configure('2') do |config| config.vm.box = 'generic/ubuntu2204' config.vm.provision "shell", inline: <<-SHELL - sudo apt install -y aptitude kitty-terminfo + sudo env DEBIAN_FRONTEND=noninteractive apt install -y aptitude kitty-terminfo SHELL end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vagrant/wizard/ubuntu-22.10-kinetic/Vagrantfile b/test/ansible/vagrant/wizard/ubuntu-22.10-kinetic/Vagrantfile index d0e2e3cda..5875587ee 100644 --- a/test/ansible/vagrant/wizard/ubuntu-22.10-kinetic/Vagrantfile +++ b/test/ansible/vagrant/wizard/ubuntu-22.10-kinetic/Vagrantfile @@ -8,4 +8,4 @@ Vagrant.configure('2') do |config| end common = '../common' -load common if File.exists?(common) +load common if File.exist?(common) diff --git a/test/ansible/vars/go.yml b/test/ansible/vars/go.yml index 0f60356b1..725a07104 100644 --- a/test/ansible/vars/go.yml +++ b/test/ansible/vars/go.yml @@ -1,5 +1,5 @@ # vim: set ft=yaml.ansible: --- -golang_version: "1.20.6" +golang_version: "1.21.3" golang_install_dir: "/opt/go/{{ golang_version }}" diff --git a/test/bats-detect/proftpd-deb.bats b/test/bats-detect/proftpd-deb.bats index b21ea466d..fce556caf 100644 --- a/test/bats-detect/proftpd-deb.bats +++ b/test/bats-detect/proftpd-deb.bats @@ -10,7 +10,8 @@ setup_file() { teardown_file() { load "../lib/teardown_file.sh" - deb-remove proftpd + systemctl stop proftpd.service || : + deb-remove proftpd proftpd-core } setup() { @@ -32,6 +33,7 @@ setup() { @test "proftpd: install" { run -0 deb-install proftpd + run -0 sudo systemctl unmask proftpd.service run -0 sudo systemctl enable proftpd.service } diff --git a/test/bats.mk b/test/bats.mk index 259bef379..35241b03e 100644 --- a/test/bats.mk +++ b/test/bats.mk @@ -77,9 +77,9 @@ bats-update-tools: # Build and installs crowdsec in a local directory. Rebuilds if already exists. 
bats-build: bats-environment @$(MKDIR) $(BIN_DIR) $(LOG_DIR) $(PID_DIR) $(BATS_PLUGIN_DIR) - @TEST_COVERAGE=$(TEST_COVERAGE) DEFAULT_CONFIGDIR=$(CONFIG_DIR) DEFAULT_DATADIR=$(DATA_DIR) $(MAKE) build + @$(MAKE) build DEBUG=1 TEST_COVERAGE=$(TEST_COVERAGE) DEFAULT_CONFIGDIR=$(CONFIG_DIR) DEFAULT_DATADIR=$(DATA_DIR) @install -m 0755 cmd/crowdsec/crowdsec cmd/crowdsec-cli/cscli $(BIN_DIR)/ - @install -m 0755 plugins/notifications/*/notification-* $(BATS_PLUGIN_DIR)/ + @install -m 0755 cmd/notification-*/notification-* $(BATS_PLUGIN_DIR)/ # Create a reusable package with initial configuration + data bats-fixture: bats-check-requirements bats-update-tools diff --git a/test/bats/01_crowdsec.bats b/test/bats/01_crowdsec.bats index a1a2861f6..2e38e0e6c 100644 --- a/test/bats/01_crowdsec.bats +++ b/test/bats/01_crowdsec.bats @@ -79,7 +79,7 @@ teardown() { rune -0 ./instance-crowdsec start-pid PID="$output" - assert_file_exist "$log_old" + assert_file_exists "$log_old" assert_file_contains "$log_old" "Starting processing data" logdir2=$(TMPDIR="${BATS_TEST_TMPDIR}" mktemp -u) @@ -113,7 +113,7 @@ teardown() { sleep 5 - assert_file_exist "$log_new" + assert_file_exists "$log_new" for ((i=0; i<10; i++)); do sleep 1 @@ -192,6 +192,12 @@ teardown() { } @test "crowdsec (disabled datasources)" { + if is_package_testing; then + # we can't hide journalctl in package testing + # because crowdsec is run from systemd + skip "n/a for package testing" + fi + config_set '.common.log_media="stdout"' # a datasource cannot run - missing journalctl command diff --git a/test/bats/01_cscli.bats b/test/bats/01_cscli.bats index 0664c5691..3e61bd807 100644 --- a/test/bats/01_cscli.bats +++ b/test/bats/01_cscli.bats @@ -102,12 +102,12 @@ teardown() { rune -0 cscli config show -o human assert_line --regexp ".*- URL +: http://127.0.0.1:8080/" - assert_line --regexp ".*- Login +: githubciXXXXXXXXXXXXXXXXXXXXXXXX" + assert_line --regexp ".*- Login +: githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?" 
assert_line --regexp ".*- Credentials File +: .*/local_api_credentials.yaml" rune -0 cscli config show -o json - rune -0 jq -c '.API.Client.Credentials | [.url,.login]' <(output) - assert_output '["http://127.0.0.1:8080/","githubciXXXXXXXXXXXXXXXXXXXXXXXX"]' + rune -0 jq -c '.API.Client.Credentials | [.url,.login[0:32]]' <(output) + assert_json '["http://127.0.0.1:8080/","githubciXXXXXXXXXXXXXXXXXXXXXXXX"]' } @test "cscli config show-yaml" { @@ -146,7 +146,7 @@ teardown() { # restore rm "${SIMULATION_YAML}" rune -0 cscli config restore "${backupdir}" - assert_file_exist "${SIMULATION_YAML}" + assert_file_exists "${SIMULATION_YAML}" # cleanup rm -rf -- "${backupdir:?}" @@ -283,7 +283,7 @@ teardown() { @test "cscli support dump (smoke test)" { rune -0 cscli support dump -f "$BATS_TEST_TMPDIR"/dump.zip - assert_file_exist "$BATS_TEST_TMPDIR"/dump.zip + assert_file_exists "$BATS_TEST_TMPDIR"/dump.zip } @test "cscli explain" { @@ -321,14 +321,14 @@ teardown() { rune -0 cscli doc refute_output refute_stderr - assert_file_exist "doc/cscli.md" + assert_file_exists "doc/cscli.md" assert_file_not_exist "doc/cscli_setup.md" # commands guarded by feature flags are not documented unless the feature flag is set export CROWDSEC_FEATURE_CSCLI_SETUP="true" rune -0 cscli doc - assert_file_exist "doc/cscli_setup.md" + assert_file_exists "doc/cscli_setup.md" } @test "feature.yaml for subcommands" { diff --git a/test/bats/04_capi.bats b/test/bats/04_capi.bats index 2cfabc7b7..ef933e10c 100644 --- a/test/bats/04_capi.bats +++ b/test/bats/04_capi.bats @@ -34,7 +34,7 @@ setup() { ./instance-crowdsec start for ((i=0; i<15; i++)); do sleep 2 - [[ $(cscli alerts list -a -o json 2>/dev/null || cscli alerts list -o json) != "null" ]] && break + [[ $(cscli alerts list -a -o json) != "[]" ]] && break done rune -0 cscli alerts list -a -o json @@ -45,7 +45,7 @@ setup() { @test "we have exactly one machine, localhost" { rune -0 cscli machines list -o json rune -0 jq -c '[. 
| length, .[0].machineId[0:32], .[0].isValidated, .[0].ipAddress]' <(output) - assert_output '[1,"githubciXXXXXXXXXXXXXXXXXXXXXXXX",true,"127.0.0.1"]' + assert_json '[1,"githubciXXXXXXXXXXXXXXXXXXXXXXXX",true,"127.0.0.1"]' } @test "no agent: capi status should be ok" { diff --git a/test/bats/07_setup.bats b/test/bats/07_setup.bats index c31120376..9d6b32d15 100644 --- a/test/bats/07_setup.bats +++ b/test/bats/07_setup.bats @@ -7,6 +7,8 @@ setup_file() { load "../lib/setup_file.sh" ./instance-data load HUB_DIR=$(config_get '.config_paths.hub_dir') + # remove trailing slash if any (like in default config.yaml from package) + HUB_DIR=${HUB_DIR%/} export HUB_DIR DETECT_YAML="${HUB_DIR}/detect.yaml" export DETECT_YAML @@ -68,7 +70,11 @@ teardown() { assert_line --partial "--skip-service strings ignore a service, don't recommend hub/datasources (can be repeated)" rune -1 cscli setup detect --detect-config /path/does/not/exist - assert_stderr --partial "detecting services: while reading file: open /path/does/not/exist: no such file or directory" + assert_stderr --partial "open /path/does/not/exist: no such file or directory" + + # - is stdin + rune -1 cscli setup detect --detect-config - <<< "{}" + assert_stderr --partial "detecting services: missing version tag (must be 1.0)" # rm -f "${HUB_DIR}/detect.yaml" } @@ -142,7 +148,7 @@ teardown() { EOT rune -1 cscli setup detect --list-supported-services --detect-config "$tempfile" - assert_stderr --partial "while parsing ${tempfile}: yaml: unmarshal errors:" + assert_stderr --partial "yaml: unmarshal errors:" rm -f "$tempfile" } diff --git a/test/bats/09_console.bats b/test/bats/09_console.bats new file mode 100644 index 000000000..d3cf5286d --- /dev/null +++ b/test/bats/09_console.bats @@ -0,0 +1,102 @@ +#!/usr/bin/env bats +# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si: + +set -u + +setup_file() { + load "../lib/setup_file.sh" +} + +teardown_file() { + load "../lib/teardown_file.sh" +} + +setup() { + load "../lib/setup.sh" + load "../lib/bats-file/load.bash" + ./instance-data load + config_enable_capi + + config_set "$(config_get '.api.server.online_client.credentials_path')" ' + .url="https://api.crowdsec.net/" | + .login="test" | + .password="test" + ' +} + +#---------- + +@test "cscli console status" { + rune -0 cscli console status + assert_output --partial "Option Name" + assert_output --partial "Activated" + assert_output --partial "Description" + assert_output --partial "custom" + assert_output --partial "manual" + assert_output --partial "tainted" + assert_output --partial "context" + assert_output --partial "console_management" + rune -0 cscli console status -o json + assert_json - <<- EOT + { + "console_management": false, + "context": false, + "custom": true, + "manual": false, + "tainted": true + } + EOT + rune -0 cscli console status -o raw + assert_output - <<-EOT + option,enabled + manual,false + custom,true + tainted,true + context,false + console_management,false + EOT +} + +@test "cscli console enable" { + rune -0 cscli console enable manual --debug + assert_stderr --partial "manual set to true" + assert_stderr --partial "[manual] have been enabled" + rune -0 cscli console enable manual --debug + assert_stderr --partial "manual already set to true" + assert_stderr --partial "[manual] have been enabled" + rune -0 cscli console enable manual context --debug + assert_stderr --partial "context set to true" + assert_stderr --partial "[manual context] have been enabled" + rune -0 cscli console enable --all --debug + assert_stderr 
--partial "custom already set to true" + assert_stderr --partial "manual already set to true" + assert_stderr --partial "tainted already set to true" + assert_stderr --partial "context already set to true" + assert_stderr --partial "All features have been enabled successfully" + CROWDSEC_FEATURE_PAPI_CLIENT=true rune -0 cscli console enable --all --debug + assert_stderr --partial "console_management set to true" + rune -1 cscli console enable tralala + assert_stderr --partial "unknown flag tralala" +} + +@test "cscli console disable" { + rune -0 cscli console disable tainted --debug + assert_stderr --partial "tainted set to false" + assert_stderr --partial "[tainted] have been disabled" + rune -0 cscli console disable tainted --debug + assert_stderr --partial "tainted already set to false" + assert_stderr --partial "[tainted] have been disabled" + rune -0 cscli console disable tainted custom --debug + assert_stderr --partial "custom set to false" + assert_stderr --partial "[tainted custom] have been disabled" + rune -0 cscli console disable --all --debug + assert_stderr --partial "custom already set to false" + assert_stderr --partial "manual already set to false" + assert_stderr --partial "tainted already set to false" + assert_stderr --partial "context already set to false" + assert_stderr --partial "All features have been disabled" + CROWDSEC_FEATURE_PAPI_CLIENT=true rune -0 cscli console disable --all --debug + assert_stderr --partial "console_management already set to false" + rune -1 cscli console disable tralala + assert_stderr --partial "unknown flag tralala" +} diff --git a/test/bats/13_capi_whitelists.bats b/test/bats/13_capi_whitelists.bats new file mode 100644 index 000000000..491de6498 --- /dev/null +++ b/test/bats/13_capi_whitelists.bats @@ -0,0 +1,76 @@ +#!/usr/bin/env bats +# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si: + +set -u + +setup_file() { + load "../lib/setup_file.sh" + CONFIG_DIR=$(dirname "$CONFIG_YAML") + CAPI_WHITELISTS_YAML="$CONFIG_DIR/capi-whitelists.yaml" + export CAPI_WHITELISTS_YAML +} + +teardown_file() { + load "../lib/teardown_file.sh" +} + +setup() { + load "../lib/setup.sh" + load "../lib/bats-file/load.bash" + ./instance-data load + config_set '.api.server.capi_whitelists_path=strenv(CAPI_WHITELISTS_YAML)' +} + +teardown() { + ./instance-crowdsec stop +} + +#---------- + +@test "capi_whitelists: file missing" { + rune -1 timeout 1s "${CROWDSEC}" + assert_stderr --partial "capi whitelist file '$CAPI_WHITELISTS_YAML' does not exist" +} + +@test "capi_whitelists: error on open" { + echo > "$CAPI_WHITELISTS_YAML" + chmod 000 "$CAPI_WHITELISTS_YAML" + rune -1 timeout 1s "${CROWDSEC}" + assert_stderr --partial "while opening capi whitelist file: open $CAPI_WHITELISTS_YAML: permission denied" +} + +@test "capi_whitelists: empty file" { + echo > "$CAPI_WHITELISTS_YAML" + rune -1 timeout 1s "${CROWDSEC}" + assert_stderr --partial "while parsing capi whitelist file '$CAPI_WHITELISTS_YAML': empty file" +} + +@test "capi_whitelists: empty lists" { + echo '{"ips": [], "cidrs": []}' > "$CAPI_WHITELISTS_YAML" + rune -124 timeout 1s "${CROWDSEC}" +} + +@test "capi_whitelists: bad ip" { + echo '{"ips": ["blahblah"], "cidrs": []}' > "$CAPI_WHITELISTS_YAML" + rune -1 timeout 1s "${CROWDSEC}" + assert_stderr --partial "while parsing capi whitelist file '$CAPI_WHITELISTS_YAML': invalid IP address: blahblah" +} + +@test "capi_whitelists: bad cidr" { + echo '{"ips": [], "cidrs": ["blahblah"]}' > "$CAPI_WHITELISTS_YAML" + rune -1 timeout 1s "${CROWDSEC}" + assert_stderr 
--partial "while parsing capi whitelist file '$CAPI_WHITELISTS_YAML': invalid CIDR address: blahblah" +} + +@test "capi_whitelists: file with ip and cidr values" { + cat <<-EOT > "$CAPI_WHITELISTS_YAML" + ips: + - 1.2.3.4 + - 2.3.4.5 + cidrs: + - 1.2.3.0/24 + EOT + + config_set '.common.log_level="trace"' + rune -0 ./instance-crowdsec start +} diff --git a/test/bats/40_cold-logs.bats b/test/bats/40_cold-logs.bats index ad4d5233c..21c0615c7 100644 --- a/test/bats/40_cold-logs.bats +++ b/test/bats/40_cold-logs.bats @@ -66,7 +66,7 @@ setup() { @test "1.1.1.172 has not been banned (range/NOT-contained: -r 1.1.2.0/24)" { rune -0 cscli decisions list -r 1.1.2.0/24 -o json - assert_output 'null' + assert_json '[]' } @test "1.1.1.172 has been banned (exact: -i 1.1.1.172)" { @@ -77,5 +77,5 @@ setup() { @test "1.1.1.173 has not been banned (exact: -i 1.1.1.173)" { rune -0 cscli decisions list -i 1.1.1.173 -o json - assert_output 'null' + assert_json '[]' } diff --git a/test/bats/50_simulation.bats b/test/bats/50_simulation.bats index 578dcf81a..0add1e816 100644 --- a/test/bats/50_simulation.bats +++ b/test/bats/50_simulation.bats @@ -54,7 +54,7 @@ setup() { rune -0 cscli simulation enable crowdsecurity/ssh-bf fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api rune -0 cscli decisions list --no-simu -o json - assert_output 'null' + assert_json '[]' } @test "global simulation, listing non-simulated: expect no decision" { @@ -62,5 +62,5 @@ setup() { rune -0 cscli simulation enable --global fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api rune -0 cscli decisions list --no-simu -o json - assert_output 'null' + assert_json '[]' } diff --git a/test/bats/70_http_plugin.bats b/test/bats/70_plugin_http.bats similarity index 100% rename from test/bats/70_http_plugin.bats rename to test/bats/70_plugin_http.bats diff --git a/test/bats/71_dummy_plugin.bats b/test/bats/71_plugin_dummy.bats similarity index 94% rename from test/bats/71_dummy_plugin.bats rename to test/bats/71_plugin_dummy.bats index 78352c514..95b64fea0 100644 --- a/test/bats/71_dummy_plugin.bats +++ b/test/bats/71_plugin_dummy.bats @@ -5,7 +5,7 @@ set -u setup_file() { load "../lib/setup_file.sh" - [[ -n "${PACKAGE_TESTING}" ]] && return + is_package_testing && return ./instance-data load @@ -51,7 +51,7 @@ teardown_file() { } setup() { - [[ -n "${PACKAGE_TESTING}" ]] && skip + is_package_testing && skip load "../lib/setup.sh" } diff --git a/test/bats/72_plugin_badconfig.bats b/test/bats/72_plugin_badconfig.bats index 9640e3330..4f325b0f9 100644 --- a/test/bats/72_plugin_badconfig.bats +++ b/test/bats/72_plugin_badconfig.bats @@ -73,11 +73,9 @@ teardown() { rune -0 yq -i '.name="email_default"' "$CONFIG_DIR/notifications/http.yaml" # enable a notification, otherwise plugins are ignored config_set "${PROFILES_PATH}" '.notifications=["slack_default"]' - # we want to check the logs + # the slack plugin may fail or not, but we just need the logs config_set '.common.log_media="stdout"' - # the command will fail because slack_deault is not working - run -1 --separate-stderr timeout 2s "${CROWDSEC}" - # but we have what we wanted + rune timeout 2s "${CROWDSEC}" assert_stderr --partial "notification 'email_default' is defined multiple times" } diff --git a/test/bats/73_plugin_formatting.bats b/test/bats/73_plugin_formatting.bats new file mode 100644 index 000000000..153193fb1 --- /dev/null +++ b/test/bats/73_plugin_formatting.bats @@ -0,0 +1,65 @@ +#!/usr/bin/env bats +# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si: + +set 
-u + +setup_file() { + load "../lib/setup_file.sh" + is_package_testing && return + + ./instance-data load + + tempfile=$(TMPDIR="${BATS_FILE_TMPDIR}" mktemp) + export tempfile + + DUMMY_YAML="$(config_get '.config_paths.notification_dir')/dummy.yaml" + + # we test the template that is suggested in the email notification + # the $alert is not a shell variable + # shellcheck disable=SC2016 + config_set "${DUMMY_YAML}" ' + .group_wait="5s" | + .group_threshold=2 | + .output_file=strenv(tempfile) | + .format=" {{range . -}} {{$alert := . -}} {{range .Decisions -}}

{{.Value}} will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine {{$alert.MachineID}}.

CrowdSec CTI

{{end -}} {{end -}} " + ' + + config_set "$(config_get '.api.server.profiles_path')" ' + .notifications=["dummy_default"] | + .filters=["Alert.GetScope() == \"Ip\""] + ' + + config_set ' + .plugin_config.user="" | + .plugin_config.group="" + ' + + ./instance-crowdsec start +} + +teardown_file() { + load "../lib/teardown_file.sh" +} + +setup() { + is_package_testing && skip + load "../lib/setup.sh" +} + +#---------- + +@test "add two bans" { + rune -0 cscli decisions add --ip 1.2.3.4 --duration 30s + assert_stderr --partial 'Decision successfully added' + + rune -0 cscli decisions add --ip 1.2.3.5 --duration 30s + assert_stderr --partial 'Decision successfully added' + sleep 2 +} + +@test "expected 1 notification" { + rune -0 cat "${tempfile}" + assert_output - <<-EOT +

1.2.3.4 will get ban for next 30s for triggering manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX' on machine githubciXXXXXXXXXXXXXXXXXXXXXXXX.

CrowdSec CTI

1.2.3.5 will get ban for next 30s for triggering manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX' on machine githubciXXXXXXXXXXXXXXXXXXXXXXXX.

CrowdSec CTI

+ EOT +} diff --git a/test/bats/80_alerts.bats b/test/bats/80_alerts.bats index 0f70223b6..e0fdcb022 100644 --- a/test/bats/80_alerts.bats +++ b/test/bats/80_alerts.bats @@ -118,7 +118,7 @@ teardown() { rune -0 cscli alerts list --until 200d -o human assert_output "No active alerts" rune -0 cscli alerts list --until 200d -o json - assert_output "null" + assert_json "[]" rune -0 cscli alerts list --until 200d -o raw assert_output "id,scope,value,reason,country,as,decisions,created_at" rune -0 cscli alerts list --until 200d -o raw --machine @@ -172,7 +172,7 @@ teardown() { rune -0 cscli alerts delete -i 1.2.3.4 assert_stderr --partial 'alert(s) deleted' rune -0 cscli decisions list -o json - assert_output null + assert_json '[]' } @test "cscli alerts delete (must ignore the query limit)" { diff --git a/test/bats/81_alert_context.bats b/test/bats/81_alert_context.bats index 5dbcc7334..6dd6100b9 100644 --- a/test/bats/81_alert_context.bats +++ b/test/bats/81_alert_context.bats @@ -40,7 +40,12 @@ teardown() { type: syslog EOT - CONTEXT_YAML=$(config_get '.crowdsec_service.console_context_path') + # we set the path here because the default is empty + CONFIG_DIR=$(dirname "$CONFIG_YAML") + CONTEXT_YAML="$CONFIG_DIR/console/context.yaml" + export CONTEXT_YAML + config_set '.crowdsec_service.console_context_path=strenv(CONTEXT_YAML)' + mkdir -p "$CONFIG_DIR/console" cat <<-EOT >"${CONTEXT_YAML}" target_user: diff --git a/test/bats/90_decisions.bats b/test/bats/90_decisions.bats index bcb410de9..f2464084a 100644 --- a/test/bats/90_decisions.bats +++ b/test/bats/90_decisions.bats @@ -163,7 +163,7 @@ teardown() { whatever EOT assert_stderr --partial 'Parsing values' - assert_stderr --partial 'API error: unable to create alerts: whatever: invalid ip address / range' + assert_stderr --partial 'creating alert decisions: whatever: invalid ip address / range' #---------- # Batch diff --git a/test/bats/97_ipv4_single.bats b/test/bats/97_ipv4_single.bats index c42836071..1ada1c464 100644 --- a/test/bats/97_ipv4_single.bats +++ b/test/bats/97_ipv4_single.bats @@ -33,7 +33,7 @@ api() { # delete community pull rune -0 cscli decisions delete --all rune -0 cscli decisions list -o json - assert_output 'null' + assert_json '[]' } @test "API - first decisions list: must be empty" { @@ -74,7 +74,7 @@ api() { @test "CLI - decision for 1.2.3.5" { rune -0 cscli decisions list -i '1.2.3.5' -o json - assert_output 'null' + assert_json '[]' } @test "API - decision for 1.2.3.5" { @@ -86,7 +86,7 @@ api() { @test "CLI - decision for 1.2.3.0/24" { rune -0 cscli decisions list -r '1.2.3.0/24' -o json - assert_output 'null' + assert_json '[]' } @test "API - decision for 1.2.3.0/24" { diff --git a/test/bats/97_ipv6_single.bats b/test/bats/97_ipv6_single.bats index 41948fb25..ffbfc125b 100644 --- a/test/bats/97_ipv6_single.bats +++ b/test/bats/97_ipv6_single.bats @@ -33,7 +33,7 @@ api() { # delete community pull rune -0 cscli decisions delete --all rune -0 cscli decisions list -o json - assert_output 'null' + assert_json '[]' } @test "adding decision for ip 1111:2222:3333:4444:5555:6666:7777:8888" { @@ -67,7 +67,7 @@ api() { @test "CLI - decisions for ip 1211:2222:3333:4444:5555:6666:7777:8888" { rune -0 cscli decisions list -i '1211:2222:3333:4444:5555:6666:7777:8888' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip 1211:2222:3333:4444:5555:6666:7777:888" { @@ -77,7 +77,7 @@ api() { @test "CLI - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8887" { rune -0 cscli decisions list -i 
'1111:2222:3333:4444:5555:6666:7777:8887' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8887" { @@ -87,7 +87,7 @@ api() { @test "CLI - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/48" { rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/48' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/48" { @@ -109,7 +109,7 @@ api() { @test "CLI - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { @@ -141,7 +141,7 @@ api() { @test "CLI - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8889 after delete" { rune -0 cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8889' -o json - assert_output 'null' + assert_json '[]' } @test "deleting decision for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { @@ -151,5 +151,5 @@ api() { @test "CLI - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/64 after delete" { rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json --contained - assert_output 'null' + assert_json '[]' } diff --git a/test/bats/98_ipv4_range.bats b/test/bats/98_ipv4_range.bats index 1983225b9..b0f6f4829 100644 --- a/test/bats/98_ipv4_range.bats +++ b/test/bats/98_ipv4_range.bats @@ -33,7 +33,7 @@ api() { # delete community pull rune -0 cscli decisions delete --all rune -0 cscli decisions list -o json - assert_output 'null' + assert_json '[]' } @test "adding decision for range 4.4.4.0/24" { @@ -69,7 +69,7 @@ api() { @test "CLI - decisions for ip contained in 4.4.4." { rune -0 cscli decisions list -i '4.4.4.4' -o json --contained - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip contained in 4.4.4." { @@ -79,7 +79,7 @@ api() { @test "CLI - decisions for ip 5.4.4." { rune -0 cscli decisions list -i '5.4.4.3' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip 5.4.4." 
{ @@ -89,7 +89,7 @@ api() { @test "CLI - decisions for range 4.4.0.0/1" { rune -0 cscli decisions list -r '4.4.0.0/16' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range 4.4.0.0/1" { @@ -125,7 +125,7 @@ api() { @test "CLI - decisions for range 4.4.3.2/2" { rune -0 cscli decisions list -r '4.4.3.2/28' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range 4.4.3.2/2" { diff --git a/test/bats/98_ipv6_range.bats b/test/bats/98_ipv6_range.bats index b85f0dfcd..d3c347583 100644 --- a/test/bats/98_ipv6_range.bats +++ b/test/bats/98_ipv6_range.bats @@ -33,7 +33,7 @@ api() { # delete community pull rune -0 cscli decisions delete --all rune -0 cscli decisions list -o json - assert_output 'null' + assert_json '[]' } @test "adding decision for range aaaa:2222:3333:4444::/64" { @@ -69,7 +69,7 @@ api() { @test "CLI - decisions for ip aaaa:2222:3333:4445:5555:6666:7777:8888" { rune -0 cscli decisions list -i 'aaaa:2222:3333:4445:5555:6666:7777:8888' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip aaaa:2222:3333:4445:5555:6666:7777:8888" { @@ -79,7 +79,7 @@ api() { @test "CLI - decisions for ip aaa1:2222:3333:4444:5555:6666:7777:8887" { rune -0 cscli decisions list -i 'aaa1:2222:3333:4444:5555:6666:7777:8887' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip aaa1:2222:3333:4444:5555:6666:7777:8887" { @@ -103,7 +103,7 @@ api() { @test "CLI - decisions for range aaaa:2222:3333:4441:5555::/80" { rune -0 cscli decisions list -r 'aaaa:2222:3333:4441:5555::/80' -o json - assert_output 'null' + assert_json '[]' } @@ -114,7 +114,7 @@ api() { @test "CLI - decisions for range aaa1:2222:3333:4444:5555::/80" { rune -0 cscli decisions list -r 'aaa1:2222:3333:4444:5555::/80' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range aaa1:2222:3333:4444:5555::/80" { @@ -126,7 +126,7 @@ api() { @test "CLI - decisions for range aaaa:2222:3333:4444:5555:6666:7777:8888/48" { rune -0 cscli decisions list -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for range aaaa:2222:3333:4444:5555:6666:7777:8888/48" { @@ -148,7 +148,7 @@ api() { @test "CLI - decisions for ip/range in aaaa:2222:3333:4445:5555:6666:7777:8888/48" { rune -0 cscli decisions list -r 'aaaa:2222:3333:4445:5555:6666:7777:8888/48' -o json - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip/range in aaaa:2222:3333:4445:5555:6666:7777:8888/48" { @@ -177,7 +177,7 @@ api() { @test "CLI - decisions for ip bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { rune -0 cscli decisions list -o json -i 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' - assert_output 'null' + assert_json '[]' } @test "API - decisions for ip in bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { @@ -192,7 +192,7 @@ api() { @test "CLI - decisions for range aaaa:2222:3333:4444::/64 after delete" { rune -0 cscli decisions list -o json -r 'aaaa:2222:3333:4444::/64' - assert_output 'null' + assert_json '[]' } @test "adding decision for ip bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { diff --git a/test/bats/testdata/explain/explain-log.txt b/test/bats/testdata/explain/explain-log.txt index cf83570db..aae9e8098 100644 --- a/test/bats/testdata/explain/explain-log.txt +++ b/test/bats/testdata/explain/explain-log.txt @@ -5,7 +5,8 @@ line: Sep 19 18:33:22 scw-d95986 sshd[24347]: pam_unix(sshd:auth): authenticatio | └ 🟢 crowdsecurity/sshd-logs (+8 ~1) ├ s02-enrich | ├ 🟢
crowdsecurity/dateparse-enrich (+2 ~2) - | └ 🟢 crowdsecurity/geoip-enrich (+10) + | ├ 🟢 crowdsecurity/geoip-enrich (+10) + | └ 🟢 crowdsecurity/whitelists (unchanged) ├-------- parser success 🟢 ├ Scenarios ├ 🟢 crowdsecurity/ssh-bf diff --git a/test/lib/bats-core b/test/lib/bats-core index 6636e2c2e..f7defb943 160000 --- a/test/lib/bats-core +++ b/test/lib/bats-core @@ -1 +1 @@ -Subproject commit 6636e2c2ef5ffe361535cb45fc61682c5ef46b71 +Subproject commit f7defb94362f2053a3e73d13086a167448ea9133 diff --git a/test/lib/bats-file b/test/lib/bats-file index 17fa557f6..cb914cdc1 160000 --- a/test/lib/bats-file +++ b/test/lib/bats-file @@ -1 +1 @@ -Subproject commit 17fa557f6fe28a327933e3fa32efef1d211caa5a +Subproject commit cb914cdc176da00e321d3bc92f88383698c701d6 diff --git a/test/lib/bats-support b/test/lib/bats-support index d140a6504..3c8fadc50 160000 --- a/test/lib/bats-support +++ b/test/lib/bats-support @@ -1 +1 @@ -Subproject commit d140a65044b2d6810381935ae7f0c94c7023c8c3 +Subproject commit 3c8fadc5097c9acfc96d836dced2bb598e48b009 diff --git a/test/lib/config/config-local b/test/lib/config/config-local index 6aba06a36..0e2c86692 100755 --- a/test/lib/config/config-local +++ b/test/lib/config/config-local @@ -61,8 +61,6 @@ config_generate() { ../config/online_api_credentials.yaml \ "${CONFIG_DIR}/" - cp ../config/context.yaml "${CONFIG_DIR}/console/" - cp ../config/detect.yaml \ "${HUB_DIR}" @@ -76,7 +74,7 @@ config_generate() { type: syslog EOT - cp ../plugins/notifications/*/{http,email,slack,splunk,dummy,sentinel}.yaml \ + cp ../cmd/notification-*/*.yaml \ "${CONFIG_DIR}/notifications/" yq e ' @@ -99,7 +97,6 @@ config_generate() { .api.client.credentials_path=strenv(CONFIG_DIR)+"/local_api_credentials.yaml" | .api.server.profiles_path=strenv(CONFIG_DIR)+"/profiles.yaml" | .api.server.console_path=strenv(CONFIG_DIR)+"/console.yaml" | - .crowdsec_service.console_context_path=strenv(CONFIG_DIR) + "/console/context.yaml" | del(.api.server.online_client) ' ../config/config.yaml >"${CONFIG_DIR}/config.yaml" } @@ -113,7 +110,6 @@ make_init_data() { mkdir -p "${CONFIG_DIR}/notifications" mkdir -p "${CONFIG_DIR}/hub" mkdir -p "${CONFIG_DIR}/patterns" - mkdir -p "${CONFIG_DIR}/console" cp -a "../config/patterns" "${CONFIG_DIR}/" config_generate # XXX errors from instance-db should be reported...
@@ -123,6 +119,8 @@ make_init_data() { "$CSCLI" --warning machines add githubciXXXXXXXXXXXXXXXXXXXXXXXX --auto "$CSCLI" --warning hub update "$CSCLI" --warning collections install crowdsecurity/linux + # the whitelists are installed by the deb & rpm packages, so we test with the same config + "$CSCLI" --warning parsers install crowdsecurity/whitelists mkdir -p "$LOCAL_INIT_DIR" diff --git a/test/lib/setup_file.sh b/test/lib/setup_file.sh index 385e5f586..5e16340ec 100755 --- a/test/lib/setup_file.sh +++ b/test/lib/setup_file.sh @@ -116,18 +116,23 @@ output() { } export -f output +is_package_testing() { + [[ "$PACKAGE_TESTING" != "" ]] +} +export -f is_package_testing + is_db_postgres() { - [[ "${DB_BACKEND}" =~ ^postgres|pgx$ ]] + [[ "$DB_BACKEND" =~ ^postgres|pgx$ ]] } export -f is_db_postgres is_db_mysql() { - [[ "${DB_BACKEND}" == "mysql" ]] + [[ "$DB_BACKEND" == "mysql" ]] } export -f is_db_mysql is_db_sqlite() { - [[ "${DB_BACKEND}" == "sqlite" ]] + [[ "$DB_BACKEND" == "sqlite" ]] } export -f is_db_sqlite diff --git a/windows/installer/product.wxs b/windows/installer/product.wxs index 9027f021c..0696da34b 100644 --- a/windows/installer/product.wxs +++ b/windows/installer/product.wxs @@ -87,19 +87,19 @@ - + - + - + - + - + @@ -123,11 +123,11 @@ - - - - - + + + + + diff --git a/wizard.sh b/wizard.sh index 9bd14e13d..34e302877 100755 --- a/wizard.sh +++ b/wizard.sh @@ -77,17 +77,17 @@ smb " -HTTP_PLUGIN_BINARY="./plugins/notifications/http/notification-http" -SLACK_PLUGIN_BINARY="./plugins/notifications/slack/notification-slack" -SPLUNK_PLUGIN_BINARY="./plugins/notifications/splunk/notification-splunk" -EMAIL_PLUGIN_BINARY="./plugins/notifications/email/notification-email" -SENTINEL_PLUGIN_BINARY="./plugins/notifications/sentinel/notification-sentinel" +HTTP_PLUGIN_BINARY="./cmd/notification-http/notification-http" +SLACK_PLUGIN_BINARY="./cmd/notification-slack/notification-slack" +SPLUNK_PLUGIN_BINARY="./cmd/notification-splunk/notification-splunk" +EMAIL_PLUGIN_BINARY="./cmd/notification-email/notification-email" +SENTINEL_PLUGIN_BINARY="./cmd/notification-sentinel/notification-sentinel" -HTTP_PLUGIN_CONFIG="./plugins/notifications/http/http.yaml" -SLACK_PLUGIN_CONFIG="./plugins/notifications/slack/slack.yaml" -SPLUNK_PLUGIN_CONFIG="./plugins/notifications/splunk/splunk.yaml" -EMAIL_PLUGIN_CONFIG="./plugins/notifications/email/email.yaml" -SENTINEL_PLUGIN_CONFIG="./plugins/notifications/sentinel/sentinel.yaml" +HTTP_PLUGIN_CONFIG="./cmd/notification-http/http.yaml" +SLACK_PLUGIN_CONFIG="./cmd/notification-slack/slack.yaml" +SPLUNK_PLUGIN_CONFIG="./cmd/notification-splunk/splunk.yaml" +EMAIL_PLUGIN_CONFIG="./cmd/notification-email/email.yaml" +SENTINEL_PLUGIN_CONFIG="./cmd/notification-sentinel/sentinel.yaml" BACKUP_DIR=$(mktemp -d)