From af4bb350c06966e0c3ae3d0f414a61e6c1fadae1 Mon Sep 17 00:00:00 2001
From: "Thibault \"bui\" Koechlin"
Date: Mon, 4 Oct 2021 17:14:52 +0200
Subject: [PATCH] hubtests revamp + cscli explain (#988)
* New hubtest CI for scenarios/parsers from the hub
* New `cscli explain` command to visualize parsers/scenarios pipeline
Co-authored-by: alteredCoder
Co-authored-by: Sebastien Blot
Co-authored-by: he2ss
Co-authored-by: Cristian Nitescu
---
.github/workflows/ci_hubtest.yml | 74 +++
.../dispatch_create_tag_hub-tests.yaml | 24 -
README.md | 3 +-
cmd/crowdsec-cli/explain.go | 109 ++++
cmd/crowdsec-cli/hubtest.go | 585 +++++++++++++++++
cmd/crowdsec-cli/machines.go | 3 +
cmd/crowdsec-cli/main.go | 2 +
cmd/crowdsec/crowdsec.go | 69 ++
cmd/crowdsec/main.go | 12 +
cmd/crowdsec/output.go | 15 +-
pkg/acquisition/modules/file/file.go | 3 +
pkg/csconfig/crowdsec_service.go | 2 +-
pkg/csconfig/crowdsec_service_test.go | 13 +-
pkg/csconfig/profiles.go | 4 +-
pkg/cstest/coverage.go | 177 +++++
pkg/cstest/hubtest.go | 114 ++++
pkg/cstest/hubtest_item.go | 604 ++++++++++++++++++
pkg/cstest/parser_assert.go | 392 ++++++++++++
pkg/cstest/scenario_assert.go | 272 ++++++++
pkg/cstest/utils.go | 81 +++
pkg/cwhub/download.go | 3 +-
pkg/cwhub/loader.go | 1 +
pkg/exprhelpers/exprlib.go | 5 +
pkg/leakybucket/manager_run.go | 24 +
pkg/leakybucket/overflows.go | 10 +-
pkg/models/helpers.go | 30 +
pkg/parser/node.go | 2 +-
pkg/parser/runtime.go | 30 +-
pkg/types/event.go | 12 +-
29 files changed, 2629 insertions(+), 46 deletions(-)
create mode 100644 .github/workflows/ci_hubtest.yml
delete mode 100644 .github/workflows/dispatch_create_tag_hub-tests.yaml
create mode 100644 cmd/crowdsec-cli/explain.go
create mode 100644 cmd/crowdsec-cli/hubtest.go
create mode 100644 pkg/cstest/coverage.go
create mode 100644 pkg/cstest/hubtest.go
create mode 100644 pkg/cstest/hubtest_item.go
create mode 100644 pkg/cstest/parser_assert.go
create mode 100644 pkg/cstest/scenario_assert.go
create mode 100644 pkg/cstest/utils.go
diff --git a/.github/workflows/ci_hubtest.yml b/.github/workflows/ci_hubtest.yml
new file mode 100644
index 000000000..a2a01e1e0
--- /dev/null
+++ b/.github/workflows/ci_hubtest.yml
@@ -0,0 +1,74 @@
+name: Hub Tests
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ build:
+ name: Hub tests
+ runs-on: ubuntu-latest
+ steps:
+ - name: Set up Go 1.16
+ uses: actions/setup-go@v1
+ with:
+ go-version: 1.16
+ id: go
+ - name: Check out code into the Go module directory
+ uses: actions/checkout@v2
+ - id: keydb
+ uses: pozetroninc/github-action-get-latest-release@master
+ with:
+ owner: crowdsecurity
+ repo: crowdsec
+ excludes: draft
+ - name: Build release
+ run: BUILD_VERSION=${{ steps.keydb.outputs.release }} make release
+ - name: "Force machineid"
+ run: |
+ sudo chmod +w /etc/machine-id
+ echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
+ - name: Install release
+ run: |
+ cd crowdsec-${{ steps.keydb.outputs.release }}
+ sudo ./wizard.sh --unattended
+ - name: "Clone CrowdSec Hub"
+ run: |
+ git clone https://github.com/crowdsecurity/hub.git
+ - name: "Run tests"
+ run: |
+ cd hub/
+ git checkout hub_tests
+ cscli hubtest run --all --clean
+ echo "PARSERS_COV=$(cscli hubtest coverage --parsers --percent | cut -d '=' -f2)" >> $GITHUB_ENV
+ echo "SCENARIOS_COV=$(cscli hubtest coverage --scenarios --percent | cut -d '=' -f2)" >> $GITHUB_ENV
+        PARSERS_COV_NUMBER=$(cscli hubtest coverage --parsers --percent | cut -d '=' -f2 | tr -d '%')
+        SCENARIOS_COV_NUMBER=$(cscli hubtest coverage --scenarios --percent | cut -d '=' -f2 | tr -d '%')
+ echo "PARSER_BADGE_COLOR=$(if [ $PARSERS_COV_NUMBER -lt '70' ]; then echo 'red'; else echo 'green'; fi)" >> $GITHUB_ENV
+ echo "SCENARIO_BADGE_COLOR=$(if [ $SCENARIOS_COV_NUMBER -lt '70' ]; then echo 'red'; else echo 'green'; fi)" >> $GITHUB_ENV
+ - name: Create Parsers badge
+      if: github.ref == 'refs/heads/master'
+ uses: schneegans/dynamic-badges-action@v1.1.0
+ with:
+ auth: ${{ secrets.GIST_BADGES_SECRET }}
+ gistID: ${{ secrets.GIST_BADGES_ID }}
+ filename: crowdsec_parsers_badge.json
+ label: Hub Parsers
+ message: ${{ env.PARSERS_COV }}
+        color: ${{ env.PARSER_BADGE_COLOR }}
+ - name: Create Scenarios badge
+      if: github.ref == 'refs/heads/master'
+ uses: schneegans/dynamic-badges-action@v1.1.0
+ with:
+ auth: ${{ secrets.GIST_BADGES_SECRET }}
+ gistID: ${{ secrets.GIST_BADGES_ID }}
+ filename: crowdsec_scenarios_badge.json
+ label: Hub Scenarios
+ message: ${{ env.SCENARIOS_COV }}
+ color: ${{ env.SCENARIO_BADGE_COLOR }}
+
+
diff --git a/.github/workflows/dispatch_create_tag_hub-tests.yaml b/.github/workflows/dispatch_create_tag_hub-tests.yaml
deleted file mode 100644
index b22f8dc8e..000000000
--- a/.github/workflows/dispatch_create_tag_hub-tests.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: Dispatch to hub-tests when creating pre-release
-
-on:
- release:
- types: prereleased
-
-jobs:
- dispatch:
- name: dispatch to hub-tests
- runs-on: ubuntu-latest
- steps:
- - id: keydb
- uses: pozetroninc/github-action-get-latest-release@master
- with:
- owner: crowdsecurity
- repo: crowdsec
- excludes: prerelease, draft
- - name: Repository Dispatch
- uses: peter-evans/repository-dispatch@v1
- with:
- token: ${{ secrets.DISPATCH_TOKEN }}
- event-type: create_tag
- repository: crowdsecurity/hub-tests
- client-payload: '{"version": "${{ steps.keydb.outputs.release }}"}'
diff --git a/README.md b/README.md
index c3c37ab22..26e649e0a 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,8 @@
-
+
+
diff --git a/cmd/crowdsec-cli/explain.go b/cmd/crowdsec-cli/explain.go
new file mode 100644
index 000000000..d95f5d61a
--- /dev/null
+++ b/cmd/crowdsec-cli/explain.go
@@ -0,0 +1,109 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+
+ "github.com/crowdsecurity/crowdsec/pkg/cstest"
+ log "github.com/sirupsen/logrus"
+ "github.com/spf13/cobra"
+)
+
+func NewExplainCmd() *cobra.Command {
+ /* ---- HUB COMMAND */
+ var logFile string
+ var dsn string
+ var logLine string
+ var logType string
+
+ var cmdExplain = &cobra.Command{
+ Use: "explain",
+ Short: "Explain log pipeline",
+ Long: `
+Explain log pipeline
+ `,
+ Example: `
+cscli explain --file ./myfile.log --type nginx
+cscli explain --log "Sep 19 18:33:22 scw-d95986 sshd[24347]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=1.2.3.4" --type syslog
+cscli explain --dsn "file://myfile.log" --type nginx
+ `,
+ Args: cobra.ExactArgs(0),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+
+ if logType == "" || (logLine == "" && logFile == "" && dsn == "") {
+ cmd.Help()
+ fmt.Println()
+				fmt.Printf("Please provide --type flag and a log line (--log), a log file (--file) or a DSN (--dsn)\n")
+ os.Exit(1)
+ }
+
+ // we create a temporary log file if a log line has been provided
+ if logLine != "" {
+ logFile = "./cscli_test_tmp.log"
+ f, err := os.Create(logFile)
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer f.Close()
+
+ _, err = f.WriteString(logLine)
+
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+
+ if logFile != "" {
+ absolutePath, err := filepath.Abs(logFile)
+ if err != nil {
+					log.Fatalf("unable to get absolute path of '%s', exiting", logFile)
+ }
+ dsn = fmt.Sprintf("file://%s", absolutePath)
+ }
+
+ if dsn == "" {
+ log.Fatal("no acquisition (--file or --dsn) provided, can't run cscli test.")
+ }
+
+ cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", "./", "-no-api"}
+ crowdsecCmd := exec.Command("crowdsec", cmdArgs...)
+ output, err := crowdsecCmd.CombinedOutput()
+ if err != nil {
+ fmt.Println(string(output))
+ log.Fatalf("fail to run crowdsec for test: %v", err)
+ }
+
+ // rm the temporary log file if only a log line was provided
+ if logLine != "" {
+ if err := os.Remove(logFile); err != nil {
+ log.Fatalf("unable to remove tmp log file '%s': %+v", logFile, err)
+ }
+ }
+ parserDumpFile := filepath.Join("./", cstest.ParserResultFileName)
+ bucketStateDumpFile := filepath.Join("./", cstest.BucketPourResultFileName)
+
+ parserDump, err := cstest.LoadParserDump(parserDumpFile)
+ if err != nil {
+ log.Fatalf("unable to load parser dump result: %s", err)
+ }
+
+ bucketStateDump, err := cstest.LoadBucketPourDump(bucketStateDumpFile)
+ if err != nil {
+ log.Fatalf("unable to load bucket dump result: %s", err)
+ }
+
+ if err := cstest.DumpTree(*parserDump, *bucketStateDump); err != nil {
+				log.Fatal(err)
+ }
+ },
+ }
+ cmdExplain.PersistentFlags().StringVarP(&logFile, "file", "f", "", "Log file to test")
+ cmdExplain.PersistentFlags().StringVarP(&dsn, "dsn", "d", "", "DSN to test")
+	cmdExplain.PersistentFlags().StringVarP(&logLine, "log", "l", "", "Log line to test")
+ cmdExplain.PersistentFlags().StringVarP(&logType, "type", "t", "", "Type of the acquisition to test")
+
+ return cmdExplain
+}
diff --git a/cmd/crowdsec-cli/hubtest.go b/cmd/crowdsec-cli/hubtest.go
new file mode 100644
index 000000000..ba9bc7b4a
--- /dev/null
+++ b/cmd/crowdsec-cli/hubtest.go
@@ -0,0 +1,585 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "math"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/AlecAivazis/survey/v2"
+ "github.com/crowdsecurity/crowdsec/pkg/cstest"
+ "github.com/enescakir/emoji"
+ "github.com/olekukonko/tablewriter"
+ log "github.com/sirupsen/logrus"
+ "github.com/spf13/cobra"
+ "gopkg.in/yaml.v2"
+)
+
+var (
+ HubTest cstest.HubTest
+)
+
+func NewHubTestCmd() *cobra.Command {
+ /* ---- HUB COMMAND */
+ var hubPath string
+ var logType string
+ var crowdsecPath string
+ var cscliPath string
+
+ var cmdHubTest = &cobra.Command{
+ Use: "hubtest",
+		Short: "Run functional tests on hub configurations",
+ Long: `
+	Run functional tests on hub configurations (parsers, scenarios, collections...)
+ `,
+ Args: cobra.ExactArgs(0),
+ DisableAutoGenTag: true,
+ PersistentPreRun: func(cmd *cobra.Command, args []string) {
+ var err error
+ HubTest, err = cstest.NewHubTest(hubPath, crowdsecPath, cscliPath)
+ if err != nil {
+ log.Fatalf("unable to load hubtest: %+v", err)
+ }
+ },
+ }
+ cmdHubTest.PersistentFlags().StringVar(&hubPath, "hub", ".", "Path to hub folder")
+ cmdHubTest.PersistentFlags().StringVar(&crowdsecPath, "crowdsec", "crowdsec", "Path to crowdsec")
+ cmdHubTest.PersistentFlags().StringVar(&cscliPath, "cscli", "cscli", "Path to cscli")
+
+ parsers := []string{}
+ postoverflows := []string{}
+ scenarios := []string{}
+ var ignoreParsers bool
+
+ var cmdHubTestCreate = &cobra.Command{
+ Use: "create",
+ Short: "create [test_name]",
+ Example: `cscli hubtest create my-awesome-test --type syslog
+cscli hubtest create my-nginx-custom-test --type nginx
+cscli hubtest create my-scenario-test --parser crowdsecurity/nginx --scenario crowdsecurity/http-probing`,
+ Args: cobra.ExactArgs(1),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ testName := args[0]
+ testPath := filepath.Join(HubTest.HubTestPath, testName)
+			if _, err := os.Stat(testPath); err == nil {
+ log.Fatalf("test '%s' already exists in '%s', exiting", testName, testPath)
+ }
+
+ if logType == "" {
+				log.Fatalf("please provide a type (--type) for the test")
+ }
+
+ if err := os.MkdirAll(testPath, os.ModePerm); err != nil {
+ log.Fatalf("unable to create folder '%s': %+v", testPath, err)
+ }
+
+ // create empty log file
+ logFileName := fmt.Sprintf("%s.log", testName)
+ logFilePath := filepath.Join(testPath, logFileName)
+ logFile, err := os.Create(logFilePath)
+ if err != nil {
+ log.Fatal(err)
+ }
+ logFile.Close()
+
+ // create empty parser assertion file
+ parserAssertFilePath := filepath.Join(testPath, cstest.ParserAssertFileName)
+ parserAssertFile, err := os.Create(parserAssertFilePath)
+ if err != nil {
+ log.Fatal(err)
+ }
+ parserAssertFile.Close()
+
+ // create empty scenario assertion file
+ scenarioAssertFilePath := filepath.Join(testPath, cstest.ScenarioAssertFileName)
+ scenarioAssertFile, err := os.Create(scenarioAssertFilePath)
+ if err != nil {
+ log.Fatal(err)
+ }
+ scenarioAssertFile.Close()
+
+ parsers = append(parsers, "crowdsecurity/syslog-logs")
+ parsers = append(parsers, "crowdsecurity/dateparse-enrich")
+
+ if len(scenarios) == 0 {
+ scenarios = append(scenarios, "")
+ }
+
+ if len(postoverflows) == 0 {
+ postoverflows = append(postoverflows, "")
+ }
+
+ configFileData := &cstest.HubTestItemConfig{
+ Parsers: parsers,
+ Scenarios: scenarios,
+ PostOVerflows: postoverflows,
+ LogFile: logFileName,
+ LogType: logType,
+ IgnoreParsers: ignoreParsers,
+ }
+
+ configFilePath := filepath.Join(testPath, "config.yaml")
+ fd, err := os.OpenFile(configFilePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("open: %s", err)
+ }
+ data, err := yaml.Marshal(configFileData)
+ if err != nil {
+ log.Fatalf("marshal: %s", err)
+ }
+ _, err = fd.Write(data)
+ if err != nil {
+ log.Fatalf("write: %s", err)
+ }
+ if err := fd.Close(); err != nil {
+ log.Fatalf(" close: %s", err)
+ }
+ fmt.Println()
+ fmt.Printf(" Test name : %s\n", testName)
+ fmt.Printf(" Test path : %s\n", testPath)
+ fmt.Printf(" Log file : %s (please fill it with logs)\n", logFilePath)
+ fmt.Printf(" Parser assertion file : %s (please fill it with assertion)\n", parserAssertFilePath)
+ fmt.Printf(" Scenario assertion file : %s (please fill it with assertion)\n", scenarioAssertFilePath)
+ fmt.Printf(" Configuration File : %s (please fill it with parsers, scenarios...)\n", configFilePath)
+
+ },
+ }
+ cmdHubTestCreate.PersistentFlags().StringVarP(&logType, "type", "t", "", "Log type of the test")
+ cmdHubTestCreate.Flags().StringSliceVarP(&parsers, "parsers", "p", parsers, "Parsers to add to test")
+ cmdHubTestCreate.Flags().StringSliceVar(&postoverflows, "postoverflows", postoverflows, "Postoverflows to add to test")
+ cmdHubTestCreate.Flags().StringSliceVarP(&scenarios, "scenarios", "s", scenarios, "Scenarios to add to test")
+ cmdHubTestCreate.PersistentFlags().BoolVar(&ignoreParsers, "ignore-parsers", false, "Don't run test on parsers")
+ cmdHubTest.AddCommand(cmdHubTestCreate)
+
+ var noClean bool
+ var runAll bool
+ var forceClean bool
+ var cmdHubTestRun = &cobra.Command{
+ Use: "run",
+ Short: "run [test_name]",
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ if !runAll && len(args) == 0 {
+ cmd.Help()
+ fmt.Println("Please provide test to run or --all flag")
+ os.Exit(1)
+ }
+
+ if runAll {
+ if err := HubTest.LoadAllTests(); err != nil {
+ log.Fatalf("unable to load all tests: %+v", err)
+ }
+ } else {
+ for _, testName := range args {
+ _, err := HubTest.LoadTestItem(testName)
+ if err != nil {
+ log.Fatalf("unable to load test '%s': %s", testName, err)
+ }
+ }
+ }
+
+ for _, test := range HubTest.Tests {
+ if csConfig.Cscli.Output == "human" {
+ log.Infof("Running test '%s'", test.Name)
+ }
+ err := test.Run()
+ if err != nil {
+ log.Errorf("running test '%s' failed: %+v", test.Name, err)
+ }
+ }
+
+ },
+ PersistentPostRun: func(cmd *cobra.Command, args []string) {
+ success := true
+ testResult := make(map[string]bool)
+ for _, test := range HubTest.Tests {
+ if test.AutoGen {
+ if test.ParserAssert.AutoGenAssert {
+ log.Warningf("Assert file '%s' is empty, generating assertion:", test.ParserAssert.File)
+ fmt.Println()
+ fmt.Println(test.ParserAssert.AutoGenAssertData)
+ }
+ if test.ScenarioAssert.AutoGenAssert {
+ log.Warningf("Assert file '%s' is empty, generating assertion:", test.ScenarioAssert.File)
+ fmt.Println()
+ fmt.Println(test.ScenarioAssert.AutoGenAssertData)
+ }
+ if !noClean {
+ if err := test.Clean(); err != nil {
+ log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
+ }
+ }
+ fmt.Printf("\nPlease fill your assert file(s) for test '%s', exiting\n", test.Name)
+ os.Exit(1)
+ }
+ testResult[test.Name] = test.Success
+ if test.Success {
+ if csConfig.Cscli.Output == "human" {
+ log.Infof("Test '%s' passed successfully (%d assertions)\n", test.Name, test.ParserAssert.NbAssert+test.ScenarioAssert.NbAssert)
+ }
+ if !noClean {
+ if err := test.Clean(); err != nil {
+ log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
+ }
+ }
+ } else {
+ success = false
+ cleanTestEnv := false
+ if csConfig.Cscli.Output == "human" {
+ if len(test.ParserAssert.Fails) > 0 {
+ fmt.Println()
+ log.Errorf("Parser test '%s' failed (%d errors)\n", test.Name, len(test.ParserAssert.Fails))
+ for _, fail := range test.ParserAssert.Fails {
+ fmt.Printf("(L.%d) %s => %s\n", fail.Line, emoji.RedCircle, fail.Expression)
+ fmt.Printf(" Actual expression values:\n")
+ for key, value := range fail.Debug {
+ fmt.Printf(" %s = '%s'\n", key, strings.TrimSuffix(value, "\n"))
+ }
+ fmt.Println()
+ }
+ }
+ if len(test.ScenarioAssert.Fails) > 0 {
+ fmt.Println()
+ log.Errorf("Scenario test '%s' failed (%d errors)\n", test.Name, len(test.ScenarioAssert.Fails))
+ for _, fail := range test.ScenarioAssert.Fails {
+ fmt.Printf("(L.%d) %s => %s\n", fail.Line, emoji.RedCircle, fail.Expression)
+ fmt.Printf(" Actual expression values:\n")
+ for key, value := range fail.Debug {
+ fmt.Printf(" %s = '%s'\n", key, strings.TrimSuffix(value, "\n"))
+ }
+ fmt.Println()
+ }
+ }
+ if !forceClean {
+ prompt := &survey.Confirm{
+ Message: fmt.Sprintf("\nDo you want to remove runtime folder for test '%s'? (default: Yes)", test.Name),
+ Default: true,
+ }
+ if err := survey.AskOne(prompt, &cleanTestEnv); err != nil {
+ log.Fatalf("unable to ask to remove runtime folder: %s", err)
+ }
+ }
+ }
+
+ if cleanTestEnv || forceClean {
+ if err := test.Clean(); err != nil {
+ log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
+ }
+ }
+ }
+ }
+ if csConfig.Cscli.Output == "human" {
+ table := tablewriter.NewWriter(os.Stdout)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+
+ table.SetHeader([]string{"Test", "Result"})
+ for testName, success := range testResult {
+ status := emoji.CheckMarkButton.String()
+ if !success {
+ status = emoji.CrossMark.String()
+ }
+ table.Append([]string{testName, status})
+ }
+ table.Render()
+ } else if csConfig.Cscli.Output == "json" {
+ jsonResult := make(map[string][]string, 0)
+ jsonResult["success"] = make([]string, 0)
+ jsonResult["fail"] = make([]string, 0)
+ for testName, success := range testResult {
+ if success {
+ jsonResult["success"] = append(jsonResult["success"], testName)
+ } else {
+ jsonResult["fail"] = append(jsonResult["fail"], testName)
+ }
+ }
+ jsonStr, err := json.Marshal(jsonResult)
+ if err != nil {
+ log.Fatalf("unable to json test result: %s", err.Error())
+ }
+ fmt.Println(string(jsonStr))
+ }
+
+ if !success {
+ os.Exit(1)
+ }
+ },
+ }
+	cmdHubTestRun.Flags().BoolVar(&noClean, "no-clean", false, "Don't clean runtime environment if test succeeds")
+	cmdHubTestRun.Flags().BoolVar(&forceClean, "clean", false, "Clean runtime environment if test fails")
+ cmdHubTestRun.Flags().BoolVar(&runAll, "all", false, "Run all tests")
+ cmdHubTest.AddCommand(cmdHubTestRun)
+
+ var cmdHubTestClean = &cobra.Command{
+ Use: "clean",
+ Short: "clean [test_name]",
+ Args: cobra.MinimumNArgs(1),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ for _, testName := range args {
+ test, err := HubTest.LoadTestItem(testName)
+ if err != nil {
+ log.Fatalf("unable to load test '%s': %s", testName, err)
+ }
+ if err := test.Clean(); err != nil {
+ log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
+ }
+ }
+ },
+ }
+ cmdHubTest.AddCommand(cmdHubTestClean)
+
+ var cmdHubTestInfo = &cobra.Command{
+ Use: "info",
+ Short: "info [test_name]",
+ Args: cobra.MinimumNArgs(1),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ for _, testName := range args {
+ test, err := HubTest.LoadTestItem(testName)
+ if err != nil {
+ log.Fatalf("unable to load test '%s': %s", testName, err)
+ }
+ fmt.Println()
+ fmt.Printf(" Test name : %s\n", test.Name)
+ fmt.Printf(" Test path : %s\n", test.Path)
+ fmt.Printf(" Log file : %s\n", filepath.Join(test.Path, test.Config.LogFile))
+ fmt.Printf(" Parser assertion file : %s\n", filepath.Join(test.Path, cstest.ParserAssertFileName))
+ fmt.Printf(" Scenario assertion file : %s\n", filepath.Join(test.Path, cstest.ScenarioAssertFileName))
+ fmt.Printf(" Configuration File : %s\n", filepath.Join(test.Path, "config.yaml"))
+ }
+ },
+ }
+ cmdHubTest.AddCommand(cmdHubTestInfo)
+
+ var cmdHubTestList = &cobra.Command{
+ Use: "list",
+ Short: "list",
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ if err := HubTest.LoadAllTests(); err != nil {
+ log.Fatalf("unable to load all tests: %+v", err)
+ }
+
+ table := tablewriter.NewWriter(os.Stdout)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+ table.SetHeader([]string{"Name", "Path"})
+ for _, test := range HubTest.Tests {
+ table.Append([]string{test.Name, test.Path})
+ }
+ table.Render()
+
+ },
+ }
+ cmdHubTest.AddCommand(cmdHubTestList)
+
+ var showParserCov bool
+ var showScenarioCov bool
+ var showOnlyPercent bool
+ var cmdHubTestCoverage = &cobra.Command{
+ Use: "coverage",
+ Short: "coverage",
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ if err := HubTest.LoadAllTests(); err != nil {
+ log.Fatalf("unable to load all tests: %+v", err)
+ }
+ var err error
+ scenarioCoverage := []cstest.ScenarioCoverage{}
+ parserCoverage := []cstest.ParserCoverage{}
+ scenarioCoveragePercent := 0
+ parserCoveragePercent := 0
+ showAll := false
+
+ if !showScenarioCov && !showParserCov { // if both are false (flag by default), show both
+ showAll = true
+ }
+
+ if showParserCov || showAll {
+ parserCoverage, err = HubTest.GetParsersCoverage()
+ if err != nil {
+ log.Fatalf("while getting parser coverage : %s", err)
+ }
+ parserTested := 0
+ for _, test := range parserCoverage {
+ if test.TestsCount > 0 {
+ parserTested += 1
+ }
+ }
+ parserCoveragePercent = int(math.Round((float64(parserTested) / float64(len(parserCoverage)) * 100)))
+ }
+
+ if showScenarioCov || showAll {
+ scenarioCoverage, err = HubTest.GetScenariosCoverage()
+ if err != nil {
+ log.Fatalf("while getting scenario coverage: %s", err)
+ }
+ scenarioTested := 0
+ for _, test := range scenarioCoverage {
+ if test.TestsCount > 0 {
+ scenarioTested += 1
+ }
+ }
+ scenarioCoveragePercent = int(math.Round((float64(scenarioTested) / float64(len(scenarioCoverage)) * 100)))
+ }
+
+ if showOnlyPercent {
+ if showAll {
+ fmt.Printf("parsers=%d%%\nscenarios=%d%%", parserCoveragePercent, scenarioCoveragePercent)
+ } else if showParserCov {
+ fmt.Printf("parsers=%d%%", parserCoveragePercent)
+ } else if showScenarioCov {
+ fmt.Printf("scenarios=%d%%", scenarioCoveragePercent)
+ }
+ os.Exit(0)
+ }
+
+ if csConfig.Cscli.Output == "human" {
+ if showParserCov || showAll {
+ table := tablewriter.NewWriter(os.Stdout)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+
+ table.SetHeader([]string{"Parser", "Status", "Number of tests"})
+ parserTested := 0
+ for _, test := range parserCoverage {
+ status := emoji.RedCircle.String()
+ if test.TestsCount > 0 {
+ status = emoji.GreenCircle.String()
+ parserTested += 1
+ }
+						table.Append([]string{test.Parser, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn))})
+ }
+ table.Render()
+ }
+
+ if showScenarioCov || showAll {
+ table := tablewriter.NewWriter(os.Stdout)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+
+ table.SetHeader([]string{"Scenario", "Status", "Number of tests"})
+ for _, test := range scenarioCoverage {
+ status := emoji.RedCircle.String()
+ if test.TestsCount > 0 {
+ status = emoji.GreenCircle.String()
+ }
+						table.Append([]string{test.Scenario, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn))})
+ }
+ table.Render()
+ }
+ fmt.Println()
+ if showParserCov || showAll {
+ fmt.Printf("PARSERS : %d%% of coverage\n", parserCoveragePercent)
+ }
+ if showScenarioCov || showAll {
+ fmt.Printf("SCENARIOS : %d%% of coverage\n", scenarioCoveragePercent)
+ }
+ } else if csConfig.Cscli.Output == "json" {
+ dump, err := json.MarshalIndent(parserCoverage, "", " ")
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%s", dump)
+ dump, err = json.MarshalIndent(scenarioCoverage, "", " ")
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%s", dump)
+ } else {
+ log.Fatalf("only human/json output modes are supported")
+ }
+
+ },
+ }
+ cmdHubTestCoverage.PersistentFlags().BoolVar(&showOnlyPercent, "percent", false, "Show only percentages of coverage")
+ cmdHubTestCoverage.PersistentFlags().BoolVar(&showParserCov, "parsers", false, "Show only parsers coverage")
+ cmdHubTestCoverage.PersistentFlags().BoolVar(&showScenarioCov, "scenarios", false, "Show only scenarios coverage")
+ cmdHubTest.AddCommand(cmdHubTestCoverage)
+
+ var evalExpression string
+ var cmdHubTestEval = &cobra.Command{
+ Use: "eval",
+ Short: "eval [test_name]",
+ Args: cobra.ExactArgs(1),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ for _, testName := range args {
+ test, err := HubTest.LoadTestItem(testName)
+ if err != nil {
+ log.Fatalf("can't load test: %+v", err)
+ }
+ err = test.ParserAssert.LoadTest(test.ParserResultFile)
+ if err != nil {
+ log.Fatalf("can't load test results from '%s': %+v", test.ParserResultFile, err)
+ }
+ output, err := test.ParserAssert.EvalExpression(evalExpression)
+ if err != nil {
+					log.Fatal(err)
+ }
+				fmt.Print(output)
+ }
+ },
+ }
+ cmdHubTestEval.PersistentFlags().StringVarP(&evalExpression, "expr", "e", "", "Expression to eval")
+ cmdHubTest.AddCommand(cmdHubTestEval)
+
+ var cmdHubTestExplain = &cobra.Command{
+ Use: "explain",
+ Short: "explain [test_name]",
+ Args: cobra.ExactArgs(1),
+ DisableAutoGenTag: true,
+ Run: func(cmd *cobra.Command, args []string) {
+ for _, testName := range args {
+ test, err := HubTest.LoadTestItem(testName)
+ if err != nil {
+ log.Fatalf("can't load test: %+v", err)
+ }
+ err = test.ParserAssert.LoadTest(test.ParserResultFile)
+ if err != nil {
+ err := test.Run()
+ if err != nil {
+ log.Fatalf("running test '%s' failed: %+v", test.Name, err)
+ }
+ err = test.ParserAssert.LoadTest(test.ParserResultFile)
+ if err != nil {
+ log.Fatalf("unable to load parser result after run: %s", err)
+ }
+ }
+
+ err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
+ if err != nil {
+ err := test.Run()
+ if err != nil {
+ log.Fatalf("running test '%s' failed: %+v", test.Name, err)
+ }
+ err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
+ if err != nil {
+ log.Fatalf("unable to load scenario result after run: %s", err)
+ }
+ }
+
+ cstest.DumpTree(*test.ParserAssert.TestData, *test.ScenarioAssert.PourData)
+ }
+ },
+ }
+ cmdHubTest.AddCommand(cmdHubTestExplain)
+
+ return cmdHubTest
+}
diff --git a/cmd/crowdsec-cli/machines.go b/cmd/crowdsec-cli/machines.go
index 79c49cfa4..19a65fcfc 100644
--- a/cmd/crowdsec-cli/machines.go
+++ b/cmd/crowdsec-cli/machines.go
@@ -88,6 +88,9 @@ Note: This command requires database direct access, so is intended to be run on
DisableAutoGenTag: true,
PersistentPreRun: func(cmd *cobra.Command, args []string) {
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
+ if err != nil {
+ log.Errorf("local api : %s", err)
+ }
log.Fatal("Local API is disabled, please run this command on the local API machine")
}
if err := csConfig.LoadDBConfig(); err != nil {
diff --git a/cmd/crowdsec-cli/main.go b/cmd/crowdsec-cli/main.go
index d1c30bff6..b6c8473c9 100644
--- a/cmd/crowdsec-cli/main.go
+++ b/cmd/crowdsec-cli/main.go
@@ -174,6 +174,8 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
rootCmd.AddCommand(NewLapiCmd())
rootCmd.AddCommand(NewCompletionCmd())
rootCmd.AddCommand(NewConsoleCmd())
+ rootCmd.AddCommand(NewExplainCmd())
+ rootCmd.AddCommand(NewHubTestCmd())
if err := rootCmd.Execute(); err != nil {
log.Fatalf("While executing root command : %s", err)
}
diff --git a/cmd/crowdsec/crowdsec.go b/cmd/crowdsec/crowdsec.go
index d5ab81133..d489c4d1b 100644
--- a/cmd/crowdsec/crowdsec.go
+++ b/cmd/crowdsec/crowdsec.go
@@ -2,8 +2,11 @@ package main
import (
"fmt"
+ "os"
"sync"
+ "path/filepath"
+
"github.com/crowdsecurity/crowdsec/pkg/acquisition"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
@@ -13,6 +16,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/pkg/errors"
log "github.com/sirupsen/logrus"
+ "gopkg.in/yaml.v2"
)
func initCrowdsec(cConfig *csconfig.Config) (*parser.Parsers, error) {
@@ -149,10 +153,75 @@ func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config) {
log.Fatalf("unable to shutdown crowdsec routines: %s", err)
}
log.Debugf("everything is dead, return crowdsecTomb")
+ if dumpStates {
+ dumpParserState()
+ dumpOverflowState()
+ dumpBucketsPour()
+ os.Exit(0)
+ }
return nil
})
}
+func dumpBucketsPour() {
+ fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("open: %s", err)
+ }
+ out, err := yaml.Marshal(leaky.BucketPourCache)
+ if err != nil {
+ log.Fatalf("marshal: %s", err)
+ }
+ b, err := fd.Write(out)
+ if err != nil {
+ log.Fatalf("write: %s", err)
+ }
+ log.Tracef("wrote %d bytes", b)
+ if err := fd.Close(); err != nil {
+ log.Fatalf(" close: %s", err)
+ }
+}
+
+func dumpParserState() {
+
+ fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("open: %s", err)
+ }
+ out, err := yaml.Marshal(parser.StageParseCache)
+ if err != nil {
+ log.Fatalf("marshal: %s", err)
+ }
+ b, err := fd.Write(out)
+ if err != nil {
+ log.Fatalf("write: %s", err)
+ }
+ log.Tracef("wrote %d bytes", b)
+ if err := fd.Close(); err != nil {
+ log.Fatalf(" close: %s", err)
+ }
+}
+
+func dumpOverflowState() {
+
+ fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("open: %s", err)
+ }
+ out, err := yaml.Marshal(bucketOverflows)
+ if err != nil {
+ log.Fatalf("marshal: %s", err)
+ }
+ b, err := fd.Write(out)
+ if err != nil {
+ log.Fatalf("write: %s", err)
+ }
+ log.Tracef("wrote %d bytes", b)
+ if err := fd.Close(); err != nil {
+ log.Fatalf(" close: %s", err)
+ }
+}
+
func waitOnTomb() {
for {
select {
diff --git a/cmd/crowdsec/main.go b/cmd/crowdsec/main.go
index ca91160f7..4b6e831e4 100644
--- a/cmd/crowdsec/main.go
+++ b/cmd/crowdsec/main.go
@@ -14,6 +14,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/csplugin"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
+ "github.com/crowdsecurity/crowdsec/pkg/leakybucket"
leaky "github.com/crowdsecurity/crowdsec/pkg/leakybucket"
"github.com/crowdsecurity/crowdsec/pkg/parser"
"github.com/crowdsecurity/crowdsec/pkg/types"
@@ -160,6 +161,9 @@ func LoadAcquisition(cConfig *csconfig.Config) error {
return nil
}
+var dumpFolder string
+var dumpStates bool
+
func (f *Flags) Parse() {
flag.StringVar(&f.ConfigFile, "c", "/etc/crowdsec/config.yaml", "configuration file")
@@ -172,6 +176,7 @@ func (f *Flags) Parse() {
flag.BoolVar(&f.TestMode, "t", false, "only test configs")
flag.BoolVar(&f.DisableAgent, "no-cs", false, "disable crowdsec agent")
flag.BoolVar(&f.DisableAPI, "no-api", false, "disable local API")
+ flag.StringVar(&dumpFolder, "dump-data", "", "dump parsers/buckets raw outputs")
flag.Parse()
}
@@ -179,6 +184,13 @@ func (f *Flags) Parse() {
// LoadConfig return configuration parsed from configuration file
func LoadConfig(cConfig *csconfig.Config) error {
+ if dumpFolder != "" {
+ parser.ParseDump = true
+ parser.DumpFolder = dumpFolder
+ leakybucket.BucketPourTrack = true
+ dumpStates = true
+ }
+
if !flags.DisableAgent {
if err := cConfig.LoadCrowdsec(); err != nil {
return err
diff --git a/cmd/crowdsec/output.go b/cmd/crowdsec/output.go
index c48f7fec2..713fd6d56 100644
--- a/cmd/crowdsec/output.go
+++ b/cmd/crowdsec/output.go
@@ -58,6 +58,8 @@ func PushAlerts(alerts []types.RuntimeAlert, client *apiclient.ApiClient) error
return nil
}
+// bucketOverflows accumulates real overflow events (nil-Alert GC signals are
+// skipped) while in dump mode; dumpOverflowState serializes it for hubtests.
+var bucketOverflows []types.Event
+
func runOutput(input chan types.Event, overflow chan types.Event, buckets *leaky.Buckets,
postOverflowCTX parser.UnixParserCtx, postOverflowNodes []parser.Node, apiConfig csconfig.ApiCredentialsCfg) error {
@@ -129,7 +131,13 @@ LOOP:
}
break LOOP
case event := <-overflow:
-
+ //if the Alert is nil, it's to signal bucket is ready for GC, don't track this
+ if dumpStates && event.Overflow.Alert != nil {
+ if bucketOverflows == nil {
+ bucketOverflows = make([]types.Event, 0)
+ }
+ bucketOverflows = append(bucketOverflows, event)
+ }
/*if alert is empty and mapKey is present, the overflow is just to cleanup bucket*/
if event.Overflow.Alert == nil && event.Overflow.Mapkey != "" {
buckets.Bucket_map.Delete(event.Overflow.Mapkey)
@@ -149,10 +157,13 @@ LOOP:
log.Printf("[%s] is whitelisted, skip.", *event.Overflow.Alert.Message)
continue
}
+ if dumpStates {
+ continue
+ }
+
cacheMutex.Lock()
cache = append(cache, event.Overflow)
cacheMutex.Unlock()
-
}
}
diff --git a/pkg/acquisition/modules/file/file.go b/pkg/acquisition/modules/file/file.go
index 846f879b2..6add4fb7e 100644
--- a/pkg/acquisition/modules/file/file.go
+++ b/pkg/acquisition/modules/file/file.go
@@ -399,6 +399,9 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
}
scanner.Split(bufio.ScanLines)
for scanner.Scan() {
+ if scanner.Text() == "" {
+ continue
+ }
logger.Debugf("line %s", scanner.Text())
l := types.Line{}
l.Raw = scanner.Text()
diff --git a/pkg/csconfig/crowdsec_service.go b/pkg/csconfig/crowdsec_service.go
index 582e15c39..574c41ffd 100644
--- a/pkg/csconfig/crowdsec_service.go
+++ b/pkg/csconfig/crowdsec_service.go
@@ -62,7 +62,7 @@ func (c *Config) LoadCrowdsec() error {
c.Crowdsec.AcquisitionFiles = append(c.Crowdsec.AcquisitionFiles, files...)
}
if c.Crowdsec.AcquisitionDirPath == "" && c.Crowdsec.AcquisitionFilePath == "" {
- return fmt.Errorf("no acquisition_path nor acquisition_dir")
+ log.Warningf("no acquisition_path nor acquisition_dir")
}
if err := c.LoadSimulation(); err != nil {
return errors.Wrap(err, "load error (simulation)")
diff --git a/pkg/csconfig/crowdsec_service_test.go b/pkg/csconfig/crowdsec_service_test.go
index fbb92dcff..431b61c2b 100644
--- a/pkg/csconfig/crowdsec_service_test.go
+++ b/pkg/csconfig/crowdsec_service_test.go
@@ -139,9 +139,16 @@ func TestLoadCrowdsec(t *testing.T) {
Crowdsec: &CrowdsecServiceCfg{},
},
expectedResult: &CrowdsecServiceCfg{
- BucketsRoutinesCount: 0,
- ParserRoutinesCount: 0,
- OutputRoutinesCount: 0,
+ BucketsRoutinesCount: 1,
+ ParserRoutinesCount: 1,
+ OutputRoutinesCount: 1,
+ ConfigDir: configDirFullPath,
+ HubIndexFile: hubIndexFileFullPath,
+ DataDir: dataFullPath,
+ HubDir: hubFullPath,
+ SimulationConfig: &SimulationConfig{
+ Simulation: &falseBoolPtr,
+ },
},
},
{
diff --git a/pkg/csconfig/profiles.go b/pkg/csconfig/profiles.go
index 4cf790bfb..f5b779d06 100644
--- a/pkg/csconfig/profiles.go
+++ b/pkg/csconfig/profiles.go
@@ -19,8 +19,8 @@ type ProfileCfg struct {
Name string `yaml:"name,omitempty"`
Debug *bool `yaml:"debug,omitempty"`
Filters []string `yaml:"filters,omitempty"` //A list of OR'ed expressions. the models.Alert object
- RuntimeFilters []*vm.Program `json:"-"`
- DebugFilters []*exprhelpers.ExprDebugger `json:"-"`
+ RuntimeFilters []*vm.Program `json:"-" yaml:"-"`
+ DebugFilters []*exprhelpers.ExprDebugger `json:"-" yaml:"-"`
Decisions []models.Decision `yaml:"decisions,omitempty"`
OnSuccess string `yaml:"on_success,omitempty"` //continue or break
OnFailure string `yaml:"on_failure,omitempty"` //continue or break
diff --git a/pkg/cstest/coverage.go b/pkg/cstest/coverage.go
new file mode 100644
index 000000000..c86104e9e
--- /dev/null
+++ b/pkg/cstest/coverage.go
@@ -0,0 +1,177 @@
+package cstest
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/crowdsecurity/crowdsec/pkg/cwhub"
+ log "github.com/sirupsen/logrus"
+)
+
+// ParserCoverage tracks, for one hub parser, how many hubtest assertions
+// reference it and in which assert files it appears.
+type ParserCoverage struct {
+	Parser     string
+	TestsCount int
+	PresentIn  map[string]bool //poorman's set
+}
+
+// ScenarioCoverage tracks, for one hub scenario, how many hubtest assertions
+// reference it and in which assert files it appears.
+type ScenarioCoverage struct {
+	Scenario   string
+	TestsCount int
+	PresentIn  map[string]bool //poorman's set
+}
+
+// GetParsersCoverage returns, for every parser in the hub index (alphabetical
+// order), how many assertion lines in .tests/*/parser.assert reference it and
+// in which files. Matching tolerates author-prefixed vs short names and a
+// trailing "-logs" suffix on the asserted name.
+func (h *HubTest) GetParsersCoverage() ([]ParserCoverage, error) {
+	var coverage []ParserCoverage
+	if _, ok := h.HubIndex.Data[cwhub.PARSERS]; !ok {
+		return coverage, fmt.Errorf("no parsers in hub index")
+	}
+	//populate from hub, iterate in alphabetical order
+	var pkeys []string
+	for pname := range h.HubIndex.Data[cwhub.PARSERS] {
+		pkeys = append(pkeys, pname)
+	}
+	sort.Strings(pkeys)
+	for _, pname := range pkeys {
+		coverage = append(coverage, ParserCoverage{
+			Parser:     pname,
+			TestsCount: 0,
+			PresentIn:  make(map[string]bool),
+		})
+	}
+
+	// compiled once (was recompiled on every scanned line): matches
+	// `results["<stage>"]["<parser>"][n].Evt...` assertion lines
+	assertLine := regexp.MustCompile(`^results\["[^"]+"\]\["(?P<parser>[^"]+)"\]\[[0-9]+\]\.Evt\..*`)
+	parserIdx := assertLine.SubexpIndex("parser")
+
+	passerts, err := filepath.Glob(".tests/*/parser.assert")
+	if err != nil {
+		return coverage, fmt.Errorf("while find parser asserts : %s", err)
+	}
+	for _, assert := range passerts {
+		file, err := os.Open(assert)
+		if err != nil {
+			return coverage, fmt.Errorf("while reading %s : %s", assert, err)
+		}
+		scanner := bufio.NewScanner(file)
+		for scanner.Scan() {
+			line := scanner.Text()
+			log.Debugf("assert line : %s", line)
+			match := assertLine.FindStringSubmatch(line)
+			if len(match) == 0 {
+				log.Debugf("%s doesn't match", line)
+				continue
+			}
+			capturedParser := match[parserIdx]
+			for idx, pcover := range coverage {
+				// exact (author/name) match
+				if pcover.Parser == capturedParser {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				parserNameSplit := strings.Split(pcover.Parser, "/")
+				parserNameOnly := parserNameSplit[len(parserNameSplit)-1]
+				// assertion used the short name (no author prefix)
+				if parserNameOnly == capturedParser {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				capturedParserSplit := strings.Split(capturedParser, "/")
+				capturedParserName := capturedParserSplit[len(capturedParserSplit)-1]
+				// both reduced to short names
+				if capturedParserName == parserNameOnly {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				// assertion used the "<name>-logs" variant
+				if capturedParserName == parserNameOnly+"-logs" {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+			}
+		}
+		// surface scanner failures instead of silently truncating coverage
+		if err := scanner.Err(); err != nil {
+			file.Close()
+			return coverage, fmt.Errorf("while reading %s : %s", assert, err)
+		}
+		file.Close()
+	}
+	return coverage, nil
+}
+
+// GetScenariosCoverage returns, for every scenario in the hub index
+// (alphabetical order), how many assertion lines in .tests/*/scenario.assert
+// reference it and in which files. Matching tolerates author-prefixed vs
+// short names, the "probbing"/"probing" spelling variants, and a trailing
+// "-detection" suffix on the asserted name.
+func (h *HubTest) GetScenariosCoverage() ([]ScenarioCoverage, error) {
+	var coverage []ScenarioCoverage
+	if _, ok := h.HubIndex.Data[cwhub.SCENARIOS]; !ok {
+		return coverage, fmt.Errorf("no scenarios in hub index")
+	}
+	//populate from hub, iterate in alphabetical order
+	var pkeys []string
+	for scenarioName := range h.HubIndex.Data[cwhub.SCENARIOS] {
+		pkeys = append(pkeys, scenarioName)
+	}
+	sort.Strings(pkeys)
+	for _, scenarioName := range pkeys {
+		coverage = append(coverage, ScenarioCoverage{
+			Scenario:   scenarioName,
+			TestsCount: 0,
+			PresentIn:  make(map[string]bool),
+		})
+	}
+
+	// compiled once (was recompiled on every scanned line): matches
+	// `results[n].Overflow.Alert.GetScenario() == "<scenario>"` lines
+	assertLine := regexp.MustCompile(`^results\[[0-9]+\].Overflow.Alert.GetScenario\(\) == "(?P<scenario>[^"]+)"`)
+	scenarioIdx := assertLine.SubexpIndex("scenario")
+
+	passerts, err := filepath.Glob(".tests/*/scenario.assert")
+	if err != nil {
+		return coverage, fmt.Errorf("while find scenario asserts : %s", err)
+	}
+	for _, assert := range passerts {
+		file, err := os.Open(assert)
+		if err != nil {
+			return coverage, fmt.Errorf("while reading %s : %s", assert, err)
+		}
+		scanner := bufio.NewScanner(file)
+		for scanner.Scan() {
+			line := scanner.Text()
+			log.Debugf("assert line : %s", line)
+			match := assertLine.FindStringSubmatch(line)
+			if len(match) == 0 {
+				log.Debugf("%s doesn't match", line)
+				continue
+			}
+			capturedScenario := match[scenarioIdx]
+			for idx, pcover := range coverage {
+				// exact (author/name) match
+				if pcover.Scenario == capturedScenario {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				scenarioNameSplit := strings.Split(pcover.Scenario, "/")
+				scenarioNameOnly := scenarioNameSplit[len(scenarioNameSplit)-1]
+				// assertion used the short name (no author prefix)
+				if scenarioNameOnly == capturedScenario {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				// normalize the historical "probbing" typo on both sides
+				fixedProbingWord := strings.Replace(pcover.Scenario, "probbing", "probing", -1)
+				fixedProbingAssert := strings.Replace(capturedScenario, "probbing", "probing", -1)
+				if fixedProbingWord == fixedProbingAssert {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				// assertion used the "<name>-detection" variant
+				if fmt.Sprintf("%s-detection", pcover.Scenario) == capturedScenario {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+				if fmt.Sprintf("%s-detection", fixedProbingWord) == fixedProbingAssert {
+					coverage[idx].TestsCount++
+					coverage[idx].PresentIn[assert] = true
+					continue
+				}
+			}
+		}
+		// surface scanner failures instead of silently truncating coverage
+		if err := scanner.Err(); err != nil {
+			file.Close()
+			return coverage, fmt.Errorf("while reading %s : %s", assert, err)
+		}
+		file.Close()
+	}
+	return coverage, nil
+}
diff --git a/pkg/cstest/hubtest.go b/pkg/cstest/hubtest.go
new file mode 100644
index 000000000..804fe0de3
--- /dev/null
+++ b/pkg/cstest/hubtest.go
@@ -0,0 +1,114 @@
+package cstest
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+
+ "github.com/crowdsecurity/crowdsec/pkg/cwhub"
+ "github.com/pkg/errors"
+)
+
+// HubTest drives the hubtest environment: the hub checkout and its .tests
+// folder, the crowdsec/cscli binaries used to run tests, the shared template
+// files, the loaded hub index, and the list of loaded tests.
+type HubTest struct {
+	CrowdSecPath           string
+	CscliPath              string
+	HubPath                string
+	HubTestPath            string
+	HubIndexFile           string
+	TemplateConfigPath     string
+	TemplateProfilePath    string
+	TemplateSimulationPath string
+	HubIndex               *HubIndex
+	Tests                  []*HubTestItem
+}
+
+// Template file names expected in the hubtest folder; they are copied into
+// each test's runtime environment.
+const (
+	templateConfigFile     = "template_config.yaml"
+	templateSimulationFile = "template_simulation.yaml"
+	templateProfileFile    = "template_profiles.yaml"
+)
+
+// NewHubTest builds a HubTest from a hub checkout and the crowdsec/cscli
+// binaries. It verifies the hub folder exists, that both binaries are
+// reachable (in $PATH or as a file path), and loads the hub .index.json.
+func NewHubTest(hubPath string, crowdsecPath string, cscliPath string) (HubTest, error) {
+	var err error
+
+	hubPath, err = filepath.Abs(hubPath)
+	if err != nil {
+		return HubTest{}, fmt.Errorf("can't get absolute path of hub: %+v", err)
+	}
+	// we can't use hubtest without the hub
+	if _, err := os.Stat(hubPath); os.IsNotExist(err) {
+		return HubTest{}, fmt.Errorf("path to hub '%s' doesn't exist, can't run", hubPath)
+	}
+	// lowercase local: it previously looked like an exported identifier
+	hubTestPath := filepath.Join(hubPath, "./.tests/")
+
+	// we can't use hubtest without crowdsec binary
+	if _, err := exec.LookPath(crowdsecPath); err != nil {
+		if _, err := os.Stat(crowdsecPath); os.IsNotExist(err) {
+			return HubTest{}, fmt.Errorf("path to crowdsec binary '%s' doesn't exist or is not in $PATH, can't run", crowdsecPath)
+		}
+	}
+
+	// we can't use hubtest without cscli binary
+	if _, err := exec.LookPath(cscliPath); err != nil {
+		if _, err := os.Stat(cscliPath); os.IsNotExist(err) {
+			return HubTest{}, fmt.Errorf("path to cscli binary '%s' doesn't exist or is not in $PATH, can't run", cscliPath)
+		}
+	}
+
+	hubIndexFile := filepath.Join(hubPath, ".index.json")
+	bidx, err := ioutil.ReadFile(hubIndexFile)
+	if err != nil {
+		return HubTest{}, fmt.Errorf("unable to read index file: %s", err)
+	}
+
+	// load hub index
+	hubIndex, err := cwhub.LoadPkgIndex(bidx)
+	if err != nil {
+		return HubTest{}, fmt.Errorf("unable to load hub index file: %s", err)
+	}
+
+	templateConfigFilePath := filepath.Join(hubTestPath, templateConfigFile)
+	templateProfilePath := filepath.Join(hubTestPath, templateProfileFile)
+	templateSimulationPath := filepath.Join(hubTestPath, templateSimulationFile)
+
+	return HubTest{
+		CrowdSecPath:           crowdsecPath,
+		CscliPath:              cscliPath,
+		HubPath:                hubPath,
+		HubTestPath:            hubTestPath,
+		HubIndexFile:           hubIndexFile,
+		TemplateConfigPath:     templateConfigFilePath,
+		TemplateProfilePath:    templateProfilePath,
+		TemplateSimulationPath: templateSimulationPath,
+		HubIndex:               &HubIndex{Data: hubIndex},
+	}, nil
+}
+
+// LoadTestItem loads the named test from the hubtest folder, registers it in
+// h.Tests and returns it. On error the returned item is nil.
+// (The previous version declared a local variable named HubTestItem, shadowing
+// the type, and returned a meaningless empty item alongside the error.)
+func (h *HubTest) LoadTestItem(name string) (*HubTestItem, error) {
+	testItem, err := NewTest(name, h)
+	if err != nil {
+		return nil, err
+	}
+	h.Tests = append(h.Tests, testItem)
+
+	return testItem, nil
+}
+
+// LoadAllTests scans the hubtest folder and loads every sub-directory as a
+// test item; non-directory entries are ignored.
+func (h *HubTest) LoadAllTests() error {
+	entries, err := ioutil.ReadDir(h.HubTestPath)
+	if err != nil {
+		return err
+	}
+
+	for _, entry := range entries {
+		if !entry.IsDir() {
+			continue
+		}
+		if _, err := h.LoadTestItem(entry.Name()); err != nil {
+			return errors.Wrapf(err, "while loading %s", entry.Name())
+		}
+	}
+	return nil
+}
diff --git a/pkg/cstest/hubtest_item.go b/pkg/cstest/hubtest_item.go
new file mode 100644
index 000000000..d44b744a2
--- /dev/null
+++ b/pkg/cstest/hubtest_item.go
@@ -0,0 +1,604 @@
+package cstest
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+
+ "github.com/crowdsecurity/crowdsec/pkg/csconfig"
+ "github.com/crowdsecurity/crowdsec/pkg/cwhub"
+ log "github.com/sirupsen/logrus"
+ "gopkg.in/yaml.v2"
+)
+
+// HubTestItemConfig is the per-test config.yaml: hub items to install, the
+// log file/type to replay, and whether parser assertions are skipped.
+type HubTestItemConfig struct {
+	Parsers   []string `yaml:"parsers"`
+	Scenarios []string `yaml:"scenarios"`
+	// NOTE(review): "OVerflows" casing is a typo, kept because the field is
+	// referenced elsewhere; the yaml tag is the config-facing name anyway.
+	PostOVerflows []string `yaml:"postoverflows"`
+	LogFile       string   `yaml:"log_file"`
+	LogType       string   `yaml:"log_type"`
+	IgnoreParsers bool     `yaml:"ignore_parsers"` // if we test a scenario, we don't want to assert on Parser
+}
+
+// HubIndex wraps the decoded hub .index.json: item type -> item name -> item.
+type HubIndex struct {
+	Data map[string]map[string]cwhub.Item
+}
+
+// HubTestItem is a single hubtest: its configuration, the isolated runtime
+// environment it executes in, and the assertion state of its last run.
+type HubTestItem struct {
+	Name string
+	Path string
+
+	// binaries used to drive the test
+	CrowdSecPath string
+	CscliPath    string
+
+	// isolated runtime environment (config, hub, data, patterns) for this test
+	RuntimePath               string
+	RuntimeHubPath            string
+	RuntimeDataPath           string
+	RuntimePatternsPath       string
+	RuntimeConfigFilePath     string
+	RuntimeProfileFilePath    string
+	RuntimeSimulationFilePath string
+	RuntimeHubConfig          *csconfig.Hub
+
+	// dump files produced by the crowdsec run
+	ResultsPath          string
+	ParserResultFile     string
+	ScenarioResultFile   string
+	BucketPourResultFile string
+
+	// copied from the parent HubTest
+	HubPath                string
+	HubTestPath            string
+	HubIndexFile           string
+	TemplateConfigPath     string
+	TemplateProfilePath    string
+	TemplateSimulationPath string
+	HubIndex               *HubIndex
+
+	Config *HubTestItemConfig
+
+	// outcome of the last Run()
+	Success    bool
+	ErrorsList []string
+
+	// assertion helpers; AutoGen is set when assertions were auto-generated
+	AutoGen        bool
+	ParserAssert   *ParserAssert
+	ScenarioAssert *ScenarioAssert
+
+	// where custom (non-hub) items are looked up: hub root and the test folder
+	CustomItemsLocation []string
+}
+
+const (
+	// assertion files living in each test folder
+	ParserAssertFileName = "parser.assert"
+	// parser dump produced by `crowdsec -dump-data` in the results folder
+	ParserResultFileName = "parser-dump.yaml"
+
+	ScenarioAssertFileName = "scenario.assert"
+	// bucket dump produced by `crowdsec -dump-data` in the results folder
+	ScenarioResultFileName = "bucket-dump.yaml"
+
+	BucketPourResultFileName = "bucketpour-dump.yaml"
+
+	// system-wide patterns folder copied into each runtime environment
+	crowdsecPatternsFolder = "/etc/crowdsec/patterns/"
+)
+
+// NewTest builds the HubTestItem for the named test under the hubtest folder:
+// it reads the test's config.yaml (a missing file is only logged, leaving the
+// zero config), and prepares every runtime/result path plus the parser and
+// scenario assert helpers.
+func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) {
+	testPath := filepath.Join(hubTest.HubTestPath, name)
+	runtimeFolder := filepath.Join(testPath, "runtime")
+	runtimeHubFolder := filepath.Join(runtimeFolder, "hub")
+	configFilePath := filepath.Join(testPath, "config.yaml")
+	resultPath := filepath.Join(testPath, "results")
+
+	// read test configuration file
+	configFileData := &HubTestItemConfig{}
+	yamlFile, err := ioutil.ReadFile(configFilePath)
+	if err != nil {
+		log.Printf("no config file found in '%s': %v", testPath, err)
+	}
+	err = yaml.Unmarshal(yamlFile, configFileData)
+	if err != nil {
+		return nil, fmt.Errorf("Unmarshal: %v", err)
+	}
+
+	// lowercase locals: the previous names shadowed the ParserAssert and
+	// ScenarioAssert types
+	parserAssertFilePath := filepath.Join(testPath, ParserAssertFileName)
+	parserAssert := NewParserAssert(parserAssertFilePath)
+
+	scenarioAssertFilePath := filepath.Join(testPath, ScenarioAssertFileName)
+	scenarioAssert := NewScenarioAssert(scenarioAssertFilePath)
+	return &HubTestItem{
+		Name:                      name,
+		Path:                      testPath,
+		CrowdSecPath:              hubTest.CrowdSecPath,
+		CscliPath:                 hubTest.CscliPath,
+		RuntimePath:               filepath.Join(testPath, "runtime"),
+		RuntimeHubPath:            runtimeHubFolder,
+		RuntimeDataPath:           filepath.Join(runtimeFolder, "data"),
+		RuntimePatternsPath:       filepath.Join(runtimeFolder, "patterns"),
+		RuntimeConfigFilePath:     filepath.Join(runtimeFolder, "config.yaml"),
+		RuntimeProfileFilePath:    filepath.Join(runtimeFolder, "profiles.yaml"),
+		RuntimeSimulationFilePath: filepath.Join(runtimeFolder, "simulation.yaml"),
+		ResultsPath:               resultPath,
+		ParserResultFile:          filepath.Join(resultPath, ParserResultFileName),
+		ScenarioResultFile:        filepath.Join(resultPath, ScenarioResultFileName),
+		BucketPourResultFile:      filepath.Join(resultPath, BucketPourResultFileName),
+		RuntimeHubConfig: &csconfig.Hub{
+			HubDir:       runtimeHubFolder,
+			ConfigDir:    runtimeFolder,
+			HubIndexFile: hubTest.HubIndexFile,
+			DataDir:      filepath.Join(runtimeFolder, "data"),
+		},
+		Config:                  configFileData,
+		HubPath:                 hubTest.HubPath,
+		HubTestPath:             hubTest.HubTestPath,
+		HubIndexFile:            hubTest.HubIndexFile,
+		TemplateConfigPath:      hubTest.TemplateConfigPath,
+		TemplateProfilePath:     hubTest.TemplateProfilePath,
+		TemplateSimulationPath:  hubTest.TemplateSimulationPath,
+		HubIndex:                hubTest.HubIndex,
+		ScenarioAssert:          scenarioAssert,
+		ParserAssert:            parserAssert,
+		CustomItemsLocation:     []string{hubTest.HubPath, testPath},
+	}, nil
+}
+
+// InstallHub deploys every parser, scenario and postoverflow listed in the
+// test config into the runtime environment. Each item is taken from the hub
+// index when known (copied under runtime/hub, then symlinked into place like
+// a real install); otherwise it is searched in the custom item locations
+// (hub root and test folder) and copied directly. It then syncs the hub
+// state and downloads any data files the installed items need.
+func (t *HubTestItem) InstallHub() error {
+	// install parsers in runtime environment
+	for _, parser := range t.Config.Parsers {
+		if parser == "" {
+			continue
+		}
+		var parserDirDest string
+		if hubParser, ok := t.HubIndex.Data[cwhub.PARSERS][parser]; ok {
+			parserSource, err := filepath.Abs(filepath.Join(t.HubPath, hubParser.RemotePath))
+			if err != nil {
+				return fmt.Errorf("can't get absolute path of '%s': %s", parserSource, err)
+			}
+			parserFileName := filepath.Base(parserSource)
+
+			// runtime/hub/parsers/s00-raw/crowdsecurity/
+			hubDirParserDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubParser.RemotePath))
+
+			// runtime/parsers/s00-raw/
+			parserDirDest = fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, hubParser.Stage)
+
+			if err := os.MkdirAll(hubDirParserDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", hubDirParserDest, err)
+			}
+			if err := os.MkdirAll(parserDirDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", parserDirDest, err)
+			}
+
+			// runtime/hub/parsers/s00-raw/crowdsecurity/syslog-logs.yaml
+			hubDirParserPath := filepath.Join(hubDirParserDest, parserFileName)
+			if err := Copy(parserSource, hubDirParserPath); err != nil {
+				return fmt.Errorf("unable to copy '%s' to '%s': %s", parserSource, hubDirParserPath, err)
+			}
+
+			// runtime/parsers/s00-raw/syslog-logs.yaml
+			// an already-existing symlink (previous run) is fine
+			parserDirParserPath := filepath.Join(parserDirDest, parserFileName)
+			if err := os.Symlink(hubDirParserPath, parserDirParserPath); err != nil {
+				if !os.IsExist(err) {
+					return fmt.Errorf("unable to symlink parser '%s' to '%s': %s", hubDirParserPath, parserDirParserPath, err)
+				}
+			}
+		} else {
+			// not in the hub index: best-effort search through the custom item
+			// locations; each failing step just tries the next location
+			customParserExist := false
+			for _, customPath := range t.CustomItemsLocation {
+				// we check if its a custom parser
+				customParserPath := filepath.Join(customPath, parser)
+				if _, err := os.Stat(customParserPath); os.IsNotExist(err) {
+					continue
+					//return fmt.Errorf("parser '%s' doesn't exist in the hub and doesn't appear to be a custom one.", parser)
+				}
+
+				customParserPathSplit := strings.Split(customParserPath, "/")
+				customParserName := customParserPathSplit[len(customParserPathSplit)-1]
+				// because path is parsers///parser.yaml and we wan't the stage
+				customParserStage := customParserPathSplit[len(customParserPathSplit)-3]
+
+				// check if stage exist
+				hubStagePath := filepath.Join(t.HubPath, fmt.Sprintf("parsers/%s", customParserStage))
+
+				if _, err := os.Stat(hubStagePath); os.IsNotExist(err) {
+					continue
+					//return fmt.Errorf("stage '%s' extracted from '%s' doesn't exist in the hub", customParserStage, hubStagePath)
+				}
+
+				parserDirDest = fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, customParserStage)
+				if err := os.MkdirAll(parserDirDest, os.ModePerm); err != nil {
+					continue
+					//return fmt.Errorf("unable to create folder '%s': %s", parserDirDest, err)
+				}
+
+				customParserDest := filepath.Join(parserDirDest, customParserName)
+				// if path to parser exist, copy it
+				if err := Copy(customParserPath, customParserDest); err != nil {
+					continue
+					//return fmt.Errorf("unable to copy custom parser '%s' to '%s': %s", customParserPath, customParserDest, err)
+				}
+
+				customParserExist = true
+				break
+			}
+			if !customParserExist {
+				return fmt.Errorf("couldn't find custom parser '%s' in the following location: %+v", parser, t.CustomItemsLocation)
+			}
+		}
+	}
+
+	// install scenarios in runtime environment
+	for _, scenario := range t.Config.Scenarios {
+		if scenario == "" {
+			continue
+		}
+		var scenarioDirDest string
+		if hubScenario, ok := t.HubIndex.Data[cwhub.SCENARIOS][scenario]; ok {
+			scenarioSource, err := filepath.Abs(filepath.Join(t.HubPath, hubScenario.RemotePath))
+			if err != nil {
+				return fmt.Errorf("can't get absolute path to: %s", scenarioSource)
+			}
+			scenarioFileName := filepath.Base(scenarioSource)
+
+			// runtime/hub/scenarios/crowdsecurity/
+			hubDirScenarioDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubScenario.RemotePath))
+
+			// runtime/parsers/scenarios/
+			// scenarios are not staged, unlike parsers/postoverflows
+			scenarioDirDest = fmt.Sprintf("%s/scenarios/", t.RuntimePath)
+
+			if err := os.MkdirAll(hubDirScenarioDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", hubDirScenarioDest, err)
+			}
+			if err := os.MkdirAll(scenarioDirDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", scenarioDirDest, err)
+			}
+
+			// runtime/hub/scenarios/crowdsecurity/ssh-bf.yaml
+			hubDirScenarioPath := filepath.Join(hubDirScenarioDest, scenarioFileName)
+			if err := Copy(scenarioSource, hubDirScenarioPath); err != nil {
+				return fmt.Errorf("unable to copy '%s' to '%s': %s", scenarioSource, hubDirScenarioPath, err)
+			}
+
+			// runtime/scenarios/ssh-bf.yaml
+			scenarioDirParserPath := filepath.Join(scenarioDirDest, scenarioFileName)
+			if err := os.Symlink(hubDirScenarioPath, scenarioDirParserPath); err != nil {
+				if !os.IsExist(err) {
+					return fmt.Errorf("unable to symlink scenario '%s' to '%s': %s", hubDirScenarioPath, scenarioDirParserPath, err)
+				}
+			}
+		} else {
+			customScenarioExist := false
+			for _, customPath := range t.CustomItemsLocation {
+				// we check if its a custom scenario
+				customScenarioPath := filepath.Join(customPath, scenario)
+				if _, err := os.Stat(customScenarioPath); os.IsNotExist(err) {
+					continue
+					//return fmt.Errorf("scenarios '%s' doesn't exist in the hub and doesn't appear to be a custom one.", scenario)
+				}
+
+				scenarioDirDest = fmt.Sprintf("%s/scenarios/", t.RuntimePath)
+				if err := os.MkdirAll(scenarioDirDest, os.ModePerm); err != nil {
+					return fmt.Errorf("unable to create folder '%s': %s", scenarioDirDest, err)
+				}
+
+				scenarioFileName := filepath.Base(customScenarioPath)
+				scenarioFileDest := filepath.Join(scenarioDirDest, scenarioFileName)
+				if err := Copy(customScenarioPath, scenarioFileDest); err != nil {
+					continue
+					//return fmt.Errorf("unable to copy scenario from '%s' to '%s': %s", customScenarioPath, scenarioFileDest, err)
+				}
+				customScenarioExist = true
+				break
+			}
+			if !customScenarioExist {
+				return fmt.Errorf("couldn't find custom scenario '%s' in the following location: %+v", scenario, t.CustomItemsLocation)
+			}
+		}
+	}
+
+	// install postoverflows in runtime environment
+	for _, postoverflow := range t.Config.PostOVerflows {
+		if postoverflow == "" {
+			continue
+		}
+		var postoverflowDirDest string
+		if hubPostOverflow, ok := t.HubIndex.Data[cwhub.PARSERS_OVFLW][postoverflow]; ok {
+			postoverflowSource, err := filepath.Abs(filepath.Join(t.HubPath, hubPostOverflow.RemotePath))
+			if err != nil {
+				return fmt.Errorf("can't get absolute path of '%s': %s", postoverflowSource, err)
+			}
+			postoverflowFileName := filepath.Base(postoverflowSource)
+
+			// runtime/hub/postoverflows/s00-enrich/crowdsecurity/
+			hubDirPostoverflowDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubPostOverflow.RemotePath))
+
+			// runtime/postoverflows/s00-enrich
+			postoverflowDirDest = fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, hubPostOverflow.Stage)
+
+			if err := os.MkdirAll(hubDirPostoverflowDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", hubDirPostoverflowDest, err)
+			}
+			if err := os.MkdirAll(postoverflowDirDest, os.ModePerm); err != nil {
+				return fmt.Errorf("unable to create folder '%s': %s", postoverflowDirDest, err)
+			}
+
+			// runtime/hub/postoverflows/s00-enrich/crowdsecurity/rdns.yaml
+			hubDirPostoverflowPath := filepath.Join(hubDirPostoverflowDest, postoverflowFileName)
+			if err := Copy(postoverflowSource, hubDirPostoverflowPath); err != nil {
+				return fmt.Errorf("unable to copy '%s' to '%s': %s", postoverflowSource, hubDirPostoverflowPath, err)
+			}
+
+			// runtime/postoverflows/s00-enrich/rdns.yaml
+			postoverflowDirParserPath := filepath.Join(postoverflowDirDest, postoverflowFileName)
+			if err := os.Symlink(hubDirPostoverflowPath, postoverflowDirParserPath); err != nil {
+				if !os.IsExist(err) {
+					return fmt.Errorf("unable to symlink postoverflow '%s' to '%s': %s", hubDirPostoverflowPath, postoverflowDirParserPath, err)
+				}
+			}
+		} else {
+			customPostoverflowExist := false
+			for _, customPath := range t.CustomItemsLocation {
+				// we check if its a custom postoverflow
+				customPostOverflowPath := filepath.Join(customPath, postoverflow)
+				if _, err := os.Stat(customPostOverflowPath); os.IsNotExist(err) {
+					continue
+					//return fmt.Errorf("postoverflow '%s' doesn't exist in the hub and doesn't appear to be a custom one.", postoverflow)
+				}
+
+				customPostOverflowPathSplit := strings.Split(customPostOverflowPath, "/")
+				customPostoverflowName := customPostOverflowPathSplit[len(customPostOverflowPathSplit)-1]
+				// because path is postoverflows///parser.yaml and we wan't the stage
+				customPostoverflowStage := customPostOverflowPathSplit[len(customPostOverflowPathSplit)-3]
+
+				// check if stage exist
+				hubStagePath := filepath.Join(t.HubPath, fmt.Sprintf("postoverflows/%s", customPostoverflowStage))
+
+				if _, err := os.Stat(hubStagePath); os.IsNotExist(err) {
+					continue
+					//return fmt.Errorf("stage '%s' from extracted '%s' doesn't exist in the hub", customPostoverflowStage, hubStagePath)
+				}
+
+				postoverflowDirDest = fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, customPostoverflowStage)
+				if err := os.MkdirAll(postoverflowDirDest, os.ModePerm); err != nil {
+					continue
+					//return fmt.Errorf("unable to create folder '%s': %s", postoverflowDirDest, err)
+				}
+
+				customPostoverflowDest := filepath.Join(postoverflowDirDest, customPostoverflowName)
+				// if path to postoverflow exist, copy it
+				if err := Copy(customPostOverflowPath, customPostoverflowDest); err != nil {
+					continue
+					//return fmt.Errorf("unable to copy custom parser '%s' to '%s': %s", customPostOverflowPath, customPostoverflowDest, err)
+				}
+				customPostoverflowExist = true
+				break
+			}
+			if !customPostoverflowExist {
+				return fmt.Errorf("couldn't find custom postoverflow '%s' in the following location: %+v", postoverflow, t.CustomItemsLocation)
+			}
+		}
+	}
+
+	// load installed hub
+	err := cwhub.GetHubIdx(t.RuntimeHubConfig)
+	if err != nil {
+		log.Fatalf("can't local sync the hub: %+v", err)
+	}
+
+	// install data for parsers if needed
+	ret := cwhub.GetItemMap(cwhub.PARSERS)
+	for parserName, item := range ret {
+		if item.Installed {
+			if err := cwhub.DownloadDataIfNeeded(t.RuntimeHubConfig, item, true); err != nil {
+				return fmt.Errorf("unable to download data for parser '%s': %+v", parserName, err)
+			}
+			log.Debugf("parser '%s' installed succesfully in runtime environment", parserName)
+		}
+	}
+
+	// install data for scenarios if needed
+	ret = cwhub.GetItemMap(cwhub.SCENARIOS)
+	for scenarioName, item := range ret {
+		if item.Installed {
+			if err := cwhub.DownloadDataIfNeeded(t.RuntimeHubConfig, item, true); err != nil {
+				return fmt.Errorf("unable to download data for parser '%s': %+v", scenarioName, err)
+			}
+			log.Debugf("scenario '%s' installed succesfully in runtime environment", scenarioName)
+		}
+	}
+
+	// install data for postoverflows if needed
+	ret = cwhub.GetItemMap(cwhub.PARSERS_OVFLW)
+	for postoverflowName, item := range ret {
+		if item.Installed {
+			if err := cwhub.DownloadDataIfNeeded(t.RuntimeHubConfig, item, true); err != nil {
+				return fmt.Errorf("unable to download data for parser '%s': %+v", postoverflowName, err)
+			}
+			log.Debugf("postoverflow '%s' installed succesfully in runtime environment", postoverflowName)
+		}
+	}
+
+	return nil
+}
+
+// Clean removes the test's runtime folder, wiping any previous run.
+func (t *HubTestItem) Clean() error {
+	return os.RemoveAll(t.RuntimePath)
+}
+
+// Run executes the test end to end: it builds the runtime environment
+// (config, profiles, simulation, patterns, hub items), registers a machine
+// via cscli, replays the test log through crowdsec in dump mode, then runs
+// the parser and scenario assertions — or auto-generates them when the
+// assert file is empty. t.Success and t.AutoGen reflect the outcome.
+// Fixed vs. previous version: assert-file creation failures are returned as
+// errors instead of calling log.Fatal (no os.Exit from library code), and
+// the dead first assignment to assertFileStat was removed.
+func (t *HubTestItem) Run() error {
+	t.Success = false
+	t.ErrorsList = make([]string, 0)
+
+	testPath := filepath.Join(t.HubTestPath, t.Name)
+	if _, err := os.Stat(testPath); os.IsNotExist(err) {
+		return fmt.Errorf("test '%s' doesn't exist in '%s', exiting", t.Name, t.HubTestPath)
+	}
+
+	currentDir, err := os.Getwd()
+	if err != nil {
+		return fmt.Errorf("can't get current directory: %+v", err)
+	}
+
+	// create runtime folder
+	if err := os.MkdirAll(t.RuntimePath, os.ModePerm); err != nil {
+		return fmt.Errorf("unable to create folder '%s': %+v", t.RuntimePath, err)
+	}
+
+	// create runtime data folder
+	if err := os.MkdirAll(t.RuntimeDataPath, os.ModePerm); err != nil {
+		return fmt.Errorf("unable to create folder '%s': %+v", t.RuntimeDataPath, err)
+	}
+
+	// create runtime hub folder
+	if err := os.MkdirAll(t.RuntimeHubPath, os.ModePerm); err != nil {
+		return fmt.Errorf("unable to create folder '%s': %+v", t.RuntimeHubPath, err)
+	}
+
+	if err := Copy(t.HubIndexFile, filepath.Join(t.RuntimeHubPath, ".index.json")); err != nil {
+		return fmt.Errorf("unable to copy .index.json file in '%s': %s", filepath.Join(t.RuntimeHubPath, ".index.json"), err)
+	}
+
+	// create results folder
+	if err := os.MkdirAll(t.ResultsPath, os.ModePerm); err != nil {
+		return fmt.Errorf("unable to create folder '%s': %+v", t.ResultsPath, err)
+	}
+
+	// copy template config file to runtime folder
+	if err := Copy(t.TemplateConfigPath, t.RuntimeConfigFilePath); err != nil {
+		return fmt.Errorf("unable to copy '%s' to '%s': %v", t.TemplateConfigPath, t.RuntimeConfigFilePath, err)
+	}
+
+	// copy template profile file to runtime folder
+	if err := Copy(t.TemplateProfilePath, t.RuntimeProfileFilePath); err != nil {
+		return fmt.Errorf("unable to copy '%s' to '%s': %v", t.TemplateProfilePath, t.RuntimeProfileFilePath, err)
+	}
+
+	// copy template simulation file to runtime folder
+	if err := Copy(t.TemplateSimulationPath, t.RuntimeSimulationFilePath); err != nil {
+		return fmt.Errorf("unable to copy '%s' to '%s': %v", t.TemplateSimulationPath, t.RuntimeSimulationFilePath, err)
+	}
+
+	// copy template patterns folder to runtime folder
+	if err := CopyDir(crowdsecPatternsFolder, t.RuntimePatternsPath); err != nil {
+		return fmt.Errorf("unable to copy 'patterns' from '%s' to '%s': %s", crowdsecPatternsFolder, t.RuntimePatternsPath, err)
+	}
+
+	// install the hub in the runtime folder
+	if err := t.InstallHub(); err != nil {
+		return fmt.Errorf("unable to install hub in '%s': %s", t.RuntimeHubPath, err)
+	}
+
+	logFile := t.Config.LogFile
+	logType := t.Config.LogType
+	dsn := fmt.Sprintf("file://%s", logFile)
+
+	// log file path in config.yaml may be relative to the test folder
+	if err := os.Chdir(testPath); err != nil {
+		return fmt.Errorf("can't 'cd' to '%s': %s", testPath, err)
+	}
+
+	logFileStat, err := os.Stat(logFile)
+	if err != nil {
+		return fmt.Errorf("unable to stat log file '%s': %s", logFile, err.Error())
+	}
+	if logFileStat.Size() == 0 {
+		return fmt.Errorf("Log file '%s' is empty, please fill it with log", logFile)
+	}
+
+	// register the test machine; "already exist" from a previous run is fine
+	cmdArgs := []string{"-c", t.RuntimeConfigFilePath, "machines", "add", "testMachine", "--auto"}
+	cscliRegisterCmd := exec.Command(t.CscliPath, cmdArgs...)
+	log.Debugf("%s", cscliRegisterCmd.String())
+	output, err := cscliRegisterCmd.CombinedOutput()
+	if err != nil {
+		if !strings.Contains(string(output), "unable to create machine: user 'testMachine': user already exist") {
+			fmt.Println(string(output))
+			return fmt.Errorf("fail to run '%s' for test '%s': %v", cscliRegisterCmd.String(), t.Name, err)
+		}
+	}
+
+	// replay the log through crowdsec in dump mode; dumps land in ResultsPath
+	cmdArgs = []string{"-c", t.RuntimeConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", t.ResultsPath}
+	crowdsecCmd := exec.Command(t.CrowdSecPath, cmdArgs...)
+	log.Debugf("%s", crowdsecCmd.String())
+	output, err = crowdsecCmd.CombinedOutput()
+	if log.GetLevel() >= log.DebugLevel || err != nil {
+		fmt.Println(string(output))
+	}
+	if err != nil {
+		return fmt.Errorf("fail to run '%s' for test '%s': %v", crowdsecCmd.String(), t.Name, err)
+	}
+
+	if err := os.Chdir(currentDir); err != nil {
+		return fmt.Errorf("can't 'cd' to '%s': %s", currentDir, err)
+	}
+
+	// assert parsers
+	if !t.Config.IgnoreParsers {
+		// make sure the assert file exists; an empty one triggers auto-gen
+		if _, err := os.Stat(t.ParserAssert.File); os.IsNotExist(err) {
+			parserAssertFile, err := os.Create(t.ParserAssert.File)
+			if err != nil {
+				return fmt.Errorf("unable to create '%s': %s", t.ParserAssert.File, err)
+			}
+			parserAssertFile.Close()
+		}
+		assertFileStat, err := os.Stat(t.ParserAssert.File)
+		if err != nil {
+			return fmt.Errorf("error while stats '%s': %s", t.ParserAssert.File, err)
+		}
+
+		if assertFileStat.Size() == 0 {
+			assertData, err := t.ParserAssert.AutoGenFromFile(t.ParserResultFile)
+			if err != nil {
+				return fmt.Errorf("couldn't generate assertion: %s", err.Error())
+			}
+			t.ParserAssert.AutoGenAssertData = assertData
+			t.ParserAssert.AutoGenAssert = true
+		} else {
+			if err := t.ParserAssert.AssertFile(t.ParserResultFile); err != nil {
+				return fmt.Errorf("unable to run assertion on file '%s': %s", t.ParserResultFile, err)
+			}
+		}
+	}
+
+	// assert scenarios (only when the test declares at least one)
+	nbScenario := 0
+	for _, scenario := range t.Config.Scenarios {
+		if scenario == "" {
+			continue
+		}
+		nbScenario += 1
+	}
+	if nbScenario > 0 {
+		// make sure the assert file exists; an empty one triggers auto-gen
+		if _, err := os.Stat(t.ScenarioAssert.File); os.IsNotExist(err) {
+			scenarioAssertFile, err := os.Create(t.ScenarioAssert.File)
+			if err != nil {
+				return fmt.Errorf("unable to create '%s': %s", t.ScenarioAssert.File, err)
+			}
+			scenarioAssertFile.Close()
+		}
+		assertFileStat, err := os.Stat(t.ScenarioAssert.File)
+		if err != nil {
+			return fmt.Errorf("error while stats '%s': %s", t.ScenarioAssert.File, err)
+		}
+
+		if assertFileStat.Size() == 0 {
+			assertData, err := t.ScenarioAssert.AutoGenFromFile(t.ScenarioResultFile)
+			if err != nil {
+				return fmt.Errorf("couldn't generate assertion: %s", err.Error())
+			}
+			t.ScenarioAssert.AutoGenAssertData = assertData
+			t.ScenarioAssert.AutoGenAssert = true
+		} else {
+			if err := t.ScenarioAssert.AssertFile(t.ScenarioResultFile); err != nil {
+				return fmt.Errorf("unable to run assertion on file '%s': %s", t.ScenarioResultFile, err)
+			}
+		}
+	}
+
+	if t.ParserAssert.AutoGenAssert || t.ScenarioAssert.AutoGenAssert {
+		t.AutoGen = true
+	}
+
+	if (t.ParserAssert.Success || t.Config.IgnoreParsers) && (nbScenario == 0 || t.ScenarioAssert.Success) {
+		t.Success = true
+	}
+
+	return nil
+}
diff --git a/pkg/cstest/parser_assert.go b/pkg/cstest/parser_assert.go
new file mode 100644
index 000000000..02639af8a
--- /dev/null
+++ b/pkg/cstest/parser_assert.go
@@ -0,0 +1,392 @@
+package cstest
+
+import (
+ "bufio"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "regexp"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/antonmedv/expr"
+ "github.com/antonmedv/expr/vm"
+ "github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
+ "github.com/crowdsecurity/crowdsec/pkg/types"
+ "github.com/enescakir/emoji"
+ "github.com/pkg/errors"
+ log "github.com/sirupsen/logrus"
+ "gopkg.in/yaml.v2"
+)
+
+//AssertFail describes one failed assertion : where it comes from and the
+//current value of the variable involved, for debugging.
+type AssertFail struct {
+	File       string            //path of the assert file the expression comes from
+	Line       int               //1-indexed line of the expression in that file
+	Expression string            //the assertion expression that evaluated to false
+	Debug      map[string]string //variable name -> YAML-serialized current value
+}
+
+//ParserAssert evaluates the assertions of an assert file against a parser dump.
+type ParserAssert struct {
+	File              string //path to the assert file
+	AutoGenAssert     bool   //true when assertions were auto-generated instead of checked
+	AutoGenAssertData string //the auto-generated assertion lines
+	NbAssert          int    //number of assertions evaluated
+	Fails             []AssertFail
+	Success           bool //true when no assertion failed
+	TestData          *ParserResults
+}
+
+//ParserResult is the outcome of one parser node on one event.
+type ParserResult struct {
+	Evt     types.Event
+	Success bool
+}
+//ParserResults maps stage -> parser name -> per-event results, as dumped by crowdsec.
+type ParserResults map[string]map[string][]ParserResult
+
+//NewParserAssert returns a ParserAssert bound to the given assertion file,
+//with zeroed counters and an empty result set.
+func NewParserAssert(file string) *ParserAssert {
+	return &ParserAssert{
+		File:          file,
+		NbAssert:      0,
+		Success:       false,
+		Fails:         make([]AssertFail, 0),
+		AutoGenAssert: false,
+		TestData:      &ParserResults{},
+	}
+}
+
+//AutoGenFromFile loads the parser dump at filename and returns
+//auto-generated assertion lines for it.
+func (p *ParserAssert) AutoGenFromFile(filename string) (string, error) {
+	if err := p.LoadTest(filename); err != nil {
+		return "", err
+	}
+	return p.AutoGenParserAssert(), nil
+}
+
+func (p *ParserAssert) LoadTest(filename string) error {
+ var err error
+ parserDump, err := LoadParserDump(filename)
+ if err != nil {
+ return fmt.Errorf("loading parser dump file: %+v", err)
+ }
+ p.TestData = parserDump
+ return nil
+}
+
+//AssertFile evaluates each non-empty line of the assert file (p.File) as an
+//expression against the parser dump in testFile. Failed assertions are
+//collected in p.Fails ; when the assert file contains no assertion at all,
+//assertions are auto-generated from the dump instead.
+func (p *ParserAssert) AssertFile(testFile string) error {
+	file, err := os.Open(p.File)
+	if err != nil {
+		//bugfix: include the file name and the underlying error
+		return fmt.Errorf("failed to open assert file '%s': %s", p.File, err)
+	}
+	//bugfix: defer instead of a single Close() at the end, so early error
+	//returns don't leak the file handle
+	defer file.Close()
+
+	if err := p.LoadTest(testFile); err != nil {
+		return fmt.Errorf("unable to load parser dump file '%s': %s", testFile, err)
+	}
+	scanner := bufio.NewScanner(file)
+	scanner.Split(bufio.ScanLines)
+	nbLine := 0
+	for scanner.Scan() {
+		nbLine += 1
+		if scanner.Text() == "" {
+			continue
+		}
+		ok, err := p.Run(scanner.Text())
+		if err != nil {
+			return fmt.Errorf("unable to run assert '%s': %+v", scanner.Text(), err)
+		}
+		p.NbAssert += 1
+		if !ok {
+			log.Debugf("%s is FALSE", scanner.Text())
+			failedAssert := &AssertFail{
+				File:       p.File,
+				Line:       nbLine,
+				Expression: scanner.Text(),
+				Debug:      make(map[string]string),
+			}
+			//extract the left-hand side variable of the assertion so its
+			//current value can be dumped for debugging.
+			//bugfix: the capture group name was missing ("(?P[^ =]+)"),
+			//which makes MustCompile panic - (?P must be followed by <name>
+			variableRE := regexp.MustCompile(`(?P<variable>[^ =]+) == .*`)
+			match := variableRE.FindStringSubmatch(scanner.Text())
+			if len(match) == 0 {
+				log.Infof("Couldn't get variable of line '%s'", scanner.Text())
+				//bugfix: record the failure and skip - previously match[1]
+				//was read anyway, panicking with index out of range
+				p.Fails = append(p.Fails, *failedAssert)
+				continue
+			}
+			variable := match[1]
+			result, err := p.EvalExpression(variable)
+			if err != nil {
+				log.Errorf("unable to evaluate variable '%s': %s", variable, err)
+				//bugfix: still record the failure, otherwise Success could
+				//end up true despite a failed assertion
+				p.Fails = append(p.Fails, *failedAssert)
+				continue
+			}
+			failedAssert.Debug[variable] = result
+			p.Fails = append(p.Fails, *failedAssert)
+		}
+	}
+	//no assertion in the file : auto-generate them from the dump
+	if p.NbAssert == 0 {
+		assertData, err := p.AutoGenFromFile(testFile)
+		if err != nil {
+			return fmt.Errorf("couldn't generate assertion: %s", err.Error())
+		}
+		p.AutoGenAssertData = assertData
+		p.AutoGenAssert = true
+	}
+	if len(p.Fails) == 0 {
+		p.Success = true
+	}
+
+	return nil
+}
+
+//RunExpression compiles and evaluates an expr expression against the loaded
+//parser dump, exposed to the expression as `results`. The returned value is
+//whatever the expression yields (bool for assertions, arbitrary values when
+//evaluating a single variable for debug output).
+func (p *ParserAssert) RunExpression(expression string) (interface{}, error) {
+	var err error
+	//debug doesn't make much sense with the ability to evaluate "on the fly"
+	//var debugFilter *exprhelpers.ExprDebugger
+	var runtimeFilter *vm.Program
+	var output interface{}
+
+	env := map[string]interface{}{"results": *p.TestData}
+
+	if runtimeFilter, err = expr.Compile(expression, expr.Env(exprhelpers.GetExprEnv(env))); err != nil {
+		return output, err
+	}
+	// if debugFilter, err = exprhelpers.NewDebugger(assert, expr.Env(exprhelpers.GetExprEnv(env))); err != nil {
+	// log.Warningf("Failed building debugher for %s : %s", assert, err)
+	// }
+
+	//dump opcode in trace level
+	log.Tracef("%s", runtimeFilter.Disassemble())
+
+	//NOTE(review): the env map is rebuilt here instead of reusing `env`
+	//above - same content, presumably harmless; confirm before refactoring
+	output, err = expr.Run(runtimeFilter, exprhelpers.GetExprEnv(map[string]interface{}{"results": *p.TestData}))
+	if err != nil {
+		log.Warningf("running : %s", expression)
+		log.Warningf("runtime error : %s", err)
+		return output, errors.Wrapf(err, "while running expression %s", expression)
+	}
+	return output, nil
+}
+
+//EvalExpression runs the expression and returns its result serialized as YAML.
+func (p *ParserAssert) EvalExpression(expression string) (string, error) {
+	out, err := p.RunExpression(expression)
+	if err != nil {
+		return "", err
+	}
+	marshaled, err := yaml.Marshal(out)
+	if err != nil {
+		return "", err
+	}
+	return string(marshaled), nil
+}
+
+func (p *ParserAssert) Run(assert string) (bool, error) {
+ output, err := p.RunExpression(assert)
+ if err != nil {
+ return false, err
+ }
+ switch out := output.(type) {
+ case bool:
+ return out, nil
+ default:
+ return false, fmt.Errorf("assertion '%s' is not a condition", assert)
+ }
+}
+
+//Escape backslash-escapes `\` and `"` in val so it can be embedded inside a
+//double-quoted string literal of a generated assertion.
+func Escape(val string) string {
+	return strings.NewReplacer(`\`, `\\`, `"`, `\"`).Replace(val)
+}
+
+//AutoGenParserAssert generates assertion lines from the loaded parser dump :
+//a result count per stage/parser, a Success check per result, and one
+//equality per non-empty Parsed/Meta/Enriched field. Stage and parser map
+//keys are sorted so the generated file is deterministic across runs.
+func (p *ParserAssert) AutoGenParserAssert() string {
+	//attempt to autogen parser asserts
+	var ret string
+
+	//sort map keys for consistent ordre
+	var stages []string
+	for stage := range *p.TestData {
+		stages = append(stages, stage)
+	}
+	sort.Strings(stages)
+	ret += fmt.Sprintf("len(results) == %d\n", len(*p.TestData))
+	for _, stage := range stages {
+		parsers := (*p.TestData)[stage]
+		//sort map keys for consistent ordre
+		var pnames []string
+		for pname := range parsers {
+			pnames = append(pnames, pname)
+		}
+		sort.Strings(pnames)
+		for _, parser := range pnames {
+			presults := parsers[parser]
+			ret += fmt.Sprintf(`len(results["%s"]["%s"]) == %d`+"\n", stage, parser, len(presults))
+			for pidx, result := range presults {
+				ret += fmt.Sprintf(`results["%s"]["%s"][%d].Success == %t`+"\n", stage, parser, pidx, result.Success)
+
+				//only successful parsers produce field values worth asserting
+				if !result.Success {
+					continue
+				}
+				//NOTE(review): Parsed/Meta/Enriched are maps, so the relative
+				//order of these generated lines is not deterministic
+				for pkey, pval := range result.Evt.Parsed {
+					if pval == "" {
+						continue
+					}
+					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Parsed["%s"] == "%s"`+"\n", stage, parser, pidx, pkey, Escape(pval))
+				}
+				for mkey, mval := range result.Evt.Meta {
+					if mval == "" {
+						continue
+					}
+					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Meta["%s"] == "%s"`+"\n", stage, parser, pidx, mkey, Escape(mval))
+				}
+				for ekey, eval := range result.Evt.Enriched {
+					if eval == "" {
+						continue
+					}
+					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Enriched["%s"] == "%s"`+"\n", stage, parser, pidx, ekey, Escape(eval))
+				}
+			}
+		}
+	}
+	return ret
+}
+
+//LoadParserDump reads and unmarshals a YAML parser dump file.
+func LoadParserDump(filepath string) (*ParserResults, error) {
+	var pdump ParserResults
+
+	results, err := ioutil.ReadFile(filepath)
+	if err != nil {
+		return nil, err
+	}
+	if err := yaml.Unmarshal(results, &pdump); err != nil {
+		return nil, err
+	}
+	return &pdump, nil
+}
+
+//DumpTree pretty-prints, for each acquired log line, the tree of parser
+//stages/nodes it traversed (with green/red success markers) and the
+//scenarios it was poured into. Lines are keyed by their acquisition
+//timestamp. Always returns nil.
+func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo) error {
+	//note : we can use line -> time as the unique identifier (of acquisition)
+
+	//state : time -> stage -> parser -> success ; assoc : time -> raw line
+	state := make(map[time.Time]map[string]map[string]bool, 0)
+	assoc := make(map[time.Time]string, 0)
+
+	//index the parser results per line timestamp
+	for stage, parsers := range parser_results {
+		for parser, results := range parsers {
+			for _, parser_res := range results {
+				evt := parser_res.Evt
+				if _, ok := state[evt.Line.Time]; !ok {
+					state[evt.Line.Time] = make(map[string]map[string]bool)
+					assoc[evt.Line.Time] = evt.Line.Raw
+				}
+				if _, ok := state[evt.Line.Time][stage]; !ok {
+					state[evt.Line.Time][stage] = make(map[string]bool)
+				}
+				state[evt.Line.Time][stage][parser] = parser_res.Success
+			}
+		}
+	}
+
+	//index the bucket pour info per line timestamp
+	for bname, evtlist := range bucket_pour {
+		for _, evt := range evtlist {
+			if evt.Line.Raw == "" {
+				continue
+			}
+			//it might be bucket oveflow being reprocessed, skip this
+			if _, ok := state[evt.Line.Time]; !ok {
+				state[evt.Line.Time] = make(map[string]map[string]bool)
+				assoc[evt.Line.Time] = evt.Line.Raw
+			}
+			//there is a trick : to know if an event succesfully exit the parsers, we check if it reached the pour() phase
+			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
+			if _, ok := state[evt.Line.Time]["buckets"]; !ok {
+				state[evt.Line.Time]["buckets"] = make(map[string]bool)
+			}
+			state[evt.Line.Time]["buckets"][bname] = true
+		}
+	}
+
+	//get each line
+	//NOTE(review): map iteration order is random, so the per-line output
+	//order varies between runs
+	for tstamp, rawstr := range assoc {
+		fmt.Printf("line: %s\n", rawstr)
+		skeys := make([]string, 0, len(state[tstamp]))
+		for k := range state[tstamp] {
+			//there is a trick : to know if an event succesfully exit the parsers, we check if it reached the pour() phase
+			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
+			if k == "buckets" {
+				continue
+			}
+			skeys = append(skeys, k)
+		}
+		sort.Strings(skeys)
+		//iterate stage
+		for _, stage := range skeys {
+			parsers := state[tstamp][stage]
+
+			sep := "├"
+			presep := "|"
+
+			fmt.Printf("\t%s %s\n", sep, stage)
+
+			pkeys := make([]string, 0, len(parsers))
+			for k := range parsers {
+				pkeys = append(pkeys, k)
+			}
+			sort.Strings(pkeys)
+
+			for idx, parser := range pkeys {
+				res := parsers[parser]
+				sep := "├"
+				//last parser of the stage gets a closing corner
+				if idx == len(pkeys)-1 {
+					sep = "└"
+				}
+				if res {
+					fmt.Printf("\t%s\t%s %s %s\n", presep, sep, emoji.GreenCircle, parser)
+				} else {
+					fmt.Printf("\t%s\t%s %s %s\n", presep, sep, emoji.RedCircle, parser)
+
+				}
+			}
+		}
+		sep := "└"
+		if len(state[tstamp]["buckets"]) > 0 {
+			sep = "├"
+		}
+		//did the event enter the bucket pour phase ?
+		if _, ok := state[tstamp]["buckets"]["OK"]; ok {
+			fmt.Printf("\t%s-------- parser success %s\n", sep, emoji.GreenCircle)
+		} else {
+			fmt.Printf("\t%s-------- parser failure %s\n", sep, emoji.RedCircle)
+		}
+		//now print bucket info
+		if len(state[tstamp]["buckets"]) > 0 {
+			fmt.Printf("\t├ Scenarios\n")
+		}
+		bnames := make([]string, 0, len(state[tstamp]["buckets"]))
+		for k, _ := range state[tstamp]["buckets"] {
+			//there is a trick : to know if an event succesfully exit the parsers, we check if it reached the pour() phase
+			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
+			if k == "OK" {
+				continue
+			}
+			bnames = append(bnames, k)
+		}
+		sort.Strings(bnames)
+		for idx, bname := range bnames {
+			sep := "├"
+			if idx == len(bnames)-1 {
+				sep = "└"
+			}
+			fmt.Printf("\t\t%s %s %s\n", sep, emoji.GreenCircle, bname)
+		}
+		fmt.Println()
+	}
+	return nil
+}
diff --git a/pkg/cstest/scenario_assert.go b/pkg/cstest/scenario_assert.go
new file mode 100644
index 000000000..472bfad7c
--- /dev/null
+++ b/pkg/cstest/scenario_assert.go
@@ -0,0 +1,272 @@
+package cstest
+
+import (
+ "bufio"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "regexp"
+ "sort"
+
+ "github.com/antonmedv/expr"
+ "github.com/antonmedv/expr/vm"
+ "github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
+ "github.com/crowdsecurity/crowdsec/pkg/types"
+ "github.com/pkg/errors"
+ log "github.com/sirupsen/logrus"
+ "gopkg.in/yaml.v2"
+)
+
+//ScenarioAssert evaluates the assertions of an assert file against a bucket
+//(scenario) dump, optionally enriched with bucket pour information.
+type ScenarioAssert struct {
+	File              string //path to the assert file
+	AutoGenAssert     bool   //true when assertions were auto-generated instead of checked
+	AutoGenAssertData string //the auto-generated assertion lines
+	NbAssert          int    //number of assertions evaluated
+	Fails             []AssertFail
+	Success           bool //true when no assertion failed
+	TestData          *BucketResults
+	PourData          *BucketPourInfo
+}
+
+//BucketResults is the list of overflow events produced by the scenarios.
+type BucketResults []types.Event
+//BucketPourInfo maps bucket name -> events poured into that bucket.
+type BucketPourInfo map[string][]types.Event
+
+//NewScenarioAssert returns a ScenarioAssert bound to the given assertion
+//file, with zeroed counters and empty result/pour data.
+func NewScenarioAssert(file string) *ScenarioAssert {
+	return &ScenarioAssert{
+		File:          file,
+		NbAssert:      0,
+		Success:       false,
+		Fails:         make([]AssertFail, 0),
+		AutoGenAssert: false,
+		TestData:      &BucketResults{},
+		PourData:      &BucketPourInfo{},
+	}
+}
+
+//AutoGenFromFile loads the scenario dump at filename (without pour data)
+//and returns auto-generated assertion lines for it.
+func (s *ScenarioAssert) AutoGenFromFile(filename string) (string, error) {
+	if err := s.LoadTest(filename, ""); err != nil {
+		return "", err
+	}
+	return s.AutoGenScenarioAssert(), nil
+}
+
+func (s *ScenarioAssert) LoadTest(filename string, bucketpour string) error {
+ var err error
+ bucketDump, err := LoadScenarioDump(filename)
+ if err != nil {
+ return fmt.Errorf("loading scenario dump file '%s': %+v", filename, err)
+ }
+ s.TestData = bucketDump
+
+ if bucketpour != "" {
+ pourDump, err := LoadBucketPourDump(bucketpour)
+ if err != nil {
+ return fmt.Errorf("loading bucket pour dump file '%s': %+v", filename, err)
+ }
+ s.PourData = pourDump
+ }
+ return nil
+}
+
+//AssertFile evaluates each non-empty line of the assert file (s.File) as an
+//expression against the scenario dump in testFile. Failed assertions are
+//collected in s.Fails ; when the assert file contains no assertion at all,
+//assertions are auto-generated from the dump instead.
+func (s *ScenarioAssert) AssertFile(testFile string) error {
+	file, err := os.Open(s.File)
+	if err != nil {
+		//bugfix: include the file name and the underlying error
+		return fmt.Errorf("failed to open assert file '%s': %s", s.File, err)
+	}
+	//bugfix: defer instead of a single Close() at the end, so early error
+	//returns don't leak the file handle
+	defer file.Close()
+
+	if err := s.LoadTest(testFile, ""); err != nil {
+		//bugfix: this is a scenario (bucket) dump, not a parser dump
+		return fmt.Errorf("unable to load scenario dump file '%s': %s", testFile, err)
+	}
+	scanner := bufio.NewScanner(file)
+	scanner.Split(bufio.ScanLines)
+	nbLine := 0
+	for scanner.Scan() {
+		nbLine += 1
+		if scanner.Text() == "" {
+			continue
+		}
+		ok, err := s.Run(scanner.Text())
+		if err != nil {
+			return fmt.Errorf("unable to run assert '%s': %+v", scanner.Text(), err)
+		}
+		s.NbAssert += 1
+		if !ok {
+			log.Debugf("%s is FALSE", scanner.Text())
+			failedAssert := &AssertFail{
+				File:       s.File,
+				Line:       nbLine,
+				Expression: scanner.Text(),
+				Debug:      make(map[string]string),
+			}
+			//extract the left-hand side variable of the assertion so its
+			//current value can be dumped for debugging.
+			//bugfix: the capture group name was missing ("(?P[^ ]+)"),
+			//which makes MustCompile panic - (?P must be followed by <name>
+			variableRE := regexp.MustCompile(`(?P<variable>[^ ]+) == .*`)
+			match := variableRE.FindStringSubmatch(scanner.Text())
+			if len(match) == 0 {
+				log.Infof("Couldn't get variable of line '%s'", scanner.Text())
+				//bugfix: still record the failure, otherwise Success could
+				//end up true despite a failed assertion
+				s.Fails = append(s.Fails, *failedAssert)
+				continue
+			}
+			variable := match[1]
+			result, err := s.EvalExpression(variable)
+			if err != nil {
+				log.Errorf("unable to evaluate variable '%s': %s", variable, err)
+				s.Fails = append(s.Fails, *failedAssert)
+				continue
+			}
+			failedAssert.Debug[variable] = result
+			s.Fails = append(s.Fails, *failedAssert)
+		}
+	}
+	//no assertion in the file : auto-generate them from the dump
+	if s.NbAssert == 0 {
+		assertData, err := s.AutoGenFromFile(testFile)
+		if err != nil {
+			return fmt.Errorf("couldn't generate assertion: %s", err.Error())
+		}
+		s.AutoGenAssertData = assertData
+		s.AutoGenAssert = true
+	}
+
+	if len(s.Fails) == 0 {
+		s.Success = true
+	}
+
+	return nil
+}
+
+//RunExpression compiles and evaluates an expr expression against the loaded
+//scenario dump, exposed to the expression as `results`. The returned value
+//is whatever the expression yields (bool for assertions, arbitrary values
+//when evaluating a single variable for debug output).
+func (s *ScenarioAssert) RunExpression(expression string) (interface{}, error) {
+	var err error
+	//debug doesn't make much sense with the ability to evaluate "on the fly"
+	//var debugFilter *exprhelpers.ExprDebugger
+	var runtimeFilter *vm.Program
+	var output interface{}
+
+	env := map[string]interface{}{"results": *s.TestData}
+
+	if runtimeFilter, err = expr.Compile(expression, expr.Env(exprhelpers.GetExprEnv(env))); err != nil {
+		return output, err
+	}
+	// if debugFilter, err = exprhelpers.NewDebugger(assert, expr.Env(exprhelpers.GetExprEnv(env))); err != nil {
+	// log.Warningf("Failed building debugher for %s : %s", assert, err)
+	// }
+
+	//dump opcode in trace level
+	log.Tracef("%s", runtimeFilter.Disassemble())
+
+	//NOTE(review): the env map is rebuilt here instead of reusing `env`
+	//above - same content, presumably harmless; confirm before refactoring
+	output, err = expr.Run(runtimeFilter, exprhelpers.GetExprEnv(map[string]interface{}{"results": *s.TestData}))
+	if err != nil {
+		log.Warningf("running : %s", expression)
+		log.Warningf("runtime error : %s", err)
+		return output, errors.Wrapf(err, "while running expression %s", expression)
+	}
+	return output, nil
+}
+
+//EvalExpression runs the expression and returns its result serialized as YAML.
+func (s *ScenarioAssert) EvalExpression(expression string) (string, error) {
+	out, err := s.RunExpression(expression)
+	if err != nil {
+		return "", err
+	}
+	marshaled, err := yaml.Marshal(out)
+	if err != nil {
+		return "", err
+	}
+	return string(marshaled), nil
+}
+
+func (s *ScenarioAssert) Run(assert string) (bool, error) {
+ output, err := s.RunExpression(assert)
+ if err != nil {
+ return false, err
+ }
+ switch out := output.(type) {
+ case bool:
+ return out, nil
+ default:
+ return false, fmt.Errorf("assertion '%s' is not a condition", assert)
+ }
+}
+
+//AutoGenScenarioAssert generates assertion lines from the loaded scenario
+//dump : a result count, per-source checks, per-event meta checks and the
+//alert's scenario/remediation/events-count.
+func (s *ScenarioAssert) AutoGenScenarioAssert() string {
+	var ret string
+	ret += fmt.Sprintf(`len(results) == %d`+"\n", len(*s.TestData))
+	for eventIndex, event := range *s.TestData {
+		for ipSrc, source := range event.Overflow.Sources {
+			ret += fmt.Sprintf(`"%s" in results[%d].Overflow.GetSources()`+"\n", ipSrc, eventIndex)
+			ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].IP == "%s"`+"\n", eventIndex, ipSrc, source.IP)
+			ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].Range == "%s"`+"\n", eventIndex, ipSrc, source.Range)
+			//bugfix: use the nil-safe getters instead of dereferencing
+			//*source.Scope / *source.Value, which panics on nil pointers
+			ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].GetScope() == "%s"`+"\n", eventIndex, ipSrc, source.GetScope())
+			ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].GetValue() == "%s"`+"\n", eventIndex, ipSrc, source.GetValue())
+		}
+		for evtIndex, evt := range event.Overflow.Alert.Events {
+			for _, meta := range evt.Meta {
+				ret += fmt.Sprintf(`results[%d].Overflow.Alert.Events[%d].GetMeta("%s") == "%s"`+"\n", eventIndex, evtIndex, meta.Key, meta.Value)
+			}
+		}
+		//bugfix: same nil-safety for Scenario and EventsCount, and drop the
+		//no-op `*&` on Remediation
+		ret += fmt.Sprintf(`results[%d].Overflow.Alert.GetScenario() == "%s"`+"\n", eventIndex, event.Overflow.Alert.GetScenario())
+		ret += fmt.Sprintf(`results[%d].Overflow.Alert.Remediation == %t`+"\n", eventIndex, event.Overflow.Alert.Remediation)
+		ret += fmt.Sprintf(`results[%d].Overflow.Alert.GetEventsCount() == %d`+"\n", eventIndex, event.Overflow.Alert.GetEventsCount())
+	}
+	return ret
+}
+
+//Len implements sort.Interface.
+func (b BucketResults) Len() int {
+	return len(b)
+}
+
+//Less implements sort.Interface : orders overflows by scenario name,
+//descending, so loaded dumps are deterministic.
+func (b BucketResults) Less(i, j int) bool {
+	return b[i].Overflow.Alert.GetScenario() > b[j].Overflow.Alert.GetScenario()
+}
+
+//Swap implements sort.Interface.
+func (b BucketResults) Swap(i, j int) {
+	b[i], b[j] = b[j], b[i]
+}
+
+//LoadBucketPourDump reads and unmarshals a YAML bucket-pour dump file.
+func LoadBucketPourDump(filepath string) (*BucketPourInfo, error) {
+	var bucketDump BucketPourInfo
+
+	results, err := ioutil.ReadFile(filepath)
+	if err != nil {
+		return nil, err
+	}
+	if err := yaml.Unmarshal(results, &bucketDump); err != nil {
+		return nil, err
+	}
+	return &bucketDump, nil
+}
+
+//LoadScenarioDump reads a YAML bucket dump file and returns the overflow
+//events, sorted by scenario name for deterministic ordering.
+func LoadScenarioDump(filepath string) (*BucketResults, error) {
+	var bucketDump BucketResults
+
+	results, err := ioutil.ReadFile(filepath)
+	if err != nil {
+		return nil, err
+	}
+	if err := yaml.Unmarshal(results, &bucketDump); err != nil {
+		return nil, err
+	}
+	sort.Sort(bucketDump)
+	return &bucketDump, nil
+}
diff --git a/pkg/cstest/utils.go b/pkg/cstest/utils.go
new file mode 100644
index 000000000..10837acdd
--- /dev/null
+++ b/pkg/cstest/utils.go
@@ -0,0 +1,81 @@
+package cstest
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+)
+
+//Copy duplicates sourceFile into destinationFile (mode 0644).
+func Copy(sourceFile string, destinationFile string) error {
+	input, err := ioutil.ReadFile(sourceFile)
+	if err != nil {
+		return err
+	}
+	return ioutil.WriteFile(destinationFile, input, 0644)
+}
+
+//CopyDir recursively copies the directory src into dest (files are written
+//with mode 0755, directories created with 0755). It refuses to copy a
+//directory into itself.
+func CopyDir(src string, dest string) error {
+	//bugfix: guard the length first - dest[:len(src)] panics with a
+	//slice-bounds error when dest is shorter than src
+	if len(dest) >= len(src) && dest[:len(src)] == src {
+		return fmt.Errorf("cannot copy a folder into the folder itself")
+	}
+
+	f, err := os.Open(src)
+	if err != nil {
+		return err
+	}
+	//bugfix: the directory handle was never closed (leak)
+	defer f.Close()
+
+	file, err := f.Stat()
+	if err != nil {
+		return err
+	}
+	if !file.IsDir() {
+		return fmt.Errorf("source %s is not a directory", file.Name())
+	}
+
+	if err := os.MkdirAll(dest, 0755); err != nil {
+		return err
+	}
+
+	files, err := ioutil.ReadDir(src)
+	if err != nil {
+		return err
+	}
+
+	for _, f := range files {
+		if f.IsDir() {
+			//recurse into sub-directories
+			if err := CopyDir(src+"/"+f.Name(), dest+"/"+f.Name()); err != nil {
+				return err
+			}
+			continue
+		}
+		content, err := ioutil.ReadFile(src + "/" + f.Name())
+		if err != nil {
+			return err
+		}
+		if err := ioutil.WriteFile(dest+"/"+f.Name(), content, 0755); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/pkg/cwhub/download.go b/pkg/cwhub/download.go
index 1905bef5b..7e126c057 100644
--- a/pkg/cwhub/download.go
+++ b/pkg/cwhub/download.go
@@ -219,8 +219,7 @@ func DownloadDataIfNeeded(hub *csconfig.Hub, target Item, force bool) error {
itemFile *os.File
err error
)
- itemFilePath := fmt.Sprintf("%s/%s", hub.HubDir, target.RemotePath)
-
+ itemFilePath := fmt.Sprintf("%s/%s/%s/%s", hub.ConfigDir, target.Type, target.Stage, target.FileName)
if itemFile, err = os.Open(itemFilePath); err != nil {
return errors.Wrapf(err, "while opening %s", itemFilePath)
}
diff --git a/pkg/cwhub/loader.go b/pkg/cwhub/loader.go
index ee5dbb14f..a5ba94069 100644
--- a/pkg/cwhub/loader.go
+++ b/pkg/cwhub/loader.go
@@ -81,6 +81,7 @@ func parser_visit(path string, f os.FileInfo, err error) error {
return fmt.Errorf("File '%s' is not from hub '%s' nor from the configuration directory '%s'", path, hubdir, installdir)
}
+ log.Tracef("stage:%s ftype:%s", stage, ftype)
//log.Printf("%s -> name:%s stage:%s", path, fname, stage)
if stage == SCENARIOS {
ftype = SCENARIOS
diff --git a/pkg/exprhelpers/exprlib.go b/pkg/exprhelpers/exprlib.go
index 4ef1c8eb5..52de6cf19 100644
--- a/pkg/exprhelpers/exprlib.go
+++ b/pkg/exprhelpers/exprlib.go
@@ -141,3 +141,8 @@ func IpInRange(ip string, ipRange string) bool {
func TimeNow() string {
return time.Now().Format(time.RFC3339)
}
+
+//KeyExists returns true when key is present in dict (exposed as an expr helper).
+func KeyExists(key string, dict map[string]interface{}) bool {
+	_, ok := dict[key]
+	return ok
+}
diff --git a/pkg/leakybucket/manager_run.go b/pkg/leakybucket/manager_run.go
index 0f219b88e..5cd94efe5 100644
--- a/pkg/leakybucket/manager_run.go
+++ b/pkg/leakybucket/manager_run.go
@@ -9,6 +9,7 @@ import (
"os"
"time"
+ "github.com/mohae/deepcopy"
log "github.com/sirupsen/logrus"
"github.com/antonmedv/expr"
@@ -18,6 +19,8 @@ import (
)
var serialized map[string]Leaky
+var BucketPourCache map[string][]types.Event
+var BucketPourTrack bool
/*The leaky routines lifecycle are based on "real" time.
But when we are running in time-machine mode, the reference time is in logs and not "real" time.
@@ -158,6 +161,18 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
)
//synchronize with DumpBucketsStateAt
+ //to track bucket pour : track items that enter the pour routine
+ if BucketPourTrack {
+ if BucketPourCache == nil {
+ BucketPourCache = make(map[string][]types.Event)
+ }
+ if _, ok := BucketPourCache["OK"]; !ok {
+ BucketPourCache["OK"] = make([]types.Event, 0)
+ }
+ evt := deepcopy.Copy(parsed)
+ BucketPourCache["OK"] = append(BucketPourCache["OK"], evt.(types.Event))
+ }
+
for idx, holder := range holders {
if holder.RunTimeFilter != nil {
@@ -290,6 +305,15 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
select {
case bucket.In <- parsed:
holder.logger.Tracef("Successfully sent !")
+ //and track item poured to each bucket
+ if BucketPourTrack {
+ if _, ok := BucketPourCache[bucket.Name]; !ok {
+ BucketPourCache[bucket.Name] = make([]types.Event, 0)
+ }
+ evt := deepcopy.Copy(parsed)
+ BucketPourCache[bucket.Name] = append(BucketPourCache[bucket.Name], evt.(types.Event))
+ }
+
//sent was successful !
sent = true
continue
diff --git a/pkg/leakybucket/overflows.go b/pkg/leakybucket/overflows.go
index c8cdd596c..7df1aa06f 100644
--- a/pkg/leakybucket/overflows.go
+++ b/pkg/leakybucket/overflows.go
@@ -3,6 +3,7 @@ package leakybucket
import (
"fmt"
"net"
+ "sort"
"strconv"
"github.com/crowdsecurity/crowdsec/pkg/models"
@@ -144,7 +145,14 @@ func EventsFromQueue(queue *Queue) []*models.Event {
continue
}
meta := models.Meta{}
- for k, v := range evt.Meta {
+ //we want consistence
+ skeys := make([]string, 0, len(evt.Meta))
+ for k := range evt.Meta {
+ skeys = append(skeys, k)
+ }
+ sort.Strings(skeys)
+ for _, k := range skeys {
+ v := evt.Meta[k]
subMeta := models.MetaItems0{Key: k, Value: v}
meta = append(meta, &subMeta)
}
diff --git a/pkg/models/helpers.go b/pkg/models/helpers.go
index b9ea4a92f..d476e1fc3 100644
--- a/pkg/models/helpers.go
+++ b/pkg/models/helpers.go
@@ -17,3 +17,33 @@ func (a *Alert) GetScenario() string {
}
return *a.Scenario
}
+
+//GetEventsCount returns the alert's event count, or 0 when the field is nil.
+func (a *Alert) GetEventsCount() int32 {
+	if a.EventsCount == nil {
+		return 0
+	}
+	return *a.EventsCount
+}
+
+//GetMeta returns the value of the first meta item whose Key matches key,
+//or the empty string when no such key exists.
+func (e *Event) GetMeta(key string) string {
+	for _, meta := range e.Meta {
+		if meta.Key == key {
+			return meta.Value
+		}
+	}
+	return ""
+}
+
+//GetValue returns the source's value, or the empty string when the field is nil.
+func (s Source) GetValue() string {
+	if s.Value == nil {
+		return ""
+	}
+	return *s.Value
+}
+
+//GetScope returns the source's scope, or the empty string when the field is nil.
+func (s Source) GetScope() string {
+	if s.Scope == nil {
+		return ""
+	}
+	return *s.Scope
+}
diff --git a/pkg/parser/node.go b/pkg/parser/node.go
index a7e0c8d8c..37a73d623 100644
--- a/pkg/parser/node.go
+++ b/pkg/parser/node.go
@@ -210,7 +210,7 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx) (bool, error) {
}
}
if isWhitelisted {
- p.WhiteListReason = n.Whitelist.Reason
+ p.WhitelistReason = n.Whitelist.Reason
/*huglily wipe the ban order if the event is whitelisted and it's an overflow */
if p.Type == types.OVFLW { /*don't do this at home kids */
ips := []string{}
diff --git a/pkg/parser/runtime.go b/pkg/parser/runtime.go
index c51ea4614..7b5278f37 100644
--- a/pkg/parser/runtime.go
+++ b/pkg/parser/runtime.go
@@ -220,8 +220,14 @@ func stageidx(stage string, stages []string) int {
return -1
}
+type ParserResult struct {
+ Evt types.Event
+ Success bool
+}
+
var ParseDump bool
-var StageParseCache map[string]map[string]types.Event
+var DumpFolder string
+var StageParseCache map[string]map[string][]ParserResult
func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error) {
var event types.Event = xp
@@ -250,12 +256,18 @@ func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error)
}
if ParseDump {
- StageParseCache = make(map[string]map[string]types.Event)
+ if StageParseCache == nil {
+ StageParseCache = make(map[string]map[string][]ParserResult)
+ StageParseCache["success"] = make(map[string][]ParserResult)
+ StageParseCache["success"][""] = make([]ParserResult, 0)
+ }
}
for _, stage := range ctx.Stages {
if ParseDump {
- StageParseCache[stage] = make(map[string]types.Event)
+ if _, ok := StageParseCache[stage]; !ok {
+ StageParseCache[stage] = make(map[string][]ParserResult)
+ }
}
/* if the node is forward in stages, seek to its stage */
/* this is for example used by testing system to inject logs in post-syslog-parsing phase*/
@@ -290,12 +302,16 @@ func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error)
clog.Fatalf("Error while processing node : %v", err)
}
clog.Tracef("node (%s) ret : %v", node.rn, ret)
+ if ParseDump {
+ if len(StageParseCache[stage][node.Name]) == 0 {
+ StageParseCache[stage][node.Name] = make([]ParserResult, 0)
+ }
+ evtcopy := deepcopy.Copy(event)
+ parserInfo := ParserResult{Evt: evtcopy.(types.Event), Success: ret}
+ StageParseCache[stage][node.Name] = append(StageParseCache[stage][node.Name], parserInfo)
+ }
if ret {
isStageOK = true
- if ParseDump {
- evtcopy := deepcopy.Copy(event)
- StageParseCache[stage][node.Name] = evtcopy.(types.Event)
- }
}
if ret && node.OnSuccess == "next_stage" {
clog.Debugf("node successful, stop end stage %s", stage)
diff --git a/pkg/types/event.go b/pkg/types/event.go
index ff04619cd..f18956fe9 100644
--- a/pkg/types/event.go
+++ b/pkg/types/event.go
@@ -20,7 +20,7 @@ type Event struct {
Type int `yaml:"Type,omitempty" json:"Type,omitempty"` //Can be types.LOG (0) or types.OVFLOW (1)
ExpectMode int `yaml:"ExpectMode,omitempty" json:"ExpectMode,omitempty"` //how to buckets should handle event : leaky.TIMEMACHINE or leaky.LIVE
Whitelisted bool `yaml:"Whitelisted,omitempty" json:"Whitelisted,omitempty"`
- WhiteListReason string `yaml:"whitelist_reason,omitempty" json:"whitelist_reason,omitempty"`
+ WhitelistReason string `yaml:"WhitelistReason,omitempty" json:"whitelist_reason,omitempty"`
//should add whitelist reason ?
/* the current stage of the line being parsed */
Stage string `yaml:"Stage,omitempty" json:"Stage,omitempty"`
@@ -31,7 +31,7 @@ type Event struct {
/* output of enrichment */
Enriched map[string]string `yaml:"Enriched,omitempty" json:"Enriched,omitempty"`
/* Overflow */
- Overflow RuntimeAlert `yaml:"Alert,omitempty" json:"Alert,omitempty"`
+ Overflow RuntimeAlert `yaml:"Overflow,omitempty" json:"Alert,omitempty"`
Time time.Time `yaml:"Time,omitempty" json:"Time,omitempty"` //parsed time `json:"-"` ``
StrTime string `yaml:"StrTime,omitempty" json:"StrTime,omitempty"`
MarshaledTime string `yaml:"MarshaledTime,omitempty" json:"MarshaledTime,omitempty"`
@@ -78,3 +78,11 @@ type RuntimeAlert struct {
//APIAlerts will be populated at the end when there is more than one source
APIAlerts []models.Alert `yaml:"APIAlerts,omitempty" json:"APIAlerts,omitempty"`
}
+
+//GetSources returns the keys of the Sources map of the alert.
+func (r RuntimeAlert) GetSources() []string {
+	//idiom: drop the redundant `, _` in the range clause and pre-size the
+	//slice to avoid re-allocations
+	ret := make([]string, 0, len(r.Sources))
+	for key := range r.Sources {
+		ret = append(ret, key)
+	}
+	return ret
+}