merge hub branch

Sebastien Blot 2023-11-08 20:25:42 +01:00
commit 694028f769
31 changed files with 661 additions and 387 deletions

View file

@ -41,4 +41,4 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tag_name="${GITHUB_REF##*/}"
hub release edit -a crowdsec-release.tgz -a vendor.tgz -a *-vendor.tar.xz -m "" "$tag_name"
gh release upload "$tag_name" crowdsec-release.tgz vendor.tgz *-vendor.tar.xz

View file

@ -22,6 +22,7 @@ type OldAPICfg struct {
}
// it's a rip of the cli version, but in silent-mode
// XXX: redundant, should call InstallItem
func silentInstallItem(hub *cwhub.Hub, name, obtype string) (string, error) {
var item = hub.GetItem(obtype, name)
if item == nil {

View file

@ -1,7 +1,6 @@
package main
import (
"errors"
"fmt"
"github.com/fatih/color"
@ -94,16 +93,7 @@ func runHubUpdate(cmd *cobra.Command, args []string) error {
// don't use require.Hub because if there is no index file, it would fail
hub, err := cwhub.NewHub(local, remote, true)
if err != nil {
// XXX: this should be done when downloading items, too
// but what is the fallback to master actually solving?
if !errors.Is(err, cwhub.ErrIndexNotFound) {
return fmt.Errorf("failed to get Hub index: %w", err)
}
log.Warnf("Could not find index file for branch '%s', using 'master'", remote.Branch)
remote.Branch = "master"
if hub, err = cwhub.NewHub(local, remote, true); err != nil {
return fmt.Errorf("failed to get Hub index after retry: %w", err)
}
return fmt.Errorf("failed to update hub: %w", err)
}
// use LocalSync to get warnings about tainted / outdated items
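
Editor's note: for context, a minimal sketch of what the update path now boils down to, assuming the usual fmt, csconfig and cwhub imports. The paths and URL template are illustrative placeholders, not values taken from this commit.

// hypothetical helper mirroring the shape of runHubUpdate after this change
func updateHubIndex() error {
	local := &csconfig.LocalHubCfg{
		HubDir:         "/etc/crowdsec/hub",             // placeholder path
		HubIndexFile:   "/etc/crowdsec/hub/.index.json", // placeholder path
		InstallDir:     "/etc/crowdsec",
		InstallDataDir: "/var/lib/crowdsec/data",
	}
	remote := &cwhub.RemoteHubCfg{
		Branch:      "master",
		URLTemplate: "https://example.com/%s/%s", // the real template comes from the configuration
		IndexPath:   ".index.json",
	}
	// downloadIndex=true: fetch the index before parsing it, so a missing index file is not an error here
	if _, err := cwhub.NewHub(local, remote, true); err != nil {
		return fmt.Errorf("failed to update hub: %w", err)
	}
	return nil
}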

View file

@ -18,9 +18,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/hubtest"
)
var (
HubTest hubtest.HubTest
)
var HubTest hubtest.HubTest
func NewHubTestCmd() *cobra.Command {
var hubPath string
@ -43,6 +41,7 @@ func NewHubTestCmd() *cobra.Command {
return nil
},
}
cmdHubTest.PersistentFlags().StringVar(&hubPath, "hub", ".", "Path to hub folder")
cmdHubTest.PersistentFlags().StringVar(&crowdsecPath, "crowdsec", "crowdsec", "Path to crowdsec")
cmdHubTest.PersistentFlags().StringVar(&cscliPath, "cscli", "cscli", "Path to cscli")
@ -59,7 +58,6 @@ func NewHubTestCmd() *cobra.Command {
return cmdHubTest
}
func NewHubTestCreateCmd() *cobra.Command {
parsers := []string{}
postoverflows := []string{}
@ -164,6 +162,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
return nil
},
}
cmdHubTestCreate.PersistentFlags().StringVarP(&logType, "type", "t", "", "Log type of the test")
cmdHubTestCreate.Flags().StringSliceVarP(&parsers, "parsers", "p", parsers, "Parsers to add to test")
cmdHubTestCreate.Flags().StringSliceVar(&postoverflows, "postoverflows", postoverflows, "Postoverflows to add to test")
@ -173,7 +172,6 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
return cmdHubTestCreate
}
func NewHubTestRunCmd() *cobra.Command {
var noClean bool
var runAll bool
@ -186,7 +184,7 @@ func NewHubTestRunCmd() *cobra.Command {
RunE: func(cmd *cobra.Command, args []string) error {
if !runAll && len(args) == 0 {
printHelp(cmd)
return fmt.Errorf("Please provide test to run or --all flag")
return fmt.Errorf("please provide test to run or --all flag")
}
if runAll {
@ -202,6 +200,9 @@ func NewHubTestRunCmd() *cobra.Command {
}
}
// set timezone to avoid DST issues
os.Setenv("TZ", "UTC")
for _, test := range HubTest.Tests {
if csConfig.Cscli.Output == "human" {
log.Infof("Running test '%s'", test.Name)
@ -293,9 +294,11 @@ func NewHubTestRunCmd() *cobra.Command {
}
}
}
if csConfig.Cscli.Output == "human" {
switch csConfig.Cscli.Output {
case "human":
hubTestResultTable(color.Output, testResult)
} else if csConfig.Cscli.Output == "json" {
case "json":
jsonResult := make(map[string][]string, 0)
jsonResult["success"] = make([]string, 0)
jsonResult["fail"] = make([]string, 0)
@ -311,6 +314,8 @@ func NewHubTestRunCmd() *cobra.Command {
return fmt.Errorf("unable to json test result: %s", err)
}
fmt.Println(string(jsonStr))
default:
return fmt.Errorf("only human/json output modes are supported")
}
if !success {
@ -320,6 +325,7 @@ func NewHubTestRunCmd() *cobra.Command {
return nil
},
}
cmdHubTestRun.Flags().BoolVar(&noClean, "no-clean", false, "Don't clean runtime environment if test succeed")
cmdHubTestRun.Flags().BoolVar(&forceClean, "clean", false, "Clean runtime environment if test fail")
cmdHubTestRun.Flags().BoolVar(&runAll, "all", false, "Run all tests")
@ -327,7 +333,6 @@ func NewHubTestRunCmd() *cobra.Command {
return cmdHubTestRun
}
func NewHubTestCleanCmd() *cobra.Command {
var cmdHubTestClean = &cobra.Command{
Use: "clean",
@ -352,7 +357,6 @@ func NewHubTestCleanCmd() *cobra.Command {
return cmdHubTestClean
}
func NewHubTestInfoCmd() *cobra.Command {
var cmdHubTestInfo = &cobra.Command{
Use: "info",
@ -381,7 +385,6 @@ func NewHubTestInfoCmd() *cobra.Command {
return cmdHubTestInfo
}
func NewHubTestListCmd() *cobra.Command {
var cmdHubTestList = &cobra.Command{
Use: "list",
@ -412,7 +415,6 @@ func NewHubTestListCmd() *cobra.Command {
return cmdHubTestList
}
func NewHubTestCoverageCmd() *cobra.Command {
var showParserCov bool
var showScenarioCov bool
@ -427,8 +429,8 @@ func NewHubTestCoverageCmd() *cobra.Command {
return fmt.Errorf("unable to load all tests: %+v", err)
}
var err error
scenarioCoverage := []hubtest.ScenarioCoverage{}
parserCoverage := []hubtest.ParserCoverage{}
scenarioCoverage := []hubtest.Coverage{}
parserCoverage := []hubtest.Coverage{}
scenarioCoveragePercent := 0
parserCoveragePercent := 0
@ -443,7 +445,7 @@ func NewHubTestCoverageCmd() *cobra.Command {
parserTested := 0
for _, test := range parserCoverage {
if test.TestsCount > 0 {
parserTested += 1
parserTested++
}
}
parserCoveragePercent = int(math.Round((float64(parserTested) / float64(len(parserCoverage)) * 100)))
@ -454,12 +456,14 @@ func NewHubTestCoverageCmd() *cobra.Command {
if err != nil {
return fmt.Errorf("while getting scenario coverage: %s", err)
}
scenarioTested := 0
for _, test := range scenarioCoverage {
if test.TestsCount > 0 {
scenarioTested += 1
scenarioTested++
}
}
scenarioCoveragePercent = int(math.Round((float64(scenarioTested) / float64(len(scenarioCoverage)) * 100)))
}
@ -474,7 +478,8 @@ func NewHubTestCoverageCmd() *cobra.Command {
os.Exit(0)
}
if csConfig.Cscli.Output == "human" {
switch csConfig.Cscli.Output {
case "human":
if showParserCov || showAll {
hubTestParserCoverageTable(color.Output, parserCoverage)
}
@ -489,7 +494,7 @@ func NewHubTestCoverageCmd() *cobra.Command {
if showScenarioCov || showAll {
fmt.Printf("SCENARIOS : %d%% of coverage\n", scenarioCoveragePercent)
}
} else if csConfig.Cscli.Output == "json" {
case "json":
dump, err := json.MarshalIndent(parserCoverage, "", " ")
if err != nil {
return err
@ -500,13 +505,14 @@ func NewHubTestCoverageCmd() *cobra.Command {
return err
}
fmt.Printf("%s", dump)
} else {
default:
return fmt.Errorf("only human/json output modes are supported")
}
return nil
},
}
cmdHubTestCoverage.PersistentFlags().BoolVar(&showOnlyPercent, "percent", false, "Show only percentages of coverage")
cmdHubTestCoverage.PersistentFlags().BoolVar(&showParserCov, "parsers", false, "Show only parsers coverage")
cmdHubTestCoverage.PersistentFlags().BoolVar(&showScenarioCov, "scenarios", false, "Show only scenarios coverage")
@ -514,7 +520,6 @@ func NewHubTestCoverageCmd() *cobra.Command {
return cmdHubTestCoverage
}
func NewHubTestEvalCmd() *cobra.Command {
var evalExpression string
var cmdHubTestEval = &cobra.Command{
@ -528,26 +533,29 @@ func NewHubTestEvalCmd() *cobra.Command {
if err != nil {
return fmt.Errorf("can't load test: %+v", err)
}
err = test.ParserAssert.LoadTest(test.ParserResultFile)
if err != nil {
return fmt.Errorf("can't load test results from '%s': %+v", test.ParserResultFile, err)
}
output, err := test.ParserAssert.EvalExpression(evalExpression)
if err != nil {
return err
}
fmt.Print(output)
}
return nil
},
}
cmdHubTestEval.PersistentFlags().StringVarP(&evalExpression, "expr", "e", "", "Expression to eval")
return cmdHubTestEval
}
func NewHubTestExplainCmd() *cobra.Command {
var cmdHubTestExplain = &cobra.Command{
Use: "explain",
@ -562,24 +570,22 @@ func NewHubTestExplainCmd() *cobra.Command {
}
err = test.ParserAssert.LoadTest(test.ParserResultFile)
if err != nil {
err := test.Run()
if err != nil {
if err = test.Run(); err != nil {
return fmt.Errorf("running test '%s' failed: %+v", test.Name, err)
}
err = test.ParserAssert.LoadTest(test.ParserResultFile)
if err != nil {
if err = test.ParserAssert.LoadTest(test.ParserResultFile); err != nil {
return fmt.Errorf("unable to load parser result after run: %s", err)
}
}
err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
if err != nil {
err := test.Run()
if err != nil {
if err = test.Run(); err != nil {
return fmt.Errorf("running test '%s' failed: %+v", test.Name, err)
}
err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
if err != nil {
if err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile); err != nil {
return fmt.Errorf("unable to load scenario result after run: %s", err)
}
}

View file

@ -41,39 +41,41 @@ func hubTestListTable(out io.Writer, tests []*hubtest.HubTestItem) {
t.Render()
}
func hubTestParserCoverageTable(out io.Writer, coverage []hubtest.ParserCoverage) {
func hubTestParserCoverageTable(out io.Writer, coverage []hubtest.Coverage) {
t := newLightTable(out)
t.SetHeaders("Parser", "Status", "Number of tests")
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
parserTested := 0
for _, test := range coverage {
status := emoji.RedCircle.String()
if test.TestsCount > 0 {
status = emoji.GreenCircle.String()
parserTested++
}
t.AddRow(test.Parser, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
t.AddRow(test.Name, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
}
t.Render()
}
func hubTestScenarioCoverageTable(out io.Writer, coverage []hubtest.ScenarioCoverage) {
func hubTestScenarioCoverageTable(out io.Writer, coverage []hubtest.Coverage) {
t := newLightTable(out)
t.SetHeaders("Scenario", "Status", "Number of tests")
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
parserTested := 0
for _, test := range coverage {
status := emoji.RedCircle.String()
if test.TestsCount > 0 {
status = emoji.GreenCircle.String()
parserTested++
}
t.AddRow(test.Scenario, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
t.AddRow(test.Name, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
}
t.Render()

View file

@ -19,7 +19,7 @@ import (
)
// XXX: this should not need hub?
func ShowMetrics(hub *cwhub.Hub, hubItem *cwhub.Item) {
func ShowMetrics(hub *cwhub.Hub, hubItem *cwhub.Item) error {
switch hubItem.Type {
case cwhub.PARSERS:
metrics := GetParserMetric(csConfig.Cscli.PrometheusUrl, hubItem.Name)
@ -28,26 +28,29 @@ func ShowMetrics(hub *cwhub.Hub, hubItem *cwhub.Item) {
metrics := GetScenarioMetric(csConfig.Cscli.PrometheusUrl, hubItem.Name)
scenarioMetricsTable(color.Output, hubItem.Name, metrics)
case cwhub.COLLECTIONS:
for _, item := range hubItem.Parsers {
metrics := GetParserMetric(csConfig.Cscli.PrometheusUrl, item)
parserMetricsTable(color.Output, item, metrics)
for _, parserName := range hubItem.Parsers {
metrics := GetParserMetric(csConfig.Cscli.PrometheusUrl, parserName)
parserMetricsTable(color.Output, parserName, metrics)
}
for _, item := range hubItem.Scenarios {
metrics := GetScenarioMetric(csConfig.Cscli.PrometheusUrl, item)
scenarioMetricsTable(color.Output, item, metrics)
for _, scenarioName := range hubItem.Scenarios {
metrics := GetScenarioMetric(csConfig.Cscli.PrometheusUrl, scenarioName)
scenarioMetricsTable(color.Output, scenarioName, metrics)
}
for _, item := range hubItem.Collections {
hubItem = hub.GetItem(cwhub.COLLECTIONS, item)
if hubItem == nil {
log.Fatalf("unable to retrieve item '%s' from collection '%s'", item, hubItem.Name)
for _, collName := range hubItem.Collections {
subColl := hub.GetItem(cwhub.COLLECTIONS, collName)
if subColl == nil {
return fmt.Errorf("unable to retrieve sub-collection '%s' from '%s'", collName, hubItem.Name)
}
if err := ShowMetrics(hub, subColl); err != nil {
return err
}
ShowMetrics(hub, hubItem)
}
case cwhub.WAAP_RULES:
log.Fatalf("FIXME: not implemented yet")
default:
log.Errorf("item of type '%s' is unknown", hubItem.Type)
}
return nil
}
// GetParserMetric is a complete rip from prom2json

View file

@ -157,7 +157,9 @@ func InspectItem(hub *cwhub.Hub, item *cwhub.Item, showMetrics bool) error {
if csConfig.Cscli.Output == "human" && showMetrics {
fmt.Printf("\nCurrent metrics: \n")
ShowMetrics(hub, item)
if err := ShowMetrics(hub, item); err != nil {
return err
}
}
return nil

View file

@ -23,6 +23,7 @@ func CAPI(c *csconfig.Config) error {
if c.API.Server.OnlineClient == nil {
return fmt.Errorf("no configuration for Central API (CAPI) in '%s'", *c.FilePath)
}
return nil
}
@ -30,6 +31,7 @@ func PAPI(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
return fmt.Errorf("no PAPI URL in configuration")
}
return nil
}
@ -45,6 +47,7 @@ func DB(c *csconfig.Config) error {
if err := c.LoadDBConfig(); err != nil {
return fmt.Errorf("this command requires direct database access (must be run on the local API machine): %w", err)
}
return nil
}
@ -88,7 +91,7 @@ func Hub(c *csconfig.Config, remote *cwhub.RemoteHubCfg) (*cwhub.Hub, error) {
hub, err := cwhub.NewHub(local, remote, false)
if err != nil {
return nil, fmt.Errorf("failed to read Hub index: '%w'. Run 'sudo cscli hub update' to download the index again", err)
return nil, fmt.Errorf("failed to read Hub index: %w. Run 'sudo cscli hub update' to download the index again", err)
}
return hub, nil

View file

@ -15,7 +15,7 @@ import (
)
type DataSet struct {
Data []*types.DataSource `yaml:"data,omitempty"`
Data []types.DataSource `yaml:"data,omitempty"`
}
func downloadFile(url string, destPath string) error {
@ -59,7 +59,7 @@ func downloadFile(url string, destPath string) error {
return nil
}
func GetData(data []*types.DataSource, dataDir string) error {
func GetData(data []types.DataSource, dataDir string) error {
for _, dataS := range data {
destPath := filepath.Join(dataDir, dataS.DestPath)
log.Infof("downloading data '%s' in '%s'", dataS.SourceURL, destPath)

View file

@ -1,7 +1,6 @@
package cwhub
// Enable/disable items already installed (no downloading here)
// This file is not named install.go to avoid confusion with the functions in helpers.go
// Enable/disable items already downloaded
import (
"fmt"
@ -11,8 +10,9 @@ import (
log "github.com/sirupsen/logrus"
)
// creates symlink between actual config file at hub.HubDir and hub.ConfigDir
// EnableItem creates a symlink between actual config file at hub.HubDir and hub.ConfigDir
// Handles collections recursively
// XXX: called from config_restore otherwise no need to export
func (h *Hub) EnableItem(target *Item) error {
parentDir := filepath.Clean(h.local.InstallDir + "/" + target.Type + "/" + target.Stage + "/")
@ -42,16 +42,14 @@ func (h *Hub) EnableItem(target *Item) error {
}
// install sub-items if it's a collection
if target.Type == COLLECTIONS {
for _, sub := range target.SubItems() {
val, ok := h.Items[sub.Type][sub.Name]
if !ok {
return fmt.Errorf("required %s %s of %s doesn't exist, abort", sub.Type, sub.Name, target.Name)
}
for _, sub := range target.SubItems() {
val, ok := h.Items[sub.Type][sub.Name]
if !ok {
return fmt.Errorf("required %s %s of %s doesn't exist, abort", sub.Type, sub.Name, target.Name)
}
if err := h.EnableItem(&val); err != nil {
return fmt.Errorf("while installing %s: %w", sub.Name, err)
}
if err := h.EnableItem(&val); err != nil {
return fmt.Errorf("while installing %s: %w", sub.Name, err)
}
}
@ -123,32 +121,31 @@ func (h *Hub) DisableItem(target *Item, purge bool, force bool) error {
return fmt.Errorf("%s is tainted, use '--force' to overwrite", target.Name)
}
// for a COLLECTIONS, disable sub-items
if target.Type == COLLECTIONS {
for _, sub := range target.SubItems() {
val, ok := h.Items[sub.Type][sub.Name]
if !ok {
log.Errorf("Referred %s %s in collection %s doesn't exist.", sub.Type, sub.Name, target.Name)
continue
}
// disable sub-items if any - it's a collection
for _, sub := range target.SubItems() {
// XXX: we do this already when syncing, do we really need to do consistency checks here and there?
val, ok := h.Items[sub.Type][sub.Name]
if !ok {
log.Errorf("Referred %s %s in collection %s doesn't exist.", sub.Type, sub.Name, target.Name)
continue
}
// check if the item doesn't belong to another collection before removing it
toRemove := true
// check if the item doesn't belong to another collection before removing it
toRemove := true
for _, collection := range val.BelongsToCollections {
if collection != target.Name {
toRemove = false
break
}
for _, collection := range val.BelongsToCollections {
if collection != target.Name {
toRemove = false
break
}
}
if toRemove {
if err = h.DisableItem(&val, purge, force); err != nil {
return fmt.Errorf("while disabling %s: %w", sub.Name, err)
}
} else {
log.Infof("%s was not removed because it belongs to another collection", val.Name)
if toRemove {
if err = h.DisableItem(&val, purge, force); err != nil {
return fmt.Errorf("while disabling %s: %w", sub.Name, err)
}
} else {
log.Infof("%s was not removed because it belongs to another collection", val.Name)
}
}

View file

@ -2,11 +2,20 @@ package cwhub
import (
"errors"
"fmt"
)
var (
// ErrNilRemoteHub is returned when the remote hub configuration is not provided to the NewHub constructor.
// All attempts to download index or items will return this error.
ErrMissingReference = errors.New("Reference(s) missing in collection")
ErrNilRemoteHub = errors.New("remote hub configuration is not provided. Please report this issue to the developers")
)
type IndexNotFoundError struct {
URL string
Branch string
}
func (e IndexNotFoundError) Error() string {
return fmt.Sprintf("index not found at %s, branch '%s'. Please check the .cscli.hub_branch value if you specified it in config.yaml, or use 'master' if not sure", e.URL, e.Branch)
}
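
Editor's note: because the old ErrIndexNotFound sentinel is replaced by a typed error carrying the URL and branch, callers can recover both values with errors.As. A hypothetical sketch (the helper name and logging are illustrative, assuming the errors, cwhub and logrus imports):

func reportIndexError(err error) {
	var notFound cwhub.IndexNotFoundError
	if errors.As(err, &notFound) {
		// the typed error exposes what was tried, instead of a bare "index not found"
		log.Warnf("no hub index at %s for branch '%s'", notFound.URL, notFound.Branch)
		return
	}
	if err != nil {
		log.Errorf("hub error: %s", err)
	}
}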

View file

@ -2,7 +2,6 @@ package cwhub
import (
"encoding/json"
"errors"
"fmt"
"os"
"strings"
@ -20,10 +19,7 @@ type Hub struct {
skippedTainted int
}
var (
theHub *Hub
ErrIndexNotFound = fmt.Errorf("index not found")
)
var theHub *Hub
// GetHub returns the hub singleton
// it returns an error if it's not initialized to avoid nil dereference
@ -48,85 +44,70 @@ func NewHub(local *csconfig.LocalHubCfg, remote *RemoteHubCfg, downloadIndex boo
}
if downloadIndex {
if err := remote.DownloadIndex(local.HubIndexFile); err != nil {
if err := remote.downloadIndex(local.HubIndexFile); err != nil {
return nil, err
}
}
log.Debugf("loading hub idx %s", local.HubIndexFile)
bidx, err := os.ReadFile(local.HubIndexFile)
if err != nil {
return nil, fmt.Errorf("unable to read index file: %w", err)
}
ret, err := ParseIndex(bidx)
if err != nil {
if !errors.Is(err, ErrMissingReference) {
return nil, fmt.Errorf("failed to load index: %w", err)
}
// XXX: why the error check if we bail out anyway?
return nil, err
}
theHub = &Hub{
Items: ret,
local: local,
remote: remote,
}
if _, err = theHub.LocalSync(); err != nil {
return nil, fmt.Errorf("failed to sync hub index: %w", err)
if err := theHub.parseIndex(); err != nil {
return nil, fmt.Errorf("failed to load index: %w", err)
}
if _, err := theHub.LocalSync(); err != nil {
return nil, fmt.Errorf("failed to sync items: %w", err)
}
return theHub, nil
}
// ParseIndex takes the content of an index file and returns the map of associated parsers/scenarios/collections
func ParseIndex(buff []byte) (HubItems, error) {
var (
RawIndex HubItems
missingItems []string
)
// parseIndex takes the content of an index file and fills the map of associated parsers/scenarios/collections
func (h *Hub) parseIndex() error {
bidx, err := os.ReadFile(h.local.HubIndexFile)
if err != nil {
return fmt.Errorf("unable to read index file: %w", err)
}
if err := json.Unmarshal(buff, &RawIndex); err != nil {
return nil, fmt.Errorf("failed to unmarshal index: %w", err)
if err := json.Unmarshal(bidx, &h.Items); err != nil {
return fmt.Errorf("failed to unmarshal index: %w", err)
}
log.Debugf("%d item types in hub index", len(ItemTypes))
// Iterate over the different types to complete the struct
for _, itemType := range ItemTypes {
log.Tracef("%s: %d items", itemType, len(RawIndex[itemType]))
log.Tracef("%s: %d items", itemType, len(h.Items[itemType]))
for name, item := range RawIndex[itemType] {
for name, item := range h.Items[itemType] {
item.Name = name
// if the item has no (redundant) author, take it from the json key
if item.Author == "" && strings.Contains(name, "/") {
item.Author = strings.Split(name, "/")[0]
}
item.Type = itemType
x := strings.Split(item.RemotePath, "/")
item.FileName = x[len(x)-1]
RawIndex[itemType][name] = item
if itemType != COLLECTIONS {
continue
}
h.Items[itemType][name] = item
// if it's a collection, check its sub-items are present
// XXX should be done later
// XXX should be done later, maybe report all missing at once?
for _, sub := range item.SubItems() {
if _, ok := RawIndex[sub.Type][sub.Name]; !ok {
if _, ok := h.Items[sub.Type][sub.Name]; !ok {
log.Errorf("Referred %s %s in collection %s doesn't exist.", sub.Type, sub.Name, item.Name)
missingItems = append(missingItems, sub.Name)
}
}
}
}
if len(missingItems) > 0 {
return RawIndex, fmt.Errorf("%q: %w", missingItems, ErrMissingReference)
}
return RawIndex, nil
return nil
}
// ItemStats returns total counts of the hub items
@ -134,11 +115,6 @@ func (h *Hub) ItemStats() []string {
loaded := ""
for _, itemType := range ItemTypes {
// ensure the order is always the same
if h.Items[itemType] == nil {
continue
}
if len(h.Items[itemType]) == 0 {
continue
}

View file

@ -45,7 +45,7 @@ func TestDownloadIndex(t *testing.T) {
IndexPath: "",
}
err = hub.remote.DownloadIndex(tmpIndex.Name())
err = hub.remote.downloadIndex(tmpIndex.Name())
cstest.RequireErrorContains(t, err, "failed to build hub index request: invalid URL template 'x'")
// bad domain
@ -57,7 +57,7 @@ func TestDownloadIndex(t *testing.T) {
IndexPath: ".index.json",
}
err = hub.remote.DownloadIndex(tmpIndex.Name())
err = hub.remote.downloadIndex(tmpIndex.Name())
require.NoError(t, err)
// XXX: this is not failing
// cstest.RequireErrorContains(t, err, "failed http request for hub index: Get")
@ -71,6 +71,6 @@ func TestDownloadIndex(t *testing.T) {
IndexPath: ".index.json",
}
err = hub.remote.DownloadIndex("/does/not/exist/index.json")
err = hub.remote.downloadIndex("/does/not/exist/index.json")
cstest.RequireErrorContains(t, err, "while opening hub index file: open /does/not/exist/index.json:")
}

View file

@ -4,8 +4,8 @@ import (
"encoding/json"
"fmt"
"github.com/Masterminds/semver/v3"
"github.com/enescakir/emoji"
"golang.org/x/mod/semver"
)
const (
@ -18,7 +18,14 @@ const (
WAAP_RULES = "waap-rules"
)
// XXX: The order is important, as it is used to range over sub-items in collections
const (
VersionUpToDate = iota
VersionUpdateAvailable
VersionUnknown
VersionFuture
)
// The order is important, as it is used to range over sub-items in collections
var ItemTypes = []string{PARSERS, POSTOVERFLOWS, SCENARIOS, WAAP_CONFIGS, WAAP_RULES, COLLECTIONS}
type HubItems map[string]map[string]Item
@ -194,7 +201,23 @@ func (i *Item) Status() (string, emoji.Emoji) {
// versionStatus: semver requires 'v' prefix
func (i *Item) versionStatus() int {
return semver.Compare("v"+i.Version, "v"+i.LocalVersion)
local, err := semver.NewVersion(i.LocalVersion)
if err != nil {
return VersionUnknown
}
// hub versions are already validated while syncing, ignore errors
latest, _ := semver.NewVersion(i.Version)
if local.LessThan(latest) {
return VersionUpdateAvailable
}
if local.Equal(latest) {
return VersionUpToDate
}
return VersionFuture
}
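
Editor's note: the version check moves from string comparison with golang.org/x/mod/semver to parsed versions with github.com/Masterminds/semver, and now returns one of the explicit states defined above. A standalone sketch of the same comparison, with made-up version numbers:

package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	local, err := semver.NewVersion("1.2") // e.g. the locally installed item version
	if err != nil {
		fmt.Println("unparseable local version:", err) // maps to VersionUnknown in the hub code
		return
	}
	latest, _ := semver.NewVersion("1.3") // e.g. the hub version, already validated while syncing
	switch {
	case local.LessThan(latest):
		fmt.Println("update available") // VersionUpdateAvailable
	case local.Equal(latest):
		fmt.Println("up to date") // VersionUpToDate
	default:
		fmt.Println("local is ahead of the hub") // VersionFuture
	}
}
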
// validPath returns true if the (relative) path is allowed for the item
@ -250,6 +273,7 @@ func (h *Hub) AddItem(item Item) error {
}
}
// XXX: can this happen?
return fmt.Errorf("ItemType %s is unknown", item.Type)
}

View file

@ -38,23 +38,16 @@ func itemKey(itemPath string) (string, error) {
}
// GetItemByPath retrieves the item from the hub index based on its path.
// To achieve this it resolves a symlink to find the associated hub item.
func (h *Hub) GetItemByPath(itemType string, itemPath string) (*Item, error) {
itemKey, err := itemKey(itemPath)
if err != nil {
return nil, err
}
// XXX: use GetItem()
m := h.GetItemMap(itemType)
if m == nil {
return nil, fmt.Errorf("item type %s doesn't exist", itemType)
}
v, ok := m[itemKey]
if !ok {
item := h.GetItem(itemType, itemKey)
if item == nil {
return nil, fmt.Errorf("%s not found in %s", itemKey, itemType)
}
return &v, nil
return item, nil
}

View file

@ -29,8 +29,8 @@ func (r *RemoteHubCfg) urlTo(remotePath string) (string, error) {
return fmt.Sprintf(r.URLTemplate, r.Branch, remotePath), nil
}
// DownloadIndex downloads the latest version of the index
func (r *RemoteHubCfg) DownloadIndex(localPath string) error {
// downloadIndex downloads the latest version of the index
func (r *RemoteHubCfg) downloadIndex(localPath string) error {
if r == nil {
return ErrNilRemoteHub
}
@ -53,7 +53,7 @@ func (r *RemoteHubCfg) DownloadIndex(localPath string) error {
if resp.StatusCode != http.StatusOK {
if resp.StatusCode == http.StatusNotFound {
return ErrIndexNotFound
return IndexNotFoundError{req.URL.String(), r.Branch}
}
return fmt.Errorf("bad http code %d for %s", resp.StatusCode, req.URL.String())

View file

@ -10,7 +10,9 @@ import (
"sort"
"strings"
"github.com/Masterminds/semver/v3"
log "github.com/sirupsen/logrus"
"slices"
)
func isYAMLFileName(path string) bool {
@ -31,7 +33,7 @@ func handleSymlink(path string) (string, error) {
return "", fmt.Errorf("failed to unlink %s: %w", path, err)
}
// XXX: is this correct?
// ignore this file
return "", nil
}
@ -130,6 +132,28 @@ func (h *Hub) getItemInfo(path string) (itemFileInfo, bool, error) {
return ret, inhub, nil
}
// sortedVersions returns the input data, sorted in reverse order by semver
func sortedVersions(raw []string) ([]string, error) {
vs := make([]*semver.Version, len(raw))
for i, r := range raw {
v, err := semver.NewVersion(r)
if err != nil {
return nil, fmt.Errorf("%s: %w", r, err)
}
vs[i] = v
}
sort.Sort(sort.Reverse(semver.Collection(vs)))
ret := make([]string, len(vs))
for i, v := range vs {
ret[i] = v.Original()
}
return ret, nil
}
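
Editor's note: the gain over lexical sorting shows up with two-digit version components. An in-package usage sketch with made-up version strings:

func exampleSortedVersions() error { // hypothetical, in-package
	raw := []string{"0.9", "0.10", "0.2"}
	sorted, err := sortedVersions(raw)
	if err != nil {
		return err
	}
	// sorted == ["0.10", "0.9", "0.2"]: newest first,
	// whereas a reverse sort.Strings would yield ["0.9", "0.2", "0.10"]
	_ = sorted
	return nil
}
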
func (h *Hub) itemVisit(path string, f os.DirEntry, err error) error {
var (
local bool
@ -179,7 +203,7 @@ func (h *Hub) itemVisit(path string, f os.DirEntry, err error) error {
log.Tracef("%s points to %s", path, hubpath)
if hubpath == "" {
// XXX: is this correct?
// ignore this file
return nil
}
}
@ -251,17 +275,18 @@ func (h *Hub) itemVisit(path string, f os.DirEntry, err error) error {
}
// let's reverse sort the versions to deal with hash collisions (#154)
// XXX: we sure, lexical sorting?
versions := make([]string, 0, len(item.Versions))
for k := range item.Versions {
versions = append(versions, k)
}
sort.Sort(sort.Reverse(sort.StringSlice(versions)))
versions, err = sortedVersions(versions)
if err != nil {
return fmt.Errorf("while syncing %s %s: %w", info.ftype, info.fname, err)
}
for _, version := range versions {
if item.Versions[version].Digest != sha {
// log.Infof("matching filenames, wrong hash %s != %s -- %s", sha, val.Digest, spew.Sdump(v))
continue
}
@ -321,7 +346,7 @@ func (h *Hub) CollectDepsCheck(v *Item) error {
return nil
}
if v.versionStatus() != 0 { // not up-to-date
if v.versionStatus() != VersionUpToDate { // not up-to-date
log.Debugf("%s dependencies not checked: not up-to-date", v.Name)
return nil
}
@ -371,15 +396,7 @@ func (h *Hub) CollectDepsCheck(v *Item) error {
return fmt.Errorf("outdated %s %s", sub.Type, sub.Name)
}
skip := false
for idx := range subItem.BelongsToCollections {
if subItem.BelongsToCollections[idx] == v.Name {
skip = true
}
}
if !skip {
if !slices.Contains(subItem.BelongsToCollections, v.Name) {
subItem.BelongsToCollections = append(subItem.BelongsToCollections, v.Name)
}
@ -419,15 +436,17 @@ func (h *Hub) SyncDir(dir string) ([]string, error) {
vs := item.versionStatus()
switch vs {
case 0: // latest
case VersionUpToDate: // latest
if err := h.CollectDepsCheck(&item); err != nil {
warnings = append(warnings, fmt.Sprintf("dependency of %s: %s", item.Name, err))
h.Items[COLLECTIONS][name] = item
}
case 1: // not up-to-date
case VersionUpdateAvailable: // not up-to-date
warnings = append(warnings, fmt.Sprintf("update for collection %s available (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version))
default: // version is higher than the highest available from hub?
case VersionFuture:
warnings = append(warnings, fmt.Sprintf("collection %s is in the future (currently:%s, latest:%s)", item.Name, item.LocalVersion, item.Version))
case VersionUnknown:
warnings = append(warnings, fmt.Sprintf("collection %s is tainted (latest:%s)", item.Name, item.Version))
}
log.Debugf("installed (%s) - status: %d | installed: %s | latest: %s | full: %+v", item.Name, vs, item.LocalVersion, item.Version, item.Versions)

View file

@ -5,173 +5,194 @@ import (
"fmt"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)
type ParserCoverage struct {
Parser string
type Coverage struct {
Name string
TestsCount int
PresentIn map[string]bool //poorman's set
}
type ScenarioCoverage struct {
Scenario string
TestsCount int
PresentIn map[string]bool
}
func (h *HubTest) GetParsersCoverage() ([]ParserCoverage, error) {
var coverage []ParserCoverage
func (h *HubTest) GetParsersCoverage() ([]Coverage, error) {
if _, ok := h.HubIndex.Items[cwhub.PARSERS]; !ok {
return coverage, fmt.Errorf("no parsers in hub index")
return nil, fmt.Errorf("no parsers in hub index")
}
//populate from hub, iterate in alphabetical order
var pkeys []string
for pname := range h.HubIndex.Items[cwhub.PARSERS] {
pkeys = append(pkeys, pname)
}
sort.Strings(pkeys)
for _, pname := range pkeys {
coverage = append(coverage, ParserCoverage{
Parser: pname,
// populate from hub, iterate in alphabetical order
pkeys := sortedMapKeys(h.HubIndex.Items[cwhub.PARSERS])
coverage := make([]Coverage, len(pkeys))
for i, name := range pkeys {
coverage[i] = Coverage{
Name: name,
TestsCount: 0,
PresentIn: make(map[string]bool),
})
}
}
//parser the expressions a-la-oneagain
// parse the expressions one by one
passerts, err := filepath.Glob(".tests/*/parser.assert")
if err != nil {
return coverage, fmt.Errorf("while find parser asserts : %s", err)
return nil, fmt.Errorf("while find parser asserts : %s", err)
}
for _, assert := range passerts {
file, err := os.Open(assert)
if err != nil {
return coverage, fmt.Errorf("while reading %s : %s", assert, err)
return nil, fmt.Errorf("while reading %s : %s", assert, err)
}
scanner := bufio.NewScanner(file)
for scanner.Scan() {
assertLine := regexp.MustCompile(`^results\["[^"]+"\]\["(?P<parser>[^"]+)"\]\[[0-9]+\]\.Evt\..*`)
line := scanner.Text()
log.Debugf("assert line : %s", line)
match := assertLine.FindStringSubmatch(line)
match := parserResultRE.FindStringSubmatch(line)
if len(match) == 0 {
log.Debugf("%s doesn't match", line)
continue
}
sidx := assertLine.SubexpIndex("parser")
sidx := parserResultRE.SubexpIndex("parser")
capturedParser := match[sidx]
for idx, pcover := range coverage {
if pcover.Parser == capturedParser {
if pcover.Name == capturedParser {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
parserNameSplit := strings.Split(pcover.Parser, "/")
parserNameSplit := strings.Split(pcover.Name, "/")
parserNameOnly := parserNameSplit[len(parserNameSplit)-1]
if parserNameOnly == capturedParser {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
capturedParserSplit := strings.Split(capturedParser, "/")
capturedParserName := capturedParserSplit[len(capturedParserSplit)-1]
if capturedParserName == parserNameOnly {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
if capturedParserName == parserNameOnly+"-logs" {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
}
}
file.Close()
}
return coverage, nil
}
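
Editor's note: the rewritten loops here (and in the parser assert code further down) call a sortedMapKeys helper that does not appear in this diff. An assumed generic form, which may differ from the actual implementation, using the standard sort package:

// assumed helper, not shown in this diff: keys of a string-keyed map in sorted order
func sortedMapKeys[V any](m map[string]V) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	return keys
}
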
func (h *HubTest) GetScenariosCoverage() ([]ScenarioCoverage, error) {
var coverage []ScenarioCoverage
func (h *HubTest) GetScenariosCoverage() ([]Coverage, error) {
if _, ok := h.HubIndex.Items[cwhub.SCENARIOS]; !ok {
return coverage, fmt.Errorf("no scenarios in hub index")
}
//populate from hub, iterate in alphabetical order
var pkeys []string
for scenarioName := range h.HubIndex.Items[cwhub.SCENARIOS] {
pkeys = append(pkeys, scenarioName)
}
sort.Strings(pkeys)
for _, scenarioName := range pkeys {
coverage = append(coverage, ScenarioCoverage{
Scenario: scenarioName,
TestsCount: 0,
PresentIn: make(map[string]bool),
})
return nil, fmt.Errorf("no scenarios in hub index")
}
//parser the expressions a-la-oneagain
// populate from hub, iterate in alphabetical order
pkeys := sortedMapKeys(h.HubIndex.Items[cwhub.SCENARIOS])
coverage := make([]Coverage, len(pkeys))
for i, name := range pkeys {
coverage[i] = Coverage{
Name: name,
TestsCount: 0,
PresentIn: make(map[string]bool),
}
}
// parse the expressions one by one
passerts, err := filepath.Glob(".tests/*/scenario.assert")
if err != nil {
return coverage, fmt.Errorf("while find scenario asserts : %s", err)
return nil, fmt.Errorf("while find scenario asserts : %s", err)
}
for _, assert := range passerts {
file, err := os.Open(assert)
if err != nil {
return coverage, fmt.Errorf("while reading %s : %s", assert, err)
return nil, fmt.Errorf("while reading %s : %s", assert, err)
}
scanner := bufio.NewScanner(file)
for scanner.Scan() {
assertLine := regexp.MustCompile(`^results\[[0-9]+\].Overflow.Alert.GetScenario\(\) == "(?P<scenario>[^"]+)"`)
line := scanner.Text()
log.Debugf("assert line : %s", line)
match := assertLine.FindStringSubmatch(line)
match := scenarioResultRE.FindStringSubmatch(line)
if len(match) == 0 {
log.Debugf("%s doesn't match", line)
continue
}
sidx := assertLine.SubexpIndex("scenario")
scanner_name := match[sidx]
sidx := scenarioResultRE.SubexpIndex("scenario")
scannerName := match[sidx]
for idx, pcover := range coverage {
if pcover.Scenario == scanner_name {
if pcover.Name == scannerName {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
scenarioNameSplit := strings.Split(pcover.Scenario, "/")
scenarioNameSplit := strings.Split(pcover.Name, "/")
scenarioNameOnly := scenarioNameSplit[len(scenarioNameSplit)-1]
if scenarioNameOnly == scanner_name {
if scenarioNameOnly == scannerName {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
fixedProbingWord := strings.ReplaceAll(pcover.Scenario, "probbing", "probing")
fixedProbingAssert := strings.ReplaceAll(scanner_name, "probbing", "probing")
fixedProbingWord := strings.ReplaceAll(pcover.Name, "probbing", "probing")
fixedProbingAssert := strings.ReplaceAll(scannerName, "probbing", "probing")
if fixedProbingWord == fixedProbingAssert {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
if fmt.Sprintf("%s-detection", pcover.Scenario) == scanner_name {
if fmt.Sprintf("%s-detection", pcover.Name) == scannerName {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
if fmt.Sprintf("%s-detection", fixedProbingWord) == fixedProbingAssert {
coverage[idx].TestsCount++
coverage[idx].PresentIn[assert] = true
continue
}
}
}
file.Close()
}
return coverage, nil
}
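
Editor's note: the per-line regexp.MustCompile calls are hoisted out of the scan loops into parserResultRE and scenarioResultRE. Their declarations are not part of this diff, but from the removed lines they are presumably package-level variables compiled from the same expressions:

var (
	parserResultRE   = regexp.MustCompile(`^results\["[^"]+"\]\["(?P<parser>[^"]+)"\]\[[0-9]+\]\.Evt\..*`)
	scenarioResultRE = regexp.MustCompile(`^results\[[0-9]+\].Overflow.Alert.GetScenario\(\) == "(?P<scenario>[^"]+)"`)
)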

View file

@ -6,6 +6,7 @@ import (
"os/exec"
"path/filepath"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)
@ -29,42 +30,44 @@ const (
)
func NewHubTest(hubPath string, crowdsecPath string, cscliPath string) (HubTest, error) {
var err error
hubPath, err = filepath.Abs(hubPath)
hubPath, err := filepath.Abs(hubPath)
if err != nil {
return HubTest{}, fmt.Errorf("can't get absolute path of hub: %+v", err)
}
// we can't use hubtest without the hub
if _, err := os.Stat(hubPath); os.IsNotExist(err) {
if _, err = os.Stat(hubPath); os.IsNotExist(err) {
return HubTest{}, fmt.Errorf("path to hub '%s' doesn't exist, can't run", hubPath)
}
HubTestPath := filepath.Join(hubPath, "./.tests/")
// we can't use hubtest without crowdsec binary
if _, err := exec.LookPath(crowdsecPath); err != nil {
if _, err := os.Stat(crowdsecPath); os.IsNotExist(err) {
if _, err = exec.LookPath(crowdsecPath); err != nil {
if _, err = os.Stat(crowdsecPath); os.IsNotExist(err) {
return HubTest{}, fmt.Errorf("path to crowdsec binary '%s' doesn't exist or is not in $PATH, can't run", crowdsecPath)
}
}
// we can't use hubtest without cscli binary
if _, err := exec.LookPath(cscliPath); err != nil {
if _, err := os.Stat(cscliPath); os.IsNotExist(err) {
if _, err = exec.LookPath(cscliPath); err != nil {
if _, err = os.Stat(cscliPath); os.IsNotExist(err) {
return HubTest{}, fmt.Errorf("path to cscli binary '%s' doesn't exist or is not in $PATH, can't run", cscliPath)
}
}
hubIndexFile := filepath.Join(hubPath, ".index.json")
bidx, err := os.ReadFile(hubIndexFile)
if err != nil {
return HubTest{}, fmt.Errorf("unable to read index file: %s", err)
local := &csconfig.LocalHubCfg{
HubDir: hubPath,
HubIndexFile: hubIndexFile,
InstallDir: HubTestPath,
InstallDataDir: HubTestPath,
}
// load hub index
hubIndex, err := cwhub.ParseIndex(bidx)
hub, err := cwhub.NewHub(local, nil, false)
if err != nil {
return HubTest{}, fmt.Errorf("unable to load hub index file: %s", err)
return HubTest{}, fmt.Errorf("unable to load hub: %s", err)
}
templateConfigFilePath := filepath.Join(HubTestPath, templateConfigFile)
@ -80,16 +83,18 @@ func NewHubTest(hubPath string, crowdsecPath string, cscliPath string) (HubTest,
TemplateConfigPath: templateConfigFilePath,
TemplateProfilePath: templateProfilePath,
TemplateSimulationPath: templateSimulationPath,
HubIndex: &cwhub.Hub{Items: hubIndex},
HubIndex: hub,
}, nil
}
func (h *HubTest) LoadTestItem(name string) (*HubTestItem, error) {
HubTestItem := &HubTestItem{}
testItem, err := NewTest(name, h)
if err != nil {
return HubTestItem, err
}
h.Tests = append(h.Tests, testItem)
return testItem, nil
@ -108,5 +113,6 @@ func (h *HubTest) LoadAllTests() error {
}
}
}
return nil
}

View file

@ -7,11 +7,12 @@ import (
"path/filepath"
"strings"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
"github.com/crowdsecurity/crowdsec/pkg/parser"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
)
type HubTestItemConfig struct {
@ -76,8 +77,6 @@ const (
BucketPourResultFileName = "bucketpour-dump.yaml"
)
var crowdsecPatternsFolder = csconfig.DefaultConfigPath("patterns")
func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) {
testPath := filepath.Join(hubTest.HubTestPath, name)
runtimeFolder := filepath.Join(testPath, "runtime")
@ -87,10 +86,12 @@ func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) {
// read test configuration file
configFileData := &HubTestItemConfig{}
yamlFile, err := os.ReadFile(configFilePath)
if err != nil {
log.Printf("no config file found in '%s': %v", testPath, err)
}
err = yaml.Unmarshal(yamlFile, configFileData)
if err != nil {
return nil, fmt.Errorf("unmarshal: %v", err)
@ -101,6 +102,7 @@ func NewTest(name string, hubTest *HubTest) (*HubTestItem, error) {
scenarioAssertFilePath := filepath.Join(testPath, ScenarioAssertFileName)
ScenarioAssert := NewScenarioAssert(scenarioAssertFilePath)
return &HubTestItem{
Name: name,
Path: testPath,
@ -143,23 +145,25 @@ func (t *HubTestItem) InstallHub() error {
if parser == "" {
continue
}
var parserDirDest string
if hubParser, ok := t.HubIndex.Items[cwhub.PARSERS][parser]; ok {
parserSource, err := filepath.Abs(filepath.Join(t.HubPath, hubParser.RemotePath))
if err != nil {
return fmt.Errorf("can't get absolute path of '%s': %s", parserSource, err)
}
parserFileName := filepath.Base(parserSource)
// runtime/hub/parsers/s00-raw/crowdsecurity/
hubDirParserDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubParser.RemotePath))
// runtime/parsers/s00-raw/
parserDirDest = fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, hubParser.Stage)
parserDirDest := fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, hubParser.Stage)
if err := os.MkdirAll(hubDirParserDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", hubDirParserDest, err)
}
if err := os.MkdirAll(parserDirDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", parserDirDest, err)
}
@ -200,7 +204,7 @@ func (t *HubTestItem) InstallHub() error {
//return fmt.Errorf("stage '%s' extracted from '%s' doesn't exist in the hub", customParserStage, hubStagePath)
}
parserDirDest = fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, customParserStage)
parserDirDest := fmt.Sprintf("%s/parsers/%s/", t.RuntimePath, customParserStage)
if err := os.MkdirAll(parserDirDest, os.ModePerm); err != nil {
continue
//return fmt.Errorf("unable to create folder '%s': %s", parserDirDest, err)
@ -227,23 +231,25 @@ func (t *HubTestItem) InstallHub() error {
if scenario == "" {
continue
}
var scenarioDirDest string
if hubScenario, ok := t.HubIndex.Items[cwhub.SCENARIOS][scenario]; ok {
scenarioSource, err := filepath.Abs(filepath.Join(t.HubPath, hubScenario.RemotePath))
if err != nil {
return fmt.Errorf("can't get absolute path to: %s", scenarioSource)
}
scenarioFileName := filepath.Base(scenarioSource)
// runtime/hub/scenarios/crowdsecurity/
hubDirScenarioDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubScenario.RemotePath))
// runtime/parsers/scenarios/
scenarioDirDest = fmt.Sprintf("%s/scenarios/", t.RuntimePath)
scenarioDirDest := fmt.Sprintf("%s/scenarios/", t.RuntimePath)
if err := os.MkdirAll(hubDirScenarioDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", hubDirScenarioDest, err)
}
if err := os.MkdirAll(scenarioDirDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", scenarioDirDest, err)
}
@ -271,7 +277,7 @@ func (t *HubTestItem) InstallHub() error {
//return fmt.Errorf("scenarios '%s' doesn't exist in the hub and doesn't appear to be a custom one.", scenario)
}
scenarioDirDest = fmt.Sprintf("%s/scenarios/", t.RuntimePath)
scenarioDirDest := fmt.Sprintf("%s/scenarios/", t.RuntimePath)
if err := os.MkdirAll(scenarioDirDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", scenarioDirDest, err)
}
@ -296,23 +302,25 @@ func (t *HubTestItem) InstallHub() error {
if postoverflow == "" {
continue
}
var postoverflowDirDest string
if hubPostOverflow, ok := t.HubIndex.Items[cwhub.POSTOVERFLOWS][postoverflow]; ok {
postoverflowSource, err := filepath.Abs(filepath.Join(t.HubPath, hubPostOverflow.RemotePath))
if err != nil {
return fmt.Errorf("can't get absolute path of '%s': %s", postoverflowSource, err)
}
postoverflowFileName := filepath.Base(postoverflowSource)
// runtime/hub/postoverflows/s00-enrich/crowdsecurity/
hubDirPostoverflowDest := filepath.Join(t.RuntimeHubPath, filepath.Dir(hubPostOverflow.RemotePath))
// runtime/postoverflows/s00-enrich
postoverflowDirDest = fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, hubPostOverflow.Stage)
postoverflowDirDest := fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, hubPostOverflow.Stage)
if err := os.MkdirAll(hubDirPostoverflowDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", hubDirPostoverflowDest, err)
}
if err := os.MkdirAll(postoverflowDirDest, os.ModePerm); err != nil {
return fmt.Errorf("unable to create folder '%s': %s", postoverflowDirDest, err)
}
@ -353,7 +361,7 @@ func (t *HubTestItem) InstallHub() error {
//return fmt.Errorf("stage '%s' from extracted '%s' doesn't exist in the hub", customPostoverflowStage, hubStagePath)
}
postoverflowDirDest = fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, customPostoverflowStage)
postoverflowDirDest := fmt.Sprintf("%s/postoverflows/%s/", t.RuntimePath, customPostoverflowStage)
if err := os.MkdirAll(postoverflowDirDest, os.ModePerm); err != nil {
continue
//return fmt.Errorf("unable to create folder '%s': %s", postoverflowDirDest, err)
@ -380,10 +388,12 @@ func (t *HubTestItem) InstallHub() error {
Filter: "1==1",
Statics: t.Config.OverrideStatics,
}
b, err := yaml.Marshal(n)
if err != nil {
return fmt.Errorf("unable to marshal overrides: %s", err)
}
tgtFilename := fmt.Sprintf("%s/parsers/s00-raw/00_overrides.yaml", t.RuntimePath)
if err := os.WriteFile(tgtFilename, b, os.ModePerm); err != nil {
return fmt.Errorf("unable to write overrides to '%s': %s", tgtFilename, err)
@ -403,6 +413,7 @@ func (t *HubTestItem) InstallHub() error {
if err := hub.DownloadDataIfNeeded(item, true); err != nil {
return fmt.Errorf("unable to download data for parser '%s': %+v", parserName, err)
}
log.Debugf("parser '%s' installed successfully in runtime environment", parserName)
}
}
@ -414,6 +425,7 @@ func (t *HubTestItem) InstallHub() error {
if err := hub.DownloadDataIfNeeded(item, true); err != nil {
return fmt.Errorf("unable to download data for parser '%s': %+v", scenarioName, err)
}
log.Debugf("scenario '%s' installed successfully in runtime environment", scenarioName)
}
}
@ -425,6 +437,7 @@ func (t *HubTestItem) InstallHub() error {
if err := hub.DownloadDataIfNeeded(item, true); err != nil {
return fmt.Errorf("unable to download data for parser '%s': %+v", postoverflowName, err)
}
log.Debugf("postoverflow '%s' installed successfully in runtime environment", postoverflowName)
}
}
@ -489,6 +502,8 @@ func (t *HubTestItem) Run() error {
return fmt.Errorf("unable to copy '%s' to '%s': %v", t.TemplateSimulationPath, t.RuntimeSimulationFilePath, err)
}
crowdsecPatternsFolder := csconfig.DefaultConfigPath("patterns")
// copy template patterns folder to runtime folder
if err = CopyDir(crowdsecPatternsFolder, t.RuntimePatternsPath); err != nil {
return fmt.Errorf("unable to copy 'patterns' from '%s' to '%s': %s", crowdsecPatternsFolder, t.RuntimePatternsPath, err)
@ -511,6 +526,7 @@ func (t *HubTestItem) Run() error {
if err != nil {
return fmt.Errorf("unable to stat log file '%s': %s", logFile, err)
}
if logFileStat.Size() == 0 {
return fmt.Errorf("log file '%s' is empty, please fill it with log", logFile)
}
@ -518,6 +534,7 @@ func (t *HubTestItem) Run() error {
cmdArgs := []string{"-c", t.RuntimeConfigFilePath, "machines", "add", "testMachine", "--auto"}
cscliRegisterCmd := exec.Command(t.CscliPath, cmdArgs...)
log.Debugf("%s", cscliRegisterCmd.String())
output, err := cscliRegisterCmd.CombinedOutput()
if err != nil {
if !strings.Contains(string(output), "unable to create machine: user 'testMachine': user already exist") {
@ -527,16 +544,20 @@ func (t *HubTestItem) Run() error {
}
cmdArgs = []string{"-c", t.RuntimeConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", t.ResultsPath, "-order-event"}
for labelKey, labelValue := range t.Config.Labels {
arg := fmt.Sprintf("%s:%s", labelKey, labelValue)
cmdArgs = append(cmdArgs, "-label", arg)
}
crowdsecCmd := exec.Command(t.CrowdSecPath, cmdArgs...)
log.Debugf("%s", crowdsecCmd.String())
output, err = crowdsecCmd.CombinedOutput()
if log.GetLevel() >= log.DebugLevel || err != nil {
fmt.Println(string(output))
}
if err != nil {
return fmt.Errorf("fail to run '%s' for test '%s': %v", crowdsecCmd.String(), t.Name, err)
}
@ -553,8 +574,10 @@ func (t *HubTestItem) Run() error {
if err != nil {
return err
}
parserAssertFile.Close()
}
assertFileStat, err := os.Stat(t.ParserAssert.File)
if err != nil {
return fmt.Errorf("error while stats '%s': %s", t.ParserAssert.File, err)
@ -565,6 +588,7 @@ func (t *HubTestItem) Run() error {
if err != nil {
return fmt.Errorf("couldn't generate assertion: %s", err)
}
t.ParserAssert.AutoGenAssertData = assertData
t.ParserAssert.AutoGenAssert = true
} else {
@ -576,12 +600,15 @@ func (t *HubTestItem) Run() error {
// assert scenarios
nbScenario := 0
for _, scenario := range t.Config.Scenarios {
if scenario == "" {
continue
}
nbScenario += 1
nbScenario++
}
if nbScenario > 0 {
_, err := os.Stat(t.ScenarioAssert.File)
if os.IsNotExist(err) {
@ -589,8 +616,10 @@ func (t *HubTestItem) Run() error {
if err != nil {
return err
}
scenarioAssertFile.Close()
}
assertFileStat, err := os.Stat(t.ScenarioAssert.File)
if err != nil {
return fmt.Errorf("error while stats '%s': %s", t.ScenarioAssert.File, err)
@ -601,6 +630,7 @@ func (t *HubTestItem) Run() error {
if err != nil {
return fmt.Errorf("couldn't generate assertion: %s", err)
}
t.ScenarioAssert.AutoGenAssertData = assertData
t.ScenarioAssert.AutoGenAssert = true
} else {

View file

@ -5,13 +5,11 @@ import (
"fmt"
"io"
"os"
"regexp"
"sort"
"strings"
"time"
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/vm"
"github.com/enescakir/emoji"
"github.com/fatih/color"
diff "github.com/r3labs/diff/v2"
@ -43,10 +41,10 @@ type ParserResult struct {
Evt types.Event
Success bool
}
type ParserResults map[string]map[string][]ParserResult
func NewParserAssert(file string) *ParserAssert {
ParserAssert := &ParserAssert{
File: file,
NbAssert: 0,
@ -55,6 +53,7 @@ func NewParserAssert(file string) *ParserAssert {
AutoGenAssert: false,
TestData: &ParserResults{},
}
return ParserAssert
}
@ -63,22 +62,24 @@ func (p *ParserAssert) AutoGenFromFile(filename string) (string, error) {
if err != nil {
return "", err
}
ret := p.AutoGenParserAssert()
return ret, nil
}
func (p *ParserAssert) LoadTest(filename string) error {
var err error
parserDump, err := LoadParserDump(filename)
if err != nil {
return fmt.Errorf("loading parser dump file: %+v", err)
}
p.TestData = parserDump
return nil
}
func (p *ParserAssert) AssertFile(testFile string) error {
file, err := os.Open(p.File)
if err != nil {
@ -88,19 +89,26 @@ func (p *ParserAssert) AssertFile(testFile string) error {
if err := p.LoadTest(testFile); err != nil {
return fmt.Errorf("unable to load parser dump file '%s': %s", testFile, err)
}
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanLines)
nbLine := 0
for scanner.Scan() {
nbLine += 1
nbLine++
if scanner.Text() == "" {
continue
}
ok, err := p.Run(scanner.Text())
if err != nil {
return fmt.Errorf("unable to run assert '%s': %+v", scanner.Text(), err)
}
p.NbAssert += 1
p.NbAssert++
if !ok {
log.Debugf("%s is FALSE", scanner.Text())
failedAssert := &AssertFail{
@ -109,37 +117,43 @@ func (p *ParserAssert) AssertFile(testFile string) error {
Expression: scanner.Text(),
Debug: make(map[string]string),
}
variableRE := regexp.MustCompile(`(?P<variable>[^ =]+) == .*`)
match := variableRE.FindStringSubmatch(scanner.Text())
variable := ""
if len(match) == 0 {
log.Infof("Couldn't get variable of line '%s'", scanner.Text())
variable = scanner.Text()
} else {
variable = match[1]
}
result, err := p.EvalExpression(variable)
if err != nil {
log.Errorf("unable to evaluate variable '%s': %s", variable, err)
continue
}
failedAssert.Debug[variable] = result
p.Fails = append(p.Fails, *failedAssert)
continue
}
//fmt.Printf(" %s '%s'\n", emoji.GreenSquare, scanner.Text())
}
file.Close()
if p.NbAssert == 0 {
assertData, err := p.AutoGenFromFile(testFile)
if err != nil {
return fmt.Errorf("couldn't generate assertion: %s", err)
}
p.AutoGenAssertData = assertData
p.AutoGenAssert = true
}
if len(p.Fails) == 0 {
p.Success = true
}
@ -148,15 +162,14 @@ func (p *ParserAssert) AssertFile(testFile string) error {
}
func (p *ParserAssert) RunExpression(expression string) (interface{}, error) {
var err error
//debug doesn't make much sense with the ability to evaluate "on the fly"
//var debugFilter *exprhelpers.ExprDebugger
var runtimeFilter *vm.Program
var output interface{}
env := map[string]interface{}{"results": *p.TestData}
if runtimeFilter, err = expr.Compile(expression, exprhelpers.GetExprOptions(env)...); err != nil {
runtimeFilter, err := expr.Compile(expression, exprhelpers.GetExprOptions(env)...)
if err != nil {
log.Errorf("failed to compile '%s' : %s", expression, err)
return output, err
}
@ -168,8 +181,10 @@ func (p *ParserAssert) RunExpression(expression string) (interface{}, error) {
if err != nil {
log.Warningf("running : %s", expression)
log.Warningf("runtime error : %s", err)
return output, fmt.Errorf("while running expression %s: %w", expression, err)
}
return output, nil
}
@ -178,10 +193,13 @@ func (p *ParserAssert) EvalExpression(expression string) (string, error) {
if err != nil {
return "", err
}
ret, err := yaml.Marshal(output)
if err != nil {
return "", err
}
return string(ret), nil
}
@ -190,6 +208,7 @@ func (p *ParserAssert) Run(assert string) (bool, error) {
if err != nil {
return false, err
}
switch out := output.(type) {
case bool:
return out, nil
@ -201,80 +220,89 @@ func (p *ParserAssert) Run(assert string) (bool, error) {
func Escape(val string) string {
val = strings.ReplaceAll(val, `\`, `\\`)
val = strings.ReplaceAll(val, `"`, `\"`)
return val
}
func (p *ParserAssert) AutoGenParserAssert() string {
//attempt to autogen parser asserts
var ret string
ret := fmt.Sprintf("len(results) == %d\n", len(*p.TestData))
//sort map keys for consistent order
stages := sortedMapKeys(*p.TestData)
//sort map keys for consistent ordre
var stages []string
for stage := range *p.TestData {
stages = append(stages, stage)
}
sort.Strings(stages)
ret += fmt.Sprintf("len(results) == %d\n", len(*p.TestData))
for _, stage := range stages {
parsers := (*p.TestData)[stage]
//sort map keys for consistent ordre
var pnames []string
for pname := range parsers {
pnames = append(pnames, pname)
}
sort.Strings(pnames)
//sort map keys for consistent order
pnames := sortedMapKeys(parsers)
for _, parser := range pnames {
presults := parsers[parser]
ret += fmt.Sprintf(`len(results["%s"]["%s"]) == %d`+"\n", stage, parser, len(presults))
for pidx, result := range presults {
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Success == %t`+"\n", stage, parser, pidx, result.Success)
if !result.Success {
continue
}
for _, pkey := range sortedMapKeys(result.Evt.Parsed) {
pval := result.Evt.Parsed[pkey]
if pval == "" {
continue
}
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Parsed["%s"] == "%s"`+"\n", stage, parser, pidx, pkey, Escape(pval))
}
for _, mkey := range sortedMapKeys(result.Evt.Meta) {
mval := result.Evt.Meta[mkey]
if mval == "" {
continue
}
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Meta["%s"] == "%s"`+"\n", stage, parser, pidx, mkey, Escape(mval))
}
for _, ekey := range sortedMapKeys(result.Evt.Enriched) {
eval := result.Evt.Enriched[ekey]
if eval == "" {
continue
}
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Enriched["%s"] == "%s"`+"\n", stage, parser, pidx, ekey, Escape(eval))
}
for _, ukey := range sortedMapKeys(result.Evt.Unmarshaled) {
uval := result.Evt.Unmarshaled[ukey]
if uval == "" {
continue
}
base := fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Unmarshaled["%s"]`, stage, parser, pidx, ukey)
for _, line := range p.buildUnmarshaledAssert(base, uval) {
ret += line
}
}
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Whitelisted == %t`+"\n", stage, parser, pidx, result.Evt.Whitelisted)
if result.Evt.WhitelistReason != "" {
ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.WhitelistReason == "%s"`+"\n", stage, parser, pidx, Escape(result.Evt.WhitelistReason))
}
}
}
}
return ret
}
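
Editor's note: for readers new to these auto-generated assert files, the lines produced by the format strings above look like the following (stage, parser names and values are made up):

len(results) == 1
len(results["s01-parse"]["crowdsecurity/sshd-logs"]) == 1
results["s01-parse"]["crowdsecurity/sshd-logs"][0].Success == true
results["s01-parse"]["crowdsecurity/sshd-logs"][0].Evt.Parsed["source_ip"] == "192.168.1.1"
results["s01-parse"]["crowdsecurity/sshd-logs"][0].Evt.Meta["service"] == "ssh"
results["s01-parse"]["crowdsecurity/sshd-logs"][0].Evt.Whitelisted == false
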
func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []string {
ret := make([]string, 0)
switch val := eval.(type) {
case map[string]interface{}:
for k, v := range val {
@ -297,12 +325,11 @@ func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []s
default:
log.Warningf("unknown type '%T' for key '%s'", val, ekey)
}
return ret
}
func LoadParserDump(filepath string) (*ParserResults, error) {
var pdump ParserResults
dumpData, err := os.Open(filepath)
if err != nil {
return nil, err
@ -314,18 +341,19 @@ func LoadParserDump(filepath string) (*ParserResults, error) {
return nil, err
}
pdump := ParserResults{}
if err := yaml.Unmarshal(results, &pdump); err != nil {
return nil, err
}
/* we know that some variables should always be set,
let's check if they're present in last parser output of last stage */
stages := make([]string, 0, len(pdump))
for k := range pdump {
stages = append(stages, k)
}
sort.Strings(stages)
stages := sortedMapKeys(pdump)
var lastStage string
//Loop over stages to find last successful one with at least one parser
for i := len(stages) - 2; i >= 0; i-- {
if len(pdump[stages[i]]) != 0 {
@ -333,10 +361,13 @@ func LoadParserDump(filepath string) (*ParserResults, error) {
break
}
}
parsers := make([]string, 0, len(pdump[lastStage]))
for k := range pdump[lastStage] {
parsers = append(parsers, k)
}
sort.Strings(parsers)
lastParser := parsers[len(parsers)-1]
@ -357,47 +388,51 @@ type DumpOpts struct {
ShowNotOkParsers bool
}
func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts DumpOpts) {
func DumpTree(parserResults ParserResults, bucketPour BucketPourInfo, opts DumpOpts) {
//note : we can use line -> time as the unique identifier (of acquisition)
state := make(map[time.Time]map[string]map[string]ParserResult)
assoc := make(map[time.Time]string, 0)
for stage, parsers := range parser_results {
for stage, parsers := range parserResults {
for parser, results := range parsers {
for _, parser_res := range results {
evt := parser_res.Evt
for _, parserRes := range results {
evt := parserRes.Evt
if _, ok := state[evt.Line.Time]; !ok {
state[evt.Line.Time] = make(map[string]map[string]ParserResult)
assoc[evt.Line.Time] = evt.Line.Raw
}
if _, ok := state[evt.Line.Time][stage]; !ok {
state[evt.Line.Time][stage] = make(map[string]ParserResult)
}
state[evt.Line.Time][stage][parser] = ParserResult{Evt: evt, Success: parser_res.Success}
}
state[evt.Line.Time][stage][parser] = ParserResult{Evt: evt, Success: parserRes.Success}
}
}
}
for bname, evtlist := range bucket_pour {
for bname, evtlist := range bucketPour {
for _, evt := range evtlist {
if evt.Line.Raw == "" {
continue
}
//it might be a bucket overflow being reprocessed, skip it
if _, ok := state[evt.Line.Time]; !ok {
state[evt.Line.Time] = make(map[string]map[string]ParserResult)
assoc[evt.Line.Time] = evt.Line.Raw
}
//there is a trick: to know if an event successfully exited the parsers, we check whether it reached the pour() phase
//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
if _, ok := state[evt.Line.Time]["buckets"]; !ok {
state[evt.Line.Time]["buckets"] = make(map[string]ParserResult)
}
state[evt.Line.Time]["buckets"][bname] = ParserResult{Success: true}
}
}
yellow := color.New(color.FgYellow).SprintFunc()
red := color.New(color.FgRed).SprintFunc()
green := color.New(color.FgGreen).SprintFunc()
@ -409,19 +444,25 @@ func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts Dum
continue
}
}
fmt.Printf("line: %s\n", rawstr)
skeys := make([]string, 0, len(state[tstamp]))
for k := range state[tstamp] {
//there is a trick: to know if an event successfully exited the parsers, we check whether it reached the pour() phase
//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
if k == "buckets" {
continue
}
skeys = append(skeys, k)
}
sort.Strings(skeys)
//iterate stage
var prev_item types.Event
// iterate stage
var prevItem types.Event
for _, stage := range skeys {
parsers := state[tstamp][stage]
@ -431,18 +472,16 @@ func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts Dum
fmt.Printf("\t%s %s\n", sep, stage)
pkeys := make([]string, 0, len(parsers))
for k := range parsers {
pkeys = append(pkeys, k)
}
sort.Strings(pkeys)
pkeys := sortedMapKeys(parsers)
for idx, parser := range pkeys {
res := parsers[parser].Success
sep := "├"
if idx == len(pkeys)-1 {
sep = "└"
}
created := 0
updated := 0
deleted := 0
@ -451,16 +490,19 @@ func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts Dum
detailsDisplay := ""
if res {
changelog, _ := diff.Diff(prev_item, parsers[parser].Evt)
changelog, _ := diff.Diff(prevItem, parsers[parser].Evt)
for _, change := range changelog {
switch change.Type {
case "create":
created++
detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s : %s\n", presep, sep, change.Type, strings.Join(change.Path, "."), green(change.To))
case "update":
detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s : %s -> %s\n", presep, sep, change.Type, strings.Join(change.Path, "."), change.From, yellow(change.To))
if change.Path[0] == "Whitelisted" && change.To == true {
whitelisted = true
if whitelistReason == "" {
whitelistReason = parsers[parser].Evt.WhitelistReason
}
@ -468,51 +510,64 @@ func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts Dum
updated++
case "delete":
deleted++
detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s\n", presep, sep, change.Type, red(strings.Join(change.Path, ".")))
}
}
prev_item = parsers[parser].Evt
prevItem = parsers[parser].Evt
}
if created > 0 {
changeStr += green(fmt.Sprintf("+%d", created))
}
if updated > 0 {
if len(changeStr) > 0 {
changeStr += " "
}
changeStr += yellow(fmt.Sprintf("~%d", updated))
}
if deleted > 0 {
if len(changeStr) > 0 {
changeStr += " "
}
changeStr += red(fmt.Sprintf("-%d", deleted))
}
if whitelisted {
if len(changeStr) > 0 {
changeStr += " "
}
changeStr += red("[whitelisted]")
}
if changeStr == "" {
changeStr = yellow("unchanged")
}
if res {
fmt.Printf("\t%s\t%s %s %s (%s)\n", presep, sep, emoji.GreenCircle, parser, changeStr)
if opts.Details {
fmt.Print(detailsDisplay)
}
} else if opts.ShowNotOkParsers {
fmt.Printf("\t%s\t%s %s %s\n", presep, sep, emoji.RedCircle, parser)
}
}
}
sep := "└"
if len(state[tstamp]["buckets"]) > 0 {
sep = "├"
}
//did the event enter the bucket pour phase ?
if _, ok := state[tstamp]["buckets"]["OK"]; ok {
fmt.Printf("\t%s-------- parser success %s\n", sep, emoji.GreenCircle)
@ -521,27 +576,35 @@ func DumpTree(parser_results ParserResults, bucket_pour BucketPourInfo, opts Dum
} else {
fmt.Printf("\t%s-------- parser failure %s\n", sep, emoji.RedCircle)
}
//now print bucket info
if len(state[tstamp]["buckets"]) > 0 {
fmt.Printf("\t├ Scenarios\n")
}
bnames := make([]string, 0, len(state[tstamp]["buckets"]))
for k := range state[tstamp]["buckets"] {
//there is a trick: to know if an event successfully exited the parsers, we check whether it reached the pour() phase
//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
if k == "OK" {
continue
}
bnames = append(bnames, k)
}
sort.Strings(bnames)
for idx, bname := range bnames {
sep := "├"
if idx == len(bnames)-1 {
sep = "└"
}
fmt.Printf("\t\t%s %s %s\n", sep, emoji.GreenCircle, bname)
}
fmt.Println()
}
}
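The created/updated/deleted counters in DumpTree come from a structural diff between consecutive events. A minimal, self-contained sketch of that pattern is below; the import path github.com/r3labs/diff is an assumption here (the import block isn't visible in this hunk, and the repository may pin a versioned module path), but diff.Diff and the Type/Path/From/To fields are used exactly as in the function above.

package main

import (
	"fmt"

	"github.com/r3labs/diff"
)

// Event is a stand-in for the real types.Event used by DumpTree.
type Event struct {
	Parsed map[string]string
}

func main() {
	prev := Event{Parsed: map[string]string{"msg": "a"}}
	next := Event{Parsed: map[string]string{"msg": "b", "user": "bob"}}

	// Diff returns a changelog; each change carries a Type of
	// "create", "update" or "delete", which is what DumpTree counts
	// and pretty-prints per parser.
	changelog, err := diff.Diff(prev, next)
	if err != nil {
		panic(err)
	}

	for _, change := range changelog {
		fmt.Println(change.Type, change.Path, change.From, "->", change.To)
	}
}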

pkg/hubtest/regexp.go (new file)
View file

@ -0,0 +1,11 @@
package hubtest
import (
"regexp"
)
var (
variableRE = regexp.MustCompile(`(?P<variable>[^ =]+) == .*`)
parserResultRE = regexp.MustCompile(`^results\["[^"]+"\]\["(?P<parser>[^"]+)"\]\[[0-9]+\]\.Evt\..*`)
scenarioResultRE = regexp.MustCompile(`^results\[[0-9]+\].Overflow.Alert.GetScenario\(\) == "(?P<scenario>[^"]+)"`)
)
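These precompiled package-level patterns replace regexes that were previously built inline each time an assert failed. As an illustration (the sample assert string is invented), variableRE extracts the left-hand side of a failed assertion so it can be re-evaluated for the debug output:

package main

import (
	"fmt"
	"regexp"
)

var variableRE = regexp.MustCompile(`(?P<variable>[^ =]+) == .*`)

func main() {
	// hypothetical failing assert, in the format produced by AutoGenParserAssert
	assert := `results["s01-parse"]["crowdsecurity/sshd-logs"][0].Success == true`

	match := variableRE.FindStringSubmatch(assert)
	if len(match) == 0 {
		fmt.Println("couldn't get variable")
		return
	}

	// match[1] is the named "variable" group: everything before " == "
	fmt.Println(match[1])
}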

View file

@ -5,12 +5,10 @@ import (
"fmt"
"io"
"os"
"regexp"
"sort"
"strings"
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/vm"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
@ -42,6 +40,7 @@ func NewScenarioAssert(file string) *ScenarioAssert {
TestData: &BucketResults{},
PourData: &BucketPourInfo{},
}
return ScenarioAssert
}
@ -50,7 +49,9 @@ func (s *ScenarioAssert) AutoGenFromFile(filename string) (string, error) {
if err != nil {
return "", err
}
ret := s.AutoGenScenarioAssert()
return ret, nil
}
@ -59,6 +60,7 @@ func (s *ScenarioAssert) LoadTest(filename string, bucketpour string) error {
if err != nil {
return fmt.Errorf("loading scenario dump file '%s': %+v", filename, err)
}
s.TestData = bucketDump
if bucketpour != "" {
@ -66,8 +68,10 @@ func (s *ScenarioAssert) LoadTest(filename string, bucketpour string) error {
if err != nil {
return fmt.Errorf("loading bucket pour dump file '%s': %+v", filename, err)
}
s.PourData = pourDump
}
return nil
}
@ -81,19 +85,26 @@ func (s *ScenarioAssert) AssertFile(testFile string) error {
if err := s.LoadTest(testFile, ""); err != nil {
return fmt.Errorf("unable to load parser dump file '%s': %s", testFile, err)
}
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanLines)
nbLine := 0
for scanner.Scan() {
nbLine += 1
nbLine++
if scanner.Text() == "" {
continue
}
ok, err := s.Run(scanner.Text())
if err != nil {
return fmt.Errorf("unable to run assert '%s': %+v", scanner.Text(), err)
}
s.NbAssert += 1
s.NbAssert++
if !ok {
log.Debugf("%s is FALSE", scanner.Text())
failedAssert := &AssertFail{
@ -102,31 +113,38 @@ func (s *ScenarioAssert) AssertFile(testFile string) error {
Expression: scanner.Text(),
Debug: make(map[string]string),
}
variableRE := regexp.MustCompile(`(?P<variable>[^ ]+) == .*`)
match := variableRE.FindStringSubmatch(scanner.Text())
if len(match) == 0 {
log.Infof("Couldn't get variable of line '%s'", scanner.Text())
continue
}
variable := match[1]
result, err := s.EvalExpression(variable)
if err != nil {
log.Errorf("unable to evaluate variable '%s': %s", variable, err)
continue
}
failedAssert.Debug[variable] = result
s.Fails = append(s.Fails, *failedAssert)
continue
}
//fmt.Printf(" %s '%s'\n", emoji.GreenSquare, scanner.Text())
}
file.Close()
if s.NbAssert == 0 {
assertData, err := s.AutoGenFromFile(testFile)
if err != nil {
return fmt.Errorf("couldn't generate assertion: %s", err)
}
s.AutoGenAssertData = assertData
s.AutoGenAssert = true
}
@ -139,15 +157,14 @@ func (s *ScenarioAssert) AssertFile(testFile string) error {
}
func (s *ScenarioAssert) RunExpression(expression string) (interface{}, error) {
var err error
//debug doesn't make much sense with the ability to evaluate "on the fly"
//var debugFilter *exprhelpers.ExprDebugger
var runtimeFilter *vm.Program
var output interface{}
env := map[string]interface{}{"results": *s.TestData}
if runtimeFilter, err = expr.Compile(expression, exprhelpers.GetExprOptions(env)...); err != nil {
runtimeFilter, err := expr.Compile(expression, exprhelpers.GetExprOptions(env)...)
if err != nil {
return nil, err
}
// if debugFilter, err = exprhelpers.NewDebugger(assert, expr.Env(env)); err != nil {
@ -161,8 +178,10 @@ func (s *ScenarioAssert) RunExpression(expression string) (interface{}, error) {
if err != nil {
log.Warningf("running : %s", expression)
log.Warningf("runtime error : %s", err)
return nil, fmt.Errorf("while running expression %s: %w", expression, err)
}
return output, nil
}
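RunExpression is a thin wrapper around the expr library: compile the assertion once against an environment that exposes the dump as "results", then run it. A minimal sketch of that flow with a made-up environment follows; the real code passes exprhelpers.GetExprOptions(env) (expr.Env plus CrowdSec helpers), while plain expr.Env is used here to keep the example self-contained.

package main

import (
	"fmt"

	"github.com/antonmedv/expr"
)

func main() {
	// stand-in for *s.TestData
	env := map[string]interface{}{
		"results": []string{"overflow-1", "overflow-2"},
	}

	// compile the assertion against the environment
	program, err := expr.Compile(`len(results) == 2`, expr.Env(env))
	if err != nil {
		panic(err)
	}

	// evaluate it; a boolean result is what Run() expects downstream
	output, err := expr.Run(program, env)
	if err != nil {
		panic(err)
	}

	fmt.Println(output) // true
}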
@ -171,10 +190,12 @@ func (s *ScenarioAssert) EvalExpression(expression string) (string, error) {
if err != nil {
return "", err
}
ret, err := yaml.Marshal(output)
if err != nil {
return "", err
}
return string(ret), nil
}
@ -183,6 +204,7 @@ func (s *ScenarioAssert) Run(assert string) (bool, error) {
if err != nil {
return false, err
}
switch out := output.(type) {
case bool:
return out, nil
@ -192,9 +214,9 @@ func (s *ScenarioAssert) Run(assert string) (bool, error) {
}
func (s *ScenarioAssert) AutoGenScenarioAssert() string {
//attempt to autogen parser asserts
var ret string
ret += fmt.Sprintf(`len(results) == %d`+"\n", len(*s.TestData))
// attempt to autogen scenario asserts
ret := fmt.Sprintf(`len(results) == %d`+"\n", len(*s.TestData))
for eventIndex, event := range *s.TestData {
for ipSrc, source := range event.Overflow.Sources {
ret += fmt.Sprintf(`"%s" in results[%d].Overflow.GetSources()`+"\n", ipSrc, eventIndex)
@ -203,15 +225,18 @@ func (s *ScenarioAssert) AutoGenScenarioAssert() string {
ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].GetScope() == "%s"`+"\n", eventIndex, ipSrc, *source.Scope)
ret += fmt.Sprintf(`results[%d].Overflow.Sources["%s"].GetValue() == "%s"`+"\n", eventIndex, ipSrc, *source.Value)
}
for evtIndex, evt := range event.Overflow.Alert.Events {
for _, meta := range evt.Meta {
ret += fmt.Sprintf(`results[%d].Overflow.Alert.Events[%d].GetMeta("%s") == "%s"`+"\n", eventIndex, evtIndex, meta.Key, Escape(meta.Value))
}
}
ret += fmt.Sprintf(`results[%d].Overflow.Alert.GetScenario() == "%s"`+"\n", eventIndex, *event.Overflow.Alert.Scenario)
ret += fmt.Sprintf(`results[%d].Overflow.Alert.Remediation == %t`+"\n", eventIndex, event.Overflow.Alert.Remediation)
ret += fmt.Sprintf(`results[%d].Overflow.Alert.GetEventsCount() == %d`+"\n", eventIndex, *event.Overflow.Alert.EventsCount)
}
return ret
}
@ -228,8 +253,6 @@ func (b BucketResults) Swap(i, j int) {
}
func LoadBucketPourDump(filepath string) (*BucketPourInfo, error) {
var bucketDump BucketPourInfo
dumpData, err := os.Open(filepath)
if err != nil {
return nil, err
@ -241,6 +264,8 @@ func LoadBucketPourDump(filepath string) (*BucketPourInfo, error) {
return nil, err
}
var bucketDump BucketPourInfo
if err := yaml.Unmarshal(results, &bucketDump); err != nil {
return nil, err
}
@ -249,8 +274,6 @@ func LoadBucketPourDump(filepath string) (*BucketPourInfo, error) {
}
func LoadScenarioDump(filepath string) (*BucketResults, error) {
var bucketDump BucketResults
dumpData, err := os.Open(filepath)
if err != nil {
return nil, err
@ -262,6 +285,8 @@ func LoadScenarioDump(filepath string) (*BucketResults, error) {
return nil, err
}
var bucketDump BucketResults
if err := yaml.Unmarshal(results, &bucketDump); err != nil {
return nil, err
}

View file

@ -12,7 +12,9 @@ func sortedMapKeys[V any](m map[string]V) []string {
for k := range m {
keys = append(keys, k)
}
sort.Strings(keys)
return keys
}
@ -43,16 +45,20 @@ func checkPathNotContained(path string, subpath string) error {
}
current := absSubPath
for {
if current == absPath {
return fmt.Errorf("cannot copy a folder onto itself")
}
up := filepath.Dir(current)
if current == up {
break
}
current = up
}
return nil
}

View file

@ -3,16 +3,16 @@ package hubtest
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestCheckPathNotContained(t *testing.T) {
assert.Nil(t, checkPathNotContained("/foo", "/bar"))
assert.Nil(t, checkPathNotContained("/foo/bar", "/foo"))
assert.Nil(t, checkPathNotContained("/foo/bar", "/"))
assert.Nil(t, checkPathNotContained("/path/to/somewhere", "/path/to/somewhere-else"))
assert.Nil(t, checkPathNotContained("~/.local/path/to/somewhere", "~/.local/path/to/somewhere-else"))
assert.NotNil(t, checkPathNotContained("/foo", "/foo/bar"))
assert.NotNil(t, checkPathNotContained("/", "/foo"))
assert.NotNil(t, checkPathNotContained("/", "/foo/bar/baz"))
require.NoError(t, checkPathNotContained("/foo", "/bar"))
require.NoError(t, checkPathNotContained("/foo/bar", "/foo"))
require.NoError(t, checkPathNotContained("/foo/bar", "/"))
require.NoError(t, checkPathNotContained("/path/to/somewhere", "/path/to/somewhere-else"))
require.NoError(t, checkPathNotContained("~/.local/path/to/somewhere", "~/.local/path/to/somewhere-else"))
require.Error(t, checkPathNotContained("/foo", "/foo/bar"))
require.Error(t, checkPathNotContained("/", "/foo"))
require.Error(t, checkPathNotContained("/", "/foo/bar/baz"))
}

View file

@ -62,6 +62,14 @@ teardown() {
assert_output --partial 'crowdsecurity/linux'
}
@test "missing reference in hub index" {
new_hub=$(jq <"$HUB_DIR/.index.json" 'del(.parsers."crowdsecurity/smb-logs") | del (.scenarios."crowdsecurity/mysql-bf")')
echo "$new_hub" >"$HUB_DIR/.index.json"
rune -0 cscli hub list --error
assert_stderr --partial "Referred parsers crowdsecurity/smb-logs in collection crowdsecurity/smb doesn't exist."
assert_stderr --partial "Referred scenarios crowdsecurity/mysql-bf in collection crowdsecurity/mysql doesn't exist."
}
@test "cscli hub update" {
#XXX: todo
:

View file

@ -79,26 +79,35 @@ teardown() {
# XXX: check alphabetical order in human, json, raw
}
@test "cscli collections list [collection]..." {
# non-existent
rune -1 cscli collections install foo/bar
assert_stderr --partial "can't find 'foo/bar' in collections"
# not installed
rune -0 cscli collections list crowdsecurity/smb
assert_output --regexp 'crowdsecurity/smb.*disabled'
# install two items
rune -0 cscli collections install crowdsecurity/sshd crowdsecurity/smb
# list one item
# list an installed item
rune -0 cscli collections list crowdsecurity/sshd
assert_output --partial "crowdsecurity/sshd"
assert_output --regexp "crowdsecurity/sshd"
refute_output --partial "crowdsecurity/smb"
# list multiple items
rune -0 cscli collections list crowdsecurity/sshd crowdsecurity/smb
# list multiple installed and non-installed items
rune -0 cscli collections list crowdsecurity/sshd crowdsecurity/smb crowdsecurity/nginx
assert_output --partial "crowdsecurity/sshd"
assert_output --partial "crowdsecurity/smb"
assert_output --partial "crowdsecurity/nginx"
rune -0 cscli collections list crowdsecurity/sshd -o json
rune -0 jq '.collections | length' <(output)
assert_output "1"
rune -0 cscli collections list crowdsecurity/sshd crowdsecurity/smb -o json
rune -0 cscli collections list crowdsecurity/sshd crowdsecurity/smb crowdsecurity/nginx -o json
rune -0 jq '.collections | length' <(output)
assert_output "2"
assert_output "3"
rune -0 cscli collections list crowdsecurity/sshd -o raw
rune -0 grep -vc 'name,status,version,description' <(output)
@ -108,14 +117,6 @@ teardown() {
assert_output "2"
}
@test "cscli collections list [collection]... (not installed / not existing)" {
skip "not implemented yet"
# not installed
rune -1 cscli collections list crowdsecurity/sshd
# not existing
rune -1 cscli collections list blahblah/blahblah
}
@test "cscli collections install [collection]..." {
rune -1 cscli collections install
assert_stderr --partial 'requires at least 1 arg(s), only received 0'
@ -132,7 +133,7 @@ teardown() {
# autocorrect
rune -1 cscli collections install crowdsecurity/ssshd
assert_stderr --partial "can't find 'crowdsecurity/ssshd' in collections, did you mean crowdsecurity/sshd?"
assert_stderr --partial "can't find 'crowdsecurity/ssshd' in collections, did you mean 'crowdsecurity/sshd'?"
# install multiple
rune -0 cscli collections install crowdsecurity/sshd crowdsecurity/smb

View file

@ -0,0 +1,72 @@
#!/usr/bin/env bats
# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si:
set -u
setup_file() {
load "../lib/setup_file.sh"
./instance-data load
HUB_DIR=$(config_get '.config_paths.hub_dir')
export HUB_DIR
CONFIG_DIR=$(config_get '.config_paths.config_dir')
export CONFIG_DIR
}
teardown_file() {
load "../lib/teardown_file.sh"
}
setup() {
load "../lib/setup.sh"
load "../lib/bats-file/load.bash"
./instance-data load
hub_purge_all
hub_strip_index
}
teardown() {
./instance-crowdsec stop
}
#----------
#
# Tests that don't need to be repeated for each hub type
#
@test "hub versions are correctly sorted during sync" {
# hash of an empty file
sha256_empty="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
# add two versions with the same hash, that don't sort the same way
# in a lexical vs semver sort. CrowdSec should report the latest version
new_hub=$( \
jq --arg DIGEST "$sha256_empty" <"$HUB_DIR/.index.json" \
'. * {collections:{"crowdsecurity/sshd":{"versions":{"1.2":{"digest":$DIGEST, "deprecated": false}, "1.10": {"digest":$DIGEST, "deprecated": false}}}}}' \
)
echo "$new_hub" >"$HUB_DIR/.index.json"
rune -0 cscli collections install crowdsecurity/sshd
truncate -s 0 "$CONFIG_DIR/collections/sshd.yaml"
rune -0 cscli collections inspect crowdsecurity/sshd -o json
# XXX: is this supposed to be tainted or up to date?
rune -0 jq -c '[.local_version,.up_to_date,.tainted]' <(output)
assert_json '["1.10",null,null]'
}
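The "1.2" vs "1.10" pair above is exactly the case where lexical and semantic-version ordering disagree. A small Go sketch of the difference follows; github.com/Masterminds/semver/v3 is an assumption (the "Invalid Semantic Version" message asserted in the next test matches that library's error string, but the exact module used by the hub code isn't shown in this diff).

package main

import (
	"fmt"
	"sort"

	"github.com/Masterminds/semver/v3"
)

func main() {
	raw := []string{"1.2", "1.10"}

	// lexical sort puts "1.10" before "1.2"
	lex := append([]string{}, raw...)
	sort.Strings(lex)
	fmt.Println("lexical:", lex) // [1.10 1.2]

	// semver sort treats them as 1.2.0 < 1.10.0
	versions := make([]*semver.Version, 0, len(raw))
	for _, r := range raw {
		v, err := semver.NewVersion(r) // lenient: accepts partial versions
		if err != nil {
			panic(err)
		}
		versions = append(versions, v)
	}
	sort.Sort(semver.Collection(versions))
	fmt.Println("semver:", versions) // [1.2.0 1.10.0]
}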
@test "hub index with invalid (non semver) version numbers" {
new_hub=$( \
jq <"$HUB_DIR/.index.json" \
'. * {collections:{"crowdsecurity/sshd":{"versions":{"1.2.3.4":{"digest":"foo", "deprecated": false}}}}}' \
)
echo "$new_hub" >"$HUB_DIR/.index.json"
rune -0 cscli collections install crowdsecurity/sshd
rune -1 cscli collections inspect crowdsecurity/sshd --no-metrics
# XXX: we are on the verbose side here...
assert_stderr --partial "failed to read Hub index: failed to sync items: failed to scan $CONFIG_DIR: while syncing collections sshd.yaml: 1.2.3.4: Invalid Semantic Version"
}

View file

@ -133,7 +133,7 @@ teardown() {
# autocorrect
rune -1 cscli parsers install crowdsecurity/sshd-logz
assert_stderr --partial "can't find 'crowdsecurity/sshd-logz' in parsers, did you mean crowdsecurity/sshd-logs?"
assert_stderr --partial "can't find 'crowdsecurity/sshd-logz' in parsers, did you mean 'crowdsecurity/sshd-logs'?"
# install multiple
rune -0 cscli parsers install crowdsecurity/pgsql-logs crowdsecurity/postfix-logs

View file

@ -79,41 +79,43 @@ teardown() {
# XXX: check alphabetical order in human, json, raw
}
@test "cscli postoverflows list [scenario]..." {
# non-existent
rune -1 cscli postoverflows install foo/bar
assert_stderr --partial "can't find 'foo/bar' in postoverflows"
# not installed
rune -0 cscli postoverflows list crowdsecurity/rdns
assert_output --regexp 'crowdsecurity/rdns.*disabled'
# install two items
rune -0 cscli postoverflows install crowdsecurity/rdns crowdsecurity/cdn-whitelist
# list one item
# list an installed item
rune -0 cscli postoverflows list crowdsecurity/rdns
assert_output --partial "crowdsecurity/rdns"
assert_output --regexp "crowdsecurity/rdns.*enabled"
refute_output --partial "crowdsecurity/cdn-whitelist"
# list multiple items
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist
# list multiple installed and non-installed items
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist crowdsecurity/ipv6_to_range
assert_output --partial "crowdsecurity/rdns"
assert_output --partial "crowdsecurity/cdn-whitelist"
assert_output --partial "crowdsecurity/ipv6_to_range"
rune -0 cscli postoverflows list crowdsecurity/rdns -o json
rune -0 jq '.postoverflows | length' <(output)
assert_output "1"
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist -o json
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist crowdsecurity/ipv6_to_range -o json
rune -0 jq '.postoverflows | length' <(output)
assert_output "2"
assert_output "3"
rune -0 cscli postoverflows list crowdsecurity/rdns -o raw
rune -0 grep -vc 'name,status,version,description' <(output)
assert_output "1"
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist -o raw
rune -0 cscli postoverflows list crowdsecurity/rdns crowdsecurity/cdn-whitelist crowdsecurity/ipv6_to_range -o raw
rune -0 grep -vc 'name,status,version,description' <(output)
assert_output "2"
}
@test "cscli postoverflows list [scenario]... (not installed / not existing)" {
skip "not implemented yet"
# not installed
rune -1 cscli postoverflows list crowdsecurity/rdns
# not existing
rune -1 cscli postoverflows list blahblah/blahblah
assert_output "3"
}
@test "cscli postoverflows install [scenario]..." {
@ -132,7 +134,7 @@ teardown() {
# autocorrect
rune -1 cscli postoverflows install crowdsecurity/rdnf
assert_stderr --partial "can't find 'crowdsecurity/rdnf' in postoverflows, did you mean crowdsecurity/rdns?"
assert_stderr --partial "can't find 'crowdsecurity/rdnf' in postoverflows, did you mean 'crowdsecurity/rdns'?"
# install multiple
rune -0 cscli postoverflows install crowdsecurity/rdns crowdsecurity/cdn-whitelist
@ -157,6 +159,8 @@ teardown() {
assert_file_exists "$CONFIG_DIR/postoverflows/s00-enrich/rdns.yaml"
}
# XXX: test install with --force
# XXX: test install with --ignore
@test "cscli postoverflows inspect [scenario]..." {
rune -1 cscli postoverflows inspect

View file

@ -133,7 +133,7 @@ teardown() {
# autocorrect
rune -1 cscli scenarios install crowdsecurity/ssh-tf
assert_stderr --partial "can't find 'crowdsecurity/ssh-tf' in scenarios, did you mean crowdsecurity/ssh-bf?"
assert_stderr --partial "can't find 'crowdsecurity/ssh-tf' in scenarios, did you mean 'crowdsecurity/ssh-bf'?"
# install multiple
rune -0 cscli scenarios install crowdsecurity/ssh-bf crowdsecurity/telnet-bf
@ -145,7 +145,6 @@ teardown() {
assert_output --partial 'installed: true'
}
@test "cscli scenarios install [scenario]... (file location and download-only)" {
# simple install
rune -0 cscli scenarios install crowdsecurity/ssh-bf --download-only
@ -159,6 +158,9 @@ teardown() {
assert_file_exists "$CONFIG_DIR/scenarios/ssh-bf.yaml"
}
# XXX: test install with --force
# XXX: test install with --ignore
@test "cscli scenarios inspect [scenario]..." {
rune -1 cscli scenarios inspect