Merge branch 'crowdsecurity:master' into master

Manuel Sabban 2021-12-15 23:10:40 +01:00 committed by GitHub
commit e6dca618a6
17 changed files with 532 additions and 54 deletions

View file

@@ -5,7 +5,7 @@ FROM golang:${GOVERSION}-alpine AS build
WORKDIR /go/src/crowdsec
# wizard.sh requires GNU coreutils
RUN apk update && apk add --no-cache git jq gcc libc-dev make bash gettext binutils-gold coreutils
RUN apk add --no-cache git jq gcc libc-dev make bash gettext binutils-gold coreutils
COPY . .
@@ -13,7 +13,7 @@ RUN SYSTEM="docker" make release
RUN cd crowdsec-v* && ./wizard.sh --docker-mode && cd -
RUN cscli hub update && cscli collections install crowdsecurity/linux && cscli parsers install crowdsecurity/whitelists
FROM alpine:latest
RUN apk update --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community && apk add --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community tzdata yq
RUN apk add --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community tzdata yq
COPY --from=build /etc/crowdsec /etc/crowdsec
COPY --from=build /var/lib/crowdsec /var/lib/crowdsec
COPY --from=build /usr/local/bin/crowdsec /usr/local/bin/crowdsec
@@ -27,4 +27,4 @@ COPY --from=build /go/src/crowdsec/plugins/notifications/slack/slack.yaml /etc/c
COPY --from=build /go/src/crowdsec/plugins/notifications/splunk/splunk.yaml /etc/crowdsec/notifications/splunk.yaml
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
ENTRYPOINT /bin/sh docker_start.sh
ENTRYPOINT /bin/sh docker_start.sh

View file

@@ -110,16 +110,14 @@ cscli bouncers add MyBouncerName -k %s`, generatePassword(32)),
var apiKey string
var err error
if keyName == "" {
log.Errorf("Please provide a name for the api key")
return
log.Fatalf("Please provide a name for the api key")
}
apiKey = key
if key == "" {
apiKey, err = middlewares.GenerateAPIKey(keyLength)
}
if err != nil {
log.Errorf("unable to generate api key: %s", err)
return
log.Fatalf("unable to generate api key: %s", err)
}
err = dbClient.CreateBouncer(keyName, keyIP, middlewares.HashSHA512(apiKey))
if err != nil {
@@ -154,8 +152,7 @@ cscli bouncers add MyBouncerName -k %s`, generatePassword(32)),
for _, bouncerID := range args {
err := dbClient.DeleteBouncer(bouncerID)
if err != nil {
log.Errorf("unable to delete bouncer: %s", err)
return
log.Fatalf("unable to delete bouncer: %s", err)
}
log.Infof("bouncer '%s' deleted successfully", bouncerID)
}

View file

@@ -6,6 +6,7 @@ import (
"fmt"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"time"
@@ -15,6 +16,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/models"
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/go-openapi/strfmt"
"github.com/jszwec/csvutil"
"github.com/olekukonko/tablewriter"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
@@ -22,6 +24,13 @@ import (
var Client *apiclient.ApiClient
var (
defaultDuration = "4h"
defaultScope = "ip"
defaultType = "ban"
defaultReason = "manual"
)
func DecisionsToTable(alerts *models.GetAlertsResponse) error {
/*here we cheat a bit : to make it more readable for the user, we dedup some entries*/
var spamLimit map[string]bool = make(map[string]bool)
@@ -100,7 +109,7 @@ func NewDecisionsCmd() *cobra.Command {
var cmdDecisions = &cobra.Command{
Use: "decisions [action]",
Short: "Manage decisions",
Long: `Add/List/Delete decisions from LAPI`,
Long: `Add/List/Delete/Import decisions from LAPI`,
Example: `cscli decisions [action] [filter]`,
/*TBD example*/
Args: cobra.MinimumNArgs(1),
@@ -305,7 +314,6 @@ cscli decisions add --scope username --value foobar
if addReason == "" {
addReason = fmt.Sprintf("manual '%s' from '%s'", addType, csConfig.API.Client.Credentials.Login)
}
decision := models.Decision{
Duration: &addDuration,
Scope: &addScope,
@@ -446,5 +454,153 @@ cscli decisions delete --type captcha
cmdDecisions.AddCommand(cmdDecisionsDelete)
var (
importDuration string
importScope string
importReason string
importType string
importFile string
)
var cmdDecisionImport = &cobra.Command{
Use: "import [options]",
Short: "Import decisions from json or csv file",
Long: "expected format :\n" +
"csv : any of duration,origin,reason,scope,type,value, with a header line\n" +
`json : {"duration" : "24h", "origin" : "my-list", "reason" : "my_scenario", "scope" : "ip", "type" : "ban", "value" : "x.y.z.z"}`,
DisableAutoGenTag: true,
Example: `decisions.csv :
duration,scope,value
24h,ip,1.2.3.4
cscli decisions import -i decisions.csv
decisions.json :
[{"duration" : "4h", "scope" : "ip", "type" : "ban", "value" : "1.2.3.4"}]
`,
Run: func(cmd *cobra.Command, args []string) {
if importFile == "" {
log.Fatalf("Please provide a input file contaning decisions with -i flag")
}
csvData, err := os.ReadFile(importFile)
if err != nil {
log.Fatalf("unable to open '%s': %s", importFile, err)
}
type decisionRaw struct {
Duration string `csv:"duration,omitempty" json:"duration,omitempty"`
Origin string `csv:"origin,omitempty" json:"origin,omitempty"`
Scenario string `csv:"reason,omitempty" json:"reason,omitempty"`
Scope string `csv:"scope,omitempty" json:"scope,omitempty"`
Type string `csv:"type,omitempty" json:"type,omitempty"`
Value string `csv:"value" json:"value"`
}
var decisionsListRaw []decisionRaw
switch fileFormat := filepath.Ext(importFile); fileFormat {
case ".json":
if err := json.Unmarshal(csvData, &decisionsListRaw); err != nil {
log.Fatalf("unable to unmarshall json: '%s'", err)
}
case ".csv":
if err := csvutil.Unmarshal(csvData, &decisionsListRaw); err != nil {
log.Fatalf("unable to unmarshall csv: '%s'", err)
}
default:
log.Fatalf("file format not supported for '%s'. supported format are 'json' and 'csv'", importFile)
}
decisionsList := make([]*models.Decision, 0)
for i, decisionLine := range decisionsListRaw {
line := i + 2
if decisionLine.Value == "" {
log.Fatalf("please provide a 'value' in your csv line %d", line)
}
/*deal with defaults and cli-override*/
if decisionLine.Duration == "" {
decisionLine.Duration = defaultDuration
log.Debugf("No 'duration' line %d, using default value: '%s'", line, defaultDuration)
}
if importDuration != "" {
decisionLine.Duration = importDuration
log.Debugf("'duration' line %d, using supplied value: '%s'", line, importDuration)
}
decisionLine.Origin = "cscli-import"
if decisionLine.Scenario == "" {
decisionLine.Scenario = defaultReason
log.Debugf("No 'reason' line %d, using value: '%s'", line, decisionLine.Scenario)
}
if importReason != "" {
decisionLine.Scenario = importReason
log.Debugf("No 'reason' line %d, using supplied value: '%s'", line, importReason)
}
if decisionLine.Type == "" {
decisionLine.Type = defaultType
log.Debugf("No 'type' line %d, using default value: '%s'", line, decisionLine.Type)
}
if importType != "" {
decisionLine.Type = importType
log.Debugf("'type' line %d, using supplied value: '%s'", line, importType)
}
if decisionLine.Scope == "" {
decisionLine.Scope = defaultScope
log.Debugf("No 'scope' line %d, using default value: '%s'", line, decisionLine.Scope)
}
if importScope != "" {
decisionLine.Scope = importScope
log.Debugf("'scope' line %d, using supplied value: '%s'", line, importScope)
}
decision := models.Decision{
Value: types.StrPtr(decisionLine.Value),
Duration: types.StrPtr(decisionLine.Duration),
Origin: types.StrPtr(decisionLine.Origin),
Scenario: types.StrPtr(decisionLine.Scenario),
Type: types.StrPtr(decisionLine.Type),
Scope: types.StrPtr(decisionLine.Scope),
Simulated: new(bool),
}
decisionsList = append(decisionsList, &decision)
}
alerts := models.AddAlertsRequest{}
importAlert := models.Alert{
CreatedAt: time.Now().Format(time.RFC3339),
Scenario: types.StrPtr(fmt.Sprintf("add: %d IPs", len(decisionsList))),
Message: types.StrPtr(""),
Events: []*models.Event{},
Source: &models.Source{
Scope: types.StrPtr("cscli/manual-import"),
Value: types.StrPtr(""),
},
StartAt: types.StrPtr(time.Now().Format(time.RFC3339)),
StopAt: types.StrPtr(time.Now().Format(time.RFC3339)),
Capacity: types.Int32Ptr(0),
Simulated: types.BoolPtr(false),
EventsCount: types.Int32Ptr(int32(len(decisionsList))),
Leakspeed: types.StrPtr(""),
ScenarioHash: types.StrPtr(""),
ScenarioVersion: types.StrPtr(""),
Decisions: decisionsList,
}
alerts = append(alerts, &importAlert)
if len(decisionsList) > 1000 {
log.Infof("You are about to add %d decisions, this may take a while", len(decisionsList))
}
_, _, err = Client.Alerts.Add(context.Background(), alerts)
if err != nil {
log.Fatalf(err.Error())
}
log.Infof("%d decisions successfully imported", len(decisionsList))
},
}
cmdDecisionImport.Flags().SortFlags = false
cmdDecisionImport.Flags().StringVarP(&importFile, "input", "i", "", "Input file")
cmdDecisionImport.Flags().StringVarP(&importDuration, "duration", "d", "", "Decision duration (ie. 1h,4h,30m)")
cmdDecisionImport.Flags().StringVar(&importScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)")
cmdDecisionImport.Flags().StringVarP(&importReason, "reason", "R", "", "Decision reason (ie. scenario-name)")
cmdDecisionImport.Flags().StringVarP(&importType, "type", "t", "", "Decision type (ie. ban,captcha,throttle)")
cmdDecisions.AddCommand(cmdDecisionImport)
return cmdDecisions
}
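For reference, a usage sketch of the new import subcommand (illustrative values; assumes cscli is already registered against a running LAPI). Only the 'value' column is mandatory; missing columns fall back to the defaults above (duration 4h, scope ip, type ban, reason manual) or to the CLI flags:

decisions.csv:
duration,scope,value
24h,ip,192.0.2.10
,ip,203.0.113.42

cscli decisions import -i decisions.csv
cscli decisions import -i decisions.json --scope range -d 12h -t captcha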

View file

@@ -120,7 +120,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
LogFile: logFileName,
LogType: logType,
IgnoreParsers: ignoreParsers,
Labels: labels,
Labels: labels,
}
configFilePath := filepath.Join(testPath, "config.yaml")
@@ -255,7 +255,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
fmt.Println()
}
}
if !forceClean {
if !forceClean && !noClean {
prompt := &survey.Confirm{
Message: fmt.Sprintf("\nDo you want to remove runtime folder for test '%s'? (default: Yes)", test.Name),
Default: true,

View file

@@ -144,6 +144,7 @@ Note: This command requires database direct access, so is intended to be run on
}
fmt.Printf("%s", string(x))
} else if csConfig.Cscli.Output == "raw" {
fmt.Printf("machine_id,ip_address,updated_at,validated,version\n")
for _, w := range machines {
var validated string
if w.IsValidated {

View file

@@ -131,8 +131,12 @@ func ListItem(itemType string, args []string) {
}
fmt.Printf("%s", string(x))
} else if csConfig.Cscli.Output == "raw" {
fmt.Printf("name,status,version,description\n")
for _, v := range hubStatus {
fmt.Printf("%s %s\n", v["name"], v["description"])
if v["local_version"] == "" {
v["local_version"] = "n/a"
}
fmt.Printf("%s,%s,%s,%s\n", v["name"], v["status"], v["local_version"], v["description"])
}
}
}
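For context, raw listings now emit CSV with a header line (name,status,version,description), which is what the updated wizard.sh parsing later in this commit relies on. An illustrative output of "cscli collections list -o raw" (hypothetical values):

name,status,version,description
crowdsecurity/linux,enabled,0.6,core linux support : syslog+geoip+ssh
crowdsecurity/sshd,enabled,0.2,sshd support : parser and brute-force detection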
@@ -149,7 +153,7 @@ func InstallItem(name string, obtype string, force bool) {
return
}
}
item, err := cwhub.DownloadLatest(csConfig.Hub, item, force)
item, err := cwhub.DownloadLatest(csConfig.Hub, item, force, false)
if err != nil {
log.Fatalf("error while downloading %s : %v", item.Name, err)
}
@@ -230,7 +234,7 @@ func UpgradeConfig(itemType string, name string, force bool) {
continue
}
}
v, err = cwhub.DownloadLatest(csConfig.Hub, v, force)
v, err = cwhub.DownloadLatest(csConfig.Hub, v, force, true)
if err != nil {
log.Fatalf("%s : download failed : %v", v.Name, err)
}
@@ -515,7 +519,7 @@ func silenceInstallItem(name string, obtype string) (string, error) {
if downloadOnly && it.Downloaded && it.UpToDate {
return fmt.Sprintf("%s is already downloaded and up-to-date", it.Name), nil
}
it, err := cwhub.DownloadLatest(csConfig.Hub, it, forceAction)
it, err := cwhub.DownloadLatest(csConfig.Hub, it, forceAction, false)
if err != nil {
return "", fmt.Errorf("error while downloading %s : %v", it.Name, err)
}

View file

@@ -250,7 +250,9 @@ func LoadConfig(cConfig *csconfig.Config) error {
}
if flags.SingleFileType != "" && flags.OneShotDSN != "" {
cConfig.API.Server.OnlineClient = nil
if cConfig.API != nil && cConfig.API.Server != nil {
cConfig.API.Server.OnlineClient = nil
}
/*if the api is disabled as well, just read file and exit, don't daemonize*/
if flags.DisableAPI {
cConfig.Common.Daemonize = false

go.mod (1 changed line)
View file

@@ -44,6 +44,7 @@ require (
github.com/huandu/xstrings v1.3.2 // indirect
github.com/imdario/mergo v0.3.12 // indirect
github.com/influxdata/go-syslog/v3 v3.0.0
github.com/jszwec/csvutil v1.5.1 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/lib/pq v1.10.2
github.com/mattn/go-runewidth v0.0.10 // indirect

go.sum (2 changed lines)
View file

@@ -436,6 +436,8 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jszwec/csvutil v1.5.1 h1:c3GFBhj6DFMUl4dMK3+B6rz2+LWWS/e9VJiVJ9t9kfQ=
github.com/jszwec/csvutil v1.5.1/go.mod h1:Rpu7Uu9giO9subDyMCIQfHVDuLrcaC36UA4YcJjGBkg=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=

View file

@@ -125,7 +125,7 @@ func (c *Controller) CreateAlert(gctx *gin.Context) {
c.HandleDBErrors(gctx, err)
return
}
stopFlush := false
for _, alert := range input {
alert.MachineID = machineID
if len(alert.Decisions) != 0 {
@@ -143,6 +143,10 @@ func (c *Controller) CreateAlert(gctx *gin.Context) {
break
}
}
decision := alert.Decisions[0]
if decision.Origin != nil && *decision.Origin == "cscli-import" {
stopFlush = true
}
continue
}
@@ -164,7 +168,13 @@ func (c *Controller) CreateAlert(gctx *gin.Context) {
}
}
if stopFlush {
c.DBClient.CanFlush = false
}
alerts, err := c.DBClient.CreateAlert(machineID, input)
c.DBClient.CanFlush = true
if err != nil {
c.HandleDBErrors(gctx, err)
return

View file

@@ -189,7 +189,7 @@ func test_prepenv() *csconfig.Config {
func testInstallItem(cfg *csconfig.Hub, t *testing.T, item Item) {
//Install the parser
item, err := DownloadLatest(cfg, item, false)
item, err := DownloadLatest(cfg, item, false, false)
if err != nil {
t.Fatalf("error while downloading %s : %v", item.Name, err)
}
@@ -246,7 +246,7 @@ func testUpdateItem(cfg *csconfig.Hub, t *testing.T, item Item) {
t.Fatalf("update: %s should NOT be up-to-date", item.Name)
}
//Update it + check status
item, err := DownloadLatest(cfg, item, true)
item, err := DownloadLatest(cfg, item, true, true)
if err != nil {
t.Fatalf("failed to update %s : %s", item.Name, err)
}

View file

@@ -76,7 +76,7 @@ func DownloadHubIdx(hub *csconfig.Hub) ([]byte, error) {
}
//DownloadLatest will download the latest version of Item to the tdir directory
func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool) (Item, error) {
func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool, updateOnly bool) (Item, error) {
var err error
log.Debugf("Downloading %s %s", target.Type, target.Name)
@@ -86,11 +86,15 @@ func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool) (Item, error
ptrtype := ItemTypes[idx]
for _, p := range ptr {
if val, ok := hubIdx[ptrtype][p]; ok {
log.Debugf("Download %s sub-item : %s %s", target.Name, ptrtype, p)
if !val.Installed && updateOnly {
log.Debugf("skipping upgrade of %s : not installed", target.Name)
continue
}
log.Debugf("Download %s sub-item : %s %s (%t -> %t)", target.Name, ptrtype, p, target.Installed, updateOnly)
//recurse as it's a collection
if ptrtype == COLLECTIONS {
log.Tracef("collection, recurse")
hubIdx[ptrtype][p], err = DownloadLatest(hub, val, overwrite)
hubIdx[ptrtype][p], err = DownloadLatest(hub, val, overwrite, updateOnly)
if err != nil {
return target, errors.Wrap(err, fmt.Sprintf("while downloading %s", val.Name))
}
@@ -118,6 +122,10 @@ func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool) (Item, error
return target, fmt.Errorf("failed to download item : %s", err)
}
} else {
if !target.Installed && updateOnly {
log.Debugf("skipping upgrade of %s : not installed", target.Name)
return target, nil
}
return DownloadItem(hub, target, overwrite)
}
return target, nil

View file

@@ -20,9 +20,10 @@ import (
)
const (
paginationSize = 100 // used to queryAlert to avoid 'too many SQL variable'
defaultLimit = 100 // default limit of element to returns when query alerts
bulkSize = 50 // bulk size when create alerts
paginationSize = 100 // used to queryAlert to avoid 'too many SQL variable'
defaultLimit = 100 // default limit of element to returns when query alerts
bulkSize = 50 // bulk size when create alerts
decisionBulkSize = 50
)
func formatAlertAsString(machineId string, alert *models.Alert) []string {
@@ -117,7 +118,6 @@ func (c *Client) CreateAlert(machineID string, alertList []*models.Alert) ([]str
/*We can't bulk both the alert and the decision at the same time. With new consensus, we want to bulk a single alert with a lot of decisions.*/
func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, int, error) {
decisionBulkSize := 50
var err error
var deleted, inserted int
@@ -278,6 +278,23 @@ func (c *Client) UpdateCommunityBlocklist(alertItem *models.Alert) (int, int, in
return alertRef.ID, inserted, deleted, nil
}
func chunkDecisions(decisions []*ent.Decision, chunkSize int) [][]*ent.Decision {
var ret [][]*ent.Decision
var chunk []*ent.Decision
for _, d := range decisions {
chunk = append(chunk, d)
if len(chunk) == chunkSize {
ret = append(ret, chunk)
chunk = nil
}
}
if len(chunk) > 0 {
ret = append(ret, chunk)
}
return ret
}
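(For instance, chunkDecisions called with a chunk size of 50 splits 120 decisions into batches of 50, 50 and 20; the attach loops below use it so that each AddDecisions call stays bounded, in the spirit of the 'too many SQL variable' comment above.)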
func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([]string, error) {
ret := []string{}
@@ -285,6 +302,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
c.Log.Debugf("writting %d items", len(alertList))
bulk := make([]*ent.AlertCreate, 0, bulkSize)
alertDecisions := make([][]*ent.Decision, 0, bulkSize)
for i, alertItem := range alertList {
var decisions []*ent.Decision
var metas []*ent.Meta
@@ -394,8 +412,10 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
c.Log.Errorf("While parsing StartAt of item %s : %s", *alertItem.StopAt, err)
ts = time.Now()
}
decisions = make([]*ent.Decision, 0)
if len(alertItem.Decisions) > 0 {
decisionBulk := make([]*ent.DecisionCreate, len(alertItem.Decisions))
decisionBulk := make([]*ent.DecisionCreate, 0, decisionBulkSize)
for i, decisionItem := range alertItem.Decisions {
var start_ip, start_sfx, end_ip, end_sfx int64
var sz int
@@ -412,7 +432,8 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
return []string{}, errors.Wrapf(ParseDurationFail, "invalid addr/range %s : %s", *decisionItem.Value, err)
}
}
decisionBulk[i] = c.Ent.Decision.Create().
decisionCreate := c.Ent.Decision.Create().
SetUntil(ts.Add(duration)).
SetScenario(*decisionItem.Scenario).
SetType(*decisionItem.Type).
@@ -425,12 +446,27 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
SetScope(*decisionItem.Scope).
SetOrigin(*decisionItem.Origin).
SetSimulated(*alertItem.Simulated)
decisionBulk = append(decisionBulk, decisionCreate)
if len(decisionBulk) == decisionBulkSize {
decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
}
decisions = append(decisions, decisionsCreateRet...)
if len(alertItem.Decisions)-i <= decisionBulkSize {
decisionBulk = make([]*ent.DecisionCreate, 0, (len(alertItem.Decisions) - i))
} else {
decisionBulk = make([]*ent.DecisionCreate, 0, decisionBulkSize)
}
}
}
decisions, err = c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX)
decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
}
decisions = append(decisions, decisionsCreateRet...)
}
alertB := c.Ent.Alert.
@@ -454,7 +490,6 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
SetSimulated(*alertItem.Simulated).
SetScenarioVersion(*alertItem.ScenarioVersion).
SetScenarioHash(*alertItem.ScenarioHash).
AddDecisions(decisions...).
AddEvents(events...).
AddMetas(metas...)
@@ -462,20 +497,31 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
alertB.SetOwner(owner)
}
bulk = append(bulk, alertB)
alertDecisions = append(alertDecisions, decisions)
if len(bulk) == bulkSize {
alerts, err := c.Ent.Alert.CreateBulk(bulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "bulk creating alert : %s", err)
}
for _, alert := range alerts {
ret = append(ret, strconv.Itoa(alert.ID))
for _, a := range alerts {
ret = append(ret, strconv.Itoa(a.ID))
for _, d := range alertDecisions {
decisionsChunk := chunkDecisions(d, bulkSize)
for _, d2 := range decisionsChunk {
_, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX)
if err != nil {
return []string{}, fmt.Errorf("error while updating decisions: %s", err.Error())
}
}
}
}
if len(alertList)-i <= bulkSize {
bulk = make([]*ent.AlertCreate, 0, (len(alertList) - i))
alertDecisions = make([][]*ent.Decision, 0, (len(alertList) - i))
} else {
bulk = make([]*ent.AlertCreate, 0, bulkSize)
alertDecisions = make([][]*ent.Decision, 0, bulkSize)
}
}
}
@@ -485,8 +531,17 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
return []string{}, errors.Wrapf(BulkError, "leftovers creating alert : %s", err)
}
for _, alert := range alerts {
ret = append(ret, strconv.Itoa(alert.ID))
for _, a := range alerts {
ret = append(ret, strconv.Itoa(a.ID))
for _, d := range alertDecisions {
decisionsChunk := chunkDecisions(d, bulkSize)
for _, d2 := range decisionsChunk {
_, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX)
if err != nil {
return []string{}, fmt.Errorf("error while updating decisions: %s", err.Error())
}
}
}
}
return ret, nil
@@ -812,7 +867,9 @@ func (c *Client) FlushOrphans() {
c.Log.Infof("%d deleted orphan events", events_count)
}
events_count, err = c.Ent.Decision.Delete().Where(decision.Not(decision.HasOwner())).Exec(c.CTX)
events_count, err = c.Ent.Decision.Delete().Where(
decision.Not(decision.HasOwner())).Where(decision.UntilLTE(time.Now())).Exec(c.CTX)
if err != nil {
c.Log.Warningf("error while deleting orphan decisions : %s", err)
return
@@ -828,6 +885,11 @@ func (c *Client) FlushAlerts(MaxAge string, MaxItems int) error {
var totalAlerts int
var err error
if !c.CanFlush {
c.Log.Debug("a list is being imported, flushing later")
return nil
}
c.Log.Debug("Flushing orphan alerts")
c.FlushOrphans()
c.Log.Debug("Done flushing orphan alerts")

View file

@@ -18,9 +18,10 @@ import (
)
type Client struct {
Ent *ent.Client
CTX context.Context
Log *log.Logger
Ent *ent.Client
CTX context.Context
Log *log.Logger
CanFlush bool
}
func NewClient(config *csconfig.DatabaseCfg) (*Client, error) {
@@ -82,7 +83,7 @@ func NewClient(config *csconfig.DatabaseCfg) (*Client, error) {
if err = client.Schema.Create(context.Background()); err != nil {
return nil, fmt.Errorf("failed creating schema resources: %v", err)
}
return &Client{Ent: client, CTX: context.Background(), Log: clog}, nil
return &Client{Ent: client, CTX: context.Background(), Log: clog, CanFlush: true}, nil
}
func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Scheduler, error) {

View file

@@ -32,6 +32,10 @@ func Upper(s string) string {
return strings.ToUpper(s)
}
func Lower(s string) string {
return strings.ToLower(s)
}
func GetExprEnv(ctx map[string]interface{}) map[string]interface{} {
var ExprLib = map[string]interface{}{
"Atof": Atof,
@@ -41,9 +45,14 @@ func GetExprEnv(ctx map[string]interface{}) map[string]interface{} {
"File": File,
"RegexpInFile": RegexpInFile,
"Upper": Upper,
"Lower": Lower,
"IpInRange": IpInRange,
"TimeNow": TimeNow,
"ParseUri": ParseUri,
"PathUnescape": PathUnescape,
"QueryUnescape": QueryUnescape,
"PathEscape": PathEscape,
"QueryEscape": QueryEscape,
}
for k, v := range ctx {
ExprLib[k] = v
@@ -97,6 +106,32 @@ func FileInit(fileFolder string, filename string, fileType string) error {
return nil
}
func QueryEscape(s string) string {
return url.QueryEscape(s)
}
func PathEscape(s string) string {
return url.PathEscape(s)
}
func PathUnescape(s string) string {
ret, err := url.PathUnescape(s)
if err != nil {
log.Errorf("unable to PathUnescape '%s': %+v", s, err)
return s
}
return ret
}
func QueryUnescape(s string) string {
ret, err := url.QueryUnescape(s)
if err != nil {
log.Errorf("unable to QueryUnescape '%s': %+v", s, err)
return s
}
return ret
}
func File(filename string) []string {
if _, ok := dataFile[filename]; ok {
return dataFile[filename]
@@ -144,7 +179,6 @@ func TimeNow() string {
return time.Now().Format(time.RFC3339)
}
func ParseUri(uri string) map[string][]string {
ret := make(map[string][]string)
u, err := url.Parse(uri)

View file

@@ -455,3 +455,203 @@ func TestParseUri(t *testing.T) {
log.Printf("test '%s' : OK", test.name)
}
}
func TestQueryEscape(t *testing.T) {
tests := []struct {
name string
env map[string]interface{}
code string
result string
err string
}{
{
name: "QueryEscape() test: basic test",
env: map[string]interface{}{
"uri": "/foo?a=1&b=2",
"QueryEscape": QueryEscape,
},
code: "QueryEscape(uri)",
result: "%2Ffoo%3Fa%3D1%26b%3D2",
err: "",
},
{
name: "QueryEscape() test: basic test",
env: map[string]interface{}{
"uri": "/foo?a=1&&b=<>'\"",
"QueryEscape": QueryEscape,
},
code: "QueryEscape(uri)",
result: "%2Ffoo%3Fa%3D1%26%26b%3D%3C%3E%27%22",
err: "",
},
}
for _, test := range tests {
program, err := expr.Compile(test.code, expr.Env(test.env))
require.NoError(t, err)
output, err := expr.Run(program, test.env)
require.NoError(t, err)
require.Equal(t, test.result, output)
log.Printf("test '%s' : OK", test.name)
}
}
func TestPathEscape(t *testing.T) {
tests := []struct {
name string
env map[string]interface{}
code string
result string
err string
}{
{
name: "PathEscape() test: basic test",
env: map[string]interface{}{
"uri": "/foo?a=1&b=2",
"PathEscape": PathEscape,
},
code: "PathEscape(uri)",
result: "%2Ffoo%3Fa=1&b=2",
err: "",
},
{
name: "PathEscape() test: basic test with more special chars",
env: map[string]interface{}{
"uri": "/foo?a=1&&b=<>'\"",
"PathEscape": PathEscape,
},
code: "PathEscape(uri)",
result: "%2Ffoo%3Fa=1&&b=%3C%3E%27%22",
err: "",
},
}
for _, test := range tests {
program, err := expr.Compile(test.code, expr.Env(test.env))
require.NoError(t, err)
output, err := expr.Run(program, test.env)
require.NoError(t, err)
require.Equal(t, test.result, output)
log.Printf("test '%s' : OK", test.name)
}
}
func TestPathUnescape(t *testing.T) {
tests := []struct {
name string
env map[string]interface{}
code string
result string
err string
}{
{
name: "PathUnescape() test: basic test",
env: map[string]interface{}{
"uri": "%2Ffoo%3Fa=1&b=%3C%3E%27%22",
"PathUnescape": PathUnescape,
},
code: "PathUnescape(uri)",
result: "/foo?a=1&b=<>'\"",
err: "",
},
{
name: "PathUnescape() test: basic test with more special chars",
env: map[string]interface{}{
"uri": "/$%7Bjndi",
"PathUnescape": PathUnescape,
},
code: "PathUnescape(uri)",
result: "/${jndi",
err: "",
},
}
for _, test := range tests {
program, err := expr.Compile(test.code, expr.Env(test.env))
require.NoError(t, err)
output, err := expr.Run(program, test.env)
require.NoError(t, err)
require.Equal(t, test.result, output)
log.Printf("test '%s' : OK", test.name)
}
}
func TestQueryUnescape(t *testing.T) {
tests := []struct {
name string
env map[string]interface{}
code string
result string
err string
}{
{
name: "QueryUnescape() test: basic test",
env: map[string]interface{}{
"uri": "%2Ffoo%3Fa=1&b=%3C%3E%27%22",
"QueryUnescape": QueryUnescape,
},
code: "QueryUnescape(uri)",
result: "/foo?a=1&b=<>'\"",
err: "",
},
{
name: "QueryUnescape() test: basic test with more special chars",
env: map[string]interface{}{
"uri": "/$%7Bjndi",
"QueryUnescape": QueryUnescape,
},
code: "QueryUnescape(uri)",
result: "/${jndi",
err: "",
},
}
for _, test := range tests {
program, err := expr.Compile(test.code, expr.Env(test.env))
require.NoError(t, err)
output, err := expr.Run(program, test.env)
require.NoError(t, err)
require.Equal(t, test.result, output)
log.Printf("test '%s' : OK", test.name)
}
}
func TestLower(t *testing.T) {
tests := []struct {
name string
env map[string]interface{}
code string
result string
err string
}{
{
name: "Lower() test: basic test",
env: map[string]interface{}{
"name": "ABCDEFG",
"Lower": Lower,
},
code: "Lower(name)",
result: "abcdefg",
err: "",
},
{
name: "Lower() test: basic test with more special chars",
env: map[string]interface{}{
"name": "AbcDefG!#",
"Lower": Lower,
},
code: "Lower(name)",
result: "abcdefg!#",
err: "",
},
}
for _, test := range tests {
program, err := expr.Compile(test.code, expr.Env(test.env))
require.NoError(t, err)
output, err := expr.Run(program, test.env)
require.NoError(t, err)
require.Equal(t, test.result, output)
log.Printf("test '%s' : OK", test.name)
}
}
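As an illustration (hypothetical, not part of this commit), the new helpers are exposed through GetExprEnv and are meant to be called from expr expressions in parsers and scenarios, e.g. a filter such as:

Lower(PathUnescape(evt.Parsed.request)) contains '${jndi:'

where evt.Parsed.request is an assumed parser field name.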

View file

@@ -226,9 +226,9 @@ install_collection() {
HMENU=()
readarray -t AVAILABLE_COLLECTION < <(${CSCLI_BIN_INSTALLED} collections list -o raw -a)
COLLECTION_TO_INSTALL=()
for collect_info in "${AVAILABLE_COLLECTION[@]}"; do
collection="$(echo ${collect_info} | cut -d " " -f1)"
description="$(echo ${collect_info} | cut -d " " -f2-)"
for collect_info in "${AVAILABLE_COLLECTION[@]:1}"; do
collection="$(echo ${collect_info} | cut -d "," -f1)"
description="$(echo ${collect_info} | cut -d "," -f4)"
in_array $collection "${DETECTED_SERVICES[@]}"
if [[ $? == 0 ]]; then
HMENU+=("${collection}" "${description}" "ON")
@@ -340,9 +340,8 @@ check_cs_version () {
if [[ $NEW_MAJOR_VERSION -gt $CURRENT_MAJOR_VERSION ]]; then
if [[ ${FORCE_MODE} == "false" ]]; then
log_warn "new version ($NEW_CS_VERSION) is a major, you need to follow documentation to upgrade !"
log_warn "new version ($NEW_CS_VERSION) is a major, you should follow documentation to upgrade !"
echo ""
echo "Please follow : https://docs.crowdsec.net/Crowdsec/v1/migration/"
exit 1
fi
elif [[ $NEW_MINOR_VERSION -gt $CURRENT_MINOR_VERSION ]] ; then
@@ -542,16 +541,17 @@ function show_link {
echo ""
echo "Useful links to start with Crowdsec:"
echo ""
echo " - Documentation : https://docs.crowdsec.net/Crowdsec/v1/getting_started/crowdsec-tour/"
echo " - Documentation : https://doc.crowdsec.net/docs/getting_started/crowdsec_tour"
echo " - Crowdsec Hub : https://hub.crowdsec.net/ "
echo " - Open issues : https://github.com/crowdsecurity/crowdsec/issues"
echo ""
echo "Useful commands to start with Crowdsec:"
echo ""
echo " - sudo cscli metrics : https://docs.crowdsec.net/Crowdsec/v1/cscli/cscli_metrics/"
echo " - sudo cscli decisions list : https://docs.crowdsec.net/Crowdsec/v1/cscli/cscli_decisions_list/"
echo " - sudo cscli alerts list : https://docs.crowdsec.net/Crowdsec/v1/cscli/cscli_alerts_list/"
echo " - sudo cscli hub list : https://docs.crowdsec.net/Crowdsec/v1/cscli/cscli_hub_list/"
echo " - sudo cscli metrics : https://doc.crowdsec.net/docs/observability/cscli"
echo " - sudo cscli decisions list : https://doc.crowdsec.net/docs/user_guides/decisions_mgmt"
echo " - sudo cscli hub list : https://doc.crowdsec.net/docs/user_guides/hub_mgmt"
echo ""
echo "Next step: visualize all your alerts and explore our community CTI : https://app.crowdsec.net"
echo ""
}