support for stdin with "cscli decision import" and raw values (#2291)

and remove Origin from the struct, which was ignored anyway
mmetc authored 2023-06-27 14:29:42 +02:00 (committed by GitHub)
parent 6e18c652cb
commit 85839b0199
8 changed files with 525 additions and 226 deletions
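The import command can now read from a regular file or from standard input ("-"), and a new raw "values" format accepts one value per line. A minimal, self-contained sketch of that input-selection pattern (function names here are illustrative, not the actual cscli helpers):

package main

import (
	"fmt"
	"io"
	"os"
	"strings"
)

// readInput mirrors the pattern used by the new import command:
// "-" selects standard input, anything else is opened as a file.
func readInput(path string) ([]byte, string, error) {
	if path == "-" {
		data, err := io.ReadAll(os.Stdin)
		return data, "stdin", err
	}
	f, err := os.Open(path)
	if err != nil {
		return nil, path, fmt.Errorf("unable to open %s: %w", path, err)
	}
	defer f.Close()
	data, err := io.ReadAll(f)
	return data, path, err
}

// guessFormat returns "json" or "csv" from the file extension, or ""
// when the caller must pass an explicit --format (e.g. for stdin).
func guessFormat(path string) string {
	switch {
	case strings.HasSuffix(path, ".json"):
		return "json"
	case strings.HasSuffix(path, ".csv"):
		return "csv"
	}
	return ""
}

func main() {
	data, name, err := readInput("-") // e.g. echo "1.2.3.4" | go run .
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Printf("read %d bytes from %s (guessed format: %q)\n", len(data), name, guessFormat(name))
}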


@@ -7,18 +7,15 @@ import (
"fmt"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/fatih/color"
"github.com/go-openapi/strfmt"
"github.com/jszwec/csvutil"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/pkg/ptr"
"github.com/crowdsecurity/go-cs-lib/pkg/version"
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
@@ -168,11 +165,11 @@ cscli decisions list -t ban
`,
Args: cobra.ExactArgs(0),
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
RunE: func(cmd *cobra.Command, args []string) error {
var err error
/*take care of shorthand options*/
if err := manageCliDecisionAlerts(filter.IPEquals, filter.RangeEquals, filter.ScopeEquals, filter.ValueEquals); err != nil {
log.Fatalf("%s", err)
if err = manageCliDecisionAlerts(filter.IPEquals, filter.RangeEquals, filter.ScopeEquals, filter.ValueEquals); err != nil {
return err
}
filter.ActiveDecisionEquals = new(bool)
*filter.ActiveDecisionEquals = true
@@ -188,7 +185,7 @@ cscli decisions list -t ban
days, err := strconv.Atoi(realDuration)
if err != nil {
printHelp(cmd)
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
}
*filter.Until = fmt.Sprintf("%d%s", days*24, "h")
}
@@ -201,7 +198,7 @@ cscli decisions list -t ban
days, err := strconv.Atoi(realDuration)
if err != nil {
printHelp(cmd)
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Since)
}
*filter.Since = fmt.Sprintf("%d%s", days*24, "h")
}
@@ -237,13 +234,15 @@ cscli decisions list -t ban
alerts, _, err := Client.Alerts.List(context.Background(), filter)
if err != nil {
log.Fatalf("Unable to list decisions : %v", err)
return fmt.Errorf("unable to retrieve decisions: %w", err)
}
err = DecisionsToTable(alerts, printMachine)
if err != nil {
log.Fatalf("unable to list decisions : %v", err)
return fmt.Errorf("unable to print decisions: %w", err)
}
return nil
},
}
cmdDecisionsList.Flags().SortFlags = false
@@ -287,7 +286,7 @@ cscli decisions add --scope username --value foobar
/*TBD : fix long and example*/
Args: cobra.ExactArgs(0),
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
RunE: func(cmd *cobra.Command, args []string) error {
var err error
alerts := models.AddAlertsRequest{}
origin := types.CscliOrigin
@@ -302,7 +301,7 @@ cscli decisions add --scope username --value foobar
/*take care of shorthand options*/
if err := manageCliDecisionAlerts(&addIP, &addRange, &addScope, &addValue); err != nil {
log.Fatalf("%s", err)
return err
}
if addIP != "" {
@@ -313,7 +312,7 @@ cscli decisions add --scope username --value foobar
addScope = types.Range
} else if addValue == "" {
printHelp(cmd)
log.Fatalf("Missing arguments, a value is required (--ip, --range or --scope and --value)")
return fmt.Errorf("Missing arguments, a value is required (--ip, --range or --scope and --value)")
}
if addReason == "" {
@@ -356,10 +355,11 @@ cscli decisions add --scope username --value foobar
_, _, err = Client.Alerts.Add(context.Background(), alerts)
if err != nil {
log.Fatal(err)
return err
}
log.Info("Decision successfully added")
return nil
},
}
@@ -400,25 +400,27 @@ cscli decisions delete --id 42
cscli decisions delete --type captcha
`,
/*TBD : refaire le Long/Example*/
PreRun: func(cmd *cobra.Command, args []string) {
PreRunE: func(cmd *cobra.Command, args []string) error {
if delDecisionAll {
return
return nil
}
if *delFilter.ScopeEquals == "" && *delFilter.ValueEquals == "" &&
*delFilter.TypeEquals == "" && *delFilter.IPEquals == "" &&
*delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
*delFilter.OriginEquals == "" && delDecisionId == "" {
cmd.Usage()
log.Fatalln("At least one filter or --all must be specified")
return fmt.Errorf("at least one filter or --all must be specified")
}
return nil
},
Run: func(cmd *cobra.Command, args []string) {
RunE: func(cmd *cobra.Command, args []string) error {
var err error
var decisions *models.DeleteDecisionResponse
/*take care of shorthand options*/
if err := manageCliDecisionAlerts(delFilter.IPEquals, delFilter.RangeEquals, delFilter.ScopeEquals, delFilter.ValueEquals); err != nil {
log.Fatalf("%s", err)
if err = manageCliDecisionAlerts(delFilter.IPEquals, delFilter.RangeEquals, delFilter.ScopeEquals, delFilter.ValueEquals); err != nil {
return err
}
if *delFilter.ScopeEquals == "" {
delFilter.ScopeEquals = nil
@@ -448,18 +450,19 @@ cscli decisions delete --type captcha
if delDecisionId == "" {
decisions, _, err = Client.Decisions.Delete(context.Background(), delFilter)
if err != nil {
log.Fatalf("Unable to delete decisions : %v", err)
return fmt.Errorf("Unable to delete decisions: %v", err)
}
} else {
if _, err = strconv.Atoi(delDecisionId); err != nil {
log.Fatalf("id '%s' is not an integer: %v", delDecisionId, err)
return fmt.Errorf("id '%s' is not an integer: %v", delDecisionId, err)
}
decisions, _, err = Client.Decisions.DeleteOne(context.Background(), delDecisionId)
if err != nil {
log.Fatalf("Unable to delete decision : %v", err)
return fmt.Errorf("Unable to delete decision: %v", err)
}
}
log.Infof("%s decision(s) deleted", decisions.NbDeleted)
return nil
},
}
@@ -477,192 +480,3 @@ cscli decisions delete --type captcha
return cmdDecisionsDelete
}
func NewDecisionsImportCmd() *cobra.Command {
var (
defaultDuration = "4h"
defaultScope = "ip"
defaultType = "ban"
defaultReason = "manual"
importDuration string
importScope string
importReason string
importType string
importFile string
batchSize int
)
var cmdDecisionImport = &cobra.Command{
Use: "import [options]",
Short: "Import decisions from json or csv file",
Long: "expected format :\n" +
"csv : any of duration,origin,reason,scope,type,value, with a header line\n" +
`json : {"duration" : "24h", "origin" : "my-list", "reason" : "my_scenario", "scope" : "ip", "type" : "ban", "value" : "x.y.z.z"}`,
DisableAutoGenTag: true,
Example: `decisions.csv :
duration,scope,value
24h,ip,1.2.3.4
cscsli decisions import -i decisions.csv
decisions.json :
[{"duration" : "4h", "scope" : "ip", "type" : "ban", "value" : "1.2.3.4"}]
`,
Run: func(cmd *cobra.Command, args []string) {
if importFile == "" {
log.Fatalf("Please provide a input file containing decisions with -i flag")
}
csvData, err := os.ReadFile(importFile)
if err != nil {
log.Fatalf("unable to open '%s': %s", importFile, err)
}
type decisionRaw struct {
Duration string `csv:"duration,omitempty" json:"duration,omitempty"`
Origin string `csv:"origin,omitempty" json:"origin,omitempty"`
Scenario string `csv:"reason,omitempty" json:"reason,omitempty"`
Scope string `csv:"scope,omitempty" json:"scope,omitempty"`
Type string `csv:"type,omitempty" json:"type,omitempty"`
Value string `csv:"value" json:"value"`
}
var decisionsListRaw []decisionRaw
switch fileFormat := filepath.Ext(importFile); fileFormat {
case ".json":
if err := json.Unmarshal(csvData, &decisionsListRaw); err != nil {
log.Fatalf("unable to unmarshall json: '%s'", err)
}
case ".csv":
if err := csvutil.Unmarshal(csvData, &decisionsListRaw); err != nil {
log.Fatalf("unable to unmarshall csv: '%s'", err)
}
default:
log.Fatalf("file format not supported for '%s'. supported format are 'json' and 'csv'", importFile)
}
decisionsList := make([]*models.Decision, 0)
for i, decisionLine := range decisionsListRaw {
line := i + 2
if decisionLine.Value == "" {
log.Fatalf("please provide a 'value' in your csv line %d", line)
}
/*deal with defaults and cli-override*/
if decisionLine.Duration == "" {
decisionLine.Duration = defaultDuration
log.Debugf("No 'duration' line %d, using default value: '%s'", line, defaultDuration)
}
if importDuration != "" {
decisionLine.Duration = importDuration
log.Debugf("'duration' line %d, using supplied value: '%s'", line, importDuration)
}
decisionLine.Origin = types.CscliImportOrigin
if decisionLine.Scenario == "" {
decisionLine.Scenario = defaultReason
log.Debugf("No 'reason' line %d, using value: '%s'", line, decisionLine.Scenario)
}
if importReason != "" {
decisionLine.Scenario = importReason
log.Debugf("No 'reason' line %d, using supplied value: '%s'", line, importReason)
}
if decisionLine.Type == "" {
decisionLine.Type = defaultType
log.Debugf("No 'type' line %d, using default value: '%s'", line, decisionLine.Type)
}
if importType != "" {
decisionLine.Type = importType
log.Debugf("'type' line %d, using supplied value: '%s'", line, importType)
}
if decisionLine.Scope == "" {
decisionLine.Scope = defaultScope
log.Debugf("No 'scope' line %d, using default value: '%s'", line, decisionLine.Scope)
}
if importScope != "" {
decisionLine.Scope = importScope
log.Debugf("'scope' line %d, using supplied value: '%s'", line, importScope)
}
decision := models.Decision{
Value: ptr.Of(decisionLine.Value),
Duration: ptr.Of(decisionLine.Duration),
Origin: ptr.Of(decisionLine.Origin),
Scenario: ptr.Of(decisionLine.Scenario),
Type: ptr.Of(decisionLine.Type),
Scope: ptr.Of(decisionLine.Scope),
Simulated: new(bool),
}
decisionsList = append(decisionsList, &decision)
}
alerts := models.AddAlertsRequest{}
if batchSize > 0 {
for i := 0; i < len(decisionsList); i += batchSize {
end := i + batchSize
if end > len(decisionsList) {
end = len(decisionsList)
}
decisionBatch := decisionsList[i:end]
importAlert := models.Alert{
CreatedAt: time.Now().UTC().Format(time.RFC3339),
Scenario: ptr.Of(fmt.Sprintf("import %s : %d IPs", importFile, len(decisionBatch))),
Message: ptr.Of(""),
Events: []*models.Event{},
Source: &models.Source{
Scope: ptr.Of(""),
Value: ptr.Of(""),
},
StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
Capacity: ptr.Of(int32(0)),
Simulated: ptr.Of(false),
EventsCount: ptr.Of(int32(len(decisionBatch))),
Leakspeed: ptr.Of(""),
ScenarioHash: ptr.Of(""),
ScenarioVersion: ptr.Of(""),
Decisions: decisionBatch,
}
alerts = append(alerts, &importAlert)
}
} else {
importAlert := models.Alert{
CreatedAt: time.Now().UTC().Format(time.RFC3339),
Scenario: ptr.Of(fmt.Sprintf("import %s : %d IPs", importFile, len(decisionsList))),
Message: ptr.Of(""),
Events: []*models.Event{},
Source: &models.Source{
Scope: ptr.Of(""),
Value: ptr.Of(""),
},
StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
Capacity: ptr.Of(int32(0)),
Simulated: ptr.Of(false),
EventsCount: ptr.Of(int32(len(decisionsList))),
Leakspeed: ptr.Of(""),
ScenarioHash: ptr.Of(""),
ScenarioVersion: ptr.Of(""),
Decisions: decisionsList,
}
alerts = append(alerts, &importAlert)
}
if len(decisionsList) > 1000 {
log.Infof("You are about to add %d decisions, this may take a while", len(decisionsList))
}
_, _, err = Client.Alerts.Add(context.Background(), alerts)
if err != nil {
log.Fatal(err)
}
log.Infof("%d decisions successfully imported", len(decisionsList))
},
}
cmdDecisionImport.Flags().SortFlags = false
cmdDecisionImport.Flags().StringVarP(&importFile, "input", "i", "", "Input file")
cmdDecisionImport.Flags().StringVarP(&importDuration, "duration", "d", "", "Decision duration (ie. 1h,4h,30m)")
cmdDecisionImport.Flags().StringVar(&importScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)")
cmdDecisionImport.Flags().StringVarP(&importReason, "reason", "R", "", "Decision reason (ie. scenario-name)")
cmdDecisionImport.Flags().StringVarP(&importType, "type", "t", "", "Decision type (ie. ban,captcha,throttle)")
cmdDecisionImport.Flags().IntVar(&batchSize, "batch", 0, "Split import in batches of N decisions")
return cmdDecisionImport
}
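Both the removed implementation above and its replacement in decisions_import.go map CSV columns onto a raw decision struct through csvutil and its csv tags, so the "reason" column lands in the Scenario field and absent columns stay empty until the flag defaults are applied. A small standalone illustration of that mapping, using made-up sample data rather than the real fixtures:

package main

import (
	"fmt"
	"log"

	"github.com/jszwec/csvutil"
)

// decisionRow mirrors the csv tags of decisionRaw: the "reason" column
// fills the Scenario field, and missing columns are simply left empty.
type decisionRow struct {
	Duration string `csv:"duration,omitempty"`
	Scenario string `csv:"reason,omitempty"`
	Scope    string `csv:"scope,omitempty"`
	Type     string `csv:"type,omitempty"`
	Value    string `csv:"value"`
}

func main() {
	data := []byte("duration,scope,value\n24h,ip,1.2.3.4\n")

	var rows []decisionRow
	if err := csvutil.Unmarshal(data, &rows); err != nil {
		log.Fatalf("unable to parse csv: %s", err)
	}

	// Scenario and Type stay empty here; the import command later fills
	// them with the --reason and --type defaults.
	for _, r := range rows {
		fmt.Printf("%+v\n", r)
	}
}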


@@ -0,0 +1,272 @@
package main
import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"os"
"strings"
"time"
"github.com/jszwec/csvutil"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/pkg/ptr"
"github.com/crowdsecurity/go-cs-lib/pkg/slicetools"
"github.com/crowdsecurity/crowdsec/pkg/models"
"github.com/crowdsecurity/crowdsec/pkg/types"
)
// decisionRaw is only used to unmarshall json/csv decisions
type decisionRaw struct {
Duration string `csv:"duration,omitempty" json:"duration,omitempty"`
Scenario string `csv:"reason,omitempty" json:"reason,omitempty"`
Scope string `csv:"scope,omitempty" json:"scope,omitempty"`
Type string `csv:"type,omitempty" json:"type,omitempty"`
Value string `csv:"value" json:"value"`
}
func parseDecisionList(content []byte, format string) ([]decisionRaw, error) {
ret := []decisionRaw{}
switch format {
case "values":
log.Infof("Parsing values")
scanner := bufio.NewScanner(bytes.NewReader(content))
for scanner.Scan() {
value := strings.TrimSpace(scanner.Text())
ret = append(ret, decisionRaw{Value: value})
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("unable to parse values: '%s'", err)
}
case "json":
log.Infof("Parsing json")
if err := json.Unmarshal(content, &ret); err != nil {
return nil, err
}
case "csv":
log.Infof("Parsing csv")
if err := csvutil.Unmarshal(content, &ret); err != nil {
return nil, fmt.Errorf("unable to parse csv: '%s'", err)
}
default:
return nil, fmt.Errorf("invalid format '%s', expected one of 'json', 'csv', 'values'", format)
}
return ret, nil
}
func runDecisionsImport(cmd *cobra.Command, args []string) error {
flags := cmd.Flags()
input, err := flags.GetString("input")
if err != nil {
return err
}
defaultDuration, err := flags.GetString("duration")
if err != nil {
return err
}
if defaultDuration == "" {
return fmt.Errorf("--duration cannot be empty")
}
defaultScope, err := flags.GetString("scope")
if err != nil {
return err
}
if defaultScope == "" {
return fmt.Errorf("--scope cannot be empty")
}
defaultReason, err := flags.GetString("reason")
if err != nil {
return err
}
if defaultReason == "" {
return fmt.Errorf("--reason cannot be empty")
}
defaultType, err := flags.GetString("type")
if err != nil {
return err
}
if defaultType == "" {
return fmt.Errorf("--type cannot be empty")
}
batchSize, err := flags.GetInt("batch")
if err != nil {
return err
}
format, err := flags.GetString("format")
if err != nil {
return err
}
var (
content []byte
fin *os.File
)
// set format if the file has a json or csv extension
if format == "" {
if strings.HasSuffix(input, ".json") {
format = "json"
} else if strings.HasSuffix(input, ".csv") {
format = "csv"
}
}
if format == "" {
return fmt.Errorf("unable to guess format from file extension, please provide a format with --format flag")
}
if input == "-" {
fin = os.Stdin
input = "stdin"
} else {
fin, err = os.Open(input)
if err != nil {
return fmt.Errorf("unable to open %s: %s", input, err)
}
}
content, err = io.ReadAll(fin)
if err != nil {
return fmt.Errorf("unable to read from %s: %s", input, err)
}
decisionsListRaw, err := parseDecisionList(content, format)
if err != nil {
return err
}
decisions := make([]*models.Decision, len(decisionsListRaw))
for i, d := range decisionsListRaw {
if d.Value == "" {
return fmt.Errorf("item %d: missing 'value'", i)
}
if d.Duration == "" {
d.Duration = defaultDuration
log.Debugf("item %d: missing 'duration', using default '%s'", i, defaultDuration)
}
if d.Scenario == "" {
d.Scenario = defaultReason
log.Debugf("item %d: missing 'reason', using default '%s'", i, defaultReason)
}
if d.Type == "" {
d.Type = defaultType
log.Debugf("item %d: missing 'type', using default '%s'", i, defaultType)
}
if d.Scope == "" {
d.Scope = defaultScope
log.Debugf("item %d: missing 'scope', using default '%s'", i, defaultScope)
}
decisions[i] = &models.Decision{
Value: ptr.Of(d.Value),
Duration: ptr.Of(d.Duration),
Origin: ptr.Of(types.CscliImportOrigin),
Scenario: ptr.Of(d.Scenario),
Type: ptr.Of(d.Type),
Scope: ptr.Of(d.Scope),
Simulated: ptr.Of(false),
}
}
alerts := models.AddAlertsRequest{}
for _, chunk := range slicetools.Chunks(decisions, batchSize) {
log.Debugf("Processing chunk of %d decisions", len(chunk))
importAlert := models.Alert{
CreatedAt: time.Now().UTC().Format(time.RFC3339),
Scenario: ptr.Of(fmt.Sprintf("import %s: %d IPs", input, len(chunk))),
Message: ptr.Of(""),
Events: []*models.Event{},
Source: &models.Source{
Scope: ptr.Of(""),
Value: ptr.Of(""),
},
StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
Capacity: ptr.Of(int32(0)),
Simulated: ptr.Of(false),
EventsCount: ptr.Of(int32(len(chunk))),
Leakspeed: ptr.Of(""),
ScenarioHash: ptr.Of(""),
ScenarioVersion: ptr.Of(""),
Decisions: chunk,
}
alerts = append(alerts, &importAlert)
}
if len(decisions) > 1000 {
log.Infof("You are about to add %d decisions, this may take a while", len(decisions))
}
_, _, err = Client.Alerts.Add(context.Background(), alerts)
if err != nil {
return err
}
log.Infof("Imported %d decisions", len(decisions))
return nil
}
func NewDecisionsImportCmd() *cobra.Command {
var cmdDecisionsImport = &cobra.Command{
Use: "import [options]",
Short: "Import decisions from a file or pipe",
Long: "expected format:\n" +
"csv : any of duration,reason,scope,type,value, with a header line\n" +
`json : {"duration" : "24h", "reason" : "my_scenario", "scope" : "ip", "type" : "ban", "value" : "x.y.z.z"}`,
DisableAutoGenTag: true,
Example: `decisions.csv:
duration,scope,value
24h,ip,1.2.3.4
$ cscli decisions import -i decisions.csv
decisions.json:
[{"duration" : "4h", "scope" : "ip", "type" : "ban", "value" : "1.2.3.4"}]
The file format is detected from the extension, but can be forced with the --format option
which is required when reading from standard input.
Raw values, standard input:
$ echo "1.2.3.4" | cscli decisions import -i - --format values
`,
RunE: runDecisionsImport,
}
flags := cmdDecisionsImport.Flags()
flags.SortFlags = false
flags.StringP("input", "i", "", "Input file")
flags.StringP("duration", "d", "4h", "Decision duration: 1h,4h,30m")
flags.String("scope", types.Ip, "Decision scope: ip,range,username")
flags.StringP("reason", "R", "manual", "Decision reason: <scenario-name>")
flags.StringP("type", "t", "ban", "Decision type: ban,captcha,throttle")
flags.Int("batch", 0, "Split import in batches of N decisions")
flags.String("format", "", "Input format: 'json', 'csv' or 'values' (each line is a value, no headers)")
cmdDecisionsImport.MarkFlagRequired("input")
return cmdDecisionsImport
}
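The new "values" format in parseDecisionList above treats each input line as a single decision value, trimming surrounding whitespace and leaving every other field to the flag defaults. A self-contained sketch of that scanning step, outside the cscli codebase:

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

// parseValues follows the "values" branch of parseDecisionList:
// one decision value per line, surrounding whitespace trimmed.
func parseValues(content []byte) ([]string, error) {
	values := []string{}
	scanner := bufio.NewScanner(bytes.NewReader(content))
	for scanner.Scan() {
		values = append(values, strings.TrimSpace(scanner.Text()))
	}
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("unable to parse values: %w", err)
	}
	return values, nil
}

func main() {
	input := []byte("1.2.3.4\n1.2.3.5\n1.2.3.6\n")
	values, err := parseValues(input)
	if err != nil {
		panic(err)
	}
	fmt.Println(values) // [1.2.3.4 1.2.3.5 1.2.3.6]
}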


@@ -565,7 +565,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
}
marshallMetas, err := json.Marshal(eventItem.Meta)
if err != nil {
return []string{}, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err)
return nil, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err)
}
//the serialized field is too big, let's try to progressively strip it
@@ -583,7 +583,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
marshallMetas, err = json.Marshal(eventItem.Meta)
if err != nil {
return []string{}, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err)
return nil, errors.Wrapf(MarshalFail, "event meta '%v' : %s", eventItem.Meta, err)
}
if event.SerializedValidator(string(marshallMetas)) == nil {
valid = true
@@ -612,7 +612,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
}
events, err = c.Ent.Event.CreateBulk(eventBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert events: %s", err)
return nil, errors.Wrapf(BulkError, "creating alert events: %s", err)
}
}
@@ -625,7 +625,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
}
metas, err = c.Ent.Meta.CreateBulk(metaBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert meta: %s", err)
return nil, errors.Wrapf(BulkError, "creating alert meta: %s", err)
}
}
@@ -638,14 +638,14 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
duration, err := time.ParseDuration(*decisionItem.Duration)
if err != nil {
return []string{}, errors.Wrapf(ParseDurationFail, "decision duration '%+v' : %s", *decisionItem.Duration, err)
return nil, errors.Wrapf(ParseDurationFail, "decision duration '%+v' : %s", *decisionItem.Duration, err)
}
/*if the scope is IP or Range, convert the value to integers */
if strings.ToLower(*decisionItem.Scope) == "ip" || strings.ToLower(*decisionItem.Scope) == "range" {
sz, start_ip, start_sfx, end_ip, end_sfx, err = types.Addr2Ints(*decisionItem.Value)
if err != nil {
return []string{}, errors.Wrapf(InvalidIPOrRange, "invalid addr/range %s : %s", *decisionItem.Value, err)
return nil, fmt.Errorf("%s: %w", *decisionItem.Value, InvalidIPOrRange)
}
}
@@ -668,7 +668,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
if len(decisionBulk) == decisionBulkSize {
decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
return nil, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
}
decisions = append(decisions, decisionsCreateRet...)
@@ -681,7 +681,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
}
decisionsCreateRet, err := c.Ent.Decision.CreateBulk(decisionBulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
return nil, errors.Wrapf(BulkError, "creating alert decisions: %s", err)
}
decisions = append(decisions, decisionsCreateRet...)
}
@@ -720,7 +720,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
if len(bulk) == bulkSize {
alerts, err := c.Ent.Alert.CreateBulk(bulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "bulk creating alert : %s", err)
return nil, errors.Wrapf(BulkError, "bulk creating alert : %s", err)
}
for alertIndex, a := range alerts {
ret = append(ret, strconv.Itoa(a.ID))
@@ -729,7 +729,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
for _, d2 := range decisionsChunk {
_, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX)
if err != nil {
return []string{}, fmt.Errorf("error while updating decisions: %s", err)
return nil, fmt.Errorf("error while updating decisions: %s", err)
}
}
}
@@ -745,7 +745,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
alerts, err := c.Ent.Alert.CreateBulk(bulk...).Save(c.CTX)
if err != nil {
return []string{}, errors.Wrapf(BulkError, "leftovers creating alert : %s", err)
return nil, errors.Wrapf(BulkError, "leftovers creating alert : %s", err)
}
for alertIndex, a := range alerts {
@@ -755,7 +755,7 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
for _, d2 := range decisionsChunk {
_, err := c.Ent.Alert.Update().Where(alert.IDEQ(a.ID)).AddDecisions(d2...).Save(c.CTX)
if err != nil {
return []string{}, fmt.Errorf("error while updating decisions: %s", err)
return nil, fmt.Errorf("error while updating decisions: %s", err)
}
}
}
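Besides returning nil instead of an empty slice, the alerts.go hunk above swaps errors.Wrapf on the InvalidIPOrRange sentinel for fmt.Errorf with %w, so the offending value leads the message while errors.Is can still match the sentinel. A generic illustration of that pattern (the sentinel below is a stand-in, not the real database error variable):

package main

import (
	"errors"
	"fmt"
)

// errInvalidIPOrRange stands in for the database package's InvalidIPOrRange sentinel.
var errInvalidIPOrRange = errors.New("invalid ip address / range")

func checkAddr(value string) error {
	// %w keeps the sentinel in the error chain: the message starts with the
	// offending value, but errors.Is still matches further up the stack.
	return fmt.Errorf("%s: %w", value, errInvalidIPOrRange)
}

func main() {
	err := checkAddr("whatever")
	fmt.Println(err)                                 // whatever: invalid ip address / range
	fmt.Println(errors.Is(err, errInvalidIPOrRange)) // true
}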


@@ -5,6 +5,9 @@ set -u
setup_file() {
load "../lib/setup_file.sh"
TESTDATA="${BATS_TEST_DIRNAME}/testdata/90_decisions"
export TESTDATA
}
teardown_file() {
@@ -56,8 +59,122 @@ teardown() {
@test "cscli decisions list, incorrect parameters" {
rune -1 cscli decisions list --until toto
assert_stderr --partial 'Unable to list decisions : performing request: API error: while parsing duration: time: invalid duration \"toto\"'
assert_stderr --partial 'unable to retrieve decisions: performing request: API error: while parsing duration: time: invalid duration \"toto\"'
rune -1 cscli decisions list --until toto -o json
rune -0 jq -c '[.level, .msg]' <(stderr | grep "^{")
assert_output '["fatal","Unable to list decisions : performing request: API error: while parsing duration: time: invalid duration \"toto\""]'
assert_output '["fatal","unable to retrieve decisions: performing request: API error: while parsing duration: time: invalid duration \"toto\""]'
}
@test "cscli decisions import" {
# required input
rune -1 cscli decisions import
assert_stderr --partial 'required flag(s) \"input\" not set"'
# unsupported format
rune -1 cscli decisions import -i - <<<'value\n5.6.7.8' --format xml
assert_stderr --partial "invalid format 'xml', expected one of 'json', 'csv', 'values'"
# invalid defaults
rune -1 cscli decisions import --duration "" -i - <<<'value\n5.6.7.8' --format csv
assert_stderr --partial "--duration cannot be empty"
rune -1 cscli decisions import --scope "" -i - <<<'value\n5.6.7.8' --format csv
assert_stderr --partial "--scope cannot be empty"
rune -1 cscli decisions import --reason "" -i - <<<'value\n5.6.7.8' --format csv
assert_stderr --partial "--reason cannot be empty"
rune -1 cscli decisions import --type "" -i - <<<'value\n5.6.7.8' --format csv
assert_stderr --partial "--type cannot be empty"
#----------
# JSON
#----------
# import from file
rune -1 cscli decisions import -i "${TESTDATA}/json_decisions"
assert_stderr --partial "unable to guess format from file extension, please provide a format with --format flag"
rune -0 cscli decisions import -i "${TESTDATA}/decisions.json"
assert_stderr --partial "Parsing json"
assert_stderr --partial "Imported 5 decisions"
# import from stdin
rune -1 cscli decisions import -i /dev/stdin < <(cat "${TESTDATA}/decisions.json")
assert_stderr --partial "unable to guess format from file extension, please provide a format with --format flag"
rune -0 cscli decisions import -i /dev/stdin < <(cat "${TESTDATA}/decisions.json") --format json
assert_stderr --partial "Parsing json"
assert_stderr --partial "Imported 5 decisions"
# invalid json
rune -1 cscli decisions import -i - <<<'{"blah":"blah"}' --format json
assert_stderr --partial 'Parsing json'
assert_stderr --partial 'json: cannot unmarshal object into Go value of type []main.decisionRaw'
# json with extra data
rune -1 cscli decisions import -i - <<<'{"values":"1.2.3.4","blah":"blah"}' --format json
assert_stderr --partial 'Parsing json'
assert_stderr --partial 'json: cannot unmarshal object into Go value of type []main.decisionRaw'
#----------
# CSV
#----------
# import from file
rune -1 cscli decisions import -i "${TESTDATA}/csv_decisions"
assert_stderr --partial "unable to guess format from file extension, please provide a format with --format flag"
rune -0 cscli decisions import -i "${TESTDATA}/decisions.csv"
assert_stderr --partial 'Parsing csv'
assert_stderr --partial 'Imported 5 decisions'
# import from stdin
rune -1 cscli decisions import -i /dev/stdin < <(cat "${TESTDATA}/decisions.csv")
assert_stderr --partial "unable to guess format from file extension, please provide a format with --format flag"
rune -0 cscli decisions import -i /dev/stdin < <(cat "${TESTDATA}/decisions.csv") --format csv
assert_stderr --partial "Parsing csv"
assert_stderr --partial "Imported 5 decisions"
# invalid csv
# XXX: improve validation
rune -0 cscli decisions import -i - <<<'value\n1.2.3.4,5.6.7.8' --format csv
assert_stderr --partial 'Parsing csv'
assert_stderr --partial "Imported 0 decisions"
#----------
# VALUES
#----------
# can use '-' as stdin
rune -0 cscli decisions import -i - --format values <<-EOT
1.2.3.4
1.2.3.5
1.2.3.6
EOT
assert_stderr --partial 'Parsing values'
assert_stderr --partial 'Imported 3 decisions'
rune -0 cscli decisions import -i - --format values <<-EOT
10.2.3.4
10.2.3.5
10.2.3.6
EOT
assert_stderr --partial 'Parsing values'
assert_stderr --partial 'Imported 3 decisions'
rune -1 cscli decisions import -i - --format values <<-EOT
whatever
EOT
assert_stderr --partial 'Parsing values'
assert_stderr --partial 'API error: unable to create alerts: whatever: invalid ip address / range'
#----------
# Batch
#----------
rune -0 cscli decisions import -i - --format values --batch 2 --debug <<-EOT
1.2.3.4
1.2.3.5
1.2.3.6
EOT
assert_stderr --partial 'Processing chunk of 2 decisions'
assert_stderr --partial 'Processing chunk of 1 decisions'
assert_stderr --partial 'Imported 3 decisions'
}
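The batch test above exercises the slicetools.Chunks call in runDecisionsImport: with --batch 2 and three values, the import builds one alert holding two decisions and another holding one. A rough local equivalent of that chunking, written without the go-cs-lib dependency and assuming a batch size of 0 means a single chunk:

package main

import "fmt"

// chunks splits items into slices of at most size elements. A size of 0 or
// less keeps everything in one chunk, which is the behaviour assumed here
// for the default --batch value.
func chunks[T any](items []T, size int) [][]T {
	if size <= 0 {
		return [][]T{items}
	}
	var out [][]T
	for i := 0; i < len(items); i += size {
		end := i + size
		if end > len(items) {
			end = len(items)
		}
		out = append(out, items[i:end])
	}
	return out
}

func main() {
	decisions := []string{"1.2.3.4", "1.2.3.5", "1.2.3.6"}
	for _, c := range chunks(decisions, 2) {
		fmt.Printf("Processing chunk of %d decisions\n", len(c))
	}
	// Output:
	// Processing chunk of 2 decisions
	// Processing chunk of 1 decisions
}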


@@ -0,0 +1,6 @@
origin,scope,value,reason,type,duration
cscli,ip,1.6.11.16,manual import from csv,ban,1h
cscli,ip,2.7.12.17,manual import from csv,ban,1h
cscli,ip,3.8.13.18,manual import from csv,ban,1h
cscli,ip,4.9.14.19,manual import from csv,ban,1h
cscli,ip,5.10.15.20,manual import from csv,ban,1h


@@ -0,0 +1,6 @@
origin,scope,value,reason,type,duration
cscli,ip,1.6.11.16,manual import from csv,ban,1h
cscli,ip,2.7.12.17,manual import from csv,ban,1h
cscli,ip,3.8.13.18,manual import from csv,ban,1h
cscli,ip,4.9.14.19,manual import from csv,ban,1h
cscli,ip,5.10.15.20,manual import from csv,ban,1h


@@ -0,0 +1,42 @@
[
{
"origin": "cscli",
"scope": "ip",
"value": "1.6.11.16",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "2.7.12.17",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "3.8.13.18",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "4.9.14.19",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "5.10.15.20",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
}
]


@@ -0,0 +1,42 @@
[
{
"origin": "cscli",
"scope": "ip",
"value": "1.6.11.16",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "2.7.12.17",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "3.8.13.18",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "4.9.14.19",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
},
{
"origin": "cscli",
"scope": "ip",
"value": "5.10.15.20",
"reason": "manual import from csv",
"type": "ban",
"duration": "1h"
}
]