Merge branch 'master' into enable_context_in_console

commit 62e938add0
Thibault "bui" Koechlin, 2024-01-03 13:05:29 +01:00, committed via GitHub
22 changed files with 221 additions and 50 deletions


@@ -7,8 +7,7 @@ import (
 )
 func NewCompletionCmd() *cobra.Command {
-var completionCmd = &cobra.Command{
+completionCmd := &cobra.Command{
 Use: "completion [bash|zsh|powershell|fish]",
 Short: "Generate completion script",
 Long: `To load completions:
@@ -82,5 +81,6 @@ func NewCompletionCmd() *cobra.Command {
 }
 },
 }
 return completionCmd
 }


@@ -14,9 +14,6 @@ import (
 )
 func backupHub(dirPath string) error {
-var itemDirectory string
-var upstreamParsers []string
 hub, err := require.Hub(csConfig, nil, nil)
 if err != nil {
 return err
@@ -26,16 +23,20 @@ func backupHub(dirPath string) error {
 clog := log.WithFields(log.Fields{
 "type": itemType,
 })
 itemMap := hub.GetItemMap(itemType)
 if itemMap == nil {
 clog.Infof("No %s to backup.", itemType)
 continue
 }
-itemDirectory = fmt.Sprintf("%s/%s/", dirPath, itemType)
+itemDirectory := fmt.Sprintf("%s/%s/", dirPath, itemType)
 if err = os.MkdirAll(itemDirectory, os.ModePerm); err != nil {
 return fmt.Errorf("error while creating %s : %s", itemDirectory, err)
 }
-upstreamParsers = []string{}
+upstreamParsers := []string{}
 for k, v := range itemMap {
 clog = clog.WithFields(log.Fields{
 "file": v.Name,
@@ -54,28 +55,36 @@ func backupHub(dirPath string) error {
 return fmt.Errorf("error while creating stage dir %s : %s", fstagedir, err)
 }
 }
 clog.Debugf("[%s]: backing up file (tainted:%t local:%t up-to-date:%t)", k, v.State.Tainted, v.State.IsLocal(), v.State.UpToDate)
 tfile := fmt.Sprintf("%s%s/%s", itemDirectory, v.Stage, v.FileName)
 if err = CopyFile(v.State.LocalPath, tfile); err != nil {
 return fmt.Errorf("failed copy %s %s to %s : %s", itemType, v.State.LocalPath, tfile, err)
 }
 clog.Infof("local/tainted saved %s to %s", v.State.LocalPath, tfile)
 continue
 }
 clog.Debugf("[%s] : from hub, just backup name (up-to-date:%t)", k, v.State.UpToDate)
 clog.Infof("saving, version:%s, up-to-date:%t", v.Version, v.State.UpToDate)
 upstreamParsers = append(upstreamParsers, v.Name)
 }
 //write the upstream items
 upstreamParsersFname := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itemType)
 upstreamParsersContent, err := json.MarshalIndent(upstreamParsers, "", " ")
 if err != nil {
 return fmt.Errorf("failed marshaling upstream parsers : %s", err)
 }
 err = os.WriteFile(upstreamParsersFname, upstreamParsersContent, 0o644)
 if err != nil {
 return fmt.Errorf("unable to write to %s %s : %s", itemType, upstreamParsersFname, err)
 }
 clog.Infof("Wrote %d entries for %s to %s", len(upstreamParsers), itemType, upstreamParsersFname)
 }


@@ -44,6 +44,7 @@ func runConfigFeatureFlags(cmd *cobra.Command, args []string) error {
 if feat.State == fflag.RetiredState {
 fmt.Printf("\n %s %s", magenta("RETIRED"), feat.DeprecationMsg)
 }
 fmt.Println()
 }
@@ -58,10 +59,12 @@ func runConfigFeatureFlags(cmd *cobra.Command, args []string) error {
 retired = append(retired, feat)
 continue
 }
 if feat.IsEnabled() {
 enabled = append(enabled, feat)
 continue
 }
 disabled = append(disabled, feat)
 }


@@ -35,21 +35,26 @@ func restoreHub(dirPath string) error {
 }
 /*restore the upstream items*/
 upstreamListFN := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itype)
 file, err := os.ReadFile(upstreamListFN)
 if err != nil {
 return fmt.Errorf("error while opening %s : %s", upstreamListFN, err)
 }
 var upstreamList []string
 err = json.Unmarshal(file, &upstreamList)
 if err != nil {
 return fmt.Errorf("error unmarshaling %s : %s", upstreamListFN, err)
 }
 for _, toinstall := range upstreamList {
 item := hub.GetItem(itype, toinstall)
 if item == nil {
 log.Errorf("Item %s/%s not found in hub", itype, toinstall)
 continue
 }
 err := item.Install(false, false)
 if err != nil {
 log.Errorf("Error while installing %s : %s", toinstall, err)
@@ -61,23 +66,28 @@ func restoreHub(dirPath string) error {
 if err != nil {
 return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory, err)
 }
 for _, file := range files {
 //this was the upstream data
 if file.Name() == fmt.Sprintf("upstream-%s.json", itype) {
 continue
 }
 if itype == cwhub.PARSERS || itype == cwhub.POSTOVERFLOWS {
 //we expect a stage here
 if !file.IsDir() {
 continue
 }
 stage := file.Name()
 stagedir := fmt.Sprintf("%s/%s/%s/", csConfig.ConfigPaths.ConfigDir, itype, stage)
 log.Debugf("Found stage %s in %s, target directory : %s", stage, itype, stagedir)
 if err = os.MkdirAll(stagedir, os.ModePerm); err != nil {
 return fmt.Errorf("error while creating stage directory %s : %s", stagedir, err)
 }
-/*find items*/
+// find items
 ifiles, err := os.ReadDir(itemDirectory + "/" + stage + "/")
 if err != nil {
 return fmt.Errorf("failed enumerating files of %s : %s", itemDirectory+"/"+stage, err)
@@ -86,10 +96,12 @@ func restoreHub(dirPath string) error {
 for _, tfile := range ifiles {
 log.Infof("Going to restore local/tainted [%s]", tfile.Name())
 sourceFile := fmt.Sprintf("%s/%s/%s", itemDirectory, stage, tfile.Name())
 destinationFile := fmt.Sprintf("%s%s", stagedir, tfile.Name())
 if err = CopyFile(sourceFile, destinationFile); err != nil {
 return fmt.Errorf("failed copy %s %s to %s : %s", itype, sourceFile, destinationFile, err)
 }
 log.Infof("restored %s to %s", sourceFile, destinationFile)
 }
 } else {
@@ -101,9 +113,9 @@ func restoreHub(dirPath string) error {
 }
 log.Infof("restored %s to %s", sourceFile, destinationFile)
 }
 }
 }
 return nil
 }


@@ -24,6 +24,7 @@ func showConfigKey(key string) error {
 opts := []expr.Option{}
 opts = append(opts, exprhelpers.GetExprOptions(map[string]interface{}{})...)
 opts = append(opts, expr.Env(Env{}))
 program, err := expr.Compile(key, opts...)
 if err != nil {
 return err
@@ -52,6 +53,7 @@ func showConfigKey(key string) error {
 fmt.Printf("%s\n", string(data))
 }
 return nil
 }
@@ -211,6 +213,7 @@ func runConfigShow(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 err = tmp.Execute(os.Stdout, csConfig)
 if err != nil {
 return err
@@ -230,6 +233,7 @@ func runConfigShow(cmd *cobra.Command, args []string) error {
 fmt.Printf("%s\n", string(data))
 }
 return nil
 }


@@ -262,6 +262,7 @@ func SetConsoleOpts(args []string, wanted bool) error {
 log.Infof("%s set to %t", csconfig.CONSOLE_MANAGEMENT, wanted)
 csConfig.API.Server.ConsoleConfig.ConsoleManagement = ptr.Of(wanted)
 }
 if csConfig.API.Server.OnlineClient.Credentials != nil {
 changed := false
 if wanted && csConfig.API.Server.OnlineClient.Credentials.PapiURL == "" {
@@ -271,12 +272,15 @@ func SetConsoleOpts(args []string, wanted bool) error {
 changed = true
 csConfig.API.Server.OnlineClient.Credentials.PapiURL = ""
 }
 if changed {
 fileContent, err := yaml.Marshal(csConfig.API.Server.OnlineClient.Credentials)
 if err != nil {
 return fmt.Errorf("cannot marshal credentials: %s", err)
 }
 log.Infof("Updating credentials file: %s", csConfig.API.Server.OnlineClient.CredentialsFilePath)
 err = os.WriteFile(csConfig.API.Server.OnlineClient.CredentialsFilePath, fileContent, 0o600)
 if err != nil {
 return fmt.Errorf("cannot write credentials file: %s", err)


@@ -46,12 +46,14 @@ func cmdConsoleStatusTable(out io.Writer, csConfig csconfig.Config) {
 if *csConfig.API.Server.ConsoleConfig.ShareContext {
 activated = string(emoji.CheckMarkButton)
 }
 t.AddRow(option, activated, "Send context with alerts to the console")
 case csconfig.CONSOLE_MANAGEMENT:
 activated := string(emoji.CrossMark)
 if *csConfig.API.Server.ConsoleConfig.ConsoleManagement {
 activated = string(emoji.CheckMarkButton)
 }
 t.AddRow(option, activated, "Receive decisions from console")
 }
 }


@@ -18,20 +18,25 @@ func copyFileContents(src, dst string) (err error) {
 return
 }
 defer in.Close()
 out, err := os.Create(dst)
 if err != nil {
 return
 }
 defer func() {
 cerr := out.Close()
 if err == nil {
 err = cerr
 }
 }()
 if _, err = io.Copy(out, in); err != nil {
 return
 }
 err = out.Sync()
 return
 }
@@ -40,6 +45,7 @@ func CopyFile(sourceSymLink, destinationFile string) (err error) {
 sourceFile, err := filepath.EvalSymlinks(sourceSymLink)
 if err != nil {
 log.Infof("Not a symlink : %s", err)
 sourceFile = sourceSymLink
 }
@@ -47,11 +53,13 @@ func CopyFile(sourceSymLink, destinationFile string) (err error) {
 if err != nil {
 return
 }
 if !sourceFileStat.Mode().IsRegular() {
 // cannot copy non-regular files (e.g., directories,
 // symlinks, devices, etc.)
 return fmt.Errorf("copyFile: non-regular source file %s (%q)", sourceFileStat.Name(), sourceFileStat.Mode().String())
 }
 destinationFileStat, err := os.Stat(destinationFile)
 if err != nil {
 if !os.IsNotExist(err) {
@@ -65,9 +73,11 @@ func CopyFile(sourceSymLink, destinationFile string) (err error) {
 return
 }
 }
 if err = os.Link(sourceFile, destinationFile); err != nil {
 err = copyFileContents(sourceFile, destinationFile)
 }
 return
 }


@@ -201,6 +201,7 @@ func (cli cliDashboard) NewStartCmd() *cobra.Command {
 },
 }
 cmd.Flags().BoolVarP(&forceYes, "yes", "y", false, "force yes")
 return cmd
 }
@@ -218,6 +219,7 @@ func (cli cliDashboard) NewStopCmd() *cobra.Command {
 return nil
 },
 }
 return cmd
 }
@@ -235,6 +237,7 @@ func (cli cliDashboard) NewShowPasswordCmd() *cobra.Command {
 return nil
 },
 }
 return cmd
 }
@@ -326,6 +329,7 @@ func passwordIsValid(password string) bool {
 if !hasDigit || len(password) < 6 {
 return false
 }
 return true
 }
@@ -334,8 +338,10 @@ func checkSystemMemory(forceYes *bool) error {
 if totMem >= uint64(math.Pow(2, 30)) {
 return nil
 }
 if !*forceYes {
 var answer bool
 prompt := &survey.Confirm{
 Message: "Metabase requires 1-2GB of RAM, your system is below this requirement continue ?",
 Default: true,
@@ -343,12 +349,16 @@ func checkSystemMemory(forceYes *bool) error {
 if err := survey.AskOne(prompt, &answer); err != nil {
 return fmt.Errorf("unable to ask about RAM check: %s", err)
 }
 if !answer {
 return fmt.Errorf("user stated no to continue")
 }
 return nil
 }
 log.Warn("Metabase requires 1-2GB of RAM, your system is below this requirement")
 return nil
 }
@@ -356,25 +366,32 @@ func warnIfNotLoopback(addr string) {
 if addr == "127.0.0.1" || addr == "::1" {
 return
 }
 log.Warnf("You are potentially exposing your metabase port to the internet (addr: %s), please consider using a reverse proxy", addr)
 }
 func disclaimer(forceYes *bool) error {
 if !*forceYes {
 var answer bool
 prompt := &survey.Confirm{
 Message: "CrowdSec takes no responsibility for the security of your metabase instance. Do you accept these responsibilities ?",
 Default: true,
 }
 if err := survey.AskOne(prompt, &answer); err != nil {
 return fmt.Errorf("unable to ask to question: %s", err)
 }
 if !answer {
 return fmt.Errorf("user stated no to responsibilities")
 }
 return nil
 }
 log.Warn("CrowdSec takes no responsibility for the security of your metabase instance. You used force yes, so you accept this disclaimer")
 return nil
 }
@@ -383,19 +400,24 @@ func checkGroups(forceYes *bool) (*user.Group, error) {
 if err == nil {
 return dockerGroup, nil
 }
 if !*forceYes {
 var answer bool
 prompt := &survey.Confirm{
 Message: fmt.Sprintf("For metabase docker to be able to access SQLite file we need to add a new group called '%s' to the system, is it ok for you ?", crowdsecGroup),
 Default: true,
 }
 if err := survey.AskOne(prompt, &answer); err != nil {
 return dockerGroup, fmt.Errorf("unable to ask to question: %s", err)
 }
 if !answer {
 return dockerGroup, fmt.Errorf("unable to continue without creating '%s' group", crowdsecGroup)
 }
 }
 groupAddCmd, err := exec.LookPath("groupadd")
 if err != nil {
 return dockerGroup, fmt.Errorf("unable to find 'groupadd' command, can't continue")
@@ -405,6 +427,7 @@ func checkGroups(forceYes *bool) (*user.Group, error) {
 if err := groupAdd.Run(); err != nil {
 return dockerGroup, fmt.Errorf("unable to add group '%s': %s", dockerGroup, err)
 }
 return user.LookupGroup(crowdsecGroup)
 }
@@ -413,12 +436,14 @@ func chownDatabase(gid string) error {
 if err != nil {
 return fmt.Errorf("unable to convert group ID to int: %s", err)
 }
 if stat, err := os.Stat(csConfig.DbConfig.DbPath); !os.IsNotExist(err) {
 info := stat.Sys()
 if err := os.Chown(csConfig.DbConfig.DbPath, int(info.(*syscall.Stat_t).Uid), intID); err != nil {
 return fmt.Errorf("unable to chown sqlite db file '%s': %s", csConfig.DbConfig.DbPath, err)
 }
 }
 if csConfig.DbConfig.Type == "sqlite" && csConfig.DbConfig.UseWal != nil && *csConfig.DbConfig.UseWal {
 for _, ext := range []string{"-wal", "-shm"} {
 file := csConfig.DbConfig.DbPath + ext
@@ -430,5 +455,6 @@ func chownDatabase(gid string) error {
 }
 }
 }
 return nil
 }


@@ -33,27 +33,35 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error
 for aIdx := 0; aIdx < len(*alerts); aIdx++ {
 alertItem := (*alerts)[aIdx]
 newDecisions := make([]*models.Decision, 0)
 for _, decisionItem := range alertItem.Decisions {
 spamKey := fmt.Sprintf("%t:%s:%s:%s", *decisionItem.Simulated, *decisionItem.Type, *decisionItem.Scope, *decisionItem.Value)
 if _, ok := spamLimit[spamKey]; ok {
 skipped++
 continue
 }
 spamLimit[spamKey] = true
 newDecisions = append(newDecisions, decisionItem)
 }
 alertItem.Decisions = newDecisions
 }
 if csConfig.Cscli.Output == "raw" {
 csvwriter := csv.NewWriter(os.Stdout)
 header := []string{"id", "source", "ip", "reason", "action", "country", "as", "events_count", "expiration", "simulated", "alert_id"}
 if printMachine {
 header = append(header, "machine")
 }
 err := csvwriter.Write(header)
 if err != nil {
 return err
 }
 for _, alertItem := range *alerts {
 for _, decisionItem := range alertItem.Decisions {
 raw := []string{
@@ -79,6 +87,7 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error
 }
 }
 }
 csvwriter.Flush()
 } else if csConfig.Cscli.Output == "json" {
 if *alerts == nil {
@@ -99,6 +108,7 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error
 fmt.Printf("%d duplicated entries skipped\n", skipped)
 }
 }
 return nil
 }
@@ -119,7 +129,7 @@ func (cli cliDecisions) NewCommand() *cobra.Command {
 /*TBD example*/
 Args: cobra.MinimumNArgs(1),
 DisableAutoGenTag: true,
-PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
+PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
 if err := csConfig.LoadAPIClient(); err != nil {
 return fmt.Errorf("loading api client: %w", err)
 }
@@ -164,8 +174,10 @@ func (cli cliDecisions) NewListCmd() *cobra.Command {
 IncludeCAPI: new(bool),
 Limit: new(int),
 }
 NoSimu := new(bool)
 contained := new(bool)
 var printMachine bool
 cmd := &cobra.Command{
@@ -178,7 +190,7 @@ cscli decisions list -t ban
 `,
 Args: cobra.ExactArgs(0),
 DisableAutoGenTag: true,
-RunE: func(cmd *cobra.Command, args []string) error {
+RunE: func(cmd *cobra.Command, _ []string) error {
 var err error
 /*take care of shorthand options*/
 if err = manageCliDecisionAlerts(filter.IPEquals, filter.RangeEquals, filter.ScopeEquals, filter.ValueEquals); err != nil {
@@ -299,7 +311,7 @@ cscli decisions add --scope username --value foobar
 /*TBD : fix long and example*/
 Args: cobra.ExactArgs(0),
 DisableAutoGenTag: true,
-RunE: func(cmd *cobra.Command, args []string) error {
+RunE: func(cmd *cobra.Command, _ []string) error {
 var err error
 alerts := models.AddAlertsRequest{}
 origin := types.CscliOrigin
@@ -325,7 +337,7 @@ cscli decisions add --scope username --value foobar
 addScope = types.Range
 } else if addValue == "" {
 printHelp(cmd)
-return fmt.Errorf("Missing arguments, a value is required (--ip, --range or --scope and --value)")
+return fmt.Errorf("missing arguments, a value is required (--ip, --range or --scope and --value)")
 }
 if addReason == "" {
@@ -398,8 +410,11 @@ func (cli cliDecisions) NewDeleteCmd() *cobra.Command {
 ScenarioEquals: new(string),
 OriginEquals: new(string),
 }
-var delDecisionId string
+var delDecisionID string
 var delDecisionAll bool
 contained := new(bool)
 cmd := &cobra.Command{
@@ -413,21 +428,21 @@ cscli decisions delete --id 42
 cscli decisions delete --type captcha
 `,
 /*TBD : refaire le Long/Example*/
-PreRunE: func(cmd *cobra.Command, args []string) error {
+PreRunE: func(cmd *cobra.Command, _ []string) error {
 if delDecisionAll {
 return nil
 }
 if *delFilter.ScopeEquals == "" && *delFilter.ValueEquals == "" &&
 *delFilter.TypeEquals == "" && *delFilter.IPEquals == "" &&
 *delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
-*delFilter.OriginEquals == "" && delDecisionId == "" {
+*delFilter.OriginEquals == "" && delDecisionID == "" {
 cmd.Usage()
 return fmt.Errorf("at least one filter or --all must be specified")
 }
 return nil
 },
-RunE: func(cmd *cobra.Command, args []string) error {
+RunE: func(_ *cobra.Command, _ []string) error {
 var err error
 var decisions *models.DeleteDecisionResponse
@@ -460,18 +475,18 @@ cscli decisions delete --type captcha
 delFilter.Contains = new(bool)
 }
-if delDecisionId == "" {
+if delDecisionID == "" {
 decisions, _, err = Client.Decisions.Delete(context.Background(), delFilter)
 if err != nil {
-return fmt.Errorf("Unable to delete decisions: %v", err)
+return fmt.Errorf("unable to delete decisions: %v", err)
 }
 } else {
-if _, err = strconv.Atoi(delDecisionId); err != nil {
+if _, err = strconv.Atoi(delDecisionID); err != nil {
-return fmt.Errorf("id '%s' is not an integer: %v", delDecisionId, err)
+return fmt.Errorf("id '%s' is not an integer: %v", delDecisionID, err)
 }
-decisions, _, err = Client.Decisions.DeleteOne(context.Background(), delDecisionId)
+decisions, _, err = Client.Decisions.DeleteOne(context.Background(), delDecisionID)
 if err != nil {
-return fmt.Errorf("Unable to delete decision: %v", err)
+return fmt.Errorf("unable to delete decision: %v", err)
 }
 }
 log.Infof("%s decision(s) deleted", decisions.NbDeleted)
@@ -487,7 +502,7 @@ cscli decisions delete --type captcha
 cmd.Flags().StringVarP(delFilter.ScenarioEquals, "scenario", "s", "", "the scenario name (ie. crowdsecurity/ssh-bf)")
 cmd.Flags().StringVar(delFilter.OriginEquals, "origin", "", fmt.Sprintf("the value to match for the specified origin (%s ...)", strings.Join(types.GetOrigins(), ",")))
-cmd.Flags().StringVar(&delDecisionId, "id", "", "decision id")
+cmd.Flags().StringVar(&delDecisionID, "id", "", "decision id")
 cmd.Flags().BoolVar(&delDecisionAll, "all", false, "delete all decisions")
 cmd.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")


@@ -37,21 +37,25 @@ func parseDecisionList(content []byte, format string) ([]decisionRaw, error) {
 switch format {
 case "values":
 log.Infof("Parsing values")
 scanner := bufio.NewScanner(bytes.NewReader(content))
 for scanner.Scan() {
 value := strings.TrimSpace(scanner.Text())
 ret = append(ret, decisionRaw{Value: value})
 }
 if err := scanner.Err(); err != nil {
 return nil, fmt.Errorf("unable to parse values: '%s'", err)
 }
 case "json":
 log.Infof("Parsing json")
 if err := json.Unmarshal(content, &ret); err != nil {
 return nil, err
 }
 case "csv":
 log.Infof("Parsing csv")
 if err := csvutil.Unmarshal(content, &ret); err != nil {
 return nil, fmt.Errorf("unable to parse csv: '%s'", err)
 }
@@ -75,6 +79,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if defaultDuration == "" {
 return fmt.Errorf("--duration cannot be empty")
 }
@@ -83,6 +88,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if defaultScope == "" {
 return fmt.Errorf("--scope cannot be empty")
 }
@@ -91,6 +97,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if defaultReason == "" {
 return fmt.Errorf("--reason cannot be empty")
 }
@@ -99,6 +106,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if defaultType == "" {
 return fmt.Errorf("--type cannot be empty")
 }
@@ -152,6 +160,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 }
 decisions := make([]*models.Decision, len(decisionsListRaw))
 for i, d := range decisionsListRaw {
 if d.Value == "" {
 return fmt.Errorf("item %d: missing 'value'", i)
@@ -222,6 +231,7 @@ func (cli cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 }
 log.Infof("Imported %d decisions", len(decisions))
 return nil
 }


@@ -100,11 +100,13 @@ func (cli cliItem) Install(cmd *cobra.Command, args []string) error {
 if !ignoreError {
 return fmt.Errorf("error while installing '%s': %w", item.Name, err)
 }
 log.Errorf("Error while installing '%s': %s", item.Name, err)
 }
 }
 log.Infof(ReloadMessage())
 return nil
 }
@@ -184,6 +186,7 @@ func (cli cliItem) Remove(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if didRemove {
 log.Infof("Removed %s", item.Name)
 removed++
@@ -191,6 +194,7 @@ func (cli cliItem) Remove(cmd *cobra.Command, args []string) error {
 }
 log.Infof("Removed %d %s", removed, cli.name)
 if removed > 0 {
 log.Infof(ReloadMessage())
 }
@@ -231,6 +235,7 @@ func (cli cliItem) Remove(cmd *cobra.Command, args []string) error {
 }
 log.Infof("Removed %d %s", removed, cli.name)
 if removed > 0 {
 log.Infof(ReloadMessage())
 }
@@ -291,6 +296,7 @@ func (cli cliItem) Upgrade(cmd *cobra.Command, args []string) error {
 if err != nil {
 return err
 }
 if didUpdate {
 updated++
 }
@@ -327,6 +333,7 @@ func (cli cliItem) Upgrade(cmd *cobra.Command, args []string) error {
 updated++
 }
 }
 if updated > 0 {
 log.Infof(ReloadMessage())
 }
@@ -523,6 +530,7 @@ func (cli cliItem) itemDiff(item *cwhub.Item, reverse bool) (string, error) {
 file2 := remoteURL
 content1 := string(localContent)
 content2 := string(latestContent)
 if reverse {
 file1, file2 = file2, file1
 content1, content2 = content2, content1
@@ -568,6 +576,10 @@ func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) st
 ret = append(ret, diff)
 } else if len(sub.State.TaintedBy) > 0 {
 taintList := strings.Join(sub.State.TaintedBy, ", ")
+if sub.FQName() == taintList {
+// hack: avoid message "item is tainted by itself"
+continue
+}
 ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
 }
 }


@@ -6,6 +6,7 @@ import (
 "fmt"
 "io"
 "os"
+"path/filepath"
 "strings"
 "gopkg.in/yaml.v3"
@@ -62,7 +63,9 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
 if omitIfEmpty && len(items[itemType]) == 0 {
 continue
 }
 listHubItemTable(out, "\n"+strings.ToUpper(itemType), items[itemType])
 nothingToDisplay = false
 }
@@ -127,11 +130,13 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
 if len(itemTypes) > 1 {
 row = append(row, itemType)
 }
 if err := csvwriter.Write(row); err != nil {
 return fmt.Errorf("failed to write raw output: %s", err)
 }
 }
 }
 csvwriter.Flush()
 default:
 return fmt.Errorf("unknown output format '%s'", csConfig.Cscli.Output)
@@ -145,6 +150,7 @@ func InspectItem(item *cwhub.Item, showMetrics bool) error {
 case "human", "raw":
 enc := yaml.NewEncoder(os.Stdout)
 enc.SetIndent(2)
 if err := enc.Encode(item); err != nil {
 return fmt.Errorf("unable to encode item: %s", err)
 }
@@ -153,11 +159,23 @@ func InspectItem(item *cwhub.Item, showMetrics bool) error {
 if err != nil {
 return fmt.Errorf("unable to marshal item: %s", err)
 }
 fmt.Print(string(b))
 }
-if csConfig.Cscli.Output == "human" && showMetrics {
+if csConfig.Cscli.Output != "human" {
+return nil
+}
+if item.State.Tainted {
+fmt.Println()
+fmt.Printf(`This item is tainted. Use "%s %s inspect --diff %s" to see why.`, filepath.Base(os.Args[0]), item.Type, item.Name)
+fmt.Println()
+}
+if showMetrics {
 fmt.Printf("\nCurrent metrics: \n")
 if err := ShowMetrics(item); err != nil {
 return err
 }


@@ -23,23 +23,31 @@ type HubItemWrapper struct {
 }
 // mergeContext adds the context from src to dest.
-func mergeContext(dest map[string][]string, src map[string][]string) {
+func mergeContext(dest map[string][]string, src map[string][]string) error {
+if len(src) == 0 {
+return fmt.Errorf("no context data to merge")
+}
 for k, v := range src {
 if _, ok := dest[k]; !ok {
 dest[k] = make([]string, 0)
 }
 for _, s := range v {
 if !slices.Contains(dest[k], s) {
 dest[k] = append(dest[k], s)
 }
 }
 }
+return nil
 }
 // addContextFromItem merges the context from an item into the context to send to the console.
 func addContextFromItem(toSend map[string][]string, item *cwhub.Item) error {
 filePath := item.State.LocalPath
 log.Tracef("loading console context from %s", filePath)
 content, err := os.ReadFile(filePath)
 if err != nil {
 return err
@@ -52,7 +60,11 @@ func addContextFromItem(toSend map[string][]string, item *cwhub.Item) error {
 return fmt.Errorf("%s: %w", filePath, err)
 }
-mergeContext(toSend, wrapper.Context)
+err = mergeContext(toSend, wrapper.Context)
+if err != nil {
+// having an empty hub item deserves an error
+log.Errorf("while merging context from %s: %s. Note that context data should be under the 'context:' key, the top-level is metadata.", filePath, err)
+}
 return nil
 }
@@ -60,6 +72,7 @@ func addContextFromItem(toSend map[string][]string, item *cwhub.Item) error {
 // addContextFromFile merges the context from a file into the context to send to the console.
 func addContextFromFile(toSend map[string][]string, filePath string) error {
 log.Tracef("loading console context from %s", filePath)
 content, err := os.ReadFile(filePath)
 if err != nil {
 return err
@@ -72,7 +85,10 @@ func addContextFromFile(toSend map[string][]string, filePath string) error {
 return fmt.Errorf("%s: %w", filePath, err)
 }
-mergeContext(toSend, newContext)
+err = mergeContext(toSend, newContext)
+if err != nil {
+log.Warningf("while merging context from %s: %s", filePath, err)
+}
 return nil
 }
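
Aside (illustrative only, not part of the commit): a minimal standalone sketch of the mergeContext behaviour introduced above — it now returns an error when the source map is empty and de-duplicates values per key. The standard-library slices package (Go 1.21+), the main wrapper, and the sample keys/values are assumptions added purely to make the sketch runnable; the real code lives inside crowdsec and may import differently.

// sketch.go — hypothetical, self-contained reproduction of the new behaviour
package main

import (
	"fmt"
	"slices" // assumes Go 1.21+; crowdsec's actual import may differ
)

// mergeContext merges src into dest, skipping duplicate values, and
// reports an error when there is nothing to merge (the new behaviour).
func mergeContext(dest map[string][]string, src map[string][]string) error {
	if len(src) == 0 {
		return fmt.Errorf("no context data to merge")
	}

	for k, v := range src {
		if _, ok := dest[k]; !ok {
			dest[k] = make([]string, 0)
		}

		for _, s := range v {
			if !slices.Contains(dest[k], s) {
				dest[k] = append(dest[k], s)
			}
		}
	}

	return nil
}

func main() {
	// Hypothetical context map, as would be sent to the console.
	toSend := map[string][]string{"source_ip": {"evt.Meta.source_ip"}}

	// An empty source now surfaces an error instead of silently doing nothing,
	// which is what addContextFromItem/addContextFromFile log above.
	if err := mergeContext(toSend, nil); err != nil {
		fmt.Println("merge failed:", err)
	}

	// A non-empty merge appends only values that are not already present.
	_ = mergeContext(toSend, map[string][]string{"source_ip": {"evt.Meta.source_ip", "evt.Parsed.remote_addr"}})
	fmt.Println(toSend) // map[source_ip:[evt.Meta.source_ip evt.Parsed.remote_addr]]
}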


@@ -31,6 +31,7 @@ func (s *PluginSuite) permissionSetter(perm os.FileMode) func(*testing.T) {
 func (s *PluginSuite) readconfig() PluginConfig {
 var config PluginConfig
 t := s.T()
 orig, err := os.ReadFile(s.pluginConfig)
@@ -111,7 +112,7 @@ func (s *PluginSuite) TestBrokerInit() {
 },
 {
 name: "Invalid user and group",
-expectedErr: "unknown user toto1234",
+expectedErr: "toto1234",
 procCfg: csconfig.PluginCfg{
 User: "toto1234",
 Group: "toto1234",
@@ -119,7 +120,7 @@ func (s *PluginSuite) TestBrokerInit() {
 },
 {
 name: "Valid user and invalid group",
-expectedErr: "unknown group toto1234",
+expectedErr: "toto1234",
 procCfg: csconfig.PluginCfg{
 User: "nobody",
 Group: "toto1234",
@@ -142,6 +143,7 @@ func (s *PluginSuite) TestBrokerInit() {
 func (s *PluginSuite) TestBrokerNoThreshold() {
 var alerts []models.Alert
 DefaultEmptyTicker = 50 * time.Millisecond
 t := s.T()
@@ -154,6 +156,7 @@ func (s *PluginSuite) TestBrokerNoThreshold() {
 // send one item, it should be processed right now
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(200 * time.Millisecond)
 // we expect one now
@@ -170,6 +173,7 @@ func (s *PluginSuite) TestBrokerNoThreshold() {
 // and another one
 log.Printf("second send")
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(200 * time.Millisecond)
 // we expect one again, as we cleaned the file
@@ -204,6 +208,7 @@ func (s *PluginSuite) TestBrokerRunGroupAndTimeThreshold_TimeFirst() {
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(500 * time.Millisecond)
 // because of group threshold, we shouldn't have data yet
 assert.NoFileExists(t, "./out")
@@ -239,11 +244,13 @@ func (s *PluginSuite) TestBrokerRunGroupAndTimeThreshold_CountFirst() {
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(100 * time.Millisecond)
 // because of group threshold, we shouldn't have data yet
 assert.NoFileExists(t, "./out")
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(100 * time.Millisecond)
 // and now we should
@@ -277,6 +284,7 @@ func (s *PluginSuite) TestBrokerRunGroupThreshold() {
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(time.Second)
 // because of group threshold, we shouldn't have data yet
@@ -284,6 +292,7 @@ func (s *PluginSuite) TestBrokerRunGroupThreshold() {
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(time.Second)
 // and now we should
@@ -326,6 +335,7 @@ func (s *PluginSuite) TestBrokerRunTimeThreshold() {
 // send data
 pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}}
 time.Sleep(200 * time.Millisecond)
 // we shouldn't have data yet


@@ -101,7 +101,7 @@ func (i *Item) downloadLatest(overwrite bool, updateOnly bool) (string, error) {
 }
 }
-if !i.State.Installed && updateOnly && i.State.Downloaded {
+if !i.State.Installed && updateOnly && i.State.Downloaded && !overwrite {
 i.hub.logger.Debugf("skipping upgrade of %s: not installed", i.Name)
 return "", nil
 }
@@ -154,9 +154,17 @@ func (i *Item) FetchLatest() ([]byte, string, error) {
 // download downloads the item from the hub and writes it to the hub directory.
 func (i *Item) download(overwrite bool) (string, error) {
-if i.State.IsLocal() {
-return "", fmt.Errorf("%s is local, can't download", i.Name)
-}
+// ensure that target file is within target dir
+finalPath, err := i.downloadPath()
+if err != nil {
+return "", err
+}
+if i.State.IsLocal() {
+i.hub.logger.Warningf("%s is local, can't download", i.Name)
+return finalPath, nil
+}
 // if user didn't --force, don't overwrite local, tainted, up-to-date files
 if !overwrite {
 if i.State.Tainted {
@@ -177,12 +185,6 @@ func (i *Item) download(overwrite bool) (string, error) {
 // all good, install
-// ensure that target file is within target dir
-finalPath, err := i.downloadPath()
-if err != nil {
-return "", err
-}
 parentDir := filepath.Dir(finalPath)
 if err = os.MkdirAll(parentDir, os.ModePerm); err != nil {


@@ -321,6 +321,7 @@ func (t *HubTestItem) InstallHub() error {
 // install appsec-rules in runtime environment
 for _, appsecrule := range t.Config.AppsecRules {
 log.Debugf("adding rule '%s'", appsecrule)
 if appsecrule == "" {
 continue
 }
@@ -544,7 +545,6 @@ func (t *HubTestItem) Clean() error {
 }
 func (t *HubTestItem) RunWithNucleiTemplate() error {
 crowdsecLogFile := fmt.Sprintf("%s/log/crowdsec.log", t.RuntimePath)
 testPath := filepath.Join(t.HubTestPath, t.Name)
@@ -595,6 +595,7 @@ func (t *HubTestItem) RunWithNucleiTemplate() error {
 log.Errorf("crowdsec log file '%s'", crowdsecLogFile)
 log.Errorf("%s\n", string(crowdsecLog))
 }
 return fmt.Errorf("appsec is down: %s", err)
 }
@@ -603,6 +604,7 @@ func (t *HubTestItem) RunWithNucleiTemplate() error {
 if err != nil {
 return fmt.Errorf("unable to parse target '%s': %s", t.NucleiTargetHost, err)
 }
 nucleiTargetHost := nucleiTargetParsedURL.Host
 if _, err := IsAlive(nucleiTargetHost); err != nil {
 return fmt.Errorf("target is down: %s", err)
@@ -648,7 +650,9 @@ func (t *HubTestItem) RunWithNucleiTemplate() error {
 }
 }
 }
 crowdsecDaemon.Process.Kill()
 return nil
 }
@@ -731,7 +735,7 @@ func (t *HubTestItem) RunWithLogFile() error {
 return fmt.Errorf("log file '%s' is empty, please fill it with log", logFile)
 }
-cmdArgs := []string{"-c", t.RuntimeConfigFilePath, "machines", "add", "testMachine", "--auto"}
+cmdArgs := []string{"-c", t.RuntimeConfigFilePath, "machines", "add", "testMachine", "--force", "--auto"}
 cscliRegisterCmd := exec.Command(t.CscliPath, cmdArgs...)
 log.Debugf("%s", cscliRegisterCmd.String())
@@ -853,6 +857,7 @@ func (t *HubTestItem) RunWithLogFile() error {
 func (t *HubTestItem) Run() error {
 var err error
 t.Success = false
 t.ErrorsList = make([]string, 0)
@@ -911,6 +916,7 @@ func (t *HubTestItem) Run() error {
 if len(t.Config.AppsecRules) > 0 {
 // copy template acquis file to runtime folder
 log.Debugf("copying %s to %s", t.TemplateAcquisPath, t.RuntimeAcquisFilePath)
 if err = Copy(t.TemplateAcquisPath, t.RuntimeAcquisFilePath); err != nil {
 return fmt.Errorf("unable to copy '%s' to '%s': %v", t.TemplateAcquisPath, t.RuntimeAcquisFilePath, err)
 }


@@ -1,14 +1,14 @@
 #these are the events we input into parser
 lines:
 - Enriched:
-IpToResolve: 8.8.8.8
+IpToResolve: 1.1.1.1
 - Enriched:
 IpToResolve: 1.2.3.4
 #these are the results we expect from the parser
 results:
 - Enriched:
-reverse_dns: dns.google.
+reverse_dns: one.one.one.one.
-IpToResolve: 8.8.8.8
+IpToResolve: 1.1.1.1
 Meta:
 did_dns_succeeded: yes
 Process: true


@@ -4,7 +4,7 @@ debug: true
 whitelist:
 reason: "Whitelist tests"
 ip:
-- 8.8.8.8
+- 1.1.1.1
 cidr:
 - "1.2.3.0/24"
 expression:


@@ -2,7 +2,7 @@
 lines:
 - Meta:
 test: test1
-source_ip: 8.8.8.8
+source_ip: 1.1.1.1
 statics: toto
 - Meta:
 test: test2


@@ -152,9 +152,9 @@ teardown() {
 rune -0 mkdir -p "$CONFIG_DIR/collections"
 rune -0 touch "$CONFIG_DIR/collections/foobar.yaml"
 rune -1 cscli collections install foobar.yaml
-assert_stderr --partial "failed to download item: foobar.yaml is local, can't download"
+assert_stderr --partial "foobar.yaml is local, can't download"
 rune -1 cscli collections install foobar.yaml --force
-assert_stderr --partial "failed to download item: foobar.yaml is local, can't download"
+assert_stderr --partial "foobar.yaml is local, can't download"
 }
 @test "a local item cannot be removed by cscli" {
@@ -181,3 +181,15 @@ teardown() {
 rune -0 jq '.collections' <(output)
 assert_json '[]'
 }
+@test "tainted hub file, not enabled, install --force should repair" {
+rune -0 cscli scenarios install crowdsecurity/ssh-bf
+rune -0 cscli scenarios inspect crowdsecurity/ssh-bf -o json
+local_path="$(jq -r '.local_path' <(output))"
+echo >> "$local_path"
+rm "$local_path"
+rune -0 cscli scenarios install crowdsecurity/ssh-bf --force
+rune -0 cscli scenarios inspect crowdsecurity/ssh-bf -o json
+rune -0 jq -c '.tainted' <(output)
+assert_output 'false'
+}


@@ -29,11 +29,11 @@ teardown() {
 @test "'decisions add' requires parameters" {
 rune -1 cscli decisions add
 assert_line "Usage:"
-assert_stderr --partial "Missing arguments, a value is required (--ip, --range or --scope and --value)"
+assert_stderr --partial "missing arguments, a value is required (--ip, --range or --scope and --value)"
 rune -1 cscli decisions add -o json
 rune -0 jq -c '[ .level, .msg]' <(stderr | grep "^{")
-assert_output '["fatal","Missing arguments, a value is required (--ip, --range or --scope and --value)"]'
+assert_output '["fatal","missing arguments, a value is required (--ip, --range or --scope and --value)"]'
 }
 @test "cscli decisions list, with and without --machine" {