add cscli support dump (#1634)

blotus 2022-08-18 11:54:01 +02:00 committed by GitHub
parent 27194a9f9c
commit e46ca38cbb
15 changed files with 576 additions and 138 deletions
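Most of the refactoring below turns cscli's list and metrics rendering from direct prints to os.Stdout into helpers that render into a buffer and return []byte, so the new `cscli support dump` command can reuse the same output when building its zip archive. A minimal sketch of that pattern, for orientation only (renderTable and its arguments are illustrative helpers, not part of the commit):

// Sketch only: capture tablewriter output in memory instead of writing to
// os.Stdout, so the caller can print it or add it to a zip archive.
// renderTable is a hypothetical helper, not a function added by this commit.
package main

import (
	"bytes"
	"fmt"

	"github.com/olekukonko/tablewriter"
)

func renderTable(header []string, rows [][]string) []byte {
	w := bytes.NewBuffer(nil)
	table := tablewriter.NewWriter(w) // write into the buffer, not os.Stdout
	table.SetHeader(header)
	for _, row := range rows {
		table.Append(row)
	}
	table.Render()
	return w.Bytes()
}

func main() {
	out := renderTable([]string{"Name", "Type"}, [][]string{{"my-bouncer", "firewall"}})
	fmt.Printf("%s", out) // a caller can print this, or write it into a zip entry
}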


@@ -1,10 +1,10 @@
package main
import (
+ "bytes"
"encoding/csv"
"encoding/json"
"fmt"
- "os"
"time"
middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1"
@@ -12,6 +12,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/enescakir/emoji"
"github.com/olekukonko/tablewriter"
+ "github.com/pkg/errors"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
@@ -20,6 +21,60 @@ var keyIP string
var keyLength int
var key string
+ func getBouncers(dbClient *database.Client) ([]byte, error) {
+ bouncers, err := dbClient.ListBouncers()
+ w := bytes.NewBuffer(nil)
+ if err != nil {
+ return nil, fmt.Errorf("unable to list bouncers: %s", err)
+ }
+ if csConfig.Cscli.Output == "human" {
+ table := tablewriter.NewWriter(w)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+ table.SetHeader([]string{"Name", "IP Address", "Valid", "Last API pull", "Type", "Version", "Auth Type"})
+ for _, b := range bouncers {
+ var revoked string
+ if !b.Revoked {
+ revoked = emoji.CheckMark.String()
+ } else {
+ revoked = emoji.Prohibited.String()
+ }
+ table.Append([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType})
+ }
+ table.Render()
+ } else if csConfig.Cscli.Output == "json" {
+ x, err := json.MarshalIndent(bouncers, "", " ")
+ if err != nil {
+ return nil, errors.Wrap(err, "failed to unmarshal")
+ }
+ return x, nil
+ } else if csConfig.Cscli.Output == "raw" {
+ csvwriter := csv.NewWriter(w)
+ err := csvwriter.Write([]string{"name", "ip", "revoked", "last_pull", "type", "version", "auth_type"})
+ if err != nil {
+ return nil, errors.Wrap(err, "failed to write raw header")
+ }
+ for _, b := range bouncers {
+ var revoked string
+ if !b.Revoked {
+ revoked = "validated"
+ } else {
+ revoked = "pending"
+ }
+ err := csvwriter.Write([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType})
+ if err != nil {
+ return nil, errors.Wrap(err, "failed to write raw")
+ }
+ }
+ csvwriter.Flush()
+ }
+ return w.Bytes(), nil
+ }
func NewBouncersCmd() *cobra.Command {
/* ---- DECISIONS COMMAND */
var cmdBouncers = &cobra.Command{
@@ -54,55 +109,11 @@ Note: This command requires database direct access, so is intended to be run on
Args: cobra.ExactArgs(0),
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, arg []string) {
- blockers, err := dbClient.ListBouncers()
+ bouncers, err := getBouncers(dbClient)
if err != nil {
- log.Errorf("unable to list blockers: %s", err)
+ log.Fatalf("unable to list bouncers: %s", err)
- }
- if csConfig.Cscli.Output == "human" {
- table := tablewriter.NewWriter(os.Stdout)
- table.SetCenterSeparator("")
- table.SetColumnSeparator("")
- table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
- table.SetAlignment(tablewriter.ALIGN_LEFT)
- table.SetHeader([]string{"Name", "IP Address", "Valid", "Last API pull", "Type", "Version", "Auth Type"})
- for _, b := range blockers {
- var revoked string
- if !b.Revoked {
- revoked = emoji.CheckMark.String()
- } else {
- revoked = emoji.Prohibited.String()
- }
- table.Append([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType})
- }
- table.Render()
- } else if csConfig.Cscli.Output == "json" {
- x, err := json.MarshalIndent(blockers, "", " ")
- if err != nil {
- log.Fatalf("failed to unmarshal")
- }
- fmt.Printf("%s", string(x))
- } else if csConfig.Cscli.Output == "raw" {
- csvwriter := csv.NewWriter(os.Stdout)
- err := csvwriter.Write([]string{"name", "ip", "revoked", "last_pull", "type", "version", "auth_type"})
- if err != nil {
- log.Fatalf("failed to write raw header: %s", err)
- }
- for _, b := range blockers {
- var revoked string
- if !b.Revoked {
- revoked = "validated"
- } else {
- revoked = "pending"
- }
- err := csvwriter.Write([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType})
- if err != nil {
- log.Fatalf("failed to write raw: %s", err)
- }
- }
- csvwriter.Flush()
}
+ fmt.Printf("%s", bouncers)
},
}
cmdBouncers.AddCommand(cmdBouncersList)


@@ -173,7 +173,8 @@ func NewCollectionsCmd() *cobra.Command {
Args: cobra.ExactArgs(0),
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
- ListItems([]string{cwhub.COLLECTIONS}, args, false, true, all)
+ items := ListItems([]string{cwhub.COLLECTIONS}, args, false, true, all)
+ fmt.Printf("%s\n", string(items))
},
}
cmdCollectionsList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")


@@ -56,9 +56,10 @@ cscli hub update # Download list of available configurations from the hub
log.Info(v)
}
cwhub.DisplaySummary()
- ListItems([]string{
+ items := ListItems([]string{
cwhub.COLLECTIONS, cwhub.PARSERS, cwhub.SCENARIOS, cwhub.PARSERS_OVFLW,
}, args, true, false, all)
+ fmt.Printf("%s\n", items)
},
}
cmdHubList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")


@@ -1,13 +1,13 @@
package main
import (
+ "bytes"
saferand "crypto/rand"
"encoding/csv"
"encoding/json"
"fmt"
"io/ioutil"
"math/big"
- "os"
"strings"
"time"
@@ -109,6 +109,61 @@ func displayLastHeartBeat(m *ent.Machine, fancy bool) string {
return hbDisplay
}
+ func getAgents(dbClient *database.Client) ([]byte, error) {
+ w := bytes.NewBuffer(nil)
+ machines, err := dbClient.ListMachines()
+ if err != nil {
+ return nil, fmt.Errorf("unable to list machines: %s", err)
+ }
+ if csConfig.Cscli.Output == "human" {
+ table := tablewriter.NewWriter(w)
+ table.SetCenterSeparator("")
+ table.SetColumnSeparator("")
+ table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+ table.SetAlignment(tablewriter.ALIGN_LEFT)
+ table.SetHeader([]string{"Name", "IP Address", "Last Update", "Status", "Version", "Auth Type", "Last Heartbeat"})
+ for _, w := range machines {
+ var validated string
+ if w.IsValidated {
+ validated = emoji.CheckMark.String()
+ } else {
+ validated = emoji.Prohibited.String()
+ }
+ table.Append([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version, w.AuthType, displayLastHeartBeat(w, true)})
+ }
+ table.Render()
+ } else if csConfig.Cscli.Output == "json" {
+ x, err := json.MarshalIndent(machines, "", " ")
+ if err != nil {
+ log.Fatalf("failed to unmarshal")
+ }
+ return x, nil
+ } else if csConfig.Cscli.Output == "raw" {
+ csvwriter := csv.NewWriter(w)
+ err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
+ if err != nil {
+ log.Fatalf("failed to write header: %s", err)
+ }
+ for _, w := range machines {
+ var validated string
+ if w.IsValidated {
+ validated = "true"
+ } else {
+ validated = "false"
+ }
+ err := csvwriter.Write([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version, w.AuthType, displayLastHeartBeat(w, false)})
+ if err != nil {
+ log.Fatalf("failed to write raw output : %s", err)
+ }
+ }
+ csvwriter.Flush()
+ } else {
+ log.Errorf("unknown output '%s'", csConfig.Cscli.Output)
+ }
+ return w.Bytes(), nil
+ }
func NewMachinesCmd() *cobra.Command {
/* ---- DECISIONS COMMAND */
var cmdMachines = &cobra.Command{
@@ -149,56 +204,11 @@ Note: This command requires database direct access, so is intended to be run on
}
},
Run: func(cmd *cobra.Command, args []string) {
- machines, err := dbClient.ListMachines()
+ agents, err := getAgents(dbClient)
if err != nil {
- log.Errorf("unable to list machines: %s", err)
+ log.Fatalf("unable to list machines: %s", err)
- }
- if csConfig.Cscli.Output == "human" {
- table := tablewriter.NewWriter(os.Stdout)
- table.SetCenterSeparator("")
- table.SetColumnSeparator("")
- table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
- table.SetAlignment(tablewriter.ALIGN_LEFT)
- table.SetHeader([]string{"Name", "IP Address", "Last Update", "Status", "Version", "Auth Type", "Last Heartbeat"})
- for _, w := range machines {
- var validated string
- if w.IsValidated {
- validated = emoji.CheckMark.String()
- } else {
- validated = emoji.Prohibited.String()
- }
- table.Append([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version, w.AuthType, displayLastHeartBeat(w, true)})
- }
- table.Render()
- } else if csConfig.Cscli.Output == "json" {
- x, err := json.MarshalIndent(machines, "", " ")
- if err != nil {
- log.Fatalf("failed to unmarshal")
- }
- fmt.Printf("%s", string(x))
- } else if csConfig.Cscli.Output == "raw" {
- csvwriter := csv.NewWriter(os.Stdout)
- err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
- if err != nil {
- log.Fatalf("failed to write header: %s", err)
- }
- for _, w := range machines {
- var validated string
- if w.IsValidated {
- validated = "true"
- } else {
- validated = "false"
- }
- err := csvwriter.Write([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version, w.AuthType, displayLastHeartBeat(w, false)})
- if err != nil {
- log.Fatalf("failed to write raw output : %s", err)
- }
- }
- csvwriter.Flush()
- } else {
- log.Errorf("unknown output '%s'", csConfig.Cscli.Output)
}
+ fmt.Printf("%s\n", agents)
},
}
cmdMachines.AddCommand(cmdMachinesList)


@@ -93,7 +93,7 @@ func initConfig() {
var validArgs = []string{
"scenarios", "parsers", "collections", "capi", "lapi", "postoverflows", "machines",
"metrics", "bouncers", "alerts", "decisions", "simulation", "hub", "dashboard",
- "config", "completion", "version", "console", "notifications",
+ "config", "completion", "version", "console", "notifications", "support",
}
func prepender(filename string) string {
@@ -200,6 +200,7 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
rootCmd.AddCommand(NewExplainCmd())
rootCmd.AddCommand(NewHubTestCmd())
rootCmd.AddCommand(NewNotificationsCmd())
+ rootCmd.AddCommand(NewSupportCmd())
if err := rootCmd.Execute(); err != nil {
if bincoverTesting != "" {


@@ -1,6 +1,7 @@
package main
import (
+ "bytes"
"encoding/json"
"fmt"
"net/http"
@@ -89,7 +90,7 @@ func metricsToTable(table *tablewriter.Table, stats map[string]map[string]int, k
}
/*This is a complete rip from prom2json*/
- func ShowPrometheus(url string) {
+ func FormatPrometheusMetric(url string, formatType string) ([]byte, error) {
mfChan := make(chan *dto.MetricFamily, 1024)
// Start with the DefaultTransport for sane defaults.
@@ -99,7 +100,6 @@ func ShowPrometheus(url string) {
transport.DisableKeepAlives = true
// Timeout early if the server doesn't even return the headers.
transport.ResponseHeaderTimeout = time.Minute
go func() {
defer types.CatchPanic("crowdsec/ShowPrometheus")
err := prom2json.FetchMetricFamilies(url, mfChan, transport)
@@ -283,42 +283,45 @@ func ShowPrometheus(url string) {
}
}
- if csConfig.Cscli.Output == "human" {
- acquisTable := tablewriter.NewWriter(os.Stdout)
+ ret := bytes.NewBuffer(nil)
+ if formatType == "human" {
+ acquisTable := tablewriter.NewWriter(ret)
acquisTable.SetHeader([]string{"Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket"})
keys := []string{"reads", "parsed", "unparsed", "pour"}
if err := metricsToTable(acquisTable, acquis_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}
- bucketsTable := tablewriter.NewWriter(os.Stdout)
+ bucketsTable := tablewriter.NewWriter(ret)
bucketsTable.SetHeader([]string{"Bucket", "Current Count", "Overflows", "Instantiated", "Poured", "Expired"})
keys = []string{"curr_count", "overflow", "instanciation", "pour", "underflow"}
if err := metricsToTable(bucketsTable, buckets_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}
- parsersTable := tablewriter.NewWriter(os.Stdout)
+ parsersTable := tablewriter.NewWriter(ret)
parsersTable.SetHeader([]string{"Parsers", "Hits", "Parsed", "Unparsed"})
keys = []string{"hits", "parsed", "unparsed"}
if err := metricsToTable(parsersTable, parsers_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}
- lapiMachinesTable := tablewriter.NewWriter(os.Stdout)
+ lapiMachinesTable := tablewriter.NewWriter(ret)
lapiMachinesTable.SetHeader([]string{"Machine", "Route", "Method", "Hits"})
if err := lapiMetricsToTable(lapiMachinesTable, lapi_machine_stats); err != nil {
log.Warningf("while collecting machine lapi stats : %s", err)
}
//lapiMetricsToTable
- lapiBouncersTable := tablewriter.NewWriter(os.Stdout)
+ lapiBouncersTable := tablewriter.NewWriter(ret)
lapiBouncersTable.SetHeader([]string{"Bouncer", "Route", "Method", "Hits"})
if err := lapiMetricsToTable(lapiBouncersTable, lapi_bouncer_stats); err != nil {
log.Warningf("while collecting bouncer lapi stats : %s", err)
}
- lapiDecisionsTable := tablewriter.NewWriter(os.Stdout)
+ lapiDecisionsTable := tablewriter.NewWriter(ret)
lapiDecisionsTable.SetHeader([]string{"Bouncer", "Empty answers", "Non-empty answers"})
for bouncer, hits := range lapi_decisions_stats {
row := []string{}
@@ -329,7 +332,7 @@ func ShowPrometheus(url string) {
}
/*unfortunately, we can't reuse metricsToTable as the structure is too different :/*/
- lapiTable := tablewriter.NewWriter(os.Stdout)
+ lapiTable := tablewriter.NewWriter(ret)
lapiTable.SetHeader([]string{"Route", "Method", "Hits"})
sortedKeys := []string{}
for akey := range lapi_stats {
@@ -352,7 +355,7 @@ func ShowPrometheus(url string) {
}
}
- decisionsTable := tablewriter.NewWriter(os.Stdout)
+ decisionsTable := tablewriter.NewWriter(ret)
decisionsTable.SetHeader([]string{"Reason", "Origin", "Action", "Count"})
for reason, origins := range decisions_stats {
for origin, actions := range origins {
@@ -367,7 +370,7 @@ func ShowPrometheus(url string) {
}
}
- alertsTable := tablewriter.NewWriter(os.Stdout)
+ alertsTable := tablewriter.NewWriter(ret)
alertsTable.SetHeader([]string{"Reason", "Count"})
for scenario, hits := range alerts_stats {
row := []string{}
@@ -377,71 +380,75 @@ func ShowPrometheus(url string) {
}
if bucketsTable.NumLines() > 0 {
- log.Printf("Buckets Metrics:")
+ fmt.Fprintf(ret, "Buckets Metrics:\n")
bucketsTable.SetAlignment(tablewriter.ALIGN_LEFT)
bucketsTable.Render()
}
if acquisTable.NumLines() > 0 {
- log.Printf("Acquisition Metrics:")
+ fmt.Fprintf(ret, "Acquisition Metrics:\n")
acquisTable.SetAlignment(tablewriter.ALIGN_LEFT)
acquisTable.Render()
}
if parsersTable.NumLines() > 0 {
- log.Printf("Parser Metrics:")
+ fmt.Fprintf(ret, "Parser Metrics:\n")
parsersTable.SetAlignment(tablewriter.ALIGN_LEFT)
parsersTable.Render()
}
if lapiTable.NumLines() > 0 {
- log.Printf("Local Api Metrics:")
+ fmt.Fprintf(ret, "Local Api Metrics:\n")
lapiTable.SetAlignment(tablewriter.ALIGN_LEFT)
lapiTable.Render()
}
if lapiMachinesTable.NumLines() > 0 {
- log.Printf("Local Api Machines Metrics:")
+ fmt.Fprintf(ret, "Local Api Machines Metrics:\n")
lapiMachinesTable.SetAlignment(tablewriter.ALIGN_LEFT)
lapiMachinesTable.Render()
}
if lapiBouncersTable.NumLines() > 0 {
- log.Printf("Local Api Bouncers Metrics:")
+ fmt.Fprintf(ret, "Local Api Bouncers Metrics:\n")
lapiBouncersTable.SetAlignment(tablewriter.ALIGN_LEFT)
lapiBouncersTable.Render()
}
if lapiDecisionsTable.NumLines() > 0 {
- log.Printf("Local Api Bouncers Decisions:")
+ fmt.Fprintf(ret, "Local Api Bouncers Decisions:\n")
lapiDecisionsTable.SetAlignment(tablewriter.ALIGN_LEFT)
lapiDecisionsTable.Render()
}
if decisionsTable.NumLines() > 0 {
- log.Printf("Local Api Decisions:")
+ fmt.Fprintf(ret, "Local Api Decisions:\n")
decisionsTable.SetAlignment(tablewriter.ALIGN_LEFT)
decisionsTable.Render()
}
if alertsTable.NumLines() > 0 {
- log.Printf("Local Api Alerts:")
+ fmt.Fprintf(ret, "Local Api Alerts:\n")
alertsTable.SetAlignment(tablewriter.ALIGN_LEFT)
alertsTable.Render()
}
- } else if csConfig.Cscli.Output == "json" {
+ } else if formatType == "json" {
for _, val := range []interface{}{acquis_stats, parsers_stats, buckets_stats, lapi_stats, lapi_bouncer_stats, lapi_machine_stats, lapi_decisions_stats, decisions_stats, alerts_stats} {
x, err := json.MarshalIndent(val, "", " ")
if err != nil {
- log.Fatalf("failed to unmarshal metrics : %v", err)
+ return nil, fmt.Errorf("failed to unmarshal metrics : %v", err)
}
- fmt.Printf("%s\n", string(x))
+ ret.Write(x)
}
- } else if csConfig.Cscli.Output == "raw" {
+ return ret.Bytes(), nil
+ } else if formatType == "raw" {
for _, val := range []interface{}{acquis_stats, parsers_stats, buckets_stats, lapi_stats, lapi_bouncer_stats, lapi_machine_stats, lapi_decisions_stats, decisions_stats, alerts_stats} {
x, err := yaml.Marshal(val)
if err != nil {
- log.Fatalf("failed to unmarshal metrics : %v", err)
+ return nil, fmt.Errorf("failed to unmarshal metrics : %v", err)
}
- fmt.Printf("%s\n", string(x))
+ ret.Write(x)
}
+ return ret.Bytes(), nil
}
+ return ret.Bytes(), nil
}
var noUnit bool
@@ -472,7 +479,11 @@ func NewMetricsCmd() *cobra.Command {
os.Exit(1)
}
- ShowPrometheus(prometheusURL + "/metrics")
+ metrics, err := FormatPrometheusMetric(prometheusURL+"/metrics", csConfig.Cscli.Output)
+ if err != nil {
+ log.Fatalf("could not fetch prometheus metrics: %s", err)
+ }
+ fmt.Printf("%s", metrics)
},
}
cmdMetrics.PersistentFlags().StringVarP(&prometheusURL, "url", "u", "", "Prometheus url (http://<ip>:<port>/metrics)")


@@ -164,7 +164,8 @@ cscli parsers remove crowdsecurity/sshd-logs
cscli parser list crowdsecurity/xxx`,
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
- ListItems([]string{cwhub.PARSERS}, args, false, true, all)
+ items := ListItems([]string{cwhub.PARSERS}, args, false, true, all)
+ fmt.Printf("%s\n", items)
},
}
cmdParsersList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")


@@ -162,7 +162,8 @@ func NewPostOverflowsCmd() *cobra.Command {
cscli postoverflows list crowdsecurity/xxx`,
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
- ListItems([]string{cwhub.PARSERS_OVFLW}, args, false, true, all)
+ items := ListItems([]string{cwhub.PARSERS_OVFLW}, args, false, true, all)
+ fmt.Printf("%s\n", items)
},
}
cmdPostOverflowsList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")


@@ -166,7 +166,8 @@ cscli scenarios remove crowdsecurity/ssh-bf
cscli scenarios list crowdsecurity/xxx`,
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
- ListItems([]string{cwhub.SCENARIOS}, args, false, true, all)
+ items := ListItems([]string{cwhub.SCENARIOS}, args, false, true, all)
+ fmt.Printf("%s\n", items)
},
}
cmdScenariosList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")

cmd/crowdsec-cli/support.go (new file, 393 lines)

@@ -0,0 +1,393 @@
package main
import (
"archive/zip"
"bytes"
"context"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path/filepath"
"regexp"
"strings"
"github.com/blackfireio/osinfo"
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
"github.com/crowdsecurity/crowdsec/pkg/database"
"github.com/crowdsecurity/crowdsec/pkg/models"
"github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
const (
SUPPORT_METRICS_HUMAN_PATH = "metrics/metrics.human"
SUPPORT_METRICS_PROMETHEUS_PATH = "metrics/metrics.prometheus"
SUPPORT_VERSION_PATH = "version.txt"
SUPPORT_OS_INFO_PATH = "osinfo.txt"
SUPPORT_PARSERS_PATH = "hub/parsers.txt"
SUPPORT_SCENARIOS_PATH = "hub/scenarios.txt"
SUPPORT_COLLECTIONS_PATH = "hub/collections.txt"
SUPPORT_POSTOVERFLOWS_PATH = "hub/postoverflows.txt"
SUPPORT_BOUNCERS_PATH = "lapi/bouncers.txt"
SUPPORT_AGENTS_PATH = "lapi/agents.txt"
SUPPORT_CROWDSEC_CONFIG_PATH = "config/crowdsec.yaml"
SUPPORT_LAPI_STATUS_PATH = "lapi_status.txt"
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
)
func collectMetrics() ([]byte, []byte, error) {
log.Info("Collecting prometheus metrics")
err := csConfig.LoadPrometheus()
if err != nil {
return nil, nil, err
}
if csConfig.Cscli.PrometheusUrl == "" {
log.Warn("No Prometheus URL configured, metrics will not be collected")
return nil, nil, fmt.Errorf("prometheus_uri is not set")
}
humanMetrics, err := FormatPrometheusMetric(csConfig.Cscli.PrometheusUrl+"/metrics", "human")
if err != nil {
return nil, nil, fmt.Errorf("could not fetch promtheus metrics: %s", err)
}
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl+"/metrics", nil)
if err != nil {
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %s", err)
}
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %s", err)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %s", err)
}
return humanMetrics, body, nil
}
func collectVersion() []byte {
log.Info("Collecting version")
return []byte(cwversion.ShowStr())
}
func collectOSInfo() ([]byte, error) {
log.Info("Collecting OS info")
info, err := osinfo.GetOSInfo()
if err != nil {
return nil, err
}
w := bytes.NewBuffer(nil)
w.WriteString(fmt.Sprintf("Architecture: %s\n", info.Architecture))
w.WriteString(fmt.Sprintf("Family: %s\n", info.Family))
w.WriteString(fmt.Sprintf("ID: %s\n", info.ID))
w.WriteString(fmt.Sprintf("Name: %s\n", info.Name))
w.WriteString(fmt.Sprintf("Codename: %s\n", info.Codename))
w.WriteString(fmt.Sprintf("Version: %s\n", info.Version))
w.WriteString(fmt.Sprintf("Build: %s\n", info.Build))
return w.Bytes(), nil
}
func initHub() error {
if err := csConfig.LoadHub(); err != nil {
return fmt.Errorf("cannot load hub: %s", err)
}
if csConfig.Hub == nil {
return fmt.Errorf("hub not configured")
}
if err := cwhub.SetHubBranch(); err != nil {
return fmt.Errorf("cannot set hub branch: %s", err)
}
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
return fmt.Errorf("no hub index found: %s", err)
}
return nil
}
func collectHubItems(itemType string) []byte {
log.Infof("Collecting %s list", itemType)
items := ListItems([]string{itemType}, []string{}, false, true, all)
return items
}
func collectBouncers(dbClient *database.Client) ([]byte, error) {
return getBouncers(dbClient)
}
func collectAgents(dbClient *database.Client) ([]byte, error) {
return getAgents(dbClient)
}
func collectAPIStatus(login string, password string, endpoint string, prefix string) []byte {
if csConfig.API.Client == nil || csConfig.API.Client.Credentials == nil {
return []byte("No agent credentials found, are we LAPI ?")
}
pwd := strfmt.Password(password)
apiurl, err := url.Parse(endpoint)
if err != nil {
return []byte(fmt.Sprintf("cannot parse API URL: %s", err.Error()))
}
scenarios, err := cwhub.GetInstalledScenariosAsString()
if err != nil {
return []byte(fmt.Sprintf("could not collect scenarios: %s", err.Error()))
}
Client, err = apiclient.NewDefaultClient(apiurl,
prefix,
fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
nil)
if err != nil {
return []byte(fmt.Sprintf("could not init client: %s", err.Error()))
}
t := models.WatcherAuthRequest{
MachineID: &login,
Password: &pwd,
Scenarios: scenarios,
}
_, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
if err != nil {
return []byte(fmt.Sprintf("Could not authenticate to API: %s", err))
} else {
return []byte("Successfully authenticated to LAPI")
}
}
func collectCrowdsecConfig() []byte {
log.Info("Collecting crowdsec config")
config, err := ioutil.ReadFile(*csConfig.FilePath)
if err != nil {
return []byte(fmt.Sprintf("could not read config file: %s", err))
}
r := regexp.MustCompile(`(\s+password:|\s+user:|\s+host:)\s+.*`)
return r.ReplaceAll(config, []byte("$1 ****REDACTED****"))
}
func collectCrowdsecProfile() []byte {
log.Info("Collecting crowdsec profile")
config, err := ioutil.ReadFile(csConfig.API.Server.ProfilesPath)
if err != nil {
return []byte(fmt.Sprintf("could not read profile file: %s", err))
}
return config
}
func collectAcquisitionConfig() map[string][]byte {
log.Info("Collecting acquisition config")
ret := make(map[string][]byte)
for _, filename := range csConfig.Crowdsec.AcquisitionFiles {
fileContent, err := ioutil.ReadFile(filename)
if err != nil {
ret[filename] = []byte(fmt.Sprintf("could not read file: %s", err))
} else {
ret[filename] = fileContent
}
}
return ret
}
func NewSupportCmd() *cobra.Command {
var cmdSupport = &cobra.Command{
Use: "support [action]",
Short: "Provide commands to help during support",
Args: cobra.MinimumNArgs(1),
DisableAutoGenTag: true,
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
return nil
},
}
var outFile string
cmdDump := &cobra.Command{
Use: "dump",
Short: "Dump all your configuration to a zip file for easier support",
Long: `Dump the following informations:
- Crowdsec version
- OS version
- Installed collections list
- Installed parsers list
- Installed scenarios list
- Installed postoverflows list
- Bouncers list
- Machines list
- CAPI status
- LAPI status
- Crowdsec config (sensitive information like username and password are redacted)
- Crowdsec metrics`,
Example: `cscli support dump
cscli support dump -f /tmp/crowdsec-support.zip
`,
Args: cobra.NoArgs,
DisableAutoGenTag: true,
Run: func(cmd *cobra.Command, args []string) {
var err error
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
infos := map[string][]byte{
SUPPORT_VERSION_PATH: collectVersion(),
}
if outFile == "" {
outFile = "/tmp/crowdsec-support.zip"
}
dbClient, err = database.NewClient(csConfig.DbConfig)
if err != nil {
log.Warnf("Could not connect to database: %s", err)
skipDB = true
infos[SUPPORT_BOUNCERS_PATH] = []byte(err.Error())
infos[SUPPORT_AGENTS_PATH] = []byte(err.Error())
}
if err := csConfig.LoadAPIServer(); err != nil {
log.Warnf("could not load LAPI, skipping CAPI check")
skipLAPI = true
infos[SUPPORT_CAPI_STATUS_PATH] = []byte(err.Error())
}
if err := csConfig.LoadCrowdsec(); err != nil {
log.Warnf("could not load agent config, skipping crowdsec config check")
skipAgent = true
}
err = initHub()
if err != nil {
log.Warn("Could not init hub, running on LAPI ? Hub related information will not be collected")
skipHub = true
infos[SUPPORT_PARSERS_PATH] = []byte(err.Error())
infos[SUPPORT_SCENARIOS_PATH] = []byte(err.Error())
infos[SUPPORT_POSTOVERFLOWS_PATH] = []byte(err.Error())
infos[SUPPORT_COLLECTIONS_PATH] = []byte(err.Error())
}
if csConfig.API.Client == nil || csConfig.API.Client.Credentials == nil {
log.Warn("no agent credentials found, skipping LAPI connectivity check")
if _, ok := infos[SUPPORT_LAPI_STATUS_PATH]; ok {
infos[SUPPORT_LAPI_STATUS_PATH] = append(infos[SUPPORT_LAPI_STATUS_PATH], []byte("\nNo LAPI credentials found")...)
}
skipLAPI = true
}
if csConfig.API.Server == nil || csConfig.API.Server.OnlineClient.Credentials == nil {
log.Warn("no CAPI credentials found, skipping CAPI connectivity check")
skipCAPI = true
}
infos[SUPPORT_METRICS_HUMAN_PATH], infos[SUPPORT_METRICS_PROMETHEUS_PATH], err = collectMetrics()
if err != nil {
log.Warnf("could not collect prometheus metrics information: %s", err)
infos[SUPPORT_METRICS_HUMAN_PATH] = []byte(err.Error())
infos[SUPPORT_METRICS_PROMETHEUS_PATH] = []byte(err.Error())
}
infos[SUPPORT_OS_INFO_PATH], err = collectOSInfo()
if err != nil {
log.Warnf("could not collect OS information: %s", err)
infos[SUPPORT_OS_INFO_PATH] = []byte(err.Error())
}
infos[SUPPORT_CROWDSEC_CONFIG_PATH] = collectCrowdsecConfig()
if !skipHub {
infos[SUPPORT_PARSERS_PATH] = collectHubItems(cwhub.PARSERS)
infos[SUPPORT_SCENARIOS_PATH] = collectHubItems(cwhub.SCENARIOS)
infos[SUPPORT_POSTOVERFLOWS_PATH] = collectHubItems(cwhub.PARSERS_OVFLW)
infos[SUPPORT_COLLECTIONS_PATH] = collectHubItems(cwhub.COLLECTIONS)
}
if !skipDB {
infos[SUPPORT_BOUNCERS_PATH], err = collectBouncers(dbClient)
if err != nil {
log.Warnf("could not collect bouncers information: %s", err)
infos[SUPPORT_BOUNCERS_PATH] = []byte(err.Error())
}
infos[SUPPORT_AGENTS_PATH], err = collectAgents(dbClient)
if err != nil {
log.Warnf("could not collect agents information: %s", err)
infos[SUPPORT_AGENTS_PATH] = []byte(err.Error())
}
}
if !skipCAPI {
log.Info("Collecting CAPI status")
infos[SUPPORT_CAPI_STATUS_PATH] = collectAPIStatus(csConfig.API.Server.OnlineClient.Credentials.Login,
csConfig.API.Server.OnlineClient.Credentials.Password,
csConfig.API.Server.OnlineClient.Credentials.URL,
CAPIURLPrefix)
}
if !skipLAPI {
log.Info("Collection LAPI status")
infos[SUPPORT_LAPI_STATUS_PATH] = collectAPIStatus(csConfig.API.Client.Credentials.Login,
csConfig.API.Client.Credentials.Password,
csConfig.API.Client.Credentials.URL,
LAPIURLPrefix)
infos[SUPPORT_CROWDSEC_PROFILE_PATH] = collectCrowdsecProfile()
}
if !skipAgent {
acquis := collectAcquisitionConfig()
for filename, content := range acquis {
fname := strings.ReplaceAll(filename, string(filepath.Separator), "___")
infos[SUPPORT_ACQUISITION_CONFIG_BASE_PATH+fname] = content
}
}
w := bytes.NewBuffer(nil)
zipWriter := zip.NewWriter(w)
for filename, data := range infos {
fw, err := zipWriter.Create(filename)
if err != nil {
log.Errorf("Could not add zip entry for %s: %s", filename, err)
continue
}
fw.Write(data)
}
err = zipWriter.Close()
if err != nil {
log.Fatalf("could not finalize zip file: %s", err)
}
err = ioutil.WriteFile(outFile, w.Bytes(), 0600)
if err != nil {
log.Fatalf("could not write zip file to %s: %s", outFile, err)
}
log.Infof("Written zip file to %s", outFile)
},
}
cmdDump.Flags().StringVarP(&outFile, "outFile", "f", "", "File to dump the information to")
cmdSupport.AddCommand(cmdDump)
return cmdSupport
}
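As a worked illustration of the redaction performed by collectCrowdsecConfig above, the same regexp can be applied to a small, made-up config snippet (standalone sketch, not part of the commit; the sample YAML is invented for the example):

// Standalone illustration of the credential redaction used by
// collectCrowdsecConfig: values of user/password/host keys are masked
// before the config is added to the support archive.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Sample config invented for the example.
	config := []byte("db_config:\n  user: crowdsec\n  password: s3cret\n  host: 192.168.1.10\n")
	r := regexp.MustCompile(`(\s+password:|\s+user:|\s+host:)\s+.*`)
	// Prints the YAML with each matched value replaced by ****REDACTED****.
	fmt.Printf("%s", r.ReplaceAll(config, []byte("$1 ****REDACTED****")))
}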


@@ -1,6 +1,7 @@
package main
import (
+ "bytes"
"encoding/csv"
"encoding/json"
"fmt"
@@ -165,7 +166,7 @@ func compInstalledItems(itemType string, args []string, toComplete string) ([]st
return comp, cobra.ShellCompDirectiveNoFileComp
}
- func ListItems(itemTypes []string, args []string, showType bool, showHeader bool, all bool) {
+ func ListItems(itemTypes []string, args []string, showType bool, showHeader bool, all bool) []byte {
var hubStatusByItemType = make(map[string][]cwhub.ItemHubStatus)
@@ -177,6 +178,8 @@ func ListItems(itemTypes []string, args []string, showType bool, showHeader bool
hubStatusByItemType[itemType] = cwhub.GetHubStatusForItemType(itemType, itemName, all)
}
+ w := bytes.NewBuffer(nil)
if csConfig.Cscli.Output == "human" {
for _, itemType := range itemTypes {
var statuses []cwhub.ItemHubStatus
@@ -185,8 +188,8 @@ func ListItems(itemTypes []string, args []string, showType bool, showHeader bool
log.Errorf("unknown item type: %s", itemType)
continue
}
- fmt.Println(strings.ToUpper(itemType))
+ fmt.Fprintf(w, "%s\n", strings.ToUpper(itemType))
- table := tablewriter.NewWriter(os.Stdout)
+ table := tablewriter.NewWriter(w)
table.SetCenterSeparator("")
table.SetColumnSeparator("")
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
@@ -202,9 +205,9 @@ func ListItems(itemTypes []string, args []string, showType bool, showHeader bool
if err != nil {
log.Fatalf("failed to unmarshal")
}
- fmt.Printf("%s", string(x))
+ w.Write(x)
} else if csConfig.Cscli.Output == "raw" {
- csvwriter := csv.NewWriter(os.Stdout)
+ csvwriter := csv.NewWriter(w)
if showHeader {
header := []string{"name", "status", "version", "description"}
if showType {
@@ -244,6 +247,7 @@ func ListItems(itemTypes []string, args []string, showType bool, showHeader bool
}
csvwriter.Flush()
}
+ return w.Bytes()
}
func InspectItem(name string, objecitemType string) {

go.mod

@@ -85,6 +85,7 @@ require (
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/beorn7/perks v1.0.1 // indirect
+ github.com/blackfireio/osinfo v1.0.3 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/containerd/containerd v1.6.2 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect

go.sum

@@ -105,6 +105,8 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+ github.com/blackfireio/osinfo v1.0.3 h1:Yk2t2GTPjBcESv6nDSWZKO87bGMQgO+Hi9OoXPpxX8c=
+ github.com/blackfireio/osinfo v1.0.3/go.mod h1:Pd987poVNmd5Wsx6PRPw4+w7kLlf9iJxoRKPtPAjOrA=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/c-robinson/iplib v1.0.3 h1:NG0UF0GoEsrC1/vyfX1Lx2Ss7CySWl3KqqXh3q4DdPU=


@@ -215,8 +215,8 @@ declare stderr
run -0 --separate-stderr cscli metrics
assert_output --partial "ROUTE"
assert_output --partial '/v1/watchers/login'
- assert_stderr --partial "Local Api Metrics:"
+ assert_output --partial "Local Api Metrics:"
}
@test "'cscli completion' with or without configuration file" {


@@ -77,6 +77,6 @@ teardown() {
run -0 --separate-stderr cscli metrics
assert_output --partial "ROUTE"
assert_output --partial '/v1/watchers/login'
- assert_stderr --partial "Local Api Metrics:"
+ assert_output --partial "Local Api Metrics:"
} }