Improved expr debugger (#2495)

* new expr debugger

---------

Co-authored-by: mmetc <92726601+mmetc@users.noreply.github.com>
Thibault "bui" Koechlin 2023-11-24 11:10:54 +01:00 committed by GitHub
parent 7ffa0cc787
commit 1dcf9d1ae1
23 changed files with 929 additions and 447 deletions

2
go.mod
View file

@@ -13,7 +13,7 @@ require (
github.com/Masterminds/sprig/v3 v3.2.2
github.com/agext/levenshtein v1.2.1
github.com/alexliesenfeld/health v0.5.1
github.com/antonmedv/expr v1.12.5
github.com/antonmedv/expr v1.15.3
github.com/appleboy/gin-jwt/v2 v2.8.0
github.com/aquasecurity/table v1.8.0
github.com/aws/aws-lambda-go v1.38.0

2
go.sum
View file

@@ -73,6 +73,8 @@ github.com/alexliesenfeld/health v0.5.1/go.mod h1:N4NDIeQtlWumG+6z1ne1v62eQxktz5
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/antonmedv/expr v1.12.5 h1:Fq4okale9swwL3OeLLs9WD9H6GbgBLJyN/NUHRv+n0E=
github.com/antonmedv/expr v1.12.5/go.mod h1:FPC8iWArxls7axbVLsW+kpg1mz29A1b2M6jt+hZfDkU=
github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/appleboy/gin-jwt/v2 v2.8.0 h1:Glo7cb9eBR+hj8Y7WzgfkOlqCaNLjP+RV4dNO3fpdps=

View file

@@ -16,12 +16,10 @@ import (
)
type Runtime struct {
RuntimeFilters []*vm.Program `json:"-" yaml:"-"`
DebugFilters []*exprhelpers.ExprDebugger `json:"-" yaml:"-"`
RuntimeDurationExpr *vm.Program `json:"-" yaml:"-"`
DebugDurationExpr *exprhelpers.ExprDebugger `json:"-" yaml:"-"`
Cfg *csconfig.ProfileCfg `json:"-" yaml:"-"`
Logger *log.Entry `json:"-" yaml:"-"`
RuntimeFilters []*vm.Program `json:"-" yaml:"-"`
RuntimeDurationExpr *vm.Program `json:"-" yaml:"-"`
Cfg *csconfig.ProfileCfg `json:"-" yaml:"-"`
Logger *log.Entry `json:"-" yaml:"-"`
}
var defaultDuration = "4h"
@@ -32,7 +30,6 @@ func NewProfile(profilesCfg []*csconfig.ProfileCfg) ([]*Runtime, error) {
for _, profile := range profilesCfg {
var runtimeFilter, runtimeDurationExpr *vm.Program
var debugFilter, debugDurationExpr *exprhelpers.ExprDebugger
runtime := &Runtime{}
xlog := log.New()
if err := types.ConfigureLogger(xlog); err != nil {
@@ -45,7 +42,6 @@ func NewProfile(profilesCfg []*csconfig.ProfileCfg) ([]*Runtime, error) {
})
runtime.RuntimeFilters = make([]*vm.Program, len(profile.Filters))
runtime.DebugFilters = make([]*exprhelpers.ExprDebugger, len(profile.Filters))
runtime.Cfg = profile
if runtime.Cfg.OnSuccess != "" && runtime.Cfg.OnSuccess != "continue" && runtime.Cfg.OnSuccess != "break" {
return []*Runtime{}, fmt.Errorf("invalid 'on_success' for '%s': %s", profile.Name, runtime.Cfg.OnSuccess)
@@ -60,12 +56,6 @@ func NewProfile(profilesCfg []*csconfig.ProfileCfg) ([]*Runtime, error) {
}
runtime.RuntimeFilters[fIdx] = runtimeFilter
if profile.Debug != nil && *profile.Debug {
if debugFilter, err = exprhelpers.NewDebugger(filter, exprhelpers.GetExprOptions(map[string]interface{}{"Alert": &models.Alert{}})...); err != nil {
log.Debugf("Error compiling debug filter of %s : %s", profile.Name, err)
// Don't fail if we can't compile the filter - for now
// return errors.Wrapf(err, "Error compiling debug filter of %s", profile.Name)
}
runtime.DebugFilters[fIdx] = debugFilter
runtime.Logger.Logger.SetLevel(log.DebugLevel)
}
}
@@ -74,14 +64,7 @@ func NewProfile(profilesCfg []*csconfig.ProfileCfg) ([]*Runtime, error) {
if runtimeDurationExpr, err = expr.Compile(profile.DurationExpr, exprhelpers.GetExprOptions(map[string]interface{}{"Alert": &models.Alert{}})...); err != nil {
return []*Runtime{}, errors.Wrapf(err, "error compiling duration_expr of %s", profile.Name)
}
runtime.RuntimeDurationExpr = runtimeDurationExpr
if profile.Debug != nil && *profile.Debug {
if debugDurationExpr, err = exprhelpers.NewDebugger(profile.DurationExpr, exprhelpers.GetExprOptions(map[string]interface{}{"Alert": &models.Alert{}})...); err != nil {
log.Debugf("Error compiling debug duration_expr of %s : %s", profile.Name, err)
}
runtime.DebugDurationExpr = debugDurationExpr
}
}
for _, decision := range profile.Decisions {
@@ -129,7 +112,11 @@ func (Profile *Runtime) GenerateDecisionFromProfile(Alert *models.Alert) ([]*mod
/*some fields are populated from the reference object : duration, scope, type*/
decision.Duration = new(string)
if Profile.Cfg.DurationExpr != "" && Profile.RuntimeDurationExpr != nil {
duration, err := expr.Run(Profile.RuntimeDurationExpr, map[string]interface{}{"Alert": Alert})
profileDebug := false
if Profile.Cfg.Debug != nil && *Profile.Cfg.Debug {
profileDebug = true
}
duration, err := exprhelpers.Run(Profile.RuntimeDurationExpr, map[string]interface{}{"Alert": Alert}, Profile.Logger, profileDebug)
if err != nil {
Profile.Logger.Warningf("Failed to run duration_expr : %v", err)
*decision.Duration = *refDecision.Duration
@@ -173,16 +160,17 @@ func (Profile *Runtime) EvaluateProfile(Alert *models.Alert) ([]*models.Decision
matched := false
for eIdx, expression := range Profile.RuntimeFilters {
output, err := expr.Run(expression, map[string]interface{}{"Alert": Alert})
debugProfile := false
if Profile.Cfg.Debug != nil && *Profile.Cfg.Debug {
debugProfile = true
}
output, err := exprhelpers.Run(expression, map[string]interface{}{"Alert": Alert}, Profile.Logger, debugProfile)
if err != nil {
Profile.Logger.Warningf("failed to run profile expr for %s : %v", Profile.Cfg.Name, err)
return nil, matched, errors.Wrapf(err, "while running expression %s", Profile.Cfg.Filters[eIdx])
}
switch out := output.(type) {
case bool:
if Profile.Cfg.Debug != nil && *Profile.Cfg.Debug {
Profile.DebugFilters[eIdx].Run(Profile.Logger, out, map[string]interface{}{"Alert": Alert})
}
if out {
matched = true
/*the expression matched, create the associated decision*/
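The profile changes above show the new calling convention: instead of pairing expr.Run with separately compiled DebugFilters/DebugDurationExpr, callers pass their logger and a plain debug flag to exprhelpers.Run, which traces the expression opcode by opcode when the flag is set. A condensed sketch of that pattern, assuming the exprhelpers.Run signature introduced in pkg/exprhelpers/debugger.go below (the wrapper function itself is illustrative, not part of the diff):

package example

import (
	"fmt"

	"github.com/antonmedv/expr/vm"
	log "github.com/sirupsen/logrus"

	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
	"github.com/crowdsecurity/crowdsec/pkg/models"
)

// evaluateFilter mirrors what EvaluateProfile does above: turn the optional
// *bool Debug field into a plain bool, then let exprhelpers.Run decide
// whether to emit the per-opcode trace through the profile logger.
func evaluateFilter(program *vm.Program, alert *models.Alert, logger *log.Entry, cfgDebug *bool) (bool, error) {
	debug := cfgDebug != nil && *cfgDebug
	output, err := exprhelpers.Run(program, map[string]interface{}{"Alert": alert}, logger, debug)
	if err != nil {
		return false, err
	}
	out, ok := output.(bool)
	if !ok {
		return false, fmt.Errorf("unexpected non-bool return: %T", output)
	}
	return out, nil
}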

462
pkg/exprhelpers/debugger.go Normal file
View file

@@ -0,0 +1,462 @@
package exprhelpers
import (
"fmt"
"strconv"
"strings"
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/vm"
log "github.com/sirupsen/logrus"
)
type ExprRuntimeDebug struct {
Logger *log.Entry
Lines []string
Outputs []OpOutput
}
var IndentStep = 4
// we use this struct to store the output of the expr runtime
type OpOutput struct {
Code string //relevant code part
CodeDepth int //level of nesting
BlockStart bool
BlockEnd bool
Func bool //true if it's a function call
FuncName string
Args []string
FuncResults []string
//
Comparison bool //true if it's a comparison
Negated bool
Left string
Right string
//
JumpIf bool //true if it's conditional jump
IfTrue bool
IfFalse bool
//
Condition bool //true if it's a condition
ConditionIn bool
ConditionContains bool
//used for comparisons, conditional jumps and conditions
StrConditionResult string
ConditionResult *bool //should always be present for conditions
//
Finalized bool //used when a node is finalized, we already fetched result from next OP
}
func (o *OpOutput) String() string {
ret := fmt.Sprintf("%*c", o.CodeDepth, ' ')
if len(o.Code) != 0 {
ret += fmt.Sprintf("[%s]", o.Code)
}
ret += " "
switch {
case o.BlockStart:
ret = fmt.Sprintf("%*cBLOCK_START [%s]", o.CodeDepth-IndentStep, ' ', o.Code)
return ret
case o.BlockEnd:
indent := o.CodeDepth - (IndentStep * 2)
if indent < 0 {
indent = 0
}
ret = fmt.Sprintf("%*cBLOCK_END [%s]", indent, ' ', o.Code)
if len(o.StrConditionResult) > 0 {
ret += fmt.Sprintf(" -> %s", o.StrConditionResult)
}
return ret
//A block end can carry a value, for example if it's a count, any, all etc. XXX
case o.Func:
return ret + fmt.Sprintf("%s(%s) = %s", o.FuncName, strings.Join(o.Args, ", "), strings.Join(o.FuncResults, ", "))
case o.Comparison:
if o.Negated {
ret += "NOT "
}
ret += fmt.Sprintf("%s == %s -> %s", o.Left, o.Right, o.StrConditionResult)
return ret
case o.ConditionIn:
return ret + fmt.Sprintf("%s in %s -> %s", o.Args[0], o.Args[1], o.StrConditionResult)
case o.ConditionContains:
return ret + fmt.Sprintf("%s contains %s -> %s", o.Args[0], o.Args[1], o.StrConditionResult)
case o.JumpIf && o.IfTrue:
if o.ConditionResult != nil {
if *o.ConditionResult {
return ret + "OR -> false"
}
return ret + "OR -> true"
}
return ret + "OR(?)"
case o.JumpIf && o.IfFalse:
if o.ConditionResult != nil {
if *o.ConditionResult {
return ret + "AND -> true"
}
return ret + "AND -> false"
}
return ret + "AND(?)"
}
return ret + ""
}
func (erp ExprRuntimeDebug) extractCode(ip int, program *vm.Program, parts []string) string {
//log.Tracef("# extracting code for ip %d [%s]", ip, parts[1])
if program.Locations[ip].Line == 0 { //it seems line is zero when it's not actual code (ie. op push at the beginning)
log.Tracef("zero location ?")
return ""
}
startLine := program.Locations[ip].Line
startColumn := program.Locations[ip].Column
lines := strings.Split(program.Source.Content(), "\n")
endCol := 0
endLine := 0
for i := ip + 1; i < len(program.Locations); i++ {
if program.Locations[i].Line > startLine || (program.Locations[i].Line == startLine && program.Locations[i].Column > startColumn) {
//we don't have end values yet and this one is greater than the current position, take it
if endLine == 0 && endCol == 0 {
endLine = program.Locations[i].Line
endCol = program.Locations[i].Column
}
//however, we are looking for the closest upper one
if program.Locations[i].Line < endLine || (program.Locations[i].Line == endLine && program.Locations[i].Column < endCol) {
endLine = program.Locations[i].Line
endCol = program.Locations[i].Column
}
}
}
//maybe it was the last instruction ?
if endCol == 0 && endLine == 0 {
endLine = len(lines)
endCol = len(lines[endLine-1])
}
code_snippet := ""
startLine -= 1 //line count starts at 1
endLine -= 1
for i := startLine; i <= endLine; i++ {
if i == startLine {
if startLine != endLine {
code_snippet += lines[i][startColumn:]
continue
}
code_snippet += lines[i][startColumn:endCol]
break
}
if i == endLine {
code_snippet += lines[i][:endCol]
break
}
code_snippet += lines[i]
}
log.Tracef("#code extract for ip %d [%s] -> '%s'", ip, parts[1], code_snippet)
return cleanTextForDebug(code_snippet)
}
func autoQuote(v any) string {
switch x := v.(type) {
case string:
//let's avoid printing long strings. it can happen ie. when we are debugging expr with `File()` or similar helpers
if len(x) > 40 {
return fmt.Sprintf("%q", x[:40]+"...")
} else {
return fmt.Sprintf("%q", x)
}
default:
return fmt.Sprintf("%v", x)
}
}
func (erp ExprRuntimeDebug) ipDebug(ip int, vm *vm.VM, program *vm.Program, parts []string, outputs []OpOutput) ([]OpOutput, error) {
IdxOut := len(outputs)
prevIdxOut := 0
currentDepth := 0
//when there is a function call or comparison, we need to wait for the next instruction to get the result and "finalize" the previous one
if IdxOut > 0 {
prevIdxOut = IdxOut - 1
currentDepth = outputs[prevIdxOut].CodeDepth
if outputs[prevIdxOut].Func && !outputs[prevIdxOut].Finalized {
stack := vm.Stack()
num_items := 1
for i := len(stack) - 1; i >= 0 && num_items > 0; i-- {
outputs[prevIdxOut].FuncResults = append(outputs[prevIdxOut].FuncResults, autoQuote(stack[i]))
num_items--
}
outputs[prevIdxOut].Finalized = true
} else if (outputs[prevIdxOut].Comparison || outputs[prevIdxOut].Condition) && !outputs[prevIdxOut].Finalized {
stack := vm.Stack()
outputs[prevIdxOut].StrConditionResult = fmt.Sprintf("%+v", stack)
if val, ok := stack[0].(bool); ok {
outputs[prevIdxOut].ConditionResult = new(bool)
*outputs[prevIdxOut].ConditionResult = val
}
outputs[prevIdxOut].Finalized = true
}
}
erp.Logger.Tracef("[STEP %d:%s] (stack:%+v) (parts:%+v) {depth:%d}", ip, parts[1], vm.Stack(), parts, currentDepth)
out := OpOutput{}
out.CodeDepth = currentDepth
out.Code = erp.extractCode(ip, program, parts)
switch parts[1] {
case "OpBegin":
out.CodeDepth += IndentStep
out.BlockStart = true
outputs = append(outputs, out)
case "OpEnd":
out.CodeDepth -= IndentStep
out.BlockEnd = true
//OpEnd can carry value, if it's any/all/count etc.
if len(vm.Stack()) > 0 {
out.StrConditionResult = fmt.Sprintf("%v", vm.Stack())
}
outputs = append(outputs, out)
case "OpNot":
//negate the previous condition
outputs[prevIdxOut].Negated = true
case "OpTrue": //generated when possible ? (1 == 1)
out.Condition = true
out.ConditionResult = new(bool)
*out.ConditionResult = true
out.StrConditionResult = "true"
outputs = append(outputs, out)
case "OpFalse": //generated when possible ? (1 != 1)
out.Condition = true
out.ConditionResult = new(bool)
*out.ConditionResult = false
out.StrConditionResult = "false"
outputs = append(outputs, out)
case "OpJumpIfTrue": //OR
stack := vm.Stack()
out.JumpIf = true
out.IfTrue = true
out.StrConditionResult = fmt.Sprintf("%v", stack[0])
if val, ok := stack[0].(bool); ok {
out.ConditionResult = new(bool)
*out.ConditionResult = val
}
outputs = append(outputs, out)
case "OpJumpIfFalse": //AND
stack := vm.Stack()
out.JumpIf = true
out.IfFalse = true
out.StrConditionResult = fmt.Sprintf("%v", stack[0])
if val, ok := stack[0].(bool); ok {
out.ConditionResult = new(bool)
*out.ConditionResult = val
}
outputs = append(outputs, out)
case "OpCall1": //Op for function calls
out.Func = true
out.FuncName = parts[3]
stack := vm.Stack()
num_items := 1
for i := len(stack) - 1; i >= 0 && num_items > 0; i-- {
out.Args = append(out.Args, autoQuote(stack[i]))
num_items--
}
outputs = append(outputs, out)
case "OpCall2": //Op for function calls
out.Func = true
out.FuncName = parts[3]
stack := vm.Stack()
num_items := 2
for i := len(stack) - 1; i >= 0 && num_items > 0; i-- {
out.Args = append(out.Args, autoQuote(stack[i]))
num_items--
}
outputs = append(outputs, out)
case "OpCall3": //Op for function calls
out.Func = true
out.FuncName = parts[3]
stack := vm.Stack()
num_items := 3
for i := len(stack) - 1; i >= 0 && num_items > 0; i-- {
out.Args = append(out.Args, autoQuote(stack[i]))
num_items--
}
outputs = append(outputs, out)
//double check OpCallFast and OpCallTyped
case "OpCallFast", "OpCallTyped":
//
case "OpCallN": //Op for function calls with more than 3 args
out.Func = true
out.FuncName = parts[1]
stack := vm.Stack()
//for OpCallN, we get the number of args
if len(program.Arguments) >= ip {
nb_args := program.Arguments[ip]
if nb_args > 0 {
//we need to skip the top item on stack
for i := len(stack) - 2; i >= 0 && nb_args > 0; i-- {
out.Args = append(out.Args, autoQuote(stack[i]))
nb_args--
}
}
} else { //let's blindly take the items on stack
for _, val := range vm.Stack() {
out.Args = append(out.Args, autoQuote(val))
}
}
outputs = append(outputs, out)
case "OpEqualString", "OpEqual", "OpEqualInt": //comparisons
stack := vm.Stack()
out.Comparison = true
out.Left = autoQuote(stack[0])
out.Right = autoQuote(stack[1])
outputs = append(outputs, out)
case "OpIn": //in operator
stack := vm.Stack()
out.Condition = true
out.ConditionIn = true
//seems that we tend to receive stack[1] as a map.
//it is tempting to use reflect to extract keys, but we end up with an array that doesn't match the initial order
//(because of the random order of the map)
out.Args = append(out.Args, autoQuote(stack[0]))
out.Args = append(out.Args, autoQuote(stack[1]))
outputs = append(outputs, out)
case "OpContains": //kind OpIn , but reverse
stack := vm.Stack()
out.Condition = true
out.ConditionContains = true
//seems that we tend to receive stack[1] as a map.
//it is tempting to use reflect to extract keys, but we end up with an array that doesn't match the initial order
//(because of the random order of the map)
out.Args = append(out.Args, autoQuote(stack[0]))
out.Args = append(out.Args, autoQuote(stack[1]))
outputs = append(outputs, out)
}
return outputs, nil
}
func (erp ExprRuntimeDebug) ipSeek(ip int) []string {
for i := 0; i < len(erp.Lines); i++ {
parts := strings.Split(erp.Lines[i], "\t")
if parts[0] == strconv.Itoa(ip) {
return parts
}
}
return nil
}
func Run(program *vm.Program, env interface{}, logger *log.Entry, debug bool) (any, error) {
if debug {
dbgInfo, ret, err := RunWithDebug(program, env, logger)
DisplayExprDebug(program, dbgInfo, logger, ret)
return ret, err
}
return expr.Run(program, env)
}
func cleanTextForDebug(text string) string {
text = strings.Join(strings.Fields(text), " ")
text = strings.Trim(text, " \t\n")
return text
}
func DisplayExprDebug(program *vm.Program, outputs []OpOutput, logger *log.Entry, ret any) {
logger.Debugf("dbg(result=%v): %s", ret, cleanTextForDebug(program.Source.Content()))
for _, output := range outputs {
logger.Debugf("%s", output.String())
}
}
// TBD: Based on the level of the logger (ie. trace vs debug) we could decide to add more low level instructions (pop, push, etc.)
func RunWithDebug(program *vm.Program, env interface{}, logger *log.Entry) ([]OpOutput, any, error) {
var outputs []OpOutput = []OpOutput{}
var buf strings.Builder
var erp ExprRuntimeDebug = ExprRuntimeDebug{
Logger: logger,
}
var debugErr chan error = make(chan error)
vm := vm.Debug()
done := false
program.Opcodes(&buf)
lines := strings.Split(buf.String(), "\n")
erp.Lines = lines
go func() {
var err error
erp.Logger.Tracef("[START] ip 0")
ops := erp.ipSeek(0)
if ops == nil {
debugErr <- fmt.Errorf("failed getting ops for ip 0")
return
}
if outputs, err = erp.ipDebug(0, vm, program, ops, outputs); err != nil {
debugErr <- fmt.Errorf("error while debugging at ip 0")
}
vm.Step()
for ip := range vm.Position() {
ops := erp.ipSeek(ip)
if ops == nil { //we reached the end of the program, we shouldn't throw an error
erp.Logger.Tracef("[DONE] ip %d", ip)
debugErr <- nil
return
}
if outputs, err = erp.ipDebug(ip, vm, program, ops, outputs); err != nil {
debugErr <- fmt.Errorf("error while debugging at ip %d", ip)
return
}
if done {
debugErr <- nil
return
}
vm.Step()
}
debugErr <- nil
}()
var return_error error
ret, err := vm.Run(program, env)
done = true
//if the expr runtime failed, we don't need to wait for the debug to finish
if err != nil {
return_error = err
} else {
err = <-debugErr
if err != nil {
log.Warningf("error while debugging expr: %s", err)
}
}
//the overall result of expression is the result of last op ?
if len(outputs) > 0 {
lastOutIdx := len(outputs)
if lastOutIdx > 0 {
lastOutIdx -= 1
}
switch val := ret.(type) {
case bool:
log.Tracef("completing with bool %t", ret)
//if outputs[lastOutIdx].Comparison {
outputs[lastOutIdx].StrConditionResult = fmt.Sprintf("%v", ret)
outputs[lastOutIdx].ConditionResult = new(bool)
*outputs[lastOutIdx].ConditionResult = val
outputs[lastOutIdx].Finalized = true
default:
log.Tracef("completing with type %T -> %v", ret, ret)
outputs[lastOutIdx].StrConditionResult = fmt.Sprintf("%v", ret)
outputs[lastOutIdx].Finalized = true
}
} else {
log.Tracef("no output from expr runtime")
}
return outputs, ret, return_error
}
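Taken together, Run and RunWithDebug/DisplayExprDebug are the new entry points that replace the old ExprDebugger. A minimal standalone sketch of both (the expression, environment and logger field are invented for illustration; GetExprOptions, Run, RunWithDebug and DisplayExprDebug are the helpers defined above):

package main

import (
	"github.com/antonmedv/expr"
	log "github.com/sirupsen/logrus"

	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
)

func main() {
	log.SetLevel(log.DebugLevel) // the opcode trace is logged at debug level
	env := map[string]interface{}{"path": "/login", "status": 403}

	program, err := expr.Compile("status == 403 && path contains 'login'",
		exprhelpers.GetExprOptions(env)...)
	if err != nil {
		log.Fatalf("compile: %s", err)
	}
	logger := log.WithField("component", "expr-debug-example")

	// Drop-in replacement for expr.Run: with debug=true every annotated step
	// is logged, with debug=false it falls straight through to expr.Run.
	ret, err := exprhelpers.Run(program, env, logger, true)
	if err != nil {
		log.Fatalf("run: %s", err)
	}
	logger.Infof("result: %v", ret)

	// When the trace is wanted as data, call RunWithDebug directly and
	// inspect the []OpOutput before (or instead of) rendering it.
	outputs, ret, err := exprhelpers.RunWithDebug(program, env, logger)
	if err != nil {
		log.Fatalf("debug run: %s", err)
	}
	exprhelpers.DisplayExprDebug(program, outputs, logger, ret)
	for _, op := range outputs {
		if op.Comparison && op.ConditionResult != nil && !*op.ConditionResult {
			logger.Warningf("comparison evaluated to false: %s", op.String())
		}
	}
}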

View file

@@ -0,0 +1,344 @@
package exprhelpers
import (
"reflect"
"strings"
"testing"
"github.com/antonmedv/expr"
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/davecgh/go-spew/spew"
log "github.com/sirupsen/logrus"
)
type ExprDbgTest struct {
Name string
Expr string
ExpectedOutputs []OpOutput
ExpectedFailedCompile bool
ExpectedFailRuntime bool
Env map[string]interface{}
LogLevel log.Level
}
// For the sake of testing functions with 2, 3 and N args
func UpperTwo(params ...any) (any, error) {
s := params[0].(string)
v := params[1].(string)
return strings.ToUpper(s) + strings.ToUpper(v), nil
}
func UpperThree(params ...any) (any, error) {
s := params[0].(string)
v := params[1].(string)
x := params[2].(string)
return strings.ToUpper(s) + strings.ToUpper(v) + strings.ToUpper(x), nil
}
func UpperN(params ...any) (any, error) {
s := params[0].(string)
v := params[1].(string)
x := params[2].(string)
y := params[3].(string)
return strings.ToUpper(s) + strings.ToUpper(v) + strings.ToUpper(x) + strings.ToUpper(y), nil
}
func boolPtr(b bool) *bool {
return &b
}
type teststruct struct {
Foo string
}
func TestBaseDbg(t *testing.T) {
defaultEnv := map[string]interface{}{
"queue": &types.Queue{},
"evt": &types.Event{},
"sample_array": []string{"a", "b", "c", "ZZ"},
"base_string": "hello world",
"base_int": 42,
"base_float": 42.42,
"nillvar": &teststruct{},
"base_struct": struct {
Foo string
Bar int
Myarr []string
}{
Foo: "bar",
Bar: 42,
Myarr: []string{"a", "b", "c"},
},
}
// tips for the tests:
// use '%#v' to dump in golang syntax
// use regexp to clear empty/default fields:
// [a-z]+: (false|\[\]string\(nil\)|""),
//ConditionResult:(*bool)
//Missing multi-parameter function
tests := []ExprDbgTest{
{
Name: "nill deref",
Expr: "Upper('1') == '1' && nillvar.Foo == '42'",
Env: defaultEnv,
ExpectedFailRuntime: true,
ExpectedOutputs: []OpOutput{
{Code: "Upper('1')", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"1\""}, FuncResults: []string{"\"1\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== '1'", CodeDepth: 0, Comparison: true, Left: "\"1\"", Right: "\"1\"", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "&&", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "<nil>", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "OpCall2",
Expr: "UpperTwo('hello', 'world') == 'HELLOWORLD'",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "UpperTwo('hello', 'world')", CodeDepth: 0, Func: true, FuncName: "UpperTwo", Args: []string{"\"world\"", "\"hello\""}, FuncResults: []string{"\"HELLOWORLD\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'HELLOWORLD'", CodeDepth: 0, Comparison: true, Left: "\"HELLOWORLD\"", Right: "\"HELLOWORLD\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "OpCall3",
Expr: "UpperThree('hello', 'world', 'foo') == 'HELLOWORLDFOO'",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "UpperThree('hello', 'world', 'foo')", CodeDepth: 0, Func: true, FuncName: "UpperThree", Args: []string{"\"foo\"", "\"world\"", "\"hello\""}, FuncResults: []string{"\"HELLOWORLDFOO\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'HELLOWORLDFOO'", CodeDepth: 0, Comparison: true, Left: "\"HELLOWORLDFOO\"", Right: "\"HELLOWORLDFOO\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "OpCallN",
Expr: "UpperN('hello', 'world', 'foo', 'lol') == UpperN('hello', 'world', 'foo', 'lol')",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "UpperN('hello', 'world', 'foo', 'lol')", CodeDepth: 0, Func: true, FuncName: "OpCallN", Args: []string{"\"lol\"", "\"foo\"", "\"world\"", "\"hello\""}, FuncResults: []string{"\"HELLOWORLDFOOLOL\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "UpperN('hello', 'world', 'foo', 'lol')", CodeDepth: 0, Func: true, FuncName: "OpCallN", Args: []string{"\"lol\"", "\"foo\"", "\"world\"", "\"hello\""}, FuncResults: []string{"\"HELLOWORLDFOOLOL\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== UpperN('hello', 'world', 'foo', 'lol')", CodeDepth: 0, Comparison: true, Left: "\"HELLOWORLDFOOLOL\"", Right: "\"HELLOWORLDFOOLOL\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "base string cmp",
Expr: "base_string == 'hello world'",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "== 'hello world'", CodeDepth: 0, Comparison: true, Left: "\"hello world\"", Right: "\"hello world\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "loop with func call",
Expr: "count(base_struct.Myarr, {Upper(#) == 'C'}) == 1",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "count(base_struct.Myarr, {", CodeDepth: 4, BlockStart: true, ConditionResult: (*bool)(nil), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"a\""}, FuncResults: []string{"\"A\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"A\"", Right: "\"C\"", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"b\""}, FuncResults: []string{"\"B\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"B\"", Right: "\"C\"", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"c\""}, FuncResults: []string{"\"C\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"C\"", Right: "\"C\"", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: false},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 0, BlockEnd: true, StrConditionResult: "[1]", ConditionResult: (*bool)(nil), Finalized: false},
{Code: "== 1", CodeDepth: 0, Comparison: true, Left: "1", Right: "1", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "loop with func call and extra check",
Expr: "count(base_struct.Myarr, {Upper(#) == 'C'}) == 1 && Upper(base_struct.Foo) == 'BAR'",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "count(base_struct.Myarr, {", CodeDepth: 4, BlockStart: true, ConditionResult: (*bool)(nil), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"a\""}, FuncResults: []string{"\"A\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"A\"", Right: "\"C\"", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"b\""}, FuncResults: []string{"\"B\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"B\"", Right: "\"C\"", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "Upper(#)", CodeDepth: 4, Func: true, FuncName: "Upper", Args: []string{"\"c\""}, FuncResults: []string{"\"C\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'C'})", CodeDepth: 4, Comparison: true, Left: "\"C\"", Right: "\"C\"", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 4, JumpIf: true, IfFalse: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: false},
{Code: "count(base_struct.Myarr, {Upper(#) == 'C'})", CodeDepth: 0, BlockEnd: true, StrConditionResult: "[1]", ConditionResult: (*bool)(nil), Finalized: false},
{Code: "== 1", CodeDepth: 0, Comparison: true, Left: "1", Right: "1", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "&&", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: false},
{Code: "Upper(base_struct.Foo)", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"bar\""}, FuncResults: []string{"\"BAR\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "== 'BAR'", CodeDepth: 0, Comparison: true, Left: "\"BAR\"", Right: "\"BAR\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "base 'in' test",
Expr: "base_int in [1,2,3,4,42]",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "in [1,2,3,4,42]", CodeDepth: 0, Args: []string{"42", "map[1:{} 2:{} 3:{} 4:{} 42:{}]"}, Condition: true, ConditionIn: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "base string cmp",
Expr: "base_string == 'hello world'",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "== 'hello world'", CodeDepth: 0, Comparison: true, Left: "\"hello world\"", Right: "\"hello world\"", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "base int cmp",
Expr: "base_int == 42",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "== 42", CodeDepth: 0, Comparison: true, Left: "42", Right: "42", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "negative check",
Expr: "base_int != 43",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "!= 43", CodeDepth: 0, Negated: true, Comparison: true, Left: "42", Right: "43", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "testing ORs",
Expr: "base_int == 43 || base_int == 42",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "== 43", CodeDepth: 0, Comparison: true, Left: "42", Right: "43", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "||", CodeDepth: 0, JumpIf: true, IfTrue: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "== 42", CodeDepth: 0, Comparison: true, Left: "42", Right: "42", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "testing basic true",
Expr: "true",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "true", CodeDepth: 0, Condition: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "testing basic false",
Expr: "false",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "false", CodeDepth: 0, Condition: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: true},
},
},
{
Name: "testing multi lines",
Expr: `base_int == 42 &&
base_string == 'hello world' &&
(base_struct.Bar == 41 || base_struct.Bar == 42)`,
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "== 42", CodeDepth: 0, Comparison: true, Left: "42", Right: "42", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "&&", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: false},
{Code: "== 'hello world'", CodeDepth: 0, Comparison: true, Left: "\"hello world\"", Right: "\"hello world\"", StrConditionResult: "[true]", ConditionResult: boolPtr(true), Finalized: true},
{Code: "&& (", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: false},
{Code: "== 41", CodeDepth: 0, Comparison: true, Left: "42", Right: "41", StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "||", CodeDepth: 0, JumpIf: true, IfTrue: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "== 42)", CodeDepth: 0, Comparison: true, Left: "42", Right: "42", StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "upper + in",
Expr: "Upper(base_string) contains Upper('wOrlD')",
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "Upper(base_string)", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"hello world\""}, FuncResults: []string{"\"HELLO WORLD\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "Upper('wOrlD')", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"wOrlD\""}, FuncResults: []string{"\"WORLD\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "contains Upper('wOrlD')", CodeDepth: 0, Args: []string{"\"HELLO WORLD\"", "\"WORLD\""}, Condition: true, ConditionContains: true, StrConditionResult: "true", ConditionResult: boolPtr(true), Finalized: true},
},
},
{
Name: "upper + complex",
Expr: `( Upper(base_string) contains Upper('/someurl?x=1') ||
Upper(base_string) contains Upper('/someotherurl?account-name=admin&account-status=1&ow=cmd') )
and base_string startsWith ('40') and Upper(base_string) == 'POST'`,
Env: defaultEnv,
ExpectedOutputs: []OpOutput{
{Code: "Upper(base_string)", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"hello world\""}, FuncResults: []string{"\"HELLO WORLD\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "Upper('/someurl?x=1')", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"/someurl?x=1\""}, FuncResults: []string{"\"/SOMEURL?X=1\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "contains Upper('/someurl?x=1')", CodeDepth: 0, Args: []string{"\"HELLO WORLD\"", "\"/SOMEURL?X=1\""}, Condition: true, ConditionContains: true, StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "||", CodeDepth: 0, JumpIf: true, IfTrue: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "Upper(base_string)", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"hello world\""}, FuncResults: []string{"\"HELLO WORLD\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "Upper('/someotherurl?account-name=admin&account-status=1&ow=cmd') )", CodeDepth: 0, Func: true, FuncName: "Upper", Args: []string{"\"/someotherurl?account-name=admin&account...\""}, FuncResults: []string{"\"/SOMEOTHERURL?ACCOUNT-NAME=ADMIN&ACCOUNT...\""}, ConditionResult: (*bool)(nil), Finalized: true},
{Code: "contains Upper('/someotherurl?account-name=admin&account-status=1&ow=cmd') )", CodeDepth: 0, Args: []string{"\"HELLO WORLD\"", "\"/SOMEOTHERURL?ACCOUNT-NAME=ADMIN&ACCOUNT...\""}, Condition: true, ConditionContains: true, StrConditionResult: "[false]", ConditionResult: boolPtr(false), Finalized: true},
{Code: "and", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: false},
{Code: "and", CodeDepth: 0, JumpIf: true, IfFalse: true, StrConditionResult: "false", ConditionResult: boolPtr(false), Finalized: true},
},
},
}
logger := log.WithField("test", "exprhelpers")
for _, test := range tests {
if test.LogLevel != 0 {
log.SetLevel(test.LogLevel)
} else {
log.SetLevel(log.DebugLevel)
}
extraFuncs := []expr.Option{}
extraFuncs = append(extraFuncs,
expr.Function("UpperTwo",
UpperTwo,
[]interface{}{new(func(string, string) string)}...,
))
extraFuncs = append(extraFuncs,
expr.Function("UpperThree",
UpperThree,
[]interface{}{new(func(string, string, string) string)}...,
))
extraFuncs = append(extraFuncs,
expr.Function("UpperN",
UpperN,
[]interface{}{new(func(string, string, string, string) string)}...,
))
supaEnv := GetExprOptions(test.Env)
supaEnv = append(supaEnv, extraFuncs...)
prog, err := expr.Compile(test.Expr, supaEnv...)
if test.ExpectedFailedCompile {
if err == nil {
t.Fatalf("test %s : expected compile error", test.Name)
}
} else {
if err != nil {
t.Fatalf("test %s : unexpected compile error : %s", test.Name, err)
}
}
if test.Name == "nill deref" {
test.Env["nillvar"] = nil
}
outdbg, ret, err := RunWithDebug(prog, test.Env, logger)
if test.ExpectedFailRuntime {
if err == nil {
t.Fatalf("test %s : expected runtime error", test.Name)
}
} else {
if err != nil {
t.Fatalf("test %s : unexpected runtime error : %s", test.Name, err)
}
}
log.SetLevel(log.DebugLevel)
DisplayExprDebug(prog, outdbg, logger, ret)
if len(outdbg) != len(test.ExpectedOutputs) {
t.Errorf("failed test %s", test.Name)
t.Errorf("%#v", outdbg)
//out, _ := yaml.Marshal(outdbg)
//fmt.Printf("%s", string(out))
t.Fatalf("test %s : expected %d outputs, got %d", test.Name, len(test.ExpectedOutputs), len(outdbg))
}
for i, out := range outdbg {
if !reflect.DeepEqual(out, test.ExpectedOutputs[i]) {
spew.Config.DisableMethods = true
t.Errorf("failed test %s", test.Name)
t.Errorf("expected : %#v", test.ExpectedOutputs[i])
t.Errorf("got : %#v", out)
t.Fatalf("%d/%d : mismatch", i, len(outdbg))
}
//DisplayExprDebug(prog, outdbg, logger, ret)
}
}
}
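The ExpectedOutputs literals above double as documentation of what the debugger records for each opcode. As a rough illustration of how such a record renders, the hedged sketch below hand-builds a finalized function call and the comparison consuming its result (field values invented) and prints them through OpOutput.String(); with the format strings in debugger.go this yields lines of the form ` [Upper(username)] Upper("ab") = "AB"` and ` [== 'AB'] "AB" == "AB" -> true`:

package main

import (
	"fmt"

	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
)

func main() {
	// A finalized function call, as ipDebug would record it for OpCall1.
	call := exprhelpers.OpOutput{
		Code: "Upper(username)", Func: true, FuncName: "Upper",
		Args: []string{`"ab"`}, FuncResults: []string{`"AB"`}, Finalized: true,
	}
	// The comparison that consumes the call result.
	res := true
	cmp := exprhelpers.OpOutput{
		Code: "== 'AB'", Comparison: true,
		Left: `"AB"`, Right: `"AB"`,
		StrConditionResult: "true", ConditionResult: &res, Finalized: true,
	}
	// These are the lines DisplayExprDebug hands to the logger.
	fmt.Println(call.String())
	fmt.Println(cmp.String())
}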

View file

@@ -97,19 +97,12 @@ func TestVisitor(t *testing.T) {
}
log.SetLevel(log.DebugLevel)
clog := log.WithFields(log.Fields{
"type": "test",
})
for _, test := range tests {
compiledFilter, err := expr.Compile(test.filter, GetExprOptions(test.env)...)
if err != nil && test.err == nil {
log.Fatalf("compile: %s", err)
}
debugFilter, err := NewDebugger(test.filter, GetExprOptions(test.env)...)
if err != nil && test.err == nil {
log.Fatalf("debug: %s", err)
}
if compiledFilter != nil {
result, err := expr.Run(compiledFilter, test.env)
@@ -121,9 +114,6 @@ func TestVisitor(t *testing.T) {
}
}
if debugFilter != nil {
debugFilter.Run(clog, test.result, test.env)
}
}
}

View file

@@ -1,160 +0,0 @@
package exprhelpers
import (
"fmt"
"strconv"
"strings"
"github.com/antonmedv/expr/parser"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/ast"
"github.com/antonmedv/expr/vm"
)
/*
Visitor is used to reconstruct variables with its property called in an expr filter
Thus, we can debug expr filter by displaying all variables contents present in the filter
*/
type visitor struct {
newVar bool
currentId string
vars map[string][]string
logger *log.Entry
}
func (v *visitor) Visit(node *ast.Node) {
switch n := (*node).(type) {
case *ast.IdentifierNode:
v.newVar = true
uid, _ := uuid.NewUUID()
v.currentId = uid.String()
v.vars[v.currentId] = []string{n.Value}
case *ast.MemberNode:
if n2, ok := n.Property.(*ast.StringNode); ok {
v.vars[v.currentId] = append(v.vars[v.currentId], n2.Value)
}
case *ast.StringNode: //Don't reset here, as any attribute of a member node is a string node (in evt.X, evt is member node, X is string node)
default:
v.newVar = false
v.currentId = ""
/*case *ast.IntegerNode:
v.logger.Infof("integer node found: %+v", n)
case *ast.FloatNode:
v.logger.Infof("float node found: %+v", n)
case *ast.BoolNode:
v.logger.Infof("boolean node found: %+v", n)
case *ast.ArrayNode:
v.logger.Infof("array node found: %+v", n)
case *ast.ConstantNode:
v.logger.Infof("constant node found: %+v", n)
case *ast.UnaryNode:
v.logger.Infof("unary node found: %+v", n)
case *ast.BinaryNode:
v.logger.Infof("binary node found: %+v", n)
case *ast.CallNode:
v.logger.Infof("call node found: %+v", n)
case *ast.BuiltinNode:
v.logger.Infof("builtin node found: %+v", n)
case *ast.ConditionalNode:
v.logger.Infof("conditional node found: %+v", n)
case *ast.ChainNode:
v.logger.Infof("chain node found: %+v", n)
case *ast.PairNode:
v.logger.Infof("pair node found: %+v", n)
case *ast.MapNode:
v.logger.Infof("map node found: %+v", n)
case *ast.SliceNode:
v.logger.Infof("slice node found: %+v", n)
case *ast.ClosureNode:
v.logger.Infof("closure node found: %+v", n)
case *ast.PointerNode:
v.logger.Infof("pointer node found: %+v", n)
default:
v.logger.Infof("unknown node found: %+v | type: %T", n, n)*/
}
}
/*
Build reconstruct all the variables used in a filter (to display their content later).
*/
func (v *visitor) Build(filter string, exprEnv ...expr.Option) (*ExprDebugger, error) {
var expressions []*expression
ret := &ExprDebugger{
filter: filter,
}
if filter == "" {
v.logger.Debugf("unable to create expr debugger with empty filter")
return &ExprDebugger{}, nil
}
v.newVar = false
v.vars = make(map[string][]string)
tree, err := parser.Parse(filter)
if err != nil {
return nil, err
}
ast.Walk(&tree.Node, v)
log.Debugf("vars: %+v", v.vars)
for _, variable := range v.vars {
if variable[0] != "evt" {
continue
}
toBuild := strings.Join(variable, ".")
v.logger.Debugf("compiling expression '%s'", toBuild)
debugFilter, err := expr.Compile(toBuild, exprEnv...)
if err != nil {
return ret, fmt.Errorf("compilation of variable '%s' failed: %v", toBuild, err)
}
tmpExpression := &expression{
toBuild,
debugFilter,
}
expressions = append(expressions, tmpExpression)
}
ret.expression = expressions
return ret, nil
}
// ExprDebugger contains the list of expression to be run when debugging an expression filter
type ExprDebugger struct {
filter string
expression []*expression
}
// expression is the structure that represents the variable in string and compiled format
type expression struct {
Str string
Compiled *vm.Program
}
/*
Run display the content of each variable of a filter by evaluating them with expr,
again the expr environment given in parameter
*/
func (e *ExprDebugger) Run(logger *log.Entry, filterResult bool, exprEnv map[string]interface{}) {
if len(e.expression) == 0 {
logger.Tracef("no variable to eval for filter '%s'", e.filter)
return
}
logger.Debugf("eval(%s) = %s", e.filter, strings.ToUpper(strconv.FormatBool(filterResult)))
logger.Debugf("eval variables:")
for _, expression := range e.expression {
debug, err := expr.Run(expression.Compiled, exprEnv)
if err != nil {
logger.Errorf("unable to print debug expression for '%s': %s", expression.Str, err)
}
logger.Debugf(" %s = '%v'", expression.Str, debug)
}
}
// NewDebugger is the exported function that build the debuggers expressions
func NewDebugger(filter string, exprEnv ...expr.Option) (*ExprDebugger, error) {
logger := log.WithField("component", "expr-debugger")
visitor := &visitor{logger: logger}
exprDebugger, err := visitor.Build(filter, exprEnv...)
return exprDebugger, err
}

View file

@@ -1,100 +0,0 @@
package exprhelpers
import (
"sort"
"testing"
"github.com/antonmedv/expr"
log "github.com/sirupsen/logrus"
)
func TestVisitorBuild(t *testing.T) {
tests := []struct {
name string
expr string
want []string
env map[string]interface{}
}{
{
name: "simple",
expr: "evt.X",
want: []string{"evt.X"},
env: map[string]interface{}{
"evt": map[string]interface{}{
"X": 1,
},
},
},
{
name: "two vars",
expr: "evt.X && evt.Y",
want: []string{"evt.X", "evt.Y"},
env: map[string]interface{}{
"evt": map[string]interface{}{
"X": 1,
"Y": 2,
},
},
},
{
name: "in",
expr: "evt.X in [1,2,3]",
want: []string{"evt.X"},
env: map[string]interface{}{
"evt": map[string]interface{}{
"X": 1,
},
},
},
{
name: "in complex",
expr: "evt.X in [1,2,3] && evt.Y in [1,2,3] || evt.Z in [1,2,3]",
want: []string{"evt.X", "evt.Y", "evt.Z"},
env: map[string]interface{}{
"evt": map[string]interface{}{
"X": 1,
"Y": 2,
"Z": 3,
},
},
},
{
name: "function call",
expr: "Foo(evt.X, 'ads')",
want: []string{"evt.X"},
env: map[string]interface{}{
"evt": map[string]interface{}{
"X": 1,
},
"Foo": func(x int, y string) int {
return x
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
v := &visitor{logger: log.NewEntry(log.New())}
ret, err := v.Build(tt.expr, expr.Env(tt.env))
if err != nil {
t.Errorf("visitor.Build() error = %v", err)
return
}
if len(ret.expression) != len(tt.want) {
t.Errorf("visitor.Build() = %v, want %v", ret.expression, tt.want)
}
//Sort both slices as the order is not guaranteed ??
sort.Slice(tt.want, func(i, j int) bool {
return tt.want[i] < tt.want[j]
})
sort.Slice(ret.expression, func(i, j int) bool {
return ret.expression[i].Str < ret.expression[j].Str
})
for idx, v := range ret.expression {
if v.Str != tt.want[idx] {
t.Errorf("visitor.Build() = %v, want %v", v.Str, tt.want[idx])
}
}
})
}
}

View file

@@ -107,7 +107,7 @@ func (b *BayesianEvent) bayesianUpdate(c *BayesianBucket, msg types.Event, l *Le
}
l.logger.Debugf("running condition expression: %s", b.rawCondition.ConditionalFilterName)
ret, err := expr.Run(b.conditionalFilterRuntime, map[string]interface{}{"evt": &msg, "queue": l.Queue, "leaky": l})
ret, err := exprhelpers.Run(b.conditionalFilterRuntime, map[string]interface{}{"evt": &msg, "queue": l.Queue, "leaky": l}, l.logger, l.BucketConfig.Debug)
if err != nil {
return fmt.Errorf("unable to run conditional filter: %s", err)
}
@@ -151,7 +151,7 @@ func (b *BayesianEvent) compileCondition() error {
conditionalExprCacheLock.Unlock()
//release the lock during compile same as coditional bucket
compiledExpr, err = expr.Compile(b.rawCondition.ConditionalFilterName, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &Queue{}, "leaky": &Leaky{}, "evt": &types.Event{}})...)
compiledExpr, err = expr.Compile(b.rawCondition.ConditionalFilterName, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &types.Queue{}, "leaky": &Leaky{}, "evt": &types.Event{}})...)
if err != nil {
return fmt.Errorf("bayesian condition compile error: %w", err)
}

View file

@@ -31,8 +31,8 @@ func NewBlackhole(bucketFactory *BucketFactory) (*Blackhole, error) {
}, nil
}
func (bl *Blackhole) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *Queue) (types.RuntimeAlert, *Queue) {
func (bl *Blackhole) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *types.Queue) (types.RuntimeAlert, *types.Queue) {
var blackholed = false
var tmp []HiddenKey
// search if we are blackholed and refresh the slice

View file

@@ -31,11 +31,11 @@ type Leaky struct {
Limiter rate.RateLimiter `json:"-"`
SerializedState rate.Lstate
//Queue is used to hold the cache of objects in the bucket, it is used to know 'how many' objects we have in buffer.
Queue *Queue
Queue *types.Queue
//Leaky buckets are receiving message through a chan
In chan *types.Event `json:"-"`
//Leaky buckets are pushing their overflows through a chan
Out chan *Queue `json:"-"`
Out chan *types.Queue `json:"-"`
// shared for all buckets (the idea is to kill this afterward)
AllOut chan types.Event `json:"-"`
//max capacity (for burst)
@@ -159,9 +159,9 @@ func FromFactory(bucketFactory BucketFactory) *Leaky {
Name: bucketFactory.Name,
Limiter: limiter,
Uuid: seed.Generate(),
Queue: NewQueue(Qsize),
Queue: types.NewQueue(Qsize),
CacheSize: bucketFactory.CacheSize,
Out: make(chan *Queue, 1),
Out: make(chan *types.Queue, 1),
Suicide: make(chan bool, 1),
AllOut: bucketFactory.ret,
Capacity: bucketFactory.Capacity,
@@ -374,7 +374,7 @@ func Pour(leaky *Leaky, msg types.Event) {
}
}
func (leaky *Leaky) overflow(ofw *Queue) {
func (leaky *Leaky) overflow(ofw *types.Queue) {
close(leaky.Signal)
alert, err := NewAlert(leaky, ofw)
if err != nil {

View file

@@ -33,7 +33,7 @@ func (c *ConditionalOverflow) OnBucketInit(g *BucketFactory) error {
} else {
conditionalExprCacheLock.Unlock()
//release the lock during compile
compiledExpr, err = expr.Compile(g.ConditionalOverflow, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &Queue{}, "leaky": &Leaky{}, "evt": &types.Event{}})...)
compiledExpr, err = expr.Compile(g.ConditionalOverflow, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &types.Queue{}, "leaky": &Leaky{}, "evt": &types.Event{}})...)
if err != nil {
return fmt.Errorf("conditional compile error : %w", err)
}
@@ -50,12 +50,14 @@ func (c *ConditionalOverflow) AfterBucketPour(b *BucketFactory) func(types.Event
var condition, ok bool
if c.ConditionalFilterRuntime != nil {
l.logger.Debugf("Running condition expression : %s", c.ConditionalFilter)
ret, err := expr.Run(c.ConditionalFilterRuntime, map[string]interface{}{"evt": &msg, "queue": l.Queue, "leaky": l})
ret, err := exprhelpers.Run(c.ConditionalFilterRuntime,
map[string]interface{}{"evt": &msg, "queue": l.Queue, "leaky": l},
l.logger, b.Debug)
if err != nil {
l.logger.Errorf("unable to run conditional filter : %s", err)
return &msg
}
l.logger.Debugf("Conditional bucket expression returned : %v", ret)
if condition, ok = ret.(bool); !ok {

View file

@@ -30,50 +30,49 @@ import (
// BucketFactory struct holds all fields for any bucket configuration. This is to have a
// generic struct for buckets. This can be seen as a bucket factory.
type BucketFactory struct {
FormatVersion string `yaml:"format"`
Author string `yaml:"author"`
Description string `yaml:"description"`
References []string `yaml:"references"`
Type string `yaml:"type"` //Type can be : leaky, counter, trigger. It determines the main bucket characteristics
Name string `yaml:"name"` //Name of the bucket, used later in log and user-messages. Should be unique
Capacity int `yaml:"capacity"` //Capacity is applicable to leaky buckets and determines the "burst" capacity
LeakSpeed string `yaml:"leakspeed"` //Leakspeed is a float representing how many events per second leak out of the bucket
Duration string `yaml:"duration"` //Duration allows 'counter' buckets to have a fixed life-time
Filter string `yaml:"filter"` //Filter is an expr that determines if an event is elligible for said bucket. Filter is evaluated against the Event struct
GroupBy string `yaml:"groupby,omitempty"` //groupy is an expr that allows to determine the partitions of the bucket. A common example is the source_ip
Distinct string `yaml:"distinct"` //Distinct, when present, adds a `Pour()` processor that will only pour uniq items (based on distinct expr result)
Debug bool `yaml:"debug"` //Debug, when set to true, will enable debugging for _this_ scenario specifically
Labels map[string]interface{} `yaml:"labels"` //Labels is K:V list aiming at providing context the overflow
Blackhole string `yaml:"blackhole,omitempty"` //Blackhole is a duration that, if present, will prevent same bucket partition to overflow more often than $duration
logger *log.Entry `yaml:"-"` //logger is bucket-specific logger (used by Debug as well)
Reprocess bool `yaml:"reprocess"` //Reprocess, if true, will for the bucket to be re-injected into processing chain
CacheSize int `yaml:"cache_size"` //CacheSize, if > 0, limits the size of in-memory cache of the bucket
Profiling bool `yaml:"profiling"` //Profiling, if true, will make the bucket record pours/overflows/etc.
OverflowFilter string `yaml:"overflow_filter"` //OverflowFilter if present, is a filter that must return true for the overflow to go through
ConditionalOverflow string `yaml:"condition"` //condition if present, is an expression that must return true for the bucket to overflow
BayesianPrior float32 `yaml:"bayesian_prior"`
BayesianThreshold float32 `yaml:"bayesian_threshold"`
BayesianConditions []RawBayesianCondition `yaml:"bayesian_conditions"` //conditions for the bayesian bucket
ScopeType types.ScopeType `yaml:"scope,omitempty"` //to enforce a different remediation than blocking an IP. Will default this to IP
BucketName string `yaml:"-"`
Filename string `yaml:"-"`
RunTimeFilter *vm.Program `json:"-"`
ExprDebugger *exprhelpers.ExprDebugger `yaml:"-" json:"-"` // used to debug expression by printing the content of each variable of the expression
RunTimeGroupBy *vm.Program `json:"-"`
Data []*types.DataSource `yaml:"data,omitempty"`
DataDir string `yaml:"-"`
CancelOnFilter string `yaml:"cancel_on,omitempty"` //a filter that, if matched, kills the bucket
leakspeed time.Duration //internal representation of `Leakspeed`
duration time.Duration //internal representation of `Duration`
ret chan types.Event //the bucket-specific output chan for overflows
processors []Processor //processors is the list of hooks for pour/overflow/create (cf. uniq, blackhole etc.)
output bool //??
ScenarioVersion string `yaml:"version,omitempty"`
hash string `yaml:"-"`
Simulated bool `yaml:"simulated"` //Set to true if the scenario instantiating the bucket was in the exclusion list
tomb *tomb.Tomb `yaml:"-"`
wgPour *sync.WaitGroup `yaml:"-"`
wgDumpState *sync.WaitGroup `yaml:"-"`
FormatVersion string `yaml:"format"`
Author string `yaml:"author"`
Description string `yaml:"description"`
References []string `yaml:"references"`
Type string `yaml:"type"` //Type can be : leaky, counter, trigger. It determines the main bucket characteristics
Name string `yaml:"name"` //Name of the bucket, used later in log and user-messages. Should be unique
Capacity int `yaml:"capacity"` //Capacity is applicable to leaky buckets and determines the "burst" capacity
LeakSpeed string `yaml:"leakspeed"` //Leakspeed is a float representing how many events per second leak out of the bucket
Duration string `yaml:"duration"` //Duration allows 'counter' buckets to have a fixed life-time
Filter string `yaml:"filter"` //Filter is an expr that determines if an event is elligible for said bucket. Filter is evaluated against the Event struct
GroupBy string `yaml:"groupby,omitempty"` //groupy is an expr that allows to determine the partitions of the bucket. A common example is the source_ip
Distinct string `yaml:"distinct"` //Distinct, when present, adds a `Pour()` processor that will only pour uniq items (based on distinct expr result)
Debug bool `yaml:"debug"` //Debug, when set to true, will enable debugging for _this_ scenario specifically
Labels map[string]interface{} `yaml:"labels"` //Labels is K:V list aiming at providing context the overflow
Blackhole string `yaml:"blackhole,omitempty"` //Blackhole is a duration that, if present, will prevent same bucket partition to overflow more often than $duration
logger *log.Entry `yaml:"-"` //logger is bucket-specific logger (used by Debug as well)
Reprocess bool `yaml:"reprocess"` //Reprocess, if true, will for the bucket to be re-injected into processing chain
CacheSize int `yaml:"cache_size"` //CacheSize, if > 0, limits the size of in-memory cache of the bucket
Profiling bool `yaml:"profiling"` //Profiling, if true, will make the bucket record pours/overflows/etc.
OverflowFilter string `yaml:"overflow_filter"` //OverflowFilter if present, is a filter that must return true for the overflow to go through
ConditionalOverflow string `yaml:"condition"` //condition if present, is an expression that must return true for the bucket to overflow
BayesianPrior float32 `yaml:"bayesian_prior"`
BayesianThreshold float32 `yaml:"bayesian_threshold"`
BayesianConditions []RawBayesianCondition `yaml:"bayesian_conditions"` //conditions for the bayesian bucket
ScopeType types.ScopeType `yaml:"scope,omitempty"` //to enforce a different remediation than blocking an IP. Will default this to IP
BucketName string `yaml:"-"`
Filename string `yaml:"-"`
RunTimeFilter *vm.Program `json:"-"`
RunTimeGroupBy *vm.Program `json:"-"`
Data []*types.DataSource `yaml:"data,omitempty"`
DataDir string `yaml:"-"`
CancelOnFilter string `yaml:"cancel_on,omitempty"` //a filter that, if matched, kills the bucket
leakspeed time.Duration //internal representation of `Leakspeed`
duration time.Duration //internal representation of `Duration`
ret chan types.Event //the bucket-specific output chan for overflows
processors []Processor //processors is the list of hooks for pour/overflow/create (cf. uniq, blackhole etc.)
output bool //??
ScenarioVersion string `yaml:"version,omitempty"`
hash string `yaml:"-"`
Simulated bool `yaml:"simulated"` //Set to true if the scenario instantiating the bucket was in the exclusion list
tomb *tomb.Tomb `yaml:"-"`
wgPour *sync.WaitGroup `yaml:"-"`
wgDumpState *sync.WaitGroup `yaml:"-"`
orderEvent bool
}
@@ -314,12 +313,6 @@ func LoadBucket(bucketFactory *BucketFactory, tomb *tomb.Tomb) error {
if err != nil {
return fmt.Errorf("invalid filter '%s' in %s : %v", bucketFactory.Filter, bucketFactory.Filename, err)
}
if bucketFactory.Debug {
bucketFactory.ExprDebugger, err = exprhelpers.NewDebugger(bucketFactory.Filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...)
if err != nil {
log.Errorf("unable to build debug filter for '%s' : %s", bucketFactory.Filter, err)
}
}
if bucketFactory.GroupBy != "" {
bucketFactory.RunTimeGroupBy, err = expr.Compile(bucketFactory.GroupBy, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...)

View file

@@ -9,11 +9,11 @@ import (
"sync"
"time"
"github.com/antonmedv/expr"
"github.com/mohae/deepcopy"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
"github.com/crowdsecurity/crowdsec/pkg/types"
)
@@ -297,7 +297,6 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
evt := deepcopy.Copy(parsed)
BucketPourCache["OK"] = append(BucketPourCache["OK"], evt.(types.Event))
}
parserEnv := map[string]interface{}{"evt": &parsed}
//find the relevant holders (scenarios)
for idx := 0; idx < len(holders); idx++ {
//for idx, holder := range holders {
@@ -305,7 +304,10 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
//evaluate bucket's condition
if holders[idx].RunTimeFilter != nil {
holders[idx].logger.Tracef("event against holder %d/%d", idx, len(holders))
output, err := expr.Run(holders[idx].RunTimeFilter, parserEnv)
output, err := exprhelpers.Run(holders[idx].RunTimeFilter,
map[string]interface{}{"evt": &parsed},
holders[idx].logger,
holders[idx].Debug)
if err != nil {
holders[idx].logger.Errorf("failed parsing : %v", err)
return false, fmt.Errorf("leaky failed : %s", err)
@ -315,10 +317,6 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
holders[idx].logger.Errorf("unexpected non-bool return : %T", output)
holders[idx].logger.Fatalf("Filter issue")
}
if holders[idx].Debug {
holders[idx].ExprDebugger.Run(holders[idx].logger, condition, parserEnv)
}
if !condition {
holders[idx].logger.Debugf("Event leaving node : ko (filter mismatch)")
continue
@ -328,7 +326,7 @@ func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buc
//groupby determines the partition key for the specific bucket
var groupby string
if holders[idx].RunTimeGroupBy != nil {
tmpGroupBy, err := expr.Run(holders[idx].RunTimeGroupBy, parserEnv)
tmpGroupBy, err := exprhelpers.Run(holders[idx].RunTimeGroupBy, map[string]interface{}{"evt": &parsed}, holders[idx].logger, holders[idx].Debug)
if err != nil {
holders[idx].logger.Errorf("failed groupby : %v", err)
return false, errors.New("leaky failed :/")
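
The calls above rely on the new exprhelpers.Run signature, which takes the caller's logger and debug flag instead of a separately compiled ExprDebugger. A minimal sketch of such a wrapper, assuming the real helper performs richer per-step tracing:

package exprhelpers

import (
	"github.com/antonmedv/expr"
	"github.com/antonmedv/expr/vm"
	log "github.com/sirupsen/logrus"
)

// Run evaluates a compiled program and, when debug is set, traces the
// evaluation through the caller's logger (sketch; the actual helper walks
// the expression and prints intermediate values).
func Run(program *vm.Program, env map[string]interface{}, logger *log.Entry, debug bool) (interface{}, error) {
	output, err := expr.Run(program, env)
	if debug {
		// logging the final result is the minimal version of that idea
		logger.Debugf("expr result: %v (%T), err: %v", output, output, err)
	}
	return output, err
}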

View file

@ -28,7 +28,7 @@ func NewOverflowFilter(g *BucketFactory) (*OverflowFilter, error) {
u := OverflowFilter{}
u.Filter = g.OverflowFilter
u.FilterRuntime, err = expr.Compile(u.Filter, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &Queue{}, "signal": &types.RuntimeAlert{}, "leaky": &Leaky{}})...)
u.FilterRuntime, err = expr.Compile(u.Filter, exprhelpers.GetExprOptions(map[string]interface{}{"queue": &types.Queue{}, "signal": &types.RuntimeAlert{}, "leaky": &Leaky{}})...)
if err != nil {
g.logger.Errorf("Unable to compile filter : %v", err)
return nil, fmt.Errorf("unable to compile filter : %v", err)
@ -36,10 +36,10 @@ func NewOverflowFilter(g *BucketFactory) (*OverflowFilter, error) {
return &u, nil
}
func (u *OverflowFilter) OnBucketOverflow(Bucket *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue) {
return func(l *Leaky, s types.RuntimeAlert, q *Queue) (types.RuntimeAlert, *Queue) {
el, err := expr.Run(u.FilterRuntime, map[string]interface{}{
"queue": q, "signal": s, "leaky": l})
func (u *OverflowFilter) OnBucketOverflow(Bucket *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
return func(l *Leaky, s types.RuntimeAlert, q *types.Queue) (types.RuntimeAlert, *types.Queue) {
el, err := exprhelpers.Run(u.FilterRuntime, map[string]interface{}{
"queue": q, "signal": s, "leaky": l}, l.logger, Bucket.Debug)
if err != nil {
l.logger.Errorf("Failed running overflow filter: %s", err)
return s, q
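
Since the overflow filter now compiles against types.Queue, a standalone check of such an expression could look like the sketch below. The expression is illustrative, and the real env also exposes the bucket itself as "leaky".

package main

import (
	"fmt"

	"github.com/antonmedv/expr"

	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
	"github.com/crowdsecurity/crowdsec/pkg/types"
)

func main() {
	env := map[string]interface{}{
		"queue":  &types.Queue{},
		"signal": &types.RuntimeAlert{},
	}
	program, err := expr.Compile(`len(queue.Queue) > 5`, exprhelpers.GetExprOptions(env)...)
	if err != nil {
		fmt.Println("compile error:", err)
		return
	}

	q := types.NewQueue(10)
	out, err := expr.Run(program, map[string]interface{}{"queue": q, "signal": &types.RuntimeAlert{}})
	fmt.Println(out, err) // false <nil> for an empty queue
}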

View file

@ -6,12 +6,12 @@ import (
"sort"
"strconv"
"github.com/antonmedv/expr"
"github.com/davecgh/go-spew/spew"
"github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/crowdsec/pkg/alertcontext"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
"github.com/crowdsecurity/crowdsec/pkg/models"
"github.com/crowdsecurity/crowdsec/pkg/types"
)
@ -50,7 +50,7 @@ func SourceFromEvent(evt types.Event, leaky *Leaky) (map[string]models.Source, e
*src.Value = v.Range
}
if leaky.scopeType.RunTimeFilter != nil {
retValue, err := expr.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt})
retValue, err := exprhelpers.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt}, leaky.logger, leaky.BucketConfig.Debug)
if err != nil {
return srcs, fmt.Errorf("while running scope filter: %w", err)
}
@ -125,7 +125,7 @@ func SourceFromEvent(evt types.Event, leaky *Leaky) (map[string]models.Source, e
} else if leaky.scopeType.Scope == types.Range {
src.Value = &src.Range
if leaky.scopeType.RunTimeFilter != nil {
retValue, err := expr.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt})
retValue, err := exprhelpers.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt}, leaky.logger, leaky.BucketConfig.Debug)
if err != nil {
return srcs, fmt.Errorf("while running scope filter: %w", err)
}
@ -142,7 +142,7 @@ func SourceFromEvent(evt types.Event, leaky *Leaky) (map[string]models.Source, e
if leaky.scopeType.RunTimeFilter == nil {
return srcs, fmt.Errorf("empty scope information")
}
retValue, err := expr.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt})
retValue, err := exprhelpers.Run(leaky.scopeType.RunTimeFilter, map[string]interface{}{"evt": &evt}, leaky.logger, leaky.BucketConfig.Debug)
if err != nil {
return srcs, fmt.Errorf("while running scope filter: %w", err)
}
@ -160,7 +160,7 @@ func SourceFromEvent(evt types.Event, leaky *Leaky) (map[string]models.Source, e
}
// EventsFromQueue iterates the queue to collect & prepare metadata for the alert
func EventsFromQueue(queue *Queue) []*models.Event {
func EventsFromQueue(queue *types.Queue) []*models.Event {
events := []*models.Event{}
@ -207,7 +207,7 @@ func EventsFromQueue(queue *Queue) []*models.Event {
}
// alertFormatSource iterates over the queue to collect sources
func alertFormatSource(leaky *Leaky, queue *Queue) (map[string]models.Source, string, error) {
func alertFormatSource(leaky *Leaky, queue *types.Queue) (map[string]models.Source, string, error) {
var sources = make(map[string]models.Source)
var source_type string
@ -233,7 +233,7 @@ func alertFormatSource(leaky *Leaky, queue *Queue) (map[string]models.Source, st
}
// NewAlert will generate a RuntimeAlert and its APIAlert(s) from a bucket that overflowed
func NewAlert(leaky *Leaky, queue *Queue) (types.RuntimeAlert, error) {
func NewAlert(leaky *Leaky, queue *types.Queue) (types.RuntimeAlert, error) {
var runtimeAlert types.RuntimeAlert
leaky.logger.Tracef("Overflow (start: %s, end: %s)", leaky.First_ts, leaky.Ovflw_ts)

View file

@ -5,7 +5,7 @@ import "github.com/crowdsecurity/crowdsec/pkg/types"
type Processor interface {
OnBucketInit(Bucket *BucketFactory) error
OnBucketPour(Bucket *BucketFactory) func(types.Event, *Leaky) *types.Event
OnBucketOverflow(Bucket *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue)
OnBucketOverflow(Bucket *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue)
AfterBucketPour(Bucket *BucketFactory) func(types.Event, *Leaky) *types.Event
}
@ -23,8 +23,8 @@ func (d *DumbProcessor) OnBucketPour(bucketFactory *BucketFactory) func(types.Ev
}
}
func (d *DumbProcessor) OnBucketOverflow(b *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *Queue) (types.RuntimeAlert, *Queue) {
func (d *DumbProcessor) OnBucketOverflow(b *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *types.Queue) (types.RuntimeAlert, *types.Queue) {
return alert, queue
}
}
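
Any custom processor now has to accept *types.Queue in its overflow hook. A no-op implementation of the updated interface, mirroring DumbProcessor and assumed to live in the leakybucket package, would look like this:

package leakybucket

import "github.com/crowdsecurity/crowdsec/pkg/types"

// NoopProcessor is an illustrative processor that satisfies the updated
// interface and passes events and alerts through untouched.
type NoopProcessor struct{}

func (n *NoopProcessor) OnBucketInit(bucketFactory *BucketFactory) error { return nil }

func (n *NoopProcessor) OnBucketPour(bucketFactory *BucketFactory) func(types.Event, *Leaky) *types.Event {
	return func(msg types.Event, leaky *Leaky) *types.Event { return &msg }
}

func (n *NoopProcessor) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
	return func(leaky *Leaky, alert types.RuntimeAlert, queue *types.Queue) (types.RuntimeAlert, *types.Queue) {
		return alert, queue
	}
}

func (n *NoopProcessor) AfterBucketPour(bucketFactory *BucketFactory) func(types.Event, *Leaky) *types.Event {
	return func(msg types.Event, leaky *Leaky) *types.Event { return &msg }
}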

View file

@ -19,14 +19,13 @@ import (
// Thus, if the bucket receives a request that matches fetching a static resource (here css), it cancels itself
type CancelOnFilter struct {
CancelOnFilter *vm.Program
CancelOnFilterDebug *exprhelpers.ExprDebugger
CancelOnFilter *vm.Program
Debug bool
}
var cancelExprCacheLock sync.Mutex
var cancelExprCache map[string]struct {
CancelOnFilter *vm.Program
CancelOnFilterDebug *exprhelpers.ExprDebugger
CancelOnFilter *vm.Program
}
func (u *CancelOnFilter) OnBucketPour(bucketFactory *BucketFactory) func(types.Event, *Leaky) *types.Event {
@ -34,15 +33,11 @@ func (u *CancelOnFilter) OnBucketPour(bucketFactory *BucketFactory) func(types.E
var condition, ok bool
if u.CancelOnFilter != nil {
leaky.logger.Tracef("running cancel_on filter")
output, err := expr.Run(u.CancelOnFilter, map[string]interface{}{"evt": &msg})
output, err := exprhelpers.Run(u.CancelOnFilter, map[string]interface{}{"evt": &msg}, leaky.logger, u.Debug)
if err != nil {
leaky.logger.Warningf("cancel_on error : %s", err)
return &msg
}
//only run debugger expression if condition is false
if u.CancelOnFilterDebug != nil {
u.CancelOnFilterDebug.Run(leaky.logger, condition, map[string]interface{}{"evt": &msg})
}
if condition, ok = output.(bool); !ok {
leaky.logger.Warningf("cancel_on, unexpected non-bool return : %T", output)
return &msg
@ -58,8 +53,8 @@ func (u *CancelOnFilter) OnBucketPour(bucketFactory *BucketFactory) func(types.E
}
}
func (u *CancelOnFilter) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *Queue) (types.RuntimeAlert, *Queue) {
func (u *CancelOnFilter) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *types.Queue) (types.RuntimeAlert, *types.Queue) {
return alert, queue
}
}
@ -73,14 +68,12 @@ func (u *CancelOnFilter) AfterBucketPour(bucketFactory *BucketFactory) func(type
func (u *CancelOnFilter) OnBucketInit(bucketFactory *BucketFactory) error {
var err error
var compiledExpr struct {
CancelOnFilter *vm.Program
CancelOnFilterDebug *exprhelpers.ExprDebugger
CancelOnFilter *vm.Program
}
if cancelExprCache == nil {
cancelExprCache = make(map[string]struct {
CancelOnFilter *vm.Program
CancelOnFilterDebug *exprhelpers.ExprDebugger
CancelOnFilter *vm.Program
})
}
@ -88,7 +81,6 @@ func (u *CancelOnFilter) OnBucketInit(bucketFactory *BucketFactory) error {
if compiled, ok := cancelExprCache[bucketFactory.CancelOnFilter]; ok {
cancelExprCacheLock.Unlock()
u.CancelOnFilter = compiled.CancelOnFilter
u.CancelOnFilterDebug = compiled.CancelOnFilterDebug
return nil
} else {
cancelExprCacheLock.Unlock()
@ -101,13 +93,7 @@ func (u *CancelOnFilter) OnBucketInit(bucketFactory *BucketFactory) error {
}
u.CancelOnFilter = compiledExpr.CancelOnFilter
if bucketFactory.Debug {
compiledExpr.CancelOnFilterDebug, err = exprhelpers.NewDebugger(bucketFactory.CancelOnFilter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...,
)
if err != nil {
bucketFactory.logger.Errorf("reset_filter debug error : %s", err)
return err
}
u.CancelOnFilterDebug = compiledExpr.CancelOnFilterDebug
u.Debug = true
}
cancelExprCacheLock.Lock()
cancelExprCache[bucketFactory.CancelOnFilter] = compiledExpr
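
The cancel_on cache above follows a compile-once pattern; stripped of the bucket-specific types it boils down to the sketch below (names are illustrative). As in the original, a duplicate compilation can happen if two callers race past the first lookup, which is harmless since both results are identical.

package filtercache

import (
	"sync"

	"github.com/antonmedv/expr"
	"github.com/antonmedv/expr/vm"
)

var (
	cacheLock sync.Mutex
	cache     = map[string]*vm.Program{}
)

// compileCached returns the cached program for src, compiling and storing it
// on first use so identical filters across buckets share one *vm.Program.
func compileCached(src string) (*vm.Program, error) {
	cacheLock.Lock()
	if prog, ok := cache[src]; ok {
		cacheLock.Unlock()
		return prog, nil
	}
	cacheLock.Unlock()

	prog, err := expr.Compile(src)
	if err != nil {
		return nil, err
	}

	cacheLock.Lock()
	cache[src] = prog
	cacheLock.Unlock()
	return prog, nil
}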

View file

@ -47,8 +47,8 @@ func (u *Uniq) OnBucketPour(bucketFactory *BucketFactory) func(types.Event, *Lea
}
}
func (u *Uniq) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *Queue) (types.RuntimeAlert, *Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *Queue) (types.RuntimeAlert, *Queue) {
func (u *Uniq) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky, types.RuntimeAlert, *types.Queue) (types.RuntimeAlert, *types.Queue) {
return func(leaky *Leaky, alert types.RuntimeAlert, queue *types.Queue) (types.RuntimeAlert, *types.Queue) {
return alert, queue
}
}

View file

@ -42,9 +42,8 @@ type Node struct {
rn string //used for debug only: a randomly generated name for each node
//Filter is executed at runtime (with current log line as context)
//and must succeed or node is exited
Filter string `yaml:"filter,omitempty"`
RunTimeFilter *vm.Program `yaml:"-" json:"-"` //the actual compiled filter
ExprDebugger *exprhelpers.ExprDebugger `yaml:"-" json:"-"` //used to debug expression by printing the content of each variable of the expression
Filter string `yaml:"filter,omitempty"`
RunTimeFilter *vm.Program `yaml:"-" json:"-"` //the actual compiled filter
//If the node has leaves, execute all of them until one asks for a 'break'
LeavesNodes []Node `yaml:"nodes,omitempty"`
//Flag used to describe when to 'break' or return an 'error'
@ -141,7 +140,7 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
clog.Tracef("Event entering node")
if n.RunTimeFilter != nil {
//Evaluate node's filter
output, err := expr.Run(n.RunTimeFilter, cachedExprEnv)
output, err := exprhelpers.Run(n.RunTimeFilter, cachedExprEnv, clog, n.Debug)
if err != nil {
clog.Warningf("failed to run filter : %v", err)
clog.Debugf("Event leaving node : ko")
@ -150,9 +149,6 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
switch out := output.(type) {
case bool:
if n.Debug {
n.ExprDebugger.Run(clog, out, cachedExprEnv)
}
if !out {
clog.Debugf("Event leaving node : ko (failed filter)")
return false, nil
@ -180,7 +176,6 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
// Previous code returned nil if there was an error, so we keep this behavior
return false, nil //nolint:nilerr
}
if isWhitelisted && !p.Whitelisted {
p.Whitelisted = true
p.WhitelistReason = n.Whitelist.Reason
@ -211,7 +206,7 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
NodeState = false
}
} else if n.Grok.RunTimeValue != nil {
output, err := expr.Run(n.Grok.RunTimeValue, cachedExprEnv)
output, err := exprhelpers.Run(n.Grok.RunTimeValue, cachedExprEnv, clog, n.Debug)
if err != nil {
clog.Warningf("failed to run RunTimeValue : %v", err)
NodeState = false
@ -270,7 +265,7 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
continue
}
//collect the data
output, err := expr.Run(stash.ValueExpression, cachedExprEnv)
output, err := exprhelpers.Run(stash.ValueExpression, cachedExprEnv, clog, n.Debug)
if err != nil {
clog.Warningf("Error while running stash val expression : %v", err)
}
@ -284,7 +279,7 @@ func (n *Node) process(p *types.Event, ctx UnixParserCtx, expressionEnv map[stri
}
//collect the key
output, err = expr.Run(stash.KeyExpression, cachedExprEnv)
output, err = exprhelpers.Run(stash.KeyExpression, cachedExprEnv, clog, n.Debug)
if err != nil {
clog.Warningf("Error while running stash key expression : %v", err)
}
@ -421,14 +416,6 @@ func (n *Node) compile(pctx *UnixParserCtx, ectx EnricherCtx) error {
if err != nil {
return fmt.Errorf("compilation of '%s' failed: %v", n.Filter, err)
}
if n.Debug {
n.ExprDebugger, err = exprhelpers.NewDebugger(n.Filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...)
if err != nil {
log.Errorf("unable to build debug filter for '%s' : %s", n.Filter, err)
}
}
}
/* handle pattern_syntax and groks */
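
Enforcing a boolean result from the filter is a recurring pattern in the parser; factored out, it amounts to the sketch below (function and package names are hypothetical).

package parser

import (
	"fmt"

	"github.com/antonmedv/expr/vm"
	log "github.com/sirupsen/logrus"

	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
)

// filterMatches evaluates a node filter and enforces a boolean result, the
// same contract process() applies above.
func filterMatches(program *vm.Program, env map[string]interface{}, logger *log.Entry, debug bool) (bool, error) {
	output, err := exprhelpers.Run(program, env, logger, debug)
	if err != nil {
		return false, fmt.Errorf("while running filter: %w", err)
	}
	match, ok := output.(bool)
	if !ok {
		return false, fmt.Errorf("filter must return a boolean, got %T", output)
	}
	return match, nil
}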

View file

@ -14,11 +14,11 @@ import (
"sync"
"time"
"github.com/antonmedv/expr"
"github.com/mohae/deepcopy"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
"github.com/crowdsecurity/crowdsec/pkg/types"
)
@ -117,7 +117,7 @@ func (n *Node) ProcessStatics(statics []ExtraField, event *types.Event) error {
if static.Value != "" {
value = static.Value
} else if static.RunTimeValue != nil {
output, err := expr.Run(static.RunTimeValue, map[string]interface{}{"evt": event})
output, err := exprhelpers.Run(static.RunTimeValue, map[string]interface{}{"evt": event}, clog, n.Debug)
if err != nil {
clog.Warningf("failed to run RunTimeValue : %v", err)
continue

View file

@ -6,7 +6,6 @@ import (
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/vm"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
"github.com/crowdsecurity/crowdsec/pkg/types"
)
@ -22,8 +21,7 @@ type Whitelist struct {
}
type ExprWhitelist struct {
Filter *vm.Program
ExprDebugger *exprhelpers.ExprDebugger // used to debug expression by printing the content of each variable of the expression
Filter *vm.Program
}
func (n *Node) ContainsWLs() bool {
@ -79,7 +77,8 @@ func (n *Node) CheckExprWL(cachedExprEnv map[string]interface{}) (bool, error) {
if isWhitelisted {
break
}
output, err := expr.Run(e.Filter, cachedExprEnv)
output, err := exprhelpers.Run(e.Filter, cachedExprEnv, n.Logger, n.Debug)
if err != nil {
n.Logger.Warningf("failed to run whitelist expr : %v", err)
n.Logger.Debug("Event leaving node : ko")
@ -87,9 +86,6 @@ func (n *Node) CheckExprWL(cachedExprEnv map[string]interface{}) (bool, error) {
}
switch out := output.(type) {
case bool:
if n.Debug {
e.ExprDebugger.Run(n.Logger, out, cachedExprEnv)
}
if out {
n.Logger.Debugf("Event is whitelisted by expr, reason [%s]", n.Whitelist.Reason)
isWhitelisted = true
@ -123,11 +119,6 @@ func (n *Node) CompileWLs() (bool, error) {
if err != nil {
return false, fmt.Errorf("unable to compile whitelist expression '%s' : %v", filter, err)
}
expression.ExprDebugger, err = exprhelpers.NewDebugger(filter, exprhelpers.GetExprOptions(map[string]interface{}{"evt": &types.Event{}})...)
if err != nil {
n.Logger.Errorf("unable to build debug filter for '%s' : %s", filter, err)
}
n.Whitelist.B_Exprs = append(n.Whitelist.B_Exprs, expression)
n.Logger.Debugf("adding expression %s to whitelists", filter)
}

View file

@ -1,13 +1,12 @@
package leakybucket
package types
import (
"github.com/crowdsecurity/crowdsec/pkg/types"
log "github.com/sirupsen/logrus"
)
// Queue holds a limited size queue
type Queue struct {
Queue []types.Event
Queue []Event
L int //capacity
}
@ -15,12 +14,12 @@ type Queue struct {
func NewQueue(l int) *Queue {
if l == -1 {
return &Queue{
Queue: make([]types.Event, 0),
Queue: make([]Event, 0),
L: int(^uint(0) >> 1), // max integer value, architecture independent
}
}
q := &Queue{
Queue: make([]types.Event, 0, l),
Queue: make([]Event, 0, l),
L: l,
}
log.WithFields(log.Fields{"Capacity": q.L}).Debugf("Creating queue")
@ -29,7 +28,7 @@ func NewQueue(l int) *Queue {
// Add an event to the queue. If it already has l elements, the first
// element is dropped before adding the new element m
func (q *Queue) Add(m types.Event) {
func (q *Queue) Add(m Event) {
for len(q.Queue) > q.L { //we allow adding one element more than the nominal capacity
q.Queue = q.Queue[1:]
}
@ -37,6 +36,6 @@ func (q *Queue) Add(m types.Event) {
}
// GetQueue returns the entire queue
func (q *Queue) GetQueue() []types.Event {
func (q *Queue) GetQueue() []Event {
return q.Queue
}
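
With the queue relocated to pkg/types, existing callers only change their import; basic usage stays the same (illustrative):

package main

import (
	"fmt"

	"github.com/crowdsecurity/crowdsec/pkg/types"
)

func main() {
	q := types.NewQueue(2) // capacity of 2, but Add tolerates one extra element
	for i := 0; i < 4; i++ {
		q.Add(types.Event{})
	}
	fmt.Println(len(q.GetQueue())) // 3: the oldest event was dropped on the last Add
}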