mirror of https://github.com/grafana/loki
LogQL: Pattern Parser (#3837)
* The beginning of a fun story. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Working on adding ragel. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Adding AST parsing with Yacc and Ragel. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Got a pattern parser working. Reworking ast to works with bytes and not runes. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Setup tests and the matches algorithm. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * moar tests case. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add some validation for the pattern expression. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Hooking to LogQL + performance boost. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Adds documentation Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Improve bound check. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Removes generated files from being linted. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Update docs/sources/logql/_index.md Co-authored-by: Danny Kopping <dannykopping@gmail.com> * Update docs/sources/logql/_index.md Co-authored-by: Danny Kopping <dannykopping@gmail.com> * Review feedback Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Update docs/sources/logql/_index.md Co-authored-by: Danny Kopping <dannykopping@gmail.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md 
Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Update docs/sources/logql/_index.md Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com> * Docs suggestions Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> Co-authored-by: Danny Kopping <dannykopping@gmail.com> Co-authored-by: Karen Miller <84039272+KMiller-Grafana@users.noreply.github.com>pull/3860/head
parent
6d026d211d
commit
59bb6d3fba
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,83 @@ |
||||
package pattern |
||||
|
||||
import ( |
||||
"fmt" |
||||
"unicode/utf8" |
||||
) |
||||
|
||||
// node is a single element of a parsed pattern expression: either a
// capture (e.g. <foo>) or a run of literal bytes. Implementations only
// need to render themselves back to a string.
type node interface {
	fmt.Stringer
}

// expr is a complete parsed pattern expression: the ordered sequence of
// nodes produced by the yacc parser.
type expr []node
||||
|
||||
func (e expr) hasCapture() bool { |
||||
return e.captureCount() != 0 |
||||
} |
||||
|
||||
func (e expr) validate() error { |
||||
if !e.hasCapture() { |
||||
return ErrNoCapture |
||||
} |
||||
// if there is at least 2 node, verify that none are consecutive.
|
||||
if len(e) >= 2 { |
||||
for i := 0; i < len(e); i = i + 2 { |
||||
if i+1 >= len(e) { |
||||
break |
||||
} |
||||
if _, ok := e[i].(capture); ok { |
||||
if _, ok := e[i+1].(capture); ok { |
||||
return fmt.Errorf("found consecutive capture: %w", ErrInvalidExpr) |
||||
} |
||||
} |
||||
} |
||||
} |
||||
caps := e.captures() |
||||
uniq := map[string]struct{}{} |
||||
for _, c := range caps { |
||||
if _, ok := uniq[c]; ok { |
||||
return fmt.Errorf("duplicate capture name (%s): %w", c, ErrInvalidExpr) |
||||
} |
||||
uniq[c] = struct{}{} |
||||
} |
||||
return nil |
||||
} |
||||
|
||||
func (e expr) captures() (captures []string) { |
||||
for _, n := range e { |
||||
if c, ok := n.(capture); ok && !c.isUnamed() { |
||||
captures = append(captures, c.String()) |
||||
} |
||||
} |
||||
return |
||||
} |
||||
|
||||
func (e expr) captureCount() (count int) { |
||||
return len(e.captures()) |
||||
} |
||||
|
||||
// capture is a named placeholder in a pattern, e.g. <foo>. The value is
// the bare name without the surrounding angle brackets.
type capture string

// String returns the capture name.
func (c capture) String() string {
	return string(c)
}

// isUnamed reports whether this is the anonymous capture <_>, whose
// matched bytes are discarded.
// NOTE(review): "isUnamed" is a typo for "isUnnamed", kept because the
// name is referenced elsewhere in the package.
func (c capture) isUnamed() bool {
	return string(c) == underscore
}
||||
|
||||
// literals is a run of raw bytes that must match the input verbatim.
type literals []byte

// String returns the literal bytes as a string.
func (l literals) String() string {
	return string(l)
}

// runesToLiterals UTF-8 encodes the given runes into a literals value.
func runesToLiterals(rs []rune) literals {
	buf := make([]byte, 0, len(rs)*utf8.UTFMax)
	var scratch [utf8.UTFMax]byte
	for _, r := range rs {
		n := utf8.EncodeRune(scratch[:], r)
		buf = append(buf, scratch[:n]...)
	}
	return buf
}
||||
@ -0,0 +1,45 @@ |
||||
%{
// Yacc grammar for LogQL pattern expressions (e.g. `<ip> - <user>`).
// Compiled with goyacc (prefix "expr") into expr.y.go.
package pattern
%}

%union{
  Expr []node
  Node node

  literal rune
  Literals []rune
  str string
  token int
}

%start root

%type <Expr> expr
%type <Node> node
%type <Literals> literals

%token <str> IDENTIFIER
%token <literal> LITERAL
%token <token> LESS_THAN MORE_THAN UNDERSCORE

%%

// The parsed node list is stored on the lexer for parseExpr to pick up.
root:
      expr { exprlex.(*lexer).expr = $1 };

// A pattern is a non-empty sequence of nodes.
expr:
      node { $$ = []node{$1} }
    | expr node { $$ = append($1, $2) }
    ;

// A node is either a capture (<name>) or a run of literal characters.
node:
      IDENTIFIER { $$ = capture($1) }
    | literals { $$ = runesToLiterals($1) }
    ;

// Consecutive LITERAL tokens are merged into one literals run.
literals:
      LITERAL { $$ = []rune{$1} }
    | literals LITERAL { $$ = append($1, $2) }
%%
||||
@ -0,0 +1,466 @@ |
||||
// Code generated by goyacc -p expr -o pkg/logql/log/pattern/expr.y.go pkg/logql/log/pattern/expr.y. DO NOT EDIT.
|
||||
|
||||
|
||||
package pattern |
||||
|
||||
import __yyfmt__ "fmt" |
||||
|
||||
|
||||
type exprSymType struct { |
||||
yys int |
||||
Expr []node |
||||
Node node |
||||
|
||||
literal rune |
||||
Literals []rune |
||||
str string |
||||
token int |
||||
} |
||||
|
||||
const IDENTIFIER = 57346 |
||||
const LITERAL = 57347 |
||||
const LESS_THAN = 57348 |
||||
const MORE_THAN = 57349 |
||||
const UNDERSCORE = 57350 |
||||
|
||||
var exprToknames = [...]string{ |
||||
"$end", |
||||
"error", |
||||
"$unk", |
||||
"IDENTIFIER", |
||||
"LITERAL", |
||||
"LESS_THAN", |
||||
"MORE_THAN", |
||||
"UNDERSCORE", |
||||
} |
||||
var exprStatenames = [...]string{} |
||||
|
||||
const exprEofCode = 1 |
||||
const exprErrCode = 2 |
||||
const exprInitialStackSize = 16 |
||||
|
||||
|
||||
var exprExca = [...]int{ |
||||
-1, 1, |
||||
1, -1, |
||||
-2, 0, |
||||
} |
||||
|
||||
const exprPrivate = 57344 |
||||
|
||||
const exprLast = 8 |
||||
|
||||
var exprAct = [...]int{ |
||||
|
||||
4, 6, 8, 3, 5, 2, 7, 1, |
||||
} |
||||
var exprPact = [...]int{ |
||||
|
||||
-4, -1000, -4, -1000, -1000, -3, -1000, -1000, -1000, |
||||
} |
||||
var exprPgo = [...]int{ |
||||
|
||||
0, 7, 5, 3, 4, |
||||
} |
||||
var exprR1 = [...]int{ |
||||
|
||||
0, 1, 2, 2, 3, 3, 4, 4, |
||||
} |
||||
var exprR2 = [...]int{ |
||||
|
||||
0, 1, 1, 2, 1, 1, 1, 2, |
||||
} |
||||
var exprChk = [...]int{ |
||||
|
||||
-1000, -1, -2, -3, 4, -4, 5, -3, 5, |
||||
} |
||||
var exprDef = [...]int{ |
||||
|
||||
0, -2, 1, 2, 4, 5, 6, 3, 7, |
||||
} |
||||
var exprTok1 = [...]int{ |
||||
|
||||
1, |
||||
} |
||||
var exprTok2 = [...]int{ |
||||
|
||||
2, 3, 4, 5, 6, 7, 8, |
||||
} |
||||
var exprTok3 = [...]int{ |
||||
0, |
||||
} |
||||
|
||||
var exprErrorMessages = [...]struct { |
||||
state int |
||||
token int |
||||
msg string |
||||
}{} |
||||
|
||||
|
||||
/* parser for yacc output */ |
||||
|
||||
var ( |
||||
exprDebug = 0 |
||||
exprErrorVerbose = false |
||||
) |
||||
|
||||
type exprLexer interface { |
||||
Lex(lval *exprSymType) int |
||||
Error(s string) |
||||
} |
||||
|
||||
type exprParser interface { |
||||
Parse(exprLexer) int |
||||
Lookahead() int |
||||
} |
||||
|
||||
type exprParserImpl struct { |
||||
lval exprSymType |
||||
stack [exprInitialStackSize]exprSymType |
||||
char int |
||||
} |
||||
|
||||
func (p *exprParserImpl) Lookahead() int { |
||||
return p.char |
||||
} |
||||
|
||||
func exprNewParser() exprParser { |
||||
return &exprParserImpl{} |
||||
} |
||||
|
||||
const exprFlag = -1000 |
||||
|
||||
func exprTokname(c int) string { |
||||
if c >= 1 && c-1 < len(exprToknames) { |
||||
if exprToknames[c-1] != "" { |
||||
return exprToknames[c-1] |
||||
} |
||||
} |
||||
return __yyfmt__.Sprintf("tok-%v", c) |
||||
} |
||||
|
||||
func exprStatname(s int) string { |
||||
if s >= 0 && s < len(exprStatenames) { |
||||
if exprStatenames[s] != "" { |
||||
return exprStatenames[s] |
||||
} |
||||
} |
||||
return __yyfmt__.Sprintf("state-%v", s) |
||||
} |
||||
|
||||
func exprErrorMessage(state, lookAhead int) string { |
||||
const TOKSTART = 4 |
||||
|
||||
if !exprErrorVerbose { |
||||
return "syntax error" |
||||
} |
||||
|
||||
for _, e := range exprErrorMessages { |
||||
if e.state == state && e.token == lookAhead { |
||||
return "syntax error: " + e.msg |
||||
} |
||||
} |
||||
|
||||
res := "syntax error: unexpected " + exprTokname(lookAhead) |
||||
|
||||
// To match Bison, suggest at most four expected tokens.
|
||||
expected := make([]int, 0, 4) |
||||
|
||||
// Look for shiftable tokens.
|
||||
base := exprPact[state] |
||||
for tok := TOKSTART; tok-1 < len(exprToknames); tok++ { |
||||
if n := base + tok; n >= 0 && n < exprLast && exprChk[exprAct[n]] == tok { |
||||
if len(expected) == cap(expected) { |
||||
return res |
||||
} |
||||
expected = append(expected, tok) |
||||
} |
||||
} |
||||
|
||||
if exprDef[state] == -2 { |
||||
i := 0 |
||||
for exprExca[i] != -1 || exprExca[i+1] != state { |
||||
i += 2 |
||||
} |
||||
|
||||
// Look for tokens that we accept or reduce.
|
||||
for i += 2; exprExca[i] >= 0; i += 2 { |
||||
tok := exprExca[i] |
||||
if tok < TOKSTART || exprExca[i+1] == 0 { |
||||
continue |
||||
} |
||||
if len(expected) == cap(expected) { |
||||
return res |
||||
} |
||||
expected = append(expected, tok) |
||||
} |
||||
|
||||
// If the default action is to accept or reduce, give up.
|
||||
if exprExca[i+1] != 0 { |
||||
return res |
||||
} |
||||
} |
||||
|
||||
for i, tok := range expected { |
||||
if i == 0 { |
||||
res += ", expecting " |
||||
} else { |
||||
res += " or " |
||||
} |
||||
res += exprTokname(tok) |
||||
} |
||||
return res |
||||
} |
||||
|
||||
func exprlex1(lex exprLexer, lval *exprSymType) (char, token int) { |
||||
token = 0 |
||||
char = lex.Lex(lval) |
||||
if char <= 0 { |
||||
token = exprTok1[0] |
||||
goto out |
||||
} |
||||
if char < len(exprTok1) { |
||||
token = exprTok1[char] |
||||
goto out |
||||
} |
||||
if char >= exprPrivate { |
||||
if char < exprPrivate+len(exprTok2) { |
||||
token = exprTok2[char-exprPrivate] |
||||
goto out |
||||
} |
||||
} |
||||
for i := 0; i < len(exprTok3); i += 2 { |
||||
token = exprTok3[i+0] |
||||
if token == char { |
||||
token = exprTok3[i+1] |
||||
goto out |
||||
} |
||||
} |
||||
|
||||
out: |
||||
if token == 0 { |
||||
token = exprTok2[1] /* unknown char */ |
||||
} |
||||
if exprDebug >= 3 { |
||||
__yyfmt__.Printf("lex %s(%d)\n", exprTokname(token), uint(char)) |
||||
} |
||||
return char, token |
||||
} |
||||
|
||||
func exprParse(exprlex exprLexer) int { |
||||
return exprNewParser().Parse(exprlex) |
||||
} |
||||
|
||||
func (exprrcvr *exprParserImpl) Parse(exprlex exprLexer) int { |
||||
var exprn int |
||||
var exprVAL exprSymType |
||||
var exprDollar []exprSymType |
||||
_ = exprDollar // silence set and not used
|
||||
exprS := exprrcvr.stack[:] |
||||
|
||||
Nerrs := 0 /* number of errors */ |
||||
Errflag := 0 /* error recovery flag */ |
||||
exprstate := 0 |
||||
exprrcvr.char = -1 |
||||
exprtoken := -1 // exprrcvr.char translated into internal numbering
|
||||
defer func() { |
||||
// Make sure we report no lookahead when not parsing.
|
||||
exprstate = -1 |
||||
exprrcvr.char = -1 |
||||
exprtoken = -1 |
||||
}() |
||||
exprp := -1 |
||||
goto exprstack |
||||
|
||||
ret0: |
||||
return 0 |
||||
|
||||
ret1: |
||||
return 1 |
||||
|
||||
exprstack: |
||||
/* put a state and value onto the stack */ |
||||
if exprDebug >= 4 { |
||||
__yyfmt__.Printf("char %v in %v\n", exprTokname(exprtoken), exprStatname(exprstate)) |
||||
} |
||||
|
||||
exprp++ |
||||
if exprp >= len(exprS) { |
||||
nyys := make([]exprSymType, len(exprS)*2) |
||||
copy(nyys, exprS) |
||||
exprS = nyys |
||||
} |
||||
exprS[exprp] = exprVAL |
||||
exprS[exprp].yys = exprstate |
||||
|
||||
exprnewstate: |
||||
exprn = exprPact[exprstate] |
||||
if exprn <= exprFlag { |
||||
goto exprdefault /* simple state */ |
||||
} |
||||
if exprrcvr.char < 0 { |
||||
exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval) |
||||
} |
||||
exprn += exprtoken |
||||
if exprn < 0 || exprn >= exprLast { |
||||
goto exprdefault |
||||
} |
||||
exprn = exprAct[exprn] |
||||
if exprChk[exprn] == exprtoken { /* valid shift */ |
||||
exprrcvr.char = -1 |
||||
exprtoken = -1 |
||||
exprVAL = exprrcvr.lval |
||||
exprstate = exprn |
||||
if Errflag > 0 { |
||||
Errflag-- |
||||
} |
||||
goto exprstack |
||||
} |
||||
|
||||
exprdefault: |
||||
/* default state action */ |
||||
exprn = exprDef[exprstate] |
||||
if exprn == -2 { |
||||
if exprrcvr.char < 0 { |
||||
exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval) |
||||
} |
||||
|
||||
/* look through exception table */ |
||||
xi := 0 |
||||
for { |
||||
if exprExca[xi+0] == -1 && exprExca[xi+1] == exprstate { |
||||
break |
||||
} |
||||
xi += 2 |
||||
} |
||||
for xi += 2; ; xi += 2 { |
||||
exprn = exprExca[xi+0] |
||||
if exprn < 0 || exprn == exprtoken { |
||||
break |
||||
} |
||||
} |
||||
exprn = exprExca[xi+1] |
||||
if exprn < 0 { |
||||
goto ret0 |
||||
} |
||||
} |
||||
if exprn == 0 { |
||||
/* error ... attempt to resume parsing */ |
||||
switch Errflag { |
||||
case 0: /* brand new error */ |
||||
exprlex.Error(exprErrorMessage(exprstate, exprtoken)) |
||||
Nerrs++ |
||||
if exprDebug >= 1 { |
||||
__yyfmt__.Printf("%s", exprStatname(exprstate)) |
||||
__yyfmt__.Printf(" saw %s\n", exprTokname(exprtoken)) |
||||
} |
||||
fallthrough |
||||
|
||||
case 1, 2: /* incompletely recovered error ... try again */ |
||||
Errflag = 3 |
||||
|
||||
/* find a state where "error" is a legal shift action */ |
||||
for exprp >= 0 { |
||||
exprn = exprPact[exprS[exprp].yys] + exprErrCode |
||||
if exprn >= 0 && exprn < exprLast { |
||||
exprstate = exprAct[exprn] /* simulate a shift of "error" */ |
||||
if exprChk[exprstate] == exprErrCode { |
||||
goto exprstack |
||||
} |
||||
} |
||||
|
||||
/* the current p has no shift on "error", pop stack */ |
||||
if exprDebug >= 2 { |
||||
__yyfmt__.Printf("error recovery pops state %d\n", exprS[exprp].yys) |
||||
} |
||||
exprp-- |
||||
} |
||||
/* there is no state on the stack with an error shift ... abort */ |
||||
goto ret1 |
||||
|
||||
case 3: /* no shift yet; clobber input char */ |
||||
if exprDebug >= 2 { |
||||
__yyfmt__.Printf("error recovery discards %s\n", exprTokname(exprtoken)) |
||||
} |
||||
if exprtoken == exprEofCode { |
||||
goto ret1 |
||||
} |
||||
exprrcvr.char = -1 |
||||
exprtoken = -1 |
||||
goto exprnewstate /* try again in the same state */ |
||||
} |
||||
} |
||||
|
||||
/* reduction by production exprn */ |
||||
if exprDebug >= 2 { |
||||
__yyfmt__.Printf("reduce %v in:\n\t%v\n", exprn, exprStatname(exprstate)) |
||||
} |
||||
|
||||
exprnt := exprn |
||||
exprpt := exprp |
||||
_ = exprpt // guard against "declared and not used"
|
||||
|
||||
exprp -= exprR2[exprn] |
||||
// exprp is now the index of $0. Perform the default action. Iff the
|
||||
// reduced production is ε, $1 is possibly out of range.
|
||||
if exprp+1 >= len(exprS) { |
||||
nyys := make([]exprSymType, len(exprS)*2) |
||||
copy(nyys, exprS) |
||||
exprS = nyys |
||||
} |
||||
exprVAL = exprS[exprp+1] |
||||
|
||||
/* consult goto table to find next state */ |
||||
exprn = exprR1[exprn] |
||||
exprg := exprPgo[exprn] |
||||
exprj := exprg + exprS[exprp].yys + 1 |
||||
|
||||
if exprj >= exprLast { |
||||
exprstate = exprAct[exprg] |
||||
} else { |
||||
exprstate = exprAct[exprj] |
||||
if exprChk[exprstate] != -exprn { |
||||
exprstate = exprAct[exprg] |
||||
} |
||||
} |
||||
// dummy call; replaced with literal code
|
||||
switch exprnt { |
||||
|
||||
case 1: |
||||
exprDollar = exprS[exprpt-1 : exprpt+1] |
||||
{ |
||||
exprlex.(*lexer).expr = exprDollar[1].Expr |
||||
} |
||||
case 2: |
||||
exprDollar = exprS[exprpt-1 : exprpt+1] |
||||
{ |
||||
exprVAL.Expr = []node{exprDollar[1].Node} |
||||
} |
||||
case 3: |
||||
exprDollar = exprS[exprpt-2 : exprpt+1] |
||||
{ |
||||
exprVAL.Expr = append(exprDollar[1].Expr, exprDollar[2].Node) |
||||
} |
||||
case 4: |
||||
exprDollar = exprS[exprpt-1 : exprpt+1] |
||||
{ |
||||
exprVAL.Node = capture(exprDollar[1].str) |
||||
} |
||||
case 5: |
||||
exprDollar = exprS[exprpt-1 : exprpt+1] |
||||
{ |
||||
exprVAL.Node = runesToLiterals(exprDollar[1].Literals) |
||||
} |
||||
case 6: |
||||
exprDollar = exprS[exprpt-1 : exprpt+1] |
||||
{ |
||||
exprVAL.Literals = []rune{exprDollar[1].literal} |
||||
} |
||||
case 7: |
||||
exprDollar = exprS[exprpt-2 : exprpt+1] |
||||
{ |
||||
exprVAL.Literals = append(exprDollar[1].Literals, exprDollar[2].literal) |
||||
} |
||||
} |
||||
goto exprstack /* stack new state and value */ |
||||
} |
||||
@ -0,0 +1,62 @@ |
||||
package pattern |
||||
|
||||
// lexer is the ragel-generated tokenizer feeding the yacc parser; it
// implements the generated exprLexer (yyLexer) interface.
type lexer struct {
	data        []byte // input being tokenized
	p, pe, cs   int    // ragel state: position, end position, current state
	ts, te, act int    // ragel state: token start/end, last action id

	lastnewline int // index of the last newline seen; -1 when none
	curline     int // current line number, 1-indexed

	errs []parseError // errors reported via Error()
	expr []node       // parse result, set by the grammar's root rule
}
||||
|
||||
func newLexer() *lexer { |
||||
lex := &lexer{} |
||||
lex.init() |
||||
return lex |
||||
} |
||||
|
||||
func (lex *lexer) setData(data []byte) { |
||||
lex.data = data |
||||
lex.pe = len(data) |
||||
lex.lastnewline = -1 |
||||
lex.curline = 1 |
||||
} |
||||
|
||||
// Error implements exprLexer interface generated by yacc (yyLexer).
// It records the message together with the current line and column so
// parseExpr can surface a positioned parseError.
func (lex *lexer) Error(e string) {
	lex.errs = append(lex.errs, newParseError(e, lex.curline, lex.curcol()))
}
||||
|
||||
// curcol calculates the current token's start column based on the last newline position
|
||||
// returns a 1-indexed value
|
||||
func (lex *lexer) curcol() int { |
||||
return (lex.ts + 1 /* 1-indexed columns */) - (lex.lastnewline + 1 /* next after newline */) |
||||
} |
||||
|
||||
// handle converts a (token, error) pair from a token callback into the
// single int the generated parser expects: on error it records the
// message and returns LEXER_ERROR (0), which stops lexing.
func (lex *lexer) handle(token int, err error) int {
	if err != nil {
		lex.Error(err.Error())
		return LEXER_ERROR
	}
	return token
}
||||
|
||||
// token returns the text of the token currently delimited by the ragel
// ts/te markers.
func (lex *lexer) token() string {
	return string(lex.data[lex.ts:lex.te])
}
||||
|
||||
// nolint
// identifier emits an IDENTIFIER token; the capture name is the matched
// text with the surrounding '<' and '>' stripped.
func (lex *lexer) identifier(out *exprSymType) (int, error) {
	t := lex.token()
	out.str = t[1 : len(t)-1]
	return IDENTIFIER, nil
}
||||
|
||||
// nolint
// literal emits a LITERAL token for a single byte of input.
// NOTE(review): only the byte at ts is taken, so multi-byte UTF-8
// characters become individual (invalid) runes — confirm this is
// intended before relying on non-ASCII literals.
func (lex *lexer) literal(out *exprSymType) (int, error) {
	out.literal = rune(lex.data[lex.ts])
	return LITERAL, nil
}
||||
@ -0,0 +1,43 @@ |
||||
package pattern

// Ragel grammar for the pattern lexer; compiled into lexer.rl.go.

%%{
	machine pattern;
	write data;
	access lex.;
	variable p lex.p;
	variable pe lex.pe;
	prepush {
		if len(lex.stack) <= lex.top {
			lex.stack = append(lex.stack, 0)
		}
	}
}%%

// LEXER_ERROR is returned by handle when a token callback fails.
const LEXER_ERROR = 0

%%{
	identifier = '<' (alpha| '_') (alnum | '_' )* '>';
	literal = any;
}%%

// Lex implements the generated exprLexer interface: it fills *out and
// returns the next token id, or 0 at end of input or on error.
func (lex *lexer) Lex(out *exprSymType) int {
	eof := lex.pe
	tok := 0

	%%{

	main := |*
		identifier => { tok = lex.handle(lex.identifier(out)); fbreak; };
		literal => { tok = lex.handle(lex.literal(out)); fbreak; };
	*|;

	write exec;
	}%%

	return tok;
}

// init writes the ragel start state into the lexer.
func (lex *lexer) init() {
	%% write init;
}
@ -0,0 +1,241 @@ |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl:1
|
||||
package pattern |
||||
|
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl.go:7
|
||||
var _pattern_actions []byte = []byte{ |
||||
0, 1, 0, 1, 1, 1, 2, 1, 3,
|
||||
1, 4, 1, 5, 1, 6,
|
||||
} |
||||
|
||||
var _pattern_key_offsets []byte = []byte{ |
||||
0, 8, 9,
|
||||
} |
||||
|
||||
var _pattern_trans_keys []byte = []byte{ |
||||
62, 95, 48, 57, 65, 90, 97, 122,
|
||||
60, 95, 65, 90, 97, 122,
|
||||
} |
||||
|
||||
var _pattern_single_lengths []byte = []byte{ |
||||
2, 1, 1,
|
||||
} |
||||
|
||||
var _pattern_range_lengths []byte = []byte{ |
||||
3, 0, 2,
|
||||
} |
||||
|
||||
var _pattern_index_offsets []byte = []byte{ |
||||
0, 6, 8,
|
||||
} |
||||
|
||||
var _pattern_trans_targs []byte = []byte{ |
||||
1, 0, 0, 0, 0, 1, 2, 1,
|
||||
0, 0, 0, 1, 1, 1,
|
||||
} |
||||
|
||||
var _pattern_trans_actions []byte = []byte{ |
||||
7, 0, 0, 0, 0, 13, 5, 9,
|
||||
0, 0, 0, 11, 13, 11,
|
||||
} |
||||
|
||||
var _pattern_to_state_actions []byte = []byte{ |
||||
0, 1, 0,
|
||||
} |
||||
|
||||
var _pattern_from_state_actions []byte = []byte{ |
||||
0, 3, 0,
|
||||
} |
||||
|
||||
var _pattern_eof_trans []byte = []byte{ |
||||
13, 0, 14,
|
||||
} |
||||
|
||||
const pattern_start int = 1 |
||||
const pattern_first_final int = 1 |
||||
const pattern_error int = -1 |
||||
|
||||
const pattern_en_main int = 1 |
||||
|
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl:14
|
||||
|
||||
|
||||
const LEXER_ERROR = 0 |
||||
|
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl:21
|
||||
|
||||
|
||||
func (lex *lexer) Lex(out *exprSymType) int { |
||||
eof := lex.pe |
||||
tok := 0 |
||||
|
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl.go:77
|
||||
{ |
||||
var _klen int |
||||
var _trans int |
||||
var _acts int |
||||
var _nacts uint |
||||
var _keys int |
||||
if ( lex.p) == ( lex.pe) { |
||||
goto _test_eof |
||||
} |
||||
_resume: |
||||
_acts = int(_pattern_from_state_actions[ lex.cs]) |
||||
_nacts = uint(_pattern_actions[_acts]); _acts++ |
||||
for ; _nacts > 0; _nacts-- { |
||||
_acts++ |
||||
switch _pattern_actions[_acts - 1] { |
||||
case 1: |
||||
//line NONE:1
|
||||
lex.ts = ( lex.p) |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl.go:97
|
||||
} |
||||
} |
||||
|
||||
_keys = int(_pattern_key_offsets[ lex.cs]) |
||||
_trans = int(_pattern_index_offsets[ lex.cs]) |
||||
|
||||
_klen = int(_pattern_single_lengths[ lex.cs]) |
||||
if _klen > 0 { |
||||
_lower := int(_keys) |
||||
var _mid int |
||||
_upper := int(_keys + _klen - 1) |
||||
for { |
||||
if _upper < _lower { |
||||
break |
||||
} |
||||
|
||||
_mid = _lower + ((_upper - _lower) >> 1) |
||||
switch { |
||||
case lex.data[( lex.p)] < _pattern_trans_keys[_mid]: |
||||
_upper = _mid - 1 |
||||
case lex.data[( lex.p)] > _pattern_trans_keys[_mid]: |
||||
_lower = _mid + 1 |
||||
default: |
||||
_trans += int(_mid - int(_keys)) |
||||
goto _match |
||||
} |
||||
} |
||||
_keys += _klen |
||||
_trans += _klen |
||||
} |
||||
|
||||
_klen = int(_pattern_range_lengths[ lex.cs]) |
||||
if _klen > 0 { |
||||
_lower := int(_keys) |
||||
var _mid int |
||||
_upper := int(_keys + (_klen << 1) - 2) |
||||
for { |
||||
if _upper < _lower { |
||||
break |
||||
} |
||||
|
||||
_mid = _lower + (((_upper - _lower) >> 1) & ^1) |
||||
switch { |
||||
case lex.data[( lex.p)] < _pattern_trans_keys[_mid]: |
||||
_upper = _mid - 2 |
||||
case lex.data[( lex.p)] > _pattern_trans_keys[_mid + 1]: |
||||
_lower = _mid + 2 |
||||
default: |
||||
_trans += int((_mid - int(_keys)) >> 1) |
||||
goto _match |
||||
} |
||||
} |
||||
_trans += _klen |
||||
} |
||||
|
||||
_match: |
||||
_eof_trans: |
||||
lex.cs = int(_pattern_trans_targs[_trans]) |
||||
|
||||
if _pattern_trans_actions[_trans] == 0 { |
||||
goto _again |
||||
} |
||||
|
||||
_acts = int(_pattern_trans_actions[_trans]) |
||||
_nacts = uint(_pattern_actions[_acts]); _acts++ |
||||
for ; _nacts > 0; _nacts-- { |
||||
_acts++ |
||||
switch _pattern_actions[_acts-1] { |
||||
case 2: |
||||
//line NONE:1
|
||||
lex.te = ( lex.p)+1 |
||||
|
||||
case 3: |
||||
//line pkg/logql/log/pattern/lexer.rl:30
|
||||
lex.te = ( lex.p)+1 |
||||
{ tok = lex.handle(lex.identifier(out)); ( lex.p)++; goto _out |
||||
} |
||||
case 4: |
||||
//line pkg/logql/log/pattern/lexer.rl:31
|
||||
lex.te = ( lex.p)+1 |
||||
{ tok = lex.handle(lex.literal(out)); ( lex.p)++; goto _out |
||||
} |
||||
case 5: |
||||
//line pkg/logql/log/pattern/lexer.rl:31
|
||||
lex.te = ( lex.p) |
||||
( lex.p)-- |
||||
{ tok = lex.handle(lex.literal(out)); ( lex.p)++; goto _out |
||||
} |
||||
case 6: |
||||
//line pkg/logql/log/pattern/lexer.rl:31
|
||||
( lex.p) = ( lex.te) - 1 |
||||
{ tok = lex.handle(lex.literal(out)); ( lex.p)++; goto _out |
||||
} |
||||
//line pkg/logql/log/pattern/lexer.rl.go:191
|
||||
} |
||||
} |
||||
|
||||
_again: |
||||
_acts = int(_pattern_to_state_actions[ lex.cs]) |
||||
_nacts = uint(_pattern_actions[_acts]); _acts++ |
||||
for ; _nacts > 0; _nacts-- { |
||||
_acts++ |
||||
switch _pattern_actions[_acts-1] { |
||||
case 0: |
||||
//line NONE:1
|
||||
lex.ts = 0 |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl.go:205
|
||||
} |
||||
} |
||||
|
||||
( lex.p)++ |
||||
if ( lex.p) != ( lex.pe) { |
||||
goto _resume |
||||
} |
||||
_test_eof: {} |
||||
if ( lex.p) == eof { |
||||
if _pattern_eof_trans[ lex.cs] > 0 { |
||||
_trans = int(_pattern_eof_trans[ lex.cs] - 1) |
||||
goto _eof_trans |
||||
} |
||||
} |
||||
|
||||
_out: {} |
||||
} |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl:35
|
||||
|
||||
|
||||
return tok; |
||||
} |
||||
|
||||
|
||||
func (lex *lexer) init() { |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl.go:233
|
||||
{ |
||||
lex.cs = pattern_start |
||||
lex.ts = 0 |
||||
lex.te = 0 |
||||
lex.act = 0 |
||||
} |
||||
|
||||
//line pkg/logql/log/pattern/lexer.rl:43
|
||||
} |
||||
@ -0,0 +1,47 @@ |
||||
package pattern |
||||
|
||||
import ( |
||||
"testing" |
||||
|
||||
"github.com/stretchr/testify/assert" |
||||
) |
||||
|
||||
// Test_Lex verifies tokenization: a well-formed <name> capture lexes as
// a single IDENTIFIER, while everything else — including malformed
// captures such as "<foo" or "<1foo>" — falls back to one LITERAL token
// per character.
func Test_Lex(t *testing.T) {
	for _, tc := range []struct {
		input    string
		expected []int
	}{
		{`_foo`, []int{LITERAL, LITERAL, LITERAL, LITERAL}},
		{`<foo`, []int{LITERAL, LITERAL, LITERAL, LITERAL}},
		{`<`, []int{LITERAL}},
		{`>`, []int{LITERAL}},
		{`<_1foo>`, []int{IDENTIFIER}},
		{`<_1foo> bar <buzz>`, []int{IDENTIFIER, LITERAL, LITERAL, LITERAL, LITERAL, LITERAL, IDENTIFIER}},
		{`<1foo>`, []int{LITERAL, LITERAL, LITERAL, LITERAL, LITERAL, LITERAL}},
	} {
		tc := tc
		t.Run(tc.input, func(t *testing.T) {
			actual := []int{}
			l := newLexer()
			l.setData([]byte(tc.input))
			for {
				var lval exprSymType
				tok := l.Lex(&lval)
				// Lex returns 0 both at EOF and on lexer error.
				if tok == 0 {
					break
				}
				actual = append(actual, tok)
			}
			// Compare names first for a readable failure message, then ids.
			assert.Equal(t, toksToStrings(tc.expected), toksToStrings(actual))
			assert.Equal(t, tc.expected, actual)
		})
	}
}
||||
|
||||
func toksToStrings(toks []int) []string { |
||||
strings := make([]string, len(toks)) |
||||
for i, tok := range toks { |
||||
strings[i] = exprToknames[tok-exprPrivate+1] |
||||
} |
||||
return strings |
||||
} |
||||
@ -0,0 +1,50 @@ |
||||
package pattern |
||||
|
||||
import "fmt" |
||||
|
||||
// underscore is the reserved name of the anonymous capture <_>.
const underscore = "_"

// tokens maps single-character token ids to their display strings,
// used to improve yacc's generated error messages.
var tokens = map[int]string{
	LESS_THAN:  "<",
	MORE_THAN:  ">",
	UNDERSCORE: underscore,
}
||||
|
||||
func init() {
	// Improve the error messages coming out of yacc.
	exprErrorVerbose = true
	// Replace the generated token names with their display strings.
	for tok, str := range tokens {
		exprToknames[tok-exprPrivate+1] = str
	}
}
||||
|
||||
func parseExpr(input string) (expr, error) { |
||||
l := newLexer() |
||||
l.setData([]byte(input)) |
||||
e := exprNewParser().Parse(l) |
||||
if e != 0 || len(l.errs) > 0 { |
||||
return nil, l.errs[0] |
||||
} |
||||
return l.expr, nil |
||||
} |
||||
|
||||
// parseError is what is returned when we failed to parse.
type parseError struct {
	msg       string
	line, col int
}

// Error renders the message, prefixed with the recorded position when
// one is present.
func (p parseError) Error() string {
	if p.line == 0 && p.col == 0 {
		return p.msg
	}
	return fmt.Sprintf("parse error at line %d, col %d: %s", p.line, p.col, p.msg)
}

// newParseError builds a parseError at the given 1-indexed position.
func newParseError(msg string, line, col int) parseError {
	return parseError{msg: msg, line: line, col: col}
}
||||
@ -0,0 +1,59 @@ |
||||
package pattern |
||||
|
||||
import ( |
||||
"testing" |
||||
|
||||
"github.com/stretchr/testify/require" |
||||
) |
||||
|
||||
func Test_Parse(t *testing.T) { |
||||
for _, tc := range []struct { |
||||
input string |
||||
expected expr |
||||
err error |
||||
}{ |
||||
{ |
||||
"<foo> bar f <f>", |
||||
expr{capture("foo"), literals(" bar f "), capture("f")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
"<foo", |
||||
expr{literals("<foo")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
"<foo ><bar>", |
||||
expr{literals("<foo >"), capture("bar")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
"<>", |
||||
expr{literals("<>")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
"<_>", |
||||
expr{capture("_")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
"<1_>", |
||||
expr{literals("<1_>")}, |
||||
nil, |
||||
}, |
||||
{ |
||||
`<ip> - <user> [<_>] "<method> <path> <_>" <status> <size> <url> <user_agent>`, |
||||
expr{capture("ip"), literals(" - "), capture("user"), literals(" ["), capture("_"), literals(`] "`), capture("method"), literals(" "), capture("path"), literals(" "), capture('_'), literals(`" `), capture("status"), literals(" "), capture("size"), literals(" "), capture("url"), literals(" "), capture("user_agent")}, |
||||
nil, |
||||
}, |
||||
} { |
||||
tc := tc |
||||
actual, err := parseExpr(tc.input) |
||||
if tc.err != nil || err != nil { |
||||
require.Equal(t, tc.err, err) |
||||
return |
||||
} |
||||
require.Equal(t, tc.expected, actual) |
||||
} |
||||
} |
||||
@ -0,0 +1,95 @@ |
||||
package pattern |
||||
|
||||
import ( |
||||
"bytes" |
||||
"errors" |
||||
) |
||||
|
||||
var (
	// ErrNoCapture is returned when a pattern contains no named capture.
	ErrNoCapture = errors.New("at least one capture is required")
	// ErrInvalidExpr is wrapped into errors for structurally invalid patterns.
	ErrInvalidExpr = errors.New("invalid expression")
)

// Matcher extracts the captured portions of a log line according to a
// pre-parsed pattern.
type Matcher interface {
	// Matches returns the byte slices captured from in, in pattern order.
	Matches(in []byte) [][]byte
	// Names returns the pattern's named captures, in order.
	Names() []string
}
||||
|
||||
// matcher is the Matcher implementation backed by a validated expr.
type matcher struct {
	e expr

	captures [][]byte // reusable backing array for Matches results
	names    []string // named captures, computed once at construction
}
||||
|
||||
func New(in string) (Matcher, error) { |
||||
e, err := parseExpr(in) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
if err := e.validate(); err != nil { |
||||
return nil, err |
||||
} |
||||
return &matcher{ |
||||
e: e, |
||||
captures: make([][]byte, 0, e.captureCount()), |
||||
names: e.captures(), |
||||
}, nil |
||||
} |
||||
|
||||
// Matches matches the given line with the provided pattern.
|
||||
// Matches invalidates the previous returned captures array.
|
||||
func (m *matcher) Matches(in []byte) [][]byte { |
||||
if len(in) == 0 { |
||||
return nil |
||||
} |
||||
if len(m.e) == 0 { |
||||
return nil |
||||
} |
||||
captures := m.captures[:0] |
||||
expr := m.e |
||||
if ls, ok := expr[0].(literals); ok { |
||||
i := bytes.Index(in, ls) |
||||
if i != 0 { |
||||
return nil |
||||
} |
||||
in = in[len(ls):] |
||||
expr = expr[1:] |
||||
} |
||||
if len(expr) == 0 { |
||||
return nil |
||||
} |
||||
// from now we have capture - literals - capture ... (literals)?
|
||||
for len(expr) != 0 { |
||||
if len(expr) == 1 { // we're ending on a capture.
|
||||
if !(expr[0].(capture)).isUnamed() { |
||||
captures = append(captures, in) |
||||
} |
||||
return captures |
||||
} |
||||
cap := expr[0].(capture) |
||||
ls := expr[1].(literals) |
||||
expr = expr[2:] |
||||
i := bytes.Index(in, ls) |
||||
if i == -1 { |
||||
// if a capture is missed we return up to the end as the capture.
|
||||
if !cap.isUnamed() { |
||||
captures = append(captures, in) |
||||
} |
||||
return captures |
||||
} |
||||
|
||||
if cap.isUnamed() { |
||||
in = in[len(ls)+i:] |
||||
continue |
||||
} |
||||
captures = append(captures, in[:i]) |
||||
in = in[len(ls)+i:] |
||||
} |
||||
|
||||
return captures |
||||
} |
||||
|
||||
// Names returns the capture names extracted from the pattern when the
// matcher was built; the slice is shared, callers must not mutate it.
func (m *matcher) Names() []string {
	return m.names
}
||||
@ -0,0 +1,162 @@ |
||||
package pattern |
||||
|
||||
import ( |
||||
"fmt" |
||||
"testing" |
||||
|
||||
"github.com/stretchr/testify/require" |
||||
) |
||||
|
||||
// fixtures exercises the matcher against patterns modeled on common
// real-world log formats; each entry pairs a pattern expression with a
// sample line and the capture values expected from Matches.
var fixtures = []struct {
	expr     string
	in       string
	expected []string
}{
	{
		"foo <foo> bar",
		"foo buzz bar",
		[]string{"buzz"},
	},
	{
		"foo <foo> bar<fuzz>",
		"foo buzz bar",
		[]string{"buzz", ""},
	},
	{
		"<foo> bar<fuzz>",
		" bar",
		[]string{"", ""},
	},
	{
		"<path>?<_>",
		`/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1`,
		[]string{"/api/plugins/versioncheck"},
	},
	{
		"<path>?<_>",
		`/api/plugins/status`,
		[]string{"/api/plugins/status"},
	},
	{
		// Common Log Format
		`<ip> <userid> <user> [<_>] "<method> <path> <_>" <status> <size>`,
		`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`,
		[]string{"127.0.0.1", "user-identifier", "frank", "GET", "/apache_pb.gif", "200", "2326"},
	},
	{
		// Combined Log Format
		`<ip> - - [<_>] "<method> <path> <_>" <status> <size> `,
		`35.191.8.106 - - [19/May/2021:07:21:49 +0000] "GET /api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1 HTTP/1.1" 200 107 "-" "Go-http-client/2.0" "80.153.74.144, 34.120.177.193" "TLSv1.3" "DE" "DEBW"`,
		[]string{"35.191.8.106", "GET", "/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1", "200", "107"},
	},
	{
		// MySQL
		`<_> <id> [<level>] [<no>] [<component>] `,
		`2020-08-06T14:25:02.835618Z 0 [Note] [MY-012487] [InnoDB] DDL log recovery : begin`,
		[]string{"0", "Note", "MY-012487", "InnoDB"},
	},
	{
		// MySQL
		`<_> <id> [<level>] `,
		`2021-05-19T07:40:12.215792Z 42761518 [Note] Aborted connection 42761518 to db: 'hosted_grafana' user: 'hosted_grafana' host: '10.36.4.122' (Got an error reading communication packets)`,
		[]string{"42761518", "Note"},
	},
	{
		// Kubernetes api-server
		`<id> <_> <_> <line>] `,
		`W0519 07:46:47.647050 1 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {https://kubernetes-etcd-1.kubernetes-etcd:2379 <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 10.32.85.85:2379: connect: connection refused". Reconnecting...`,
		[]string{"W0519", "clientconn.go:1223"},
	},
	{
		// Cassandra
		`<level> [<component>]<_> in <duration>.<_>`,
		`INFO [Service Thread] 2021-05-19 07:40:12,130 GCInspector.java:284 - ParNew GC in 248ms. CMS Old Gen: 5043436640 -> 5091062064; Par Eden Space: 671088640 -> 0; Par Survivor Space: 70188280 -> 60139760`,
		[]string{"INFO", "Service Thread", "248ms"},
	},
	{
		// Cortex & Loki distributor
		`<_> msg="<method> <path> (<status>) <duration>"`,
		`level=debug ts=2021-05-19T07:54:26.864644382Z caller=logging.go:66 traceID=7fbb92fd0eb9c65d msg="POST /loki/api/v1/push (204) 1.238734ms"`,
		[]string{"POST", "/loki/api/v1/push", "204", "1.238734ms"},
	},
	{
		// Etcd
		`<_> <_> <level> | <component>: <_> peer <peer_id> <_> tcp <ip>:<_>`,
		`2021-05-19 08:16:50.181436 W | rafthttp: health check for peer fd8275e521cfb532 could not connect: dial tcp 10.32.85.85:2380: connect: connection refused`,
		[]string{"W", "rafthttp", "fd8275e521cfb532", "10.32.85.85"},
	},
	{
		// Kafka
		`<_>] <level> [Log partition=<part>, dir=<dir>] `,
		`[2021-05-19 08:35:28,681] INFO [Log partition=p-636-L-fs-117, dir=/data/kafka-logs] Deleting segment 455976081 (kafka.log.Log)`,
		[]string{"INFO", "p-636-L-fs-117", "/data/kafka-logs"},
	},
	{
		// Elastic
		`<_>][<level>][<component>] [<id>] [<index>]`,
		`[2021-05-19T06:54:06,994][INFO ][o.e.c.m.MetaDataMappingService] [1f605d47-8454-4bfb-a67f-49f318bf837a] [usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA] update_mapping [report]`,
		[]string{"INFO ", "o.e.c.m.MetaDataMappingService", "1f605d47-8454-4bfb-a67f-49f318bf837a", "usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA"},
	},
	{
		// Envoy
		`<_> "<method> <path> <_>" <status> <_> <received_bytes> <sent_bytes> <duration> <upstream_time> "<forward_for>" "<agent>" <_> <_> "<upstream>"`,
		`[2016-04-15T20:17:00.310Z] "POST /api/v1/locations HTTP/2" 204 - 154 0 226 100 "10.0.35.28" "nsq2http" "cc21d9b0-cf5c-432b-8c7e-98aeb7988cd2" "locations" "tcp://10.0.2.1:80"`,
		[]string{"POST", "/api/v1/locations", "204", "154", "0", "226", "100", "10.0.35.28", "nsq2http", "tcp://10.0.2.1:80"},
	},
}
||||
|
||||
func Test_matcher_Matches(t *testing.T) { |
||||
for _, tt := range fixtures { |
||||
tt := tt |
||||
t.Run(tt.expr, func(t *testing.T) { |
||||
t.Parallel() |
||||
m, err := New(tt.expr) |
||||
require.NoError(t, err) |
||||
actual := m.Matches([]byte(tt.in)) |
||||
var actualStrings []string |
||||
for _, a := range actual { |
||||
actualStrings = append(actualStrings, string(a)) |
||||
} |
||||
require.Equal(t, tt.expected, actualStrings) |
||||
}) |
||||
} |
||||
} |
||||
|
||||
// res is a package-level sink that keeps the compiler from eliminating
// the Matches call inside Benchmark_matcher_Matches as dead code.
var res [][]byte
||||
|
||||
func Benchmark_matcher_Matches(b *testing.B) { |
||||
for _, tt := range fixtures { |
||||
b.Run(tt.expr, func(b *testing.B) { |
||||
b.ReportAllocs() |
||||
m, err := New(tt.expr) |
||||
require.NoError(b, err) |
||||
b.ResetTimer() |
||||
l := []byte(tt.in) |
||||
for n := 0; n < b.N; n++ { |
||||
res = m.Matches(l) |
||||
} |
||||
}) |
||||
} |
||||
} |
||||
|
||||
func Test_Error(t *testing.T) { |
||||
for _, tt := range []struct { |
||||
name string |
||||
err error |
||||
}{ |
||||
{"<f>", nil}, |
||||
{"<f> <a>", nil}, |
||||
{"", newParseError("syntax error: unexpected $end, expecting IDENTIFIER or LITERAL", 1, 1)}, |
||||
{"<_>", ErrNoCapture}, |
||||
{"foo <_> bar <_>", ErrNoCapture}, |
||||
{"foo bar buzz", ErrNoCapture}, |
||||
{"<f><f>", fmt.Errorf("found consecutive capture: %w", ErrInvalidExpr)}, |
||||
{"<f> f<d><b>", fmt.Errorf("found consecutive capture: %w", ErrInvalidExpr)}, |
||||
{"<f> f<f>", fmt.Errorf("duplicate capture name (f): %w", ErrInvalidExpr)}, |
||||
} { |
||||
t.Run(tt.name, func(t *testing.T) { |
||||
_, err := New(tt.name) |
||||
require.Equal(t, tt.err, err) |
||||
}) |
||||
} |
||||
} |
||||
Loading…
Reference in new issue