mirror of https://github.com/grafana/loki
Loki/Promtail: Client Refactor (#3623)
* move docker driver
* move fluent-bit output plugin
* move fluentd plugin
* move logstash plugin
* move promtail
* match stage uses simple logql label matcher and filter expressions only
* remove remaining logql imports
* remove helpers package and move contents to util package
* move validation and runtime out of util package
* fix imports
* move build and cfg packages into util
* move ParseRequest function to util package
* create a new package for logql models and move marshal/unmarshal code to util folder
* move ErrorLabel to logqlmodel
* move stats package to logqlmodel
* decouple loghttp from logql by moving validation to logql package
* move gcplog to tools
* move PackedEntryKey constant to logqlmodel
* fix stats proto import and rebuild protos
* fix windows promtail tests
* update drone to new dockerfile locations
parent b04af08b1b
commit 6e67d1ea5d
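For context, the match stage now only needs to parse a label selector plus optional line filters. Below is a minimal sketch (not part of this diff) of what that simplified expression form gives a caller, assuming the new package is importable as github.com/grafana/loki/clients/pkg/logentry/logql (the path used in the generated parser later in this diff); the labels are made up.

package main

import (
	"fmt"

	"github.com/grafana/loki/clients/pkg/logentry/logql" // assumed import path
)

func main() {
	// Made-up labels; the expression is a selector plus one |= line filter.
	expr, err := logql.ParseExpr(`{app="nginx"} |= "error"`)
	if err != nil {
		panic(err)
	}

	// Label matchers select the streams the stage applies to.
	fmt.Println(expr.Matchers())

	// The line filter compiles down to a plain func([]byte) bool.
	filter, err := expr.Filter()
	if err != nil {
		panic(err)
	}
	fmt.Println(filter([]byte("level=error msg=boom"))) // true
}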
@@ -1,18 +1,19 @@
package main

import (
	"C"
	"fmt"
	"time"
	"unsafe"

	"C"

	"github.com/fluent/fluent-bit-go/output"
	"github.com/go-kit/kit/log"
	"github.com/go-kit/kit/log/level"
	"github.com/prometheus/common/version"
	"github.com/weaveworks/common/logging"

	_ "github.com/grafana/loki/pkg/build"
	_ "github.com/grafana/loki/pkg/util/build"
)

var (
@@ -0,0 +1,102 @@
package logql

import (
	"bytes"
	"fmt"
	"regexp"

	"github.com/prometheus/prometheus/pkg/labels"
)

// Filter is a line filter sent to a querier to filter out log lines.
type Filter func([]byte) bool

// Expr is a LogQL expression.
type Expr interface {
	Filter() (Filter, error)
	Matchers() []*labels.Matcher
}

type matchersExpr struct {
	matchers []*labels.Matcher
}

func (e *matchersExpr) Matchers() []*labels.Matcher {
	return e.matchers
}

func (e *matchersExpr) Filter() (Filter, error) {
	return nil, nil
}

type filterExpr struct {
	left  Expr
	ty    labels.MatchType
	match string
}

func (e *filterExpr) Matchers() []*labels.Matcher {
	return e.left.Matchers()
}

// NewFilterExpr wraps an existing Expr with a new filter expression.
func NewFilterExpr(left Expr, ty labels.MatchType, match string) Expr {
	return &filterExpr{
		left:  left,
		ty:    ty,
		match: match,
	}
}

func (e *filterExpr) Filter() (Filter, error) {
	var f func([]byte) bool
	switch e.ty {
	case labels.MatchRegexp:
		re, err := regexp.Compile(e.match)
		if err != nil {
			return nil, err
		}
		f = re.Match

	case labels.MatchNotRegexp:
		re, err := regexp.Compile(e.match)
		if err != nil {
			return nil, err
		}
		f = func(line []byte) bool {
			return !re.Match(line)
		}

	case labels.MatchEqual:
		f = func(line []byte) bool {
			return bytes.Contains(line, []byte(e.match))
		}

	case labels.MatchNotEqual:
		f = func(line []byte) bool {
			return !bytes.Contains(line, []byte(e.match))
		}

	default:
		return nil, fmt.Errorf("unknown matcher: %v", e.match)
	}
	next, ok := e.left.(*filterExpr)
	if ok {
		nextFilter, err := next.Filter()
		if err != nil {
			return nil, err
		}
		return func(line []byte) bool {
			return nextFilter(line) && f(line)
		}, nil
	}
	return f, nil
}

func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher {
	m, err := labels.NewMatcher(t, n, v)
	if err != nil {
		panic(err)
	}
	return m
}
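A short package-internal sketch (not part of this diff; it relies on the unexported matchersExpr and mustNewMatcher, just as the tests later in this diff do) of how chained filter expressions compose: each filterExpr ANDs its own predicate with the filter built by the expression it wraps. The label values are made up.

// Sketch only: assumes it sits in the same logql package so that
// matchersExpr and mustNewMatcher are visible, mirroring the tests.
func exampleFilterChain() (bool, error) {
	// Equivalent of: {job="varlogs"} |= "error" !~ "timeout"   (made-up labels)
	base := &matchersExpr{matchers: []*labels.Matcher{
		mustNewMatcher(labels.MatchEqual, "job", "varlogs"),
	}}
	expr := NewFilterExpr(NewFilterExpr(base, labels.MatchEqual, "error"), labels.MatchNotRegexp, "timeout")

	f, err := expr.Filter()
	if err != nil {
		return false, err
	}
	// true: the line contains "error" and does not match "timeout".
	return f([]byte("error reading file")), nil
}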
@@ -0,0 +1,65 @@
%{
package logql

import (
	"github.com/prometheus/prometheus/pkg/labels"
)
%}

%union{
	Expr Expr
	Filter labels.MatchType
	Selector []*labels.Matcher
	Matchers []*labels.Matcher
	Matcher *labels.Matcher
	str string
	int int64
}

%start root

%type <Expr> expr
%type <Filter> filter
%type <Selector> selector
%type <Matchers> matchers
%type <Matcher> matcher

%token <str> IDENTIFIER STRING
%token <val> MATCHERS LABELS EQ NEQ RE NRE OPEN_BRACE CLOSE_BRACE COMMA DOT PIPE_MATCH PIPE_EXACT

%%

root: expr { exprlex.(*lexer).expr = $1 };

expr:
      selector           { $$ = &matchersExpr{ matchers: $1 } }
    | expr filter STRING { $$ = NewFilterExpr( $1, $2, $3 ) }
    | expr filter error
    | expr error
    ;

filter:
      PIPE_MATCH { $$ = labels.MatchRegexp }
    | PIPE_EXACT { $$ = labels.MatchEqual }
    | NRE        { $$ = labels.MatchNotRegexp }
    | NEQ        { $$ = labels.MatchNotEqual }
    ;

selector:
      OPEN_BRACE matchers CLOSE_BRACE { $$ = $2 }
    | OPEN_BRACE matchers error       { $$ = $2 }
    | OPEN_BRACE error CLOSE_BRACE    { }
    ;

matchers:
      matcher                { $$ = []*labels.Matcher{ $1 } }
    | matchers COMMA matcher { $$ = append($1, $3) }
    ;

matcher:
      IDENTIFIER EQ STRING  { $$ = mustNewMatcher(labels.MatchEqual, $1, $3) }
    | IDENTIFIER NEQ STRING { $$ = mustNewMatcher(labels.MatchNotEqual, $1, $3) }
    | IDENTIFIER RE STRING  { $$ = mustNewMatcher(labels.MatchRegexp, $1, $3) }
    | IDENTIFIER NRE STRING { $$ = mustNewMatcher(labels.MatchNotRegexp, $1, $3) }
    ;
%%
@@ -0,0 +1,542 @@
// Code generated by goyacc -p expr -o clients/pkg/logentry/logql/expr.y.go clients/pkg/logentry/logql/expr.y. DO NOT EDIT.

package logql

import __yyfmt__ "fmt"

import (
	"github.com/prometheus/prometheus/pkg/labels"
)

type exprSymType struct {
	yys int
	Expr Expr
	Filter labels.MatchType
	Selector []*labels.Matcher
	Matchers []*labels.Matcher
	Matcher *labels.Matcher
	str string
	int int64
}

const IDENTIFIER = 57346
const STRING = 57347
const MATCHERS = 57348
const LABELS = 57349
const EQ = 57350
const NEQ = 57351
const RE = 57352
const NRE = 57353
const OPEN_BRACE = 57354
const CLOSE_BRACE = 57355
const COMMA = 57356
const DOT = 57357
const PIPE_MATCH = 57358
const PIPE_EXACT = 57359

var exprToknames = [...]string{
	"$end",
	"error",
	"$unk",
	"IDENTIFIER",
	"STRING",
	"MATCHERS",
	"LABELS",
	"EQ",
	"NEQ",
	"RE",
	"NRE",
	"OPEN_BRACE",
	"CLOSE_BRACE",
	"COMMA",
	"DOT",
	"PIPE_MATCH",
	"PIPE_EXACT",
}
var exprStatenames = [...]string{}

const exprEofCode = 1
const exprErrCode = 2
const exprInitialStackSize = 16

var exprExca = [...]int{
	-1, 1,
	1, -1,
	-2, 0,
	-1, 2,
	1, 1,
	-2, 0,
}

const exprPrivate = 57344

const exprLast = 30

var exprAct = [...]int{

	6, 13, 20, 4, 29, 18, 28, 10, 14, 9,
	21, 22, 23, 24, 7, 8, 17, 19, 27, 16,
	26, 25, 15, 12, 11, 14, 3, 5, 2, 1,
}
var exprPact = [...]int{

	-9, -1000, -2, -1000, 21, 17, -1000, -1000, -1000, -1000,
	-1000, 3, -11, -1000, 2, -1000, -1000, -1000, -1000, 4,
	-1000, 15, 13, 1, -1, -1000, -1000, -1000, -1000, -1000,
}
var exprPgo = [...]int{

	0, 29, 28, 27, 26, 24, 1,
}
var exprR1 = [...]int{

	0, 1, 2, 2, 2, 2, 3, 3, 3, 3,
	4, 4, 4, 5, 5, 6, 6, 6, 6,
}
var exprR2 = [...]int{

	0, 1, 1, 3, 3, 2, 1, 1, 1, 1,
	3, 3, 3, 1, 3, 3, 3, 3, 3,
}
var exprChk = [...]int{

	-1000, -1, -2, -4, 12, -3, 2, 16, 17, 11,
	9, -5, 2, -6, 4, 5, 2, 13, 2, 14,
	13, 8, 9, 10, 11, -6, 5, 5, 5, 5,
}
var exprDef = [...]int{

	0, -2, -2, 2, 0, 0, 5, 6, 7, 8,
	9, 0, 0, 13, 0, 3, 4, 10, 11, 0,
	12, 0, 0, 0, 0, 14, 15, 16, 17, 18,
}
var exprTok1 = [...]int{

	1,
}
var exprTok2 = [...]int{

	2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
	12, 13, 14, 15, 16, 17,
}
var exprTok3 = [...]int{
	0,
}

var exprErrorMessages = [...]struct {
	state int
	token int
	msg   string
}{}

/* parser for yacc output */

var (
	exprDebug        = 0
	exprErrorVerbose = false
)

type exprLexer interface {
	Lex(lval *exprSymType) int
	Error(s string)
}

type exprParser interface {
	Parse(exprLexer) int
	Lookahead() int
}

type exprParserImpl struct {
	lval  exprSymType
	stack [exprInitialStackSize]exprSymType
	char  int
}

func (p *exprParserImpl) Lookahead() int {
	return p.char
}

func exprNewParser() exprParser {
	return &exprParserImpl{}
}

const exprFlag = -1000

func exprTokname(c int) string {
	if c >= 1 && c-1 < len(exprToknames) {
		if exprToknames[c-1] != "" {
			return exprToknames[c-1]
		}
	}
	return __yyfmt__.Sprintf("tok-%v", c)
}

func exprStatname(s int) string {
	if s >= 0 && s < len(exprStatenames) {
		if exprStatenames[s] != "" {
			return exprStatenames[s]
		}
	}
	return __yyfmt__.Sprintf("state-%v", s)
}

func exprErrorMessage(state, lookAhead int) string {
	const TOKSTART = 4

	if !exprErrorVerbose {
		return "syntax error"
	}

	for _, e := range exprErrorMessages {
		if e.state == state && e.token == lookAhead {
			return "syntax error: " + e.msg
		}
	}

	res := "syntax error: unexpected " + exprTokname(lookAhead)

	// To match Bison, suggest at most four expected tokens.
	expected := make([]int, 0, 4)

	// Look for shiftable tokens.
	base := exprPact[state]
	for tok := TOKSTART; tok-1 < len(exprToknames); tok++ {
		if n := base + tok; n >= 0 && n < exprLast && exprChk[exprAct[n]] == tok {
			if len(expected) == cap(expected) {
				return res
			}
			expected = append(expected, tok)
		}
	}

	if exprDef[state] == -2 {
		i := 0
		for exprExca[i] != -1 || exprExca[i+1] != state {
			i += 2
		}

		// Look for tokens that we accept or reduce.
		for i += 2; exprExca[i] >= 0; i += 2 {
			tok := exprExca[i]
			if tok < TOKSTART || exprExca[i+1] == 0 {
				continue
			}
			if len(expected) == cap(expected) {
				return res
			}
			expected = append(expected, tok)
		}

		// If the default action is to accept or reduce, give up.
		if exprExca[i+1] != 0 {
			return res
		}
	}

	for i, tok := range expected {
		if i == 0 {
			res += ", expecting "
		} else {
			res += " or "
		}
		res += exprTokname(tok)
	}
	return res
}

func exprlex1(lex exprLexer, lval *exprSymType) (char, token int) {
	token = 0
	char = lex.Lex(lval)
	if char <= 0 {
		token = exprTok1[0]
		goto out
	}
	if char < len(exprTok1) {
		token = exprTok1[char]
		goto out
	}
	if char >= exprPrivate {
		if char < exprPrivate+len(exprTok2) {
			token = exprTok2[char-exprPrivate]
			goto out
		}
	}
	for i := 0; i < len(exprTok3); i += 2 {
		token = exprTok3[i+0]
		if token == char {
			token = exprTok3[i+1]
			goto out
		}
	}

out:
	if token == 0 {
		token = exprTok2[1] /* unknown char */
	}
	if exprDebug >= 3 {
		__yyfmt__.Printf("lex %s(%d)\n", exprTokname(token), uint(char))
	}
	return char, token
}

func exprParse(exprlex exprLexer) int {
	return exprNewParser().Parse(exprlex)
}

func (exprrcvr *exprParserImpl) Parse(exprlex exprLexer) int {
	var exprn int
	var exprVAL exprSymType
	var exprDollar []exprSymType
	_ = exprDollar // silence set and not used
	exprS := exprrcvr.stack[:]

	Nerrs := 0   /* number of errors */
	Errflag := 0 /* error recovery flag */
	exprstate := 0
	exprrcvr.char = -1
	exprtoken := -1 // exprrcvr.char translated into internal numbering
	defer func() {
		// Make sure we report no lookahead when not parsing.
		exprstate = -1
		exprrcvr.char = -1
		exprtoken = -1
	}()
	exprp := -1
	goto exprstack

ret0:
	return 0

ret1:
	return 1

exprstack:
	/* put a state and value onto the stack */
	if exprDebug >= 4 {
		__yyfmt__.Printf("char %v in %v\n", exprTokname(exprtoken), exprStatname(exprstate))
	}

	exprp++
	if exprp >= len(exprS) {
		nyys := make([]exprSymType, len(exprS)*2)
		copy(nyys, exprS)
		exprS = nyys
	}
	exprS[exprp] = exprVAL
	exprS[exprp].yys = exprstate

exprnewstate:
	exprn = exprPact[exprstate]
	if exprn <= exprFlag {
		goto exprdefault /* simple state */
	}
	if exprrcvr.char < 0 {
		exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval)
	}
	exprn += exprtoken
	if exprn < 0 || exprn >= exprLast {
		goto exprdefault
	}
	exprn = exprAct[exprn]
	if exprChk[exprn] == exprtoken { /* valid shift */
		exprrcvr.char = -1
		exprtoken = -1
		exprVAL = exprrcvr.lval
		exprstate = exprn
		if Errflag > 0 {
			Errflag--
		}
		goto exprstack
	}

exprdefault:
	/* default state action */
	exprn = exprDef[exprstate]
	if exprn == -2 {
		if exprrcvr.char < 0 {
			exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval)
		}

		/* look through exception table */
		xi := 0
		for {
			if exprExca[xi+0] == -1 && exprExca[xi+1] == exprstate {
				break
			}
			xi += 2
		}
		for xi += 2; ; xi += 2 {
			exprn = exprExca[xi+0]
			if exprn < 0 || exprn == exprtoken {
				break
			}
		}
		exprn = exprExca[xi+1]
		if exprn < 0 {
			goto ret0
		}
	}
	if exprn == 0 {
		/* error ... attempt to resume parsing */
		switch Errflag {
		case 0: /* brand new error */
			exprlex.Error(exprErrorMessage(exprstate, exprtoken))
			Nerrs++
			if exprDebug >= 1 {
				__yyfmt__.Printf("%s", exprStatname(exprstate))
				__yyfmt__.Printf(" saw %s\n", exprTokname(exprtoken))
			}
			fallthrough

		case 1, 2: /* incompletely recovered error ... try again */
			Errflag = 3

			/* find a state where "error" is a legal shift action */
			for exprp >= 0 {
				exprn = exprPact[exprS[exprp].yys] + exprErrCode
				if exprn >= 0 && exprn < exprLast {
					exprstate = exprAct[exprn] /* simulate a shift of "error" */
					if exprChk[exprstate] == exprErrCode {
						goto exprstack
					}
				}

				/* the current p has no shift on "error", pop stack */
				if exprDebug >= 2 {
					__yyfmt__.Printf("error recovery pops state %d\n", exprS[exprp].yys)
				}
				exprp--
			}
			/* there is no state on the stack with an error shift ... abort */
			goto ret1

		case 3: /* no shift yet; clobber input char */
			if exprDebug >= 2 {
				__yyfmt__.Printf("error recovery discards %s\n", exprTokname(exprtoken))
			}
			if exprtoken == exprEofCode {
				goto ret1
			}
			exprrcvr.char = -1
			exprtoken = -1
			goto exprnewstate /* try again in the same state */
		}
	}

	/* reduction by production exprn */
	if exprDebug >= 2 {
		__yyfmt__.Printf("reduce %v in:\n\t%v\n", exprn, exprStatname(exprstate))
	}

	exprnt := exprn
	exprpt := exprp
	_ = exprpt // guard against "declared and not used"

	exprp -= exprR2[exprn]
	// exprp is now the index of $0. Perform the default action. Iff the
	// reduced production is ε, $1 is possibly out of range.
	if exprp+1 >= len(exprS) {
		nyys := make([]exprSymType, len(exprS)*2)
		copy(nyys, exprS)
		exprS = nyys
	}
	exprVAL = exprS[exprp+1]

	/* consult goto table to find next state */
	exprn = exprR1[exprn]
	exprg := exprPgo[exprn]
	exprj := exprg + exprS[exprp].yys + 1

	if exprj >= exprLast {
		exprstate = exprAct[exprg]
	} else {
		exprstate = exprAct[exprj]
		if exprChk[exprstate] != -exprn {
			exprstate = exprAct[exprg]
		}
	}
	// dummy call; replaced with literal code
	switch exprnt {

	case 1:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprlex.(*lexer).expr = exprDollar[1].Expr
		}
	case 2:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Expr = &matchersExpr{matchers: exprDollar[1].Selector}
		}
	case 3:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Expr = NewFilterExpr(exprDollar[1].Expr, exprDollar[2].Filter, exprDollar[3].str)
		}
	case 6:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Filter = labels.MatchRegexp
		}
	case 7:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Filter = labels.MatchEqual
		}
	case 8:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Filter = labels.MatchNotRegexp
		}
	case 9:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Filter = labels.MatchNotEqual
		}
	case 10:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Selector = exprDollar[2].Matchers
		}
	case 11:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Selector = exprDollar[2].Matchers
		}
	case 12:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
		}
	case 13:
		exprDollar = exprS[exprpt-1 : exprpt+1]
		{
			exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher}
		}
	case 14:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher)
		}
	case 15:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str)
		}
	case 16:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str)
		}
	case 17:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str)
		}
	case 18:
		exprDollar = exprS[exprpt-3 : exprpt+1]
		{
			exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str)
		}
	}
	goto exprstack /* stack new state and value */
}
@@ -0,0 +1,118 @@
package logql

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
	"text/scanner"

	"github.com/prometheus/prometheus/pkg/labels"
)

func init() {
	// Improve the error messages coming out of yacc.
	exprErrorVerbose = true
	for str, tok := range tokens {
		exprToknames[tok-exprPrivate+1] = str
	}
}

// ParseExpr parses a string and returns an Expr.
func ParseExpr(input string) (Expr, error) {
	l := lexer{
		parser: exprNewParser().(*exprParserImpl),
	}
	l.Init(strings.NewReader(input))
	l.Scanner.Error = func(_ *scanner.Scanner, msg string) {
		l.Error(msg)
	}

	e := l.parser.Parse(&l)
	if e != 0 || len(l.errs) > 0 {
		return nil, l.errs[0]
	}
	return l.expr, nil
}

// ParseMatchers parses a string and returns label matchers; if the expression
// contains anything else it returns an error.
func ParseMatchers(input string) ([]*labels.Matcher, error) {
	expr, err := ParseExpr(input)
	if err != nil {
		return nil, err
	}
	matcherExpr, ok := expr.(*matchersExpr)
	if !ok {
		return nil, errors.New("only label matchers is supported")
	}
	return matcherExpr.matchers, nil
}

var tokens = map[string]int{
	",":  COMMA,
	".":  DOT,
	"{":  OPEN_BRACE,
	"}":  CLOSE_BRACE,
	"=":  EQ,
	"!=": NEQ,
	"=~": RE,
	"!~": NRE,
	"|=": PIPE_EXACT,
	"|~": PIPE_MATCH,
}

type lexer struct {
	scanner.Scanner
	errs   []ParseError
	expr   Expr
	parser *exprParserImpl
}

func (l *lexer) Lex(lval *exprSymType) int {
	r := l.Scan()

	switch r {
	case scanner.EOF:
		return 0

	case scanner.String:
		var err error
		lval.str, err = strconv.Unquote(l.TokenText())
		if err != nil {
			l.Error(err.Error())
			return 0
		}
		return STRING
	}

	if tok, ok := tokens[l.TokenText()+string(l.Peek())]; ok {
		l.Next()
		return tok
	}

	if tok, ok := tokens[l.TokenText()]; ok {
		return tok
	}

	lval.str = l.TokenText()
	return IDENTIFIER
}

func (l *lexer) Error(msg string) {
	l.errs = append(l.errs, ParseError{
		msg:  msg,
		line: l.Line,
		col:  l.Column,
	})
}

// ParseError is returned when parsing fails.
type ParseError struct {
	msg       string
	line, col int
}

func (p ParseError) Error() string {
	return fmt.Sprintf("parse error at line %d, col %d: %s", p.line, p.col, p.msg)
}
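A hedged usage sketch (not part of this diff) of the two entry points above, again assuming the import path github.com/grafana/loki/clients/pkg/logentry/logql and made-up label names: ParseMatchers accepts a bare selector and rejects anything carrying a line filter, while parse failures surface as ParseError values with position information, as the test cases below also show.

package main

import (
	"fmt"

	"github.com/grafana/loki/clients/pkg/logentry/logql" // assumed import path
)

func main() {
	// A bare selector parses into Prometheus-style label matchers.
	matchers, err := logql.ParseMatchers(`{job="varlogs", level!="debug"}`)
	fmt.Println(matchers, err)

	// Anything beyond plain matchers (here a |= line filter) is rejected.
	_, err = logql.ParseMatchers(`{job="varlogs"} |= "error"`)
	fmt.Println(err) // "only label matchers is supported"

	// Malformed input comes back as a ParseError, e.g. for the unterminated
	// selector below: parse error at line 1, col 11: syntax error:
	// unexpected $end, expecting } or ,
	_, err = logql.ParseExpr(`{foo="bar"`)
	fmt.Println(err)
}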
@@ -0,0 +1,157 @@
package logql

import (
	"strings"
	"testing"
	"text/scanner"

	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/stretchr/testify/require"
)

func TestLex(t *testing.T) {
	for _, tc := range []struct {
		input    string
		expected []int
	}{
		{`{foo="bar"}`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
		{`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
		{`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
		{`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}},
		{`{ foo !~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NRE, STRING, CLOSE_BRACE}},
		{`{ foo = "bar", bar != "baz" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING,
			COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
		{`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
	} {
		t.Run(tc.input, func(t *testing.T) {
			actual := []int{}
			l := lexer{
				Scanner: scanner.Scanner{
					Mode: scanner.SkipComments | scanner.ScanStrings,
				},
			}
			l.Init(strings.NewReader(tc.input))
			var lval exprSymType
			for {
				tok := l.Lex(&lval)
				if tok == 0 {
					break
				}
				actual = append(actual, tok)
			}
			require.Equal(t, tc.expected, actual)
		})
	}
}

func TestParse(t *testing.T) {
	for _, tc := range []struct {
		in  string
		exp Expr
		err error
	}{
		{
			in:  `{foo="bar"}`,
			exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
		},
		{
			in:  `{ foo = "bar" }`,
			exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
		},
		{
			in:  `{ foo != "bar" }`,
			exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}},
		},
		{
			in:  `{ foo =~ "bar" }`,
			exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}},
		},
		{
			in:  `{ foo !~ "bar" }`,
			exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}},
		},
		{
			in: `{ foo = "bar", bar != "baz" }`,
			exp: &matchersExpr{matchers: []*labels.Matcher{
				mustNewMatcher(labels.MatchEqual, "foo", "bar"),
				mustNewMatcher(labels.MatchNotEqual, "bar", "baz"),
			}},
		},
		{
			in: `{foo="bar"} |= "baz"`,
			exp: &filterExpr{
				left:  &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
				ty:    labels.MatchEqual,
				match: "baz",
			},
		},
		{
			in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`,
			exp: &filterExpr{
				left: &filterExpr{
					left: &filterExpr{
						left: &filterExpr{
							left:  &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
							ty:    labels.MatchEqual,
							match: "baz",
						},
						ty:    labels.MatchRegexp,
						match: "blip",
					},
					ty:    labels.MatchNotEqual,
					match: "flip",
				},
				ty:    labels.MatchNotRegexp,
				match: "flap",
			},
		},
		{
			in: `{foo="bar}`,
			err: ParseError{
				msg:  "literal not terminated",
				line: 1,
				col:  6,
			},
		},
		{
			in: `{foo="bar"`,
			err: ParseError{
				msg:  "syntax error: unexpected $end, expecting } or ,",
				line: 1,
				col:  11,
			},
		},

		{
			in: `{foo="bar"} |~`,
			err: ParseError{
				msg:  "syntax error: unexpected $end, expecting STRING",
				line: 1,
				col:  15,
			},
		},

		{
			in: `{foo="bar"} "foo"`,
			err: ParseError{
				msg:  "syntax error: unexpected STRING, expecting != or !~ or |~ or |=",
				line: 1,
				col:  13,
			},
		},
		{
			in: `{foo="bar"} foo`,
			err: ParseError{
				msg:  "syntax error: unexpected IDENTIFIER, expecting != or !~ or |~ or |=",
				line: 1,
				col:  13,
			},
		},
	} {
		t.Run(tc.in, func(t *testing.T) {
			ast, err := ParseExpr(tc.in)
			require.Equal(t, tc.err, err)
			require.Equal(t, tc.exp, ast)
		})
	}
}
Some files were not shown because too many files have changed in this diff.