mirror of https://github.com/grafana/loki
Logql Analyzer (#6171)
* created logql-debugger page and API endpoint that returns information for each stage of the query
* added docker-compose setup for local testing
* refactored analyzer and added unit tests
* removed docs page, to be released in a separate PR once the back-end API is published
* changed the structure of the results object in the response to return the origin log line, needed for the case when only a stream selector is sent in the query
* merged the latest main and resolved conflicts
* added endpoint for readiness probe and configured CI
* fixed drone.yml discrepancy
* fixed method signature
* fixed path to Dockerfile
* added clean step
parent 9e84648f3e
commit 615941234a
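For context, a minimal sketch of calling the new analyze endpoint from Go — assuming the service listens on localhost:3001 as in the docker-compose setup in this commit; the query and log line are illustrative:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// The JSON body accepted by /api/logql-analyze: a LogQL query plus
	// the raw log lines to run through its pipeline.
	body, _ := json.Marshal(map[string]interface{}{
		"query": `{job="analyze"} | logfmt |= "error"`,
		"logs":  []string{"lvl=error msg=a"},
	})
	// Port 3001 matches the docker-compose setup below.
	resp, err := http.Post("http://localhost:3001/api/logql-analyze", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The response reports, per line and per stage, the line and labels
	// before and after the stage, and whether the line was filtered out.
	var result map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", result)
}
```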
@@ -0,0 +1,13 @@
FROM golang:1.17.9 as build

COPY . /src/loki
WORKDIR /src/loki
RUN make clean && CGO_ENABLED=0 go build ./cmd/logql-analyzer/

FROM alpine:3.15.4

RUN apk add --no-cache ca-certificates

COPY --from=build /src/loki/logql-analyzer /usr/bin/logql-analyzer

ENTRYPOINT [ "/usr/bin/logql-analyzer" ]
@@ -0,0 +1,16 @@
version: "3.3"
services:
  backend:
    build:
      context: ../../
      dockerfile: ./cmd/logql-analyzer/Dockerfile
    entrypoint: [ "/usr/bin/logql-analyzer", "--server.http-listen-port=3001" ]
    ports:
      - "3001:3001"

  docs:
    image: grafana/docs-base:latest
    volumes:
      - ../../docs/sources:/hugo/content/docs/loki/latest
    ports:
      - "3002:3002"
@@ -0,0 +1,52 @@
package main

import (
	"flag"
	"net/http"

	"github.com/go-kit/log/level"
	"github.com/gorilla/mux"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/weaveworks/common/server"

	"github.com/grafana/loki/pkg/logqlanalyzer"
	util_log "github.com/grafana/loki/pkg/util/log"
)

func main() {
	cfg := getConfig()
	util_log.InitLogger(&server.Config{
		LogLevel: cfg.LogLevel,
	}, prometheus.DefaultRegisterer)
	s, err := createServer(cfg)
	if err != nil {
		level.Error(util_log.Logger).Log("msg", "error while creating the server", "err", err)
		return
	}

	err = s.Run()
	defer s.Shutdown()
	if err != nil {
		level.Error(util_log.Logger).Log("msg", "error while running the server", "err", err)
	}
}

func getConfig() server.Config {
	cfg := server.Config{}
	cfg.RegisterFlags(flag.CommandLine)
	flag.Parse()
	return cfg
}

// createServer wires up the CORS middleware, the analyze endpoint, and a readiness probe.
func createServer(cfg server.Config) (*server.Server, error) {
	s, err := server.New(cfg)
	if err != nil {
		return nil, err
	}
	s.HTTP.Use(mux.CORSMethodMiddleware(s.HTTP))
	s.HTTP.Use(logqlanalyzer.CorsMiddleware())
	s.HTTP.Handle("/api/logql-analyze", &logqlanalyzer.LogQLAnalyzeHandler{}).Methods(http.MethodPost, http.MethodOptions)
	s.HTTP.HandleFunc("/ready", func(w http.ResponseWriter, _ *http.Request) {
		http.Error(w, "ready", http.StatusOK)
	}).Methods(http.MethodGet)
	return s, err
}
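The /ready handler above answers GET with a 200 and the plain-text body "ready", which is what the CI readiness probe mentioned in the commit message can rely on. A minimal check from Go, again assuming the server listens on localhost:3001 as in the compose file:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:3001/ready")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	// http.Error on the server side sets the status code and writes "ready".
	fmt.Println("status:", resp.StatusCode) // expect 200 when the server is up
}
```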
@@ -0,0 +1,164 @@
package logqlanalyzer

import (
	"fmt"
	"time"
	"unsafe"

	"github.com/pkg/errors"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql/parser"

	"github.com/grafana/loki/pkg/logql/log"
	"github.com/grafana/loki/pkg/logql/syntax"
)

type logQLAnalyzer struct {
}

func (a logQLAnalyzer) analyze(query string, logs []string) (*Result, error) {
	expr, err := syntax.ParseLogSelector(query, true)
	if err != nil {
		return nil, errors.Wrap(err, "invalid query")
	}
	streamSelector, stages, err := a.extractExpressionParts(expr)
	if err != nil {
		return nil, errors.Wrap(err, "can not extract parts of expression")
	}
	pipeline, err := expr.Pipeline()
	if err != nil {
		return nil, errors.Wrap(err, "can not create pipeline")
	}
	streamLabels, err := parser.ParseMetric(streamSelector)
	if err != nil {
		return nil, errors.Wrap(err, "can not parse labels from stream selector")
	}
	analyzer := NewPipelineAnalyzer(pipeline, streamLabels)
	response := &Result{StreamSelector: streamSelector, Stages: stages, Results: make([]LineResult, 0, len(logs))}
	for _, line := range logs {
		analysisRecords := analyzer.AnalyzeLine(line)
		response.Results = append(response.Results, mapAllToLineResult(line, analysisRecords))
	}
	return response, nil
}

func (a logQLAnalyzer) extractExpressionParts(expr syntax.LogSelectorExpr) (string, []string, error) {
	switch expr := expr.(type) {
	case *syntax.PipelineExpr:
		stages := make([]string, 0, len(expr.MultiStages)+1)
		streamSelector := expr.Left.String()
		for _, stage := range expr.MultiStages {
			stages = append(stages, stage.String())
		}
		return streamSelector, stages, nil
	case *syntax.MatchersExpr:
		return expr.String(), []string{}, nil
	default:
		return "", nil, fmt.Errorf("unsupported type of expression")
	}
}

func mapAllToLineResult(originLine string, analysisRecords []StageAnalysisRecord) LineResult {
	stageRecords := make([]StageRecord, 0, len(analysisRecords))
	for _, record := range analysisRecords {
		if !record.Processed {
			break
		}
		stageRecords = append(stageRecords, StageRecord{
			LineBefore:   record.LineBefore,
			LabelsBefore: mapAllToLabelsResponse(record.LabelsBefore),
			LineAfter:    record.LineAfter,
			LabelsAfter:  mapAllToLabelsResponse(record.LabelsAfter),
			FilteredOut:  record.FilteredOut,
		})
	}
	return LineResult{originLine, stageRecords}
}

func mapAllToLabelsResponse(labels labels.Labels) []Label {
	result := make([]Label, 0, len(labels))
	for _, label := range labels {
		result = append(result, Label{Name: label.Name, Value: label.Value})
	}
	return result
}

type PipelineAnalyzer interface {
	AnalyzeLine(line string) []StageAnalysisRecord
}
type noopPipelineAnalyzer struct {
}

func (n noopPipelineAnalyzer) AnalyzeLine(_ string) []StageAnalysisRecord {
	return []StageAnalysisRecord{}
}

type streamPipelineAnalyzer struct {
	origin       log.AnalyzablePipeline
	stagesCount  int
	streamLabels labels.Labels
}

func NewPipelineAnalyzer(origin log.Pipeline, streamLabels labels.Labels) PipelineAnalyzer {
	if o, ok := origin.(log.AnalyzablePipeline); ok {
		stagesCount := len(o.Stages())
		return &streamPipelineAnalyzer{o, stagesCount, streamLabels}
	}
	return &noopPipelineAnalyzer{}
}

func (p streamPipelineAnalyzer) AnalyzeLine(line string) []StageAnalysisRecord {
	stages := p.origin.Stages()
	stageRecorders := make([]log.Stage, 0, len(stages))
	records := make([]StageAnalysisRecord, len(stages))
	for i, stage := range stages {
		stageRecorders = append(stageRecorders, StageAnalysisRecorder{origin: stage,
			records:    records,
			stageIndex: i,
		})
	}
	stream := log.NewStreamPipeline(stageRecorders, p.origin.LabelsBuilder().ForLabels(p.streamLabels, p.streamLabels.Hash()))
	_, _, _ = stream.ProcessString(time.Now().UnixMilli(), line)
	return records
}

type StageAnalysisRecorder struct {
	log.Stage
	origin     log.Stage
	stageIndex int
	records    []StageAnalysisRecord
}

func (s StageAnalysisRecorder) Process(ts int64, line []byte, lbs *log.LabelsBuilder) ([]byte, bool) {
	lineBefore := unsafeGetString(line)
	labelsBefore := lbs.UnsortedLabels(nil)

	lineResult, ok := s.origin.Process(ts, line, lbs)

	s.records[s.stageIndex] = StageAnalysisRecord{
		Processed:    true,
		LabelsBefore: labelsBefore,
		LineBefore:   lineBefore,
		LabelsAfter:  lbs.UnsortedLabels(nil),
		LineAfter:    unsafeGetString(lineResult),
		FilteredOut:  !ok,
	}
	return lineResult, ok
}
func (s StageAnalysisRecorder) RequiredLabelNames() []string {
	return s.origin.RequiredLabelNames()
}

type StageAnalysisRecord struct {
	Processed    bool
	LineBefore   string
	LabelsBefore labels.Labels
	LineAfter    string
	LabelsAfter  labels.Labels
	FilteredOut  bool
}

func unsafeGetString(buf []byte) string {
	return *((*string)(unsafe.Pointer(&buf)))
}
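The core trick above is that StageAnalysisRecorder decorates each real pipeline stage and writes a before/after snapshot into a shared slice as the line flows through. A stripped-down sketch of the same pattern, with toy Stage implementations standing in for the Loki interfaces:

```go
package main

import (
	"fmt"
	"strings"
)

// Stage is a stand-in for Loki's log.Stage: it rewrites a line and
// reports whether the line should be kept.
type Stage interface {
	Process(line string) (string, bool)
}

type upper struct{}

func (upper) Process(line string) (string, bool) { return strings.ToUpper(line), true }

type contains struct{ substr string }

func (c contains) Process(line string) (string, bool) { return line, strings.Contains(line, c.substr) }

// record captures the state around one stage, like StageAnalysisRecord.
type record struct {
	before, after string
	filteredOut   bool
}

// recorder decorates a real stage and writes its observation into a
// shared slice at a fixed index, mirroring StageAnalysisRecorder.
type recorder struct {
	origin  Stage
	records []record
	index   int
}

func (r recorder) Process(line string) (string, bool) {
	after, ok := r.origin.Process(line)
	r.records[r.index] = record{before: line, after: after, filteredOut: !ok}
	return after, ok
}

func main() {
	stages := []Stage{upper{}, contains{"ERROR"}}
	records := make([]record, len(stages))
	line := "lvl=error msg=a"
	ok := true
	for i, s := range stages {
		// Wrap each stage so processing leaves a per-stage audit trail.
		line, ok = recorder{origin: s, records: records, index: i}.Process(line)
		if !ok {
			break
		}
	}
	for i, rec := range records {
		fmt.Printf("stage %d: %q -> %q (filtered out: %v)\n", i, rec.before, rec.after, rec.filteredOut)
	}
}
```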
@@ -0,0 +1,84 @@
package logqlanalyzer

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func Test_logQLAnalyzer_analyze_stages(t *testing.T) {
	tests := map[string]struct {
		query                  string
		expectedStreamSelector string
		expectedStages         []string
	}{
		"expected 2 stages and streamSelector to be detected": {
			query:                  "{job=\"analyze\"} | json |= \"info\"",
			expectedStreamSelector: "{job=\"analyze\"}",
			expectedStages: []string{
				"| json",
				"|= \"info\"",
			},
		},
		"expected 2 stages and streamSelector to be detected even if query contains 4 stages": {
			query:                  "{job=\"analyze\"} | pattern \"<_> <level> <msg>\" |= \"info\" |~ \"some_expr\"",
			expectedStreamSelector: "{job=\"analyze\"}",
			expectedStages: []string{
				"| pattern \"<_> <level> <msg>\"",
				"|= \"info\" |~ \"some_expr\"",
			},
		},
	}
	for name, data := range tests {
		t.Run(name, func(t *testing.T) {
			result, err := logQLAnalyzer{}.analyze(data.query, []string{})
			require.NoError(t, err)
			require.Equal(t, data.expectedStreamSelector, result.StreamSelector)
			require.Equal(t, data.expectedStages, result.Stages)
		})
	}
}

func Test_logQLAnalyzer_analyze_expected_1_stage_record_for_each_log_line(t *testing.T) {
	line0 := "lvl=error msg=a"
	line1 := "lvl=info msg=b"

	result, err := logQLAnalyzer{}.analyze("{job=\"analyze\"} | logfmt", []string{line0, line1})

	require.NoError(t, err)
	require.Equal(t, 2, len(result.Results))
	require.Equal(t, 1, len(result.Results[0].StageRecords))
	require.Equal(t, 1, len(result.Results[1].StageRecords))
}

func Test_logQLAnalyzer_analyze_expected_all_stage_records_to_be_correct(t *testing.T) {
	line := "lvl=error msg=a"
	reformattedLine := "level=error message=A"
	result, err := logQLAnalyzer{}.analyze("{job=\"analyze\"} | logfmt | line_format \"level={{.lvl}} message={{.msg | ToUpper}}\" |= \"info\"", []string{line})
	require.NoError(t, err)
	require.Equal(t, 1, len(result.Results))
	require.Equal(t, 3, len(result.Results[0].StageRecords), "expected records for three stages")
	streamLabels := []Label{{"job", "analyze"}}
	parsedLabels := append(streamLabels, []Label{{"lvl", "error"}, {"msg", "a"}}...)
	require.Equal(t, StageRecord{
		LineBefore:   line,
		LabelsBefore: streamLabels,
		LineAfter:    line,
		LabelsAfter:  parsedLabels,
		FilteredOut:  false,
	}, result.Results[0].StageRecords[0])
	require.Equal(t, StageRecord{
		LineBefore:   line,
		LabelsBefore: parsedLabels,
		LineAfter:    reformattedLine,
		LabelsAfter:  parsedLabels,
		FilteredOut:  false,
	}, result.Results[0].StageRecords[1], "line is expected to be reformatted on this stage")
	require.Equal(t, StageRecord{
		LineBefore:   reformattedLine,
		LabelsBefore: parsedLabels,
		LineAfter:    reformattedLine,
		LabelsAfter:  parsedLabels,
		FilteredOut:  true,
	}, result.Results[0].StageRecords[2], "line is expected to be filtered out on this stage")
}
@@ -0,0 +1,94 @@
package logqlanalyzer

import (
	"context"
	"encoding/json"
	"io/ioutil"
	"net/http"

	"github.com/go-kit/log/level"
	"github.com/gorilla/mux"

	util_log "github.com/grafana/loki/pkg/util/log"
)

func CorsMiddleware() mux.MiddlewareFunc {
	return func(h http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("Access-Control-Allow-Origin", "*")
			w.Header().Set("Access-Control-Allow-Headers", "Content-Type")
			if r.Method == http.MethodOptions {
				w.WriteHeader(200)
				return
			}
			h.ServeHTTP(w, r)
		})
	}
}

type LogQLAnalyzeHandler struct {
	analyzer logQLAnalyzer
}

func (s *LogQLAnalyzeHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	payload, err := ioutil.ReadAll(req.Body)
	if err != nil {
		writeError(req.Context(), w, err, http.StatusBadRequest, "unable to read request body")
		return
	}
	requestBody := &Request{}
	err = json.Unmarshal(payload, requestBody)
	if err != nil {
		writeError(req.Context(), w, err, http.StatusBadRequest, "unable to unmarshal request body")
		return
	}
	result, err := s.analyzer.analyze(requestBody.Query, requestBody.Logs)
	if err != nil {
		writeError(req.Context(), w, err, http.StatusBadRequest, "unable to analyze query")
		return
	}
	responseBody, err := json.Marshal(result)
	if err != nil {
		writeError(req.Context(), w, err, http.StatusInternalServerError, "can not marshal the response")
		return
	}
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	if n, err := w.Write(responseBody); err != nil {
		level.Error(util_log.WithContext(req.Context(), util_log.Logger)).Log("msg", "error writing response", "bytesWritten", n, "err", err)
	}
}

func writeError(ctx context.Context, w http.ResponseWriter, err error, statusCode int, msg string) {
	level.Error(util_log.WithContext(ctx, util_log.Logger)).Log("msg", msg, "err", err)
	http.Error(w, err.Error(), statusCode)
}

type Request struct {
	Query string   `json:"query"`
	Logs  []string `json:"logs"`
}

type Result struct {
	StreamSelector string       `json:"stream_selector"`
	Stages         []string     `json:"stages"`
	Results        []LineResult `json:"results"`
}

type LineResult struct {
	OriginLine   string        `json:"origin_line"`
	StageRecords []StageRecord `json:"stage_records"`
}

type StageRecord struct {
	LineBefore   string  `json:"line_before"`
	LabelsBefore []Label `json:"labels_before"`
	LineAfter    string  `json:"line_after"`
	LabelsAfter  []Label `json:"labels_after"`
	FilteredOut  bool    `json:"filtered_out"`
}

type Label struct {
	Name  string `json:"name"`
	Value string `json:"value"`
}
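For reference, marshaling a small hand-built Result shows the wire format these structs produce; the values here are illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/loki/pkg/logqlanalyzer"
)

func main() {
	// One log line passing through a single "| logfmt" stage.
	result := logqlanalyzer.Result{
		StreamSelector: `{job="analyze"}`,
		Stages:         []string{"| logfmt"},
		Results: []logqlanalyzer.LineResult{{
			OriginLine: "lvl=error msg=a",
			StageRecords: []logqlanalyzer.StageRecord{{
				LineBefore:   "lvl=error msg=a",
				LabelsBefore: []logqlanalyzer.Label{{Name: "job", Value: "analyze"}},
				LineAfter:    "lvl=error msg=a",
				LabelsAfter: []logqlanalyzer.Label{
					{Name: "job", Value: "analyze"},
					{Name: "lvl", Value: "error"},
					{Name: "msg", Value: "a"},
				},
				FilteredOut: false,
			}},
		}},
	}
	out, err := json.MarshalIndent(result, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints the snake_case keys (stream_selector, stage_records, ...)
	// defined by the struct tags above.
	fmt.Println(string(out))
}
```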