mirror of https://github.com/grafana/grafana
SSE/Alerting: First pass at query/condition translation (#31693)
- Takes the conditions property from the settings column of an alert in the alerts table and turns it into an ngalert condition with the queries and a classic condition.
- Adds a temporary REST API endpoint that takes the dashboard conditions JSON, translates it to SSE queries plus a classic condition, and executes it (only enabled in dev mode).
- Changes expressions to catch query responses with a non-nil error property.
- Adds two new states for an NG instance result (NoData, Error) and updates evaluation to match those states.
- Changes AsDataFrame (for the frontend) from bool to string to represent the additional states.
- Fixes a bug in the condition model so it accepts the first Operator as an empty string.
- In ngalert, adds GetQueryDataRequest, which was part of execute and is still called from there; this allows getting the expression request from a condition so that the "pipeline" can be built.
- Updates AsDataFrame for the eval result to be row based so it displays a little better for now.

A usage sketch of the new translation entry point follows the commit metadata below.

pull/32265/head
parent 24cb059a6b
commit 7bb79158ed
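The diff below adds two testdata fixtures (mixedSharedUnsharedTimeRange.json and sameQueryDifferentTimeRange.json) and the translate package with its test. As a minimal sketch of how the new entry point could be exercised outside the dev-mode endpoint: the translate package's import path and the standalone main wiring are assumed here, and the datasource lookup handler mirrors registerGetDsInfoHandler from the test.

package main

import (
	"fmt"
	"io/ioutil"
	"log"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/models"
	// Assumed location of the new package; adjust to wherever translate lives in the tree.
	"github.com/grafana/grafana/pkg/services/ngalert/translate"
)

func main() {
	// DashboardAlertConditions dispatches models.GetDataSourceQuery on the bus, so a
	// lookup handler must be registered first (mirrors the test's registerGetDsInfoHandler).
	bus.AddHandler("example", func(query *models.GetDataSourceQuery) error {
		query.Result = &models.DataSource{Id: query.Id, OrgId: 1, Uid: "000000002"}
		return nil
	})

	// The "conditions" JSON as stored in the alert table's settings column.
	raw, err := ioutil.ReadFile("testdata/sameQueryDifferentTimeRange.json")
	if err != nil {
		log.Fatal(err)
	}

	// Translate to an ngalert Condition: one data source query per unique
	// (refId, time range) pair, plus a classic_conditions expression that references them.
	cond, err := translate.DashboardAlertConditions(raw, 1)
	if err != nil {
		log.Fatal(err)
	}

	for _, q := range cond.QueriesAndExpressions {
		fmt.Println("query refId:", q.RefID)
	}
	fmt.Println("condition refId:", cond.RefID) // points at the classic condition query
}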
@@ -0,0 +1,354 @@
{
  "conditions": [
    {
      "evaluator": {
        "params": [
          3
        ],
        "type": "gt"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 60,
            "valuesCSV": "0,0,2,2,1,1"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "B",
          "scenarioId": "predictable_pulse",
          "stringInput": ""
        },
        "params": [
          "B",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "avg"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          2,
          5
        ],
        "type": "within_range"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 60,
            "valuesCSV": "0,0,2,2,1,1"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "B",
          "scenarioId": "predictable_pulse",
          "stringInput": ""
        },
        "params": [
          "B",
          "10m",
          "now-5m"
        ]
      },
      "reducer": {
        "params": [],
        "type": "max"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          6
        ],
        "type": "gt"
      },
      "operator": {
        "type": "or"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 60,
            "valuesCSV": "0,0,2,2,1,1"
          },
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "A",
          "scenarioId": "predictable_csv_wave",
          "stringInput": ""
        },
        "params": [
          "A",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "sum"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          7
        ],
        "type": "gt"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 60,
            "valuesCSV": "0,0,2,2,1,1"
          },
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "A",
          "scenarioId": "predictable_csv_wave",
          "stringInput": ""
        },
        "params": [
          "A",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "last"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [],
        "type": "no_value"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 60,
            "valuesCSV": "0,0,2,2,1,1"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "C",
          "scenarioId": "no_data_points",
          "stringInput": ""
        },
        "params": [
          "C",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "diff"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          9
        ],
        "type": "gt"
      },
      "operator": {
        "type": "or"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 30,
            "valuesCSV": "1,1,6,6,3,3"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "D",
          "scenarioId": "predictable_csv_wave",
          "stringInput": ""
        },
        "params": [
          "D",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "diff_abs"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          10
        ],
        "type": "gt"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 30,
            "valuesCSV": "1,1,6,6,3,3"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "D",
          "scenarioId": "predictable_csv_wave",
          "stringInput": ""
        },
        "params": [
          "D",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "percent_diff"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          11
        ],
        "type": "gt"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 4,
        "model": {
          "alias": "",
          "csvWave": {
            "timeStep": 30,
            "valuesCSV": "1,1,6,6,3,3"
          },
          "hide": false,
          "lines": 10,
          "points": [],
          "pulseWave": {
            "offCount": 3,
            "offValue": 1,
            "onCount": 3,
            "onValue": 2,
            "timeStep": 60
          },
          "refId": "D",
          "scenarioId": "predictable_csv_wave",
          "stringInput": ""
        },
        "params": [
          "D",
          "10m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "percent_diff_abs"
      },
      "type": "query"
    }
  ]
}

@@ -0,0 +1,63 @@
{
  "conditions": [
    {
      "evaluator": {
        "params": [
          0
        ],
        "type": "lt"
      },
      "operator": {
        "type": ""
      },
      "query": {
        "datasourceId": 2,
        "model": {
          "expr": "avg_over_time(sum by (instance) (up)[1h:5m])",
          "interval": "",
          "legendFormat": "",
          "refId": "A"
        },
        "params": [
          "A",
          "5m",
          "now"
        ]
      },
      "reducer": {
        "params": [],
        "type": "avg"
      },
      "type": "query"
    },
    {
      "evaluator": {
        "params": [
          0
        ],
        "type": "gt"
      },
      "operator": {
        "type": "and"
      },
      "query": {
        "datasourceId": 2,
        "model": {
          "expr": "avg_over_time(sum by (instance) (up)[1h:5m])",
          "interval": "",
          "legendFormat": "",
          "refId": "A"
        },
        "params": [
          "A",
          "10m",
          "now-5m"
        ]
      },
      "reducer": {
        "params": [],
        "type": "avg"
      },
      "type": "query"
    }
  ]}
@@ -0,0 +1,345 @@
package translate

import (
	"encoding/json"
	"fmt"
	"sort"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/expr"
	"github.com/grafana/grafana/pkg/expr/classic"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/ngalert/eval"
	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
)

// DashboardAlertConditions turns dashboard alerting conditions into server side expression queries and a
// classic conditions operation. A Condition from the ngalert models package is returned if the
// translation succeeds in creating an expression that can be parsed.
// A query is created for each unique referenced query in the dashboard. Each query is considered unique
// based on the RefID and the time range. Therefore, if the same RefID has multiple time ranges in the dashboard
// condition, new RefIDs will be created.
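//
// For example, two dashboard conditions that both reference refID "A" but use the time
// ranges ("5m", "now") and ("10m", "now-5m") translate into two data source queries
// ("A" and "B") plus a classic conditions expression query ("C") that the translated
// conditions point at (see sameQueryDifferentTimeRange.json and its test case).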
func DashboardAlertConditions(rawDCondJSON []byte, orgID int64) (*ngmodels.Condition, error) {
	oldCond := dashConditionsJSON{}

	err := json.Unmarshal(rawDCondJSON, &oldCond)
	if err != nil {
		return nil, err
	}

	ngCond, err := oldCond.GetNew(orgID)
	if err != nil {
		return nil, err
	}

	backendReq, err := eval.GetQueryDataRequest(eval.AlertExecCtx{ExpressionsEnabled: true}, ngCond, time.Unix(500, 0))

	if err != nil {
		return nil, err
	}

	svc := &expr.Service{}
	_, err = svc.BuildPipeline(backendReq)
	if err != nil {
		return nil, err
	}
	return ngCond, nil
}

type dashConditionsJSON struct {
	Conditions []dashAlertingConditionJSON `json:"conditions"`
}

// dashAlertingConditionJSON is like classic.ClassicConditionJSON except that it
// includes the model property with the query.
type dashAlertingConditionJSON struct {
	Evaluator conditionEvalJSON `json:"evaluator"`

	Operator struct {
		Type string `json:"type"`
	} `json:"operator"`

	Query struct {
		Params       []string
		DatasourceID int64 `json:""`
		Model        json.RawMessage
	} `json:"query"`

	Reducer struct {
		// Params []interface{} `json:"params"` (Unused)
		Type string `json:"type"`
	}
}

type conditionEvalJSON struct {
	Params []float64 `json:"params"`
	Type   string    `json:"type"` // e.g. "gt"
}

func (dc *dashConditionsJSON) GetNew(orgID int64) (*ngmodels.Condition, error) {
	refIDtoCondIdx := make(map[string][]int) // a map of original refIDs to their corresponding condition indices
	for i, cond := range dc.Conditions {
		if len(cond.Query.Params) != 3 {
			return nil, fmt.Errorf("unexpected number of query parameters in cond %v, want 3 got %v", i+1, len(cond.Query.Params))
		}
		refID := cond.Query.Params[0]
		refIDtoCondIdx[refID] = append(refIDtoCondIdx[refID], i)
	}

	newRefIDstoCondIdx := make(map[string][]int) // a map of the new refIDs to their corresponding condition indices

	refIDs := make([]string, 0, len(refIDtoCondIdx)) // a unique sorted list of the original refIDs
	for refID := range refIDtoCondIdx {
		refIDs = append(refIDs, refID)
	}
	sort.Strings(refIDs)

	newRefIDsToTimeRanges := make(map[string][2]string) // a map of new RefIDs to their time range string tuple representation
	for _, refID := range refIDs {
		condIdxes := refIDtoCondIdx[refID]

		if len(condIdxes) == 1 {
			// If the refID is used in only one condition, keep the letter as the new refID
			newRefIDstoCondIdx[refID] = append(newRefIDstoCondIdx[refID], condIdxes[0])
			newRefIDsToTimeRanges[refID] = [2]string{dc.Conditions[condIdxes[0]].Query.Params[1], dc.Conditions[condIdxes[0]].Query.Params[2]}
			continue
		}

		// track unique time ranges within the same refID
		timeRangesToCondIdx := make(map[[2]string][]int) // a map of the time range tuple to the condition index
		for _, idx := range condIdxes {
			timeParamFrom := dc.Conditions[idx].Query.Params[1]
			timeParamTo := dc.Conditions[idx].Query.Params[2]
			key := [2]string{timeParamFrom, timeParamTo}
			timeRangesToCondIdx[key] = append(timeRangesToCondIdx[key], idx)
		}

		if len(timeRangesToCondIdx) == 1 {
			// if all conditions share the same time range, there is no need to create a new query with a new RefID
			for i := range condIdxes {
				newRefIDstoCondIdx[refID] = append(newRefIDstoCondIdx[refID], condIdxes[i])
				newRefIDsToTimeRanges[refID] = [2]string{dc.Conditions[condIdxes[i]].Query.Params[1], dc.Conditions[condIdxes[i]].Query.Params[2]}
			}
			continue
		}

		// This referenced query/refID has different time ranges, so new queries are needed for each unique time range.

		timeRanges := make([][2]string, 0, len(timeRangesToCondIdx)) // a sorted list of unique time ranges for the query
		for tr := range timeRangesToCondIdx {
			timeRanges = append(timeRanges, tr)
		}

		sort.Slice(timeRanges, func(i, j int) bool {
			switch {
			case timeRanges[i][0] < timeRanges[j][0]:
				return true
			case timeRanges[i][0] > timeRanges[j][0]:
				return false
			default:
				return timeRanges[i][1] < timeRanges[j][1]
			}
		})

		for _, tr := range timeRanges {
			idxes := timeRangesToCondIdx[tr]
			for i := 0; i < len(idxes); i++ {
				newLetter, err := getNewRefID(newRefIDstoCondIdx)
				if err != nil {
					return nil, err
				}
				newRefIDstoCondIdx[newLetter] = append(newRefIDstoCondIdx[newLetter], idxes[i])
				newRefIDsToTimeRanges[newLetter] = [2]string{dc.Conditions[idxes[i]].Query.Params[1], dc.Conditions[idxes[i]].Query.Params[2]}
			}
		}
	}

	newRefIDs := make([]string, 0, len(newRefIDstoCondIdx)) // newRefIDs is a sorted list of the unique refIDs of the new queries
	for refID := range newRefIDstoCondIdx {
		newRefIDs = append(newRefIDs, refID)
	}
	sort.Strings(newRefIDs)

	ngCond := &ngmodels.Condition{}
	condIdxToNewRefID := make(map[int]string) // a map of condition indices to the RefIDs of new queries

	// build the new data source queries
	for _, refID := range newRefIDs {
		condIdxes := newRefIDstoCondIdx[refID]
		for i, condIdx := range condIdxes {
			condIdxToNewRefID[condIdx] = refID
			if i > 0 {
				// only create each unique query once
				continue
			}

			var queryObj map[string]interface{} // copy the model
			err := json.Unmarshal(dc.Conditions[condIdx].Query.Model, &queryObj)
			if err != nil {
				return nil, err
			}

			getDsInfo := &models.GetDataSourceQuery{
				OrgId: orgID,
				Id:    dc.Conditions[condIdx].Query.DatasourceID,
			}

			if err := bus.Dispatch(getDsInfo); err != nil {
				return nil, fmt.Errorf("could not find datasource: %w", err)
			}

			queryObj["datasource"] = getDsInfo.Result.Name
			queryObj["datasourceUid"] = getDsInfo.Result.Uid
			queryObj["refId"] = refID

			encodedObj, err := json.Marshal(queryObj)
			if err != nil {
				return nil, err
			}

			rawFrom := newRefIDsToTimeRanges[refID][0]
			rawTo := newRefIDsToTimeRanges[refID][1]

			rTR, err := getRelativeDuration(rawFrom, rawTo)
			if err != nil {
				return nil, err
			}

			alertQuery := ngmodels.AlertQuery{
				RefID:             refID,
				Model:             encodedObj,
				RelativeTimeRange: *rTR,
				DatasourceUID:     getDsInfo.Uid,
			}
			ngCond.QueriesAndExpressions = append(ngCond.QueriesAndExpressions, alertQuery)
		}
	}

	// build the new classic condition pointing at our new equivalent queries
	conditions := make([]classic.ClassicConditionJSON, len(dc.Conditions))
	for i, cond := range dc.Conditions {
		newCond := classic.ClassicConditionJSON{}
		newCond.Evaluator = classic.ConditionEvalJSON{
			Type:   cond.Evaluator.Type,
			Params: cond.Evaluator.Params,
		}
		newCond.Operator.Type = cond.Operator.Type
		newCond.Query.Params = append(newCond.Query.Params, condIdxToNewRefID[i])
		newCond.Reducer.Type = cond.Reducer.Type

		conditions[i] = newCond
	}

	ccRefID, err := getNewRefID(newRefIDstoCondIdx) // get refID for the classic condition
	if err != nil {
		return nil, err
	}
	ngCond.RefID = ccRefID // set the alert condition to point to the classic condition
	ngCond.OrgID = orgID

	exprModel := struct {
		Type       string                         `json:"type"`
		RefID      string                         `json:"refId"`
		Datasource string                         `json:"datasource"`
		Conditions []classic.ClassicConditionJSON `json:"conditions"`
	}{
		"classic_conditions",
		ccRefID,
		"__expr__",
		conditions,
	}

	exprModelJSON, err := json.Marshal(&exprModel)
	if err != nil {
		return nil, err
	}

	ccAlertQuery := ngmodels.AlertQuery{
		RefID: ccRefID,
		Model: exprModelJSON,
	}

	ngCond.QueriesAndExpressions = append(ngCond.QueriesAndExpressions, ccAlertQuery)

	for i := range ngCond.QueriesAndExpressions {
		err := ngCond.QueriesAndExpressions[i].PreSave() // Set query model properties
		if err != nil {
			return nil, err
		}
	}

	sort.Slice(ngCond.QueriesAndExpressions, func(i, j int) bool {
		return ngCond.QueriesAndExpressions[i].RefID < ngCond.QueriesAndExpressions[j].RefID
	})

	return ngCond, nil
}

const alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"

// getNewRefID finds the first capital letter in the alphabet not in use
// to use for a new RefID. It errors if it runs out of letters.
//
// TODO: Research if there is a limit. If so, enforce it by
// number of queries, not letters. If there is no limit, generate more types
// of refIDs.
func getNewRefID(refIDs map[string][]int) (string, error) {
	for _, r := range alpha {
		sR := string(r)
		if _, ok := refIDs[sR]; ok {
			continue
		}
		return sR, nil
	}
	return "", fmt.Errorf("ran out of letters when creating expression")
}

// getRelativeDuration turns the alerting durations for dashboard conditions
// into a relative time range.
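//
// For example, the dashboard parameters ("10m", "now-5m") become a range of
// From: 600s, To: 300s, both relative to the evaluation time.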
func getRelativeDuration(rawFrom, rawTo string) (*ngmodels.RelativeTimeRange, error) {
	fromD, err := getFrom(rawFrom)
	if err != nil {
		return nil, err
	}

	toD, err := getTo(rawTo)
	if err != nil {
		return nil, err
	}
	return &ngmodels.RelativeTimeRange{
		From: ngmodels.Duration(fromD),
		To:   ngmodels.Duration(toD),
	}, nil
}

func getFrom(from string) (time.Duration, error) {
	fromRaw := strings.Replace(from, "now-", "", 1)

	d, err := time.ParseDuration("-" + fromRaw)
	if err != nil {
		return 0, err
	}
	return -d, err
}

func getTo(to string) (time.Duration, error) {
	if to == "now" {
		return 0, nil
	} else if strings.HasPrefix(to, "now-") {
		withoutNow := strings.Replace(to, "now-", "", 1)

		d, err := time.ParseDuration("-" + withoutNow)
		if err != nil {
			return 0, err
		}
		return -d, nil
	}

	d, err := time.ParseDuration(to)
	if err != nil {
		return 0, err
	}
	return -d, nil
}
@@ -0,0 +1,146 @@
package translate

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"path/filepath"
	"testing"
	"time"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/expr/classic"
	"github.com/grafana/grafana/pkg/models"
	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/stretchr/testify/require"
)

func TestDashboardAlertConditions(t *testing.T) {
	registerGetDsInfoHandler()
	var tests = []struct {
		name string
		// inputJSONFName, at least for now, is the "conditions" JSON as it will appear within the
		// alert table settings column, which means it has already been run through the dashboard
		// alerting extractor. It is the input.
		inputJSONFName string

		// Condition is quite large (and has unexported things), so spot check misc attributes.
		spotCheckFn func(t *testing.T, cond *ngmodels.Condition)
	}{
		{
			name:           "two conditions one query but different time ranges",
			inputJSONFName: `sameQueryDifferentTimeRange.json`,
			spotCheckFn: func(t *testing.T, cond *ngmodels.Condition) {
				require.Equal(t, "C", cond.RefID, "unexpected refId for condition")
				require.Equal(t, 3, len(cond.QueriesAndExpressions), "unexpected query/expression array length")

				firstQuery := cond.QueriesAndExpressions[0]
				require.Equal(t, "A", firstQuery.RefID, "unexpected refId for first query")
				require.Equal(t, ngmodels.RelativeTimeRange{
					From: ngmodels.Duration(time.Second * 600),
					To:   ngmodels.Duration(time.Second * 300),
				}, firstQuery.RelativeTimeRange, "unexpected timerange for first query")

				secondQuery := cond.QueriesAndExpressions[1]
				require.Equal(t, "B", secondQuery.RefID, "unexpected refId for second query")
				require.Equal(t, ngmodels.RelativeTimeRange{
					From: ngmodels.Duration(time.Second * 300),
					To:   ngmodels.Duration(0),
				}, secondQuery.RelativeTimeRange, "unexpected timerange for second query")

				condQuery := cond.QueriesAndExpressions[2]
				require.Equal(t, "C", condQuery.RefID, "unexpected refId for condition query")
				isExpr, err := condQuery.IsExpression()
				require.NoError(t, err)
				require.Equal(t, true, isExpr, "third query should be an expression")

				c := struct {
					Conditions []classic.ClassicConditionJSON `json:"conditions"`
				}{}
				err = json.Unmarshal(condQuery.Model, &c)
				require.NoError(t, err)

				require.Equal(t, 2, len(c.Conditions), "expected 2 conditions in classic condition")

				// This is "correct" in that the condition gets the correct time range,
				// but a bit odd that it creates B then A; we can look into changing that
				// later.
				firstCond := c.Conditions[0]
				require.Equal(t, "lt", firstCond.Evaluator.Type, "expected first cond to use lt")
				require.Equal(t, "B", firstCond.Query.Params[0], "expected first cond to reference B")

				secondCond := c.Conditions[1]
				require.Equal(t, "gt", secondCond.Evaluator.Type, "expected second cond to use gt")
				require.Equal(t, "A", secondCond.Query.Params[0], "expected second cond to reference A")
			},
		},
		{
			name:           "mixed shared and unshared time ranges",
			inputJSONFName: `mixedSharedUnsharedTimeRange.json`,
			spotCheckFn: func(t *testing.T, cond *ngmodels.Condition) {
				require.Equal(t, "G", cond.RefID, "unexpected refId for condition")
				require.Equal(t, 7, len(cond.QueriesAndExpressions), "unexpected query/expression array length")

				condQuery := cond.QueriesAndExpressions[6]
				isExpr, err := condQuery.IsExpression()
				require.NoError(t, err)
				require.Equal(t, true, isExpr, "expected last query to be an expression")

				c := struct {
					Conditions []classic.ClassicConditionJSON `json:"conditions"`
				}{}
				err = json.Unmarshal(condQuery.Model, &c)
				require.NoError(t, err)

				require.Equal(t, 8, len(c.Conditions), "expected 8 conditions in classic condition")

				firstCond := c.Conditions[0]
				require.Equal(t, "gt", firstCond.Evaluator.Type, "expected first cond to use gt")
				require.Equal(t, "avg", firstCond.Reducer.Type, "expected first cond to use reducer avg")
				firstCondRefID := firstCond.Query.Params[0]

				aq, err := alertRuleByRefId(cond, firstCondRefID)
				require.NoError(t, err)
				require.Equal(t, ngmodels.Duration(300*time.Second), aq.RelativeTimeRange.From,
					"expected first condition to reference a query with a from of 300 seconds")
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			jsonFile := filepath.Join("testdata", tt.inputJSONFName)
			//nolint:GOSEC
			b, err := ioutil.ReadFile(jsonFile)

			require.NoError(t, err)

			cond, err := DashboardAlertConditions(b, 1)
			require.NoError(t, err)

			tt.spotCheckFn(t, cond)
		})
	}
}

func alertRuleByRefId(cond *ngmodels.Condition, refID string) (ngmodels.AlertQuery, error) {
	for _, aq := range cond.QueriesAndExpressions {
		if aq.RefID == refID {
			return aq, nil
		}
	}
	return ngmodels.AlertQuery{}, fmt.Errorf("query with refId %v not found", refID)
}

func registerGetDsInfoHandler() {
	bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
		switch {
		case query.Id == 2:
			query.Result = &models.DataSource{Id: 2, OrgId: 1, Uid: "000000002"}
		case query.Id == 4:
			query.Result = &models.DataSource{Id: 4, OrgId: 1, Uid: "000000004"}
		default:
			return fmt.Errorf("datasource not found")
		}
		return nil
	})
}