mirror of https://github.com/grafana/grafana
Alerting: Add file provisioning for alert rules (#51635)
parent e5e8747ee9
commit 41790083d2
@@ -0,0 +1,77 @@
package rules

import (
	"context"
	"fmt"
	"io/fs"
	"io/ioutil"
	"path/filepath"
	"strings"

	"github.com/grafana/grafana/pkg/infra/log"
	"gopkg.in/yaml.v2"
)

type rulesConfigReader struct {
	log log.Logger
}

func newRulesConfigReader(logger log.Logger) rulesConfigReader {
	return rulesConfigReader{
		log: logger,
	}
}

func (cr *rulesConfigReader) readConfig(ctx context.Context, path string) ([]*RuleFile, error) {
	var alertRulesFiles []*RuleFile
	cr.log.Debug("looking for alert rules provisioning files", "path", path)

	files, err := ioutil.ReadDir(path)
	if err != nil {
		cr.log.Error("can't read alert rules provisioning files from directory", "path", path, "error", err)
		return alertRulesFiles, nil
	}

	for _, file := range files {
		cr.log.Debug("parsing alert rules provisioning file", "path", path, "file.Name", file.Name())
		if !cr.isYAML(file.Name()) && !cr.isJSON(file.Name()) {
			return nil, fmt.Errorf("file has invalid suffix '%s' (.yaml,.yml,.json accepted)", file.Name())
		}
		ruleFileV1, err := cr.parseConfig(path, file)
		if err != nil {
			return nil, err
		}
		if ruleFileV1 != nil {
			ruleFile, err := ruleFileV1.MapToModel()
			if err != nil {
				return nil, err
			}
			alertRulesFiles = append(alertRulesFiles, &ruleFile)
		}
	}
	return alertRulesFiles, nil
}

func (cr *rulesConfigReader) isYAML(file string) bool {
	return strings.HasSuffix(file, ".yaml") || strings.HasSuffix(file, ".yml")
}

func (cr *rulesConfigReader) isJSON(file string) bool {
	return strings.HasSuffix(file, ".json")
}

func (cr *rulesConfigReader) parseConfig(path string, file fs.FileInfo) (*RuleFileV1, error) {
	filename, _ := filepath.Abs(filepath.Join(path, file.Name()))
	// nolint:gosec
	// We can ignore the gosec G304 warning on this one because `filename` comes from ps.Cfg.ProvisioningPath
	yamlFile, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	var cfg *RuleFileV1
	err = yaml.Unmarshal(yamlFile, &cfg)
	if err != nil {
		return nil, err
	}
	return cfg, nil
}
@@ -0,0 +1,66 @@
package rules

import (
	"context"
	"testing"

	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/stretchr/testify/require"
)

const (
	testFileBrokenYAML               = "./testdata/broken-yaml"
	testFileCorrectProperties        = "./testdata/correct-properties"
	testFileCorrectPropertiesWithOrg = "./testdata/correct-properties-with-org"
	testFileEmptyFile                = "./testdata/empty-file"
	testFileEmptyFolder              = "./testdata/empty-folder"
	testFileMultipleRules            = "./testdata/multiple-rules"
	testFileMultipleFiles            = "./testdata/multiple-files"
	testFileSupportedFiletypes       = "./testdata/supported-filetypes"
)

func TestConfigReader(t *testing.T) {
	configReader := newRulesConfigReader(log.NewNopLogger())
	ctx := context.Background()
	t.Run("a broken YAML file should error", func(t *testing.T) {
		_, err := configReader.readConfig(ctx, testFileBrokenYAML)
		require.Error(t, err)
	})
	t.Run("a rule file with correct properties should not error", func(t *testing.T) {
		ruleFiles, err := configReader.readConfig(ctx, testFileCorrectProperties)
		require.NoError(t, err)
		t.Run("when no organization is present it should be set to 1", func(t *testing.T) {
			require.Equal(t, int64(1), ruleFiles[0].Groups[0].Rules[0].OrgID)
		})
	})
	t.Run("a rule file with correct properties and specific org should not error", func(t *testing.T) {
		ruleFiles, err := configReader.readConfig(ctx, testFileCorrectPropertiesWithOrg)
		require.NoError(t, err)
		t.Run("when an organization is set it should not be overwritten with the default of 1", func(t *testing.T) {
			require.Equal(t, int64(1337), ruleFiles[0].Groups[0].Rules[0].OrgID)
		})
	})
	t.Run("an empty rule file should not make the config reader error", func(t *testing.T) {
		_, err := configReader.readConfig(ctx, testFileEmptyFile)
		require.NoError(t, err)
	})
	t.Run("an empty folder should not make the config reader error", func(t *testing.T) {
		_, err := configReader.readConfig(ctx, testFileEmptyFolder)
		require.NoError(t, err)
	})
	t.Run("the config reader should be able to read multiple files in the folder", func(t *testing.T) {
		ruleFiles, err := configReader.readConfig(ctx, testFileMultipleFiles)
		require.NoError(t, err)
		require.Len(t, ruleFiles, 2)
	})
	t.Run("the config reader should be able to read multiple rule groups", func(t *testing.T) {
		ruleFiles, err := configReader.readConfig(ctx, testFileMultipleRules)
		require.NoError(t, err)
		require.Len(t, ruleFiles[0].Groups, 2)
	})
	t.Run("the config reader should support .yaml, .yml and .json files", func(t *testing.T) {
		ruleFiles, err := configReader.readConfig(ctx, testFileSupportedFiletypes)
		require.NoError(t, err)
		require.Len(t, ruleFiles, 3)
	})
}
@@ -0,0 +1,165 @@
package rules

import (
	"context"
	"errors"
	"fmt"

	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/dashboards"
	alert_models "github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/services/ngalert/provisioning"
	"github.com/grafana/grafana/pkg/util"
)

type AlertRuleProvisioner interface {
	Provision(ctx context.Context, path string) error
}

func NewAlertRuleProvisioner(
	logger log.Logger,
	dashboardService dashboards.DashboardService,
	dashboardProvService dashboards.DashboardProvisioningService,
	ruleService provisioning.AlertRuleService) AlertRuleProvisioner {
	return &defaultAlertRuleProvisioner{
		logger:               logger,
		cfgReader:            newRulesConfigReader(logger),
		dashboardService:     dashboardService,
		dashboardProvService: dashboardProvService,
		ruleService:          ruleService,
	}
}

type defaultAlertRuleProvisioner struct {
	logger               log.Logger
	cfgReader            rulesConfigReader
	dashboardService     dashboards.DashboardService
	dashboardProvService dashboards.DashboardProvisioningService
	ruleService          provisioning.AlertRuleService
}

func Provision(
	ctx context.Context,
	path string,
	dashboardService dashboards.DashboardService,
	dashboardProvisioningService dashboards.DashboardProvisioningService,
	ruleService provisioning.AlertRuleService,
) error {
	ruleProvisioner := NewAlertRuleProvisioner(
		log.New("provisioning.alerting"),
		dashboardService,
		dashboardProvisioningService,
		ruleService,
	)
	return ruleProvisioner.Provision(ctx, path)
}

func (prov *defaultAlertRuleProvisioner) Provision(ctx context.Context,
	path string) error {
	prov.logger.Info("starting to provision the alert rules")
	ruleFiles, err := prov.cfgReader.readConfig(ctx, path)
	if err != nil {
		return fmt.Errorf("failed to read alert rules files: %w", err)
	}
	prov.logger.Debug("read all alert rules files", "file_count", len(ruleFiles))
	err = prov.provisionRuleFiles(ctx, ruleFiles)
	if err != nil {
		return fmt.Errorf("failed to provision alert rules: %w", err)
	}
	prov.logger.Info("finished provisioning the alert rules")
	return nil
}

func (prov *defaultAlertRuleProvisioner) provisionRuleFiles(ctx context.Context,
	ruleFiles []*RuleFile) error {
	for _, file := range ruleFiles {
		for _, group := range file.Groups {
			folderUID, err := prov.getOrCreateFolderUID(ctx, group.Folder, group.OrgID)
			if err != nil {
				return err
			}
			prov.logger.Debug("provisioning alert rule group",
				"org", group.OrgID,
				"folder", group.Folder,
				"folderUID", folderUID,
				"name", group.Name)
			for _, rule := range group.Rules {
				rule.NamespaceUID = folderUID
				rule.RuleGroup = group.Name
				err = prov.provisionRule(ctx, group.OrgID, rule, group.Folder, folderUID)
				if err != nil {
					return err
				}
			}
			err = prov.ruleService.UpdateRuleGroup(ctx, group.OrgID, folderUID, group.Name, int64(group.Interval.Seconds()))
			if err != nil {
				return err
			}
		}
		for _, deleteRule := range file.DeleteRules {
			err := prov.ruleService.DeleteAlertRule(ctx, deleteRule.OrgID,
				deleteRule.UID, alert_models.ProvenanceFile)
			if err != nil {
				return err
			}
		}
	}
	return nil
}

func (prov *defaultAlertRuleProvisioner) provisionRule(
	ctx context.Context,
	orgID int64,
	rule alert_models.AlertRule,
	folder,
	folderUID string) error {
	prov.logger.Debug("provisioning alert rule", "uid", rule.UID, "org", rule.OrgID)
	_, _, err := prov.ruleService.GetAlertRule(ctx, orgID, rule.UID)
	if err != nil && !errors.Is(err, alert_models.ErrAlertRuleNotFound) {
		return err
	} else if err != nil {
		prov.logger.Debug("creating rule", "uid", rule.UID, "org", rule.OrgID)
		// 0 is passed as userID as then the quota logic will only check for
		// the organization quota, as we don't have any user scope here.
		_, err = prov.ruleService.CreateAlertRule(ctx, rule, alert_models.ProvenanceFile, 0)
	} else {
		prov.logger.Debug("updating rule", "uid", rule.UID, "org", rule.OrgID)
		_, err = prov.ruleService.UpdateAlertRule(ctx, rule, alert_models.ProvenanceFile)
	}
	return err
}

func (prov *defaultAlertRuleProvisioner) getOrCreateFolderUID(
	ctx context.Context, folderName string, orgID int64) (string, error) {
	cmd := &models.GetDashboardQuery{
		Slug:  models.SlugifyTitle(folderName),
		OrgId: orgID,
	}
	err := prov.dashboardService.GetDashboard(ctx, cmd)
	if err != nil && !errors.Is(err, dashboards.ErrDashboardNotFound) {
		return "", err
	}

	// dashboard folder not found. create one.
	if errors.Is(err, dashboards.ErrDashboardNotFound) {
		dash := &dashboards.SaveDashboardDTO{}
		dash.Dashboard = models.NewDashboardFolder(folderName)
		dash.Dashboard.IsFolder = true
		dash.Overwrite = true
		dash.OrgId = orgID
		dash.Dashboard.SetUid(util.GenerateShortUID())
		dbDash, err := prov.dashboardProvService.SaveFolderForProvisionedDashboards(ctx, dash)
		if err != nil {
			return "", err
		}

		return dbDash.Uid, nil
	}

	if !cmd.Result.IsFolder {
		return "", fmt.Errorf("got invalid response. expected folder, found dashboard")
	}

	return cmd.Result.Uid, nil
}
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,57 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    orgId: 1337
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_other_group
    folder: my_other_folder
    interval: 10s
    rules:
      - title: my_other_rule
        uid: my_other_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,110 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
  - name: my_other_group
    folder: my_other_folder
    interval: 10s
    rules:
      - title: my_other_rule
        uid: my_other_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,87 @@
{
  "apiVersion": 1,
  "groups": [
    {
      "name": "my_json_group",
      "folder": "my_json_folder",
      "interval": "10s",
      "rules": [
        {
          "title": "my_json_rule",
          "uid": "my_json_rule",
          "condition": "A",
          "for": "1m",
          "annotations": {
            "runbook": "http://google.com/"
          },
          "labels": {
            "team": "infra",
            "severity": "warning"
          },
          "data": [
            {
              "refId": "A",
              "queryType": "",
              "relativeTimeRange": {
                "from": 600,
                "to": 0
              },
              "datasourceUID": "PD8C576611E62080A",
              "model": {
                "hide": false,
                "intervalMs": 1000,
                "maxDataPoints": 43200,
                "refId": "A"
              }
            },
            {
              "refId": "B",
              "queryType": "",
              "relativeTimeRange": {
                "from": 0,
                "to": 0
              },
              "datasourceUID": "-100",
              "model": {
                "conditions": [
                  {
                    "evaluator": {
                      "params": [
                        3
                      ],
                      "type": "gt"
                    },
                    "operator": {
                      "type": "and"
                    },
                    "query": {
                      "params": [
                        "A"
                      ]
                    },
                    "reducer": {
                      "params": [],
                      "type": "last"
                    },
                    "type": "query"
                  }
                ],
                "datasource": {
                  "type": "__expr__",
                  "uid": "-100"
                },
                "hide": false,
                "intervalMs": 1000,
                "maxDataPoints": 43200,
                "refId": "B",
                "type": "classic_conditions"
              }
            }
          ]
        }
      ]
    }
  ]
}
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_group
    folder: my_folder
    interval: 10s
    rules:
      - title: my_first_rule
        uid: my_first_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,56 @@
apiVersion: 1
groups:
  - name: my_other_group
    folder: my_other_folder
    interval: 10s
    rules:
      - title: my_other_rule
        uid: my_other_rule
        condition: A
        for: 1m
        annotations:
          runbook: https://grafana.com
        labels:
          team: infra
          severity: warning
        data:
          - refId: A
            queryType: ''
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A
            model:
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: A
          - refId: B
            queryType: ''
            relativeTimeRange:
              from: 0
              to: 0
            datasourceUID: "-100"
            model:
              conditions:
                - evaluator:
                    params:
                      - 3
                    type: gt
                  operator:
                    type: and
                  query:
                    params:
                      - A
                  reducer:
                    params: []
                    type: last
                  type: query
              datasource:
                type: __expr__
                uid: "-100"
              hide: false
              intervalMs: 1000
              maxDataPoints: 43200
              refId: B
              type: classic_conditions
@@ -0,0 +1,210 @@
package rules

import (
	"encoding/json"
	"errors"
	"fmt"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/services/provisioning/values"
)

type configVersion struct {
	APIVersion values.Int64Value `json:"apiVersion" yaml:"apiVersion"`
}

type RuleFile struct {
	configVersion
	Groups      []AlertRuleGroup
	DeleteRules []RuleDelete
}

type RuleFileV1 struct {
	configVersion
	Groups      []AlertRuleGroupV1 `json:"groups" yaml:"groups"`
	DeleteRules []RuleDeleteV1     `json:"deleteRules" yaml:"deleteRules"`
}

func (ruleFileV1 *RuleFileV1) MapToModel() (RuleFile, error) {
	ruleFile := RuleFile{}
	ruleFile.configVersion = ruleFileV1.configVersion
	for _, groupV1 := range ruleFileV1.Groups {
		group, err := groupV1.mapToModel()
		if err != nil {
			return RuleFile{}, err
		}
		ruleFile.Groups = append(ruleFile.Groups, group)
	}
	for _, ruleDeleteV1 := range ruleFileV1.DeleteRules {
		orgID := ruleDeleteV1.OrgID.Value()
		if orgID < 1 {
			orgID = 1
		}
		ruleDelete := RuleDelete{
			UID:   ruleDeleteV1.UID.Value(),
			OrgID: orgID,
		}
		ruleFile.DeleteRules = append(ruleFile.DeleteRules, ruleDelete)
	}
	return ruleFile, nil
}

type RuleDelete struct {
	UID   string
	OrgID int64
}

type RuleDeleteV1 struct {
	UID   values.StringValue `json:"uid" yaml:"uid"`
	OrgID values.Int64Value  `json:"orgId" yaml:"orgId"`
}

type AlertRuleGroupV1 struct {
	OrgID    values.Int64Value  `json:"orgId" yaml:"orgId"`
	Name     values.StringValue `json:"name" yaml:"name"`
	Folder   values.StringValue `json:"folder" yaml:"folder"`
	Interval values.StringValue `json:"interval" yaml:"interval"`
	Rules    []AlertRuleV1      `json:"rules" yaml:"rules"`
}

func (ruleGroupV1 *AlertRuleGroupV1) mapToModel() (AlertRuleGroup, error) {
	ruleGroup := AlertRuleGroup{}
	ruleGroup.Name = ruleGroupV1.Name.Value()
	if strings.TrimSpace(ruleGroup.Name) == "" {
		return AlertRuleGroup{}, errors.New("rule group has no name set")
	}
	ruleGroup.OrgID = ruleGroupV1.OrgID.Value()
	if ruleGroup.OrgID < 1 {
		ruleGroup.OrgID = 1
	}
	interval, err := time.ParseDuration(ruleGroupV1.Interval.Value())
	if err != nil {
		return AlertRuleGroup{}, err
	}
	ruleGroup.Interval = interval
	ruleGroup.Folder = ruleGroupV1.Folder.Value()
	if strings.TrimSpace(ruleGroup.Folder) == "" {
		return AlertRuleGroup{}, errors.New("rule group has no folder set")
	}
	for _, ruleV1 := range ruleGroupV1.Rules {
		rule, err := ruleV1.mapToModel(ruleGroup.OrgID)
		if err != nil {
			return AlertRuleGroup{}, err
		}
		ruleGroup.Rules = append(ruleGroup.Rules, rule)
	}
	return ruleGroup, nil
}

type AlertRuleGroup struct {
	OrgID    int64
	Name     string
	Folder   string
	Interval time.Duration
	Rules    []models.AlertRule
}

type AlertRuleV1 struct {
	UID          values.StringValue    `json:"uid" yaml:"uid"`
	Title        values.StringValue    `json:"title" yaml:"title"`
	Condition    values.StringValue    `json:"condition" yaml:"condition"`
	Data         []QueryV1             `json:"data" yaml:"data"`
	DashboardUID values.StringValue    `json:"dashboardUid" yaml:"dashboardUid"`
	PanelID      values.Int64Value     `json:"panelId" yaml:"panelId"`
	NoDataState  values.StringValue    `json:"noDataState" yaml:"noDataState"`
	ExecErrState values.StringValue    `json:"execErrState" yaml:"execErrState"`
	For          values.StringValue    `json:"for" yaml:"for"`
	Annotations  values.StringMapValue `json:"annotations" yaml:"annotations"`
	Labels       values.StringMapValue `json:"labels" yaml:"labels"`
}

func (rule *AlertRuleV1) mapToModel(orgID int64) (models.AlertRule, error) {
	alertRule := models.AlertRule{}
	alertRule.Title = rule.Title.Value()
	if alertRule.Title == "" {
		return models.AlertRule{}, fmt.Errorf("rule has no title set")
	}
	alertRule.UID = rule.UID.Value()
	if alertRule.UID == "" {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: no UID set", alertRule.Title)
	}
	alertRule.OrgID = orgID
	duration, err := time.ParseDuration(rule.For.Value())
	if err != nil {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: %w", alertRule.Title, err)
	}
	alertRule.For = duration
	dashboardUID := rule.DashboardUID.Value()
	alertRule.DashboardUID = &dashboardUID
	panelID := rule.PanelID.Value()
	alertRule.PanelID = &panelID
	execErrStateValue := strings.TrimSpace(rule.ExecErrState.Value())
	execErrState, err := models.ErrStateFromString(execErrStateValue)
	if err != nil && execErrStateValue != "" {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: %w", alertRule.Title, err)
	}
	if execErrStateValue == "" {
		execErrState = models.AlertingErrState
	}
	alertRule.ExecErrState = execErrState
	noDataStateValue := strings.TrimSpace(rule.NoDataState.Value())
	noDataState, err := models.NoDataStateFromString(noDataStateValue)
	if err != nil && noDataStateValue != "" {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: %w", alertRule.Title, err)
	}
	if noDataStateValue == "" {
		noDataState = models.NoData
	}
	alertRule.NoDataState = noDataState
	alertRule.Condition = rule.Condition.Value()
	if alertRule.Condition == "" {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: no condition set", alertRule.Title)
	}
	alertRule.Annotations = rule.Annotations.Value()
	alertRule.Labels = rule.Labels.Value()
	for _, queryV1 := range rule.Data {
		query, err := queryV1.mapToModel()
		if err != nil {
			return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: %w", alertRule.Title, err)
		}
		alertRule.Data = append(alertRule.Data, query)
	}
	if len(alertRule.Data) == 0 {
		return models.AlertRule{}, fmt.Errorf("rule '%s' failed to parse: no data set", alertRule.Title)
	}
	return alertRule, nil
}

type QueryV1 struct {
	RefID             values.StringValue       `json:"refId" yaml:"refId"`
	QueryType         values.StringValue       `json:"queryType" yaml:"queryType"`
	RelativeTimeRange models.RelativeTimeRange `json:"relativeTimeRange" yaml:"relativeTimeRange"`
	DatasourceUID     values.StringValue       `json:"datasourceUid" yaml:"datasourceUid"`
	Model             values.JSONValue         `json:"model" yaml:"model"`
}

func (queryV1 *QueryV1) mapToModel() (models.AlertQuery, error) {
	// In order to get the model into the format we need,
	// we marshal it back to JSON and unmarshal it again
	// into json.RawMessage. We do this because we cannot use
	// json.RawMessage with YAML files and have to use
	// JSONValue, which supports both JSON and YAML.
	encoded, err := json.Marshal(queryV1.Model.Value())
	if err != nil {
		return models.AlertQuery{}, err
	}
	var rawMessage json.RawMessage
	err = json.Unmarshal(encoded, &rawMessage)
	if err != nil {
		return models.AlertQuery{}, err
	}
	return models.AlertQuery{
		RefID:             queryV1.RefID.Value(),
		QueryType:         queryV1.QueryType.Value(),
		DatasourceUID:     queryV1.DatasourceUID.Value(),
		RelativeTimeRange: queryV1.RelativeTimeRange,
		Model:             rawMessage,
	}, nil
}
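The deleteRules section defined by RuleFileV1 and RuleDeleteV1 above is not exercised by any of the testdata files in this change. Below is a minimal sketch of a provisioning file that uses it, restricted to the fields the mapping code treats as required (group name, folder and interval; rule title, uid, condition, for and at least one data query). The names and UIDs are made up for illustration; the omitted orgId, noDataState and execErrState fall back to the defaults applied in mapToModel (1, NoData and Alerting), and a real rule would normally also include an expression query like the testdata files above.

apiVersion: 1
groups:
  - name: minimal_group            # required: an empty name is rejected
    folder: minimal_folder         # required: an empty folder is rejected
    interval: 10s                  # required: must parse as a Go duration
    rules:
      - title: minimal_rule        # required
        uid: minimal_rule          # required
        condition: A               # required: refId the rule evaluates
        for: 1m                    # required: must parse as a Go duration
        data:                      # required: at least one query
          - refId: A
            relativeTimeRange:
              from: 600
              to: 0
            datasourceUID: PD8C576611E62080A   # datasource UID reused from the testdata above
            model:
              refId: A
deleteRules:
  - uid: some_old_rule_uid         # hypothetical UID of a previously provisioned rule
    orgId: 1                       # defaults to 1 when omitted or smaller than 1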
@@ -0,0 +1,218 @@
package rules

import (
	"testing"

	"github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/services/provisioning/values"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

func TestRuleGroup(t *testing.T) {
	t.Run("a valid rule group should not error", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		_, err := rg.mapToModel()
		require.NoError(t, err)
	})
	t.Run("a rule group without a name should error", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		var name values.StringValue
		err := yaml.Unmarshal([]byte(""), &name)
		require.NoError(t, err)
		rg.Name = name
		_, err = rg.mapToModel()
		require.Error(t, err)
	})
	t.Run("a rule group without a folder should error", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		var folder values.StringValue
		err := yaml.Unmarshal([]byte(""), &folder)
		require.NoError(t, err)
		rg.Folder = folder
		_, err = rg.mapToModel()
		require.Error(t, err)
	})
	t.Run("a rule group without an interval should error", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		var interval values.StringValue
		err := yaml.Unmarshal([]byte(""), &interval)
		require.NoError(t, err)
		rg.Interval = interval
		_, err = rg.mapToModel()
		require.Error(t, err)
	})
	t.Run("a rule group with an invalid interval should error", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		var interval values.StringValue
		err := yaml.Unmarshal([]byte("10x"), &interval)
		require.NoError(t, err)
		rg.Interval = interval
		_, err = rg.mapToModel()
		require.Error(t, err)
	})
	t.Run("a rule group with an empty org id should default to 1", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		rg.OrgID = values.Int64Value{}
		rgMapped, err := rg.mapToModel()
		require.NoError(t, err)
		require.Equal(t, int64(1), rgMapped.OrgID)
	})
	t.Run("a rule group with a negative org id should default to 1", func(t *testing.T) {
		rg := validRuleGroupV1(t)
		orgID := values.Int64Value{}
		err := yaml.Unmarshal([]byte("-1"), &orgID)
		require.NoError(t, err)
		rg.OrgID = orgID
		rgMapped, err := rg.mapToModel()
		require.NoError(t, err)
		require.Equal(t, int64(1), rgMapped.OrgID)
	})
}

func TestRules(t *testing.T) {
	t.Run("a valid rule should not error", func(t *testing.T) {
		rule := validRuleV1(t)
		_, err := rule.mapToModel(1)
		require.NoError(t, err)
	})
	t.Run("a rule without a uid should error", func(t *testing.T) {
		rule := validRuleV1(t)
		rule.UID = values.StringValue{}
		_, err := rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule without a title should error", func(t *testing.T) {
		rule := validRuleV1(t)
		rule.Title = values.StringValue{}
		_, err := rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule without a for duration should error", func(t *testing.T) {
		rule := validRuleV1(t)
		rule.For = values.StringValue{}
		_, err := rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule with an invalid for duration should error", func(t *testing.T) {
		rule := validRuleV1(t)
		forDuration := values.StringValue{}
		err := yaml.Unmarshal([]byte("10x"), &forDuration)
		rule.For = forDuration
		require.NoError(t, err)
		_, err = rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule without a condition should error", func(t *testing.T) {
		rule := validRuleV1(t)
		rule.Condition = values.StringValue{}
		_, err := rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule without data should error", func(t *testing.T) {
		rule := validRuleV1(t)
		rule.Data = []QueryV1{}
		_, err := rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule without execErrState should have sane defaults", func(t *testing.T) {
		rule := validRuleV1(t)
		ruleMapped, err := rule.mapToModel(1)
		require.NoError(t, err)
		require.Equal(t, ruleMapped.ExecErrState, models.AlertingErrState)
	})
	t.Run("a rule with an invalid execErrState should error", func(t *testing.T) {
		rule := validRuleV1(t)
		execErrState := values.StringValue{}
		err := yaml.Unmarshal([]byte("abc"), &execErrState)
		require.NoError(t, err)
		rule.ExecErrState = execErrState
		_, err = rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule with a valid execErrState should map it correctly", func(t *testing.T) {
		rule := validRuleV1(t)
		execErrState := values.StringValue{}
		err := yaml.Unmarshal([]byte(models.OkErrState), &execErrState)
		require.NoError(t, err)
		rule.ExecErrState = execErrState
		ruleMapped, err := rule.mapToModel(1)
		require.NoError(t, err)
		require.Equal(t, ruleMapped.ExecErrState, models.OkErrState)
	})
	t.Run("a rule without noDataState should have sane defaults", func(t *testing.T) {
		rule := validRuleV1(t)
		ruleMapped, err := rule.mapToModel(1)
		require.NoError(t, err)
		require.Equal(t, ruleMapped.NoDataState, models.NoData)
	})
	t.Run("a rule with an invalid noDataState should error", func(t *testing.T) {
		rule := validRuleV1(t)
		noDataState := values.StringValue{}
		err := yaml.Unmarshal([]byte("abc"), &noDataState)
		require.NoError(t, err)
		rule.NoDataState = noDataState
		_, err = rule.mapToModel(1)
		require.Error(t, err)
	})
	t.Run("a rule with a valid noDataState should map it correctly", func(t *testing.T) {
		rule := validRuleV1(t)
		noDataState := values.StringValue{}
		err := yaml.Unmarshal([]byte(models.NoData), &noDataState)
		require.NoError(t, err)
		rule.NoDataState = noDataState
		ruleMapped, err := rule.mapToModel(1)
		require.NoError(t, err)
		require.Equal(t, ruleMapped.NoDataState, models.NoData)
	})
}

func validRuleGroupV1(t *testing.T) AlertRuleGroupV1 {
	t.Helper()
	var (
		orgID    values.Int64Value
		name     values.StringValue
		folder   values.StringValue
		interval values.StringValue
	)
	err := yaml.Unmarshal([]byte("1"), &orgID)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("Test"), &name)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("Test"), &folder)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("10s"), &interval)
	require.NoError(t, err)
	return AlertRuleGroupV1{
		OrgID:    orgID,
		Name:     name,
		Folder:   folder,
		Interval: interval,
		Rules:    []AlertRuleV1{},
	}
}

func validRuleV1(t *testing.T) AlertRuleV1 {
	t.Helper()
	var (
		title       values.StringValue
		uid         values.StringValue
		forDuration values.StringValue
		condition   values.StringValue
	)
	err := yaml.Unmarshal([]byte("test"), &title)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("test_uid"), &uid)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("10s"), &forDuration)
	require.NoError(t, err)
	err = yaml.Unmarshal([]byte("A"), &condition)
	require.NoError(t, err)
	return AlertRuleV1{
		Title:     title,
		UID:       uid,
		For:       forDuration,
		Condition: condition,
		Data:      []QueryV1{{}},
	}
}