Alerting: Persist rule position in the group (#50051)

Migrations:
* add a new column rule_group_idx to the alert_rule table
* add a new column rule_group_idx to the alert_rule_version table
* re-index existing rules during the migration (see the sketch below)
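
A condensed, illustrative sketch of what the re-index step does (simplified types and names, not the actual migration code): rules are grouped by org, namespace and group name, then numbered 1..n in the order they were read from the database, and only rules whose index actually changes are written back.

```go
package main

import "fmt"

// groupKey and rule are simplified stand-ins for the migration's internal types.
type groupKey struct {
	orgID                   int64
	namespaceUID, ruleGroup string
}

type rule struct {
	id             int64
	key            groupKey
	ruleGroupIndex int
}

// reindex numbers the rules of every group 1..n, keeping the order in which
// the rows were read (ascending id), and returns only the rules that changed.
func reindex(rows []*rule) (updated []*rule) {
	groups := map[groupKey][]*rule{}
	for _, r := range rows {
		groups[r.key] = append(groups[r.key], r)
	}
	for _, rules := range groups {
		for i, r := range rules {
			if r.ruleGroupIndex != i+1 {
				r.ruleGroupIndex = i + 1
				updated = append(updated, r)
			}
		}
	}
	return updated
}

func main() {
	k := groupKey{orgID: 1, namespaceUID: "ns", ruleGroup: "g"}
	// All rows start with the column default of 1, as after the ADD COLUMN migration.
	rows := []*rule{
		{id: 10, key: k, ruleGroupIndex: 1},
		{id: 11, key: k, ruleGroupIndex: 1},
		{id: 12, key: k, ruleGroupIndex: 1},
	}
	fmt.Println(len(reindex(rows))) // 2: the first rule already has the default index 1
}
```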

API:
* set the group index on update, using the natural order of items in the submitted array as the index (see the sketch at the end of this message)
* sort rules in a group by group index on GET
* update the version of all rules in all affected groups, so that the optimistic lock works when multiple concurrent requests touch the same groups

UI:
* update the UI to keep the order of alerts in a group when a rule is saved
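
A minimal sketch of the ordering contract described above, using simplified stand-in types rather than the actual Grafana structs: the index is assigned from the position in the submitted array, and reads restore the persisted order by sorting on that index.

```go
package main

import (
	"fmt"
	"sort"
)

// Rule is a simplified stand-in for an alert rule; only the fields relevant
// to ordering are shown.
type Rule struct {
	UID            string
	RuleGroupIndex int
}

// assignGroupIndex gives each submitted rule a 1-based index equal to its
// position in the request body ("natural order of items in the array").
func assignGroupIndex(submitted []*Rule) {
	for i, r := range submitted {
		r.RuleGroupIndex = i + 1
	}
}

// sortByGroupIndex restores the persisted order before the group is returned
// to the caller (ties broken by UID here, purely for determinism).
func sortByGroupIndex(rules []*Rule) {
	sort.Slice(rules, func(i, j int) bool {
		if rules[i].RuleGroupIndex == rules[j].RuleGroupIndex {
			return rules[i].UID < rules[j].UID
		}
		return rules[i].RuleGroupIndex < rules[j].RuleGroupIndex
	})
}

func main() {
	group := []*Rule{{UID: "b"}, {UID: "a"}, {UID: "c"}}
	assignGroupIndex(group) // b=1, a=2, c=3
	// Simulate a read that returns the rules in arbitrary storage order.
	stored := []*Rule{group[2], group[0], group[1]}
	sortByGroupIndex(stored)
	for _, r := range stored {
		fmt.Println(r.UID, r.RuleGroupIndex) // b 1, a 2, c 3
	}
}
```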
18 changed files (lines changed per file):

  1   pkg/services/ngalert/CHANGELOG.md
  2   pkg/services/ngalert/api/api_prometheus.go
 43   pkg/services/ngalert/api/api_prometheus_test.go
 58   pkg/services/ngalert/api/api_ruler.go
267   pkg/services/ngalert/api/api_ruler_test.go
  1   pkg/services/ngalert/api/api_ruler_validation.go
 14   pkg/services/ngalert/api/authorization_test.go
 19   pkg/services/ngalert/models/alert_rule.go
 38   pkg/services/ngalert/models/alert_rule_test.go
 48   pkg/services/ngalert/models/testing.go
  4   pkg/services/ngalert/store/alert_rule.go
  2   pkg/services/sqlstore/migrations/migrations.go
  4   pkg/services/sqlstore/migrations/ualert/alert_rule.go
 20   pkg/services/sqlstore/migrations/ualert/tables.go
 91   pkg/services/sqlstore/migrations/ualert/ualert.go
 98   pkg/tests/api/alerting/api_ruler_test.go
 14   pkg/tests/api/alerting/testing.go
 15   public/app/features/alerting/unified/utils/rulerClient.ts

@@ -72,6 +72,7 @@ Scopes must have an order to ensure consistency and ease of search, this helps u
- [FEATURE] Indicate whether contact point is provisioned when GETting Alertmanager configuration #48323
- [FEATURE] Indicate whether alert rule is provisioned when GETting the rule #48458
- [FEATURE] Alert rules with associated panels will take screenshots. #49293 #49338 #49374 #49377 #49378 #49379 #49381 #49385 #49439 #49445
- [FEATURE] Persistent order of alert rules in a group #50051
- [BUGFIX] Migration: ignore alerts that do not belong to any existing organization\dashboard #49192
- [BUGFIX] Allow anonymous access to alerts #49203
- [BUGFIX] RBAC: replace create\update\delete actions for notification policies by alert.notifications:write #49185

@@ -191,7 +191,7 @@ func (srv PrometheusSrv) toRuleGroup(groupName string, folder *models.Folder, ru
Name: groupName,
File: folder.Title, // file is what Prometheus uses for provisioning, we replace it with namespace.
}
ngmodels.RulesGroup(rules).SortByGroupIndex()
for _, rule := range rules {
alertingRule := apimodels.AlertingRule{
State: "inactive",

@@ -419,6 +419,49 @@ func TestRouteGetRuleStatuses(t *testing.T) {
`, folder.Title), string(r.Body()))
})
t.Run("with many rules in a group", func(t *testing.T) {
t.Run("should return sorted", func(t *testing.T) {
ruleStore := store.NewFakeRuleStore(t)
fakeAIM := NewFakeAlertInstanceManager(t)
groupKey := ngmodels.GenerateGroupKey(orgID)
_, rules := ngmodels.GenerateUniqueAlertRules(rand.Intn(5)+5, ngmodels.AlertRuleGen(withGroupKey(groupKey), ngmodels.WithUniqueGroupIndex()))
ruleStore.PutRule(context.Background(), rules...)
api := PrometheusSrv{
log: log.NewNopLogger(),
manager: fakeAIM,
store: ruleStore,
ac: acmock.New().WithDisabled(),
}
response := api.RouteGetRuleStatuses(c)
require.Equal(t, http.StatusOK, response.Status())
result := &apimodels.RuleResponse{}
require.NoError(t, json.Unmarshal(response.Body(), result))
ngmodels.RulesGroup(rules).SortByGroupIndex()
require.Len(t, result.Data.RuleGroups, 1)
group := result.Data.RuleGroups[0]
require.Equal(t, groupKey.RuleGroup, group.Name)
require.Len(t, group.Rules, len(rules))
for i, actual := range group.Rules {
expected := rules[i]
if actual.Name != expected.Title {
var actualNames []string
var expectedNames []string
for _, rule := range group.Rules {
actualNames = append(actualNames, rule.Name)
}
for _, rule := range rules {
expectedNames = append(expectedNames, rule.Title)
}
require.Fail(t, fmt.Sprintf("rules are not sorted by group index. Expected: %v. Actual: %v", expectedNames, actualNames))
}
}
})
})
t.Run("when fine-grained access is enabled", func(t *testing.T) { t.Run("when fine-grained access is enabled", func(t *testing.T) {
t.Run("should return only rules if the user can query all data sources", func(t *testing.T) { t.Run("should return only rules if the user can query all data sources", func(t *testing.T) {
ruleStore := store.NewFakeRuleStore(t) ruleStore := store.NewFakeRuleStore(t)

@@ -184,7 +184,7 @@ func (srv RulerSrv) RouteGetNamespaceRulesConfig(c *models.ReqContext) response.
return ErrResp(http.StatusInternalServerError, err, "failed to get provenance for rule group")
}
-ruleGroups := make(map[string][]*ngmodels.AlertRule)
+ruleGroups := make(map[string]ngmodels.RulesGroup)
for _, r := range q.Result {
ruleGroups[r.RuleGroup] = append(ruleGroups[r.RuleGroup], r)
}
@@ -284,7 +284,7 @@ func (srv RulerSrv) RouteGetRulesConfig(c *models.ReqContext) response.Response
return ErrResp(http.StatusInternalServerError, err, "failed to get alert rules")
}
-configs := make(map[ngmodels.AlertRuleGroupKey][]*ngmodels.AlertRule)
+configs := make(map[ngmodels.AlertRuleGroupKey]ngmodels.RulesGroup)
for _, r := range q.Result {
groupKey := r.GetGroupKey()
group := configs[groupKey]
@@ -360,7 +360,7 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, groupKey ngmod
return err
}
-finalChanges = groupChanges
+finalChanges = calculateAutomaticChanges(groupChanges)
logger.Debug("updating database with the authorized changes", "add", len(finalChanges.New), "update", len(finalChanges.New), "delete", len(finalChanges.Delete))
if len(finalChanges.Update) > 0 || len(finalChanges.New) > 0 {
@@ -448,7 +448,8 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, groupKey ngmod
return response.JSON(http.StatusAccepted, util.DynMap{"message": "rule group updated successfully"})
}
-func toGettableRuleGroupConfig(groupName string, rules []*ngmodels.AlertRule, namespaceID int64, provenanceRecords map[string]ngmodels.Provenance) apimodels.GettableRuleGroupConfig {
+func toGettableRuleGroupConfig(groupName string, rules ngmodels.RulesGroup, namespaceID int64, provenanceRecords map[string]ngmodels.Provenance) apimodels.GettableRuleGroupConfig {
rules.SortByGroupIndex()
ruleNodes := make([]apimodels.GettableExtendedRuleNode, 0, len(rules))
var interval time.Duration
if len(rules) > 0 {
@@ -516,7 +517,7 @@ type changes struct {
GroupKey ngmodels.AlertRuleGroupKey
// AffectedGroups contains all rules of all groups that are affected by these changes.
// For example, during moving a rule from one group to another this map will contain all rules from two groups
-AffectedGroups map[ngmodels.AlertRuleGroupKey][]*ngmodels.AlertRule
+AffectedGroups map[ngmodels.AlertRuleGroupKey]ngmodels.RulesGroup
New []*ngmodels.AlertRule
Update []ruleUpdate
Delete []*ngmodels.AlertRule
@@ -555,7 +556,7 @@ func verifyProvisionedRulesNotAffected(ctx context.Context, provenanceStore prov
// calculateChanges calculates the difference between rules in the group in the database and the submitted rules. If a submitted rule has UID it tries to find it in the database (in other groups).
// returns a list of rules that need to be added, updated and deleted. Deleted considered rules in the database that belong to the group but do not exist in the list of submitted rules.
func calculateChanges(ctx context.Context, ruleStore store.RuleStore, groupKey ngmodels.AlertRuleGroupKey, submittedRules []*ngmodels.AlertRule) (*changes, error) {
-affectedGroups := make(map[ngmodels.AlertRuleGroupKey][]*ngmodels.AlertRule)
+affectedGroups := make(map[ngmodels.AlertRuleGroupKey]ngmodels.RulesGroup)
q := &ngmodels.ListAlertRulesQuery{
OrgID: groupKey.OrgID,
NamespaceUIDs: []string{groupKey.NamespaceUID},
@@ -636,5 +637,50 @@ func calculateChanges(ctx context.Context, ruleStore store.RuleStore, groupKey n
}, nil
}
// calculateAutomaticChanges scans all affected groups and creates a no-op update for every rule that is not changed explicitly, which increments the version of each rule and re-indexes the other groups.
// This is needed to make sure that there are no concurrent changes made to any of the affected groups.
// Returns a copy of the changes enriched with either no-op or group-index updates for all rules in the affected groups.
func calculateAutomaticChanges(ch *changes) *changes {
updatingRules := make(map[ngmodels.AlertRuleKey]struct{}, len(ch.Delete)+len(ch.Update))
for _, update := range ch.Update {
updatingRules[update.Existing.GetKey()] = struct{}{}
}
for _, del := range ch.Delete {
updatingRules[del.GetKey()] = struct{}{}
}
var toUpdate []ruleUpdate
for groupKey, rules := range ch.AffectedGroups {
if groupKey != ch.GroupKey {
rules.SortByGroupIndex()
}
idx := 1
for _, rule := range rules {
if _, ok := updatingRules[rule.GetKey()]; ok { // exclude rules that are going to be either updated or deleted
continue
}
upd := ruleUpdate{
Existing: rule,
New: rule,
}
if groupKey != ch.GroupKey {
if rule.RuleGroupIndex != idx {
upd.New = ngmodels.CopyRule(rule)
upd.New.RuleGroupIndex = idx
upd.Diff = rule.Diff(upd.New, alertRuleFieldsToIgnoreInDiff...)
}
idx++
}
toUpdate = append(toUpdate, upd)
}
}
return &changes{
GroupKey: ch.GroupKey,
AffectedGroups: ch.AffectedGroups,
New: ch.New,
Update: append(ch.Update, toUpdate...),
Delete: ch.Delete,
}
}
// alertRuleFieldsToIgnoreInDiff contains fields that the AlertRule.Diff should ignore
var alertRuleFieldsToIgnoreInDiff = []string{"ID", "Version", "Updated"}

@@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"math/rand" "math/rand"
"net/http" "net/http"
"net/url" "net/url"
@ -75,7 +76,7 @@ func TestCalculateChanges(t *testing.T) {
require.Equal(t, db, toDelete) require.Equal(t, db, toDelete)
} }
require.Contains(t, changes.AffectedGroups, groupKey) require.Contains(t, changes.AffectedGroups, groupKey)
require.Equal(t, inDatabase, changes.AffectedGroups[groupKey]) require.Equal(t, models.RulesGroup(inDatabase), changes.AffectedGroups[groupKey])
}) })
t.Run("should detect alerts that needs to be updated", func(t *testing.T) { t.Run("should detect alerts that needs to be updated", func(t *testing.T) {
@ -102,7 +103,7 @@ func TestCalculateChanges(t *testing.T) {
require.Empty(t, changes.New) require.Empty(t, changes.New)
require.Contains(t, changes.AffectedGroups, groupKey) require.Contains(t, changes.AffectedGroups, groupKey)
require.Equal(t, inDatabase, changes.AffectedGroups[groupKey]) require.Equal(t, models.RulesGroup(inDatabase), changes.AffectedGroups[groupKey])
}) })
t.Run("should include only if there are changes ignoring specific fields", func(t *testing.T) { t.Run("should include only if there are changes ignoring specific fields", func(t *testing.T) {
@@ -658,6 +659,49 @@ func TestRouteGetNamespaceRulesConfig(t *testing.T) {
}
require.True(t, found)
})
t.Run("should enforce order of rules in the group", func(t *testing.T) {
orgID := rand.Int63()
folder := randFolder()
ruleStore := store.NewFakeRuleStore(t)
ruleStore.Folders[orgID] = append(ruleStore.Folders[orgID], folder)
groupKey := models.GenerateGroupKey(orgID)
groupKey.NamespaceUID = folder.Uid
expectedRules := models.GenerateAlertRules(rand.Intn(5)+5, models.AlertRuleGen(withGroupKey(groupKey), models.WithUniqueGroupIndex()))
ruleStore.PutRule(context.Background(), expectedRules...)
ac := acMock.New().WithDisabled()
response := createService(ac, ruleStore, nil).RouteGetNamespaceRulesConfig(createRequestContext(orgID, models2.ROLE_VIEWER, map[string]string{
":Namespace": folder.Title,
}))
require.Equal(t, http.StatusAccepted, response.Status())
result := &apimodels.NamespaceConfigResponse{}
require.NoError(t, json.Unmarshal(response.Body(), result))
require.NotNil(t, result)
models.RulesGroup(expectedRules).SortByGroupIndex()
require.Contains(t, *result, folder.Title)
groups := (*result)[folder.Title]
require.Len(t, groups, 1)
group := groups[0]
require.Equal(t, groupKey.RuleGroup, group.Name)
for i, actual := range groups[0].Rules {
expected := expectedRules[i]
if actual.GrafanaManagedAlert.UID != expected.UID {
var actualUIDs []string
var expectedUIDs []string
for _, rule := range group.Rules {
actualUIDs = append(actualUIDs, rule.GrafanaManagedAlert.UID)
}
for _, rule := range expectedRules {
expectedUIDs = append(expectedUIDs, rule.UID)
}
require.Fail(t, fmt.Sprintf("rules are not sorted by group index. Expected: %v. Actual: %v", expectedUIDs, actualUIDs))
}
}
})
}
func TestRouteGetRulesConfig(t *testing.T) {
@@ -698,6 +742,48 @@ func TestRouteGetRulesConfig(t *testing.T) {
})
})
})
t.Run("should return rules in group sorted by group index", func(t *testing.T) {
orgID := rand.Int63()
folder := randFolder()
ruleStore := store.NewFakeRuleStore(t)
ruleStore.Folders[orgID] = append(ruleStore.Folders[orgID], folder)
groupKey := models.GenerateGroupKey(orgID)
groupKey.NamespaceUID = folder.Uid
expectedRules := models.GenerateAlertRules(rand.Intn(5)+5, models.AlertRuleGen(withGroupKey(groupKey), models.WithUniqueGroupIndex()))
ruleStore.PutRule(context.Background(), expectedRules...)
ac := acMock.New().WithDisabled()
response := createService(ac, ruleStore, nil).RouteGetRulesConfig(createRequestContext(orgID, models2.ROLE_VIEWER, nil))
require.Equal(t, http.StatusOK, response.Status())
result := &apimodels.NamespaceConfigResponse{}
require.NoError(t, json.Unmarshal(response.Body(), result))
require.NotNil(t, result)
models.RulesGroup(expectedRules).SortByGroupIndex()
require.Contains(t, *result, folder.Title)
groups := (*result)[folder.Title]
require.Len(t, groups, 1)
group := groups[0]
require.Equal(t, groupKey.RuleGroup, group.Name)
for i, actual := range groups[0].Rules {
expected := expectedRules[i]
if actual.GrafanaManagedAlert.UID != expected.UID {
var actualUIDs []string
var expectedUIDs []string
for _, rule := range group.Rules {
actualUIDs = append(actualUIDs, rule.GrafanaManagedAlert.UID)
}
for _, rule := range expectedRules {
expectedUIDs = append(expectedUIDs, rule.UID)
}
require.Fail(t, fmt.Sprintf("rules are not sorted by group index. Expected: %v. Actual: %v", expectedUIDs, actualUIDs))
}
}
})
}
func TestRouteGetRulesGroupConfig(t *testing.T) {
@@ -736,12 +822,52 @@
})
})
})
t.Run("should return rules in group sorted by group index", func(t *testing.T) {
orgID := rand.Int63()
folder := randFolder()
ruleStore := store.NewFakeRuleStore(t)
ruleStore.Folders[orgID] = append(ruleStore.Folders[orgID], folder)
groupKey := models.GenerateGroupKey(orgID)
groupKey.NamespaceUID = folder.Uid
expectedRules := models.GenerateAlertRules(rand.Intn(5)+5, models.AlertRuleGen(withGroupKey(groupKey), models.WithUniqueGroupIndex()))
ruleStore.PutRule(context.Background(), expectedRules...)
ac := acMock.New().WithDisabled()
response := createService(ac, ruleStore, nil).RouteGetRulesGroupConfig(createRequestContext(orgID, models2.ROLE_VIEWER, map[string]string{
":Namespace": folder.Title,
":Groupname": groupKey.RuleGroup,
}))
require.Equal(t, http.StatusAccepted, response.Status())
result := &apimodels.RuleGroupConfigResponse{}
require.NoError(t, json.Unmarshal(response.Body(), result))
require.NotNil(t, result)
models.RulesGroup(expectedRules).SortByGroupIndex()
for i, actual := range result.Rules {
expected := expectedRules[i]
if actual.GrafanaManagedAlert.UID != expected.UID {
var actualUIDs []string
var expectedUIDs []string
for _, rule := range result.Rules {
actualUIDs = append(actualUIDs, rule.GrafanaManagedAlert.UID)
}
for _, rule := range expectedRules {
expectedUIDs = append(expectedUIDs, rule.UID)
}
require.Fail(t, fmt.Sprintf("rules are not sorted by group index. Expected: %v. Actual: %v", expectedUIDs, actualUIDs))
}
}
})
}
func TestVerifyProvisionedRulesNotAffected(t *testing.T) {
orgID := rand.Int63()
group := models.GenerateGroupKey(orgID)
-affectedGroups := make(map[models.AlertRuleGroupKey][]*models.AlertRule)
+affectedGroups := make(map[models.AlertRuleGroupKey]models.RulesGroup)
var allRules []*models.AlertRule
{
rules := models.GenerateAlertRules(rand.Intn(3)+1, models.AlertRuleGen(withGroupKey(group)))
@@ -799,6 +925,141 @@
})
}
func TestCalculateAutomaticChanges(t *testing.T) {
orgID := rand.Int63()
t.Run("should mark all rules in affected groups", func(t *testing.T) {
group := models.GenerateGroupKey(orgID)
rules := models.GenerateAlertRules(10, models.AlertRuleGen(withGroupKey(group)))
// copy rules to make sure that the function does not modify the original rules
copies := make([]*models.AlertRule, 0, len(rules))
for _, rule := range rules {
copies = append(copies, models.CopyRule(rule))
}
var updates []ruleUpdate
for i := 0; i < 5; i++ {
ruleCopy := models.CopyRule(copies[i])
ruleCopy.Title += util.GenerateShortUID()
updates = append(updates, ruleUpdate{
Existing: copies[i],
New: ruleCopy,
})
}
// simulate adding new rules, updating a few existing ones and deleting some from the same group
ch := &changes{
GroupKey: group,
AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
group: copies,
},
New: models.GenerateAlertRules(2, models.AlertRuleGen(withGroupKey(group))),
Update: updates,
Delete: rules[5:7],
}
result := calculateAutomaticChanges(ch)
require.NotEqual(t, ch, result)
require.Equal(t, ch.GroupKey, result.GroupKey)
require.Equal(t, map[models.AlertRuleGroupKey]models.RulesGroup{
group: rules,
}, result.AffectedGroups)
require.Equal(t, ch.New, result.New)
require.Equal(t, rules[5:7], result.Delete)
var expected []ruleUpdate
expected = append(expected, updates...)
// all rules that were not updated directly by the user should be added to the list of updates
for _, rule := range rules[7:] {
expected = append(expected, ruleUpdate{
Existing: rule,
New: rule,
})
}
require.Equal(t, expected, result.Update)
})
t.Run("should re-index rules in affected groups other than updated", func(t *testing.T) {
group := models.GenerateGroupKey(orgID)
rules := models.GenerateAlertRules(3, models.AlertRuleGen(withGroupKey(group), models.WithSequentialGroupIndex()))
group2 := models.GenerateGroupKey(orgID)
rules2 := models.GenerateAlertRules(4, models.AlertRuleGen(withGroupKey(group2), models.WithSequentialGroupIndex()))
movedIndex := rand.Intn(len(rules2) - 1)
movedRule := rules2[movedIndex]
copyRule := models.CopyRule(movedRule)
copyRule.RuleGroup = group.RuleGroup
copyRule.NamespaceUID = group.NamespaceUID
copyRule.RuleGroupIndex = len(rules)
update := ruleUpdate{
Existing: movedRule,
New: copyRule,
}
shuffled := make([]*models.AlertRule, len(rules2))
copy(shuffled, rules2)
rand.Shuffle(len(shuffled), func(i, j int) {
shuffled[i], shuffled[j] = shuffled[j], shuffled[i]
})
// simulate moving a rule from one group to another.
ch := &changes{
GroupKey: group,
AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
group: rules,
group2: shuffled,
},
Update: []ruleUpdate{
update,
},
}
result := calculateAutomaticChanges(ch)
require.NotEqual(t, ch, result)
require.Equal(t, ch.GroupKey, result.GroupKey)
require.Equal(t, ch.AffectedGroups, result.AffectedGroups)
require.Equal(t, ch.New, result.New)
require.Equal(t, ch.Delete, result.Delete)
require.Equal(t, ch.Update, result.Update[0:1])
require.Contains(t, result.Update, update)
for _, rule := range rules {
assert.Containsf(t, result.Update, ruleUpdate{
Existing: rule,
New: rule,
}, "automatic changes expected to contain all rules of the updated group")
}
// calculate expected index of the rules in the source group after the move
expectedReindex := make(map[string]int, len(rules2)-1)
idx := 1
for _, rule := range rules2 {
if rule.UID == movedRule.UID {
continue
}
expectedReindex[rule.UID] = idx
idx++
}
for _, upd := range result.Update {
expectedIdx, ok := expectedReindex[upd.Existing.UID]
if !ok {
continue
}
diff := upd.Existing.Diff(upd.New)
if upd.Existing.RuleGroupIndex != expectedIdx {
require.Lenf(t, diff, 1, fmt.Sprintf("the rule in affected group should be re-indexed to %d but it still has index %d. Moved rule with index %d", expectedIdx, upd.Existing.RuleGroupIndex, movedIndex))
require.Equal(t, "RuleGroupIndex", diff[0].Path)
require.Equal(t, expectedIdx, upd.New.RuleGroupIndex)
} else {
require.Empty(t, diff)
}
}
})
}
func createService(ac *acMock.Mock, store *store.FakeRuleStore, scheduler schedule.ScheduleService) *RulerSrv {
return &RulerSrv{
xactManager: store,

@@ -177,6 +177,7 @@ func validateRuleGroup(
}
uids[rule.UID] = idx
}
rule.RuleGroupIndex = idx + 1
result = append(result, rule)
}
return result, nil

@@ -107,7 +107,7 @@ func createAllCombinationsOfPermissions(permissions map[string][]string) []map[s
return permissionCombinations
}
-func getDatasourceScopesForRules(rules []*models.AlertRule) []string {
+func getDatasourceScopesForRules(rules models.RulesGroup) []string {
scopesMap := map[string]struct{}{}
var result []string
for _, rule := range rules {
@@ -123,8 +123,8 @@ func getDatasourceScopesForRules(rules []*models.AlertRule) []string {
return result
}
-func mapUpdates(updates []ruleUpdate, mapFunc func(ruleUpdate) *models.AlertRule) []*models.AlertRule {
+func mapUpdates(updates []ruleUpdate, mapFunc func(ruleUpdate) *models.AlertRule) models.RulesGroup {
-result := make([]*models.AlertRule, 0, len(updates))
+result := make(models.RulesGroup, 0, len(updates))
for _, update := range updates {
result = append(result, mapFunc(update))
}
@@ -172,7 +172,7 @@ func TestAuthorizeRuleChanges(t *testing.T) {
rules2 := models.GenerateAlertRules(rand.Intn(4)+1, models.AlertRuleGen(withGroupKey(groupKey)))
return &changes{
GroupKey: groupKey,
-AffectedGroups: map[models.AlertRuleGroupKey][]*models.AlertRule{
+AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
groupKey: append(rules, rules2...),
},
New: nil,
@@ -208,7 +208,7 @@
return &changes{
GroupKey: groupKey,
-AffectedGroups: map[models.AlertRuleGroupKey][]*models.AlertRule{
+AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
groupKey: append(rules, rules1...),
},
New: nil,
@@ -252,7 +252,7 @@
return &changes{
GroupKey: targetGroupKey,
-AffectedGroups: map[models.AlertRuleGroupKey][]*models.AlertRule{
+AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
groupKey: append(rules, rules1...),
},
New: nil,
@@ -317,7 +317,7 @@
return &changes{
GroupKey: targetGroupKey,
-AffectedGroups: map[models.AlertRuleGroupKey][]*models.AlertRule{
+AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{
groupKey: sourceGroup,
targetGroupKey: targetGroup,
},

@@ -4,6 +4,7 @@ import (
"encoding/json"
"errors"
"fmt"
"sort"
"time" "time"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
@ -125,6 +126,7 @@ type AlertRule struct {
DashboardUID *string `xorm:"dashboard_uid"` DashboardUID *string `xorm:"dashboard_uid"`
PanelID *int64 `xorm:"panel_id"` PanelID *int64 `xorm:"panel_id"`
RuleGroup string RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
NoDataState NoDataState
ExecErrState ExecutionErrorState
// ideally this field should have been apimodels.ApiDuration
@@ -140,6 +142,9 @@ type SchedulableAlertRule struct {
OrgID int64 `xorm:"org_id"`
IntervalSeconds int64
Version int64
NamespaceUID string `xorm:"namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
}
type LabelOption func(map[string]string)
@@ -251,6 +256,7 @@ type AlertRuleVersion struct {
RuleUID string `xorm:"rule_uid"`
RuleNamespaceUID string `xorm:"rule_namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
ParentVersion int64
RestoredFrom int64
Version int64
@@ -297,7 +303,7 @@ type ListAlertRulesQuery struct {
DashboardUID string
PanelID int64
-Result []*AlertRule
+Result RulesGroup
}
type GetAlertRulesForSchedulingQuery struct {
@@ -394,3 +400,14 @@ func ValidateRuleGroupInterval(intervalSeconds, baseIntervalSeconds int64) error
}
return nil
}
type RulesGroup []*AlertRule
func (g RulesGroup) SortByGroupIndex() {
sort.Slice(g, func(i, j int) bool {
if g[i].RuleGroupIndex == g[j].RuleGroupIndex {
return g[i].ID < g[j].ID
}
return g[i].RuleGroupIndex < g[j].RuleGroupIndex
})
}

@@ -3,6 +3,7 @@ package models
import (
"encoding/json"
"math/rand"
"sort"
"strings" "strings"
"testing" "testing"
"time" "time"
@ -354,6 +355,13 @@ func TestDiff(t *testing.T) {
assert.Equal(t, rule2.For, diff[0].Right.Interface()) assert.Equal(t, rule2.For, diff[0].Right.Interface())
difCnt++ difCnt++
} }
if rule1.RuleGroupIndex != rule2.RuleGroupIndex {
diff := diffs.GetDiffsForField("RuleGroupIndex")
assert.Len(t, diff, 1)
assert.Equal(t, rule1.RuleGroupIndex, diff[0].Left.Interface())
assert.Equal(t, rule2.RuleGroupIndex, diff[0].Right.Interface())
difCnt++
}
require.Lenf(t, diffs, difCnt, "Got some unexpected diffs. Either add to ignore or add assert to it")
@@ -538,3 +546,33 @@ func TestDiff(t *testing.T) {
})
})
}
func TestSortByGroupIndex(t *testing.T) {
t.Run("should sort rules by GroupIndex", func(t *testing.T) {
rules := GenerateAlertRules(rand.Intn(5)+5, AlertRuleGen(WithUniqueGroupIndex()))
rand.Shuffle(len(rules), func(i, j int) {
rules[i], rules[j] = rules[j], rules[i]
})
require.False(t, sort.SliceIsSorted(rules, func(i, j int) bool {
return rules[i].RuleGroupIndex < rules[j].RuleGroupIndex
}))
RulesGroup(rules).SortByGroupIndex()
require.True(t, sort.SliceIsSorted(rules, func(i, j int) bool {
return rules[i].RuleGroupIndex < rules[j].RuleGroupIndex
}))
})
t.Run("should sort by ID if same GroupIndex", func(t *testing.T) {
rules := GenerateAlertRules(rand.Intn(5)+5, AlertRuleGen(WithUniqueID(), WithGroupIndex(rand.Int())))
rand.Shuffle(len(rules), func(i, j int) {
rules[i], rules[j] = rules[j], rules[i]
})
require.False(t, sort.SliceIsSorted(rules, func(i, j int) bool {
return rules[i].ID < rules[j].ID
}))
RulesGroup(rules).SortByGroupIndex()
require.True(t, sort.SliceIsSorted(rules, func(i, j int) bool {
return rules[i].ID < rules[j].ID
}))
})
}

@@ -9,9 +9,11 @@ import (
"github.com/grafana/grafana/pkg/util"
)
type AlertRuleMutator func(*AlertRule)
// AlertRuleGen provides a factory function that generates a random AlertRule.
// The mutators arguments allows changing fields of the resulting structure
-func AlertRuleGen(mutators ...func(*AlertRule)) func() *AlertRule {
+func AlertRuleGen(mutators ...AlertRuleMutator) func() *AlertRule {
return func() *AlertRule {
randNoDataState := func() NoDataState {
s := [...]NoDataState{
@@ -74,6 +76,7 @@ func AlertRuleGen(mutators ...func(*AlertRule)) func() *AlertRule {
DashboardUID: dashUID,
PanelID: panelID,
RuleGroup: "TEST-GROUP-" + util.GenerateShortUID(),
RuleGroupIndex: rand.Int(),
NoDataState: randNoDataState(),
ExecErrState: randErrState(),
For: forInterval,
@@ -88,6 +91,48 @@
}
}
func WithUniqueID() AlertRuleMutator {
usedID := make(map[int64]struct{})
return func(rule *AlertRule) {
for {
id := rand.Int63()
if _, ok := usedID[id]; !ok {
usedID[id] = struct{}{}
rule.ID = id
return
}
}
}
}
func WithGroupIndex(groupIndex int) AlertRuleMutator {
return func(rule *AlertRule) {
rule.RuleGroupIndex = groupIndex
}
}
func WithUniqueGroupIndex() AlertRuleMutator {
usedIdx := make(map[int]struct{})
return func(rule *AlertRule) {
for {
idx := rand.Int()
if _, ok := usedIdx[idx]; !ok {
usedIdx[idx] = struct{}{}
rule.RuleGroupIndex = idx
return
}
}
}
}
func WithSequentialGroupIndex() AlertRuleMutator {
idx := 1
return func(rule *AlertRule) {
rule.RuleGroupIndex = idx
idx++
}
}
func GenerateAlertQuery() AlertQuery {
f := rand.Intn(10) + 5
t := rand.Intn(f)
@@ -155,6 +200,7 @@ func CopyRule(r *AlertRule) *AlertRule {
UID: r.UID,
NamespaceUID: r.NamespaceUID,
RuleGroup: r.RuleGroup,
RuleGroupIndex: r.RuleGroupIndex,
NoDataState: r.NoDataState,
ExecErrState: r.ExecErrState,
For: r.For,

@@ -230,6 +230,7 @@ func (st DBstore) UpdateAlertRules(ctx context.Context, rules []UpdateRule) erro
RuleUID: r.New.UID,
RuleNamespaceUID: r.New.NamespaceUID,
RuleGroup: r.New.RuleGroup,
RuleGroupIndex: r.New.RuleGroupIndex,
ParentVersion: parentVersion,
Version: r.New.Version + 1,
Created: r.New.Updated,
@@ -283,7 +284,7 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR
q = q.Where("rule_group = ?", query.RuleGroup)
}
-q = q.OrderBy("id ASC")
+q = q.Asc("namespace_uid", "rule_group", "rule_group_idx", "id")
alertRules := make([]*ngmodels.AlertRule, 0)
if err := q.Find(&alertRules); err != nil {
@@ -409,6 +410,7 @@ func (st DBstore) GetAlertRulesForScheduling(ctx context.Context, query *ngmodel
}
q = q.NotIn("org_id", excludeOrgs...)
}
q = q.Asc("namespace_uid", "rule_group", "rule_group_idx", "id")
if err := q.Find(&alerts); err != nil {
return err
}

@@ -92,6 +92,8 @@ func (*OSSMigrations) AddMigration(mg *Migrator) {
accesscontrol.AddManagedFolderAlertActionsMigration(mg)
accesscontrol.AddActionNameMigrator(mg)
addPlaylistUIDMigration(mg)
ualert.UpdateRuleGroupIndexMigration(mg)
}
func addMigrationLogMigrations(mg *Migrator) {

@@ -13,6 +13,7 @@ import (
)
type alertRule struct {
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
Title string
Condition string
@@ -22,6 +23,7 @@ type alertRule struct {
UID string `xorm:"uid"`
NamespaceUID string `xorm:"namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
NoDataState string
ExecErrState string
For duration
@@ -35,6 +37,7 @@ type alertRuleVersion struct {
RuleUID string `xorm:"rule_uid"`
RuleNamespaceUID string `xorm:"rule_namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
ParentVersion int64
RestoredFrom int64
Version int64
@@ -59,6 +62,7 @@ func (a *alertRule) makeVersion() *alertRuleVersion {
RuleUID: a.UID,
RuleNamespaceUID: a.NamespaceUID,
RuleGroup: a.RuleGroup,
RuleGroupIndex: a.RuleGroupIndex,
ParentVersion: 0,
RestoredFrom: 0,
Version: 1,

@@ -241,6 +241,16 @@ func AddAlertRuleMigrations(mg *migrator.Migrator, defaultIntervalSeconds int64)
Cols: []string{"org_id", "dashboard_uid", "panel_id"},
},
))
mg.AddMigration("add rule_group_idx column to alert_rule", migrator.NewAddColumnMigration(
migrator.Table{Name: "alert_rule"},
&migrator.Column{
Name: "rule_group_idx",
Type: migrator.DB_Int,
Nullable: false,
Default: "1",
},
))
}
func AddAlertRuleVersionMigrations(mg *migrator.Migrator) {
@@ -284,6 +294,16 @@
// add labels column
mg.AddMigration("add column labels to alert_rule_version", migrator.NewAddColumnMigration(alertRuleVersion, &migrator.Column{Name: "labels", Type: migrator.DB_Text, Nullable: true}))
mg.AddMigration("add rule_group_idx column to alert_rule_version", migrator.NewAddColumnMigration(
migrator.Table{Name: "alert_rule_version"},
&migrator.Column{
Name: "rule_group_idx",
Type: migrator.DB_Int,
Nullable: false,
Default: "1",
},
))
}
func AddAlertmanagerConfigMigrations(mg *migrator.Migrator) {

@@ -9,6 +9,7 @@ import (
"path/filepath"
"strconv"
"strings"
"time"
pb "github.com/prometheus/alertmanager/silence/silencepb" pb "github.com/prometheus/alertmanager/silence/silencepb"
"xorm.io/xorm" "xorm.io/xorm"
@ -37,6 +38,7 @@ var migTitle = "move dashboard alerts to unified alerting"
var rmMigTitle = "remove unified alerting data" var rmMigTitle = "remove unified alerting data"
const clearMigrationEntryTitle = "clear migration entry %q" const clearMigrationEntryTitle = "clear migration entry %q"
const codeMigration = "code migration"
type MigrationError struct {
AlertId int64
@@ -227,7 +229,7 @@ type migration struct {
}
func (m *migration) SQL(dialect migrator.Dialect) string {
-return "code migration"
+return codeMigration
}
// nolint: gocyclo
@@ -511,7 +513,7 @@ type rmMigration struct {
}
func (m *rmMigration) SQL(dialect migrator.Dialect) string {
-return "code migration"
+return codeMigration
}
func (m *rmMigration) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
@@ -710,7 +712,7 @@ func (u *upgradeNgAlerting) updateAlertmanagerFiles(orgId int64, migrator *migra
}
func (u *upgradeNgAlerting) SQL(migrator.Dialect) string {
-return "code migration"
+return codeMigration
}
// getAlertFolderNameFromDashboard generates a folder name for alerts that belong to a dashboard. Formats the string according to DASHBOARD_FOLDER format.
@@ -776,5 +778,86 @@ func (c createDefaultFoldersForAlertingMigration) Exec(sess *xorm.Session, migra
}
func (c createDefaultFoldersForAlertingMigration) SQL(migrator.Dialect) string {
-return "code migration"
+return codeMigration
}
// UpdateRuleGroupIndexMigration updates the new rule_group_idx field for alert rules that belong to a group with more than one rule.
func UpdateRuleGroupIndexMigration(mg *migrator.Migrator) {
if !mg.Cfg.UnifiedAlerting.IsEnabled() {
return
}
mg.AddMigration("update group index for alert rules", &updateRulesOrderInGroup{})
}
type updateRulesOrderInGroup struct {
migrator.MigrationBase
}
func (c updateRulesOrderInGroup) SQL(migrator.Dialect) string {
return codeMigration
}
func (c updateRulesOrderInGroup) Exec(sess *xorm.Session, migrator *migrator.Migrator) error {
var rows []*alertRule
if err := sess.Table(alertRule{}).Asc("id").Find(&rows); err != nil {
return fmt.Errorf("failed to read the list of alert rules: %w", err)
}
if len(rows) == 0 {
migrator.Logger.Debug("No rules to migrate.")
return nil
}
groups := map[ngmodels.AlertRuleGroupKey][]*alertRule{}
for _, row := range rows {
groupKey := ngmodels.AlertRuleGroupKey{
OrgID: row.OrgID,
NamespaceUID: row.NamespaceUID,
RuleGroup: row.RuleGroup,
}
groups[groupKey] = append(groups[groupKey], row)
}
toUpdate := make([]*alertRule, 0, len(rows))
for _, rules := range groups {
for i, rule := range rules {
if rule.RuleGroupIndex == i+1 {
continue
}
rule.RuleGroupIndex = i + 1
toUpdate = append(toUpdate, rule)
}
}
if len(toUpdate) == 0 {
migrator.Logger.Debug("No rules to upgrade group index")
return nil
}
updated := time.Now()
versions := make([]*alertRuleVersion, 0, len(toUpdate))
for _, rule := range toUpdate {
rule.Updated = updated
version := rule.makeVersion()
version.Version = rule.Version + 1
version.ParentVersion = rule.Version
rule.Version++
_, err := sess.ID(rule.ID).Cols("version", "updated", "rule_group_idx").Update(rule)
if err != nil {
migrator.Logger.Error("failed to update alert rule", "uid", rule.UID, "err", err)
return fmt.Errorf("unable to update alert rules with group index: %w", err)
}
migrator.Logger.Debug("updated group index for alert rule", "rule_uid", rule.UID)
versions = append(versions, version)
}
_, err := sess.Insert(&versions)
if err != nil {
migrator.Logger.Error("failed to insert changes to alert_rule_version", "err", err)
return fmt.Errorf("unable to update alert rules with group index: %w", err)
}
return nil
}

@@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"io/ioutil"
"math/rand"
"net/http" "net/http"
"testing" "testing"
"time" "time"
@ -17,6 +18,7 @@ import (
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/tests/testinfra" "github.com/grafana/grafana/pkg/tests/testinfra"
"github.com/grafana/grafana/pkg/util"
)
func TestAlertRulePermissions(t *testing.T) {
@@ -719,6 +721,102 @@ func TestRulerRulesFilterByDashboard(t *testing.T) {
}
}
func TestRuleGroupSequence(t *testing.T) {
// Setup Grafana and its Database
dir, path := testinfra.CreateGrafDir(t, testinfra.GrafanaOpts{
DisableLegacyAlerting: true,
EnableUnifiedAlerting: true,
DisableAnonymous: true,
AppModeProduction: true,
})
grafanaListedAddr, store := testinfra.StartGrafana(t, dir, path)
// Create a user to make authenticated requests
createUser(t, store, models.CreateUserCommand{
DefaultOrgRole: string(models.ROLE_EDITOR),
Password: "password",
Login: "grafana",
})
client := newAlertingApiClient(grafanaListedAddr, "grafana", "password")
folder1Title := "folder1"
client.CreateFolder(t, util.GenerateShortUID(), folder1Title)
group1 := generateAlertRuleGroup(5, alertRuleGen())
group2 := generateAlertRuleGroup(5, alertRuleGen())
status, _ := client.PostRulesGroup(t, folder1Title, &group1)
require.Equal(t, http.StatusAccepted, status)
status, _ = client.PostRulesGroup(t, folder1Title, &group2)
require.Equal(t, http.StatusAccepted, status)
t.Run("should persist order of the rules in a group", func(t *testing.T) {
group1Get := client.GetRulesGroup(t, folder1Title, group1.Name)
assert.Equal(t, group1.Name, group1Get.Name)
assert.Equal(t, group1.Interval, group1Get.Interval)
assert.Len(t, group1Get.Rules, len(group1.Rules))
for i, getRule := range group1Get.Rules {
rule := group1.Rules[i]
assert.Equal(t, getRule.GrafanaManagedAlert.Title, rule.GrafanaManagedAlert.Title)
assert.NotEmpty(t, getRule.GrafanaManagedAlert.UID)
}
// now shuffle the rules
postableGroup1 := convertGettableRuleGroupToPostable(group1Get.GettableRuleGroupConfig)
rand.Shuffle(len(postableGroup1.Rules), func(i, j int) {
postableGroup1.Rules[i], postableGroup1.Rules[j] = postableGroup1.Rules[j], postableGroup1.Rules[i]
})
expectedUids := make([]string, 0, len(postableGroup1.Rules))
for _, rule := range postableGroup1.Rules {
expectedUids = append(expectedUids, rule.GrafanaManagedAlert.UID)
}
status, _ := client.PostRulesGroup(t, folder1Title, &postableGroup1)
require.Equal(t, http.StatusAccepted, status)
group1Get = client.GetRulesGroup(t, folder1Title, group1.Name)
require.Len(t, group1Get.Rules, len(postableGroup1.Rules))
actualUids := make([]string, 0, len(group1Get.Rules))
for _, getRule := range group1Get.Rules {
actualUids = append(actualUids, getRule.GrafanaManagedAlert.UID)
}
assert.Equal(t, expectedUids, actualUids)
})
t.Run("should be able to move a rule from another group in a specific position", func(t *testing.T) {
group1Get := client.GetRulesGroup(t, folder1Title, group1.Name)
group2Get := client.GetRulesGroup(t, folder1Title, group2.Name)
movedRule := convertGettableRuleToPostable(group2Get.Rules[3])
// insert the rule taken from the second group into a specific position of the first group
postableGroup1 := convertGettableRuleGroupToPostable(group1Get.GettableRuleGroupConfig)
postableGroup1.Rules = append(append(append([]apimodels.PostableExtendedRuleNode{}, postableGroup1.Rules[0:1]...), movedRule), postableGroup1.Rules[2:]...)
expectedUids := make([]string, 0, len(postableGroup1.Rules))
for _, rule := range postableGroup1.Rules {
expectedUids = append(expectedUids, rule.GrafanaManagedAlert.UID)
}
status, _ := client.PostRulesGroup(t, folder1Title, &postableGroup1)
require.Equal(t, http.StatusAccepted, status)
group1Get = client.GetRulesGroup(t, folder1Title, group1.Name)
require.Len(t, group1Get.Rules, len(postableGroup1.Rules))
actualUids := make([]string, 0, len(group1Get.Rules))
for _, getRule := range group1Get.Rules {
actualUids = append(actualUids, getRule.GrafanaManagedAlert.UID)
}
assert.Equal(t, expectedUids, actualUids)
group2Get = client.GetRulesGroup(t, folder1Title, group2.Name)
assert.Len(t, group2Get.Rules, len(group2.Rules)-1)
for _, rule := range group2Get.Rules {
require.NotEqual(t, movedRule.GrafanaManagedAlert.UID, rule.GrafanaManagedAlert.UID)
}
})
}
func newTestingRuleConfig(t *testing.T) apimodels.PostableRuleGroupConfig {
interval, err := model.ParseDuration("1m")
require.NoError(t, err)

@@ -85,7 +85,7 @@ func getBody(t *testing.T, body io.ReadCloser) string {
return string(b)
}
-func AlertRuleGen() func() apimodels.PostableExtendedRuleNode {
+func alertRuleGen() func() apimodels.PostableExtendedRuleNode {
return func() apimodels.PostableExtendedRuleNode {
return apimodels.PostableExtendedRuleNode{
ApiRuleNode: &apimodels.ApiRuleNode{
@@ -115,7 +115,7 @@ func AlertRuleGen() func() apimodels.PostableExtendedRuleNode {
}
}
-func GenerateAlertRuleGroup(rulesCount int, gen func() apimodels.PostableExtendedRuleNode) apimodels.PostableRuleGroupConfig {
+func generateAlertRuleGroup(rulesCount int, gen func() apimodels.PostableExtendedRuleNode) apimodels.PostableRuleGroupConfig {
rules := make([]apimodels.PostableExtendedRuleNode, 0, rulesCount)
for i := 0; i < rulesCount; i++ {
rules = append(rules, gen())
@@ -127,10 +127,10 @@ func GenerateAlertRuleGroup(rulesCount int, gen func() apimodels.PostableExtende
}
}
-func ConvertGettableRuleGroupToPostable(gettable apimodels.GettableRuleGroupConfig) apimodels.PostableRuleGroupConfig {
+func convertGettableRuleGroupToPostable(gettable apimodels.GettableRuleGroupConfig) apimodels.PostableRuleGroupConfig {
rules := make([]apimodels.PostableExtendedRuleNode, 0, len(gettable.Rules))
for _, rule := range gettable.Rules {
-rules = append(rules, ConvertGettableRuleToPostable(rule))
+rules = append(rules, convertGettableRuleToPostable(rule))
}
return apimodels.PostableRuleGroupConfig{
Name: gettable.Name,
@@ -139,14 +139,14 @@ func ConvertGettableRuleGroupToPostable(gettable apimodels.GettableRuleGroupConf
}
}
-func ConvertGettableRuleToPostable(gettable apimodels.GettableExtendedRuleNode) apimodels.PostableExtendedRuleNode {
+func convertGettableRuleToPostable(gettable apimodels.GettableExtendedRuleNode) apimodels.PostableExtendedRuleNode {
return apimodels.PostableExtendedRuleNode{
ApiRuleNode: gettable.ApiRuleNode,
-GrafanaManagedAlert: ConvertGettableGrafanaRuleToPostable(gettable.GrafanaManagedAlert),
+GrafanaManagedAlert: convertGettableGrafanaRuleToPostable(gettable.GrafanaManagedAlert),
}
}
-func ConvertGettableGrafanaRuleToPostable(gettable *apimodels.GettableGrafanaRule) *apimodels.PostableGrafanaRule {
+func convertGettableGrafanaRuleToPostable(gettable *apimodels.GettableGrafanaRule) *apimodels.PostableGrafanaRule {
if gettable == nil {
return nil
}

@@ -216,11 +216,16 @@ export function getRulerClient(rulerConfig: RulerDataSourceConfig): RulerClient
// make sure our updated alert has the same UID as before
copyGrafanaUID(existingRule, newRule);
-// create the new array of rules we want to send to the group
+// create the new array of rules we want to send to the group. Keep the order of alerts in the group.
-const newRules = existingRule.group.rules
-.filter((rule): rule is RulerGrafanaRuleDTO => isGrafanaRulerRule(rule))
-.filter((rule) => rule.grafana_alert.uid !== existingRule.rule.grafana_alert.uid)
-.concat(newRule as RulerGrafanaRuleDTO);
+const newRules = existingRule.group.rules.map((rule) => {
+if (!isGrafanaRulerRule(rule)) {
+return rule;
+}
+if (rule.grafana_alert.uid === existingRule.rule.grafana_alert.uid) {
+return newRule;
+}
+return rule;
+});
await setRulerRuleGroup(rulerConfig, existingRule.namespace, {
name: existingRule.group.name,
