Alerting: In migration improve deduplication of title and group (#78351)

* Alerting: In migration improve deduplication of title and group

This change improves the alert rule titles generated by the legacy
migration when titles need to be deduplicated. When a duplicate title
is detected, we now first attempt to append a sequential index,
falling back to a random uid if no unique title is found within 10
attempts. This should produce shorter, more readable deduplicated
titles in most cases.
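For context, the approach reduces to roughly the following sketch. This is a minimal illustration, not the actual Deduplicator added in pkg/services/ngalert/migration/models/models.go (which also handles truncation and case-insensitive comparison); the function name, example titles, and the stubbed uid generator are made up for the example.

package main

import "fmt"

// deduplicateTitle tries sequential " #N" suffixes first and falls back to a
// random uid suffix after 10 attempts, mirroring the strategy described above.
func deduplicateTitle(seen map[string]struct{}, base string, newUID func() string) string {
	if _, ok := seen[base]; !ok {
		seen[base] = struct{}{}
		return base
	}
	for i := 2; i < 2+10; i++ { // up to 10 sequential attempts: " #2" .. " #11"
		candidate := fmt.Sprintf("%s #%d", base, i)
		if _, ok := seen[candidate]; !ok {
			seen[candidate] = struct{}{}
			return candidate
		}
	}
	candidate := base + "_" + newUID() // fallback: random uid suffix
	seen[candidate] = struct{}{}
	return candidate
}

func main() {
	seen := map[string]struct{}{}
	uid := func() string { return "abc123" } // stand-in for util.GenerateShortUID
	fmt.Println(deduplicateTitle(seen, "High CPU", uid)) // High CPU
	fmt.Println(deduplicateTitle(seen, "High CPU", uid)) // High CPU #2
}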

In addition, rule groups are no longer deduplicated. Instead, each
group name is set to the truncated dashboard name plus the humanized
alert frequency, so alerts from the same dashboard share a group when
they have the same evaluation interval. If truncation causes two group
names to overlap, that is not a problem: every alert still ends up in
a group with the correct evaluation interval.
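As a rough, self-contained sketch of the new group naming (mirroring the groupName helper added in alert_rule.go, shown in the diff below), with maxLen standing in for store.AlertRuleMaxRuleGroupNameLength — the value 190 used here is only a placeholder assumption:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// groupName sketches the new group naming: truncated dashboard title plus the
// humanized evaluation interval.
func groupName(intervalSeconds int64, dashboardTitle string, maxLen int) string {
	duration := model.Duration(time.Duration(intervalSeconds) * time.Second) // e.g. 60 -> "1m"
	suffix := fmt.Sprintf(" - %s", duration.String())
	if len(dashboardTitle)+len(suffix) > maxLen {
		dashboardTitle = dashboardTitle[:maxLen-len(suffix)]
	}
	return dashboardTitle + suffix
}

func main() {
	fmt.Println(groupName(60, "Service Overview", 190))         // Service Overview - 1m
	fmt.Println(groupName(86400+3600, "Service Overview", 190)) // Service Overview - 1d1h
}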
Matthew Jacobson 2 years ago committed by GitHub
parent 773e0680c5
commit 2b51f0e263
  1. pkg/services/ngalert/migration/alert_rule.go (42)
  2. pkg/services/ngalert/migration/alert_rule_test.go (82)
  3. pkg/services/ngalert/migration/channel.go (28)
  4. pkg/services/ngalert/migration/migration_test.go (2)
  5. pkg/services/ngalert/migration/models.go (86)
  6. pkg/services/ngalert/migration/models/models.go (104)
  7. pkg/services/ngalert/migration/models/models_test.go (149)
  8. pkg/services/ngalert/migration/permissions_test.go (4)
  9. pkg/services/ngalert/migration/ualert_test.go (21)

@@ -6,6 +6,8 @@ import (
"fmt"
"time"
"github.com/prometheus/common/model"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models"
@@ -65,27 +67,14 @@ func (om *OrgMigration) migrateAlert(ctx context.Context, l log.Logger, da *migr
}
// Here we ensure that the alert rule title is unique within the folder.
titleDedupSet := om.AlertTitleDeduplicator(info.NewFolderUID)
name := truncate(da.Name, store.AlertDefinitionMaxTitleLength)
if titleDedupSet.contains(name) {
dedupedName := titleDedupSet.deduplicate(name)
l.Debug("Duplicate alert rule name detected, renaming", "oldName", name, "newName", dedupedName)
name = dedupedName
}
titleDedupSet.add(name)
// Here we ensure that the alert rule group is unique within the folder.
// This is so that we don't have to ensure that the alerts rules have the same interval.
groupDedupSet := om.AlertTitleDeduplicator(info.NewFolderUID)
panelSuffix := fmt.Sprintf(" - %d", da.PanelID)
truncatedDashboard := truncate(info.DashboardName, store.AlertRuleMaxRuleGroupNameLength-len(panelSuffix))
groupName := fmt.Sprintf("%s%s", truncatedDashboard, panelSuffix) // Unique to this dash alert but still contains useful info.
if groupDedupSet.contains(groupName) {
dedupedGroupName := groupDedupSet.deduplicate(groupName)
l.Debug("Duplicate alert rule group name detected, renaming", "oldGroup", groupName, "newGroup", dedupedGroupName)
groupName = dedupedGroupName
}
groupDedupSet.add(groupName)
titleDeduplicator := om.titleDeduplicatorForFolder(info.NewFolderUID)
name, err := titleDeduplicator.Deduplicate(da.Name)
if err != nil {
return nil, err
}
if name != da.Name {
l.Info(fmt.Sprintf("Alert rule title modified to be unique within the folder and fit within the maximum length of %d", store.AlertDefinitionMaxTitleLength), "old", da.Name, "new", name)
}
dashUID := info.DashboardUID
ar := &ngmodels.AlertRule{
@@ -99,7 +88,7 @@ func (om *OrgMigration) migrateAlert(ctx context.Context, l log.Logger, da *migr
NamespaceUID: info.NewFolderUID,
DashboardUID: &dashUID,
PanelID: &da.PanelID,
RuleGroup: groupName,
RuleGroup: groupName(ruleAdjustInterval(da.Frequency), info.DashboardName),
For: da.For,
Updated: time.Now().UTC(),
Annotations: annotations,
@@ -304,3 +293,12 @@ func extractChannelIDs(d *migrationStore.DashAlert) (channelUids []migrationStor
return channelUids
}
// groupName constructs a group name from the dashboard title and the interval. It truncates the dashboard title
// if necessary to ensure that the group name is not longer than the maximum allowed length.
func groupName(interval int64, dashboardTitle string) string {
duration := model.Duration(time.Duration(interval) * time.Second) // Humanize.
panelSuffix := fmt.Sprintf(" - %s", duration.String())
truncatedDashboard := truncate(dashboardTitle, store.AlertRuleMaxRuleGroupNameLength-len(panelSuffix))
return fmt.Sprintf("%s%s", truncatedDashboard, panelSuffix)
}

@@ -177,10 +177,7 @@ func TestMakeAlertRule(t *testing.T) {
require.NoError(t, err)
require.Len(t, ar.Title, store.AlertDefinitionMaxTitleLength)
parts := strings.SplitN(ar.Title, "_", 2)
require.Len(t, parts, 2)
require.Greater(t, len(parts[1]), 8, "unique identifier should be longer than 9 characters")
require.Equal(t, store.AlertDefinitionMaxTitleLength-1, len(parts[0])+len(parts[1]), "truncated name + underscore + unique identifier should together be DefaultFieldMaxLength")
require.Equal(t, ar.Title, fmt.Sprintf("%s #2", strings.Repeat("a", store.AlertDefinitionMaxTitleLength-3)))
})
})
@@ -241,42 +238,59 @@ func TestMakeAlertRule(t *testing.T) {
require.Equal(t, expected, ar.Annotations["message"])
})
t.Run("create unique group from dashboard title and panel", func(t *testing.T) {
t.Run("create unique group from dashboard title and humanized interval", func(t *testing.T) {
service := NewTestMigrationService(t, sqlStore, nil)
m := service.newOrgMigration(1)
da := createTestDashAlert()
da.PanelID = 42
ar, err := m.migrateAlert(context.Background(), &logtest.Fake{}, &da, info)
require.NoError(t, err)
require.Equal(t, fmt.Sprintf("%s - %d", info.DashboardName, da.PanelID), ar.RuleGroup)
})
t.Run("truncate rule group if dashboard name + panel id is too long", func(t *testing.T) {
service := NewTestMigrationService(t, sqlStore, nil)
m := service.newOrgMigration(1)
da := createTestDashAlert()
da.PanelID = 42
info := migmodels.DashboardUpgradeInfo{
DashboardUID: "dashboarduid",
DashboardName: strings.Repeat("a", store.AlertRuleMaxRuleGroupNameLength-1),
NewFolderUID: "newfolderuid",
NewFolderName: "newfoldername",
intervalTests := []struct {
interval int64
expected string
}{
{interval: 10, expected: "10s"},
{interval: 30, expected: "30s"},
{interval: 60, expected: "1m"},
{interval: 120, expected: "2m"},
{interval: 3600, expected: "1h"},
{interval: 7200, expected: "2h"},
{interval: 86400, expected: "1d"},
{interval: 172800, expected: "2d"},
{interval: 604800, expected: "1w"},
{interval: 1209600, expected: "2w"},
{interval: 31536000, expected: "1y"},
{interval: 63072000, expected: "2y"},
{interval: 60 + 30, expected: "1m30s"},
{interval: 3600 + 10, expected: "1h10s"},
{interval: 3600 + 60, expected: "1h1m"},
{interval: 3600 + 60 + 10, expected: "1h1m10s"},
{interval: 86400 + 10, expected: "1d10s"},
{interval: 86400 + 60, expected: "1d1m"},
{interval: 86400 + 3600, expected: "1d1h"},
{interval: 86400 + 3600 + 60, expected: "1d1h1m"},
{interval: 86400 + 3600 + 10, expected: "1d1h10s"},
{interval: 86400 + 60 + 10, expected: "1d1m10s"},
{interval: 86400 + 3600 + 60 + 10, expected: "1d1h1m10s"},
{interval: 604800 + 86400 + 3600 + 60 + 10, expected: "8d1h1m10s"},
{interval: 31536000 + 604800 + 86400 + 3600 + 60 + 10, expected: "373d1h1m10s"},
}
for _, test := range intervalTests {
t.Run(fmt.Sprintf("interval %ds should be %s", test.interval, test.expected), func(t *testing.T) {
da.Frequency = test.interval
ar, err := m.migrateAlert(context.Background(), &logtest.Fake{}, &da, info)
require.NoError(t, err)
require.Len(t, ar.RuleGroup, store.AlertRuleMaxRuleGroupNameLength)
require.Equal(t, fmt.Sprintf("%s - %d", strings.Repeat("a", store.AlertRuleMaxRuleGroupNameLength-5), da.PanelID), ar.RuleGroup)
require.Equal(t, fmt.Sprintf("%s - %s", info.DashboardName, test.expected), ar.RuleGroup)
})
}
})
t.Run("deduplicate rule group name if truncation is not unique", func(t *testing.T) {
t.Run("truncate dashboard name part of rule group if too long", func(t *testing.T) {
service := NewTestMigrationService(t, sqlStore, nil)
m := service.newOrgMigration(1)
da := createTestDashAlert()
da.PanelID = 42
info := migmodels.DashboardUpgradeInfo{
DashboardUID: "dashboarduid",
DashboardName: strings.Repeat("a", store.AlertRuleMaxRuleGroupNameLength-1),
@@ -284,26 +298,12 @@ func TestMakeAlertRule(t *testing.T) {
NewFolderName: "newfoldername",
}
_, err := m.migrateAlert(context.Background(), &logtest.Fake{}, &da, info)
require.NoError(t, err)
da = createTestDashAlert()
da.PanelID = 42
info = migmodels.DashboardUpgradeInfo{
DashboardUID: "dashboarduid",
DashboardName: strings.Repeat("a", store.AlertRuleMaxRuleGroupNameLength-1),
NewFolderUID: "newfolderuid",
NewFolderName: "newfoldername",
}
ar, err := m.migrateAlert(context.Background(), &logtest.Fake{}, &da, info)
require.NoError(t, err)
require.Len(t, ar.RuleGroup, store.AlertRuleMaxRuleGroupNameLength)
parts := strings.SplitN(ar.RuleGroup, "_", 2)
require.Len(t, parts, 2)
require.Greater(t, len(parts[1]), 8, "unique identifier should be longer than 9 characters")
require.Equal(t, store.AlertDefinitionMaxTitleLength-1, len(parts[0])+len(parts[1]), "truncated name + underscore + unique identifier should together be DefaultFieldMaxLength")
suffix := fmt.Sprintf(" - %ds", ar.IntervalSeconds)
require.Equal(t, fmt.Sprintf("%s%s", strings.Repeat("a", store.AlertRuleMaxRuleGroupNameLength-len(suffix)), suffix), ar.RuleGroup)
})
}

@@ -21,7 +21,6 @@ import (
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/util"
)
const (
@@ -164,11 +163,6 @@ func quote(s string) string {
// Create a notifier (PostableGrafanaReceiver) from a legacy notification channel
func (om *OrgMigration) createNotifier(c *legacymodels.AlertNotification) (*apimodels.PostableGrafanaReceiver, error) {
uid, err := om.determineChannelUid(c)
if err != nil {
return nil, err
}
settings, secureSettings, err := om.migrateSettingsToSecureSettings(c.Type, c.Settings, c.SecureSettings)
if err != nil {
return nil, err
@@ -180,7 +174,7 @@ func (om *OrgMigration) createNotifier(c *legacymodels.AlertNotification) (*apim
}
return &apimodels.PostableGrafanaReceiver{
UID: uid,
UID: c.UID,
Name: c.Name,
Type: c.Type,
DisableResolveMessage: c.DisableResolveMessage,
@@ -362,26 +356,6 @@ func (om *OrgMigration) filterReceiversForAlert(name string, channelIDs []migrat
return filteredReceiverNames
}
func (om *OrgMigration) determineChannelUid(c *legacymodels.AlertNotification) (string, error) {
legacyUid := c.UID
if legacyUid == "" {
newUid := util.GenerateShortUID()
om.seenUIDs.add(newUid)
om.log.Info("Legacy notification had an empty uid, generating a new one", "id", c.ID, "uid", newUid)
return newUid, nil
}
if om.seenUIDs.contains(legacyUid) {
newUid := util.GenerateShortUID()
om.seenUIDs.add(newUid)
om.log.Warn("Legacy notification had a UID that collides with a migrated record, generating a new one", "id", c.ID, "old", legacyUid, "new", newUid)
return newUid, nil
}
om.seenUIDs.add(legacyUid)
return legacyUid, nil
}
var secureKeysToMigrate = map[string][]string{
"slack": {"url", "token"},
"pagerduty": {"integrationKey"},

@@ -718,7 +718,7 @@ func TestDashAlertQueryMigration(t *testing.T) {
mutator(rule)
}
rule.RuleGroup = fmt.Sprintf("%s - %d", *rule.DashboardUID, *rule.PanelID)
rule.RuleGroup = fmt.Sprintf("%s - 1m", *rule.DashboardUID)
rule.Annotations["__dashboardUid__"] = *rule.DashboardUID
rule.Annotations["__panelId__"] = strconv.FormatInt(*rule.PanelID, 10)

@@ -1,8 +1,6 @@
package migration
import (
"strings"
pb "github.com/prometheus/alertmanager/silence/silencepb"
"github.com/grafana/grafana/pkg/infra/log"
@@ -14,7 +12,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
// OrgMigration is a helper struct for migrating alerts for a single org. It contains state, services, and caches.
@@ -26,10 +23,8 @@ type OrgMigration struct {
encryptionService secrets.Service
orgID int64
seenUIDs Deduplicator
silences []*pb.MeshSilence
alertRuleTitleDedup map[string]Deduplicator // Folder -> Deduplicator (Title).
alertRuleGroupDedup map[string]Deduplicator // Folder -> Deduplicator (Group).
titleDeduplicatorForFolder func(folderUID string) *migmodels.Deduplicator
// Migrated folder for a dashboard based on permissions. Parent Folder ID -> unique dashboard permission -> custom folder.
permissionsMap map[int64]map[permissionHash]*folder.Folder
@@ -42,6 +37,7 @@ type OrgMigration struct {
// newOrgMigration creates a new OrgMigration for the given orgID.
func (ms *migrationService) newOrgMigration(orgID int64) *OrgMigration {
titlededuplicatorPerFolder := make(map[string]*migmodels.Deduplicator)
return &OrgMigration{
cfg: ms.cfg,
log: ms.log.New("orgID", orgID),
@@ -50,13 +46,13 @@ func (ms *migrationService) newOrgMigration(orgID int64) *OrgMigration {
encryptionService: ms.encryptionService,
orgID: orgID,
// We deduplicate for case-insensitive matching in MySQL-compatible backend flavours because they use case-insensitive collation.
seenUIDs: Deduplicator{set: make(map[string]struct{}), caseInsensitive: ms.migrationStore.CaseInsensitive()},
silences: make([]*pb.MeshSilence, 0),
alertRuleTitleDedup: make(map[string]Deduplicator),
// We deduplicate alert rule groups so that we don't have to ensure that the alerts rules have the same interval.
alertRuleGroupDedup: make(map[string]Deduplicator),
titleDeduplicatorForFolder: func(folderUID string) *migmodels.Deduplicator {
if _, ok := titlededuplicatorPerFolder[folderUID]; !ok {
titlededuplicatorPerFolder[folderUID] = migmodels.NewDeduplicator(ms.migrationStore.CaseInsensitive(), store.AlertDefinitionMaxTitleLength)
}
return titlededuplicatorPerFolder[folderUID]
},
permissionsMap: make(map[int64]map[permissionHash]*folder.Folder),
folderCache: make(map[int64]*folder.Folder),
@@ -69,73 +65,7 @@ func (ms *migrationService) newOrgMigration(orgID int64) *OrgMigration {
}
}
func (om *OrgMigration) AlertTitleDeduplicator(folderUID string) Deduplicator {
if _, ok := om.alertRuleTitleDedup[folderUID]; !ok {
om.alertRuleTitleDedup[folderUID] = Deduplicator{
set: make(map[string]struct{}),
caseInsensitive: om.migrationStore.CaseInsensitive(),
maxLen: store.AlertDefinitionMaxTitleLength,
}
}
return om.alertRuleTitleDedup[folderUID]
}
func (om *OrgMigration) AlertGroupDeduplicator(folderUID string) Deduplicator {
if _, ok := om.alertRuleGroupDedup[folderUID]; !ok {
om.alertRuleGroupDedup[folderUID] = Deduplicator{
set: make(map[string]struct{}),
caseInsensitive: om.migrationStore.CaseInsensitive(),
maxLen: store.AlertRuleMaxRuleGroupNameLength,
}
}
return om.alertRuleGroupDedup[folderUID]
}
type AlertPair struct {
AlertRule *models.AlertRule
DashAlert *migrationStore.DashAlert
}
// Deduplicator is a wrapper around map[string]struct{} and util.GenerateShortUID() which aims help maintain and generate
// unique strings (such as uids or titles). if caseInsensitive is true, all uniqueness is determined in a
// case-insensitive manner. if maxLen is greater than 0, all strings will be truncated to maxLen before being checked in
// contains and dedup will always return a string of length maxLen or less.
type Deduplicator struct {
set map[string]struct{}
caseInsensitive bool
maxLen int
}
// contains checks whether the given string has already been seen by this Deduplicator.
func (s *Deduplicator) contains(u string) bool {
dedup := u
if s.caseInsensitive {
dedup = strings.ToLower(dedup)
}
if s.maxLen > 0 && len(dedup) > s.maxLen {
dedup = dedup[:s.maxLen]
}
_, seen := s.set[dedup]
return seen
}
// deduplicate returns a unique string based on the given string by appending a uuid to it. Will truncate the given string if
// the resulting string would be longer than maxLen.
func (s *Deduplicator) deduplicate(dedup string) string {
uid := util.GenerateShortUID()
if s.maxLen > 0 && len(dedup)+1+len(uid) > s.maxLen {
trunc := s.maxLen - 1 - len(uid)
dedup = dedup[:trunc]
}
return dedup + "_" + uid
}
// add adds the given string to the Deduplicator.
func (s *Deduplicator) add(uid string) {
dedup := uid
if s.caseInsensitive {
dedup = strings.ToLower(dedup)
}
s.set[dedup] = struct{}{}
}

@@ -0,0 +1,104 @@
package models
import (
"fmt"
"strings"
"github.com/grafana/grafana/pkg/util"
)
// MaxDeduplicationAttempts is the maximum number of attempts to try to deduplicate a string using any
// individual method, such as sequential index suffixes or uids.
const MaxDeduplicationAttempts = 10
// Deduplicator is a utility for deduplicating strings. It keeps track of the strings it has seen and appends a unique
// suffix to strings that have already been seen. It can optionally truncate strings before appending the suffix to
// ensure that the resulting string is not longer than maxLen.
// This implementation will first try to deduplicate via a sequential index suffix of the form " #2", " #3", etc.
// If after MaxDeduplicationAttempts attempts it still cannot find a unique string, it will generate a new
// unique uid and append that to the string.
type Deduplicator struct {
set map[string]int
caseInsensitive bool
maxLen int
uidGenerator func() string
}
// NewDeduplicator creates a new deduplicator.
// caseInsensitive determines whether the string comparison should be case-insensitive.
// maxLen determines the maximum length of deduplicated strings. If the deduplicated string would be longer than
// maxLen, it will be truncated.
func NewDeduplicator(caseInsensitive bool, maxLen int) *Deduplicator {
return &Deduplicator{
set: make(map[string]int),
caseInsensitive: caseInsensitive,
maxLen: maxLen,
uidGenerator: util.GenerateShortUID,
}
}
// Deduplicate returns a unique string based on the given base string. If the base string has not already been seen by
// this deduplicator, it will be returned as-is. If the base string has already been seen, a unique suffix will be
// appended to the base string to make it unique.
func (s *Deduplicator) Deduplicate(base string) (string, error) {
if s.maxLen > 0 && len(base) > s.maxLen {
base = base[:s.maxLen]
}
cnt, ok := s.contains(base)
if !ok {
s.add(base, 0)
return base, nil
}
// Start at 2, so we get a, a #2, a #3, etc.
for i := 2 + cnt; i < 2+cnt+MaxDeduplicationAttempts; i++ {
dedup := s.appendSuffix(base, fmt.Sprintf(" #%d", i))
if _, ok := s.contains(dedup); !ok {
s.add(dedup, 0)
return dedup, nil
}
}
// None of the simple suffixes worked, so we generate a new uid. We try a few times, just in case, but this should
// almost always create a unique string on the first try.
for i := 0; i < MaxDeduplicationAttempts; i++ {
dedup := s.appendSuffix(base, fmt.Sprintf("_%s", s.uidGenerator()))
if _, ok := s.contains(dedup); !ok {
s.add(dedup, 0)
return dedup, nil
}
}
return "", fmt.Errorf("failed to deduplicate %q", base)
}
// contains checks whether the given string has already been seen by this deduplicator.
func (s *Deduplicator) contains(u string) (int, bool) {
dedup := u
if s.caseInsensitive {
dedup = strings.ToLower(dedup)
}
if s.maxLen > 0 && len(dedup) > s.maxLen {
dedup = dedup[:s.maxLen]
}
cnt, seen := s.set[dedup]
return cnt, seen
}
// appendSuffix appends the given suffix to the given base string. If the resulting string would be longer than maxLen,
// the base string will be truncated.
func (s *Deduplicator) appendSuffix(base, suffix string) string {
if s.maxLen > 0 && len(base)+len(suffix) > s.maxLen {
return base[:s.maxLen-len(suffix)] + suffix
}
return base + suffix
}
// add adds the given string to the deduplicator.
func (s *Deduplicator) add(uid string, cnt int) {
dedup := uid
if s.caseInsensitive {
dedup = strings.ToLower(dedup)
}
s.set[dedup] = cnt
}

@@ -0,0 +1,149 @@
package models
import (
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/util"
)
func TestDeduplicator(t *testing.T) {
tc := []struct {
name string
maxLen int
caseInsensitive bool
input []string
expected []string
expectedState map[string]struct{}
}{
{
name: "when case insensitive, it deduplicates case-insensitively",
caseInsensitive: true,
input: []string{"a", "A", "B", "b", "a", "A", "b", "B"},
expected: []string{"a", "A #2", "B", "b #2", "a #3", "A #4", "b #3", "B #4"},
},
{
name: "when case sensitive, it deduplicates case-sensitively",
caseInsensitive: false,
input: []string{"a", "A", "B", "b", "a", "A", "b", "B"},
expected: []string{"a", "A", "B", "b", "a #2", "A #2", "b #2", "B #2"},
},
{
name: "when maxLen is 0, it does not truncate",
maxLen: 0,
input: []string{strings.Repeat("a", 1000), strings.Repeat("a", 1000)},
expected: []string{strings.Repeat("a", 1000), strings.Repeat("a", 1000) + " #2"},
},
{
name: "when maxLen is > 0, it truncates - caseInsensitive",
caseInsensitive: true,
maxLen: 10,
input: []string{strings.Repeat("A", 15), strings.Repeat("a", 15), strings.Repeat("A", 15)},
expected: []string{strings.Repeat("A", 10), strings.Repeat("a", 7) + " #2", strings.Repeat("A", 7) + " #3"},
},
{
name: "when maxLen is > 0, it truncates - caseSensitive",
maxLen: 10,
input: []string{strings.Repeat("A", 15), strings.Repeat("a", 15), strings.Repeat("A", 15)},
expected: []string{strings.Repeat("A", 10), strings.Repeat("a", 10), strings.Repeat("A", 7) + " #2"},
},
{
name: "when truncate causes collision, it deduplicates - caseInsensitive",
caseInsensitive: true,
maxLen: 10,
input: []string{strings.Repeat("A", 15), strings.Repeat("a", 10), strings.Repeat("b", 15), strings.Repeat("B", 10)},
expected: []string{strings.Repeat("A", 10), strings.Repeat("a", 7) + " #2", strings.Repeat("b", 10), strings.Repeat("B", 7) + " #2"},
},
{
name: "when truncate causes collision, it deduplicates - caseSensitive",
maxLen: 10,
input: []string{strings.Repeat("A", 15), strings.Repeat("A", 10)},
expected: []string{strings.Repeat("A", 10), strings.Repeat("A", 7) + " #2"},
},
{
name: "when deduplicate causes collision, it deduplicates - caseInsensitive",
caseInsensitive: true,
maxLen: 10,
input: []string{"A", "a", "a #2", "b", "B", "B #2"},
expected: []string{"A", "a #2", "a #2 #2", "b", "B #2", "B #2 #2"},
},
{
name: "when deduplicate causes collision, it deduplicates - caseSensitive",
maxLen: 10,
input: []string{"a", "a", "a #2", "b", "b", "b #2"},
expected: []string{"a", "a #2", "a #2 #2", "b", "b #2", "b #2 #2"},
},
{
name: "when deduplicate causes collision, it finds next available increment - caseInsensitive",
caseInsensitive: true,
maxLen: 10,
input: []string{"a", "A #2", "a #3", "A #4", "a #5", "A #6", "a #7", "A #8", "a #9", "A #10", "a"},
expected: []string{"a", "A #2", "a #3", "A #4", "a #5", "A #6", "a #7", "A #8", "a #9", "A #10", "a #11"},
},
{
name: "when deduplicate causes collision, it finds next available increment - caseSensitive",
maxLen: 10,
input: []string{"a", "a #2", "a #3", "a #4", "a #5", "a #6", "a #7", "a #8", "a #9", "a #10", "a"},
expected: []string{"a", "a #2", "a #3", "a #4", "a #5", "a #6", "a #7", "a #8", "a #9", "a #10", "a #11"},
},
{
name: "when deduplicate causes collision enough times, it deduplicates with uid - caseInsensitive",
caseInsensitive: true,
maxLen: 10,
input: []string{"a", "A #2", "a #3", "A #4", "a #5", "A #6", "a #7", "A #8", "a #9", "A #10", "a #11", "A"},
expected: []string{"a", "A #2", "a #3", "A #4", "a #5", "A #6", "a #7", "A #8", "a #9", "A #10", "a #11", "A_uid-1"},
},
{
name: "when deduplicate causes collision enough times, it deduplicates with uid - caseSensitive",
maxLen: 10,
input: []string{"a", "a #2", "a #3", "a #4", "a #5", "a #6", "a #7", "a #8", "a #9", "a #10", "a #11", "a"},
expected: []string{"a", "a #2", "a #3", "a #4", "a #5", "a #6", "a #7", "a #8", "a #9", "a #10", "a #11", "a_uid-1"},
},
}
for _, tt := range tc {
t.Run(tt.name, func(t *testing.T) {
inc := 0
mockUidGenerator := func() string {
inc++
return fmt.Sprintf("uid-%d", inc)
}
dedup := Deduplicator{
set: make(map[string]int),
caseInsensitive: tt.caseInsensitive,
maxLen: tt.maxLen,
uidGenerator: mockUidGenerator,
}
out := make([]string, 0, len(tt.input))
for _, in := range tt.input {
d, err := dedup.Deduplicate(in)
require.NoError(t, err)
out = append(out, d)
}
require.Equal(t, tt.expected, out)
})
}
}
func Test_shortUIDCaseInsensitiveConflicts(t *testing.T) {
s := Deduplicator{
set: make(map[string]int),
caseInsensitive: true,
}
// 10000 uids seems to be enough to cause a collision in almost every run if using util.GenerateShortUID directly.
for i := 0; i < 10000; i++ {
s.add(util.GenerateShortUID(), 0)
}
// check if any are case-insensitive duplicates.
deduped := make(map[string]struct{})
for k := range s.set {
deduped[strings.ToLower(k)] = struct{}{}
}
require.Equal(t, len(s.set), len(deduped))
}

@@ -41,6 +41,7 @@ func TestDashAlertPermissionMigration(t *testing.T) {
}
genAlert := func(title string, namespaceUID string, dashboardUID string, mutators ...func(*ngModels.AlertRule)) *ngModels.AlertRule {
dashTitle := "Dashboard Title " + dashboardUID
a := &ngModels.AlertRule{
ID: 1,
OrgID: 1,
@@ -55,7 +56,7 @@ func TestDashAlertPermissionMigration(t *testing.T) {
},
NamespaceUID: namespaceUID,
DashboardUID: &dashboardUID,
RuleGroup: fmt.Sprintf("Dashboard Title %s - %d", dashboardUID, 1),
RuleGroup: fmt.Sprintf("%s - 1m", dashTitle),
IntervalSeconds: 60,
Version: 1,
PanelID: pointer(int64(1)),
@@ -83,7 +84,6 @@ func TestDashAlertPermissionMigration(t *testing.T) {
return func(a *ngModels.AlertRule) {
a.PanelID = pointer(id)
a.Annotations["__panelId__"] = fmt.Sprintf("%d", id)
a.RuleGroup = fmt.Sprintf("Dashboard Title %s - %d", *a.DashboardUID, id)
}
}

@@ -3,7 +3,6 @@ package migration
import (
"encoding/json"
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
@@ -134,26 +133,6 @@ func Test_getAlertFolderNameFromDashboard(t *testing.T) {
})
}
func Test_shortUIDCaseInsensitiveConflicts(t *testing.T) {
s := Deduplicator{
set: make(map[string]struct{}),
caseInsensitive: true,
}
// 10000 uids seems to be enough to cause a collision in almost every run if using util.GenerateShortUID directly.
for i := 0; i < 10000; i++ {
s.add(util.GenerateShortUID())
}
// check if any are case-insensitive duplicates.
deduped := make(map[string]struct{})
for k := range s.set {
deduped[strings.ToLower(k)] = struct{}{}
}
require.Equal(t, len(s.set), len(deduped))
}
func mustRawMessage[T any](s T) apimodels.RawMessage {
js, _ := json.Marshal(s)
return js
