mirror of https://github.com/grafana/grafana
Alerting: Move legacy alert migration from sqlstore migration to service (#72702)
parent
f6d2c11be9
commit
82f3127e23
@ -0,0 +1,440 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"crypto/md5" |
||||
"encoding/base64" |
||||
"fmt" |
||||
"regexp" |
||||
"sort" |
||||
"strings" |
||||
"time" |
||||
|
||||
alertingNotify "github.com/grafana/alerting/notify" |
||||
"github.com/prometheus/alertmanager/config" |
||||
"github.com/prometheus/alertmanager/pkg/labels" |
||||
"github.com/prometheus/common/model" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models" |
||||
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/secrets" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
const (
	// DisabledRepeatInterval is a large duration that will be used as a pseudo-disable in case a legacy channel doesn't have SendReminders enabled.
	// 8736h = 52 weeks = 364 days, i.e. effectively "about once a year".
	DisabledRepeatInterval = model.Duration(time.Duration(8736) * time.Hour) // 1y
)
||||
|
||||
// channelReceiver is a convenience struct that contains a notificationChannel and its corresponding migrated PostableApiReceiver.
type channelReceiver struct {
	// channel is the legacy notification channel being migrated.
	channel *legacymodels.AlertNotification
	// receiver is the unified-alerting receiver created from channel.
	receiver *apimodels.PostableApiReceiver
}
||||
|
||||
// migrateChannels creates an Alertmanager config with migrated receivers and routes
// from the given legacy notification channels, and attaches the ContactLabel to each
// migrated alert rule pair so it matches its receivers' routes.
// Returns nil (with no error) when there are no usable receivers to migrate.
func (om *OrgMigration) migrateChannels(allChannels []*legacymodels.AlertNotification, pairs []*AlertPair) (*apimodels.PostableUserConfig, error) {
	// Partition channels: drop discontinued types, and track which ones are org defaults.
	var defaultChannels []*legacymodels.AlertNotification
	var channels []*legacymodels.AlertNotification
	for _, c := range allChannels {
		if c.Type == "hipchat" || c.Type == "sensu" {
			// These channel types no longer exist in unified alerting; they cannot be migrated.
			om.log.Error("Alert migration error: discontinued notification channel found", "type", c.Type, "name", c.Name, "uid", c.UID)
			continue
		}

		if c.IsDefault {
			defaultChannels = append(defaultChannels, c)
		}
		// Note: default channels are also kept in the full list so they get their own receiver.
		channels = append(channels, c)
	}

	amConfig := &apimodels.PostableUserConfig{
		AlertmanagerConfig: apimodels.PostableApiAlertingConfig{
			Receivers: make([]*apimodels.PostableApiReceiver, 0),
		},
	}

	// Create all newly migrated receivers from legacy notification channels.
	receiversMap, receivers, err := om.createReceivers(channels)
	if err != nil {
		return nil, fmt.Errorf("create receiver: %w", err)
	}

	// No need to create an Alertmanager configuration if there are no receivers left that aren't obsolete.
	if len(receivers) == 0 {
		om.log.Warn("No available receivers")
		return nil, nil
	}

	for _, cr := range receivers {
		amConfig.AlertmanagerConfig.Receivers = append(amConfig.AlertmanagerConfig.Receivers, cr.receiver)
	}

	defaultReceivers := make(map[string]struct{})
	// If the organization has default channels build a map of default receivers, used to create alert-specific routes later.
	// NOTE(review): this map is keyed by the raw channel name, while createReceivers
	// sanitizes double quotes out of receiver names — confirm names containing '"' behave as intended.
	for _, c := range defaultChannels {
		defaultReceivers[c.Name] = struct{}{}
	}
	defaultReceiver, defaultRoute, err := om.createDefaultRouteAndReceiver(defaultChannels)
	if err != nil {
		return nil, fmt.Errorf("failed to create default route & receiver in orgId %d: %w", om.orgID, err)
	}
	amConfig.AlertmanagerConfig.Route = defaultRoute
	if defaultReceiver != nil {
		amConfig.AlertmanagerConfig.Receivers = append(amConfig.AlertmanagerConfig.Receivers, defaultReceiver)
	}

	// Nest one route per receiver under the root route, matched via the ContactLabel regex.
	for _, cr := range receivers {
		route, err := createRoute(cr)
		if err != nil {
			return nil, fmt.Errorf("failed to create route for receiver %s in orgId %d: %w", cr.receiver.Name, om.orgID, err)
		}

		amConfig.AlertmanagerConfig.Route.Routes = append(amConfig.AlertmanagerConfig.Route.Routes, route)
	}

	// Label each migrated rule with the contact points it should notify.
	for _, pair := range pairs {
		channelUids := extractChannelIDs(pair.DashAlert)
		filteredReceiverNames := om.filterReceiversForAlert(pair.AlertRule.Title, channelUids, receiversMap, defaultReceivers)

		if len(filteredReceiverNames) != 0 {
			// Only create a contact label if there are specific receivers, otherwise it defaults to the root-level route.
			pair.AlertRule.Labels[ContactLabel] = contactListToString(filteredReceiverNames)
		}
	}

	// Validate the alertmanager configuration produced, this gives a chance to catch bad configuration at migration time.
	// Validation between legacy and unified alerting can be different (e.g. due to bug fixes) so this would fail the migration in that case.
	if err := om.validateAlertmanagerConfig(amConfig); err != nil {
		return nil, fmt.Errorf("failed to validate AlertmanagerConfig in orgId %d: %w", om.orgID, err)
	}

	return amConfig, nil
}
||||
|
||||
// validateAlertmanagerConfig validates the alertmanager configuration produced by the migration against the receivers.
|
||||
func (om *OrgMigration) validateAlertmanagerConfig(config *apimodels.PostableUserConfig) error { |
||||
for _, r := range config.AlertmanagerConfig.Receivers { |
||||
for _, gr := range r.GrafanaManagedReceivers { |
||||
data, err := gr.Settings.MarshalJSON() |
||||
if err != nil { |
||||
return err |
||||
} |
||||
var ( |
||||
cfg = &alertingNotify.GrafanaIntegrationConfig{ |
||||
UID: gr.UID, |
||||
Name: gr.Name, |
||||
Type: gr.Type, |
||||
DisableResolveMessage: gr.DisableResolveMessage, |
||||
Settings: data, |
||||
SecureSettings: gr.SecureSettings, |
||||
} |
||||
) |
||||
|
||||
_, err = alertingNotify.BuildReceiverConfiguration(context.Background(), &alertingNotify.APIReceiver{ |
||||
GrafanaIntegrations: alertingNotify.GrafanaIntegrations{Integrations: []*alertingNotify.GrafanaIntegrationConfig{cfg}}, |
||||
}, om.encryptionService.GetDecryptedValue) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
} |
||||
} |
||||
|
||||
return nil |
||||
} |
||||
|
||||
// quote will surround the given string in double quotes.
func quote(s string) string {
	return "\"" + s + "\""
}

// contactListToString creates a sorted string representation of a given map (set) of
// receiver names. Each name is double-quoted and the quoted names are joined with
// commas. Names should not contain double quotes themselves.
func contactListToString(m map[string]any) string {
	quoted := make([]string, 0, len(m))
	for receiverName := range m {
		quoted = append(quoted, quote(receiverName))
	}
	sort.Strings(quoted)

	return strings.Join(quoted, ",")
}
||||
|
||||
// Create a notifier (PostableGrafanaReceiver) from a legacy notification channel
|
||||
func (om *OrgMigration) createNotifier(c *legacymodels.AlertNotification) (*apimodels.PostableGrafanaReceiver, error) { |
||||
uid, err := om.determineChannelUid(c) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
settings, secureSettings, err := om.migrateSettingsToSecureSettings(c.Type, c.Settings, c.SecureSettings) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
data, err := settings.MarshalJSON() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return &apimodels.PostableGrafanaReceiver{ |
||||
UID: uid, |
||||
Name: c.Name, |
||||
Type: c.Type, |
||||
DisableResolveMessage: c.DisableResolveMessage, |
||||
Settings: data, |
||||
SecureSettings: secureSettings, |
||||
}, nil |
||||
} |
||||
|
||||
// createReceivers creates one receiver for every unique notification channel.
// It returns a lookup map keyed by both the channel's UID and (when non-zero) its
// numeric ID, alongside the ordered list of channel/receiver pairs.
func (om *OrgMigration) createReceivers(allChannels []*legacymodels.AlertNotification) (map[migrationStore.UidOrID]*apimodels.PostableApiReceiver, []channelReceiver, error) {
	receivers := make([]channelReceiver, 0, len(allChannels))
	receiversMap := make(map[migrationStore.UidOrID]*apimodels.PostableApiReceiver)

	set := make(map[string]struct{}) // Used to deduplicate sanitized names.
	for _, c := range allChannels {
		notifier, err := om.createNotifier(c)
		if err != nil {
			return nil, nil, err
		}

		// We remove double quotes because this character will be used as the separator in the ContactLabel. To prevent partial matches in the Route Matcher we choose to sanitize them early on instead of complicating the Matcher regex.
		sanitizedName := strings.ReplaceAll(c.Name, `"`, `_`)
		// There can be name collisions after we sanitize. We check for this and attempt to make the name unique again using a short hash of the original name.
		// %.3x renders the first 3 bytes of the md5 digest, i.e. a 6-hex-char suffix.
		if _, ok := set[sanitizedName]; ok {
			sanitizedName = sanitizedName + fmt.Sprintf("_%.3x", md5.Sum([]byte(c.Name)))
			om.log.Warn("Alert contains duplicate contact name after sanitization, appending unique suffix", "type", c.Type, "name", c.Name, "new_name", sanitizedName, "uid", c.UID)
		}
		notifier.Name = sanitizedName

		set[sanitizedName] = struct{}{}

		cr := channelReceiver{
			channel: c,
			receiver: &apimodels.PostableApiReceiver{
				Receiver: config.Receiver{
					Name: sanitizedName, // Channel name is unique within an Org.
				},
				PostableGrafanaReceivers: apimodels.PostableGrafanaReceivers{
					GrafanaManagedReceivers: []*apimodels.PostableGrafanaReceiver{notifier},
				},
			},
		}

		receivers = append(receivers, cr)

		// Store receivers for creating routes from alert rules later.
		if c.UID != "" {
			receiversMap[c.UID] = cr.receiver
		}
		if c.ID != 0 {
			// In certain circumstances, the alert rule uses ID instead of uid. So, we add this to be able to lookup by ID in case.
			receiversMap[c.ID] = cr.receiver
		}
	}

	return receiversMap, receivers, nil
}
||||
|
||||
// createDefaultRouteAndReceiver creates the root-level route with the default receiver.
// If no new receiver is created specifically for the root-level route, the returned
// receiver will be nil (i.e. when there is exactly one default channel, whose
// existing receiver is reused).
func (om *OrgMigration) createDefaultRouteAndReceiver(defaultChannels []*legacymodels.AlertNotification) (*apimodels.PostableApiReceiver, *apimodels.Route, error) {
	defaultReceiverName := "autogen-contact-point-default"
	defaultRoute := &apimodels.Route{
		Receiver:       defaultReceiverName,
		Routes:         make([]*apimodels.Route, 0),
		GroupByStr:     []string{ngmodels.FolderTitleLabel, model.AlertNameLabel}, // To keep parity with pre-migration notifications.
		RepeatInterval: nil,
	}
	newDefaultReceiver := &apimodels.PostableApiReceiver{
		Receiver: config.Receiver{
			Name: defaultReceiverName,
		},
		PostableGrafanaReceivers: apimodels.PostableGrafanaReceivers{
			GrafanaManagedReceivers: []*apimodels.PostableGrafanaReceiver{},
		},
	}

	// Return early if there are no default channels
	if len(defaultChannels) == 0 {
		return newDefaultReceiver, defaultRoute, nil
	}

	repeatInterval := DisabledRepeatInterval // If no channels have SendReminders enabled, we will use this large value as a pseudo-disable.
	if len(defaultChannels) > 1 {
		// If there are more than one default channels we create a separate contact group that is used only in the root policy. This is to simplify the migrated notification policy structure.
		// If we ever allow more than one receiver per route this won't be necessary.
		for _, c := range defaultChannels {
			// Need to create a new notifier to prevent uid conflict.
			defaultNotifier, err := om.createNotifier(c)
			if err != nil {
				return nil, nil, err
			}

			newDefaultReceiver.GrafanaManagedReceivers = append(newDefaultReceiver.GrafanaManagedReceivers, defaultNotifier)

			// Choose the lowest send reminder duration from all the notifiers to use for default route.
			if c.SendReminder && c.Frequency < time.Duration(repeatInterval) {
				repeatInterval = model.Duration(c.Frequency)
			}
		}
	} else {
		// If there is only a single default channel, we don't need a separate receiver to hold it. We can reuse the existing receiver for that single notifier.
		// NOTE(review): this uses the raw channel name, while createReceivers sanitizes
		// double quotes out of receiver names — confirm a default channel whose name
		// contains '"' still resolves to an existing receiver.
		defaultRoute.Receiver = defaultChannels[0].Name
		if defaultChannels[0].SendReminder {
			repeatInterval = model.Duration(defaultChannels[0].Frequency)
		}

		// No need to create a new receiver.
		newDefaultReceiver = nil
	}
	defaultRoute.RepeatInterval = &repeatInterval

	return newDefaultReceiver, defaultRoute, nil
}
||||
|
||||
// Create one route per contact point, matching based on ContactLabel.
|
||||
func createRoute(cr channelReceiver) (*apimodels.Route, error) { |
||||
// We create a regex matcher so that each alert rule need only have a single ContactLabel entry for all contact points it sends to.
|
||||
// For example, if an alert needs to send to contact1 and contact2 it will have ContactLabel=`"contact1","contact2"` and will match both routes looking
|
||||
// for `.*"contact1".*` and `.*"contact2".*`.
|
||||
|
||||
// We quote and escape here to ensure the regex will correctly match the ContactLabel on the alerts.
|
||||
name := fmt.Sprintf(`.*%s.*`, regexp.QuoteMeta(quote(cr.receiver.Name))) |
||||
mat, err := labels.NewMatcher(labels.MatchRegexp, ContactLabel, name) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
repeatInterval := DisabledRepeatInterval |
||||
if cr.channel.SendReminder { |
||||
repeatInterval = model.Duration(cr.channel.Frequency) |
||||
} |
||||
|
||||
return &apimodels.Route{ |
||||
Receiver: cr.receiver.Name, |
||||
ObjectMatchers: apimodels.ObjectMatchers{mat}, |
||||
Continue: true, // We continue so that each sibling contact point route can separately match.
|
||||
RepeatInterval: &repeatInterval, |
||||
}, nil |
||||
} |
||||
|
||||
// filterReceiversForAlert filters receivers to select those that were associated to the given rule as channels.
// It returns nil when the alert should fall back to the default (root) route: either
// no channels are associated, none resolve to a receiver, or every resolved receiver
// is already a default. Otherwise it returns the set of receiver names, with the
// default receivers merged in.
func (om *OrgMigration) filterReceiversForAlert(name string, channelIDs []migrationStore.UidOrID, receivers map[migrationStore.UidOrID]*apimodels.PostableApiReceiver, defaultReceivers map[string]struct{}) map[string]any {
	if len(channelIDs) == 0 {
		// If there are no channels associated, we use the default route.
		return nil
	}

	// Filter receiver names.
	filteredReceiverNames := make(map[string]any)
	for _, uidOrId := range channelIDs {
		recv, ok := receivers[uidOrId]
		if ok {
			filteredReceiverNames[recv.Name] = struct{}{} // Deduplicate on contact point name.
		} else {
			om.log.Warn("Alert linked to obsolete notification channel, ignoring", "alert", name, "uid", uidOrId)
		}
	}

	coveredByDefault := func(names map[string]any) bool {
		// Check if all receivers are also default ones and if so, just use the default route.
		for n := range names {
			if _, ok := defaultReceivers[n]; !ok {
				return false
			}
		}
		return true
	}

	if len(filteredReceiverNames) == 0 || coveredByDefault(filteredReceiverNames) {
		// Use the default route instead.
		return nil
	}

	// Add default receivers alongside rule-specific ones.
	for n := range defaultReceivers {
		filteredReceiverNames[n] = struct{}{}
	}

	return filteredReceiverNames
}
||||
|
||||
func (om *OrgMigration) determineChannelUid(c *legacymodels.AlertNotification) (string, error) { |
||||
legacyUid := c.UID |
||||
if legacyUid == "" { |
||||
newUid := util.GenerateShortUID() |
||||
om.seenUIDs.add(newUid) |
||||
om.log.Info("Legacy notification had an empty uid, generating a new one", "id", c.ID, "uid", newUid) |
||||
return newUid, nil |
||||
} |
||||
|
||||
if om.seenUIDs.contains(legacyUid) { |
||||
newUid := util.GenerateShortUID() |
||||
om.seenUIDs.add(newUid) |
||||
om.log.Warn("Legacy notification had a UID that collides with a migrated record, generating a new one", "id", c.ID, "old", legacyUid, "new", newUid) |
||||
return newUid, nil |
||||
} |
||||
|
||||
om.seenUIDs.add(legacyUid) |
||||
return legacyUid, nil |
||||
} |
||||
|
||||
// secureKeysToMigrate maps a channel type to the settings keys that historically
// lived in plain settings but must be stored as secure settings after migration.
var secureKeysToMigrate = map[string][]string{
	"slack":                   {"url", "token"},
	"pagerduty":               {"integrationKey"},
	"webhook":                 {"password"},
	"prometheus-alertmanager": {"basicAuthPassword"},
	"opsgenie":                {"apiKey"},
	"telegram":                {"bottoken"},
	"line":                    {"token"},
	"pushover":                {"apiToken", "userKey"},
	"threema":                 {"api_secret"},
}
||||
|
||||
// Some settings were migrated from settings to secure settings in between.
// See https://grafana.com/docs/grafana/latest/installation/upgrading/#ensure-encryption-of-existing-alert-notification-channel-secrets.
// migrateSettingsToSecureSettings takes care of that. It returns a copy of settings
// with the now-secure keys removed, alongside the merged secure settings re-encrypted
// with the current encryption service.
func (om *OrgMigration) migrateSettingsToSecureSettings(chanType string, settings *simplejson.Json, secureSettings SecureJsonData) (*simplejson.Json, map[string]string, error) {
	keys := secureKeysToMigrate[chanType]
	newSecureSettings := secureSettings.Decrypt()
	cloneSettings := simplejson.New()
	settingsMap, err := settings.Map()
	if err != nil {
		return nil, nil, err
	}
	// Copy the plain settings so the caller's simplejson is left untouched.
	for k, v := range settingsMap {
		cloneSettings.Set(k, v)
	}
	// Move each should-be-secure key out of plain settings, unless a non-empty
	// secure value for it already exists.
	for _, k := range keys {
		if v, ok := newSecureSettings[k]; ok && v != "" {
			continue
		}

		sv := cloneSettings.Get(k).MustString()
		if sv != "" {
			newSecureSettings[k] = sv
			cloneSettings.Del(k)
		}
	}

	// Re-encrypt all secure values (in place) with the current encryption service.
	err = om.encryptSecureSettings(newSecureSettings)
	if err != nil {
		return nil, nil, err
	}

	return cloneSettings, newSecureSettings, nil
}
||||
|
||||
func (om *OrgMigration) encryptSecureSettings(secureSettings map[string]string) error { |
||||
for key, value := range secureSettings { |
||||
encryptedData, err := om.encryptionService.Encrypt(context.Background(), []byte(value), secrets.WithoutScope()) |
||||
if err != nil { |
||||
return fmt.Errorf("failed to encrypt secure settings: %w", err) |
||||
} |
||||
secureSettings[key] = base64.StdEncoding.EncodeToString(encryptedData) |
||||
} |
||||
return nil |
||||
} |
@ -0,0 +1,591 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"encoding/base64" |
||||
"encoding/json" |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/google/go-cmp/cmp" |
||||
"github.com/google/go-cmp/cmp/cmpopts" |
||||
"github.com/prometheus/alertmanager/config" |
||||
"github.com/prometheus/alertmanager/pkg/labels" |
||||
"github.com/prometheus/common/model" |
||||
"github.com/stretchr/testify/require" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
"github.com/grafana/grafana/pkg/infra/db" |
||||
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models" |
||||
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/notifier/channels_config" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
// TestFilterReceiversForAlert verifies how OrgMigration.filterReceiversForAlert
// resolves an alert's channel UIDs/IDs to receiver names, merges in default
// receivers, and returns nil when the default route should be used instead.
func TestFilterReceiversForAlert(t *testing.T) {
	tc := []struct {
		name             string
		channelIds       []migrationStore.UidOrID
		receivers        map[migrationStore.UidOrID]*apimodels.PostableApiReceiver
		defaultReceivers map[string]struct{}
		expected         map[string]any
	}{
		{
			name:       "when an alert has multiple channels, each should filter for the correct receiver",
			channelIds: []migrationStore.UidOrID{"uid1", "uid2"},
			receivers: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1": createPostableApiReceiver("recv1", nil),
				"uid2": createPostableApiReceiver("recv2", nil),
				"uid3": createPostableApiReceiver("recv3", nil),
			},
			defaultReceivers: map[string]struct{}{},
			expected: map[string]any{
				"recv1": struct{}{},
				"recv2": struct{}{},
			},
		},
		{
			name:       "when default receivers exist, they should be added to an alert's filtered receivers",
			channelIds: []migrationStore.UidOrID{"uid1"},
			receivers: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1": createPostableApiReceiver("recv1", nil),
				"uid2": createPostableApiReceiver("recv2", nil),
				"uid3": createPostableApiReceiver("recv3", nil),
			},
			defaultReceivers: map[string]struct{}{
				"recv2": {},
			},
			expected: map[string]any{
				"recv1": struct{}{}, // From alert
				"recv2": struct{}{}, // From default
			},
		},
		{
			name:       "when an alert has a channels associated by ID instead of UID, it should be included",
			channelIds: []migrationStore.UidOrID{int64(42)},
			receivers: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				int64(42): createPostableApiReceiver("recv1", nil),
			},
			defaultReceivers: map[string]struct{}{},
			expected: map[string]any{
				"recv1": struct{}{},
			},
		},
		{
			name:       "when an alert's receivers are covered by the defaults, return nil to use default receiver downstream",
			channelIds: []migrationStore.UidOrID{"uid1"},
			receivers: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1": createPostableApiReceiver("recv1", nil),
				"uid2": createPostableApiReceiver("recv2", nil),
				"uid3": createPostableApiReceiver("recv3", nil),
			},
			defaultReceivers: map[string]struct{}{
				"recv1": {},
				"recv2": {},
			},
			expected: nil, // recv1 is already a default
		},
	}

	sqlStore := db.InitTestDB(t)
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			service := NewTestMigrationService(t, sqlStore, nil)
			m := service.newOrgMigration(1)
			res := m.filterReceiversForAlert("", tt.channelIds, tt.receivers, tt.defaultReceivers)

			require.Equal(t, tt.expected, res)
		})
	}
}
||||
|
||||
// TestCreateRoute verifies that createRoute builds a per-receiver route with a
// regex ObjectMatcher on ContactLabel (quoting and regex-escaping the receiver
// name), Continue=true, and the correct repeat interval based on SendReminder.
func TestCreateRoute(t *testing.T) {
	tc := []struct {
		name     string
		channel  *legacymodels.AlertNotification
		recv     *apimodels.PostableApiReceiver
		expected *apimodels.Route
	}{
		{
			name:    "when a receiver is passed in, the route should regex match based on quoted name with continue=true",
			channel: &legacymodels.AlertNotification{},
			recv:    createPostableApiReceiver("recv1", nil),
			expected: &apimodels.Route{
				Receiver:       "recv1",
				ObjectMatchers: apimodels.ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name:    "notification channel should be escaped for regex in the matcher",
			channel: &legacymodels.AlertNotification{},
			recv:    createPostableApiReceiver(`. ^ $ * + - ? ( ) [ ] { } \ |`, nil),
			expected: &apimodels.Route{
				Receiver:       `. ^ $ * + - ? ( ) [ ] { } \ |`,
				ObjectMatchers: apimodels.ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"\. \^ \$ \* \+ - \? \( \) \[ \] \{ \} \\ \|".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name:    "when a channel has sendReminder=true, the route should use the frequency in repeat interval",
			channel: &legacymodels.AlertNotification{SendReminder: true, Frequency: time.Duration(42) * time.Hour},
			recv:    createPostableApiReceiver("recv1", nil),
			expected: &apimodels.Route{
				Receiver:       "recv1",
				ObjectMatchers: apimodels.ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(model.Duration(time.Duration(42) * time.Hour)),
			},
		},
		{
			name:    "when a channel has sendReminder=false, the route should ignore the frequency in repeat interval and use DisabledRepeatInterval",
			channel: &legacymodels.AlertNotification{SendReminder: false, Frequency: time.Duration(42) * time.Hour},
			recv:    createPostableApiReceiver("recv1", nil),
			expected: &apimodels.Route{
				Receiver:       "recv1",
				ObjectMatchers: apimodels.ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
	}

	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			res, err := createRoute(channelReceiver{
				channel: tt.channel,
				receiver: tt.recv,
			})
			require.NoError(t, err)

			// Order of nested routes is not guaranteed.
			cOpt := []cmp.Option{
				cmpopts.SortSlices(func(a, b *apimodels.Route) bool {
					if a.Receiver != b.Receiver {
						return a.Receiver < b.Receiver
					}
					return a.ObjectMatchers[0].Value < b.ObjectMatchers[0].Value
				}),
				cmpopts.IgnoreUnexported(apimodels.Route{}, labels.Matcher{}),
			}

			if !cmp.Equal(tt.expected, res, cOpt...) {
				t.Errorf("Unexpected Route: %v", cmp.Diff(tt.expected, res, cOpt...))
			}
		})
	}
}
||||
|
||||
func createNotChannel(t *testing.T, uid string, id int64, name string) *legacymodels.AlertNotification { |
||||
t.Helper() |
||||
return &legacymodels.AlertNotification{UID: uid, ID: id, Name: name, Settings: simplejson.New()} |
||||
} |
||||
|
||||
func createNotChannelWithReminder(t *testing.T, uid string, id int64, name string, frequency time.Duration) *legacymodels.AlertNotification { |
||||
t.Helper() |
||||
return &legacymodels.AlertNotification{UID: uid, ID: id, Name: name, SendReminder: true, Frequency: frequency, Settings: simplejson.New()} |
||||
} |
||||
|
||||
// TestCreateReceivers verifies that OrgMigration.createReceivers migrates legacy
// notification channels to receivers, sanitizes double quotes out of names, and
// disambiguates post-sanitization collisions with a short md5-based suffix.
func TestCreateReceivers(t *testing.T) {
	tc := []struct {
		name            string
		allChannels     []*legacymodels.AlertNotification
		defaultChannels []*legacymodels.AlertNotification
		expRecvMap      map[migrationStore.UidOrID]*apimodels.PostableApiReceiver
		expRecv         []channelReceiver
		expErr          error
	}{
		{
			name:        "when given notification channels migrate them to receivers",
			allChannels: []*legacymodels.AlertNotification{createNotChannel(t, "uid1", int64(1), "name1"), createNotChannel(t, "uid2", int64(2), "name2")},
			expRecvMap: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1":   createPostableApiReceiver("name1", []string{"name1"}),
				"uid2":   createPostableApiReceiver("name2", []string{"name2"}),
				int64(1): createPostableApiReceiver("name1", []string{"name1"}),
				int64(2): createPostableApiReceiver("name2", []string{"name2"}),
			},
			expRecv: []channelReceiver{
				{
					channel:  createNotChannel(t, "uid1", int64(1), "name1"),
					receiver: createPostableApiReceiver("name1", []string{"name1"}),
				},
				{
					channel:  createNotChannel(t, "uid2", int64(2), "name2"),
					receiver: createPostableApiReceiver("name2", []string{"name2"}),
				},
			},
		},
		{
			name:        "when given notification channel contains double quote sanitize with underscore",
			allChannels: []*legacymodels.AlertNotification{createNotChannel(t, "uid1", int64(1), "name\"1")},
			expRecvMap: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1":   createPostableApiReceiver("name_1", []string{"name_1"}),
				int64(1): createPostableApiReceiver("name_1", []string{"name_1"}),
			},
			expRecv: []channelReceiver{
				{
					channel:  createNotChannel(t, "uid1", int64(1), "name\"1"),
					receiver: createPostableApiReceiver("name_1", []string{"name_1"}),
				},
			},
		},
		{
			name:        "when given notification channels collide after sanitization add short hash to end",
			allChannels: []*legacymodels.AlertNotification{createNotChannel(t, "uid1", int64(1), "name\"1"), createNotChannel(t, "uid2", int64(2), "name_1")},
			expRecvMap: map[migrationStore.UidOrID]*apimodels.PostableApiReceiver{
				"uid1":   createPostableApiReceiver("name_1", []string{"name_1"}),
				"uid2":   createPostableApiReceiver("name_1_dba13d", []string{"name_1_dba13d"}),
				int64(1): createPostableApiReceiver("name_1", []string{"name_1"}),
				int64(2): createPostableApiReceiver("name_1_dba13d", []string{"name_1_dba13d"}),
			},
			expRecv: []channelReceiver{
				{
					channel:  createNotChannel(t, "uid1", int64(1), "name\"1"),
					receiver: createPostableApiReceiver("name_1", []string{"name_1"}),
				},
				{
					channel:  createNotChannel(t, "uid2", int64(2), "name_1"),
					receiver: createPostableApiReceiver("name_1_dba13d", []string{"name_1_dba13d"}),
				},
			},
		},
	}

	sqlStore := db.InitTestDB(t)
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			service := NewTestMigrationService(t, sqlStore, nil)
			m := service.newOrgMigration(1)
			recvMap, recvs, err := m.createReceivers(tt.allChannels)
			if tt.expErr != nil {
				require.Error(t, err)
				require.EqualError(t, err, tt.expErr.Error())
				return
			}

			require.NoError(t, err)

			// We ignore certain fields for the purposes of this test
			for _, recv := range recvs {
				for _, not := range recv.receiver.GrafanaManagedReceivers {
					not.UID = ""
					not.Settings = nil
					not.SecureSettings = nil
				}
			}

			require.Equal(t, tt.expRecvMap, recvMap)
			require.ElementsMatch(t, tt.expRecv, recvs)
		})
	}
}
||||
|
||||
// TestMigrateNotificationChannelSecureSettings verifies that createNotifier re-encrypts a
// legacy channel's secure settings with the migration's encryption service, and that secure
// fields stored unencrypted in plain settings are moved into secure settings and encrypted.
func TestMigrateNotificationChannelSecureSettings(t *testing.T) {
	// legacyEncryptFn encrypts data the way legacy alerting stored it: util.Encrypt with the
	// global setting.SecretKey.
	legacyEncryptFn := func(data string) string {
		raw, err := util.Encrypt([]byte(data), setting.SecretKey)
		require.NoError(t, err)
		return string(raw)
	}
	// decryptFn reverses the migrated encryption: base64-decode, then decrypt through the
	// org migration's encryption service.
	decryptFn := func(data string, m *OrgMigration) string {
		decoded, err := base64.StdEncoding.DecodeString(data)
		require.NoError(t, err)
		raw, err := m.encryptionService.Decrypt(context.Background(), decoded)
		require.NoError(t, err)
		return string(raw)
	}
	// gen builds a minimal legacy notification channel of the given type; fn (optional) may
	// mutate it before it is returned.
	gen := func(nType string, fn func(channel *legacymodels.AlertNotification)) *legacymodels.AlertNotification {
		not := &legacymodels.AlertNotification{
			UID:  "uid",
			ID:   1,
			Name: "channel name",
			Type: nType,
			Settings: simplejson.NewFromAny(map[string]any{
				"something": "some value",
			}),
			SecureSettings: map[string][]byte{},
		}
		if fn != nil {
			fn(not)
		}
		return not
	}
	// genExpSlack builds the expected migrated slack receiver; fn (optional) may mutate the
	// expectation for individual cases.
	genExpSlack := func(fn func(channel *apimodels.PostableGrafanaReceiver)) *apimodels.PostableGrafanaReceiver {
		rawSettings, err := json.Marshal(map[string]string{
			"something": "some value",
		})
		require.NoError(t, err)

		recv := &apimodels.PostableGrafanaReceiver{
			UID:      "uid",
			Name:     "channel name",
			Type:     "slack",
			Settings: rawSettings,
			SecureSettings: map[string]string{
				"token": "secure token",
				"url":   "secure url",
			},
		}

		if fn != nil {
			fn(recv)
		}
		return recv
	}

	tc := []struct {
		name    string
		channel *legacymodels.AlertNotification
		expRecv *apimodels.PostableGrafanaReceiver
		expErr  error
	}{
		{
			name: "when secure settings exist, migrate them to receiver secure settings",
			channel: gen("slack", func(channel *legacymodels.AlertNotification) {
				channel.SecureSettings = map[string][]byte{
					"token": []byte(legacyEncryptFn("secure token")),
					"url":   []byte(legacyEncryptFn("secure url")),
				}
			}),
			expRecv: genExpSlack(nil),
		},
		{
			name:    "when no secure settings are encrypted, do nothing",
			channel: gen("slack", nil),
			expRecv: genExpSlack(func(recv *apimodels.PostableGrafanaReceiver) {
				delete(recv.SecureSettings, "token")
				delete(recv.SecureSettings, "url")
			}),
		},
		{
			name: "when some secure settings are available unencrypted in settings, migrate them to secureSettings and encrypt",
			channel: gen("slack", func(channel *legacymodels.AlertNotification) {
				channel.SecureSettings = map[string][]byte{
					"url": []byte(legacyEncryptFn("secure url")),
				}
				channel.Settings.Set("token", "secure token")
			}),
			expRecv: genExpSlack(nil),
		},
	}
	sqlStore := db.InitTestDB(t)
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			service := NewTestMigrationService(t, sqlStore, nil)
			m := service.newOrgMigration(1)
			recv, err := m.createNotifier(tt.channel)
			if tt.expErr != nil {
				require.Error(t, err)
				require.EqualError(t, err, tt.expErr.Error())
				return
			}
			require.NoError(t, err)

			if len(tt.expRecv.SecureSettings) > 0 {
				require.NotEqual(t, tt.expRecv, recv) // Make sure they were actually encrypted at first.
			}
			// Decrypt the migrated secure settings in place so they can be compared against
			// the plaintext expectations.
			for k, v := range recv.SecureSettings {
				recv.SecureSettings[k] = decryptFn(v, m)
			}
			require.Equal(t, tt.expRecv, recv)
		})
	}

	// Generate tests for each notification channel type.
	t.Run("secure settings migrations for each notifier type", func(t *testing.T) {
		notifiers := channels_config.GetAvailableNotifiers()
		t.Run("migrate notification channel secure settings to receiver secure settings", func(t *testing.T) {
			for _, notifier := range notifiers {
				nType := notifier.Type
				secureSettings, err := channels_config.GetSecretKeysForContactPointType(nType)
				require.NoError(t, err)
				t.Run(nType, func(t *testing.T) {
					service := NewTestMigrationService(t, sqlStore, nil)
					m := service.newOrgMigration(1)
					// Legacy-encrypt every secret key the contact point type declares.
					channel := gen(nType, func(channel *legacymodels.AlertNotification) {
						for _, key := range secureSettings {
							channel.SecureSettings[key] = []byte(legacyEncryptFn("secure " + key))
						}
					})
					recv, err := m.createNotifier(channel)
					require.NoError(t, err)

					require.Equal(t, nType, recv.Type)
					if len(secureSettings) > 0 {
						for _, key := range secureSettings {
							require.NotEqual(t, "secure "+key, recv.SecureSettings[key]) // Make sure they were actually encrypted at first.
						}
					}
					require.Len(t, recv.SecureSettings, len(secureSettings))
					for _, key := range secureSettings {
						require.Equal(t, "secure "+key, decryptFn(recv.SecureSettings[key], m))
					}
				})
			}
		})

		t.Run("for certain legacy channel types, migrate secure fields stored in settings to secure settings", func(t *testing.T) {
			for _, notifier := range notifiers {
				nType := notifier.Type
				secureSettings, ok := secureKeysToMigrate[nType]
				if !ok {
					continue
				}
				t.Run(nType, func(t *testing.T) {
					service := NewTestMigrationService(t, sqlStore, nil)
					m := service.newOrgMigration(1)

					channel := gen(nType, func(channel *legacymodels.AlertNotification) {
						for _, key := range secureSettings {
							// Key difference to above. We store the secure settings in the settings field and expect
							// them to be migrated to secureSettings.
							channel.Settings.Set(key, "secure "+key)
						}
					})
					recv, err := m.createNotifier(channel)
					require.NoError(t, err)

					require.Equal(t, nType, recv.Type)
					if len(secureSettings) > 0 {
						for _, key := range secureSettings {
							require.NotEqual(t, "secure "+key, recv.SecureSettings[key]) // Make sure they were actually encrypted at first.
						}
					}
					require.Len(t, recv.SecureSettings, len(secureSettings))
					for _, key := range secureSettings {
						require.Equal(t, "secure "+key, decryptFn(recv.SecureSettings[key], m))
					}
				})
			}
		})
	})
}
||||
|
||||
// TestCreateDefaultRouteAndReceiver verifies how default legacy notification channels are
// folded into the autogenerated default contact point and root route: multiple defaults are
// merged into a single "autogen-contact-point-default" receiver, a single default is used
// directly, and the route's RepeatInterval is derived from the channels' reminder frequency
// (falling back to DisabledRepeatInterval when reminders are off).
func TestCreateDefaultRouteAndReceiver(t *testing.T) {
	tc := []struct {
		name            string
		amConfig        *apimodels.PostableUserConfig
		defaultChannels []*legacymodels.AlertNotification
		expRecv         *apimodels.PostableApiReceiver
		expRoute        *apimodels.Route
		expErr          error
	}{
		{
			name:            "when given multiple default notification channels migrate them to a single receiver",
			defaultChannels: []*legacymodels.AlertNotification{createNotChannel(t, "uid1", int64(1), "name1"), createNotChannel(t, "uid2", int64(2), "name2")},
			expRecv:         createPostableApiReceiver("autogen-contact-point-default", []string{"name1", "name2"}),
			expRoute: &apimodels.Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*apimodels.Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name: "when given multiple default notification channels migrate them to a single receiver with RepeatInterval set to be the minimum of all channel frequencies",
			defaultChannels: []*legacymodels.AlertNotification{
				createNotChannelWithReminder(t, "uid1", int64(1), "name1", time.Duration(42)),
				createNotChannelWithReminder(t, "uid2", int64(2), "name2", time.Duration(100000)),
			},
			expRecv: createPostableApiReceiver("autogen-contact-point-default", []string{"name1", "name2"}),
			expRoute: &apimodels.Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*apimodels.Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(model.Duration(42)),
			},
		},
		{
			name:            "when given no default notification channels create a single empty receiver for default",
			defaultChannels: []*legacymodels.AlertNotification{},
			expRecv:         createPostableApiReceiver("autogen-contact-point-default", nil),
			expRoute: &apimodels.Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*apimodels.Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: nil,
			},
		},
		{
			name:            "when given a single default notification channels don't create a new default receiver",
			defaultChannels: []*legacymodels.AlertNotification{createNotChannel(t, "uid1", int64(1), "name1")},
			expRecv:         nil,
			expRoute: &apimodels.Route{
				Receiver:       "name1",
				Routes:         make([]*apimodels.Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name:            "when given a single default notification channel with SendReminder=true, use the channels Frequency as the RepeatInterval",
			defaultChannels: []*legacymodels.AlertNotification{createNotChannelWithReminder(t, "uid1", int64(1), "name1", time.Duration(42))},
			expRecv:         nil,
			expRoute: &apimodels.Route{
				Receiver:       "name1",
				Routes:         make([]*apimodels.Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(model.Duration(42)),
			},
		},
	}

	sqlStore := db.InitTestDB(t)
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			service := NewTestMigrationService(t, sqlStore, nil)
			m := service.newOrgMigration(1)
			recv, route, err := m.createDefaultRouteAndReceiver(tt.defaultChannels)
			if tt.expErr != nil {
				require.Error(t, err)
				require.EqualError(t, err, tt.expErr.Error())
				return
			}

			require.NoError(t, err)

			// We ignore certain fields for the purposes of this test
			if recv != nil {
				for _, not := range recv.GrafanaManagedReceivers {
					not.UID = ""
					not.Settings = nil
					not.SecureSettings = nil
				}
			}

			require.Equal(t, tt.expRecv, recv)
			require.Equal(t, tt.expRoute, route)
		})
	}
}
||||
|
||||
func createPostableApiReceiver(name string, integrationNames []string) *apimodels.PostableApiReceiver { |
||||
integrations := make([]*apimodels.PostableGrafanaReceiver, 0, len(integrationNames)) |
||||
for _, integrationName := range integrationNames { |
||||
integrations = append(integrations, &apimodels.PostableGrafanaReceiver{Name: integrationName}) |
||||
} |
||||
return &apimodels.PostableApiReceiver{ |
||||
Receiver: config.Receiver{ |
||||
Name: name, |
||||
}, |
||||
PostableGrafanaReceivers: apimodels.PostableGrafanaReceivers{ |
||||
GrafanaManagedReceivers: integrations, |
||||
}, |
||||
} |
||||
} |
||||
|
||||
// durationPointer returns a pointer to a copy of d, for filling optional *model.Duration
// fields in test expectations.
func durationPointer(d model.Duration) *model.Duration {
	return &d
}
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,121 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"strings" |
||||
|
||||
pb "github.com/prometheus/alertmanager/silence/silencepb" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/services/folder" |
||||
migmodels "github.com/grafana/grafana/pkg/services/ngalert/migration/models" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/store" |
||||
"github.com/grafana/grafana/pkg/services/secrets" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
// OrgMigration is a helper struct for migrating alerts for a single org. It contains state, services, and caches.
type OrgMigration struct {
	cfg *setting.Cfg
	log log.Logger

	migrationStore    migrationStore.Store
	encryptionService secrets.Service

	orgID               int64
	seenUIDs            Deduplicator            // Tracks UIDs already issued during this org's migration.
	silences            []*pb.MeshSilence       // Alertmanager silences accumulated for this org.
	alertRuleTitleDedup map[string]Deduplicator // Folder -> Deduplicator (Title).

	// cache for folders created for dashboards that have custom permissions
	folderCache           map[string]*folder.Folder
	generalAlertingFolder *folder.Folder // Cached fallback folder for dashboards without a usable folder.

	// Per-org migration state (e.g. folders created), persisted so a later revert can clean up.
	state *migmodels.OrgMigrationState
}
||||
|
||||
// newOrgMigration creates a new OrgMigration for the given orgID, wiring in the parent
// service's config, stores, and encryption service, with all caches and state initialized
// empty.
func (ms *MigrationService) newOrgMigration(orgID int64) *OrgMigration {
	return &OrgMigration{
		cfg: ms.cfg,
		log: ms.log.New("orgID", orgID),

		migrationStore:    ms.migrationStore,
		encryptionService: ms.encryptionService,

		orgID: orgID,
		// We deduplicate for case-insensitive matching in MySQL-compatible backend flavours because they use case-insensitive collation.
		seenUIDs:            Deduplicator{set: make(map[string]struct{}), caseInsensitive: ms.migrationStore.CaseInsensitive()},
		silences:            make([]*pb.MeshSilence, 0),
		alertRuleTitleDedup: make(map[string]Deduplicator),

		folderCache: make(map[string]*folder.Folder),

		state: &migmodels.OrgMigrationState{
			OrgID:          orgID,
			CreatedFolders: make([]string, 0),
		},
	}
}
||||
|
||||
func (om *OrgMigration) AlertTitleDeduplicator(folderUID string) Deduplicator { |
||||
if _, ok := om.alertRuleTitleDedup[folderUID]; !ok { |
||||
om.alertRuleTitleDedup[folderUID] = Deduplicator{ |
||||
set: make(map[string]struct{}), |
||||
caseInsensitive: om.migrationStore.CaseInsensitive(), |
||||
maxLen: store.AlertDefinitionMaxTitleLength, |
||||
} |
||||
} |
||||
return om.alertRuleTitleDedup[folderUID] |
||||
} |
||||
|
||||
// AlertPair associates a migrated unified-alerting rule with the legacy dashboard alert it
// originated from.
type AlertPair struct {
	AlertRule *models.AlertRule         // The new unified alerting rule.
	DashAlert *migrationStore.DashAlert // The legacy dashboard alert it was created from.
}
||||
|
||||
// Deduplicator is a wrapper around map[string]struct{} and util.GenerateShortUID() which aims to help maintain and generate
// unique strings (such as uids or titles). if caseInsensitive is true, all uniqueness is determined in a
// case-insensitive manner. if maxLen is greater than 0, all strings will be truncated to maxLen before being checked in
// contains and dedup will always return a string of length maxLen or less.
type Deduplicator struct {
	set             map[string]struct{} // Normalized strings seen so far.
	caseInsensitive bool                // When true, uniqueness is checked case-insensitively.
	maxLen          int                 // When > 0, lookups truncate strings to this length.
}
||||
|
||||
// contains checks whether the given string has already been seen by this Deduplicator.
|
||||
func (s *Deduplicator) contains(u string) bool { |
||||
dedup := u |
||||
if s.caseInsensitive { |
||||
dedup = strings.ToLower(dedup) |
||||
} |
||||
if s.maxLen > 0 && len(dedup) > s.maxLen { |
||||
dedup = dedup[:s.maxLen] |
||||
} |
||||
_, seen := s.set[dedup] |
||||
return seen |
||||
} |
||||
|
||||
// deduplicate returns a unique string based on the given string by appending a uuid to it. Will truncate the given string if
|
||||
// the resulting string would be longer than maxLen.
|
||||
func (s *Deduplicator) deduplicate(dedup string) string { |
||||
uid := util.GenerateShortUID() |
||||
if s.maxLen > 0 && len(dedup)+1+len(uid) > s.maxLen { |
||||
trunc := s.maxLen - 1 - len(uid) |
||||
dedup = dedup[:trunc] |
||||
} |
||||
|
||||
return dedup + "_" + uid |
||||
} |
||||
|
||||
// add adds the given string to the Deduplicator.
|
||||
func (s *Deduplicator) add(uid string) { |
||||
dedup := uid |
||||
if s.caseInsensitive { |
||||
dedup = strings.ToLower(dedup) |
||||
} |
||||
s.set[dedup] = struct{}{} |
||||
} |
@ -0,0 +1,7 @@ |
||||
package models |
||||
|
||||
// OrgMigrationState contains information about the state of an org migration.
type OrgMigrationState struct {
	OrgID          int64    `json:"orgId"`          // The org this state belongs to.
	CreatedFolders []string `json:"createdFolders"` // UIDs of folders created during the migration.
}
@ -0,0 +1,153 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"errors" |
||||
"fmt" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/services/accesscontrol" |
||||
"github.com/grafana/grafana/pkg/services/auth/identity" |
||||
"github.com/grafana/grafana/pkg/services/dashboards" |
||||
"github.com/grafana/grafana/pkg/services/datasources" |
||||
"github.com/grafana/grafana/pkg/services/folder" |
||||
"github.com/grafana/grafana/pkg/services/org" |
||||
"github.com/grafana/grafana/pkg/services/user" |
||||
) |
||||
|
||||
// DASHBOARD_FOLDER is the format used to name folders created for alerts belonging to a
// dashboard with custom permissions: "<dashboard title> Alerts - <dashboard uid>".
const DASHBOARD_FOLDER = "%s Alerts - %s"

// MaxFolderName is the maximum length of the folder name generated using DASHBOARD_FOLDER format
const MaxFolderName = 255
||||
|
||||
var (
	// migratorPermissions is the permission set granted to the background migration user:
	// read access to all dashboards/folders and their permissions, folder and dashboard
	// creation, and read access to all data sources.
	migratorPermissions = []accesscontrol.Permission{
		{Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll},
		{Action: dashboards.ActionDashboardsRead, Scope: dashboards.ScopeDashboardsAll},
		{Action: dashboards.ActionFoldersPermissionsRead, Scope: dashboards.ScopeFoldersAll},
		{Action: dashboards.ActionDashboardsPermissionsRead, Scope: dashboards.ScopeDashboardsAll},
		{Action: dashboards.ActionFoldersCreate},
		{Action: dashboards.ActionDashboardsCreate, Scope: dashboards.ScopeFoldersAll},
		{Action: datasources.ActionRead, Scope: datasources.ScopeAll},
	}
	// generalAlertingFolderTitle is the title of the fallback folder used for alerts whose
	// dashboard has no folder, or whose folder can no longer be found.
	generalAlertingFolderTitle = "General Alerting"
)
||||
|
||||
// getMigrationUser returns a background user for the given orgID with permissions to execute migration-related tasks.
func getMigrationUser(orgID int64) identity.Requester {
	// The background user carries the org admin role restricted to migratorPermissions.
	return accesscontrol.BackgroundUser("ngalert_migration", orgID, org.RoleAdmin, migratorPermissions)
}
||||
|
||||
// getAlertFolderNameFromDashboard generates a folder name for alerts that belong to a dashboard. Formats the string according to DASHBOARD_FOLDER format.
|
||||
// If the resulting string exceeds the migrations.MaxTitleLength, the dashboard title is stripped to be at the maximum length
|
||||
func getAlertFolderNameFromDashboard(dash *dashboards.Dashboard) string { |
||||
maxLen := MaxFolderName - len(fmt.Sprintf(DASHBOARD_FOLDER, "", dash.UID)) |
||||
title := dash.Title |
||||
if len(title) > maxLen { |
||||
title = title[:maxLen] |
||||
} |
||||
return fmt.Sprintf(DASHBOARD_FOLDER, title, dash.UID) // include UID to the name to avoid collision
|
||||
} |
||||
|
||||
// getOrCreateMigratedFolder returns the dashboard with the given ID together with the folder
// its migrated alerts should be placed in. The folder is chosen as follows:
//   - dashboards with custom permissions (ACLs) get a dedicated folder that copies those
//     permissions (cached per folder name in om.folderCache),
//   - dashboards inside an existing folder keep that folder, falling back to the general
//     alerting folder if the folder can no longer be found,
//   - dashboards in the General folder (FolderID == 0) get the general alerting folder.
func (om *OrgMigration) getOrCreateMigratedFolder(ctx context.Context, log log.Logger, dashID int64) (*dashboards.Dashboard, *folder.Folder, error) {
	dash, err := om.migrationStore.GetDashboard(ctx, om.orgID, dashID)
	if err != nil {
		if errors.Is(err, dashboards.ErrFolderNotFound) {
			return nil, nil, fmt.Errorf("dashboard with ID %v under organisation %d not found: %w", dashID, om.orgID, err)
		}
		return nil, nil, fmt.Errorf("failed to get dashboard with ID %v under organisation %d: %w", dashID, om.orgID, err)
	}
	l := log.New(
		"dashboardTitle", dash.Title,
		"dashboardUID", dash.UID,
	)

	var migratedFolder *folder.Folder
	switch {
	case dash.HasACL:
		folderName := getAlertFolderNameFromDashboard(dash)
		f, ok := om.folderCache[folderName]
		if !ok {
			l.Info("create a new folder for alerts that belongs to dashboard because it has custom permissions", "folder", folderName)
			// create folder and assign the permissions of the dashboard (included default and inherited)
			f, err = om.createFolder(ctx, om.orgID, folderName)
			if err != nil {
				return nil, nil, fmt.Errorf("create new folder: %w", err)
			}
			permissions, err := om.migrationStore.GetACL(ctx, dash.OrgID, dash.ID)
			if err != nil {
				return nil, nil, fmt.Errorf("failed to get dashboard %d under organisation %d permissions: %w", dash.ID, dash.OrgID, err)
			}
			err = om.migrationStore.SetACL(ctx, f.OrgID, f.ID, permissions)
			if err != nil {
				return nil, nil, fmt.Errorf("failed to set folder %d under organisation %d permissions: %w", f.ID, f.OrgID, err)
			}
			om.folderCache[folderName] = f
		}
		migratedFolder = f
	case dash.FolderID > 0:
		// get folder if exists
		f, err := om.migrationStore.GetFolder(ctx, &folder.GetFolderQuery{ID: &dash.FolderID, OrgID: dash.OrgID, SignedInUser: getMigrationUser(dash.OrgID)})
		if err != nil {
			// If folder does not exist then the dashboard is an orphan and we migrate the alert to the general folder.
			l.Warn("Failed to find folder for dashboard. Migrate rule to the default folder", "missing_folder_id", dash.FolderID, "error", err)
			migratedFolder, err = om.getOrCreateGeneralFolder(ctx, dash.OrgID)
			if err != nil {
				return nil, nil, err
			}
		} else {
			migratedFolder = f
		}
	default:
		// FolderID == 0: the dashboard lives in the General folder.
		migratedFolder, err = om.getOrCreateGeneralFolder(ctx, dash.OrgID)
		if err != nil {
			return nil, nil, err
		}
	}

	if migratedFolder.UID == "" {
		return nil, nil, fmt.Errorf("empty folder identifier")
	}

	return dash, migratedFolder, nil
}
||||
|
||||
// getOrCreateGeneralFolder returns the general folder under the specific organisation
|
||||
// If the general folder does not exist it creates it.
|
||||
func (om *OrgMigration) getOrCreateGeneralFolder(ctx context.Context, orgID int64) (*folder.Folder, error) { |
||||
if om.generalAlertingFolder != nil { |
||||
return om.generalAlertingFolder, nil |
||||
} |
||||
f, err := om.migrationStore.GetFolder(ctx, &folder.GetFolderQuery{OrgID: orgID, Title: &generalAlertingFolderTitle, SignedInUser: getMigrationUser(orgID)}) |
||||
if err != nil { |
||||
if errors.Is(err, dashboards.ErrFolderNotFound) { |
||||
// create folder
|
||||
generalAlertingFolder, err := om.createFolder(ctx, orgID, generalAlertingFolderTitle) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("create general alerting folder '%s': %w", generalAlertingFolderTitle, err) |
||||
} |
||||
om.generalAlertingFolder = generalAlertingFolder |
||||
return om.generalAlertingFolder, nil |
||||
} |
||||
return nil, fmt.Errorf("get general alerting folder '%s': %w", generalAlertingFolderTitle, err) |
||||
} |
||||
om.generalAlertingFolder = f |
||||
|
||||
return om.generalAlertingFolder, nil |
||||
} |
||||
|
||||
// createFolder creates a new folder with given permissions.
|
||||
func (om *OrgMigration) createFolder(ctx context.Context, orgID int64, title string) (*folder.Folder, error) { |
||||
f, err := om.migrationStore.CreateFolder(ctx, &folder.CreateFolderCommand{ |
||||
OrgID: orgID, |
||||
Title: title, |
||||
SignedInUser: getMigrationUser(orgID).(*user.SignedInUser), |
||||
}) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
om.state.CreatedFolders = append(om.state.CreatedFolders, f.UID) |
||||
|
||||
return f, nil |
||||
} |
@ -0,0 +1,31 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"os" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
// SecureJsonData is used to store encrypted data (for example in data_source table). Only values are separately
// encrypted.
type SecureJsonData map[string][]byte

// seclogger logs decryption failures for SecureJsonData.
var seclogger = log.New("securejsondata")
||||
|
||||
// Decrypt returns map of the same type but where the all the values are decrypted. Opposite of what
// GetEncryptedJsonData is doing.
//
// NOTE(review): a failed decryption logs the error and terminates the whole process via
// os.Exit(1); callers cannot recover. This looks carried over from the legacy
// securejsondata package — confirm a process exit is acceptable here.
func (s SecureJsonData) Decrypt() map[string]string {
	decrypted := make(map[string]string)
	for key, data := range s {
		// Legacy secrets are encrypted with the global setting.SecretKey.
		decryptedData, err := util.Decrypt(data, setting.SecretKey)
		if err != nil {
			seclogger.Error(err.Error())
			os.Exit(1)
		}

		decrypted[key] = string(decryptedData)
	}
	return decrypted
}
@ -0,0 +1,137 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"fmt" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db" |
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/infra/serverlock" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
"github.com/grafana/grafana/pkg/services/secrets" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
// actionName is the unique row-level lock name for serverlock.ServerLockService.
const actionName = "alerting migration"

// ForceMigrationError is returned by Run when unified alerting has been migrated but is now
// disabled and force_migration is not set; rolling back would delete unified alerting data,
// so an explicit opt-in is required.
//
//nolint:stylecheck
var ForceMigrationError = fmt.Errorf("Grafana has already been migrated to Unified Alerting. Any alert rules created while using Unified Alerting will be deleted by rolling back. Set force_migration=true in your grafana.ini and restart Grafana to roll back and delete Unified Alerting configuration data.")
||||
|
||||
// MigrationService orchestrates the one-time migration between legacy alerting and unified
// alerting (and its revert), guarded by a server lock so only one instance runs it at a time.
type MigrationService struct {
	lock           *serverlock.ServerLockService // Serializes the migration across Grafana instances.
	cfg            *setting.Cfg
	log            log.Logger
	store          db.DB                // Used to run the whole migration in a single transaction.
	migrationStore migrationStore.Store // Persists migration status and per-org state; performs revert.

	encryptionService secrets.Service
}
||||
|
||||
func ProvideService( |
||||
lock *serverlock.ServerLockService, |
||||
cfg *setting.Cfg, |
||||
store db.DB, |
||||
migrationStore migrationStore.Store, |
||||
encryptionService secrets.Service, |
||||
) (*MigrationService, error) { |
||||
return &MigrationService{ |
||||
lock: lock, |
||||
log: log.New("ngalert.migration"), |
||||
cfg: cfg, |
||||
store: store, |
||||
migrationStore: migrationStore, |
||||
encryptionService: encryptionService, |
||||
}, nil |
||||
} |
||||
|
||||
// Run starts the migration. This will either migrate from legacy alerting to unified alerting or revert the migration.
// If the migration status in the kvstore is not set and unified alerting is enabled, the migration will be executed.
// If the migration status in the kvstore is set and both unified alerting is disabled and ForceMigration is set to true, the migration will be reverted.
//
// The whole operation runs under a 10-minute server lock (so only one instance performs it)
// and inside a single database transaction (so a failure leaves no partial migration).
func (ms *MigrationService) Run(ctx context.Context) error {
	var errMigration error
	errLock := ms.lock.LockExecuteAndRelease(ctx, actionName, time.Minute*10, func(context.Context) {
		ms.log.Info("Starting")
		errMigration = ms.store.InTransaction(ctx, func(ctx context.Context) error {
			migrated, err := ms.migrationStore.IsMigrated(ctx)
			if err != nil {
				return fmt.Errorf("getting migration status: %w", err)
			}
			// Status already matches the desired state (migrated <=> UA enabled).
			if migrated == ms.cfg.UnifiedAlerting.IsEnabled() {
				// Nothing to do.
				ms.log.Info("No migrations to run")
				return nil
			}

			if migrated {
				// Already migrated but UA is now disabled: this is a potential revert.
				// If legacy alerting is also disabled, there is nothing to do
				if setting.AlertingEnabled != nil && !*setting.AlertingEnabled {
					return nil
				}

				// Safeguard to prevent data loss when reverting from UA to LA.
				if !ms.cfg.ForceMigration {
					return ForceMigrationError
				}

				// Revert migration
				ms.log.Info("Reverting legacy migration")
				err := ms.migrationStore.RevertAllOrgs(ctx)
				if err != nil {
					return fmt.Errorf("reverting migration: %w", err)
				}
				ms.log.Info("Legacy migration reverted")
				return nil
			}

			// Not yet migrated and UA is enabled: run the forward migration for all orgs.
			ms.log.Info("Starting legacy migration")
			err = ms.migrateAllOrgs(ctx)
			if err != nil {
				return fmt.Errorf("executing migration: %w", err)
			}

			err = ms.migrationStore.SetMigrated(ctx, true)
			if err != nil {
				return fmt.Errorf("setting migration status: %w", err)
			}

			ms.log.Info("Completed legacy migration")
			return nil
		})
	})
	if errLock != nil {
		// Another instance holds the migration lock; skip quietly rather than fail startup.
		ms.log.Warn("Server lock for alerting migration already exists")
		return nil
	}
	if errMigration != nil {
		return fmt.Errorf("migration failed: %w", errMigration)
	}
	return nil
}
||||
|
||||
// IsDisabled returns true if the cfg is nil.
// NOTE(review): presumably consulted by the background-service registry to decide whether
// Run should be scheduled — confirm against the service registration.
func (ms *MigrationService) IsDisabled() bool {
	return ms.cfg == nil
}
||||
|
||||
// migrateAllOrgs executes the migration for all orgs.
|
||||
func (ms *MigrationService) migrateAllOrgs(ctx context.Context) error { |
||||
orgs, err := ms.migrationStore.GetAllOrgs(ctx) |
||||
if err != nil { |
||||
return fmt.Errorf("get orgs: %w", err) |
||||
} |
||||
|
||||
for _, o := range orgs { |
||||
om := ms.newOrgMigration(o.ID) |
||||
if err := om.migrateOrg(ctx); err != nil { |
||||
return fmt.Errorf("migrate org %d: %w", o.ID, err) |
||||
} |
||||
|
||||
err = om.migrationStore.SetOrgMigrationState(ctx, o.ID, om.state) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
} |
||||
return nil |
||||
} |
@ -0,0 +1,190 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/stretchr/testify/require" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db" |
||||
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models" |
||||
"github.com/grafana/grafana/pkg/services/dashboards" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
// TestServiceRevert tests migration revert.
|
||||
func TestServiceRevert(t *testing.T) { |
||||
alerts := []*legacymodels.Alert{ |
||||
createAlert(t, 1, 1, 1, "alert1", []string{"notifier1"}), |
||||
} |
||||
channels := []*legacymodels.AlertNotification{ |
||||
createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false), |
||||
} |
||||
dashes := []*dashboards.Dashboard{ |
||||
createDashboard(t, 1, 1, "dash1-1", 5, nil), |
||||
createDashboard(t, 2, 1, "dash2-1", 5, nil), |
||||
createDashboard(t, 8, 1, "dash-in-general-1", 0, nil), |
||||
} |
||||
folders := []*dashboards.Dashboard{ |
||||
createFolder(t, 5, 1, "folder5-1"), |
||||
} |
||||
|
||||
t.Run("revert deletes UA resources", func(t *testing.T) { |
||||
sqlStore := db.InitTestDB(t) |
||||
x := sqlStore.GetEngine() |
||||
|
||||
setupLegacyAlertsTables(t, x, channels, alerts, folders, dashes) |
||||
|
||||
dashCount, err := x.Table("dashboard").Count(&dashboards.Dashboard{}) |
||||
require.NoError(t, err) |
||||
require.Equal(t, int64(4), dashCount) |
||||
|
||||
// Run migration.
|
||||
ctx := context.Background() |
||||
cfg := &setting.Cfg{ |
||||
ForceMigration: true, |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: pointer(true), |
||||
}, |
||||
} |
||||
service := NewTestMigrationService(t, sqlStore, cfg) |
||||
|
||||
err = service.migrationStore.SetMigrated(ctx, false) |
||||
require.NoError(t, err) |
||||
|
||||
err = service.Run(ctx) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify migration was run.
|
||||
migrated, err := service.migrationStore.IsMigrated(ctx) |
||||
require.NoError(t, err) |
||||
require.Equal(t, true, migrated) |
||||
|
||||
// Currently, we fill in some random data for tables that aren't populated during migration.
|
||||
_, err = x.Table("ngalert_configuration").Insert(models.AdminConfiguration{}) |
||||
require.NoError(t, err) |
||||
_, err = x.Table("alert_instance").Insert(models.AlertInstance{ |
||||
AlertInstanceKey: models.AlertInstanceKey{ |
||||
RuleOrgID: 1, |
||||
RuleUID: "alert1", |
||||
LabelsHash: "", |
||||
}, |
||||
CurrentState: models.InstanceStateNormal, |
||||
CurrentStateSince: time.Now(), |
||||
CurrentStateEnd: time.Now(), |
||||
LastEvalTime: time.Now(), |
||||
}) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify various UA resources exist
|
||||
tables := []string{ |
||||
"alert_rule", |
||||
"alert_rule_version", |
||||
"alert_configuration", |
||||
"ngalert_configuration", |
||||
"alert_instance", |
||||
} |
||||
for _, table := range tables { |
||||
count, err := x.Table(table).Count() |
||||
require.NoError(t, err) |
||||
require.True(t, count > 0, "table %s should have at least one row", table) |
||||
} |
||||
|
||||
// Revert migration.
|
||||
service.cfg.UnifiedAlerting.Enabled = pointer(false) |
||||
err = service.Run(context.Background()) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify revert was run.
|
||||
migrated, err = service.migrationStore.IsMigrated(ctx) |
||||
require.NoError(t, err) |
||||
require.Equal(t, false, migrated) |
||||
|
||||
// Verify various UA resources are gone
|
||||
for _, table := range tables { |
||||
count, err := x.Table(table).Count() |
||||
require.NoError(t, err) |
||||
require.Equal(t, int64(0), count, "table %s should have no rows", table) |
||||
} |
||||
}) |
||||
|
||||
t.Run("revert deletes folders created during migration", func(t *testing.T) { |
||||
sqlStore := db.InitTestDB(t) |
||||
x := sqlStore.GetEngine() |
||||
alerts = []*legacymodels.Alert{ |
||||
createAlert(t, 1, 8, 1, "alert1", []string{"notifier1"}), |
||||
} |
||||
setupLegacyAlertsTables(t, x, channels, alerts, folders, dashes) |
||||
|
||||
dashCount, err := x.Table("dashboard").Count(&dashboards.Dashboard{}) |
||||
require.NoError(t, err) |
||||
require.Equal(t, int64(4), dashCount) |
||||
|
||||
// Run migration.
|
||||
ctx := context.Background() |
||||
cfg := &setting.Cfg{ |
||||
ForceMigration: true, |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: pointer(true), |
||||
}, |
||||
} |
||||
service := NewTestMigrationService(t, sqlStore, cfg) |
||||
|
||||
err = service.migrationStore.SetMigrated(ctx, false) |
||||
require.NoError(t, err) |
||||
|
||||
err = service.Run(ctx) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify migration was run.
|
||||
migrated, err := service.migrationStore.IsMigrated(ctx) |
||||
require.NoError(t, err) |
||||
require.Equal(t, true, migrated) |
||||
|
||||
// Verify we created some folders.
|
||||
newDashCount, err := x.Table("dashboard").Count(&dashboards.Dashboard{}) |
||||
require.NoError(t, err) |
||||
require.Truef(t, newDashCount > dashCount, "newDashCount: %d should be greater than dashCount: %d", newDashCount, dashCount) |
||||
|
||||
// Check that dashboards and folders from before migration still exist.
|
||||
require.NotNil(t, getDashboard(t, x, 1, "dash1-1")) |
||||
require.NotNil(t, getDashboard(t, x, 1, "dash2-1")) |
||||
require.NotNil(t, getDashboard(t, x, 1, "dash-in-general-1")) |
||||
|
||||
state, err := service.migrationStore.GetOrgMigrationState(ctx, 1) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify list of created folders.
|
||||
require.NotEmpty(t, state.CreatedFolders) |
||||
for _, uid := range state.CreatedFolders { |
||||
require.NotNil(t, getDashboard(t, x, 1, uid)) |
||||
} |
||||
|
||||
// Revert migration.
|
||||
service.cfg.UnifiedAlerting.Enabled = pointer(false) |
||||
err = service.Run(context.Background()) |
||||
require.NoError(t, err) |
||||
|
||||
// Verify revert was run.
|
||||
migrated, err = service.migrationStore.IsMigrated(ctx) |
||||
require.NoError(t, err) |
||||
require.Equal(t, false, migrated) |
||||
|
||||
// Verify we are back to the original count.
|
||||
newDashCount, err = x.Table("dashboard").Count(&dashboards.Dashboard{}) |
||||
require.NoError(t, err) |
||||
require.Equalf(t, dashCount, newDashCount, "newDashCount: %d should be equal to dashCount: %d after revert", newDashCount, dashCount) |
||||
|
||||
// Check that dashboards and folders from before migration still exist.
|
||||
require.NotNil(t, getDashboard(t, x, 1, "dash1-1")) |
||||
require.NotNil(t, getDashboard(t, x, 1, "dash2-1")) |
||||
require.NotNil(t, getDashboard(t, x, 1, "dash-in-general-1")) |
||||
|
||||
// Check that folders created during migration are gone.
|
||||
for _, uid := range state.CreatedFolders { |
||||
require.Nil(t, getDashboard(t, x, 1, uid)) |
||||
} |
||||
}) |
||||
} |
@ -0,0 +1,511 @@ |
||||
package store |
||||
|
||||
import ( |
||||
"context" |
||||
"encoding/json" |
||||
"fmt" |
||||
"os" |
||||
"path/filepath" |
||||
"strconv" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db" |
||||
"github.com/grafana/grafana/pkg/infra/kvstore" |
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/services/accesscontrol" |
||||
legacyalerting "github.com/grafana/grafana/pkg/services/alerting" |
||||
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models" |
||||
"github.com/grafana/grafana/pkg/services/auth/identity" |
||||
"github.com/grafana/grafana/pkg/services/dashboards" |
||||
"github.com/grafana/grafana/pkg/services/datasources" |
||||
"github.com/grafana/grafana/pkg/services/folder" |
||||
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" |
||||
migmodels "github.com/grafana/grafana/pkg/services/ngalert/migration/models" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/notifier" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/store" |
||||
"github.com/grafana/grafana/pkg/services/org" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
"github.com/grafana/grafana/pkg/services/user" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
// Store is the database abstraction for migration persistence.
type Store interface {
	// InsertAlertRules inserts the given alert rules via the alerting store.
	InsertAlertRules(ctx context.Context, rules ...models.AlertRule) error

	// SaveAlertmanagerConfiguration persists the Alertmanager configuration for the org.
	SaveAlertmanagerConfiguration(ctx context.Context, orgID int64, amConfig *apimodels.PostableUserConfig) error

	// GetAllOrgs returns all organizations.
	GetAllOrgs(ctx context.Context) ([]*org.OrgDTO, error)

	// GetDatasource returns the datasource with the given id, as visible to the given user.
	GetDatasource(ctx context.Context, datasourceID int64, user identity.Requester) (*datasources.DataSource, error)

	// GetNotificationChannels returns all legacy notification channels for the org.
	GetNotificationChannels(ctx context.Context, orgID int64) ([]*legacymodels.AlertNotification, error)

	// GetOrgDashboardAlerts returns the org's legacy dashboard alerts mapped by
	// dashboard id, along with the total alert count.
	GetOrgDashboardAlerts(ctx context.Context, orgID int64) (map[int64][]*DashAlert, int, error)

	// GetACL returns the distinct ACL entries for the dashboard, including those
	// inherited from its parent folder and org-wide defaults.
	GetACL(ctx context.Context, orgID int64, dashID int64) ([]*DashboardACL, error)
	// SetACL writes the given ACL entries for the dashboard, deduplicating
	// conflicting inherited and direct permissions.
	SetACL(ctx context.Context, orgID int64, dashboardID int64, items []*DashboardACL) error

	// GetDashboard returns the dashboard with the given id for the org.
	GetDashboard(ctx context.Context, orgID int64, id int64) (*dashboards.Dashboard, error)
	// GetFolder returns the folder matching the query.
	GetFolder(ctx context.Context, cmd *folder.GetFolderQuery) (*folder.Folder, error)
	// CreateFolder creates a new folder.
	CreateFolder(ctx context.Context, cmd *folder.CreateFolderCommand) (*folder.Folder, error)

	// IsMigrated returns whether the legacy migration has already run.
	IsMigrated(ctx context.Context) (bool, error)
	// SetMigrated records whether the legacy migration has run.
	SetMigrated(ctx context.Context, migrated bool) error
	// GetOrgMigrationState returns the stored summary of a previous migration for the org.
	GetOrgMigrationState(ctx context.Context, orgID int64) (*migmodels.OrgMigrationState, error)
	// SetOrgMigrationState stores the summary of a migration for the org.
	SetOrgMigrationState(ctx context.Context, orgID int64, summary *migmodels.OrgMigrationState) error

	// RevertAllOrgs deletes all unified alerting resources created by the migration.
	RevertAllOrgs(ctx context.Context) error

	// CaseInsensitive reports whether the underlying database compares strings
	// case-insensitively.
	CaseInsensitive() bool
}
||||
|
||||
// migrationStore implements Store, persisting migration status and per-org
// state in the kvstore and delegating everything else to the wrapped services.
type migrationStore struct {
	store                          db.DB
	cfg                            *setting.Cfg
	log                            log.Logger
	kv                             kvstore.KVStore
	alertingStore                  *store.DBstore
	dashboardService               dashboards.DashboardService
	folderService                  folder.Service
	dataSourceCache                datasources.CacheService
	orgService                     org.Service
	legacyAlertNotificationService *legacyalerting.AlertNotificationService
}

// MigrationStore implements the Store interface.
var _ Store = (*migrationStore)(nil)
||||
|
||||
// ProvideMigrationStore constructs the migration Store from the given
// configuration and services. It is intended for dependency injection wiring.
func ProvideMigrationStore(
	cfg *setting.Cfg,
	sqlStore db.DB,
	kv kvstore.KVStore,
	alertingStore *store.DBstore,
	dashboardService dashboards.DashboardService,
	folderService folder.Service,
	dataSourceCache datasources.CacheService,
	orgService org.Service,
	legacyAlertNotificationService *legacyalerting.AlertNotificationService,
) (Store, error) {
	return &migrationStore{
		log:                            log.New("ngalert.migration-store"),
		cfg:                            cfg,
		store:                          sqlStore,
		kv:                             kv,
		alertingStore:                  alertingStore,
		dashboardService:               dashboardService,
		folderService:                  folderService,
		dataSourceCache:                dataSourceCache,
		orgService:                     orgService,
		legacyAlertNotificationService: legacyAlertNotificationService,
	}, nil
}
||||
|
||||
// KVNamespace is the kvstore namespace used for the migration status.
const KVNamespace = "ngalert.migration"

// migratedKey is the kvstore key used for the migration status.
const migratedKey = "migrated"

// stateKey is the kvstore key used for the OrgMigrationState.
const stateKey = "stateKey"

// anyOrg is the org id used for kvstore entries that are not org-specific,
// such as the global migrated flag.
const anyOrg = 0
||||
|
||||
// IsMigrated returns the migration status from the kvstore.
|
||||
func (ms *migrationStore) IsMigrated(ctx context.Context) (bool, error) { |
||||
kv := kvstore.WithNamespace(ms.kv, anyOrg, KVNamespace) |
||||
content, exists, err := kv.Get(ctx, migratedKey) |
||||
if err != nil { |
||||
return false, err |
||||
} |
||||
|
||||
if !exists { |
||||
return false, nil |
||||
} |
||||
|
||||
return strconv.ParseBool(content) |
||||
} |
||||
|
||||
// SetMigrated sets the migration status in the kvstore.
|
||||
func (ms *migrationStore) SetMigrated(ctx context.Context, migrated bool) error { |
||||
kv := kvstore.WithNamespace(ms.kv, anyOrg, KVNamespace) |
||||
return kv.Set(ctx, migratedKey, strconv.FormatBool(migrated)) |
||||
} |
||||
|
||||
// GetOrgMigrationState returns a summary of a previous migration.
|
||||
func (ms *migrationStore) GetOrgMigrationState(ctx context.Context, orgID int64) (*migmodels.OrgMigrationState, error) { |
||||
kv := kvstore.WithNamespace(ms.kv, orgID, KVNamespace) |
||||
content, exists, err := kv.Get(ctx, stateKey) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
if !exists { |
||||
return &migmodels.OrgMigrationState{OrgID: orgID}, nil |
||||
} |
||||
|
||||
var summary migmodels.OrgMigrationState |
||||
err = json.Unmarshal([]byte(content), &summary) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return &summary, nil |
||||
} |
||||
|
||||
// SetOrgMigrationState sets the summary of a previous migration.
|
||||
func (ms *migrationStore) SetOrgMigrationState(ctx context.Context, orgID int64, summary *migmodels.OrgMigrationState) error { |
||||
kv := kvstore.WithNamespace(ms.kv, orgID, KVNamespace) |
||||
raw, err := json.Marshal(summary) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
|
||||
return kv.Set(ctx, stateKey, string(raw)) |
||||
} |
||||
|
||||
func (ms *migrationStore) InsertAlertRules(ctx context.Context, rules ...models.AlertRule) error { |
||||
if ms.store.GetDialect().DriverName() == migrator.Postgres { |
||||
// Postgresql which will automatically rollback the whole transaction on constraint violation.
|
||||
// So, for postgresql, insertions will execute in a subtransaction.
|
||||
err := ms.store.InTransaction(ctx, func(subCtx context.Context) error { |
||||
_, err := ms.alertingStore.InsertAlertRules(subCtx, rules) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
return nil |
||||
}) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
} else { |
||||
_, err := ms.alertingStore.InsertAlertRules(ctx, rules) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
} |
||||
|
||||
return nil |
||||
} |
||||
|
||||
func (ms *migrationStore) SaveAlertmanagerConfiguration(ctx context.Context, orgID int64, amConfig *apimodels.PostableUserConfig) error { |
||||
rawAmConfig, err := json.Marshal(amConfig) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
|
||||
cmd := models.SaveAlertmanagerConfigurationCmd{ |
||||
AlertmanagerConfiguration: string(rawAmConfig), |
||||
ConfigurationVersion: fmt.Sprintf("v%d", models.AlertConfigurationVersion), |
||||
Default: false, |
||||
OrgID: orgID, |
||||
LastApplied: 0, |
||||
} |
||||
return ms.alertingStore.SaveAlertmanagerConfiguration(ctx, &cmd) |
||||
} |
||||
|
||||
// revertPermissions are the permissions required for the background user to revert the migration.
// They are used by DeleteFolders when removing folders created by the migration.
var revertPermissions = []accesscontrol.Permission{
	{Action: dashboards.ActionFoldersDelete, Scope: dashboards.ScopeFoldersAll},
	{Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll},
}
||||
|
||||
// RevertAllOrgs reverts the migration, deleting all unified alerting resources such as alert rules, alertmanager configurations, and silence files.
// In addition, it will delete all folders and permissions originally created by this migration, these are stored in the kvstore.
func (ms *migrationStore) RevertAllOrgs(ctx context.Context) error {
	return ms.store.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		if _, err := sess.Exec("DELETE FROM alert_rule"); err != nil {
			return err
		}

		if _, err := sess.Exec("DELETE FROM alert_rule_version"); err != nil {
			return err
		}

		// NOTE(review): GetAllOrgs, DeleteMigratedFolders, and SetMigrated below
		// are called with ctx rather than the transactional session, so they may
		// execute outside this transaction — confirm that is intended.
		orgs, err := ms.GetAllOrgs(ctx)
		if err != nil {
			return fmt.Errorf("get orgs: %w", err)
		}
		for _, o := range orgs {
			if err := ms.DeleteMigratedFolders(ctx, o.ID); err != nil {
				return err
			}
		}

		if _, err := sess.Exec("DELETE FROM alert_configuration"); err != nil {
			return err
		}

		if _, err := sess.Exec("DELETE FROM ngalert_configuration"); err != nil {
			return err
		}

		if _, err := sess.Exec("DELETE FROM alert_instance"); err != nil {
			return err
		}

		// Clear both the notifier kvstore namespace and this migration's namespace.
		if _, err := sess.Exec("DELETE FROM kv_store WHERE namespace = ?", notifier.KVNamespace); err != nil {
			return err
		}

		if _, err := sess.Exec("DELETE FROM kv_store WHERE namespace = ?", KVNamespace); err != nil {
			return err
		}

		// Remove on-disk silence files; individual failures are logged but do
		// not abort the revert.
		files, err := filepath.Glob(filepath.Join(ms.cfg.DataPath, "alerting", "*", "silences"))
		if err != nil {
			return err
		}
		for _, f := range files {
			if err := os.Remove(f); err != nil {
				ms.log.Error("Failed to remove silence file", "file", f, "err", err)
			}
		}

		err = ms.SetMigrated(ctx, false)
		if err != nil {
			return fmt.Errorf("setting migration status: %w", err)
		}

		return nil
	})
}
||||
|
||||
// DeleteMigratedFolders deletes all folders created by the previous migration run for the given org. This includes all folder permissions.
|
||||
// If the folder is not empty of all descendants the operation will fail and return an error.
|
||||
func (ms *migrationStore) DeleteMigratedFolders(ctx context.Context, orgID int64) error { |
||||
summary, err := ms.GetOrgMigrationState(ctx, orgID) |
||||
if err != nil { |
||||
return err |
||||
} |
||||
return ms.DeleteFolders(ctx, orgID, summary.CreatedFolders...) |
||||
} |
||||
|
||||
// DeleteFolders deletes the folders from the given orgs with the given UIDs. This includes all folder permissions.
// If the folder is not empty of all descendants the operation will fail and return an error.
func (ms *migrationStore) DeleteFolders(ctx context.Context, orgID int64, uids ...string) error {
	if len(uids) == 0 {
		return nil
	}

	// Deletion runs as a background service user carrying only the folder
	// read/delete permissions needed for the revert.
	usr := accesscontrol.BackgroundUser("ngalert_migration_revert", orgID, org.RoleAdmin, revertPermissions)
	for _, folderUID := range uids {
		cmd := folder.DeleteFolderCommand{
			UID:   folderUID,
			OrgID: orgID,
			// NOTE(review): unchecked type assertion — this panics if
			// BackgroundUser ever returns something other than *user.SignedInUser.
			SignedInUser: usr.(*user.SignedInUser),
		}
		err := ms.folderService.Delete(ctx, &cmd) // Also handles permissions and other related entities.
		if err != nil {
			return err
		}
	}
	return nil
}
||||
|
||||
func (ms *migrationStore) GetDashboard(ctx context.Context, orgID int64, id int64) (*dashboards.Dashboard, error) { |
||||
return ms.dashboardService.GetDashboard(ctx, &dashboards.GetDashboardQuery{ID: id, OrgID: orgID}) |
||||
} |
||||
|
||||
func (ms *migrationStore) GetAllOrgs(ctx context.Context) ([]*org.OrgDTO, error) { |
||||
orgQuery := &org.SearchOrgsQuery{} |
||||
return ms.orgService.Search(ctx, orgQuery) |
||||
} |
||||
|
||||
func (ms *migrationStore) GetDatasource(ctx context.Context, datasourceID int64, user identity.Requester) (*datasources.DataSource, error) { |
||||
return ms.dataSourceCache.GetDatasource(ctx, datasourceID, user, false) |
||||
} |
||||
|
||||
// GetNotificationChannels returns all channels for this org.
|
||||
func (ms *migrationStore) GetNotificationChannels(ctx context.Context, orgID int64) ([]*legacymodels.AlertNotification, error) { |
||||
return ms.legacyAlertNotificationService.GetAllAlertNotifications(ctx, &legacymodels.GetAllAlertNotificationsQuery{ |
||||
OrgID: orgID, |
||||
}) |
||||
} |
||||
|
||||
// GetFolder returns the folder matching the given query.
func (ms *migrationStore) GetFolder(ctx context.Context, cmd *folder.GetFolderQuery) (*folder.Folder, error) {
	return ms.folderService.Get(ctx, cmd)
}
||||
|
||||
// CreateFolder creates a new folder from the given command.
func (ms *migrationStore) CreateFolder(ctx context.Context, cmd *folder.CreateFolderCommand) (*folder.Folder, error) {
	return ms.folderService.Create(ctx, cmd)
}
||||
|
||||
// GetACL returns the distinct ACL entries for the given dashboard, including
// permissions inherited from its parent folder and org-wide defaults
// (org_id = -1 entries apply when neither dashboard nor folder has an ACL).
// based on SQLStore.GetDashboardACLInfoList()
func (ms *migrationStore) GetACL(ctx context.Context, orgID, dashboardID int64) ([]*DashboardACL, error) {
	var err error

	// Dialect-specific SQL literal for boolean false, spliced into the raw query.
	falseStr := ms.store.GetDialect().BooleanStr(false)

	result := make([]*DashboardACL, 0)
	rawSQL := `
	-- get distinct permissions for the dashboard and its parent folder
	SELECT DISTINCT
		da.id,
		da.user_id,
		da.team_id,
		da.permission,
		da.role
	FROM dashboard as d
		LEFT JOIN dashboard folder on folder.id = d.folder_id
		LEFT JOIN dashboard_acl AS da ON
		da.dashboard_id = d.id OR
		da.dashboard_id = d.folder_id OR
		(
			-- include default permissions --
			da.org_id = -1 AND (
			  (folder.id IS NOT NULL AND folder.has_acl = ` + falseStr + `) OR
			  (folder.id IS NULL AND d.has_acl = ` + falseStr + `)
			)
		)
	WHERE d.org_id = ? AND d.id = ? AND da.id IS NOT NULL
	ORDER BY da.id ASC
	`
	err = ms.store.WithDbSession(ctx, func(sess *db.Session) error {
		return sess.SQL(rawSQL, orgID, dashboardID).Find(&result)
	})
	if err != nil {
		return nil, err
	}
	return result, err
}
||||
|
||||
// SetACL inserts the given ACL entries for the dashboard, keeping only the
// highest permission per user/team and skipping duplicates that arise from
// folder inheritance, then marks the dashboard as having an ACL.
// based on SQLStore.UpdateDashboardACL()
// it should be called from inside a transaction
func (ms *migrationStore) SetACL(ctx context.Context, orgID int64, dashboardID int64, items []*DashboardACL) error {
	if dashboardID <= 0 {
		return fmt.Errorf("folder id must be greater than zero for a folder permission")
	}
	return ms.store.WithDbSession(ctx, func(sess *db.Session) error {
		// userPermissionsMap is a map keeping the highest permission per user
		// for handling conflicting inherited (folder) and non-inherited (dashboard) user permissions
		userPermissionsMap := make(map[int64]*DashboardACL, len(items))
		// teamPermissionsMap is a map keeping the highest permission per team
		// for handling conflicting inherited (folder) and non-inherited (dashboard) team permissions
		teamPermissionsMap := make(map[int64]*DashboardACL, len(items))
		for _, item := range items {
			if item.UserID != 0 {
				acl, ok := userPermissionsMap[item.UserID]
				if !ok {
					userPermissionsMap[item.UserID] = item
				} else {
					if item.Permission > acl.Permission {
						// the higher permission wins
						userPermissionsMap[item.UserID] = item
					}
				}
			}

			if item.TeamID != 0 {
				acl, ok := teamPermissionsMap[item.TeamID]
				if !ok {
					teamPermissionsMap[item.TeamID] = item
				} else {
					if item.Permission > acl.Permission {
						// the higher permission wins
						teamPermissionsMap[item.TeamID] = item
					}
				}
			}
		}

		// keyType identifies a unique (subject, role, permission) combination.
		type keyType struct {
			UserID     int64 `xorm:"user_id"`
			TeamID     int64 `xorm:"team_id"`
			Role       RoleType
			Permission permissionType
		}
		// seen keeps track of inserted permissions to avoid duplicates (due to inheritance)
		seen := make(map[keyType]struct{}, len(items))
		for _, item := range items {
			if item.UserID == 0 && item.TeamID == 0 && (item.Role == nil || !item.Role.IsValid()) {
				return dashboards.ErrDashboardACLInfoMissing
			}

			// ignore duplicate user permissions; only the winning (highest) entry
			// recorded above is inserted
			if item.UserID != 0 {
				acl, ok := userPermissionsMap[item.UserID]
				if ok {
					if acl.Id != item.Id {
						continue
					}
				}
			}

			// ignore duplicate team permissions
			if item.TeamID != 0 {
				acl, ok := teamPermissionsMap[item.TeamID]
				if ok {
					if acl.Id != item.Id {
						continue
					}
				}
			}

			key := keyType{UserID: item.UserID, TeamID: item.TeamID, Role: "", Permission: item.Permission}
			if item.Role != nil {
				key.Role = *item.Role
			}
			if _, ok := seen[key]; ok {
				continue
			}

			// unset Id so that the new record will get a different one
			item.Id = 0
			item.OrgID = orgID
			item.DashboardID = dashboardID
			item.Created = time.Now()
			item.Updated = time.Now()

			// allow NULL user_id/team_id columns for role-based entries
			sess.Nullable("user_id", "team_id")
			if _, err := sess.Insert(item); err != nil {
				return err
			}
			seen[key] = struct{}{}
		}

		// Update dashboard HasACL flag
		dashboard := dashboards.Dashboard{HasACL: true}
		_, err := sess.Cols("has_acl").Where("id=?", dashboardID).Update(&dashboard)

		return err
	})
}
||||
|
||||
// GetOrgDashboardAlerts loads all legacy dashboard alerts for the given org mapped by dashboard id.
// It also returns the total number of alerts loaded. Alerts whose dashboard no
// longer exists are excluded by the query.
func (ms *migrationStore) GetOrgDashboardAlerts(ctx context.Context, orgID int64) (map[int64][]*DashAlert, int, error) {
	var alerts []legacymodels.Alert
	err := ms.store.WithDbSession(ctx, func(sess *db.Session) error {
		return sess.SQL("select * from alert WHERE org_id = ? AND dashboard_id IN (SELECT id from dashboard)", orgID).Find(&alerts)
	})
	if err != nil {
		return nil, 0, err
	}

	mappedAlerts := make(map[int64][]*DashAlert)
	for i := range alerts {
		alert := alerts[i]

		// Round-trip the settings through JSON to decode them into the
		// strongly-typed DashAlertSettings.
		rawSettings, err := json.Marshal(alert.Settings)
		if err != nil {
			return nil, 0, fmt.Errorf("get settings for alert rule ID:%d, name:'%s', orgID:%d: %w", alert.ID, alert.Name, alert.OrgID, err)
		}
		var parsedSettings DashAlertSettings
		err = json.Unmarshal(rawSettings, &parsedSettings)
		if err != nil {
			return nil, 0, fmt.Errorf("parse settings for alert rule ID:%d, name:'%s', orgID:%d: %w", alert.ID, alert.Name, alert.OrgID, err)
		}

		mappedAlerts[alert.DashboardID] = append(mappedAlerts[alert.DashboardID], &DashAlert{
			Alert:          &alerts[i],
			ParsedSettings: &parsedSettings,
		})
	}
	return mappedAlerts, len(alerts), nil
}
||||
|
||||
// CaseInsensitive reports whether the database compares strings
// case-insensitively.
func (ms *migrationStore) CaseInsensitive() bool {
	// NOTE(review): this proxies Dialect.SupportEngine() — presumably a
	// MySQL-dialect check standing in for case-insensitive collation; confirm.
	return ms.store.GetDialect().SupportEngine()
}
@ -0,0 +1,91 @@ |
||||
package store |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"time" |
||||
|
||||
legacymodels "github.com/grafana/grafana/pkg/services/alerting/models" |
||||
) |
||||
|
||||
// RoleType is the legacy dashboard ACL role name.
type RoleType string

const (
	RoleNone   RoleType = "None"
	RoleViewer RoleType = "Viewer"
	RoleEditor RoleType = "Editor"
	RoleAdmin  RoleType = "Admin"
)

// IsValid reports whether r is one of the known role names.
func (r RoleType) IsValid() bool {
	switch r {
	case RoleNone, RoleViewer, RoleEditor, RoleAdmin:
		return true
	default:
		return false
	}
}
||||
|
||||
// permissionType is the legacy dashboard ACL permission level.
type permissionType int
||||
// DashboardACL is a legacy dashboard/folder ACL entry, mapped to the
// dashboard_acl table.
type DashboardACL struct {
	// nolint:stylecheck
	Id          int64
	OrgID       int64 `xorm:"org_id"`
	DashboardID int64 `xorm:"dashboard_id"`

	UserID     int64     `xorm:"user_id"`
	TeamID     int64     `xorm:"team_id"`
	Role       *RoleType // pointer to be nullable
	Permission permissionType

	Created time.Time
	Updated time.Time
}

// TableName returns the database table backing this struct.
func (p DashboardACL) TableName() string { return "dashboard_acl" }
||||
|
||||
// UidOrID holds either a uid (string) or an ID (int64), primarily used for
// mapping a legacy channel to its migrated receiver.
type UidOrID any

// DashAlert pairs a legacy alert with its parsed settings JSON.
type DashAlert struct {
	*legacymodels.Alert
	ParsedSettings *DashAlertSettings
}
||||
|
||||
// DashAlertSettings is a type for the JSON that is in the settings field of
// the alert table.
type DashAlertSettings struct {
	NoDataState         string               `json:"noDataState"`
	ExecutionErrorState string               `json:"executionErrorState"`
	Conditions          []DashAlertCondition `json:"conditions"`
	AlertRuleTags       any                  `json:"alertRuleTags"`
	Notifications       []DashAlertNot       `json:"notifications"`
}

// DashAlertNot is the object that represents the Notifications array in
// DashAlertSettings.
type DashAlertNot struct {
	UID string `json:"uid,omitempty"`
	ID  int64  `json:"id,omitempty"`
}

// DashAlertCondition is like classic.ClassicConditionJSON except that it
// includes the model property with the query.
type DashAlertCondition struct {
	Evaluator ConditionEvalJSON `json:"evaluator"`

	Operator struct {
		Type string `json:"type"`
	} `json:"operator"`

	Query struct {
		Params       []string `json:"params"`
		DatasourceID int64    `json:"datasourceId"`
		Model        json.RawMessage
	} `json:"query"`

	Reducer struct {
		// Params []any `json:"params"` (Unused)
		Type string `json:"type"`
	}
}

// ConditionEvalJSON is the evaluator portion of a legacy alert condition.
type ConditionEvalJSON struct {
	Params []float64 `json:"params"`
	Type   string    `json:"type"` // e.g. "gt"
}
@ -0,0 +1,57 @@ |
||||
package store |
||||
|
||||
import ( |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/stretchr/testify/require" |
||||
|
||||
"github.com/grafana/grafana/pkg/bus" |
||||
"github.com/grafana/grafana/pkg/infra/localcache" |
||||
"github.com/grafana/grafana/pkg/infra/log/logtest" |
||||
"github.com/grafana/grafana/pkg/infra/tracing" |
||||
legacyalerting "github.com/grafana/grafana/pkg/services/alerting" |
||||
"github.com/grafana/grafana/pkg/services/datasources/guardian" |
||||
datasourceService "github.com/grafana/grafana/pkg/services/datasources/service" |
||||
encryptionservice "github.com/grafana/grafana/pkg/services/encryption/service" |
||||
"github.com/grafana/grafana/pkg/services/folder/folderimpl" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/store" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/tests/fakes" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/testutil" |
||||
"github.com/grafana/grafana/pkg/services/org/orgimpl" |
||||
"github.com/grafana/grafana/pkg/services/quota/quotatest" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
func NewTestMigrationStore(t *testing.T, sqlStore *sqlstore.SQLStore, cfg *setting.Cfg) *migrationStore { |
||||
if cfg.UnifiedAlerting.BaseInterval == 0 { |
||||
cfg.UnifiedAlerting.BaseInterval = time.Second * 10 |
||||
} |
||||
alertingStore := store.DBstore{ |
||||
SQLStore: sqlStore, |
||||
Cfg: cfg.UnifiedAlerting, |
||||
} |
||||
bus := bus.ProvideBus(tracing.InitializeTracerForTest()) |
||||
folderStore := folderimpl.ProvideDashboardFolderStore(sqlStore) |
||||
dashboardService, dashboardStore := testutil.SetupDashboardService(t, sqlStore, folderStore, cfg) |
||||
folderService := testutil.SetupFolderService(t, cfg, sqlStore, dashboardStore, folderStore, bus) |
||||
|
||||
quotaService := "atest.FakeQuotaService{} |
||||
orgService, err := orgimpl.ProvideService(sqlStore, cfg, quotaService) |
||||
require.NoError(t, err) |
||||
|
||||
cache := localcache.ProvideService() |
||||
return &migrationStore{ |
||||
log: &logtest.Fake{}, |
||||
cfg: cfg, |
||||
store: sqlStore, |
||||
kv: fakes.NewFakeKVStore(t), |
||||
alertingStore: &alertingStore, |
||||
dashboardService: dashboardService, |
||||
folderService: folderService, |
||||
dataSourceCache: datasourceService.ProvideCacheService(cache, sqlStore, guardian.ProvideGuardian()), |
||||
orgService: orgService, |
||||
legacyAlertNotificationService: legacyalerting.ProvideService(sqlStore, encryptionservice.SetupTestService(t), nil), |
||||
} |
||||
} |
@ -1,7 +1,7 @@ |
||||
// This file contains code that parses templates from old alerting into a sequence
|
||||
// of tokens. Each token can be either a string literal or a variable.
|
||||
|
||||
package ualert |
||||
package migration |
||||
|
||||
import ( |
||||
"bytes" |
@ -1,11 +1,12 @@ |
||||
package ualert |
||||
package migration |
||||
|
||||
import ( |
||||
"fmt" |
||||
"testing" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/stretchr/testify/assert" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
) |
||||
|
||||
func TestTokenString(t *testing.T) { |
@ -0,0 +1,29 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"testing" |
||||
|
||||
"github.com/stretchr/testify/require" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/serverlock" |
||||
"github.com/grafana/grafana/pkg/infra/tracing" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
fake_secrets "github.com/grafana/grafana/pkg/services/secrets/fakes" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
func NewTestMigrationService(t *testing.T, sqlStore *sqlstore.SQLStore, cfg *setting.Cfg) *MigrationService { |
||||
if cfg == nil { |
||||
cfg = setting.NewCfg() |
||||
} |
||||
ms, err := ProvideService( |
||||
serverlock.ProvideService(sqlStore, tracing.InitializeTracerForTest()), |
||||
cfg, |
||||
sqlStore, |
||||
migrationStore.NewTestMigrationStore(t, sqlStore, cfg), |
||||
fake_secrets.NewFakeSecretsService(), |
||||
) |
||||
require.NoError(t, err) |
||||
return ms |
||||
} |
@ -0,0 +1,115 @@ |
||||
package migration |
||||
|
||||
import ( |
||||
"context" |
||||
"fmt" |
||||
|
||||
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" |
||||
migrationStore "github.com/grafana/grafana/pkg/services/ngalert/migration/store" |
||||
"github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
) |
||||
|
||||
func (om *OrgMigration) migrateAlerts(ctx context.Context, alerts []*migrationStore.DashAlert, dashboardUID string, folderUID string) ([]*AlertPair, error) { |
||||
log := om.log.New( |
||||
"dashboardUID", dashboardUID, |
||||
"newFolderUID", folderUID, |
||||
) |
||||
|
||||
pairs := make([]*AlertPair, 0, len(alerts)) |
||||
for _, da := range alerts { |
||||
al := log.New("ruleID", da.ID, "ruleName", da.Name) |
||||
alertRule, err := om.migrateAlert(ctx, al, da, dashboardUID, folderUID) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("migrate alert: %w", err) |
||||
} |
||||
pairs = append(pairs, &AlertPair{AlertRule: alertRule, DashAlert: da}) |
||||
} |
||||
|
||||
return pairs, nil |
||||
} |
||||
|
||||
func (om *OrgMigration) migrateDashboard(ctx context.Context, dashID int64, alerts []*migrationStore.DashAlert) ([]*AlertPair, error) { |
||||
dash, newFolder, err := om.getOrCreateMigratedFolder(ctx, om.log, dashID) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("get or create migrated folder: %w", err) |
||||
} |
||||
|
||||
pairs, err := om.migrateAlerts(ctx, alerts, dash.UID, newFolder.UID) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("migrate and save alerts: %w", err) |
||||
} |
||||
|
||||
return pairs, nil |
||||
} |
||||
|
||||
func (om *OrgMigration) migrateOrgAlerts(ctx context.Context) ([]*AlertPair, error) { |
||||
mappedAlerts, cnt, err := om.migrationStore.GetOrgDashboardAlerts(ctx, om.orgID) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("load alerts: %w", err) |
||||
} |
||||
om.log.Info("Alerts found to migrate", "alerts", cnt) |
||||
|
||||
pairs := make([]*AlertPair, 0, cnt) |
||||
for dashID, alerts := range mappedAlerts { |
||||
dashPairs, err := om.migrateDashboard(ctx, dashID, alerts) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("migrate and save dashboard '%d': %w", dashID, err) |
||||
} |
||||
pairs = append(pairs, dashPairs...) |
||||
} |
||||
return pairs, nil |
||||
} |
||||
|
||||
func (om *OrgMigration) migrateOrgChannels(ctx context.Context, pairs []*AlertPair) (*apimodels.PostableUserConfig, error) { |
||||
channels, err := om.migrationStore.GetNotificationChannels(ctx, om.orgID) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("load notification channels: %w", err) |
||||
} |
||||
|
||||
amConfig, err := om.migrateChannels(channels, pairs) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return amConfig, nil |
||||
} |
||||
|
||||
func (om *OrgMigration) migrateOrg(ctx context.Context) error { |
||||
om.log.Info("Migrating alerts for organisation") |
||||
|
||||
pairs, err := om.migrateOrgAlerts(ctx) |
||||
if err != nil { |
||||
return fmt.Errorf("migrate alerts: %w", err) |
||||
} |
||||
|
||||
// This must happen before we insert the rules into the database because it modifies the alert labels. This will
|
||||
// be changed in the future when we improve how notification policies are created.
|
||||
amConfig, err := om.migrateOrgChannels(ctx, pairs) |
||||
if err != nil { |
||||
return fmt.Errorf("migrate channels: %w", err) |
||||
} |
||||
|
||||
if err := om.writeSilencesFile(); err != nil { |
||||
return fmt.Errorf("write silence file for org %d: %w", om.orgID, err) |
||||
} |
||||
|
||||
if len(pairs) > 0 { |
||||
om.log.Debug("Inserting migrated alert rules", "count", len(pairs)) |
||||
rules := make([]models.AlertRule, 0, len(pairs)) |
||||
for _, p := range pairs { |
||||
rules = append(rules, *p.AlertRule) |
||||
} |
||||
err := om.migrationStore.InsertAlertRules(ctx, rules...) |
||||
if err != nil { |
||||
return fmt.Errorf("insert alert rules: %w", err) |
||||
} |
||||
} |
||||
|
||||
if amConfig != nil { |
||||
if err := om.migrationStore.SaveAlertmanagerConfiguration(ctx, om.orgID, amConfig); err != nil { |
||||
return err |
||||
} |
||||
} |
||||
|
||||
return nil |
||||
} |
@ -0,0 +1,102 @@ |
||||
package fakes |
||||
|
||||
import ( |
||||
"context" |
||||
"strings" |
||||
"sync" |
||||
"testing" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/kvstore" |
||||
) |
||||
|
||||
type FakeKVStore struct { |
||||
Mtx sync.Mutex |
||||
Store map[int64]map[string]map[string]string |
||||
} |
||||
|
||||
func NewFakeKVStore(t *testing.T) *FakeKVStore { |
||||
t.Helper() |
||||
|
||||
return &FakeKVStore{ |
||||
Store: map[int64]map[string]map[string]string{}, |
||||
} |
||||
} |
||||
|
||||
func (fkv *FakeKVStore) Get(_ context.Context, orgId int64, namespace string, key string) (string, bool, error) { |
||||
fkv.Mtx.Lock() |
||||
defer fkv.Mtx.Unlock() |
||||
org, ok := fkv.Store[orgId] |
||||
if !ok { |
||||
return "", false, nil |
||||
} |
||||
k, ok := org[namespace] |
||||
if !ok { |
||||
return "", false, nil |
||||
} |
||||
|
||||
v, ok := k[key] |
||||
if !ok { |
||||
return "", false, nil |
||||
} |
||||
|
||||
return v, true, nil |
||||
} |
||||
func (fkv *FakeKVStore) Set(_ context.Context, orgId int64, namespace string, key string, value string) error { |
||||
fkv.Mtx.Lock() |
||||
defer fkv.Mtx.Unlock() |
||||
org, ok := fkv.Store[orgId] |
||||
if !ok { |
||||
fkv.Store[orgId] = map[string]map[string]string{} |
||||
} |
||||
_, ok = org[namespace] |
||||
if !ok { |
||||
fkv.Store[orgId][namespace] = map[string]string{} |
||||
} |
||||
|
||||
fkv.Store[orgId][namespace][key] = value |
||||
|
||||
return nil |
||||
} |
||||
func (fkv *FakeKVStore) Del(_ context.Context, orgId int64, namespace string, key string) error { |
||||
fkv.Mtx.Lock() |
||||
defer fkv.Mtx.Unlock() |
||||
org, ok := fkv.Store[orgId] |
||||
if !ok { |
||||
return nil |
||||
} |
||||
_, ok = org[namespace] |
||||
if !ok { |
||||
return nil |
||||
} |
||||
|
||||
delete(fkv.Store[orgId][namespace], key) |
||||
|
||||
return nil |
||||
} |
||||
|
||||
func (fkv *FakeKVStore) Keys(ctx context.Context, orgID int64, namespace string, keyPrefix string) ([]kvstore.Key, error) { |
||||
fkv.Mtx.Lock() |
||||
defer fkv.Mtx.Unlock() |
||||
var keys []kvstore.Key |
||||
for orgIDFromStore, namespaceMap := range fkv.Store { |
||||
if orgID != kvstore.AllOrganizations && orgID != orgIDFromStore { |
||||
continue |
||||
} |
||||
if keyMap, exists := namespaceMap[namespace]; exists { |
||||
for k := range keyMap { |
||||
if strings.HasPrefix(k, keyPrefix) { |
||||
keys = append(keys, kvstore.Key{ |
||||
OrgId: orgIDFromStore, |
||||
Namespace: namespace, |
||||
Key: keyPrefix, |
||||
}) |
||||
} |
||||
} |
||||
} |
||||
} |
||||
return keys, nil |
||||
} |
||||
|
||||
func (fkv *FakeKVStore) GetAll(ctx context.Context, orgId int64, namespace string) (map[int64]map[string]string, error) { |
||||
return nil, nil |
||||
} |
@ -1,512 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"crypto/md5" |
||||
"encoding/base64" |
||||
"encoding/json" |
||||
"fmt" |
||||
"regexp" |
||||
"sort" |
||||
"strings" |
||||
"time" |
||||
|
||||
"github.com/prometheus/alertmanager/pkg/labels" |
||||
"github.com/prometheus/common/model" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
) |
||||
|
||||
const ( |
||||
// DisabledRepeatInterval is a large duration that will be used as a pseudo-disable in case a legacy channel doesn't have SendReminders enabled.
|
||||
DisabledRepeatInterval = model.Duration(time.Duration(8736) * time.Hour) // 1y
|
||||
) |
||||
|
||||
type notificationChannel struct { |
||||
ID int64 `xorm:"id"` |
||||
OrgID int64 `xorm:"org_id"` |
||||
Uid string `xorm:"uid"` |
||||
Name string `xorm:"name"` |
||||
Type string `xorm:"type"` |
||||
DisableResolveMessage bool `xorm:"disable_resolve_message"` |
||||
IsDefault bool `xorm:"is_default"` |
||||
Settings *simplejson.Json `xorm:"settings"` |
||||
SecureSettings SecureJsonData `xorm:"secure_settings"` |
||||
SendReminder bool `xorm:"send_reminder"` |
||||
Frequency model.Duration `xorm:"frequency"` |
||||
} |
||||
|
||||
// channelsPerOrg maps notification channels per organisation
|
||||
type channelsPerOrg map[int64][]*notificationChannel |
||||
|
||||
// channelMap maps notification channels per organisation
|
||||
type defaultChannelsPerOrg map[int64][]*notificationChannel |
||||
|
||||
// uidOrID for both uid and ID, primarily used for mapping legacy channel to migrated receiver.
|
||||
type uidOrID any |
||||
|
||||
// channelReceiver is a convenience struct that contains a notificationChannel and its corresponding migrated PostableApiReceiver.
|
||||
type channelReceiver struct { |
||||
channel *notificationChannel |
||||
receiver *PostableApiReceiver |
||||
} |
||||
|
||||
// setupAlertmanagerConfigs creates Alertmanager configs with migrated receivers and routes.
|
||||
func (m *migration) setupAlertmanagerConfigs(rulesPerOrg map[int64]map[*alertRule][]uidOrID) (amConfigsPerOrg, error) { |
||||
// allChannels: channelUID -> channelConfig
|
||||
allChannelsPerOrg, defaultChannelsPerOrg, err := m.getNotificationChannelMap() |
||||
if err != nil { |
||||
return nil, fmt.Errorf("failed to load notification channels: %w", err) |
||||
} |
||||
|
||||
amConfigPerOrg := make(amConfigsPerOrg, len(allChannelsPerOrg)) |
||||
for orgID, channels := range allChannelsPerOrg { |
||||
amConfig := &PostableUserConfig{ |
||||
AlertmanagerConfig: PostableApiAlertingConfig{ |
||||
Receivers: make([]*PostableApiReceiver, 0), |
||||
}, |
||||
} |
||||
amConfigPerOrg[orgID] = amConfig |
||||
|
||||
// Create all newly migrated receivers from legacy notification channels.
|
||||
receiversMap, receivers, err := m.createReceivers(channels) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("failed to create receiver in orgId %d: %w", orgID, err) |
||||
} |
||||
|
||||
// No need to create an Alertmanager configuration if there are no receivers left that aren't obsolete.
|
||||
if len(receivers) == 0 { |
||||
m.mg.Logger.Warn("No available receivers", "orgId", orgID) |
||||
continue |
||||
} |
||||
|
||||
for _, cr := range receivers { |
||||
amConfig.AlertmanagerConfig.Receivers = append(amConfig.AlertmanagerConfig.Receivers, cr.receiver) |
||||
} |
||||
|
||||
defaultReceivers := make(map[string]struct{}) |
||||
defaultChannels, ok := defaultChannelsPerOrg[orgID] |
||||
if ok { |
||||
// If the organization has default channels build a map of default receivers, used to create alert-specific routes later.
|
||||
for _, c := range defaultChannels { |
||||
defaultReceivers[c.Name] = struct{}{} |
||||
} |
||||
} |
||||
defaultReceiver, defaultRoute, err := m.createDefaultRouteAndReceiver(defaultChannels) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("failed to create default route & receiver in orgId %d: %w", orgID, err) |
||||
} |
||||
amConfig.AlertmanagerConfig.Route = defaultRoute |
||||
if defaultReceiver != nil { |
||||
amConfig.AlertmanagerConfig.Receivers = append(amConfig.AlertmanagerConfig.Receivers, defaultReceiver) |
||||
} |
||||
|
||||
for _, cr := range receivers { |
||||
route, err := createRoute(cr) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("failed to create route for receiver %s in orgId %d: %w", cr.receiver.Name, orgID, err) |
||||
} |
||||
|
||||
amConfigPerOrg[orgID].AlertmanagerConfig.Route.Routes = append(amConfigPerOrg[orgID].AlertmanagerConfig.Route.Routes, route) |
||||
} |
||||
|
||||
for ar, channelUids := range rulesPerOrg[orgID] { |
||||
filteredReceiverNames := m.filterReceiversForAlert(ar.Title, channelUids, receiversMap, defaultReceivers) |
||||
|
||||
if len(filteredReceiverNames) != 0 { |
||||
// Only create a contact label if there are specific receivers, otherwise it defaults to the root-level route.
|
||||
ar.Labels[ContactLabel] = contactListToString(filteredReceiverNames) |
||||
} |
||||
} |
||||
|
||||
// Validate the alertmanager configuration produced, this gives a chance to catch bad configuration at migration time.
|
||||
// Validation between legacy and unified alerting can be different (e.g. due to bug fixes) so this would fail the migration in that case.
|
||||
if err := m.validateAlertmanagerConfig(amConfig); err != nil { |
||||
return nil, fmt.Errorf("failed to validate AlertmanagerConfig in orgId %d: %w", orgID, err) |
||||
} |
||||
} |
||||
|
||||
return amConfigPerOrg, nil |
||||
} |
||||
|
||||
// contactListToString creates a sorted string representation of a given map (set) of receiver names. Each name will be comma-separated and double-quoted. Names should not contain double quotes.
|
||||
func contactListToString(m map[string]any) string { |
||||
keys := make([]string, 0, len(m)) |
||||
for k := range m { |
||||
keys = append(keys, quote(k)) |
||||
} |
||||
sort.Strings(keys) |
||||
|
||||
return strings.Join(keys, ",") |
||||
} |
||||
|
||||
// quote will surround the given string in double quotes.
|
||||
func quote(s string) string { |
||||
return `"` + s + `"` |
||||
} |
||||
|
||||
// getNotificationChannelMap returns a map of all channelUIDs to channel config as well as a separate map for just those channels that are default.
|
||||
// For any given Organization, all channels in defaultChannelsPerOrg should also exist in channelsPerOrg.
|
||||
func (m *migration) getNotificationChannelMap() (channelsPerOrg, defaultChannelsPerOrg, error) { |
||||
q := ` |
||||
SELECT id, |
||||
org_id, |
||||
uid, |
||||
name, |
||||
type, |
||||
disable_resolve_message, |
||||
is_default, |
||||
settings, |
||||
secure_settings, |
||||
send_reminder, |
||||
frequency |
||||
FROM |
||||
alert_notification |
||||
` |
||||
allChannels := []notificationChannel{} |
||||
err := m.sess.SQL(q).Find(&allChannels) |
||||
if err != nil { |
||||
return nil, nil, err |
||||
} |
||||
|
||||
if len(allChannels) == 0 { |
||||
return nil, nil, nil |
||||
} |
||||
|
||||
allChannelsMap := make(channelsPerOrg) |
||||
defaultChannelsMap := make(defaultChannelsPerOrg) |
||||
for i, c := range allChannels { |
||||
if c.Type == "hipchat" || c.Type == "sensu" { |
||||
m.mg.Logger.Error("Alert migration error: discontinued notification channel found", "type", c.Type, "name", c.Name, "uid", c.Uid) |
||||
continue |
||||
} |
||||
|
||||
allChannelsMap[c.OrgID] = append(allChannelsMap[c.OrgID], &allChannels[i]) |
||||
|
||||
if c.IsDefault { |
||||
defaultChannelsMap[c.OrgID] = append(defaultChannelsMap[c.OrgID], &allChannels[i]) |
||||
} |
||||
} |
||||
|
||||
return allChannelsMap, defaultChannelsMap, nil |
||||
} |
||||
|
||||
// Create a notifier (PostableGrafanaReceiver) from a legacy notification channel
|
||||
func (m *migration) createNotifier(c *notificationChannel) (*PostableGrafanaReceiver, error) { |
||||
uid, err := m.determineChannelUid(c) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
settings, secureSettings, err := migrateSettingsToSecureSettings(c.Type, c.Settings, c.SecureSettings) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return &PostableGrafanaReceiver{ |
||||
UID: uid, |
||||
Name: c.Name, |
||||
Type: c.Type, |
||||
DisableResolveMessage: c.DisableResolveMessage, |
||||
Settings: settings, |
||||
SecureSettings: secureSettings, |
||||
}, nil |
||||
} |
||||
|
||||
// Create one receiver for every unique notification channel.
|
||||
func (m *migration) createReceivers(allChannels []*notificationChannel) (map[uidOrID]*PostableApiReceiver, []channelReceiver, error) { |
||||
receivers := make([]channelReceiver, 0, len(allChannels)) |
||||
receiversMap := make(map[uidOrID]*PostableApiReceiver) |
||||
|
||||
set := make(map[string]struct{}) // Used to deduplicate sanitized names.
|
||||
for _, c := range allChannels { |
||||
notifier, err := m.createNotifier(c) |
||||
if err != nil { |
||||
return nil, nil, err |
||||
} |
||||
|
||||
// We remove double quotes because this character will be used as the separator in the ContactLabel. To prevent partial matches in the Route Matcher we choose to sanitize them early on instead of complicating the Matcher regex.
|
||||
sanitizedName := strings.ReplaceAll(c.Name, `"`, `_`) |
||||
// There can be name collisions after we sanitize. We check for this and attempt to make the name unique again using a short hash of the original name.
|
||||
if _, ok := set[sanitizedName]; ok { |
||||
sanitizedName = sanitizedName + fmt.Sprintf("_%.3x", md5.Sum([]byte(c.Name))) |
||||
m.mg.Logger.Warn("Alert contains duplicate contact name after sanitization, appending unique suffix", "type", c.Type, "name", c.Name, "new_name", sanitizedName, "uid", c.Uid) |
||||
} |
||||
notifier.Name = sanitizedName |
||||
|
||||
set[sanitizedName] = struct{}{} |
||||
|
||||
cr := channelReceiver{ |
||||
channel: c, |
||||
receiver: &PostableApiReceiver{ |
||||
Name: sanitizedName, // Channel name is unique within an Org.
|
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{notifier}, |
||||
}, |
||||
} |
||||
|
||||
receivers = append(receivers, cr) |
||||
|
||||
// Store receivers for creating routes from alert rules later.
|
||||
if c.Uid != "" { |
||||
receiversMap[c.Uid] = cr.receiver |
||||
} |
||||
if c.ID != 0 { |
||||
// In certain circumstances, the alert rule uses ID instead of uid. So, we add this to be able to lookup by ID in case.
|
||||
receiversMap[c.ID] = cr.receiver |
||||
} |
||||
} |
||||
|
||||
return receiversMap, receivers, nil |
||||
} |
||||
|
||||
// Create the root-level route with the default receiver. If no new receiver is created specifically for the root-level route, the returned receiver will be nil.
|
||||
func (m *migration) createDefaultRouteAndReceiver(defaultChannels []*notificationChannel) (*PostableApiReceiver, *Route, error) { |
||||
defaultReceiverName := "autogen-contact-point-default" |
||||
defaultRoute := &Route{ |
||||
Receiver: defaultReceiverName, |
||||
Routes: make([]*Route, 0), |
||||
GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel}, // To keep parity with pre-migration notifications.
|
||||
RepeatInterval: nil, |
||||
} |
||||
newDefaultReceiver := &PostableApiReceiver{ |
||||
Name: defaultReceiverName, |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
} |
||||
|
||||
// Return early if there are no default channels
|
||||
if len(defaultChannels) == 0 { |
||||
return newDefaultReceiver, defaultRoute, nil |
||||
} |
||||
|
||||
repeatInterval := DisabledRepeatInterval // If no channels have SendReminders enabled, we will use this large value as a pseudo-disable.
|
||||
if len(defaultChannels) > 1 { |
||||
// If there are more than one default channels we create a separate contact group that is used only in the root policy. This is to simplify the migrated notification policy structure.
|
||||
// If we ever allow more than one receiver per route this won't be necessary.
|
||||
for _, c := range defaultChannels { |
||||
// Need to create a new notifier to prevent uid conflict.
|
||||
defaultNotifier, err := m.createNotifier(c) |
||||
if err != nil { |
||||
return nil, nil, err |
||||
} |
||||
|
||||
newDefaultReceiver.GrafanaManagedReceivers = append(newDefaultReceiver.GrafanaManagedReceivers, defaultNotifier) |
||||
|
||||
// Choose the lowest send reminder duration from all the notifiers to use for default route.
|
||||
if c.SendReminder && c.Frequency < repeatInterval { |
||||
repeatInterval = c.Frequency |
||||
} |
||||
} |
||||
} else { |
||||
// If there is only a single default channel, we don't need a separate receiver to hold it. We can reuse the existing receiver for that single notifier.
|
||||
defaultRoute.Receiver = defaultChannels[0].Name |
||||
if defaultChannels[0].SendReminder { |
||||
repeatInterval = defaultChannels[0].Frequency |
||||
} |
||||
|
||||
// No need to create a new receiver.
|
||||
newDefaultReceiver = nil |
||||
} |
||||
defaultRoute.RepeatInterval = &repeatInterval |
||||
|
||||
return newDefaultReceiver, defaultRoute, nil |
||||
} |
||||
|
||||
// Create one route per contact point, matching based on ContactLabel.
|
||||
func createRoute(cr channelReceiver) (*Route, error) { |
||||
// We create a regex matcher so that each alert rule need only have a single ContactLabel entry for all contact points it sends to.
|
||||
// For example, if an alert needs to send to contact1 and contact2 it will have ContactLabel=`"contact1","contact2"` and will match both routes looking
|
||||
// for `.*"contact1".*` and `.*"contact2".*`.
|
||||
|
||||
// We quote and escape here to ensure the regex will correctly match the ContactLabel on the alerts.
|
||||
name := fmt.Sprintf(`.*%s.*`, regexp.QuoteMeta(quote(cr.receiver.Name))) |
||||
mat, err := labels.NewMatcher(labels.MatchRegexp, ContactLabel, name) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
repeatInterval := DisabledRepeatInterval |
||||
if cr.channel.SendReminder { |
||||
repeatInterval = cr.channel.Frequency |
||||
} |
||||
|
||||
return &Route{ |
||||
Receiver: cr.receiver.Name, |
||||
ObjectMatchers: ObjectMatchers{mat}, |
||||
Continue: true, // We continue so that each sibling contact point route can separately match.
|
||||
RepeatInterval: &repeatInterval, |
||||
}, nil |
||||
} |
||||
|
||||
// Filter receivers to select those that were associated to the given rule as channels.
|
||||
func (m *migration) filterReceiversForAlert(name string, channelIDs []uidOrID, receivers map[uidOrID]*PostableApiReceiver, defaultReceivers map[string]struct{}) map[string]any { |
||||
if len(channelIDs) == 0 { |
||||
// If there are no channels associated, we use the default route.
|
||||
return nil |
||||
} |
||||
|
||||
// Filter receiver names.
|
||||
filteredReceiverNames := make(map[string]any) |
||||
for _, uidOrId := range channelIDs { |
||||
recv, ok := receivers[uidOrId] |
||||
if ok { |
||||
filteredReceiverNames[recv.Name] = struct{}{} // Deduplicate on contact point name.
|
||||
} else { |
||||
m.mg.Logger.Warn("Alert linked to obsolete notification channel, ignoring", "alert", name, "uid", uidOrId) |
||||
} |
||||
} |
||||
|
||||
coveredByDefault := func(names map[string]any) bool { |
||||
// Check if all receivers are also default ones and if so, just use the default route.
|
||||
for n := range names { |
||||
if _, ok := defaultReceivers[n]; !ok { |
||||
return false |
||||
} |
||||
} |
||||
return true |
||||
} |
||||
|
||||
if len(filteredReceiverNames) == 0 || coveredByDefault(filteredReceiverNames) { |
||||
// Use the default route instead.
|
||||
return nil |
||||
} |
||||
|
||||
// Add default receivers alongside rule-specific ones.
|
||||
for n := range defaultReceivers { |
||||
filteredReceiverNames[n] = struct{}{} |
||||
} |
||||
|
||||
return filteredReceiverNames |
||||
} |
||||
|
||||
func (m *migration) determineChannelUid(c *notificationChannel) (string, error) { |
||||
legacyUid := c.Uid |
||||
if legacyUid == "" { |
||||
newUid, err := m.seenUIDs.generateUid() |
||||
if err != nil { |
||||
return "", err |
||||
} |
||||
m.mg.Logger.Info("Legacy notification had an empty uid, generating a new one", "id", c.ID, "uid", newUid) |
||||
return newUid, nil |
||||
} |
||||
|
||||
if m.seenUIDs.contains(legacyUid) { |
||||
newUid, err := m.seenUIDs.generateUid() |
||||
if err != nil { |
||||
return "", err |
||||
} |
||||
m.mg.Logger.Warn("Legacy notification had a UID that collides with a migrated record, generating a new one", "id", c.ID, "old", legacyUid, "new", newUid) |
||||
return newUid, nil |
||||
} |
||||
|
||||
return legacyUid, nil |
||||
} |
||||
|
||||
// Some settings were migrated from settings to secure settings in between.
|
||||
// See https://grafana.com/docs/grafana/latest/installation/upgrading/#ensure-encryption-of-existing-alert-notification-channel-secrets.
|
||||
// migrateSettingsToSecureSettings takes care of that.
|
||||
func migrateSettingsToSecureSettings(chanType string, settings *simplejson.Json, secureSettings SecureJsonData) (*simplejson.Json, map[string]string, error) { |
||||
keys := []string{} |
||||
switch chanType { |
||||
case "slack": |
||||
keys = []string{"url", "token"} |
||||
case "pagerduty": |
||||
keys = []string{"integrationKey"} |
||||
case "webhook": |
||||
keys = []string{"password"} |
||||
case "prometheus-alertmanager": |
||||
keys = []string{"basicAuthPassword"} |
||||
case "opsgenie": |
||||
keys = []string{"apiKey"} |
||||
case "telegram": |
||||
keys = []string{"bottoken"} |
||||
case "line": |
||||
keys = []string{"token"} |
||||
case "pushover": |
||||
keys = []string{"apiToken", "userKey"} |
||||
case "threema": |
||||
keys = []string{"api_secret"} |
||||
} |
||||
|
||||
newSecureSettings := secureSettings.Decrypt() |
||||
cloneSettings := simplejson.New() |
||||
settingsMap, err := settings.Map() |
||||
if err != nil { |
||||
return nil, nil, err |
||||
} |
||||
for k, v := range settingsMap { |
||||
cloneSettings.Set(k, v) |
||||
} |
||||
for _, k := range keys { |
||||
if v, ok := newSecureSettings[k]; ok && v != "" { |
||||
continue |
||||
} |
||||
|
||||
sv := cloneSettings.Get(k).MustString() |
||||
if sv != "" { |
||||
newSecureSettings[k] = sv |
||||
cloneSettings.Del(k) |
||||
} |
||||
} |
||||
|
||||
encryptedData := GetEncryptedJsonData(newSecureSettings) |
||||
for k, v := range encryptedData { |
||||
newSecureSettings[k] = base64.StdEncoding.EncodeToString(v) |
||||
} |
||||
|
||||
return cloneSettings, newSecureSettings, nil |
||||
} |
||||
|
||||
// Below is a snapshot of all the config and supporting functions imported
|
||||
// to avoid vendoring those packages.
|
||||
|
||||
type PostableUserConfig struct { |
||||
TemplateFiles map[string]string `yaml:"template_files" json:"template_files"` |
||||
AlertmanagerConfig PostableApiAlertingConfig `yaml:"alertmanager_config" json:"alertmanager_config"` |
||||
} |
||||
|
||||
type amConfigsPerOrg = map[int64]*PostableUserConfig |
||||
|
||||
type PostableApiAlertingConfig struct { |
||||
Route *Route `yaml:"route,omitempty" json:"route,omitempty"` |
||||
Templates []string `yaml:"templates" json:"templates"` |
||||
Receivers []*PostableApiReceiver `yaml:"receivers,omitempty" json:"receivers,omitempty"` |
||||
} |
||||
|
||||
type Route struct { |
||||
Receiver string `yaml:"receiver,omitempty" json:"receiver,omitempty"` |
||||
ObjectMatchers ObjectMatchers `yaml:"object_matchers,omitempty" json:"object_matchers,omitempty"` |
||||
Routes []*Route `yaml:"routes,omitempty" json:"routes,omitempty"` |
||||
Continue bool `yaml:"continue,omitempty" json:"continue,omitempty"` |
||||
GroupByStr []string `yaml:"group_by,omitempty" json:"group_by,omitempty"` |
||||
RepeatInterval *model.Duration `yaml:"repeat_interval,omitempty" json:"repeat_interval,omitempty"` |
||||
} |
||||
|
||||
type ObjectMatchers labels.Matchers |
||||
|
||||
// MarshalJSON implements the json.Marshaler interface for Matchers. Vendored from definitions.ObjectMatchers.
|
||||
func (m ObjectMatchers) MarshalJSON() ([]byte, error) { |
||||
if len(m) == 0 { |
||||
return nil, nil |
||||
} |
||||
result := make([][3]string, len(m)) |
||||
for i, matcher := range m { |
||||
result[i] = [3]string{matcher.Name, matcher.Type.String(), matcher.Value} |
||||
} |
||||
return json.Marshal(result) |
||||
} |
||||
|
||||
type PostableApiReceiver struct { |
||||
Name string `yaml:"name" json:"name"` |
||||
GrafanaManagedReceivers []*PostableGrafanaReceiver `yaml:"grafana_managed_receiver_configs,omitempty" json:"grafana_managed_receiver_configs,omitempty"` |
||||
} |
||||
|
||||
type PostableGrafanaReceiver CreateAlertNotificationCommand |
||||
|
||||
type CreateAlertNotificationCommand struct { |
||||
UID string `json:"uid"` |
||||
Name string `json:"name"` |
||||
Type string `json:"type"` |
||||
DisableResolveMessage bool `json:"disableResolveMessage"` |
||||
Settings *simplejson.Json `json:"settings"` |
||||
SecureSettings map[string]string `json:"secureSettings"` |
||||
} |
@ -1,471 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/google/go-cmp/cmp" |
||||
"github.com/google/go-cmp/cmp/cmpopts" |
||||
"github.com/prometheus/alertmanager/pkg/labels" |
||||
"github.com/prometheus/common/model" |
||||
"github.com/stretchr/testify/require" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
) |
||||
|
||||
func TestFilterReceiversForAlert(t *testing.T) { |
||||
tc := []struct { |
||||
name string |
||||
channelIds []uidOrID |
||||
receivers map[uidOrID]*PostableApiReceiver |
||||
defaultReceivers map[string]struct{} |
||||
expected map[string]any |
||||
}{ |
||||
{ |
||||
name: "when an alert has multiple channels, each should filter for the correct receiver", |
||||
channelIds: []uidOrID{"uid1", "uid2"}, |
||||
receivers: map[uidOrID]*PostableApiReceiver{ |
||||
"uid1": { |
||||
Name: "recv1", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid2": { |
||||
Name: "recv2", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid3": { |
||||
Name: "recv3", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
}, |
||||
defaultReceivers: map[string]struct{}{}, |
||||
expected: map[string]any{ |
||||
"recv1": struct{}{}, |
||||
"recv2": struct{}{}, |
||||
}, |
||||
}, |
||||
{ |
||||
name: "when default receivers exist, they should be added to an alert's filtered receivers", |
||||
channelIds: []uidOrID{"uid1"}, |
||||
receivers: map[uidOrID]*PostableApiReceiver{ |
||||
"uid1": { |
||||
Name: "recv1", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid2": { |
||||
Name: "recv2", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid3": { |
||||
Name: "recv3", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
}, |
||||
defaultReceivers: map[string]struct{}{ |
||||
"recv2": {}, |
||||
}, |
||||
expected: map[string]any{ |
||||
"recv1": struct{}{}, // From alert
|
||||
"recv2": struct{}{}, // From default
|
||||
}, |
||||
}, |
||||
{ |
||||
name: "when an alert has a channels associated by ID instead of UID, it should be included", |
||||
channelIds: []uidOrID{int64(42)}, |
||||
receivers: map[uidOrID]*PostableApiReceiver{ |
||||
int64(42): { |
||||
Name: "recv1", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
}, |
||||
defaultReceivers: map[string]struct{}{}, |
||||
expected: map[string]any{ |
||||
"recv1": struct{}{}, |
||||
}, |
||||
}, |
||||
{ |
||||
name: "when an alert's receivers are covered by the defaults, return nil to use default receiver downstream", |
||||
channelIds: []uidOrID{"uid1"}, |
||||
receivers: map[uidOrID]*PostableApiReceiver{ |
||||
"uid1": { |
||||
Name: "recv1", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid2": { |
||||
Name: "recv2", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
"uid3": { |
||||
Name: "recv3", |
||||
GrafanaManagedReceivers: []*PostableGrafanaReceiver{}, |
||||
}, |
||||
}, |
||||
defaultReceivers: map[string]struct{}{ |
||||
"recv1": {}, |
||||
"recv2": {}, |
||||
}, |
||||
expected: nil, // recv1 is already a default
|
||||
}, |
||||
} |
||||
|
||||
for _, tt := range tc { |
||||
t.Run(tt.name, func(t *testing.T) { |
||||
m := newTestMigration(t) |
||||
res := m.filterReceiversForAlert("", tt.channelIds, tt.receivers, tt.defaultReceivers) |
||||
|
||||
require.Equal(t, tt.expected, res) |
||||
}) |
||||
} |
||||
} |
||||
|
||||
// TestCreateRoute verifies migration of a single legacy notification channel
// into an Alertmanager route: the route must regex-match the quoted channel
// name via ContactLabel (with regex metacharacters escaped), set Continue so
// sibling routes still fire, and derive RepeatInterval from SendReminder —
// the channel Frequency when reminders are on, DisabledRepeatInterval otherwise.
func TestCreateRoute(t *testing.T) {
	tc := []struct {
		name     string
		channel  *notificationChannel
		recv     *PostableApiReceiver
		expected *Route
	}{
		{
			name:    "when a receiver is passed in, the route should regex match based on quoted name with continue=true",
			channel: &notificationChannel{},
			recv: &PostableApiReceiver{
				Name: "recv1",
			},
			expected: &Route{
				Receiver:       "recv1",
				ObjectMatchers: ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			// Every regex metacharacter in the name must come back escaped in
			// the matcher value, while the receiver name itself stays verbatim.
			name:    "notification channel should be escaped for regex in the matcher",
			channel: &notificationChannel{},
			recv: &PostableApiReceiver{
				Name: `. ^ $ * + - ? ( ) [ ] { } \ |`,
			},
			expected: &Route{
				Receiver:       `. ^ $ * + - ? ( ) [ ] { } \ |`,
				ObjectMatchers: ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"\. \^ \$ \* \+ - \? \( \) \[ \] \{ \} \\ \|".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name:    "when a channel has sendReminder=true, the route should use the frequency in repeat interval",
			channel: &notificationChannel{SendReminder: true, Frequency: model.Duration(time.Duration(42) * time.Hour)},
			recv: &PostableApiReceiver{
				Name: "recv1",
			},
			expected: &Route{
				Receiver:       "recv1",
				ObjectMatchers: ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(model.Duration(time.Duration(42) * time.Hour)),
			},
		},
		{
			name:    "when a channel has sendReminder=false, the route should ignore the frequency in repeat interval and use DisabledRepeatInterval",
			channel: &notificationChannel{SendReminder: false, Frequency: model.Duration(time.Duration(42) * time.Hour)},
			recv: &PostableApiReceiver{
				Name: "recv1",
			},
			expected: &Route{
				Receiver:       "recv1",
				ObjectMatchers: ObjectMatchers{{Type: 2, Name: ContactLabel, Value: `.*"recv1".*`}},
				Routes:         nil,
				Continue:       true,
				GroupByStr:     nil,
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
	}

	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			res, err := createRoute(channelReceiver{
				channel:  tt.channel,
				receiver: tt.recv,
			})
			require.NoError(t, err)

			// Order of nested routes is not guaranteed.
			cOpt := []cmp.Option{
				cmpopts.SortSlices(func(a, b *Route) bool {
					if a.Receiver != b.Receiver {
						return a.Receiver < b.Receiver
					}
					return a.ObjectMatchers[0].Value < b.ObjectMatchers[0].Value
				}),
				cmpopts.IgnoreUnexported(Route{}, labels.Matcher{}),
			}

			if !cmp.Equal(tt.expected, res, cOpt...) {
				t.Errorf("Unexpected Route: %v", cmp.Diff(tt.expected, res, cOpt...))
			}
		})
	}
}
||||
|
||||
func createNotChannel(t *testing.T, uid string, id int64, name string) *notificationChannel { |
||||
t.Helper() |
||||
return ¬ificationChannel{Uid: uid, ID: id, Name: name, Settings: simplejson.New()} |
||||
} |
||||
|
||||
func createNotChannelWithReminder(t *testing.T, uid string, id int64, name string, frequency model.Duration) *notificationChannel { |
||||
t.Helper() |
||||
return ¬ificationChannel{Uid: uid, ID: id, Name: name, SendReminder: true, Frequency: frequency, Settings: simplejson.New()} |
||||
} |
||||
|
||||
// TestCreateReceivers verifies migration of legacy notification channels into
// Alertmanager receivers: each channel is reachable in the result map by both
// its UID and its numeric ID, double quotes in names are sanitized to
// underscores, and post-sanitization name collisions are disambiguated by
// appending a short hash suffix.
func TestCreateReceivers(t *testing.T) {
	tc := []struct {
		name            string
		allChannels     []*notificationChannel
		defaultChannels []*notificationChannel
		expRecvMap      map[uidOrID]*PostableApiReceiver
		expRecv         []channelReceiver
		expErr          error
	}{
		{
			name:        "when given notification channels migrate them to receivers",
			allChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name1"), createNotChannel(t, "uid2", int64(2), "name2")},
			// Each channel is indexed twice: once by UID, once by legacy ID.
			expRecvMap: map[uidOrID]*PostableApiReceiver{
				"uid1": {
					Name:                    "name1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}},
				},
				"uid2": {
					Name:                    "name2",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name2"}},
				},
				int64(1): {
					Name:                    "name1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}},
				},
				int64(2): {
					Name:                    "name2",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name2"}},
				},
			},
			expRecv: []channelReceiver{
				{
					channel: createNotChannel(t, "uid1", int64(1), "name1"),
					receiver: &PostableApiReceiver{
						Name:                    "name1",
						GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}},
					},
				},
				{
					channel: createNotChannel(t, "uid2", int64(2), "name2"),
					receiver: &PostableApiReceiver{
						Name:                    "name2",
						GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name2"}},
					},
				},
			},
		},
		{
			name:        "when given notification channel contains double quote sanitize with underscore",
			allChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name\"1")},
			expRecvMap: map[uidOrID]*PostableApiReceiver{
				"uid1": {
					Name:                    "name_1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
				},
				int64(1): {
					Name:                    "name_1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
				},
			},
			expRecv: []channelReceiver{
				{
					channel: createNotChannel(t, "uid1", int64(1), "name\"1"),
					receiver: &PostableApiReceiver{
						Name:                    "name_1",
						GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
					},
				},
			},
		},
		{
			// "name\"1" sanitizes to "name_1", colliding with the second
			// channel's literal name; the later channel gets a hash suffix.
			name:        "when given notification channels collide after sanitization add short hash to end",
			allChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name\"1"), createNotChannel(t, "uid2", int64(2), "name_1")},
			expRecvMap: map[uidOrID]*PostableApiReceiver{
				"uid1": {
					Name:                    "name_1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
				},
				"uid2": {
					Name:                    "name_1_dba13d",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1_dba13d"}},
				},
				int64(1): {
					Name:                    "name_1",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
				},
				int64(2): {
					Name:                    "name_1_dba13d",
					GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1_dba13d"}},
				},
			},
			expRecv: []channelReceiver{
				{
					channel: createNotChannel(t, "uid1", int64(1), "name\"1"),
					receiver: &PostableApiReceiver{
						Name:                    "name_1",
						GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1"}},
					},
				},
				{
					channel: createNotChannel(t, "uid2", int64(2), "name_1"),
					receiver: &PostableApiReceiver{
						Name:                    "name_1_dba13d",
						GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name_1_dba13d"}},
					},
				},
			},
		},
	}

	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			m := newTestMigration(t)
			recvMap, recvs, err := m.createReceivers(tt.allChannels)
			if tt.expErr != nil {
				require.Error(t, err)
				require.EqualError(t, err, tt.expErr.Error())
				return
			}

			require.NoError(t, err)

			// We ignore certain fields for the purposes of this test
			for _, recv := range recvs {
				for _, not := range recv.receiver.GrafanaManagedReceivers {
					not.UID = ""
					not.Settings = nil
					not.SecureSettings = nil
				}
			}

			require.Equal(t, tt.expRecvMap, recvMap)
			require.ElementsMatch(t, tt.expRecv, recvs)
		})
	}
}
||||
|
||||
// TestCreateDefaultRouteAndReceiver verifies how the default (fallback)
// contact point and root route are derived from an org's default channels:
// zero or multiple channels are merged into a synthetic
// "autogen-contact-point-default" receiver, while exactly one channel is
// reused directly (expRecv == nil). The root route's RepeatInterval is the
// minimum reminder frequency, or DisabledRepeatInterval / nil when no
// reminder applies.
func TestCreateDefaultRouteAndReceiver(t *testing.T) {
	tc := []struct {
		name            string
		amConfig        *PostableUserConfig
		defaultChannels []*notificationChannel
		expRecv         *PostableApiReceiver
		expRoute        *Route
		expErr          error
	}{
		{
			name:            "when given multiple default notification channels migrate them to a single receiver",
			defaultChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name1"), createNotChannel(t, "uid2", int64(2), "name2")},
			expRecv: &PostableApiReceiver{
				Name:                    "autogen-contact-point-default",
				GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}, {Name: "name2"}},
			},
			expRoute: &Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name: "when given multiple default notification channels migrate them to a single receiver with RepeatInterval set to be the minimum of all channel frequencies",
			defaultChannels: []*notificationChannel{
				createNotChannelWithReminder(t, "uid1", int64(1), "name1", model.Duration(42)),
				createNotChannelWithReminder(t, "uid2", int64(2), "name2", model.Duration(100000)),
			},
			expRecv: &PostableApiReceiver{
				Name:                    "autogen-contact-point-default",
				GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}, {Name: "name2"}},
			},
			expRoute: &Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(model.Duration(42)),
			},
		},
		{
			name:            "when given no default notification channels create a single empty receiver for default",
			defaultChannels: []*notificationChannel{},
			expRecv: &PostableApiReceiver{
				Name:                    "autogen-contact-point-default",
				GrafanaManagedReceivers: []*PostableGrafanaReceiver{},
			},
			expRoute: &Route{
				Receiver:       "autogen-contact-point-default",
				Routes:         make([]*Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: nil,
			},
		},
		{
			// A single default channel becomes the root receiver itself; no
			// synthetic receiver is created (expRecv stays nil).
			name:            "when given a single default notification channels don't create a new default receiver",
			defaultChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name1")},
			expRecv:         nil,
			expRoute: &Route{
				Receiver:       "name1",
				Routes:         make([]*Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(DisabledRepeatInterval),
			},
		},
		{
			name:            "when given a single default notification channel with SendReminder=true, use the channels Frequency as the RepeatInterval",
			defaultChannels: []*notificationChannel{createNotChannelWithReminder(t, "uid1", int64(1), "name1", model.Duration(42))},
			expRecv:         nil,
			expRoute: &Route{
				Receiver:       "name1",
				Routes:         make([]*Route, 0),
				GroupByStr:     []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
				RepeatInterval: durationPointer(model.Duration(42)),
			},
		},
	}

	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			m := newTestMigration(t)
			recv, route, err := m.createDefaultRouteAndReceiver(tt.defaultChannels)
			if tt.expErr != nil {
				require.Error(t, err)
				require.EqualError(t, err, tt.expErr.Error())
				return
			}

			require.NoError(t, err)

			// We ignore certain fields for the purposes of this test
			if recv != nil {
				for _, not := range recv.GrafanaManagedReceivers {
					not.UID = ""
					not.Settings = nil
					not.SecureSettings = nil
				}
			}

			require.Equal(t, tt.expRecv, recv)
			require.Equal(t, tt.expRoute, route)
		})
	}
}
||||
|
||||
func durationPointer(d model.Duration) *model.Duration { |
||||
return &d |
||||
} |
@ -0,0 +1,83 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"time" |
||||
|
||||
"xorm.io/xorm" |
||||
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
) |
||||
|
||||
// stateKey is a vendored migrationStore.stateKey.
// It is declared as a var (not a const) because kvStoreV1Entry.Key is a
// *string and needs an addressable value.
var stateKey = "stateKey"

// CreatedFoldersMigration moves the record of created folders during legacy migration from Dashboard created_by=-8
// to the kvstore. If there are no dashboards with created_by=-8, then nothing needs to be done.
func CreatedFoldersMigration(mg *migrator.Migrator) {
	mg.AddMigration("migrate record of created folders during legacy migration to kvstore", &createdFoldersToKVStore{})
}
||||
|
||||
// createdFoldersToKVStore copies the legacy migration's record of created
// folders from the dashboard table into the kvstore; the real work happens in
// Exec.
type createdFoldersToKVStore struct {
	migrator.MigrationBase
}

// SQL returns the placeholder statement used for code-based migrations.
func (c createdFoldersToKVStore) SQL(migrator.Dialect) string {
	return codeMigration
}
||||
|
||||
// Exec scans the dashboard table for folders created by the legacy alert
// migration (created_by = -8), groups their UIDs per organization, and writes
// one JSON-encoded kvstore entry per org under stateKey so the new migration
// service can track which folders it owns. Returns nil without writing
// anything when no such dashboards exist.
func (c createdFoldersToKVStore) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
	var results []struct {
		UID   string `xorm:"uid"`
		OrgID int64  `xorm:"org_id"`
	}
	// -8 is the sentinel user id the legacy migration stamped on folders it created.
	folderCreatedBy := -8
	if err := sess.SQL("select * from dashboard where created_by = ?", folderCreatedBy).Find(&results); err != nil {
		return err
	}

	if len(results) == 0 {
		mg.Logger.Debug("no dashboards with created_by=-8, nothing to set in kvstore")
		return nil
	}

	// orgMigrationState mirrors the JSON shape the migration service reads back.
	type orgMigrationState struct {
		OrgID          int64    `json:"orgId"`
		CreatedFolders []string `json:"createdFolders"`
	}
	// Bucket folder UIDs by owning org.
	states := make(map[int64]*orgMigrationState)
	for _, r := range results {
		if _, ok := states[r.OrgID]; !ok {
			states[r.OrgID] = &orgMigrationState{
				OrgID:          r.OrgID,
				CreatedFolders: []string{},
			}
		}
		states[r.OrgID].CreatedFolders = append(states[r.OrgID].CreatedFolders, r.UID)
	}

	now := time.Now()
	for _, state := range states {
		raw, err := json.Marshal(state)
		if err != nil {
			return err
		}

		// Copy into a local so each kvstore entry gets its own OrgID pointer.
		orgId := state.OrgID
		entry := kvStoreV1Entry{
			OrgID:     &orgId,
			Namespace: &KVNamespace,
			Key:       &stateKey,
			Value:     string(raw),
			Created:   now,
			Updated:   now,
		}
		if _, errCreate := sess.Table("kv_store").Insert(&entry); errCreate != nil {
			mg.Logger.Error("failed to insert record of created folders to kvstore", "err", errCreate)
			return fmt.Errorf("failed to insert record of created folders to kvstore: %w", errCreate)
		}
	}

	return nil
}
@ -1,130 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"time" |
||||
) |
||||
|
||||
// dashAlert is a legacy dashboard alert row loaded from the "alert" table,
// together with its parsed settings and the owning dashboard's UID.
type dashAlert struct {
	Id          int64
	OrgId       int64
	DashboardId int64
	PanelId     int64
	Name        string
	Message     string
	Frequency   int64
	For         time.Duration
	State       string

	Settings       json.RawMessage    // raw JSON from the settings column
	ParsedSettings *dashAlertSettings // Settings unmarshalled by slurpDashAlerts
	DashboardUID   string             // Set from separate call
}
||||
|
||||
// slurpDashSQL selects legacy alert rows whose organization and dashboard
// still exist. The %s placeholder is filled with the dialect-quoted "for"
// column name, since "for" is a reserved word in several databases.
//
// Note: the original query selected org_id twice; the duplicate column has
// been removed (harmless to the driver, but redundant).
var slurpDashSQL = `
SELECT id,
	org_id,
	dashboard_id,
	panel_id,
	name,
	message,
	frequency,
	%s,
	state,
	settings
FROM
	alert
WHERE org_id IN (SELECT id from org)
	AND dashboard_id IN (SELECT id from dashboard)
`
||||
|
||||
// slurpDashAlerts loads all alerts from the alert database table into
// the dashAlert type. Alerts that belong to an organization or dashboard that
// no longer exists are filtered out by the query and not returned.
// Additionally it unmarshals the json settings for the alert into the
// ParsedSettings property of the dash alert, failing fast on the first rule
// whose settings cannot be parsed.
func (m *migration) slurpDashAlerts() ([]dashAlert, error) {
	dashAlerts := []dashAlert{}
	// "for" is a reserved word in some dialects, so it is quoted via the
	// dialect before being spliced into slurpDashSQL.
	err := m.sess.SQL(fmt.Sprintf(slurpDashSQL, m.mg.Dialect.Quote("for"))).Find(&dashAlerts)

	if err != nil {
		return nil, err
	}

	for i := range dashAlerts {
		err = json.Unmarshal(dashAlerts[i].Settings, &dashAlerts[i].ParsedSettings)
		if err != nil {
			da := dashAlerts[i]
			return nil, fmt.Errorf("failed to parse alert rule ID:%d, name:'%s', orgID:%d: %w", da.Id, da.Name, da.OrgId, err)
		}
	}

	return dashAlerts, nil
}
||||
|
||||
// dashAlertSettings is a type for the JSON that is in the settings field of
// the alert table.
type dashAlertSettings struct {
	NoDataState        string               `json:"noDataState"`
	ExecutionErrorState string              `json:"executionErrorState"`
	Conditions         []dashAlertCondition `json:"conditions"`
	AlertRuleTags      any                  `json:"alertRuleTags"` // may be a map or legacy string form, hence any
	Notifications      []dashAlertNot       `json:"notifications"`
}
||||
|
||||
// dashAlertNot is the object that represents the Notifications array in
// dashAlertSettings. A channel may be referenced either by UID or by legacy
// numeric ID, so both fields are optional.
type dashAlertNot struct {
	UID string `json:"uid,omitempty"`
	ID  int64  `json:"id,omitempty"`
}
||||
|
||||
// dashAlertCondition is like classic.ClassicConditionJSON except that it
// includes the model property with the query.
type dashAlertCondition struct {
	Evaluator conditionEvalJSON `json:"evaluator"`

	Operator struct {
		Type string `json:"type"`
	} `json:"operator"`

	Query struct {
		Params       []string `json:"params"`
		DatasourceID int64    `json:"datasourceId"`
		Model        json.RawMessage // raw query model, passed through unparsed
	} `json:"query"`

	Reducer struct {
		// Params []any `json:"params"` (Unused)
		Type string `json:"type"`
	}
}
||||
|
||||
// conditionEvalJSON holds the evaluator portion of a classic alert condition:
// the comparison type plus its numeric thresholds.
type conditionEvalJSON struct {
	Params []float64 `json:"params"`
	Type   string    `json:"type"` // e.g. "gt"
}
||||
|
||||
// slurpDashUIDs returns a map of [orgID, dashboardId] -> dashUID, built from a
// single scan of the dashboard table.
func (m *migration) slurpDashUIDs() (map[[2]int64]string, error) {
	dashIDs := []struct {
		OrgID int64  `xorm:"org_id"`
		ID    int64  `xorm:"id"`
		UID   string `xorm:"uid"`
	}{}

	err := m.sess.SQL(`SELECT org_id, id, uid FROM dashboard`).Find(&dashIDs)

	if err != nil {
		return nil, err
	}

	idToUID := make(map[[2]int64]string, len(dashIDs))

	for _, ds := range dashIDs {
		idToUID[[2]int64{ds.OrgID, ds.ID}] = ds.UID
	}

	return idToUID, nil
}
@ -1,108 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
"github.com/grafana/grafana/pkg/infra/slugify" |
||||
) |
||||
|
||||
// dashboard is a vendored, minimal copy of the dashboard model used while
// creating folders during the legacy alert migration. The authoritative state
// lives in Data; the flat fields are denormalized copies for persistence.
type dashboard struct {
	Id       int64
	Uid      string
	Slug     string
	OrgId    int64
	GnetId   int64
	Version  int
	PluginId string

	Created time.Time
	Updated time.Time

	UpdatedBy int64
	CreatedBy int64
	FolderId  int64
	IsFolder  bool
	HasACL    bool `xorm:"has_acl"`

	Title string
	Data  *simplejson.Json // full dashboard JSON; source of truth for uid/version/title
}
||||
|
||||
func (d *dashboard) setUid(uid string) { |
||||
d.Uid = uid |
||||
d.Data.Set("uid", uid) |
||||
} |
||||
|
||||
func (d *dashboard) setVersion(version int) { |
||||
d.Version = version |
||||
d.Data.Set("version", version) |
||||
} |
||||
|
||||
// UpdateSlug updates the slug
|
||||
func (d *dashboard) updateSlug() { |
||||
title := d.Data.Get("title").MustString() |
||||
d.Slug = slugify.Slugify(title) |
||||
} |
||||
|
||||
// newDashboardFromJson builds a dashboard model from raw dashboard JSON,
// deriving title and slug, and deciding whether this represents an update of
// an existing dashboard (id or uid present in the JSON) or a brand-new one.
func newDashboardFromJson(data *simplejson.Json) *dashboard {
	dash := &dashboard{}
	dash.Data = data
	dash.Title = dash.Data.Get("title").MustString()
	dash.updateSlug()
	update := false // true when the JSON identifies an existing dashboard

	if id, err := dash.Data.Get("id").Float64(); err == nil {
		dash.Id = int64(id)
		update = true
	}

	if uid, err := dash.Data.Get("uid").String(); err == nil {
		dash.Uid = uid
		update = true
	}

	// Only honor the incoming version for updates; for new dashboards reset
	// the version to 0 and stamp the creation time as well.
	if version, err := dash.Data.Get("version").Float64(); err == nil && update {
		dash.Version = int(version)
		dash.Updated = time.Now()
	} else {
		dash.Data.Set("version", 0)
		dash.Created = time.Now()
		dash.Updated = time.Now()
	}

	if gnetId, err := dash.Data.Get("gnetId").Float64(); err == nil {
		dash.GnetId = int64(gnetId)
	}

	return dash
}
||||
|
||||
// saveFolderCommand is a vendored, minimal copy of the dashboard save command
// used when creating migration folders. Fields tagged json:"-" are populated
// by the caller, never from request payloads.
type saveFolderCommand struct {
	Dashboard    *simplejson.Json `json:"dashboard" binding:"Required"`
	UserId       int64            `json:"userId"`
	Message      string           `json:"message"`
	OrgId        int64            `json:"-"`
	RestoredFrom int              `json:"-"`
	PluginId     string           `json:"-"`
	FolderId     int64            `json:"folderId"`
	IsFolder     bool             `json:"isFolder"`
}
||||
|
||||
// GetDashboardModel turns the command into the saveable model
|
||||
func (cmd *saveFolderCommand) getDashboardModel() *dashboard { |
||||
dash := newDashboardFromJson(cmd.Dashboard) |
||||
userId := cmd.UserId |
||||
|
||||
if userId == 0 { |
||||
userId = -1 |
||||
} |
||||
|
||||
dash.UpdatedBy = userId |
||||
dash.OrgId = cmd.OrgId |
||||
dash.PluginId = cmd.PluginId |
||||
dash.IsFolder = cmd.IsFolder |
||||
dash.FolderId = cmd.FolderId |
||||
dash.updateSlug() |
||||
return dash |
||||
} |
@ -1,31 +0,0 @@ |
||||
package ualert |
||||
|
||||
// dsUIDLookup maps (orgID, datasourceID) pairs to the datasource UID.
type dsUIDLookup map[[2]int64]string

// GetUID returns the datasource UID for the given org and datasource ID, or
// the empty string when the pair is unknown.
func (d dsUIDLookup) GetUID(orgID, datasourceID int64) string {
	key := [2]int64{orgID, datasourceID}
	return d[key]
}
||||
|
||||
// slurpDSIDs returns a map of [orgID, dataSourceId] -> UID, built from a
// single scan of the data_source table.
func (m *migration) slurpDSIDs() (dsUIDLookup, error) {
	dsIDs := []struct {
		OrgID int64  `xorm:"org_id"`
		ID    int64  `xorm:"id"`
		UID   string `xorm:"uid"`
	}{}

	err := m.sess.SQL(`SELECT org_id, id, uid FROM data_source`).Find(&dsIDs)

	if err != nil {
		return nil, err
	}

	idToUID := make(dsUIDLookup, len(dsIDs))

	for _, ds := range dsIDs {
		idToUID[[2]int64{ds.OrgID, ds.ID}] = ds.UID
	}

	return idToUID, nil
}
@ -0,0 +1,66 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"fmt" |
||||
"strconv" |
||||
"time" |
||||
|
||||
"xorm.io/xorm" |
||||
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
) |
||||
|
||||
// KVNamespace is a vendored migration.KVNamespace.
// Declared as a var (not a const) because kvStoreV1Entry.Namespace is a
// *string and needs an addressable value.
var KVNamespace = "ngalert.migration"

// migratedKey is a vendored migration.migratedKey.
var migratedKey = "migrated"

// MigrationServiceMigration moves the legacy alert migration status from the migration log to kvstore.
func MigrationServiceMigration(mg *migrator.Migrator) {
	mg.AddMigration("set legacy alert migration status in kvstore", &migrationLogToKVStore{})
}
||||
|
||||
// migrationLogToKVStore records whether the legacy alert migration has run,
// copying that fact from the migration_log table into the kvstore; the real
// work happens in Exec.
type migrationLogToKVStore struct {
	migrator.MigrationBase
}

// SQL returns the placeholder statement used for code-based migrations.
func (c migrationLogToKVStore) SQL(migrator.Dialect) string {
	return codeMigration
}
||||
|
||||
// Exec checks whether the legacy dashboard-alert migration has run (by the
// presence of its migration_log entry) and records that boolean status as a
// kvstore entry, so the new migration service can read it without consulting
// the migration log. The status is written even when the migration has not
// run, as "false".
func (c migrationLogToKVStore) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
	migrationRun, err := sess.Table("migration_log").Get(&migrator.MigrationLog{MigrationID: migTitle})
	if err != nil {
		mg.Logger.Error("alert migration failure: could not get migration log", "error", err)
		return err
	}

	// Org id 0 marks the entry as applying to the instance as a whole.
	var anyOrg int64 = 0
	now := time.Now()
	entry := kvStoreV1Entry{
		OrgID:     &anyOrg,
		Namespace: &KVNamespace,
		Key:       &migratedKey,
		Value:     strconv.FormatBool(migrationRun),
		Created:   now,
		Updated:   now,
	}
	if _, errCreate := sess.Table("kv_store").Insert(&entry); errCreate != nil {
		mg.Logger.Error("failed to insert migration status to kvstore", "err", errCreate)
		return fmt.Errorf("failed to insert migration status to kvstore: %w", errCreate)
	}
	return nil
}
||||
|
||||
// kvStoreV1Entry is a vendored kvstore.Item.
// Pointer fields mirror the original model, where nil distinguishes "unset"
// from the zero value in query-by-example lookups.
type kvStoreV1Entry struct {
	ID        int64   `xorm:"pk autoincr 'id'"`
	OrgID     *int64  `xorm:"org_id"`
	Namespace *string `xorm:"namespace"`
	Key       *string `xorm:"key"`
	Value     string  `xorm:"value"`

	Created time.Time `xorm:"created"`
	Updated time.Time `xorm:"updated"`
}
@ -1,847 +0,0 @@ |
||||
package ualert_test |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/google/go-cmp/cmp" |
||||
"github.com/google/go-cmp/cmp/cmpopts" |
||||
"github.com/prometheus/alertmanager/pkg/labels" |
||||
"github.com/prometheus/common/model" |
||||
"github.com/stretchr/testify/require" |
||||
"gopkg.in/ini.v1" |
||||
"xorm.io/xorm" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
"github.com/grafana/grafana/pkg/services/alerting/models" |
||||
"github.com/grafana/grafana/pkg/services/dashboards" |
||||
"github.com/grafana/grafana/pkg/services/datasources" |
||||
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/org" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrations" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrations/ualert" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
) |
||||
|
||||
// TestAddDashAlertMigration tests the AddDashAlertMigration wrapper method that decides when to run the migration based on migration status and settings.
|
||||
func TestAddDashAlertMigration(t *testing.T) { |
||||
x := setupTestDB(t) |
||||
|
||||
tc := []struct { |
||||
name string |
||||
config *setting.Cfg |
||||
isMigrationRun bool |
||||
shouldPanic bool |
||||
expected []string // set of migration titles
|
||||
}{ |
||||
{ |
||||
name: "when unified alerting enabled and migration not already run, then add main migration and clear rmMigration log entry", |
||||
config: &setting.Cfg{ |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: boolPointer(true), |
||||
}, |
||||
}, |
||||
isMigrationRun: false, |
||||
expected: []string{fmt.Sprintf(ualert.ClearMigrationEntryTitle, ualert.RmMigTitle), ualert.MigTitle}, |
||||
}, |
||||
{ |
||||
name: "when unified alerting disabled and migration is already run, then add rmMigration and clear main migration log entry", |
||||
config: &setting.Cfg{ |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: boolPointer(false), |
||||
}, |
||||
ForceMigration: true, |
||||
}, |
||||
isMigrationRun: true, |
||||
expected: []string{fmt.Sprintf(ualert.ClearMigrationEntryTitle, ualert.MigTitle), ualert.RmMigTitle}, |
||||
}, |
||||
{ |
||||
name: "when unified alerting disabled, migration is already run and force migration is disabled, then the migration should panic", |
||||
config: &setting.Cfg{ |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: boolPointer(false), |
||||
}, |
||||
ForceMigration: false, |
||||
}, |
||||
isMigrationRun: true, |
||||
expected: []string{fmt.Sprintf(ualert.ClearMigrationEntryTitle, ualert.MigTitle), ualert.RmMigTitle}, |
||||
}, |
||||
{ |
||||
name: "when unified alerting enabled and migration is already run, then do nothing", |
||||
config: &setting.Cfg{ |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: boolPointer(true), |
||||
}, |
||||
}, |
||||
isMigrationRun: true, |
||||
expected: []string{}, |
||||
}, |
||||
{ |
||||
name: "when unified alerting disabled and migration is not already run, then do nothing", |
||||
config: &setting.Cfg{ |
||||
UnifiedAlerting: setting.UnifiedAlertingSettings{ |
||||
Enabled: boolPointer(false), |
||||
}, |
||||
}, |
||||
isMigrationRun: false, |
||||
expected: []string{}, |
||||
}, |
||||
} |
||||
|
||||
for _, tt := range tc { |
||||
t.Run(tt.name, func(t *testing.T) { |
||||
defer func() { |
||||
// if the code should panic, make sure it has
|
||||
if r := recover(); r == nil && tt.shouldPanic { |
||||
t.Errorf("The code did not panic") |
||||
} |
||||
}() |
||||
if tt.isMigrationRun { |
||||
log := migrator.MigrationLog{ |
||||
MigrationID: ualert.MigTitle, |
||||
SQL: "", |
||||
Timestamp: time.Now(), |
||||
Success: true, |
||||
} |
||||
_, err := x.Insert(log) |
||||
require.NoError(t, err) |
||||
} else { |
||||
_, err := x.Exec("DELETE FROM migration_log WHERE migration_id = ?", ualert.MigTitle) |
||||
require.NoError(t, err) |
||||
} |
||||
|
||||
mg := migrator.NewMigrator(x, tt.config) |
||||
|
||||
ualert.AddDashAlertMigration(mg) |
||||
require.Equal(t, tt.expected, mg.GetMigrationIDs(false)) |
||||
}) |
||||
} |
||||
} |
||||
|
||||
// TestAMConfigMigration tests the execution of the main DashAlertMigration specifically for migrations of channels and routes.
//
// Each case seeds legacy notification channels and legacy alerts, runs the
// dashboard-alert migration, then compares the Alertmanager configuration
// written to the alert_configuration table against the expected
// PostableUserConfig per org. Channel names double as UIDs (see
// createAlertNotificationWithReminder), which is why route matchers and
// receiver names line up with the channel names used here.
//
// NOTE(review): the expErr field is declared but never asserted in the run
// loop below — presumably all current cases expect success; confirm before
// adding an error case.
func TestAMConfigMigration(t *testing.T) {
	// Run initial migration to have a working DB.
	x := setupTestDB(t)

	tc := []struct {
		name           string
		legacyChannels []*models.AlertNotification
		alerts         []*models.Alert

		// expected Alertmanager config keyed by org id; only Receivers and
		// Route are compared (order-insensitively) in the loop below.
		expected map[int64]*ualert.PostableUserConfig
		expErr   error
	}{
		{
			name: "general multi-org, multi-alert, multi-channel migration",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
				createAlertNotification(t, int64(1), "notifier2", "slack", slackSettings, false),
				createAlertNotification(t, int64(1), "notifier3", "opsgenie", opsgenieSettings, false),
				createAlertNotification(t, int64(2), "notifier4", "email", emailSettings, false),
				createAlertNotification(t, int64(2), "notifier5", "slack", slackSettings, false),
				createAlertNotification(t, int64(2), "notifier6", "opsgenie", opsgenieSettings, true), // default
			},
			alerts: []*models.Alert{
				createAlert(t, int64(1), int64(1), int64(1), "alert1", []string{"notifier1"}),
				createAlert(t, int64(1), int64(1), int64(2), "alert2", []string{"notifier2", "notifier3"}),
				createAlert(t, int64(1), int64(2), int64(3), "alert3", []string{"notifier3"}),
				createAlert(t, int64(2), int64(3), int64(1), "alert4", []string{"notifier4"}),
				createAlert(t, int64(2), int64(3), int64(2), "alert5", []string{"notifier4", "notifier5", "notifier6"}),
				createAlert(t, int64(2), int64(4), int64(3), "alert6", []string{}),
			},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							// Org 1 has no default channel, so the root route falls back to the empty default contact point.
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier2", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier2".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier3", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier3".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: nil,
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "notifier2", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier2", Type: "slack"}}},
							{Name: "notifier3", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier3", Type: "opsgenie"}}},
							{Name: "autogen-contact-point-default"}, // empty default
						},
					},
				},
				int64(2): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							// Org 2 has exactly one default channel (notifier6), so it becomes the root receiver.
							Receiver:   "notifier6",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier4", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier4".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier5", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier5".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier6", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier6".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: durationPointer(ualert.DisabledRepeatInterval),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier4", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier4", Type: "email"}}},
							{Name: "notifier5", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier5", Type: "slack"}}},
							{Name: "notifier6", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier6", Type: "opsgenie"}}},
						},
					},
				},
			},
		},
		{
			name: "when no default channel, create empty autogen-contact-point-default",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: nil,
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "autogen-contact-point-default"},
						},
					},
				},
			},
		},
		{
			name: "when single default channel, don't create autogen-contact-point-default",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, true),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "notifier1",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: durationPointer(ualert.DisabledRepeatInterval),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
						},
					},
				},
			},
		},
		{
			name: "when single default channel with SendReminder, use channel Frequency as RepeatInterval",
			legacyChannels: []*models.AlertNotification{
				createAlertNotificationWithReminder(t, int64(1), "notifier1", "email", emailSettings, true, true, time.Duration(1)*time.Hour),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "notifier1",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(model.Duration(time.Duration(1) * time.Hour))},
							},
							RepeatInterval: durationPointer(model.Duration(time.Duration(1) * time.Hour)),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
						},
					},
				},
			},
		},
		{
			name: "when multiple default channels, add them to autogen-contact-point-default as well",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, true),
				createAlertNotification(t, int64(1), "notifier2", "slack", slackSettings, true),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier2", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier2".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: durationPointer(ualert.DisabledRepeatInterval),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "notifier2", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier2", Type: "slack"}}},
							{Name: "autogen-contact-point-default", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}, {Name: "notifier2", Type: "slack"}}},
						},
					},
				},
			},
		},
		{
			name: "when multiple default channels with SendReminder, use minimum channel frequency as RepeatInterval",
			legacyChannels: []*models.AlertNotification{
				createAlertNotificationWithReminder(t, int64(1), "notifier1", "email", emailSettings, true, true, time.Duration(1)*time.Hour),
				createAlertNotificationWithReminder(t, int64(1), "notifier2", "slack", slackSettings, true, true, time.Duration(30)*time.Minute),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(model.Duration(time.Duration(1) * time.Hour))},
								{Receiver: "notifier2", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier2".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(model.Duration(time.Duration(30) * time.Minute))},
							},
							// Root repeat interval is the minimum (30m) across the default channels.
							RepeatInterval: durationPointer(model.Duration(time.Duration(30) * time.Minute)),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "notifier2", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier2", Type: "slack"}}},
							{Name: "autogen-contact-point-default", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}, {Name: "notifier2", Type: "slack"}}},
						},
					},
				},
			},
		},
		{
			name: "when default channels exist alongside non-default, add only defaults to autogen-contact-point-default",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, true), // default
				createAlertNotification(t, int64(1), "notifier2", "slack", slackSettings, false),
				createAlertNotification(t, int64(1), "notifier3", "opsgenie", opsgenieSettings, true), // default
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier2", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier2".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier3", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier3".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
							RepeatInterval: durationPointer(ualert.DisabledRepeatInterval),
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "notifier2", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier2", Type: "slack"}}},
							{Name: "notifier3", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier3", Type: "opsgenie"}}},
							{Name: "autogen-contact-point-default", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}, {Name: "notifier3", Type: "opsgenie"}}}},
					},
				},
			},
		},
		{
			name: "when alerts share channels, only create one receiver per legacy channel",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
				createAlertNotification(t, int64(1), "notifier2", "slack", slackSettings, false),
			},
			alerts: []*models.Alert{
				createAlert(t, int64(1), int64(1), int64(1), "alert1", []string{"notifier1"}),
				createAlert(t, int64(1), int64(1), int64(1), "alert2", []string{"notifier1", "notifier2"}),
			},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
								{Receiver: "notifier2", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier2".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "notifier2", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier2", Type: "slack"}}},
							{Name: "autogen-contact-point-default"},
						},
					},
				},
			},
		},
		{
			name: "when channel not linked to any alerts, still create a receiver for it",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "autogen-contact-point-default"},
						},
					},
				},
			},
		},
		{
			name: "when unsupported channels, do not migrate them",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
				createAlertNotification(t, int64(1), "notifier2", "hipchat", "", false),
				createAlertNotification(t, int64(1), "notifier3", "sensu", "", false),
			},
			alerts: []*models.Alert{},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "autogen-contact-point-default"},
						},
					},
				},
			},
		},
		{
			name: "when unsupported channel linked to alert, do not migrate only that channel",
			legacyChannels: []*models.AlertNotification{
				createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
				createAlertNotification(t, int64(1), "notifier2", "sensu", "", false),
			},
			alerts: []*models.Alert{
				createAlert(t, int64(1), int64(1), int64(1), "alert1", []string{"notifier1", "notifier2"}),
			},
			expected: map[int64]*ualert.PostableUserConfig{
				int64(1): {
					AlertmanagerConfig: ualert.PostableApiAlertingConfig{
						Route: &ualert.Route{
							Receiver:   "autogen-contact-point-default",
							GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
							Routes: []*ualert.Route{
								{Receiver: "notifier1", ObjectMatchers: ualert.ObjectMatchers{{Type: 2, Name: ualert.ContactLabel, Value: `.*"notifier1".*`}}, Routes: nil, Continue: true, RepeatInterval: durationPointer(ualert.DisabledRepeatInterval)},
							},
						},
						Receivers: []*ualert.PostableApiReceiver{
							{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
							{Name: "autogen-contact-point-default"},
						},
					},
				},
			},
		},
	}

	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			// Each case seeds its own data and removes it afterwards so cases are independent.
			defer teardown(t, x)
			setupLegacyAlertsTables(t, x, tt.legacyChannels, tt.alerts)
			runDashAlertMigrationTestRun(t, x)

			for orgId := range tt.expected {
				amConfig := getAlertmanagerConfig(t, x, orgId)

				// Order of nested GrafanaManagedReceivers is not guaranteed.
				cOpt := []cmp.Option{
					cmpopts.IgnoreUnexported(ualert.PostableApiReceiver{}),
					// UID/Settings/SecureSettings are generated during migration and not asserted here.
					cmpopts.IgnoreFields(ualert.PostableGrafanaReceiver{}, "UID", "Settings", "SecureSettings"),
					cmpopts.SortSlices(func(a, b *ualert.PostableGrafanaReceiver) bool { return a.Name < b.Name }),
					cmpopts.SortSlices(func(a, b *ualert.PostableApiReceiver) bool { return a.Name < b.Name }),
				}
				if !cmp.Equal(tt.expected[orgId].AlertmanagerConfig.Receivers, amConfig.AlertmanagerConfig.Receivers, cOpt...) {
					t.Errorf("Unexpected Receivers: %v", cmp.Diff(tt.expected[orgId].AlertmanagerConfig.Receivers, amConfig.AlertmanagerConfig.Receivers, cOpt...))
				}

				// Order of routes is not guaranteed.
				cOpt = []cmp.Option{
					cmpopts.SortSlices(func(a, b *ualert.Route) bool {
						if a.Receiver != b.Receiver {
							return a.Receiver < b.Receiver
						}
						return a.ObjectMatchers[0].Value < b.ObjectMatchers[0].Value
					}),
					cmpopts.IgnoreUnexported(ualert.Route{}, labels.Matcher{}),
				}
				if !cmp.Equal(tt.expected[orgId].AlertmanagerConfig.Route, amConfig.AlertmanagerConfig.Route, cOpt...) {
					t.Errorf("Unexpected Route: %v", cmp.Diff(tt.expected[orgId].AlertmanagerConfig.Route, amConfig.AlertmanagerConfig.Route, cOpt...))
				}
			}
		})
	}
}
||||
|
||||
// TestDashAlertMigration tests the execution of the main DashAlertMigration specifically for migrations of models.
//
// It verifies that migrated AlertRules carry the expected ContactLabel
// (built from the legacy notification channel names attached to each alert)
// and that alerts whose folder no longer exists land in the General folder.
func TestDashAlertMigration(t *testing.T) {
	// Run initial migration to have a working DB.
	x := setupTestDB(t)

	t.Run("when DashAlertMigration create ContactLabel on migrated AlertRules", func(t *testing.T) {
		defer teardown(t, x)
		legacyChannels := []*models.AlertNotification{
			createAlertNotification(t, int64(1), "notifier1", "email", emailSettings, false),
			createAlertNotification(t, int64(1), "notifier2", "slack", slackSettings, false),
			createAlertNotification(t, int64(1), "notifier3", "opsgenie", opsgenieSettings, false),
			createAlertNotification(t, int64(2), "notifier4", "email", emailSettings, false),
			createAlertNotification(t, int64(2), "notifier5", "slack", slackSettings, false),
			createAlertNotification(t, int64(2), "notifier6", "opsgenie", opsgenieSettings, true), // default
		}
		alerts := []*models.Alert{
			createAlert(t, int64(1), int64(1), int64(1), "alert1", []string{"notifier1"}),
			createAlert(t, int64(1), int64(1), int64(2), "alert2", []string{"notifier2", "notifier3"}),
			createAlert(t, int64(1), int64(2), int64(3), "alert3", []string{"notifier3"}),
			createAlert(t, int64(2), int64(3), int64(1), "alert4", []string{"notifier4"}),
			createAlert(t, int64(2), int64(3), int64(2), "alert5", []string{"notifier4", "notifier5", "notifier6"}),
			createAlert(t, int64(2), int64(4), int64(3), "alert6", []string{}),
		}
		// Expected ContactLabel per rule. Note alert4 in org 2 picks up the
		// default channel notifier6 even though it only references notifier4.
		expected := map[int64]map[string]*ngModels.AlertRule{
			int64(1): {
				"alert1": {Labels: map[string]string{ualert.ContactLabel: `"notifier1"`}},
				"alert2": {Labels: map[string]string{ualert.ContactLabel: `"notifier2","notifier3"`}},
				"alert3": {Labels: map[string]string{ualert.ContactLabel: `"notifier3"`}},
			},
			int64(2): {
				"alert4": {Labels: map[string]string{ualert.ContactLabel: `"notifier4","notifier6"`}},
				"alert5": {Labels: map[string]string{ualert.ContactLabel: `"notifier4","notifier5","notifier6"`}},
				"alert6": {Labels: map[string]string{}},
			},
		}
		setupLegacyAlertsTables(t, x, legacyChannels, alerts)
		runDashAlertMigrationTestRun(t, x)

		for orgId := range expected {
			rules := getAlertRules(t, x, orgId)
			expectedRulesMap := expected[orgId]
			require.Len(t, rules, len(expectedRulesMap))
			for _, r := range rules {
				require.Equal(t, expectedRulesMap[r.Title].Labels[ualert.ContactLabel], r.Labels[ualert.ContactLabel])
			}
		}
	})

	t.Run("when DashAlertMigration create ContactLabel with sanitized name if name contains double quote", func(t *testing.T) {
		defer teardown(t, x)
		legacyChannels := []*models.AlertNotification{
			createAlertNotification(t, int64(1), "notif\"ier1", "email", emailSettings, false),
		}
		alerts := []*models.Alert{
			createAlert(t, int64(1), int64(1), int64(1), "alert1", []string{"notif\"ier1"}),
		}
		// The embedded double quote is replaced with an underscore in the label value.
		expected := map[int64]map[string]*ngModels.AlertRule{
			int64(1): {
				"alert1": {Labels: map[string]string{ualert.ContactLabel: `"notif_ier1"`}},
			},
		}
		setupLegacyAlertsTables(t, x, legacyChannels, alerts)
		runDashAlertMigrationTestRun(t, x)

		for orgId := range expected {
			rules := getAlertRules(t, x, orgId)
			expectedRulesMap := expected[orgId]
			require.Len(t, rules, len(expectedRulesMap))
			for _, r := range rules {
				require.Equal(t, expectedRulesMap[r.Title].Labels[ualert.ContactLabel], r.Labels[ualert.ContactLabel])
			}
		}
	})

	// NOTE(review): unlike the subtests above, this one does not defer
	// teardown(t, x); it relies on being last. Confirm whether that is intentional.
	t.Run("when folder is missing put alert in General folder", func(t *testing.T) {
		o := createOrg(t, 1)
		folder1 := createDashboard(t, 1, o.ID, "folder-1")
		folder1.IsFolder = true
		dash1 := createDashboard(t, 3, o.ID, "dash1")
		dash1.FolderID = folder1.ID
		dash2 := createDashboard(t, 4, o.ID, "dash2")
		dash2.FolderID = 22 // missing folder

		a1 := createAlert(t, o.ID, dash1.ID, int64(1), "alert-1", []string{})
		a2 := createAlert(t, o.ID, dash2.ID, int64(1), "alert-2", []string{})

		_, err := x.Insert(o, folder1, dash1, dash2, a1, a2)
		require.NoError(t, err)

		runDashAlertMigrationTestRun(t, x)

		rules := getAlertRules(t, x, o.ID)
		require.Len(t, rules, 2)

		var generalFolder dashboards.Dashboard
		_, err = x.Table(&dashboards.Dashboard{}).Where("title = ? AND org_id = ?", ualert.GENERAL_FOLDER, o.ID).Get(&generalFolder)
		require.NoError(t, err)

		// NOTE(review): generalFolder is a struct value, so NotNil always
		// passes; asserting a non-empty UID would be stronger.
		require.NotNil(t, generalFolder)

		// alert-1 stays in its existing folder; alert-2 moves to General.
		for _, rule := range rules {
			var expectedFolder dashboards.Dashboard
			if rule.Title == a1.Name {
				expectedFolder = *folder1
			} else {
				expectedFolder = generalFolder
			}
			require.Equal(t, expectedFolder.UID, rule.NamespaceUID)
		}
	})
}
||||
|
||||
// Minimal valid settings payloads for each legacy channel type used by these tests.
const (
	emailSettings    = `{"addresses": "test"}`
	slackSettings    = `{"recipient": "test", "token": "test"}`
	opsgenieSettings = `{"apiKey": "test"}`
)
||||
|
||||
// setupTestDB prepares the sqlite database and runs OSS migrations to initialize the schemas.
|
||||
func setupTestDB(t *testing.T) *xorm.Engine { |
||||
t.Helper() |
||||
testDB := sqlutil.SQLite3TestDB() |
||||
|
||||
x, err := xorm.NewEngine(testDB.DriverName, testDB.ConnStr) |
||||
require.NoError(t, err) |
||||
|
||||
err = migrator.NewDialect(x.DriverName()).CleanDB(x) |
||||
require.NoError(t, err) |
||||
|
||||
mg := migrator.NewMigrator(x, &setting.Cfg{Raw: ini.Empty()}) |
||||
migrations := &migrations.OSSMigrations{} |
||||
migrations.AddMigration(mg) |
||||
|
||||
err = mg.Start(false, 0) |
||||
require.NoError(t, err) |
||||
|
||||
return x |
||||
} |
||||
|
||||
var (
	// now is a single timestamp shared by all fixtures so Created/Updated
	// fields are consistent within a test run.
	now = time.Now()
)
||||
|
||||
// createAlertNotificationWithReminder creates a legacy alert notification channel for inserting into the test database.
|
||||
func createAlertNotificationWithReminder(t *testing.T, orgId int64, uid string, channelType string, settings string, defaultChannel bool, sendReminder bool, frequency time.Duration) *models.AlertNotification { |
||||
t.Helper() |
||||
settingsJson := simplejson.New() |
||||
if settings != "" { |
||||
s, err := simplejson.NewJson([]byte(settings)) |
||||
if err != nil { |
||||
t.Fatalf("Failed to unmarshal alert notification json: %v", err) |
||||
} |
||||
settingsJson = s |
||||
} |
||||
|
||||
return &models.AlertNotification{ |
||||
OrgID: orgId, |
||||
UID: uid, |
||||
Name: uid, // Same as uid to make testing easier.
|
||||
Type: channelType, |
||||
DisableResolveMessage: false, |
||||
IsDefault: defaultChannel, |
||||
Settings: settingsJson, |
||||
SecureSettings: make(map[string][]byte), |
||||
Created: now, |
||||
Updated: now, |
||||
SendReminder: sendReminder, |
||||
Frequency: frequency, |
||||
} |
||||
} |
||||
|
||||
// createAlertNotification creates a legacy alert notification channel for inserting into the test database.
|
||||
func createAlertNotification(t *testing.T, orgId int64, uid string, channelType string, settings string, defaultChannel bool) *models.AlertNotification { |
||||
return createAlertNotificationWithReminder(t, orgId, uid, channelType, settings, defaultChannel, false, time.Duration(0)) |
||||
} |
||||
|
||||
// createAlert creates a legacy alert rule for inserting into the test database.
|
||||
func createAlert(t *testing.T, orgId int64, dashboardId int64, panelsId int64, name string, notifierUids []string) *models.Alert { |
||||
t.Helper() |
||||
|
||||
var settings = simplejson.New() |
||||
if len(notifierUids) != 0 { |
||||
notifiers := make([]interface{}, 0) |
||||
for _, n := range notifierUids { |
||||
notifiers = append(notifiers, struct { |
||||
Uid string |
||||
}{Uid: n}) |
||||
} |
||||
|
||||
settings.Set("notifications", notifiers) |
||||
} |
||||
|
||||
return &models.Alert{ |
||||
OrgID: orgId, |
||||
DashboardID: dashboardId, |
||||
PanelID: panelsId, |
||||
Name: name, |
||||
Message: "message", |
||||
Frequency: int64(60), |
||||
For: time.Duration(time.Duration(60).Seconds()), |
||||
State: models.AlertStateOK, |
||||
Settings: settings, |
||||
NewStateDate: now, |
||||
Created: now, |
||||
Updated: now, |
||||
} |
||||
} |
||||
|
||||
// createDashboard creates a dashboard for inserting into the test database.
|
||||
func createDashboard(t *testing.T, id int64, orgId int64, uid string) *dashboards.Dashboard { |
||||
t.Helper() |
||||
return &dashboards.Dashboard{ |
||||
ID: id, |
||||
OrgID: orgId, |
||||
UID: uid, |
||||
Created: now, |
||||
Updated: now, |
||||
Title: uid, // Not tested, needed to satisfy contraint.
|
||||
} |
||||
} |
||||
|
||||
// createDatasource creates a ddatasource for inserting into the test database.
|
||||
func createDatasource(t *testing.T, id int64, orgId int64, uid string) *datasources.DataSource { |
||||
t.Helper() |
||||
return &datasources.DataSource{ |
||||
ID: id, |
||||
OrgID: orgId, |
||||
UID: uid, |
||||
Created: now, |
||||
Updated: now, |
||||
Name: uid, // Not tested, needed to satisfy contraint.
|
||||
} |
||||
} |
||||
|
||||
func createOrg(t *testing.T, id int64) *org.Org { |
||||
t.Helper() |
||||
return &org.Org{ |
||||
ID: id, |
||||
Version: 1, |
||||
Name: fmt.Sprintf("org_%d", id), |
||||
Created: time.Now(), |
||||
Updated: time.Now(), |
||||
} |
||||
} |
||||
|
||||
// teardown cleans the input tables between test cases.
|
||||
func teardown(t *testing.T, x *xorm.Engine) { |
||||
_, err := x.Exec("DELETE from org") |
||||
require.NoError(t, err) |
||||
_, err = x.Exec("DELETE from alert") |
||||
require.NoError(t, err) |
||||
_, err = x.Exec("DELETE from alert_notification") |
||||
require.NoError(t, err) |
||||
_, err = x.Exec("DELETE from dashboard") |
||||
require.NoError(t, err) |
||||
_, err = x.Exec("DELETE from data_source") |
||||
require.NoError(t, err) |
||||
} |
||||
|
||||
// setupDashAlertMigrationTestRun runs DashAlertMigration for a new test run.
|
||||
func runDashAlertMigrationTestRun(t *testing.T, x *xorm.Engine) { |
||||
_, errDeleteMig := x.Exec("DELETE FROM migration_log WHERE migration_id = ?", ualert.MigTitle) |
||||
require.NoError(t, errDeleteMig) |
||||
|
||||
alertMigrator := migrator.NewMigrator(x, &setting.Cfg{}) |
||||
alertMigrator.AddMigration(ualert.RmMigTitle, &ualert.RmMigration{}) |
||||
ualert.AddDashAlertMigration(alertMigrator) |
||||
|
||||
errRunningMig := alertMigrator.Start(false, 0) |
||||
require.NoError(t, errRunningMig) |
||||
} |
||||
|
||||
// setupLegacyAlertsTables inserts data into the legacy alerting tables that is
// needed for testing the migration: two orgs, two dashboards and two
// datasources per org, plus the caller-supplied channels and alerts.
func setupLegacyAlertsTables(t *testing.T, x *xorm.Engine, legacyChannels []*models.AlertNotification, alerts []*models.Alert) {
	t.Helper()

	orgs := []org.Org{
		*createOrg(t, 1),
		*createOrg(t, 2),
	}

	// Setup dashboards.
	dashboards := []dashboards.Dashboard{
		*createDashboard(t, 1, 1, "dash1-1"),
		*createDashboard(t, 2, 1, "dash2-1"),
		*createDashboard(t, 3, 2, "dash3-2"),
		*createDashboard(t, 4, 2, "dash4-2"),
	}
	_, errDashboards := x.Insert(dashboards)
	require.NoError(t, errDashboards)

	// Setup data_sources.
	dataSources := []datasources.DataSource{
		*createDatasource(t, 1, 1, "ds1-1"),
		*createDatasource(t, 2, 1, "ds2-1"),
		*createDatasource(t, 3, 2, "ds3-2"),
		*createDatasource(t, 4, 2, "ds4-2"),
	}

	_, errOrgs := x.Insert(orgs)
	require.NoError(t, errOrgs)

	_, errDataSourcess := x.Insert(dataSources)
	require.NoError(t, errDataSourcess)

	// Channels and alerts are optional; xorm errors on inserting an empty
	// slice, so guard both inserts.
	if len(legacyChannels) > 0 {
		_, channelErr := x.Insert(legacyChannels)
		require.NoError(t, channelErr)
	}

	if len(alerts) > 0 {
		_, alertErr := x.Insert(alerts)
		require.NoError(t, alertErr)
	}
}
||||
|
||||
// getAlertmanagerConfig retreives the Alertmanager Config from the database for a given orgId.
|
||||
func getAlertmanagerConfig(t *testing.T, x *xorm.Engine, orgId int64) *ualert.PostableUserConfig { |
||||
amConfig := "" |
||||
_, err := x.Table("alert_configuration").Where("org_id = ?", orgId).Cols("alertmanager_configuration").Get(&amConfig) |
||||
require.NoError(t, err) |
||||
|
||||
config := ualert.PostableUserConfig{} |
||||
err = json.Unmarshal([]byte(amConfig), &config) |
||||
require.NoError(t, err) |
||||
return &config |
||||
} |
||||
|
||||
// getAlertRules retrieves the migrated alert rules from the database for a
// given orgId.
func getAlertRules(t *testing.T, x *xorm.Engine, orgId int64) []*ngModels.AlertRule {
	rules := make([]*ngModels.AlertRule, 0)
	err := x.Table("alert_rule").Where("org_id = ?", orgId).Find(&rules)
	require.NoError(t, err)

	return rules
}
||||
|
||||
// boolPointer returns a pointer to a copy of b, for populating *bool fields
// in struct literals.
func boolPointer(b bool) *bool {
	v := b
	return &v
}
||||
|
||||
func durationPointer(d model.Duration) *model.Duration { |
||||
return &d |
||||
} |
@ -1,284 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"fmt" |
||||
"time" |
||||
|
||||
"xorm.io/xorm" |
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
"github.com/grafana/grafana/pkg/infra/metrics" |
||||
"github.com/grafana/grafana/pkg/services/dashboards" |
||||
dashver "github.com/grafana/grafana/pkg/services/dashboardversion" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
// roleType is the org role name as stored in dashboard_acl rows.
type roleType string

const (
	RoleNone   roleType = "None"
	RoleViewer roleType = "Viewer"
	RoleEditor roleType = "Editor"
	RoleAdmin  roleType = "Admin"
)

// IsValid reports whether r is one of the four known roles.
func (r roleType) IsValid() bool {
	switch r {
	case RoleNone, RoleViewer, RoleEditor, RoleAdmin:
		return true
	}
	return false
}
||||
|
||||
// permissionType is the numeric permission level stored in dashboard_acl;
// higher values win when resolving conflicts (see setACL).
type permissionType int

// dashboardACL is a migration-local copy of a dashboard_acl table row,
// decoupled from the live models package.
type dashboardACL struct {
	// nolint:stylecheck
	Id          int64
	OrgID       int64 `xorm:"org_id"`
	DashboardID int64 `xorm:"dashboard_id"`

	UserID int64 `xorm:"user_id"`
	TeamID int64 `xorm:"team_id"`
	Role   *roleType // pointer to be nullable
	Permission permissionType

	Created time.Time
	Updated time.Time
}

// TableName tells xorm which table this struct maps to.
func (p dashboardACL) TableName() string { return "dashboard_acl" }
||||
|
||||
// folderHelper bundles the xorm session and migrator used by the
// folder-related migration helpers below.
type folderHelper struct {
	sess *xorm.Session
	mg   *migrator.Migrator
}
||||
|
||||
// getOrCreateGeneralFolder returns the general folder under the specific organisation
|
||||
// If the general folder does not exist it creates it.
|
||||
func (m *folderHelper) getOrCreateGeneralFolder(orgID int64) (*dashboard, error) { |
||||
// there is a unique constraint on org_id, folder_id, title
|
||||
// there are no nested folders so the parent folder id is always 0
|
||||
dashboard := dashboard{OrgId: orgID, FolderId: 0, Title: GENERAL_FOLDER} |
||||
has, err := m.sess.Get(&dashboard) |
||||
if err != nil { |
||||
return nil, err |
||||
} else if !has { |
||||
// create folder
|
||||
return m.createGeneralFolder(orgID) |
||||
} |
||||
return &dashboard, nil |
||||
} |
||||
|
||||
// createGeneralFolder creates the General folder for the given organisation.
func (m *folderHelper) createGeneralFolder(orgID int64) (*dashboard, error) {
	return m.createFolder(orgID, GENERAL_FOLDER)
}
||||
|
||||
// returns the folder of the given dashboard (if exists)
|
||||
func (m *folderHelper) getFolder(dash dashboard, da dashAlert) (dashboard, error) { |
||||
// get folder if exists
|
||||
folder := dashboard{} |
||||
if dash.FolderId > 0 { |
||||
exists, err := m.sess.Where("id=?", dash.FolderId).Get(&folder) |
||||
if err != nil { |
||||
return folder, fmt.Errorf("failed to get folder %d: %w", dash.FolderId, err) |
||||
} |
||||
if !exists { |
||||
return folder, fmt.Errorf("folder with id %v not found", dash.FolderId) |
||||
} |
||||
if !folder.IsFolder { |
||||
return folder, fmt.Errorf("id %v is a dashboard not a folder", dash.FolderId) |
||||
} |
||||
} |
||||
return folder, nil |
||||
} |
||||
|
||||
// createFolder creates a new folder dashboard row plus its initial
// dashboard_version entry.
// Based on sqlstore.saveDashboard(); it should be called from inside a
// transaction since it performs two dependent inserts.
func (m *folderHelper) createFolder(orgID int64, title string) (*dashboard, error) {
	cmd := saveFolderCommand{
		OrgId:    orgID,
		FolderId: 0,
		IsFolder: true,
		Dashboard: simplejson.NewFromAny(map[string]any{
			"title": title,
		}),
	}
	dash := cmd.getDashboardModel()
	dash.setUid(util.GenerateShortUID())

	// Remember the pre-save version for the version entry, then stamp the
	// new folder as version 1 created/updated by the migration user.
	parentVersion := dash.Version
	dash.setVersion(1)
	dash.Created = time.Now()
	dash.CreatedBy = FOLDER_CREATED_BY
	dash.Updated = time.Now()
	dash.UpdatedBy = FOLDER_CREATED_BY
	metrics.MApiDashboardInsert.Inc()

	if _, err := m.sess.Insert(dash); err != nil {
		return nil, err
	}

	dashVersion := &dashver.DashboardVersion{
		DashboardID:   dash.Id,
		ParentVersion: parentVersion,
		RestoredFrom:  cmd.RestoredFrom,
		Version:       dash.Version,
		Created:       time.Now(),
		CreatedBy:     dash.UpdatedBy,
		Message:       cmd.Message,
		Data:          dash.Data,
	}

	// insert version entry
	if _, err := m.sess.Insert(dashVersion); err != nil {
		return nil, err
	}
	return dash, nil
}
||||
|
||||
// setACL inserts the given ACL items for a folder and flips the folder's
// has_acl flag. Duplicate user/team permissions (which arise when folder
// permissions are inherited by dashboards) are collapsed to the highest
// permission per user/team before inserting.
// Based on SQLStore.UpdateDashboardACL(); it should be called from inside a
// transaction.
func (m *folderHelper) setACL(orgID int64, dashboardID int64, items []*dashboardACL) error {
	if dashboardID <= 0 {
		return fmt.Errorf("folder id must be greater than zero for a folder permission")
	}

	// userPermissionsMap is a map keeping the highest permission per user
	// for handling conflicting inherited (folder) and non-inherited (dashboard) user permissions
	userPermissionsMap := make(map[int64]*dashboardACL, len(items))
	// teamPermissionsMap is a map keeping the highest permission per team
	// for handling conflicting inherited (folder) and non-inherited (dashboard) team permissions
	teamPermissionsMap := make(map[int64]*dashboardACL, len(items))
	for _, item := range items {
		if item.UserID != 0 {
			acl, ok := userPermissionsMap[item.UserID]
			if !ok {
				userPermissionsMap[item.UserID] = item
			} else {
				if item.Permission > acl.Permission {
					// the higher permission wins
					userPermissionsMap[item.UserID] = item
				}
			}
		}

		if item.TeamID != 0 {
			acl, ok := teamPermissionsMap[item.TeamID]
			if !ok {
				teamPermissionsMap[item.TeamID] = item
			} else {
				if item.Permission > acl.Permission {
					// the higher permission wins
					teamPermissionsMap[item.TeamID] = item
				}
			}
		}
	}

	type keyType struct {
		UserID     int64 `xorm:"user_id"`
		TeamID     int64 `xorm:"team_id"`
		Role       roleType
		Permission permissionType
	}
	// seen keeps track of inserted permissions to avoid duplicates (due to inheritance)
	seen := make(map[keyType]struct{}, len(items))
	for _, item := range items {
		// Every ACL row must name a user, a team, or a valid role.
		if item.UserID == 0 && item.TeamID == 0 && (item.Role == nil || !item.Role.IsValid()) {
			return dashboards.ErrDashboardACLInfoMissing
		}

		// ignore duplicate user permissions: only the winning item (the one
		// stored in the map above) is inserted
		if item.UserID != 0 {
			acl, ok := userPermissionsMap[item.UserID]
			if ok {
				if acl.Id != item.Id {
					continue
				}
			}
		}

		// ignore duplicate team permissions
		if item.TeamID != 0 {
			acl, ok := teamPermissionsMap[item.TeamID]
			if ok {
				if acl.Id != item.Id {
					continue
				}
			}
		}

		key := keyType{UserID: item.UserID, TeamID: item.TeamID, Role: "", Permission: item.Permission}
		if item.Role != nil {
			key.Role = *item.Role
		}
		if _, ok := seen[key]; ok {
			continue
		}

		// unset Id so that the new record will get a different one
		item.Id = 0
		item.OrgID = orgID
		item.DashboardID = dashboardID
		item.Created = time.Now()
		item.Updated = time.Now()

		// user_id/team_id must be stored as NULL (not 0) when absent.
		m.sess.Nullable("user_id", "team_id")
		if _, err := m.sess.Insert(item); err != nil {
			return err
		}
		seen[key] = struct{}{}
	}

	// Update dashboard HasACL flag
	dashboard := dashboards.Dashboard{HasACL: true}
	_, err := m.sess.Cols("has_acl").Where("id=?", dashboardID).Update(&dashboard)
	return err
}
||||
|
||||
// getACL returns the distinct ACL entries that apply to a dashboard: its own
// entries, its parent folder's entries, and — when neither the dashboard nor
// its folder has an ACL — the org-wide defaults stored under org_id = -1.
// Based on SQLStore.GetDashboardACLInfoList().
func (m *folderHelper) getACL(orgID, dashboardID int64) ([]*dashboardACL, error) {
	var err error

	// BooleanStr renders false per-dialect (e.g. 0 vs FALSE) for the raw SQL.
	falseStr := m.mg.Dialect.BooleanStr(false)

	result := make([]*dashboardACL, 0)
	rawSQL := `
	-- get distinct permissions for the dashboard and its parent folder
	SELECT DISTINCT
		da.id,
		da.user_id,
		da.team_id,
		da.permission,
		da.role
	FROM dashboard as d
		LEFT JOIN dashboard folder on folder.id = d.folder_id
		LEFT JOIN dashboard_acl AS da ON
		da.dashboard_id = d.id OR
		da.dashboard_id = d.folder_id OR
		(
			-- include default permissions --
			da.org_id = -1 AND (
			  (folder.id IS NOT NULL AND folder.has_acl = ` + falseStr + `) OR
			  (folder.id IS NULL AND d.has_acl = ` + falseStr + `)
			)
		)
	WHERE d.org_id = ? AND d.id = ? AND da.id IS NOT NULL
	ORDER BY da.id ASC
	`
	err = m.sess.SQL(rawSQL, orgID, dashboardID).Find(&result)
	return result, err
}
||||
|
||||
// getOrgsThatHaveFolders returns a unique list of organization ID that have at least one folder
|
||||
func (m *folderHelper) getOrgsIDThatHaveFolders() (map[int64]struct{}, error) { |
||||
// get folder if exists
|
||||
var rows []int64 |
||||
err := m.sess.Table(&dashboard{}).Where("is_folder=?", true).Distinct("org_id").Find(&rows) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
result := make(map[int64]struct{}, len(rows)) |
||||
for _, s := range rows { |
||||
result[s] = struct{}{} |
||||
} |
||||
return result, nil |
||||
} |
@ -0,0 +1,209 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"time" |
||||
|
||||
"xorm.io/xorm" |
||||
|
||||
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
) |
||||
|
||||
// UpdateRuleGroupIndexMigration registers the migration that backfills the
// rule_group_idx field so rules within each group get sequential, 1-based
// indexes.
func UpdateRuleGroupIndexMigration(mg *migrator.Migrator) {
	mg.AddMigration("update group index for alert rules", &updateRulesOrderInGroup{})
}
||||
|
||||
// updateRulesOrderInGroup is the code-based migration that assigns
// rule_group_idx values; see Exec for the actual work.
type updateRulesOrderInGroup struct {
	migrator.MigrationBase
}

// SQL returns the codeMigration marker (declared elsewhere in this package)
// used for code-based migrations.
func (c updateRulesOrderInGroup) SQL(migrator.Dialect) string {
	return codeMigration
}
||||
|
||||
// Exec loads every alert rule, groups them by (org, namespace, group), and
// rewrites rule_group_idx so rules inside each group are numbered 1..n in
// ascending id order. Each updated rule also gets its version bumped and a
// matching alert_rule_version row inserted.
func (c updateRulesOrderInGroup) Exec(sess *xorm.Session, migrator *migrator.Migrator) error {
	var rows []*alertRule
	// Ascending id order makes the assigned in-group indexes deterministic.
	if err := sess.Table(alertRule{}).Asc("id").Find(&rows); err != nil {
		return fmt.Errorf("failed to read the list of alert rules: %w", err)
	}

	if len(rows) == 0 {
		migrator.Logger.Debug("No rules to migrate.")
		return nil
	}

	groups := map[ngmodels.AlertRuleGroupKey][]*alertRule{}

	for _, row := range rows {
		groupKey := ngmodels.AlertRuleGroupKey{
			OrgID:        row.OrgID,
			NamespaceUID: row.NamespaceUID,
			RuleGroup:    row.RuleGroup,
		}
		groups[groupKey] = append(groups[groupKey], row)
	}

	toUpdate := make([]*alertRule, 0, len(rows))

	// Only rules whose index is already correct are skipped; everything else
	// is re-numbered with a 1-based position within its group.
	for _, rules := range groups {
		for i, rule := range rules {
			if rule.RuleGroupIndex == i+1 {
				continue
			}
			rule.RuleGroupIndex = i + 1
			toUpdate = append(toUpdate, rule)
		}
	}

	if len(toUpdate) == 0 {
		migrator.Logger.Debug("No rules to upgrade group index")
		return nil
	}

	updated := time.Now()
	versions := make([]interface{}, 0, len(toUpdate))

	for _, rule := range toUpdate {
		rule.Updated = updated
		// Snapshot the rule, then point the version entry at the new
		// rule version before bumping the rule itself.
		version := rule.makeVersion()
		version.Version = rule.Version + 1
		version.ParentVersion = rule.Version
		rule.Version++
		_, err := sess.ID(rule.ID).Cols("version", "updated", "rule_group_idx").Update(rule)
		if err != nil {
			migrator.Logger.Error("failed to update alert rule", "uid", rule.UID, "err", err)
			return fmt.Errorf("unable to update alert rules with group index: %w", err)
		}
		migrator.Logger.Debug("updated group index for alert rule", "rule_uid", rule.UID)
		versions = append(versions, version)
	}
	// Version rows are inserted in one batch after all rules are updated.
	_, err := sess.Insert(versions...)
	if err != nil {
		migrator.Logger.Error("failed to insert changes to alert_rule_version", "err", err)
		return fmt.Errorf("unable to update alert rules with group index: %w", err)
	}
	return nil
}
||||
|
||||
// alertRule is a migration-local mapping of the alert_rule table, decoupled
// from the live ngalert models.
type alertRule struct {
	ID              int64 `xorm:"pk autoincr 'id'"`
	OrgID           int64 `xorm:"org_id"`
	Title           string
	Condition       string
	Data            []alertQuery
	IntervalSeconds int64
	Version         int64
	UID             string `xorm:"uid"`
	NamespaceUID    string `xorm:"namespace_uid"`
	RuleGroup       string
	RuleGroupIndex  int `xorm:"rule_group_idx"` // 1-based position within the group; backfilled by updateRulesOrderInGroup
	NoDataState     string
	ExecErrState    string
	For             duration
	Updated         time.Time
	Annotations     map[string]string
	Labels          map[string]string
	IsPaused        bool
}
||||
|
||||
// alertRuleVersion is a migration-local mapping of the alert_rule_version
// table; one row is inserted per rule change (see Exec).
type alertRuleVersion struct {
	RuleOrgID        int64  `xorm:"rule_org_id"`
	RuleUID          string `xorm:"rule_uid"`
	RuleNamespaceUID string `xorm:"rule_namespace_uid"`
	RuleGroup        string
	RuleGroupIndex   int `xorm:"rule_group_idx"`
	ParentVersion    int64
	RestoredFrom     int64
	Version          int64

	Created         time.Time
	Title           string
	Condition       string
	Data            []alertQuery
	IntervalSeconds int64
	NoDataState     string
	ExecErrState    string
	// ideally this field should have been apimodels.ApiDuration
	// but this is currently not possible because of circular dependencies
	For         duration
	Annotations map[string]string
	Labels      map[string]string
	IsPaused    bool
}
||||
|
||||
// makeVersion builds an alertRuleVersion snapshot of the rule. Version and
// ParentVersion are initialized to 1 and 0; callers overwrite them as needed
// (see updateRulesOrderInGroup.Exec).
// NOTE(review): Labels is set to an empty map rather than copied from the
// rule — confirm this is intentional.
func (a *alertRule) makeVersion() *alertRuleVersion {
	return &alertRuleVersion{
		RuleOrgID:        a.OrgID,
		RuleUID:          a.UID,
		RuleNamespaceUID: a.NamespaceUID,
		RuleGroup:        a.RuleGroup,
		RuleGroupIndex:   a.RuleGroupIndex,
		ParentVersion:    0,
		RestoredFrom:     0,
		Version:          1,

		Created:         time.Now().UTC(),
		Title:           a.Title,
		Condition:       a.Condition,
		Data:            a.Data,
		IntervalSeconds: a.IntervalSeconds,
		NoDataState:     a.NoDataState,
		ExecErrState:    a.ExecErrState,
		For:             a.For,
		Annotations:     a.Annotations,
		Labels:          map[string]string{},
		IsPaused:        a.IsPaused,
	}
}
||||
|
||||
// alertQuery is a migration-local copy of the ngalert AlertQuery model.
type alertQuery struct {
	// RefID is the unique identifier of the query, set by the frontend call.
	RefID string `json:"refId"`

	// QueryType is an optional identifier for the type of query.
	// It can be used to distinguish different types of queries.
	QueryType string `json:"queryType"`

	// RelativeTimeRange is the relative Start and End of the query as sent by the frontend.
	RelativeTimeRange relativeTimeRange `json:"relativeTimeRange"`

	// DatasourceUID identifies the datasource the query runs against.
	DatasourceUID string `json:"datasourceUid"`

	// JSON is the raw JSON query and includes the above properties as well as custom properties.
	Model json.RawMessage `json:"model"`
}
||||
|
||||
// relativeTimeRange is the per-query start and end time for requests,
// expressed as offsets relative to "now".
type relativeTimeRange struct {
	From duration `json:"from"`
	To   duration `json:"to"`
}
||||
|
||||
// duration is a time.Duration that marshals to/from JSON as a number of
// seconds, matching the ngalert wire format.
type duration time.Duration

// String renders the duration in Go's standard form (e.g. "1m30s").
func (d duration) String() string {
	return time.Duration(d).String()
}

// MarshalJSON encodes the duration as a JSON number of seconds.
func (d duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(time.Duration(d).Seconds())
}

// UnmarshalJSON decodes a JSON number of seconds; any other JSON type is an
// error. NOTE(review): fractional seconds are truncated (the float is
// converted to a whole-second count), so Marshal/Unmarshal do not round-trip
// sub-second values — preserved here as-is since this is migration code.
func (d *duration) UnmarshalJSON(b []byte) error {
	var raw interface{}
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	secs, ok := raw.(float64)
	if !ok {
		return fmt.Errorf("invalid duration %v", raw)
	}
	*d = duration(time.Duration(secs) * time.Second)
	return nil
}
@ -1,60 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"os" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/setting" |
||||
"github.com/grafana/grafana/pkg/util" |
||||
) |
||||
|
||||
// SecureJsonData is used to store encrypted data (for example in the
// data_source table). Only the values are encrypted; keys stay plaintext.
type SecureJsonData map[string][]byte

// seclogger reports encryption/decryption failures for this package.
var seclogger = log.New("securejsondata")
||||
|
||||
// DecryptedValue returns a single decrypted value from SecureJsonData.
// Like a plain map access, the second return value is true when the key
// exists and false when it does not.
// Note: a decryption failure logs the error and terminates the process.
func (s SecureJsonData) DecryptedValue(key string) (string, bool) {
	if value, ok := s[key]; ok {
		decryptedData, err := util.Decrypt(value, setting.SecretKey)
		if err != nil {
			seclogger.Error(err.Error())
			os.Exit(1)
		}
		return string(decryptedData), true
	}
	return "", false
}
||||
|
||||
// Decrypt returns map of the same type but where the all the values are decrypted. Opposite of what
|
||||
// GetEncryptedJsonData is doing.
|
||||
func (s SecureJsonData) Decrypt() map[string]string { |
||||
decrypted := make(map[string]string) |
||||
for key, data := range s { |
||||
decryptedData, err := util.Decrypt(data, setting.SecretKey) |
||||
if err != nil { |
||||
seclogger.Error(err.Error()) |
||||
os.Exit(1) |
||||
} |
||||
|
||||
decrypted[key] = string(decryptedData) |
||||
} |
||||
return decrypted |
||||
} |
||||
|
||||
// GetEncryptedJsonData returns map where all keys are encrypted.
|
||||
func GetEncryptedJsonData(sjd map[string]string) SecureJsonData { |
||||
encrypted := make(SecureJsonData) |
||||
for key, data := range sjd { |
||||
encryptedData, err := util.Encrypt([]byte(data), setting.SecretKey) |
||||
if err != nil { |
||||
seclogger.Error(err.Error()) |
||||
os.Exit(1) |
||||
} |
||||
|
||||
encrypted[key] = encryptedData |
||||
} |
||||
return encrypted |
||||
} |
@ -1,25 +0,0 @@ |
||||
package ualert |
||||
|
||||
import ( |
||||
"testing" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log" |
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" |
||||
"github.com/prometheus/alertmanager/silence/silencepb" |
||||
) |
||||
|
||||
// newTestMigration generates an empty migration to use in tests: a migrator
// with only a logger, an empty seen-UID set, and no recorded silences.
func newTestMigration(t *testing.T) *migration {
	t.Helper()

	return &migration{
		mg: &migrator.Migrator{
			Logger: log.New("test"),
		},
		seenUIDs: uidSet{
			set: make(map[string]struct{}),
		},
		silences: make(map[int64][]*silencepb.MeshSilence),
	}
}
Loading…
Reference in new issue