mirror of https://github.com/grafana/grafana
Alerting: Opsgenie notification channel (#34418)
* Alerting: Opsgenie notification channel

  This translates the Opsgenie notification channel from the old alerting system to the new alerting system, with a few changes:
  - The tag system has been replaced in favour of annotations.
  - TBD
  - TBD

  Signed-off-by: Josue Abreu <josue@grafana.com>

* Fix template URL

* Bugfix: don't send resolved when autoClose is false

  Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>

* Fix integration tests

  Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>

* Fix URLs in all other channels

  Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>

Co-authored-by: Ganesh Vernekar <ganeshvern@gmail.com>
parent 615de9bf34
commit 7b04278834
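For orientation before the diff: NewOpsgenieNotifier below reads the channel settings sketched here. This is an illustrative example, not taken from the PR itself; only "apiKey" is required, the other values are the defaults the code falls back to, and valid "sendTagsAs" values are "tags", "details", or "both".

    {
      "apiKey": "<opsgenie-api-key>",
      "apiUrl": "https://api.opsgenie.com/v2/alerts",
      "autoClose": true,
      "overridePriority": true,
      "sendTagsAs": "tags"
    }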
@@ -0,0 +1,227 @@
package channels

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"

	gokit_log "github.com/go-kit/kit/log"
	"github.com/prometheus/alertmanager/notify"
	"github.com/prometheus/alertmanager/template"
	"github.com/prometheus/alertmanager/types"
	"github.com/prometheus/common/model"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/alerting"
	old_notifiers "github.com/grafana/grafana/pkg/services/alerting/notifiers"
	"github.com/grafana/grafana/pkg/services/ngalert/logging"
)

const (
	OpsgenieSendTags    = "tags"
	OpsgenieSendDetails = "details"
	OpsgenieSendBoth    = "both"
)

var (
	OpsgenieAlertURL = "https://api.opsgenie.com/v2/alerts"
	ValidPriorities  = map[string]bool{"P1": true, "P2": true, "P3": true, "P4": true, "P5": true}
)

// OpsgenieNotifier is responsible for sending alert notifications to Opsgenie.
type OpsgenieNotifier struct {
	old_notifiers.NotifierBase
	APIKey           string
	APIUrl           string
	AutoClose        bool
	OverridePriority bool
	SendTagsAs       string
	tmpl             *template.Template
	log              log.Logger
}

// NewOpsgenieNotifier is the constructor for the Opsgenie notifier.
func NewOpsgenieNotifier(model *NotificationChannelConfig, t *template.Template) (*OpsgenieNotifier, error) {
	autoClose := model.Settings.Get("autoClose").MustBool(true)
	overridePriority := model.Settings.Get("overridePriority").MustBool(true)
	apiKey := model.DecryptedValue("apiKey", model.Settings.Get("apiKey").MustString())
	apiURL := model.Settings.Get("apiUrl").MustString()
	if apiKey == "" {
		return nil, alerting.ValidationError{Reason: "Could not find api key property in settings"}
	}
	if apiURL == "" {
		apiURL = OpsgenieAlertURL
	}

	sendTagsAs := model.Settings.Get("sendTagsAs").MustString(OpsgenieSendTags)
	if sendTagsAs != OpsgenieSendTags && sendTagsAs != OpsgenieSendDetails && sendTagsAs != OpsgenieSendBoth {
		return nil, alerting.ValidationError{
			Reason: fmt.Sprintf("Invalid value for sendTagsAs: %q", sendTagsAs),
		}
	}

	return &OpsgenieNotifier{
		NotifierBase: old_notifiers.NewNotifierBase(&models.AlertNotification{
			Uid:                   model.UID,
			Name:                  model.Name,
			Type:                  model.Type,
			DisableResolveMessage: model.DisableResolveMessage,
			Settings:              model.Settings,
		}),
		APIKey:           apiKey,
		APIUrl:           apiURL,
		AutoClose:        autoClose,
		OverridePriority: overridePriority,
		SendTagsAs:       sendTagsAs,
		tmpl:             t,
		log:              log.New("alerting.notifier." + model.Name),
	}, nil
}

// Notify sends an alert notification to Opsgenie.
func (on *OpsgenieNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
	on.log.Debug("Executing Opsgenie notification", "notification", on.Name)

	alerts := types.Alerts(as...)
	if alerts.Status() == model.AlertResolved && !on.SendResolved() {
		on.log.Debug("Not sending a trigger to Opsgenie", "status", alerts.Status(), "auto resolve", on.SendResolved())
		return true, nil
	}

	bodyJSON, url, err := on.buildOpsgenieMessage(ctx, alerts, as)
	if err != nil {
		return false, fmt.Errorf("build Opsgenie message: %w", err)
	}

	if url == "" {
		// Resolved alert with no auto close.
		// Hence skip sending anything.
		return true, nil
	}

	body, err := json.Marshal(bodyJSON)
	if err != nil {
		return false, fmt.Errorf("marshal json: %w", err)
	}

	cmd := &models.SendWebhookSync{
		Url:        url,
		Body:       string(body),
		HttpMethod: http.MethodPost,
		HttpHeader: map[string]string{
			"Content-Type":  "application/json",
			"Authorization": fmt.Sprintf("GenieKey %s", on.APIKey),
		},
	}

	if err := bus.DispatchCtx(ctx, cmd); err != nil {
		return false, fmt.Errorf("send notification to Opsgenie: %w", err)
	}

	return true, nil
}

func (on *OpsgenieNotifier) buildOpsgenieMessage(ctx context.Context, alerts model.Alerts, as []*types.Alert) (payload *simplejson.Json, apiURL string, err error) {
	key, err := notify.ExtractGroupKey(ctx)
	if err != nil {
		return nil, "", err
	}

	var (
		alias    = key.Hash()
		bodyJSON = simplejson.New()
		details  = simplejson.New()
	)

	if alerts.Status() == model.AlertResolved {
		// For resolved notification, we only need the source.
		// Don't need to run other templates.
		if on.AutoClose {
			bodyJSON := simplejson.New()
			bodyJSON.Set("source", "Grafana")
			apiURL = fmt.Sprintf("%s/%s/close?identifierType=alias", on.APIUrl, alias)
			return bodyJSON, apiURL, nil
		}
		return nil, "", nil
	}

	ruleURL, err := joinUrlPath(on.tmpl.ExternalURL.String(), "/alerting/list")
	if err != nil {
		return nil, "", err
	}

	data := notify.GetTemplateData(ctx, on.tmpl, as, gokit_log.NewLogfmtLogger(logging.NewWrapper(on.log)))
	var tmplErr error
	tmpl := notify.TmplText(on.tmpl, data, &tmplErr)

	title := tmpl(`{{ template "default.title" . }}`)
	description := fmt.Sprintf(
		"%s\n%s\n\n%s",
		tmpl(`{{ template "default.title" . }}`),
		ruleURL,
		tmpl(`{{ template "default.message" . }}`),
	)

	var priority string

	// In the new alerting system we've moved away from the grafana-tags. Instead,
	// annotations on the rule itself should be used.
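	// For example, a rule annotation such as og_priority: "P2" is templated and
	// forwarded like any other annotation below; because its value is one of
	// P1-P5, it additionally sets the Opsgenie "priority" field when
	// OverridePriority is enabled (the default).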
	annotations := make(map[string]string, len(data.CommonAnnotations))
	for k, v := range data.CommonAnnotations {
		annotations[k] = tmpl(v)

		if k == "og_priority" {
			if ValidPriorities[v] {
				priority = v
			}
		}
	}

	bodyJSON.Set("message", title)
	bodyJSON.Set("source", "Grafana")
	bodyJSON.Set("alias", alias)
	bodyJSON.Set("description", description)
	details.Set("url", ruleURL)

	if on.sendDetails() {
		for k, v := range annotations {
			details.Set(k, v)
		}
	}

	tags := make([]string, 0, len(annotations))
	if on.sendTags() {
		for k, v := range annotations {
			tags = append(tags, fmt.Sprintf("%s:%s", k, v))
		}
	}

	if priority != "" && on.OverridePriority {
		bodyJSON.Set("priority", priority)
	}

	bodyJSON.Set("tags", tags)
	bodyJSON.Set("details", details)
	apiURL = on.APIUrl

	if tmplErr != nil {
		return nil, "", fmt.Errorf("failed to template Opsgenie message: %w", tmplErr)
	}

	return bodyJSON, apiURL, err
}

func (on *OpsgenieNotifier) SendResolved() bool {
	return !on.GetDisableResolveMessage()
}

func (on *OpsgenieNotifier) sendDetails() bool {
	return on.SendTagsAs == OpsgenieSendDetails || on.SendTagsAs == OpsgenieSendBoth
}

func (on *OpsgenieNotifier) sendTags() bool {
	return on.SendTagsAs == OpsgenieSendTags || on.SendTagsAs == OpsgenieSendBoth
}
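In short, the notifier ends up dispatching one of two HTTP requests through the webhook bus. A sketch assuming the default apiUrl, with <alias> and <apiKey> as placeholders:

    POST https://api.opsgenie.com/v2/alerts                                       (firing)
    POST https://api.opsgenie.com/v2/alerts/<alias>/close?identifierType=alias    (resolved, autoClose enabled)

Both requests carry Content-Type: application/json and Authorization: GenieKey <apiKey> headers; when the alert group is resolved and autoClose is disabled, nothing is sent at all.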
@@ -0,0 +1,206 @@
package channels

import (
	"context"
	"net/url"
	"testing"
	"time"

	"github.com/prometheus/alertmanager/notify"
	"github.com/prometheus/alertmanager/types"
	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/alerting"
)

func TestOpsgenieNotifier(t *testing.T) {
	tmpl := templateForTests(t)

	externalURL, err := url.Parse("http://localhost")
	require.NoError(t, err)
	tmpl.ExternalURL = externalURL

	cases := []struct {
		name         string
		settings     string
		alerts       []*types.Alert
		expMsg       string
		expInitError error
		expMsgError  error
	}{
		{
			name:     "Default config with one alert",
			settings: `{"apiKey": "abcdefgh0123456789"}`,
			alerts: []*types.Alert{
				{
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
						Annotations: model.LabelSet{"ann1": "annv1"},
					},
				},
			},
			expMsg: `{
				"alias": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
				"description": "[FIRING:1] (val1)\nhttp://localhost/alerting/list\n\n\n**Firing**\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSource: \n\n\n\n\n",
				"details": {
					"url": "http://localhost/alerting/list"
				},
				"message": "[FIRING:1] (val1)",
				"source": "Grafana",
				"tags": ["ann1:annv1"]
			}`,
		},
		{
			name: "Default config with one alert and send tags as tags",
			settings: `{
				"apiKey": "abcdefgh0123456789",
				"sendTagsAs": "tags"
			}`,
			alerts: []*types.Alert{
				{
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
						Annotations: model.LabelSet{"ann1": "annv1"},
					},
				},
			},
			expMsg: `{
				"alias": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
				"description": "[FIRING:1] (val1)\nhttp://localhost/alerting/list\n\n\n**Firing**\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSource: \n\n\n\n\n",
				"details": {
					"url": "http://localhost/alerting/list"
				},
				"message": "[FIRING:1] (val1)",
				"source": "Grafana",
				"tags": ["ann1:annv1"]
			}`,
		},
		{
			name: "Default config with one alert and send tags as details",
			settings: `{
				"apiKey": "abcdefgh0123456789",
				"sendTagsAs": "details"
			}`,
			alerts: []*types.Alert{
				{
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
						Annotations: model.LabelSet{"ann1": "annv1"},
					},
				},
			},
			expMsg: `{
				"alias": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
				"description": "[FIRING:1] (val1)\nhttp://localhost/alerting/list\n\n\n**Firing**\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSource: \n\n\n\n\n",
				"details": {
					"ann1": "annv1",
					"url": "http://localhost/alerting/list"
				},
				"message": "[FIRING:1] (val1)",
				"source": "Grafana",
				"tags": []
			}`,
		},
		{
			name: "Custom config with multiple alerts and send tags as both details and tag",
			settings: `{
				"apiKey": "abcdefgh0123456789",
				"sendTagsAs": "both"
			}`,
			alerts: []*types.Alert{
				{
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
						Annotations: model.LabelSet{"ann1": "annv1"},
					},
				}, {
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
						Annotations: model.LabelSet{"ann1": "annv1"},
					},
				},
			},
			expMsg: `{
				"alias": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
				"description": "[FIRING:2] \nhttp://localhost/alerting/list\n\n\n**Firing**\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSource: \nLabels:\n - alertname = alert1\n - lbl1 = val2\nAnnotations:\n - ann1 = annv1\nSource: \n\n\n\n\n",
				"details": {
					"ann1": "annv1",
					"url": "http://localhost/alerting/list"
				},
				"message": "[FIRING:2] ",
				"source": "Grafana",
				"tags": ["ann1:annv1"]
			}`,
			expInitError: nil,
			expMsgError:  nil,
		},
		{
			name:     "Resolved is not sent when auto close is false",
			settings: `{"apiKey": "abcdefgh0123456789", "autoClose": false}`,
			alerts: []*types.Alert{
				{
					Alert: model.Alert{
						Labels:      model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
						Annotations: model.LabelSet{"ann1": "annv1"},
						EndsAt:      time.Now().Add(-1 * time.Minute),
					},
				},
			},
		},
		{
			name:         "Error when incorrect settings",
			settings:     `{}`,
			expInitError: alerting.ValidationError{Reason: "Could not find api key property in settings"},
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			settingsJSON, err := simplejson.NewJson([]byte(c.settings))
			require.NoError(t, err)

			m := &NotificationChannelConfig{
				Name:     "opsgenie_testing",
				Type:     "opsgenie",
				Settings: settingsJSON,
			}

			pn, err := NewOpsgenieNotifier(m, tmpl)
			if c.expInitError != nil {
				require.Error(t, err)
				require.Equal(t, c.expInitError.Error(), err.Error())
				return
			}
			require.NoError(t, err)

			body := "<not-sent>"
			bus.AddHandlerCtx("test", func(ctx context.Context, webhook *models.SendWebhookSync) error {
				body = webhook.Body
				return nil
			})

			ctx := notify.WithGroupKey(context.Background(), "alertname")
			ctx = notify.WithGroupLabels(ctx, model.LabelSet{"alertname": ""})
			ok, err := pn.Notify(ctx, c.alerts...)
			if c.expMsgError != nil {
				require.False(t, ok)
				require.Error(t, err)
				require.Equal(t, c.expMsgError.Error(), err.Error())
				return
			}
			require.True(t, ok)
			require.NoError(t, err)

			if c.expMsg == "" {
				// No notification was expected.
				require.Equal(t, "<not-sent>", body)
			} else {
				require.JSONEq(t, c.expMsg, body)
			}
		})
	}
}