diff --git a/pkg/services/ngalert/state/historian/client.go b/pkg/services/ngalert/client/client.go
similarity index 91%
rename from pkg/services/ngalert/state/historian/client.go
rename to pkg/services/ngalert/client/client.go
index 3bbc4b713d0..90b080c0bc6 100644
--- a/pkg/services/ngalert/state/historian/client.go
+++ b/pkg/services/ngalert/client/client.go
@@ -1,4 +1,4 @@
-package historian
+package client
 
 import (
 	"context"
@@ -39,6 +39,11 @@ func (c TimedClient) Do(r *http.Request) (*http.Response, error) {
 	return TimeRequest(r.Context(), c.operationName(r), c.collector, c.client, r)
 }
 
+// RoundTrip implements the RoundTripper interface.
+func (c TimedClient) RoundTrip(r *http.Request) (*http.Response, error) {
+	return c.Do(r)
+}
+
 func (c TimedClient) operationName(r *http.Request) string {
 	operation, _ := r.Context().Value(OperationNameContextKey).(string)
 	if operation == "" {
diff --git a/pkg/services/ngalert/state/historian/client_test.go b/pkg/services/ngalert/client/client_test.go
similarity index 97%
rename from pkg/services/ngalert/state/historian/client_test.go
rename to pkg/services/ngalert/client/client_test.go
index 3538a6c1733..25d26576e66 100644
--- a/pkg/services/ngalert/state/historian/client_test.go
+++ b/pkg/services/ngalert/client/client_test.go
@@ -1,4 +1,4 @@
-package historian
+package client
 
 import (
 	"context"
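With `RoundTrip` added, a `TimedClient` satisfies both the package's `Requester` interface and `http.RoundTripper`, so the same instrumented value can be called directly or mounted as a transport. A minimal sketch under those assumptions — the `newTimedClients` helper and the histogram name are illustrative, not part of this change:

```go
package example

import (
	"net/http"

	"github.com/grafana/dskit/instrument"
	"github.com/grafana/grafana/pkg/services/ngalert/client"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

// newTimedClients is a hypothetical helper showing both uses of TimedClient.
func newTimedClients(reg prometheus.Registerer) (client.Requester, *http.Client) {
	latency := instrument.NewHistogramCollector(promauto.With(reg).NewHistogramVec(
		prometheus.HistogramOpts{
			Name: "example_request_duration_seconds",
			Help: "Example request latency histogram.",
		},
		instrument.HistogramCollectorBuckets, // dskit's standard label names
	))

	tc := client.NewTimedClient(http.DefaultClient, latency) // call tc.Do(...) directly...
	timed := &http.Client{Transport: tc}                     // ...or mount it as a transport.
	return tc, timed
}
```

Being a `RoundTripper` is what lets the go-openapi runtime in `remote/client/alertmanager.go` below use it without an `*http.Client` wrapper.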
diff --git a/pkg/services/ngalert/metrics/ngalert.go b/pkg/services/ngalert/metrics/ngalert.go
index e5f7df291e2..d03597f89b7 100644
--- a/pkg/services/ngalert/metrics/ngalert.go
+++ b/pkg/services/ngalert/metrics/ngalert.go
@@ -30,6 +30,7 @@ type NGAlert struct {
 	multiOrgAlertmanagerMetrics *MultiOrgAlertmanager
 	apiMetrics                  *API
 	historianMetrics            *Historian
+	remoteAlertmanagerMetrics   *RemoteAlertmanager
 }
 
 // NewNGAlert manages the metrics of all the alerting components.
@@ -41,6 +42,7 @@ func NewNGAlert(r prometheus.Registerer) *NGAlert {
 		multiOrgAlertmanagerMetrics: NewMultiOrgAlertmanagerMetrics(r),
 		apiMetrics:                  NewAPIMetrics(r),
 		historianMetrics:            NewHistorianMetrics(r, Subsystem),
+		remoteAlertmanagerMetrics:   NewRemoteAlertmanagerMetrics(r),
 	}
 }
 
@@ -63,3 +65,7 @@ func (ng *NGAlert) GetMultiOrgAlertmanagerMetrics() *MultiOrgAlertmanager {
 func (ng *NGAlert) GetHistorianMetrics() *Historian {
 	return ng.historianMetrics
 }
+
+func (ng *NGAlert) GetRemoteAlertmanagerMetrics() *RemoteAlertmanager {
+	return ng.remoteAlertmanagerMetrics
+}
diff --git a/pkg/services/ngalert/metrics/remote_alertmanager.go b/pkg/services/ngalert/metrics/remote_alertmanager.go
new file mode 100644
index 00000000000..5963f9facb6
--- /dev/null
+++ b/pkg/services/ngalert/metrics/remote_alertmanager.go
@@ -0,0 +1,84 @@
+package metrics
+
+import (
+	"github.com/grafana/dskit/instrument"
+	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/client_golang/prometheus/promauto"
+)
+
+const (
+	ModeRemoteSecondary = "remote_secondary"
+	ModeRemotePrimary   = "remote_primary"
+	ModeRemoteOnly      = "remote_only"
+)
+
+type RemoteAlertmanager struct {
+	Info                  *prometheus.GaugeVec
+	RequestLatency        *instrument.HistogramCollector
+	LastReadinessCheck    prometheus.Gauge
+	ConfigSyncsTotal      prometheus.Counter
+	ConfigSyncErrorsTotal prometheus.Counter
+	LastConfigSync        prometheus.Gauge
+	StateSyncsTotal       prometheus.Counter
+	StateSyncErrorsTotal  prometheus.Counter
+	LastStateSync         prometheus.Gauge
+}
+
+func NewRemoteAlertmanagerMetrics(r prometheus.Registerer) *RemoteAlertmanager {
+	return &RemoteAlertmanager{
+		Info: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_info",
+			Help:      "Information about the remote Alertmanager.",
+		}, []string{"mode"}),
+		RequestLatency: instrument.NewHistogramCollector(promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_latency_seconds",
+			Help:      "Histogram of request latencies to the remote Alertmanager.",
+		}, instrument.HistogramCollectorBuckets)),
+		LastReadinessCheck: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_readiness_check_timestamp_seconds",
+			Help:      "Timestamp of the last successful readiness check to the remote Alertmanager in seconds.",
+		}),
+		ConfigSyncsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_configuration_syncs_total",
+			Help:      "Total number of configuration syncs to the remote Alertmanager.",
+		}),
+		ConfigSyncErrorsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_configuration_sync_failures_total",
+			Help:      "Total number of failed attempts to sync configurations between Alertmanagers.",
+		}),
+		LastConfigSync: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_configuration_sync_timestamp_seconds",
+			Help:      "Timestamp of the last successful configuration sync to the remote Alertmanager in seconds.",
+		}),
+		StateSyncsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_state_syncs_total",
+			Help:      "Total number of state syncs to the remote Alertmanager.",
+		}),
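Assuming the package's existing `Namespace`/`Subsystem` constants are `grafana`/`alerting` (consistent with the other ngalert metrics, e.g. the `grafana_alerting_state_history_*` series visible in `loki_test.go` below), the new collectors surface with a `grafana_alerting_remote_alertmanager_*` prefix. A quick sketch of wiring them to a throwaway registry:

```go
package example

import (
	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/prometheus/client_golang/prometheus"
)

func newExampleMetrics() *metrics.RemoteAlertmanager {
	reg := prometheus.NewPedanticRegistry()
	m := metrics.NewRemoteAlertmanagerMetrics(reg)

	// Only the active mode's Info series is set to 1.
	m.Info.WithLabelValues(metrics.ModeRemoteSecondary).Set(1)
	m.ConfigSyncsTotal.Inc()

	// Gathering reg should now yield series such as:
	//   grafana_alerting_remote_alertmanager_info{mode="remote_secondary"} 1
	//   grafana_alerting_remote_alertmanager_configuration_syncs_total 1
	return m
}
```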
+		StateSyncErrorsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_state_sync_failures_total",
+			Help:      "Total number of failed attempts to sync state between Alertmanagers.",
+		}),
+		LastStateSync: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_state_sync_timestamp_seconds",
+			Help:      "Timestamp of the last successful state sync to the remote Alertmanager in seconds.",
+		}),
+	}
+}
diff --git a/pkg/services/ngalert/ngalert.go b/pkg/services/ngalert/ngalert.go
index 6277457d377..f11d1fade82 100644
--- a/pkg/services/ngalert/ngalert.go
+++ b/pkg/services/ngalert/ngalert.go
@@ -185,6 +185,9 @@ func (ng *AlertNG) init() error {
 	case remoteSecondary:
 		ng.Log.Debug("Starting Grafana with remote secondary mode enabled")
 
+		m := ng.Metrics.GetRemoteAlertmanagerMetrics()
+		m.Info.WithLabelValues(metrics.ModeRemoteSecondary).Set(1)
+
 		// This function will be used by the MOA to create new Alertmanagers.
 		override := notifier.WithAlertmanagerOverride(func(factoryFn notifier.OrgAlertmanagerFactory) notifier.OrgAlertmanagerFactory {
 			return func(ctx context.Context, orgID int64) (notifier.Alertmanager, error) {
@@ -195,7 +198,7 @@ func (ng *AlertNG) init() error {
 				}
 
 				// Create remote Alertmanager.
-				remoteAM, err := createRemoteAlertmanager(orgID, ng.Cfg.UnifiedAlerting.RemoteAlertmanager, ng.KVStore)
+				remoteAM, err := createRemoteAlertmanager(orgID, ng.Cfg.UnifiedAlerting.RemoteAlertmanager, ng.KVStore, m)
 				if err != nil {
 					moaLogger.Error("Failed to create remote Alertmanager, falling back to using only the internal one", "err", err)
 					return internalAM, nil
@@ -540,7 +543,7 @@ func ApplyStateHistoryFeatureToggles(cfg *setting.UnifiedAlertingStateHistorySet
 	}
 }
 
-func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSettings, kvstore kvstore.KVStore) (*remote.Alertmanager, error) {
+func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSettings, kvstore kvstore.KVStore, m *metrics.RemoteAlertmanager) (*remote.Alertmanager, error) {
 	externalAMCfg := remote.AlertmanagerConfig{
 		OrgID:             orgID,
 		URL:               amCfg.URL,
@@ -549,5 +552,5 @@ func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSetti
 	}
 	// We won't be handling files on disk, we can pass an empty string as workingDirPath.
 	stateStore := notifier.NewFileStore(orgID, kvstore, "")
-	return remote.NewAlertmanager(externalAMCfg, stateStore)
+	return remote.NewAlertmanager(externalAMCfg, stateStore, m)
 }
diff --git a/pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go b/pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go
index daa3e1a5020..a5a3f795e50 100644
--- a/pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go
+++ b/pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go
@@ -64,7 +64,8 @@ func TestMultiorgAlertmanager_RemoteSecondaryMode(t *testing.T) {
 		}
 		// We won't be handling files on disk, we can pass an empty string as workingDirPath.
 		stateStore := notifier.NewFileStore(orgID, kvStore, "")
-		remoteAM, err := remote.NewAlertmanager(externalAMCfg, stateStore)
+		m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+		remoteAM, err := remote.NewAlertmanager(externalAMCfg, stateStore, m)
 		require.NoError(t, err)
 
 		// Use both Alertmanager implementations in the forked Alertmanager.
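The `Set(1)` on the info gauge is straightforward to verify with client_golang's testutil package. A hedged test sketch, assuming the `grafana_alerting` prefix noted earlier:

```go
package example

import (
	"strings"
	"testing"

	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/require"
)

func TestRemoteSecondaryInfoGauge(t *testing.T) {
	reg := prometheus.NewRegistry()
	m := metrics.NewRemoteAlertmanagerMetrics(reg)
	m.Info.WithLabelValues(metrics.ModeRemoteSecondary).Set(1)

	expected := `
# HELP grafana_alerting_remote_alertmanager_info Information about the remote Alertmanager.
# TYPE grafana_alerting_remote_alertmanager_info gauge
grafana_alerting_remote_alertmanager_info{mode="remote_secondary"} 1
`
	require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(expected),
		"grafana_alerting_remote_alertmanager_info"))
}
```

Note that each test above builds its metrics against a fresh `prometheus.NewRegistry()`, which avoids duplicate-registration panics across test cases.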
diff --git a/pkg/services/ngalert/remote/alertmanager.go b/pkg/services/ngalert/remote/alertmanager.go
index 9d2d35d9ea8..f6d8f7b5f8a 100644
--- a/pkg/services/ngalert/remote/alertmanager.go
+++ b/pkg/services/ngalert/remote/alertmanager.go
@@ -10,6 +10,7 @@ import (
 	"github.com/go-openapi/strfmt"
 	"github.com/grafana/grafana/pkg/infra/log"
 	apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
 	remoteClient "github.com/grafana/grafana/pkg/services/ngalert/remote/client"
@@ -26,6 +27,7 @@ type stateStore interface {
 
 type Alertmanager struct {
 	log     log.Logger
+	metrics *metrics.RemoteAlertmanager
 	orgID   int64
 	ready   bool
 	sender  *sender.ExternalAlertmanager
@@ -59,7 +61,7 @@ func (cfg *AlertmanagerConfig) Validate() error {
 	return nil
 }
 
-func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, error) {
+func NewAlertmanager(cfg AlertmanagerConfig, store stateStore, metrics *metrics.RemoteAlertmanager) (*Alertmanager, error) {
 	if err := cfg.Validate(); err != nil {
 		return nil, err
 	}
@@ -76,7 +78,7 @@ func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, e
 		Password: cfg.BasicAuthPassword,
 		Logger:   logger,
 	}
-	mc, err := remoteClient.New(mcCfg)
+	mc, err := remoteClient.New(mcCfg, metrics)
 	if err != nil {
 		return nil, err
 	}
@@ -87,7 +89,7 @@
 		Password: cfg.BasicAuthPassword,
 		Logger:   logger,
 	}
-	amc, err := remoteClient.NewAlertmanager(amcCfg)
+	amc, err := remoteClient.NewAlertmanager(amcCfg, metrics)
 	if err != nil {
 		return nil, err
 	}
@@ -104,13 +106,17 @@
 		return nil, err
 	}
 
+	// Initialize LastReadinessCheck so it's present even if the check fails.
+	metrics.LastReadinessCheck.Set(0)
+
 	return &Alertmanager{
+		amClient:    amc,
 		log:         logger,
+		metrics:     metrics,
 		mimirClient: mc,
+		orgID:       cfg.OrgID,
 		state:       store,
-		amClient:    amc,
 		sender:      s,
-		orgID:       cfg.OrgID,
 		tenantID:    cfg.TenantID,
 		url:         cfg.URL,
 	}, nil
@@ -159,6 +165,7 @@ func (am *Alertmanager) checkReadiness(ctx context.Context) error {
 	if ready {
 		am.log.Debug("Alertmanager readiness check successful")
+		am.metrics.LastReadinessCheck.SetToCurrentTime()
 		am.ready = true
 		return nil
 	}
@@ -170,6 +177,7 @@
 // If not, it sends the configuration to the remote Alertmanager.
 func (am *Alertmanager) CompareAndSendConfiguration(ctx context.Context, config *models.AlertConfiguration) error {
 	if am.shouldSendConfig(ctx, config) {
+		am.metrics.ConfigSyncsTotal.Inc()
 		if err := am.mimirClient.CreateGrafanaAlertmanagerConfig(
 			ctx,
 			config.AlertmanagerConfiguration,
@@ -178,8 +186,10 @@
 			config.CreatedAt,
 			config.Default,
 		); err != nil {
+			am.metrics.ConfigSyncErrorsTotal.Inc()
 			return err
 		}
+		am.metrics.LastConfigSync.SetToCurrentTime()
 	}
 	return nil
 }
@@ -193,9 +203,12 @@
 	}
 
 	if am.shouldSendState(ctx, state) {
+		am.metrics.StateSyncsTotal.Inc()
 		if err := am.mimirClient.CreateGrafanaAlertmanagerState(ctx, state); err != nil {
+			am.metrics.StateSyncErrorsTotal.Inc()
 			return err
 		}
+		am.metrics.LastStateSync.SetToCurrentTime()
 	}
 	return nil
 }
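The readiness check and both sync paths follow the same last-success pattern: zero-initialize the timestamp gauge so the series exists from startup (an alert on `== 0` can then mean "never succeeded"), count every attempt, count failures separately, and overwrite the gauge only on success. A generic sketch of the pattern, not tied to this code:

```go
package example

import (
	"context"

	"github.com/prometheus/client_golang/prometheus"
)

var (
	attempts = prometheus.NewCounter(prometheus.CounterOpts{
		Name: "example_syncs_total", Help: "Total sync attempts."})
	failures = prometheus.NewCounter(prometheus.CounterOpts{
		Name: "example_sync_failures_total", Help: "Failed sync attempts."})
	lastSuccess = prometheus.NewGauge(prometheus.GaugeOpts{
		Name: "example_last_sync_timestamp_seconds", Help: "Timestamp of the last successful sync."})
)

// sync is a stand-in for CompareAndSendConfiguration or CompareAndSendState.
func sync(ctx context.Context, send func(context.Context) error) error {
	attempts.Inc()
	if err := send(ctx); err != nil {
		failures.Inc()
		return err // failure leaves the last-success timestamp untouched
	}
	lastSuccess.SetToCurrentTime()
	return nil
}
```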
diff --git a/pkg/services/ngalert/remote/alertmanager_test.go b/pkg/services/ngalert/remote/alertmanager_test.go
index 5a447da5b1d..59e60034677 100644
--- a/pkg/services/ngalert/remote/alertmanager_test.go
+++ b/pkg/services/ngalert/remote/alertmanager_test.go
@@ -14,12 +14,14 @@ import (
 	"github.com/go-openapi/strfmt"
 
 	apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
 	"github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
 	"github.com/grafana/grafana/pkg/util"
 	amv2 "github.com/prometheus/alertmanager/api/v2/models"
 	"github.com/prometheus/alertmanager/cluster/clusterpb"
+	"github.com/prometheus/client_golang/prometheus"
 	"github.com/stretchr/testify/require"
 )
@@ -68,7 +70,8 @@ func TestNewAlertmanager(t *testing.T) {
 				TenantID:          test.tenantID,
 				BasicAuthPassword: test.password,
 			}
-			am, err := NewAlertmanager(cfg, nil)
+			m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+			am, err := NewAlertmanager(cfg, nil, m)
 			if test.expErr != "" {
 				require.EqualError(tt, err, test.expErr)
 				return
@@ -106,7 +109,8 @@ func TestApplyConfig(t *testing.T) {
 	require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, "test"))
 	require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, "test"))
 
-	am, err := NewAlertmanager(cfg, fstore)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, fstore, m)
 	require.NoError(t, err)
 
 	config := &ngmodels.AlertConfiguration{}
@@ -175,7 +179,8 @@ func TestIntegrationRemoteAlertmanagerApplyConfigOnlyUploadsOnce(t *testing.T) {
 	require.NoError(t, err)
 	encodedFullState := base64.StdEncoding.EncodeToString(fullState)
 
-	am, err := NewAlertmanager(cfg, fstore)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, fstore, m)
 	require.NoError(t, err)
 
 	// We should have no configuration or state at first.
@@ -259,7 +264,8 @@ func TestIntegrationRemoteAlertmanagerSilences(t *testing.T) {
 		TenantID:          tenantID,
 		BasicAuthPassword: password,
 	}
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// We should have no silences at first.
@@ -339,7 +345,8 @@ func TestIntegrationRemoteAlertmanagerAlerts(t *testing.T) {
 		TenantID:          tenantID,
 		BasicAuthPassword: password,
 	}
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// Wait until the Alertmanager is ready to send alerts.
@@ -405,7 +412,8 @@ func TestIntegrationRemoteAlertmanagerReceivers(t *testing.T) {
 		BasicAuthPassword: password,
 	}
 
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// We should start with the default config.
diff --git a/pkg/services/ngalert/remote/client/alertmanager.go b/pkg/services/ngalert/remote/client/alertmanager.go
index 187b590bcbf..911fc628d17 100644
--- a/pkg/services/ngalert/remote/client/alertmanager.go
+++ b/pkg/services/ngalert/remote/client/alertmanager.go
@@ -9,6 +9,8 @@ import (
 	httptransport "github.com/go-openapi/runtime/client"
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	amclient "github.com/prometheus/alertmanager/api/v2/client"
 )
 
@@ -24,12 +26,12 @@ type AlertmanagerConfig struct {
 
 type Alertmanager struct {
 	*amclient.AlertmanagerAPI
-	httpClient *http.Client
+	httpClient client.Requester
 	url        *url.URL
 	logger     log.Logger
 }
 
-func NewAlertmanager(cfg *AlertmanagerConfig) (*Alertmanager, error) {
+func NewAlertmanager(cfg *AlertmanagerConfig, metrics *metrics.RemoteAlertmanager) (*Alertmanager, error) {
 	// First, add the authentication middleware.
 	c := &http.Client{Transport: &MimirAuthRoundTripper{
 		TenantID: cfg.TenantID,
@@ -37,23 +39,27 @@ func NewAlertmanager(cfg *AlertmanagerConfig) (*Alertmanager, error) {
 		Password: cfg.Password,
 		Next:     http.DefaultTransport,
 	}}
 
+	tc := client.NewTimedClient(c, metrics.RequestLatency)
 	apiEndpoint := *cfg.URL
 
 	// Next, make sure you set the right path.
 	u := apiEndpoint.JoinPath(alertmanagerAPIMountPath, amclient.DefaultBasePath)
-	transport := httptransport.NewWithClient(u.Host, u.Path, []string{u.Scheme}, c)
+
+	// Create an Alertmanager client using the timed client as the transport.
+	r := httptransport.New(u.Host, u.Path, []string{u.Scheme})
+	r.Transport = tc
 
 	return &Alertmanager{
 		logger:          cfg.Logger,
 		url:             cfg.URL,
-		AlertmanagerAPI: amclient.New(transport, nil),
-		httpClient:      c,
+		AlertmanagerAPI: amclient.New(r, nil),
+		httpClient:      tc,
 	}, nil
 }
 
-// GetAuthedClient returns a *http.Client that includes a configured MimirAuthRoundTripper.
+// GetAuthedClient returns a client.Requester that includes a configured MimirAuthRoundTripper.
 // Requests using this client are fully authenticated.
-func (am *Alertmanager) GetAuthedClient() *http.Client {
+func (am *Alertmanager) GetAuthedClient() client.Requester {
 	return am.httpClient
 }
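The switch from `httptransport.NewWithClient` to `httptransport.New` plus a `Transport` override is what the `RoundTrip` method in `client.go` enables: `NewWithClient` requires a concrete `*http.Client`, while the timed client slots in as a `RoundTripper`. Since `TimedClient` labels each request by operation name (falling back when the context carries none, as `client.go` shows), callers can also tag individual requests. A hedged sketch — the helper and operation name are illustrative:

```go
package example

import (
	"context"
	"net/http"

	"github.com/grafana/grafana/pkg/services/ngalert/client"
)

// doWithOperationName tags a request so the latency histogram records a
// stable operation label instead of deriving one from the request itself.
func doWithOperationName(tc client.Requester, req *http.Request, op string) (*http.Response, error) {
	ctx := context.WithValue(req.Context(), client.OperationNameContextKey, op)
	return tc.Do(req.WithContext(ctx))
}
```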
diff --git a/pkg/services/ngalert/remote/client/mimir.go b/pkg/services/ngalert/remote/client/mimir.go
index 25bfa15ca7f..c4e0d96dc72 100644
--- a/pkg/services/ngalert/remote/client/mimir.go
+++ b/pkg/services/ngalert/remote/client/mimir.go
@@ -12,6 +12,8 @@ import (
 	"strings"
 
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 )
 
 // MimirClient contains all the methods to query the migration critical endpoints of Mimir instance, it's an interface to allow multiple implementations.
@@ -26,9 +28,10 @@ type MimirClient interface {
 }
 
 type Mimir struct {
+	client   client.Requester
 	endpoint *url.URL
-	client   http.Client
 	logger   log.Logger
+	metrics  *metrics.RemoteAlertmanager
 }
 
 type Config struct {
@@ -60,21 +63,22 @@ func (e *errorResponse) Error() string {
 	return e.Error2
 }
 
-func New(cfg *Config) (*Mimir, error) {
+func New(cfg *Config, metrics *metrics.RemoteAlertmanager) (*Mimir, error) {
 	rt := &MimirAuthRoundTripper{
 		TenantID: cfg.TenantID,
 		Password: cfg.Password,
 		Next:     http.DefaultTransport,
 	}
 
-	c := http.Client{
+	c := &http.Client{
 		Transport: rt,
 	}
 
 	return &Mimir{
 		endpoint: cfg.URL,
-		client:   c,
+		client:   client.NewTimedClient(c, metrics.RequestLatency),
 		logger:   cfg.Logger,
+		metrics:  metrics,
 	}, nil
 }
diff --git a/pkg/services/ngalert/state/historian/loki.go b/pkg/services/ngalert/state/historian/loki.go
index 00556adfec5..2b165988b02 100644
--- a/pkg/services/ngalert/state/historian/loki.go
+++ b/pkg/services/ngalert/state/historian/loki.go
@@ -14,6 +14,7 @@ import (
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/eval"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
@@ -54,7 +55,7 @@ type RemoteLokiBackend struct {
 	log     log.Logger
 }
 
-func NewRemoteLokiBackend(cfg LokiConfig, req Requester, metrics *metrics.Historian) *RemoteLokiBackend {
+func NewRemoteLokiBackend(cfg LokiConfig, req client.Requester, metrics *metrics.Historian) *RemoteLokiBackend {
 	logger := log.New("ngalert.state.historian", "backend", "loki")
 	return &RemoteLokiBackend{
 		client: NewLokiClient(cfg, req, metrics, logger),
diff --git a/pkg/services/ngalert/state/historian/loki_http.go b/pkg/services/ngalert/state/historian/loki_http.go
index 8f51d315291..89dde820476 100644
--- a/pkg/services/ngalert/state/historian/loki_http.go
+++ b/pkg/services/ngalert/state/historian/loki_http.go
@@ -12,6 +12,7 @@ import (
 	"time"
 
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/setting"
 )
@@ -19,7 +20,7 @@ import (
 
 const defaultPageSize = 1000
 const maximumPageSize = 5000
 
-func NewRequester() Requester {
+func NewRequester() client.Requester {
 	return &http.Client{}
 }
@@ -79,7 +80,7 @@ func NewLokiConfig(cfg setting.UnifiedAlertingStateHistorySettings) (LokiConfig,
 }
 
 type HttpLokiClient struct {
-	client  Requester
+	client  client.Requester
 	encoder encoder
 	cfg     LokiConfig
 	metrics *metrics.Historian
@@ -100,8 +101,8 @@ const (
 	NeqRegEx Operator = "!~"
 )
 
-func NewLokiClient(cfg LokiConfig, req Requester, metrics *metrics.Historian, logger log.Logger) *HttpLokiClient {
-	tc := NewTimedClient(req, metrics.WriteDuration)
+func NewLokiClient(cfg LokiConfig, req client.Requester, metrics *metrics.Historian, logger log.Logger) *HttpLokiClient {
+	tc := client.NewTimedClient(req, metrics.WriteDuration)
 	return &HttpLokiClient{
 		client:  tc,
 		encoder: cfg.Encoder,
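A practical consequence of accepting `client.Requester` instead of a concrete `*http.Client` in the Mimir and Loki clients is that transports can be stubbed in tests without spinning up a server. A hypothetical fake, sketched against the one-method interface from `client.go`:

```go
package example

import (
	"bytes"
	"io"
	"net/http"
)

// fakeRequester satisfies client.Requester with a canned response.
type fakeRequester struct {
	status int
	body   string
	err    error
}

func (f *fakeRequester) Do(*http.Request) (*http.Response, error) {
	if f.err != nil {
		return nil, f.err
	}
	return &http.Response{
		StatusCode: f.status,
		Body:       io.NopCloser(bytes.NewBufferString(f.body)),
		Header:     http.Header{},
	}, nil
}

// Usage sketch: NewLokiClient(cfg, &fakeRequester{status: http.StatusOK}, met, logger)
```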
"github.com/grafana/grafana/pkg/services/ngalert/metrics" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" @@ -337,7 +338,7 @@ func TestStream(t *testing.T) { }) } -func createTestLokiClient(req Requester) *HttpLokiClient { +func createTestLokiClient(req client.Requester) *HttpLokiClient { url, _ := url.Parse("http://some.url") cfg := LokiConfig{ WritePathURL: url, diff --git a/pkg/services/ngalert/state/historian/loki_test.go b/pkg/services/ngalert/state/historian/loki_test.go index 25c29e0fb24..ea887df5745 100644 --- a/pkg/services/ngalert/state/historian/loki_test.go +++ b/pkg/services/ngalert/state/historian/loki_test.go @@ -13,6 +13,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/services/ngalert/client" "github.com/grafana/grafana/pkg/services/ngalert/eval" "github.com/grafana/grafana/pkg/services/ngalert/metrics" "github.com/grafana/grafana/pkg/services/ngalert/models" @@ -503,7 +504,7 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2 }) } -func createTestLokiBackend(req Requester, met *metrics.Historian) *RemoteLokiBackend { +func createTestLokiBackend(req client.Requester, met *metrics.Historian) *RemoteLokiBackend { url, _ := url.Parse("http://some.url") cfg := LokiConfig{ WritePathURL: url,