Alerting: Add metrics to the remote Alertmanager struct (#79835)

* Alerting: Add metrics to the remote Alertmanager struct

* rephrase http_requests_failed description

* make linter happy

* remove unnecessary metrics

* extract timed client to separate package

* use histogram collector from dskit (see the sketch after the changed-files list)

* remove weaveworks dependency

* capture metrics for all requests to the remote Alertmanager (both clients)

* use the timed client in the MimirAuthRoundTripper

* HTTPRequestsDuration -> HTTPRequestDuration, clean up mimir client factory function

* refactor

* less git diff

* gauge for last readiness check in seconds

* initialize LastReadinessCheck to 0, tweak metric names and descriptions

* add counters for sync attempts/errors

* last config sync and last state sync timestamps (gauges)

* change latency metric name

* metric for remote Alertmanager mode

* code review comments

* move label constants to metrics package
Branch: pull/80273/head
Author: Santiago (committed via GitHub)
Parent: 1162c28a55
Commit: 9e78faa7ba
14 changed files (lines changed in parentheses):
  1. pkg/services/ngalert/client/client.go (7)
  2. pkg/services/ngalert/client/client_test.go (2)
  3. pkg/services/ngalert/metrics/ngalert.go (6)
  4. pkg/services/ngalert/metrics/remote_alertmanager.go (84)
  5. pkg/services/ngalert/ngalert.go (9)
  6. pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go (3)
  7. pkg/services/ngalert/remote/alertmanager.go (23)
  8. pkg/services/ngalert/remote/alertmanager_test.go (20)
  9. pkg/services/ngalert/remote/client/alertmanager.go (20)
  10. pkg/services/ngalert/remote/client/mimir.go (12)
  11. pkg/services/ngalert/state/historian/loki.go (3)
  12. pkg/services/ngalert/state/historian/loki_http.go (9)
  13. pkg/services/ngalert/state/historian/loki_http_test.go (3)
  14. pkg/services/ngalert/state/historian/loki_test.go (3)
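The "extract timed client" and "use histogram collector from dskit" steps above describe one mechanism that runs through the diffs below: request durations are recorded through dskit's instrument package into a histogram labeled by operation and status code. A minimal standalone sketch of that mechanism, assuming dskit's CollectedRequest/ErrorCode helpers that back the timed client; the metric name and target URL here are illustrative, not from the commit:

package main

import (
	"context"
	"fmt"
	"net/http"

	"github.com/grafana/dskit/instrument"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

func main() {
	reg := prometheus.NewRegistry()

	// HistogramCollectorBuckets is, despite its name, the label set
	// ("operation", "status_code") expected by dskit's HistogramCollector.
	collector := instrument.NewHistogramCollector(promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
		Name: "example_request_duration_seconds",
		Help: "Histogram of request latencies.",
	}, instrument.HistogramCollectorBuckets))

	// Time a single request; the observation lands under the given operation
	// label, with the status code label derived from the returned error.
	err := instrument.CollectedRequest(context.Background(), "GET /-/ready", collector, instrument.ErrorCode, func(ctx context.Context) error {
		req, err := http.NewRequestWithContext(ctx, http.MethodGet, "http://localhost:9093/-/ready", nil)
		if err != nil {
			return err
		}
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return err
		}
		return resp.Body.Close()
	})
	fmt.Println("request error:", err)
}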

pkg/services/ngalert/client/client.go
@@ -1,4 +1,4 @@
-package historian
+package client
 
 import (
 	"context"
@@ -39,6 +39,11 @@ func (c TimedClient) Do(r *http.Request) (*http.Response, error) {
 	return TimeRequest(r.Context(), c.operationName(r), c.collector, c.client, r)
 }
 
+// RoundTrip implements the RoundTripper interface.
+func (c TimedClient) RoundTrip(r *http.Request) (*http.Response, error) {
+	return c.Do(r)
+}
+
 func (c TimedClient) operationName(r *http.Request) string {
 	operation, _ := r.Context().Value(OperationNameContextKey).(string)
 	if operation == "" {
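With RoundTrip delegating to Do, a TimedClient can be consumed either as a plain Requester or as the transport inside another *http.Client; both uses appear later in this commit. A hypothetical sketch of the two call sites (the collector argument is the dskit histogram collector wired up in the metrics package below):

package example

import (
	"net/http"

	"github.com/grafana/dskit/instrument"
	"github.com/grafana/grafana/pkg/services/ngalert/client"
)

// buildClients demonstrates the two ways the TimedClient is consumed in this
// commit: directly via Do, and as an http.RoundTripper inside an *http.Client.
func buildClients(collector instrument.Collector) (client.Requester, *http.Client) {
	tc := client.NewTimedClient(&http.Client{}, collector)

	// 1. As a Requester: tc.Do(req) times the request itself.
	direct := tc

	// 2. As a transport: every request made by wrapped goes through tc.RoundTrip.
	wrapped := &http.Client{Transport: tc}

	return direct, wrapped
}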

pkg/services/ngalert/metrics/ngalert.go
@@ -30,6 +30,7 @@ type NGAlert struct {
 	multiOrgAlertmanagerMetrics *MultiOrgAlertmanager
 	apiMetrics                  *API
 	historianMetrics            *Historian
+	remoteAlertmanagerMetrics   *RemoteAlertmanager
 }
 
 // NewNGAlert manages the metrics of all the alerting components.
@@ -41,6 +42,7 @@ func NewNGAlert(r prometheus.Registerer) *NGAlert {
 		multiOrgAlertmanagerMetrics: NewMultiOrgAlertmanagerMetrics(r),
 		apiMetrics:                  NewAPIMetrics(r),
 		historianMetrics:            NewHistorianMetrics(r, Subsystem),
+		remoteAlertmanagerMetrics:   NewRemoteAlertmanagerMetrics(r),
 	}
 }
 
@@ -63,3 +65,7 @@ func (ng *NGAlert) GetMultiOrgAlertmanagerMetrics() *MultiOrgAlertmanager {
 func (ng *NGAlert) GetHistorianMetrics() *Historian {
 	return ng.historianMetrics
 }
+
+func (ng *NGAlert) GetRemoteAlertmanagerMetrics() *RemoteAlertmanager {
+	return ng.remoteAlertmanagerMetrics
+}

pkg/services/ngalert/metrics/remote_alertmanager.go
@@ -0,0 +1,84 @@
+package metrics
+
+import (
+	"github.com/grafana/dskit/instrument"
+	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/client_golang/prometheus/promauto"
+)
+
+const (
+	ModeRemoteSecondary = "remote_secondary"
+	ModeRemotePrimary   = "remote_primary"
+	ModeRemoteOnly      = "remote_only"
+)
+
+type RemoteAlertmanager struct {
+	Info                  *prometheus.GaugeVec
+	RequestLatency        *instrument.HistogramCollector
+	LastReadinessCheck    prometheus.Gauge
+	ConfigSyncsTotal      prometheus.Counter
+	ConfigSyncErrorsTotal prometheus.Counter
+	LastConfigSync        prometheus.Gauge
+	StateSyncsTotal       prometheus.Counter
+	StateSyncErrorsTotal  prometheus.Counter
+	LastStateSync         prometheus.Gauge
+}
+
+func NewRemoteAlertmanagerMetrics(r prometheus.Registerer) *RemoteAlertmanager {
+	return &RemoteAlertmanager{
+		Info: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_info",
+			Help:      "Information about the remote Alertmanager.",
+		}, []string{"mode"}),
+		RequestLatency: instrument.NewHistogramCollector(promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_latency_seconds",
+			Help:      "Histogram of request latencies to the remote Alertmanager.",
+		}, instrument.HistogramCollectorBuckets)),
+		LastReadinessCheck: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_readiness_check_timestamp_seconds",
+			Help:      "Timestamp of the last successful readiness check to the remote Alertmanager in seconds.",
+		}),
+		ConfigSyncsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_configuration_syncs_total",
+			Help:      "Total number of configuration syncs to the remote Alertmanager.",
+		}),
+		ConfigSyncErrorsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_configuration_sync_failures_total",
+			Help:      "Total number of failed attempts to sync configurations between Alertmanagers.",
+		}),
+		LastConfigSync: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_configuration_sync_timestamp_seconds",
+			Help:      "Timestamp of the last successful configuration sync to the remote Alertmanager in seconds.",
+		}),
+		StateSyncsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_state_syncs_total",
+			Help:      "Total number of state syncs to the remote Alertmanager.",
+		}),
+		StateSyncErrorsTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_state_sync_failures_total",
+			Help:      "Total number of failed attempts to sync state between Alertmanagers.",
+		}),
+		LastStateSync: promauto.With(r).NewGauge(prometheus.GaugeOpts{
+			Namespace: Namespace,
+			Subsystem: Subsystem,
+			Name:      "remote_alertmanager_last_state_sync_timestamp_seconds",
+			Help:      "Timestamp of the last successful state sync to the remote Alertmanager in seconds.",
+		}),
+	}
+}
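Namespace and Subsystem in this package resolve to the grafana_alerting prefix (compare grafana_alerting_state_history_writes_total in the loki_test.go hunk near the end), so the new series are exposed as grafana_alerting_remote_alertmanager_*. A throwaway sketch to list the registered names; the helper is hypothetical, not part of the commit:

package example

import (
	"fmt"

	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/prometheus/client_golang/prometheus"
)

// printRemoteAMMetricNames registers the new metrics against a scratch
// registry and prints the metric families a scrape would expose, e.g.
// grafana_alerting_remote_alertmanager_info{mode="remote_secondary"}.
func printRemoteAMMetricNames() error {
	reg := prometheus.NewRegistry()
	m := metrics.NewRemoteAlertmanagerMetrics(reg)
	m.Info.WithLabelValues(metrics.ModeRemoteSecondary).Set(1)

	mfs, err := reg.Gather()
	if err != nil {
		return err
	}
	for _, mf := range mfs {
		fmt.Println(mf.GetName())
	}
	return nil
}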

pkg/services/ngalert/ngalert.go
@@ -185,6 +185,9 @@ func (ng *AlertNG) init() error {
 	case remoteSecondary:
 		ng.Log.Debug("Starting Grafana with remote secondary mode enabled")
+		m := ng.Metrics.GetRemoteAlertmanagerMetrics()
+		m.Info.WithLabelValues(metrics.ModeRemoteSecondary).Set(1)
+
 		// This function will be used by the MOA to create new Alertmanagers.
 		override := notifier.WithAlertmanagerOverride(func(factoryFn notifier.OrgAlertmanagerFactory) notifier.OrgAlertmanagerFactory {
 			return func(ctx context.Context, orgID int64) (notifier.Alertmanager, error) {
@@ -195,7 +198,7 @@ func (ng *AlertNG) init() error {
 				}
 
 				// Create remote Alertmanager.
-				remoteAM, err := createRemoteAlertmanager(orgID, ng.Cfg.UnifiedAlerting.RemoteAlertmanager, ng.KVStore)
+				remoteAM, err := createRemoteAlertmanager(orgID, ng.Cfg.UnifiedAlerting.RemoteAlertmanager, ng.KVStore, m)
 				if err != nil {
 					moaLogger.Error("Failed to create remote Alertmanager, falling back to using only the internal one", "err", err)
 					return internalAM, nil
@@ -540,7 +543,7 @@ func ApplyStateHistoryFeatureToggles(cfg *setting.UnifiedAlertingStateHistorySet
 	}
 }
 
-func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSettings, kvstore kvstore.KVStore) (*remote.Alertmanager, error) {
+func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSettings, kvstore kvstore.KVStore, m *metrics.RemoteAlertmanager) (*remote.Alertmanager, error) {
 	externalAMCfg := remote.AlertmanagerConfig{
 		OrgID:             orgID,
 		URL:               amCfg.URL,
@@ -549,5 +552,5 @@ func createRemoteAlertmanager(orgID int64, amCfg setting.RemoteAlertmanagerSetti
 	}
 	// We won't be handling files on disk, we can pass an empty string as workingDirPath.
 	stateStore := notifier.NewFileStore(orgID, kvstore, "")
-	return remote.NewAlertmanager(externalAMCfg, stateStore)
+	return remote.NewAlertmanager(externalAMCfg, stateStore, m)
 }

pkg/services/ngalert/notifier/multiorg_alertmanager_remote_test.go
@@ -64,7 +64,8 @@ func TestMultiorgAlertmanager_RemoteSecondaryMode(t *testing.T) {
 		}
 		// We won't be handling files on disk, we can pass an empty string as workingDirPath.
 		stateStore := notifier.NewFileStore(orgID, kvStore, "")
-		remoteAM, err := remote.NewAlertmanager(externalAMCfg, stateStore)
+		m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+		remoteAM, err := remote.NewAlertmanager(externalAMCfg, stateStore, m)
 		require.NoError(t, err)
 
 		// Use both Alertmanager implementations in the forked Alertmanager.

pkg/services/ngalert/remote/alertmanager.go
@@ -10,6 +10,7 @@ import (
 	"github.com/go-openapi/strfmt"
 	"github.com/grafana/grafana/pkg/infra/log"
 	apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
 	remoteClient "github.com/grafana/grafana/pkg/services/ngalert/remote/client"
@@ -26,6 +27,7 @@ type stateStore interface {
 
 type Alertmanager struct {
 	log      log.Logger
+	metrics  *metrics.RemoteAlertmanager
 	orgID    int64
 	ready    bool
 	sender   *sender.ExternalAlertmanager
@@ -59,7 +61,7 @@ func (cfg *AlertmanagerConfig) Validate() error {
 	return nil
 }
 
-func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, error) {
+func NewAlertmanager(cfg AlertmanagerConfig, store stateStore, metrics *metrics.RemoteAlertmanager) (*Alertmanager, error) {
 	if err := cfg.Validate(); err != nil {
 		return nil, err
 	}
@@ -76,7 +78,7 @@ func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, e
 		Password: cfg.BasicAuthPassword,
 		Logger:   logger,
 	}
-	mc, err := remoteClient.New(mcCfg)
+	mc, err := remoteClient.New(mcCfg, metrics)
 	if err != nil {
 		return nil, err
 	}
@@ -87,7 +89,7 @@ func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, e
 		Password: cfg.BasicAuthPassword,
 		Logger:   logger,
 	}
-	amc, err := remoteClient.NewAlertmanager(amcCfg)
+	amc, err := remoteClient.NewAlertmanager(amcCfg, metrics)
 	if err != nil {
 		return nil, err
 	}
@@ -104,13 +106,17 @@ func NewAlertmanager(cfg AlertmanagerConfig, store stateStore) (*Alertmanager, e
 		return nil, err
 	}
 
+	// Initialize LastReadinessCheck so it's present even if the check fails.
+	metrics.LastReadinessCheck.Set(0)
+
 	return &Alertmanager{
+		amClient:    amc,
 		log:         logger,
+		metrics:     metrics,
 		mimirClient: mc,
+		orgID:       cfg.OrgID,
 		state:       store,
-		amClient:    amc,
 		sender:      s,
-		orgID:       cfg.OrgID,
 		tenantID:    cfg.TenantID,
 		url:         cfg.URL,
 	}, nil
@@ -159,6 +165,7 @@ func (am *Alertmanager) checkReadiness(ctx context.Context) error {
 	if ready {
 		am.log.Debug("Alertmanager readiness check successful")
+		am.metrics.LastReadinessCheck.SetToCurrentTime()
 		am.ready = true
 		return nil
 	}
@@ -170,6 +177,7 @@ func (am *Alertmanager) checkReadiness(ctx context.Context) error {
 // If not, it sends the configuration to the remote Alertmanager.
 func (am *Alertmanager) CompareAndSendConfiguration(ctx context.Context, config *models.AlertConfiguration) error {
 	if am.shouldSendConfig(ctx, config) {
+		am.metrics.ConfigSyncsTotal.Inc()
 		if err := am.mimirClient.CreateGrafanaAlertmanagerConfig(
 			ctx,
 			config.AlertmanagerConfiguration,
@@ -178,8 +186,10 @@ func (am *Alertmanager) CompareAndSendConfiguration(ctx context.Context, config
 			config.CreatedAt,
 			config.Default,
 		); err != nil {
+			am.metrics.ConfigSyncErrorsTotal.Inc()
 			return err
 		}
+		am.metrics.LastConfigSync.SetToCurrentTime()
 	}
 	return nil
 }
@@ -193,9 +203,12 @@ func (am *Alertmanager) CompareAndSendState(ctx context.Context) error {
 	}
 
 	if am.shouldSendState(ctx, state) {
+		am.metrics.StateSyncsTotal.Inc()
 		if err := am.mimirClient.CreateGrafanaAlertmanagerState(ctx, state); err != nil {
+			am.metrics.StateSyncErrorsTotal.Inc()
 			return err
 		}
+		am.metrics.LastStateSync.SetToCurrentTime()
 	}
 	return nil
 }
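Both sync paths above share a count-attempt / count-failure / stamp-success shape (note that the state path increments StateSyncErrorsTotal, not the config counter). A generic sketch of that pattern; instrumentedSync is a hypothetical helper, not part of the commit:

package example

import "github.com/prometheus/client_golang/prometheus"

// instrumentedSync wraps a sync operation with the metric updates used above:
// every attempt increments the attempts counter, failures increment the error
// counter, and only successes move the last-success timestamp gauge.
func instrumentedSync(attempts, failures prometheus.Counter, lastSuccess prometheus.Gauge, sync func() error) error {
	attempts.Inc()
	if err := sync(); err != nil {
		failures.Inc()
		return err
	}
	lastSuccess.SetToCurrentTime()
	return nil
}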

pkg/services/ngalert/remote/alertmanager_test.go
@@ -14,12 +14,14 @@ import (
 	"github.com/go-openapi/strfmt"
 	apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
 	"github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
 	"github.com/grafana/grafana/pkg/util"
 	amv2 "github.com/prometheus/alertmanager/api/v2/models"
 	"github.com/prometheus/alertmanager/cluster/clusterpb"
+	"github.com/prometheus/client_golang/prometheus"
 	"github.com/stretchr/testify/require"
 )
@@ -68,7 +70,8 @@ func TestNewAlertmanager(t *testing.T) {
 				TenantID:          test.tenantID,
 				BasicAuthPassword: test.password,
 			}
-			am, err := NewAlertmanager(cfg, nil)
+			m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+			am, err := NewAlertmanager(cfg, nil, m)
 			if test.expErr != "" {
 				require.EqualError(tt, err, test.expErr)
 				return
@@ -106,7 +109,8 @@ func TestApplyConfig(t *testing.T) {
 	require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, "test"))
 	require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, "test"))
 
-	am, err := NewAlertmanager(cfg, fstore)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, fstore, m)
 	require.NoError(t, err)
 
 	config := &ngmodels.AlertConfiguration{}
@@ -175,7 +179,8 @@ func TestIntegrationRemoteAlertmanagerApplyConfigOnlyUploadsOnce(t *testing.T) {
 	require.NoError(t, err)
 	encodedFullState := base64.StdEncoding.EncodeToString(fullState)
 
-	am, err := NewAlertmanager(cfg, fstore)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, fstore, m)
 	require.NoError(t, err)
 
 	// We should have no configuration or state at first.
@@ -259,7 +264,8 @@ func TestIntegrationRemoteAlertmanagerSilences(t *testing.T) {
 		TenantID:          tenantID,
 		BasicAuthPassword: password,
 	}
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// We should have no silences at first.
@@ -339,7 +345,8 @@ func TestIntegrationRemoteAlertmanagerAlerts(t *testing.T) {
 		TenantID:          tenantID,
 		BasicAuthPassword: password,
 	}
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// Wait until the Alertmanager is ready to send alerts.
@@ -405,7 +412,8 @@ func TestIntegrationRemoteAlertmanagerReceivers(t *testing.T) {
 		BasicAuthPassword: password,
 	}
 
-	am, err := NewAlertmanager(cfg, nil)
+	m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
+	am, err := NewAlertmanager(cfg, nil, m)
 	require.NoError(t, err)
 
 	// We should start with the default config.

pkg/services/ngalert/remote/client/alertmanager.go
@@ -9,6 +9,8 @@ import (
 	httptransport "github.com/go-openapi/runtime/client"
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	amclient "github.com/prometheus/alertmanager/api/v2/client"
 )
@@ -24,12 +26,12 @@ type AlertmanagerConfig struct {
 
 type Alertmanager struct {
 	*amclient.AlertmanagerAPI
-	httpClient *http.Client
+	httpClient client.Requester
 	url        *url.URL
 	logger     log.Logger
 }
 
-func NewAlertmanager(cfg *AlertmanagerConfig) (*Alertmanager, error) {
+func NewAlertmanager(cfg *AlertmanagerConfig, metrics *metrics.RemoteAlertmanager) (*Alertmanager, error) {
 	// First, add the authentication middleware.
 	c := &http.Client{Transport: &MimirAuthRoundTripper{
 		TenantID: cfg.TenantID,
@@ -37,23 +39,27 @@ func NewAlertmanager(cfg *AlertmanagerConfig) (*Alertmanager, error) {
 		Next:     http.DefaultTransport,
 	}}
+	tc := client.NewTimedClient(c, metrics.RequestLatency)
 
 	apiEndpoint := *cfg.URL
 
 	// Next, make sure you set the right path.
 	u := apiEndpoint.JoinPath(alertmanagerAPIMountPath, amclient.DefaultBasePath)
-	transport := httptransport.NewWithClient(u.Host, u.Path, []string{u.Scheme}, c)
+
+	// Create an Alertmanager client using the timed client as the transport.
+	r := httptransport.New(u.Host, u.Path, []string{u.Scheme})
+	r.Transport = tc
 
 	return &Alertmanager{
 		logger:          cfg.Logger,
 		url:             cfg.URL,
-		AlertmanagerAPI: amclient.New(transport, nil),
-		httpClient:      c,
+		AlertmanagerAPI: amclient.New(r, nil),
+		httpClient:      tc,
 	}, nil
 }
 
-// GetAuthedClient returns a *http.Client that includes a configured MimirAuthRoundTripper.
+// GetAuthedClient returns a client.Requester that includes a configured MimirAuthRoundTripper.
 // Requests using this client are fully authenticated.
-func (am *Alertmanager) GetAuthedClient() *http.Client {
+func (am *Alertmanager) GetAuthedClient() client.Requester {
 	return am.httpClient
 }
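Switching from httptransport.NewWithClient to httptransport.New plus an explicit Transport assignment is what lets an arbitrary RoundTripper, here the timed client, back the generated Alertmanager API. A minimal sketch of the pattern; the host is a placeholder:

package example

import (
	"net/http"

	httptransport "github.com/go-openapi/runtime/client"
	amclient "github.com/prometheus/alertmanager/api/v2/client"
)

// newInstrumentedAlertmanagerAPI builds the generated client on top of an
// arbitrary RoundTripper, mirroring the hunk above.
func newInstrumentedAlertmanagerAPI(rt http.RoundTripper) *amclient.AlertmanagerAPI {
	r := httptransport.New("alertmanager.example.com", amclient.DefaultBasePath, []string{"https"})
	r.Transport = rt
	return amclient.New(r, nil)
}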

pkg/services/ngalert/remote/client/mimir.go
@@ -12,6 +12,8 @@ import (
 	"strings"
 
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
+	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 )
 
 // MimirClient contains all the methods to query the migration critical endpoints of Mimir instance, it's an interface to allow multiple implementations.
@@ -26,9 +28,10 @@ type MimirClient interface {
 }
 
 type Mimir struct {
+	client   client.Requester
 	endpoint *url.URL
-	client   http.Client
 	logger   log.Logger
+	metrics  *metrics.RemoteAlertmanager
 }
 
 type Config struct {
@@ -60,21 +63,22 @@ func (e *errorResponse) Error() string {
 	return e.Error2
 }
 
-func New(cfg *Config) (*Mimir, error) {
+func New(cfg *Config, metrics *metrics.RemoteAlertmanager) (*Mimir, error) {
 	rt := &MimirAuthRoundTripper{
 		TenantID: cfg.TenantID,
 		Password: cfg.Password,
 		Next:     http.DefaultTransport,
 	}
 
-	c := http.Client{
+	c := &http.Client{
 		Transport: rt,
 	}
 
 	return &Mimir{
 		endpoint: cfg.URL,
-		client:   c,
+		client:   client.NewTimedClient(c, metrics.RequestLatency),
 		logger:   cfg.Logger,
+		metrics:  metrics,
 	}, nil
 }

pkg/services/ngalert/state/historian/loki.go
@@ -14,6 +14,7 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/eval"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
@@ -54,7 +55,7 @@ type RemoteLokiBackend struct {
 	log    log.Logger
 }
 
-func NewRemoteLokiBackend(cfg LokiConfig, req Requester, metrics *metrics.Historian) *RemoteLokiBackend {
+func NewRemoteLokiBackend(cfg LokiConfig, req client.Requester, metrics *metrics.Historian) *RemoteLokiBackend {
 	logger := log.New("ngalert.state.historian", "backend", "loki")
 	return &RemoteLokiBackend{
 		client: NewLokiClient(cfg, req, metrics, logger),

pkg/services/ngalert/state/historian/loki_http.go
@@ -12,6 +12,7 @@ import (
 	"time"
 
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/setting"
 )
@@ -19,7 +20,7 @@ import (
 const defaultPageSize = 1000
 const maximumPageSize = 5000
 
-func NewRequester() Requester {
+func NewRequester() client.Requester {
 	return &http.Client{}
 }
@@ -79,7 +80,7 @@ func NewLokiConfig(cfg setting.UnifiedAlertingStateHistorySettings) (LokiConfig,
 }
 
 type HttpLokiClient struct {
-	client  Requester
+	client  client.Requester
 	encoder encoder
 	cfg     LokiConfig
 	metrics *metrics.Historian
@@ -100,8 +101,8 @@ const (
 	NeqRegEx Operator = "!~"
 )
 
-func NewLokiClient(cfg LokiConfig, req Requester, metrics *metrics.Historian, logger log.Logger) *HttpLokiClient {
-	tc := NewTimedClient(req, metrics.WriteDuration)
+func NewLokiClient(cfg LokiConfig, req client.Requester, metrics *metrics.Historian, logger log.Logger) *HttpLokiClient {
+	tc := client.NewTimedClient(req, metrics.WriteDuration)
 	return &HttpLokiClient{
 		client:  tc,
 		encoder: cfg.Encoder,

pkg/services/ngalert/state/historian/loki_http_test.go
@@ -11,6 +11,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/prometheus/client_golang/prometheus"
@@ -337,7 +338,7 @@ func TestStream(t *testing.T) {
 	})
 }
 
-func createTestLokiClient(req Requester) *HttpLokiClient {
+func createTestLokiClient(req client.Requester) *HttpLokiClient {
 	url, _ := url.Parse("http://some.url")
 	cfg := LokiConfig{
 		WritePathURL: url,

pkg/services/ngalert/state/historian/loki_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/infra/log"
+	"github.com/grafana/grafana/pkg/services/ngalert/client"
 	"github.com/grafana/grafana/pkg/services/ngalert/eval"
 	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
@@ -503,7 +504,7 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2
 	})
 }
 
-func createTestLokiBackend(req Requester, met *metrics.Historian) *RemoteLokiBackend {
+func createTestLokiBackend(req client.Requester, met *metrics.Historian) *RemoteLokiBackend {
 	url, _ := url.Parse("http://some.url")
 	cfg := LokiConfig{
 		WritePathURL: url,
