[release-11.6.1] Prometheus: Add support for cloud partners Prometheus data sources (#103941)

Prometheus: Add support for cloud partners Prometheus data sources (#103482)

* wip

* Add prom flavor support for data source variables and export/import dashboards (#103321)

* add dashboard and data source var selection

* use match plugin id instead

* use updated matchpluginid

* formatting

* cleanup

* regex anchor

* update error msg

* Alerting: Clean up prometheus-flavored types and functions (#103703)

* clean up types and utility functions for dealing with
prometheus-flavored data sources

* Refactor alerting datasource types to use constants as source of truth

* Alerting: Clean up prometheus-flavored types and functions on the backend (#103716)

Alerting: Clean up prometheus-flavored types and functions on the backend

* add matchPluginId tests

* Update matchPluginId func to bidirectional (#103746)

* update matchpluginid func to bidirectional

* lint

* formatting

* use actual isSupportedExternalRulesSourceType in test

* add tests in datasource_srv

* betterer

* remove type assertion

* remove unnecessary case

* use satisfies so the tuple doesn't have to be converted to an array of strings

* add prometheus_flavor test

---------

Co-authored-by: Andrew Hackmann <5140848+bossinc@users.noreply.github.com>
Co-authored-by: Gilles De Mey <gilles.de.mey@gmail.com>
Co-authored-by: Alexander Akhmetov <me@alx.cx>
(cherry picked from commit fd6fd91115)
Branch: pull/104086/head
Author: Kevin Yu, committed by GitHub
Commit: 760b9176fc (parent: 56d976552e)
31 changed files (lines changed in parentheses):

  1. packages/grafana-data/src/utils/matchPluginId.test.ts (64)
  2. packages/grafana-data/src/utils/matchPluginId.ts (12)
  3. pkg/api/datasource/validation.go (30)
  4. pkg/api/frontendsettings.go (2)
  5. pkg/api/pluginproxy/ds_proxy.go (2)
  6. pkg/expr/converter.go (2)
  7. pkg/infra/usagestats/statscollector/prometheus_flavor.go (16)
  8. pkg/infra/usagestats/statscollector/prometheus_flavor_test.go (34)
  9. pkg/services/datasources/models.go (44)
  10. pkg/services/datasources/service/datasource.go (2)
  11. pkg/services/datasources/service/datasource_test.go (11)
  12. pkg/services/ngalert/api/lotex_prom.go (25)
  13. pkg/services/ngalert/api/lotex_prom_test.go (98)
  14. pkg/services/ngalert/api/lotex_ruler.go (22)
  15. pkg/services/ngalert/api/lotex_ruler_test.go (58)
  16. pkg/services/ngalert/api/util.go (27)
  17. pkg/services/ngalert/api/util_test.go (83)
  18. public/app/features/alerting/unified/api/buildInfo.ts (17)
  19. public/app/features/alerting/unified/components/rule-editor/ExpressionEditor.tsx (21)
  20. public/app/features/alerting/unified/components/rule-viewer/tabs/Query/DataSourceModelPreview.tsx (4)
  21. public/app/features/alerting/unified/rule-editor/formDefaults.test.ts (5)
  22. public/app/features/alerting/unified/rule-editor/formProcessing.ts (8)
  23. public/app/features/alerting/unified/utils/datasource.ts (60)
  24. public/app/features/alerting/unified/utils/query.ts (32)
  25. public/app/features/alerting/unified/utils/rule-form.ts (26)
  26. public/app/features/dashboard/state/DashboardModel.ts (9)
  27. public/app/features/explore/DrilldownAlertBox.tsx (11)
  28. public/app/features/plugins/tests/datasource_srv.test.ts (69)
  29. public/app/features/trails/Integrations/dashboardIntegration.ts (6)
  30. public/app/features/trails/utils.ts (4)
  31. public/app/plugins/panel/alertlist/module.tsx (7)

@ -0,0 +1,64 @@
import { PluginMeta, PluginType } from '../types/plugin';
import { matchPluginId } from './matchPluginId';
const createPluginMeta = (id: string, aliasIDs?: string[]): PluginMeta => ({
id,
name: 'Test Plugin',
type: PluginType.datasource,
module: 'test',
baseUrl: 'test',
info: {
author: { name: 'Test' },
description: 'Test',
links: [],
logos: { small: '', large: '' },
screenshots: [],
updated: '',
version: '',
},
aliasIDs,
});
describe('matchPluginId', () => {
it('should match exact plugin ID', () => {
const pluginMeta = createPluginMeta('test-plugin');
expect(matchPluginId('test-plugin', pluginMeta)).toBe(true);
});
it('should not match different plugin ID', () => {
const pluginMeta = createPluginMeta('test-plugin');
expect(matchPluginId('different-plugin', pluginMeta)).toBe(false);
});
it('should match Amazon Prometheus flavor when idToMatch is prometheus', () => {
const pluginMeta = createPluginMeta('grafana-amazonprometheus-datasource');
expect(matchPluginId('prometheus', pluginMeta)).toBe(true);
});
it('should match Azure Prometheus flavor when idToMatch is prometheus', () => {
const pluginMeta = createPluginMeta('grafana-azureprometheus-datasource');
expect(matchPluginId('prometheus', pluginMeta)).toBe(true);
});
it('should not match non-prometheus flavor when idToMatch is prometheus', () => {
const pluginMeta = createPluginMeta('test-plugin');
expect(matchPluginId('prometheus', pluginMeta)).toBe(false);
});
it('should match alias IDs', () => {
const pluginMeta = createPluginMeta('test-plugin', ['alias1', 'alias2']);
expect(matchPluginId('alias1', pluginMeta)).toBe(true);
expect(matchPluginId('alias2', pluginMeta)).toBe(true);
});
it('should not match non-existent alias ID', () => {
const pluginMeta = createPluginMeta('test-plugin', ['alias1', 'alias2']);
expect(matchPluginId('alias3', pluginMeta)).toBe(false);
});
it('should handle undefined aliasIDs', () => {
const pluginMeta = createPluginMeta('test-plugin');
expect(matchPluginId('alias1', pluginMeta)).toBe(false);
});
});

@ -5,9 +5,21 @@ export function matchPluginId(idToMatch: string, pluginMeta: PluginMeta) {
return true;
}
if (isPromFlavor(idToMatch)) {
return isPromFlavor(pluginMeta.id);
}
if (pluginMeta.aliasIDs) {
return pluginMeta.aliasIDs.includes(idToMatch);
}
return false;
}
function isPromFlavor(pluginId: string): boolean {
if (pluginId === 'prometheus') {
return true;
}
const regex = new RegExp('^grafana-[0-9a-z]+prometheus-datasource$');
return regex.test(pluginId);
}
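
For readers skimming the diff, a minimal standalone sketch of the matching rules above; the MinimalPluginMeta type and the matchesPluginId/isPromFlavor names here are illustrative stand-ins for the real PluginMeta and matchPluginId in packages/grafana-data:

// Self-contained TypeScript sketch mirroring the matching rules above (illustrative only).
type MinimalPluginMeta = { id: string; aliasIDs?: string[] };

const isPromFlavor = (pluginId: string): boolean =>
  pluginId === 'prometheus' || /^grafana-[0-9a-z]+prometheus-datasource$/.test(pluginId);

function matchesPluginId(idToMatch: string, meta: MinimalPluginMeta): boolean {
  if (idToMatch === meta.id) {
    return true; // exact id match always wins
  }
  if (isPromFlavor(idToMatch)) {
    // bidirectional: 'prometheus' matches any cloud flavor and any flavor matches 'prometheus'
    return isPromFlavor(meta.id);
  }
  return meta.aliasIDs?.includes(idToMatch) ?? false;
}

matchesPluginId('prometheus', { id: 'grafana-amazonprometheus-datasource' }); // true
matchesPluginId('grafana-azureprometheus-datasource', { id: 'prometheus' }); // true
matchesPluginId('prometheus', { id: 'loki' }); // false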

@ -16,20 +16,22 @@ var logger = log.New("datasource")
// requiredURL contains the set of data sources that require a URL.
var requiredURL = map[string]bool{
datasources.DS_GRAPHITE: true,
datasources.DS_INFLUXDB: true,
datasources.DS_INFLUXDB_08: true,
datasources.DS_ES: true,
datasources.DS_PROMETHEUS: true,
datasources.DS_ALERTMANAGER: true,
datasources.DS_JAEGER: true,
datasources.DS_LOKI: true,
datasources.DS_OPENTSDB: true,
datasources.DS_TEMPO: true,
datasources.DS_ZIPKIN: true,
datasources.DS_MYSQL: true,
datasources.DS_POSTGRES: true,
datasources.DS_MSSQL: true,
datasources.DS_GRAPHITE: true,
datasources.DS_INFLUXDB: true,
datasources.DS_INFLUXDB_08: true,
datasources.DS_ES: true,
datasources.DS_PROMETHEUS: true,
datasources.DS_AMAZON_PROMETHEUS: true,
datasources.DS_AZURE_PROMETHEUS: true,
datasources.DS_ALERTMANAGER: true,
datasources.DS_JAEGER: true,
datasources.DS_LOKI: true,
datasources.DS_OPENTSDB: true,
datasources.DS_TEMPO: true,
datasources.DS_ZIPKIN: true,
datasources.DS_MYSQL: true,
datasources.DS_POSTGRES: true,
datasources.DS_MSSQL: true,
}
// URLValidationError represents an error from validating a data source URL.

@ -550,7 +550,7 @@ func (hs *HTTPServer) getFSDataSources(c *contextmodel.ReqContext, availablePlug
dsDTO.Database = ds.Database
}
if ds.Type == datasources.DS_PROMETHEUS {
if ds.Type == datasources.DS_PROMETHEUS || ds.Type == datasources.DS_AMAZON_PROMETHEUS || ds.Type == datasources.DS_AZURE_PROMETHEUS {
// add unproxied server URL for link to Prometheus web UI
ds.JsonData.Set("directUrl", ds.URL)
}

@ -315,7 +315,7 @@ func (proxy *DataSourceProxy) validateRequest() error {
}
// Trailing validation below this point for routes that were not matched
if proxy.ds.Type == datasources.DS_PROMETHEUS {
if proxy.ds.Type == datasources.DS_PROMETHEUS || proxy.ds.Type == datasources.DS_AMAZON_PROMETHEUS || proxy.ds.Type == datasources.DS_AZURE_PROMETHEUS {
if proxy.ctx.Req.Method == "DELETE" {
return errors.New("non allow-listed DELETEs not allowed on proxied Prometheus datasource")
}

@ -139,7 +139,7 @@ func getResponseFrame(logger *log.ConcreteLogger, resp *backend.QueryDataRespons
}
func isAllFrameVectors(datasourceType string, frames data.Frames) bool {
if datasourceType != datasources.DS_PROMETHEUS {
if datasourceType != datasources.DS_PROMETHEUS && datasourceType != datasources.DS_AMAZON_PROMETHEUS && datasourceType != datasources.DS_AZURE_PROMETHEUS {
return false
}
allVector := false

@ -46,12 +46,26 @@ func (s *Service) detectPrometheusVariants(ctx context.Context) (map[string]int6
s.log.Error("Failed to read all Prometheus data sources", "error", err)
return nil, err
}
dsAmazonProm := &datasources.GetDataSourcesByTypeQuery{Type: "grafana-amazonprometheus-datasource"}
dataSourcesAmazonProm, err := s.datasources.GetDataSourcesByType(ctx, dsAmazonProm)
if err != nil {
s.log.Error("Failed to read all Amazon Prometheus data sources", "error", err)
return nil, err
}
dsAzureProm := &datasources.GetDataSourcesByTypeQuery{Type: "grafana-azureprometheus-datasource"}
dataSourcesAzureProm, err := s.datasources.GetDataSourcesByType(ctx, dsAzureProm)
if err != nil {
s.log.Error("Failed to read all Azure Prometheus data sources", "error", err)
return nil, err
}
allPromDataSources := append(append(dataSources, dataSourcesAmazonProm...), dataSourcesAzureProm...)
g, ctx := errgroup.WithContext(ctx)
g.SetLimit(10)
flavors := sync.Map{}
for _, ds := range dataSources {
for _, ds := range allPromDataSources {
ds := ds
g.Go(func() error {
variant, err := s.detectPrometheusVariant(ctx, ds)

@ -32,6 +32,18 @@ func TestDetectPrometheusVariant(t *testing.T) {
}))
t.Cleanup(cortex.Close)
// Amazon Prometheus is Cortex-like
amazonPrometheus := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNotFound)
}))
t.Cleanup(amazonPrometheus.Close)
// Azure Prometheus is Cortex-like
azurePrometheus := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNotFound)
}))
t.Cleanup(azurePrometheus.Close)
sqlStore := dbtest.NewFakeDB()
statsService := statstest.NewFakeService()
s := createService(
@ -80,6 +92,26 @@ func TestDetectPrometheusVariant(t *testing.T) {
Access: "proxy",
URL: cortex.URL,
},
{
ID: 5,
UID: "amazon-prometheus",
OrgID: 1,
Version: 1,
Name: "Amazon Prometheus",
Type: "prometheus",
Access: "proxy",
URL: amazonPrometheus.URL,
},
{
ID: 6,
UID: "azure-prometheus",
OrgID: 1,
Version: 1,
Name: "Azure Prometheus",
Type: "prometheus",
Access: "proxy",
URL: azurePrometheus.URL,
},
}}),
)
@ -88,5 +120,5 @@ func TestDetectPrometheusVariant(t *testing.T) {
assert.Equal(t, int64(2), flavors["mimir"])
assert.Equal(t, int64(1), flavors["vanilla"])
assert.Equal(t, int64(1), flavors["cortex-like"])
assert.Equal(t, int64(3), flavors["cortex-like"])
}

@ -11,27 +11,29 @@ import (
)
const (
DS_ACCESS_DIRECT = "direct"
DS_ACCESS_PROXY = "proxy"
DS_ALERTMANAGER = "alertmanager"
DS_AZURE_MONITOR = "grafana-azure-monitor-datasource"
DS_DYNATRACE = "grafana-dynatrace-datasource"
DS_ES = "elasticsearch"
DS_ES_OPEN_DISTRO = "grafana-es-open-distro-datasource"
DS_ES_OPENSEARCH = "grafana-opensearch-datasource"
DS_GRAPHITE = "graphite"
DS_INFLUXDB = "influxdb"
DS_INFLUXDB_08 = "influxdb_08"
DS_JAEGER = "jaeger"
DS_LOKI = "loki"
DS_MSSQL = "mssql"
DS_MYSQL = "mysql"
DS_OPENTSDB = "opentsdb"
DS_POSTGRES = "grafana-postgresql-datasource"
DS_PROMETHEUS = "prometheus"
DS_TEMPO = "tempo"
DS_TESTDATA = "grafana-testdata-datasource"
DS_ZIPKIN = "zipkin"
DS_ACCESS_DIRECT = "direct"
DS_ACCESS_PROXY = "proxy"
DS_ALERTMANAGER = "alertmanager"
DS_AZURE_MONITOR = "grafana-azure-monitor-datasource"
DS_DYNATRACE = "grafana-dynatrace-datasource"
DS_ES = "elasticsearch"
DS_ES_OPEN_DISTRO = "grafana-es-open-distro-datasource"
DS_ES_OPENSEARCH = "grafana-opensearch-datasource"
DS_GRAPHITE = "graphite"
DS_INFLUXDB = "influxdb"
DS_INFLUXDB_08 = "influxdb_08"
DS_JAEGER = "jaeger"
DS_LOKI = "loki"
DS_MSSQL = "mssql"
DS_MYSQL = "mysql"
DS_OPENTSDB = "opentsdb"
DS_POSTGRES = "grafana-postgresql-datasource"
DS_PROMETHEUS = "prometheus"
DS_AMAZON_PROMETHEUS = "grafana-amazonprometheus-datasource"
DS_AZURE_PROMETHEUS = "grafana-azureprometheus-datasource"
DS_TEMPO = "tempo"
DS_TESTDATA = "grafana-testdata-datasource"
DS_ZIPKIN = "zipkin"
// CustomHeaderName is the prefix that is used to store the name of a custom header.
CustomHeaderName = "httpHeaderName"
// CustomHeaderValue is the prefix that is used to store the value of a custom header.

@ -922,7 +922,7 @@ func awsServiceNamespace(dsType string, jsonData *simplejson.Json) string {
} else {
return "es"
}
case datasources.DS_PROMETHEUS, datasources.DS_ALERTMANAGER:
case datasources.DS_PROMETHEUS, datasources.DS_AMAZON_PROMETHEUS, datasources.DS_ALERTMANAGER:
return "aps"
default:
panic(fmt.Sprintf("Unsupported datasource %q", dsType))

@ -1000,6 +1000,11 @@ func TestService_awsServiceNamespace(t *testing.T) {
givenDs: datasources.DS_PROMETHEUS,
givenJson: `{ "sigV4Auth": true, "serverless": true }`,
want: "aps",
}, {
desc: "amazon prometheus",
givenDs: datasources.DS_AMAZON_PROMETHEUS,
givenJson: `{ "sigV4Auth": true }`,
want: "aps",
}, {
desc: "alertmanager",
givenDs: datasources.DS_ALERTMANAGER,
@ -1011,6 +1016,12 @@ func TestService_awsServiceNamespace(t *testing.T) {
givenJson: `{ "sigV4Auth": true, "serverless": true }`,
want: "aps",
panic: true,
}, {
desc: "azure prometheus",
givenDs: datasources.DS_AZURE_PROMETHEUS,
givenJson: `{ "sigV4Auth": true }`,
want: "aps",
panic: true,
},
}
for _, tc := range testCases {

@ -7,6 +7,7 @@ import (
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/infra/log"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/datasources"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/web"
)
@ -15,16 +16,16 @@ type promEndpoints struct {
rules, alerts string
}
var dsTypeToLotexRoutes = map[string]promEndpoints{
"prometheus": {
var (
prometheusEndpoints = promEndpoints{
rules: "/api/v1/rules",
alerts: "/api/v1/alerts",
},
"loki": {
}
lokiEndpoints = promEndpoints{
rules: "/prometheus/api/v1/rules",
alerts: "/prometheus/api/v1/alerts",
},
}
}
)
type LotexProm struct {
log log.Logger
@ -91,9 +92,15 @@ func (p *LotexProm) getEndpoints(ctx *contextmodel.ReqContext) (*promEndpoints,
return nil, fmt.Errorf("URL for this data source is empty")
}
routes, ok := dsTypeToLotexRoutes[ds.Type]
if !ok {
return nil, fmt.Errorf("unexpected datasource type. expecting loki or prometheus")
var routes promEndpoints
switch {
case isPrometheusCompatible(ds.Type):
routes = prometheusEndpoints
case ds.Type == datasources.DS_LOKI:
routes = lokiEndpoints
default:
return nil, unexpectedDatasourceTypeError(ds.Type, "loki, prometheus, amazon prometheus, azure prometheus")
}
return &routes, nil
}

@ -0,0 +1,98 @@
package api
import (
"errors"
"net/http"
"testing"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/datasourceproxy"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/web"
)
func TestLotexProm_GetEndpoints(t *testing.T) {
tc := []struct {
name string
namedParams map[string]string
datasourceCache datasources.CacheService
expectedRoutes *promEndpoints
err error
}{
{
name: "with an empty datasource UID",
namedParams: map[string]string{":DatasourceUID": ""},
err: errors.New("datasource UID is invalid"),
},
{
name: "with an error while trying to fetch the datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{err: datasources.ErrDataSourceNotFound},
err: errors.New("data source not found"),
},
{
name: "with an empty datasource URL",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{}},
err: errors.New("URL for this data source is empty"),
},
{
name: "with an unsupported datasource type",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: "unsupported-type"}},
err: errors.New("unexpected datasource type 'unsupported-type', expected loki, prometheus, amazon prometheus, azure prometheus"),
},
{
name: "with a Loki datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_LOKI}},
expectedRoutes: &lokiEndpoints,
err: nil,
},
{
name: "with a Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://prom.com", Type: datasources.DS_PROMETHEUS}},
expectedRoutes: &prometheusEndpoints,
err: nil,
},
{
name: "with an Amazon Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://amp.com", Type: datasources.DS_AMAZON_PROMETHEUS}},
expectedRoutes: &prometheusEndpoints,
err: nil,
},
{
name: "with an Azure Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://azp.com", Type: datasources.DS_AZURE_PROMETHEUS}},
expectedRoutes: &prometheusEndpoints,
err: nil,
},
}
for _, tt := range tc {
t.Run(tt.name, func(t *testing.T) {
proxy := &AlertingProxy{DataProxy: &datasourceproxy.DataSourceProxyService{DataSourceCache: tt.datasourceCache}}
prom := &LotexProm{AlertingProxy: proxy, log: log.NewNopLogger()}
// Setup request context.
httpReq, err := http.NewRequest(http.MethodGet, "http://grafanacloud.com", nil)
require.NoError(t, err)
ctx := &contextmodel.ReqContext{Context: &web.Context{Req: web.SetURLParams(httpReq, tt.namedParams)}}
endpoints, err := prom.getEndpoints(ctx)
if tt.err != nil {
require.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, tt.expectedRoutes, endpoints)
}
})
}
}

@ -12,6 +12,7 @@ import (
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/infra/log"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/datasources"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/web"
)
@ -23,9 +24,6 @@ const (
)
const (
PrometheusDatasourceType = "prometheus"
LokiDatasourceType = "loki"
mimirPrefix = "/config/v1/rules"
prometheusPrefix = "/rules"
lokiPrefix = "/api/prom/rules"
@ -33,11 +31,6 @@ const (
subtypeQuery = "subtype"
)
var dsTypeToRulerPrefix = map[string]string{
PrometheusDatasourceType: prometheusPrefix,
LokiDatasourceType: lokiPrefix,
}
var subtypeToPrefix = map[string]string{
Prometheus: prometheusPrefix,
Cortex: prometheusPrefix,
@ -237,13 +230,18 @@ func (r *LotexRuler) validateAndGetPrefix(ctx *contextmodel.ReqContext) (string,
return "", fmt.Errorf("URL for this data source is empty")
}
prefix, ok := dsTypeToRulerPrefix[ds.Type]
if !ok {
return "", fmt.Errorf("unexpected datasource type. expecting loki or prometheus")
var prefix string
switch {
case isPrometheusCompatible(ds.Type):
prefix = prometheusPrefix
case ds.Type == datasources.DS_LOKI:
prefix = lokiPrefix
default:
return "", unexpectedDatasourceTypeError(ds.Type, "loki, prometheus, amazon prometheus, azure prometheus")
}
// If the datasource is Loki, there's nothing else for us to do - it doesn't have subtypes.
if ds.Type == LokiDatasourceType {
if ds.Type == datasources.DS_LOKI {
return prefix, nil
}

@ -49,48 +49,74 @@ func TestLotexRuler_ValidateAndGetPrefix(t *testing.T) {
{
name: "with an unsupported datasource type",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com"}},
err: errors.New("unexpected datasource type. expecting loki or prometheus"),
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: "unsupported-type"}},
err: errors.New("unexpected datasource type 'unsupported-type', expected loki, prometheus, amazon prometheus, azure prometheus"),
},
{
name: "with a Loki datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: LokiDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_LOKI}},
expected: "/api/prom/rules",
},
{
name: "with a Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: PrometheusDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_PROMETHEUS}},
expected: "/rules",
},
{
name: "with an Amazon Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://amp.com", Type: datasources.DS_AMAZON_PROMETHEUS}},
expected: "/rules",
},
{
name: "with an Azure Prometheus datasource",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://azp.com", Type: datasources.DS_AZURE_PROMETHEUS}},
expected: "/rules",
},
{
name: "with a Prometheus datasource and subtype of Cortex",
namedParams: map[string]string{":DatasourceUID": "d164"},
urlParams: "?subtype=cortex",
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: PrometheusDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_PROMETHEUS}},
expected: "/rules",
},
{
name: "with a Prometheus datasource and subtype of Mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
urlParams: "?subtype=mimir",
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: PrometheusDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_PROMETHEUS}},
expected: "/config/v1/rules",
},
{
name: "with a Prometheus datasource and subtype of Prometheus",
namedParams: map[string]string{":DatasourceUID": "d164"},
urlParams: "?subtype=prometheus",
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: PrometheusDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_PROMETHEUS}},
expected: "/rules",
},
{
name: "with a Prometheus datasource and no subtype",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: PrometheusDatasourceType}},
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://loki.com", Type: datasources.DS_PROMETHEUS}},
expected: "/rules",
},
{
name: "with an Amazon Prometheus datasource and subtype of Mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
urlParams: "?subtype=mimir",
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://amp.com", Type: datasources.DS_AMAZON_PROMETHEUS}},
expected: "/config/v1/rules",
},
{
name: "with an Azure Prometheus datasource and subtype of Mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
urlParams: "?subtype=mimir",
datasourceCache: fakeCacheService{datasource: &datasources.DataSource{URL: "http://azp.com", Type: datasources.DS_AZURE_PROMETHEUS}},
expected: "/config/v1/rules",
},
}
for _, tt := range tc {
@ -149,7 +175,7 @@ func TestLotexRuler_RouteDeleteNamespaceRulesConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace%2Fwith%2Fslashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
{
name: "with a namespace that does not need to be escaped",
@ -157,7 +183,7 @@ func TestLotexRuler_RouteDeleteNamespaceRulesConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace_without_slashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
}
@ -210,7 +236,7 @@ func TestLotexRuler_RouteDeleteRuleGroupConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace%2Fwith%2Fslashes/group%2Fwith%2Fslashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
{
name: "with a namespace that does not need to be escaped",
@ -219,7 +245,7 @@ func TestLotexRuler_RouteDeleteRuleGroupConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace_without_slashes/group_without_slashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
}
@ -271,7 +297,7 @@ func TestLotexRuler_RouteGetNamespaceRulesConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace%2Fwith%2Fslashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
{
name: "with a namespace that does not need to be escaped",
@ -279,7 +305,7 @@ func TestLotexRuler_RouteGetNamespaceRulesConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace_without_slashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
}
@ -332,7 +358,7 @@ func TestLotexRuler_RouteGetRulegGroupConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace%2Fwith%2Fslashes/group%2Fwith%2Fslashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
{
name: "with a namespace that does not need to be escaped",
@ -341,7 +367,7 @@ func TestLotexRuler_RouteGetRulegGroupConfig(t *testing.T) {
expected: "http://mimir.com/config/v1/rules/namespace_without_slashes/group_without_slashes?subtype=mimir",
urlParams: "?subtype=mimir",
namedParams: map[string]string{":DatasourceUID": "d164"},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: PrometheusDatasourceType},
datasource: &datasources.DataSource{URL: "http://mimir.com", Type: datasources.DS_PROMETHEUS},
},
}

@ -31,7 +31,28 @@ const (
groupQueryTag = "QUERY_GROUP"
)
var searchRegex = regexp.MustCompile(`\{(\w+)\}`)
var (
searchRegex = regexp.MustCompile(`\{(\w+)\}`)
prometheusCompatibleDsTypes = []string{
datasources.DS_PROMETHEUS,
datasources.DS_AMAZON_PROMETHEUS,
datasources.DS_AZURE_PROMETHEUS,
}
)
func isPrometheusCompatible(dsType string) bool {
for _, t := range prometheusCompatibleDsTypes {
if dsType == t {
return true
}
}
return false
}
func isLotexRulerCompatible(dsType string) bool {
return dsType == datasources.DS_LOKI || isPrometheusCompatible(dsType)
}
func toMacaronPath(path string) string {
return string(searchRegex.ReplaceAllFunc([]byte(path), func(s []byte) []byte {
@ -52,8 +73,8 @@ func getDatasourceByUID(ctx *contextmodel.ReqContext, cache datasources.CacheSer
return nil, unexpectedDatasourceTypeError(ds.Type, "alertmanager")
}
case apimodels.LoTexRulerBackend:
if ds.Type != "loki" && ds.Type != "prometheus" {
return nil, unexpectedDatasourceTypeError(ds.Type, "loki, prometheus")
if !isLotexRulerCompatible(ds.Type) {
return nil, unexpectedDatasourceTypeError(ds.Type, "loki, prometheus, amazon prometheus, azure prometheus")
}
default:
return nil, unexpectedDatasourceTypeError(ds.Type, expectedType.String())

@ -13,6 +13,7 @@ import (
accesscontrolmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
"github.com/grafana/grafana/pkg/services/auth"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
models2 "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/org"
@ -176,3 +177,85 @@ func (r *recordingConditionValidator) Validate(_ eval.EvaluationContext, conditi
}
var _ ConditionValidator = &recordingConditionValidator{}
func TestIsPrometheusCompatible(t *testing.T) {
testCases := []struct {
name string
dsType string
expected bool
}{
{
name: "prometheus datasource should be compatible",
dsType: datasources.DS_PROMETHEUS,
expected: true,
},
{
name: "amazon prometheus datasource should be compatible",
dsType: datasources.DS_AMAZON_PROMETHEUS,
expected: true,
},
{
name: "azure prometheus datasource should be compatible",
dsType: datasources.DS_AZURE_PROMETHEUS,
expected: true,
},
{
name: "loki datasource should not be prometheus compatible",
dsType: datasources.DS_LOKI,
expected: false,
},
{
name: "other datasource types should not be compatible",
dsType: "some-other-datasource",
expected: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result := isPrometheusCompatible(tc.dsType)
assert.Equal(t, tc.expected, result)
})
}
}
func TestIsLotexRulerCompatible(t *testing.T) {
testCases := []struct {
name string
dsType string
expected bool
}{
{
name: "prometheus datasource should be compatible",
dsType: datasources.DS_PROMETHEUS,
expected: true,
},
{
name: "amazon prometheus datasource should be compatible",
dsType: datasources.DS_AMAZON_PROMETHEUS,
expected: true,
},
{
name: "azure prometheus datasource should be compatible",
dsType: datasources.DS_AZURE_PROMETHEUS,
expected: true,
},
{
name: "loki datasource should be compatible",
dsType: datasources.DS_LOKI,
expected: true,
},
{
name: "other datasource types should not be compatible",
dsType: "some-other-datasource",
expected: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result := isLotexRulerCompatible(tc.dsType)
assert.Equal(t, tc.expected, result)
})
}
}

@ -9,7 +9,14 @@ import {
} from 'app/types/unified-alerting-dto';
import { RULER_NOT_SUPPORTED_MSG } from '../utils/constants';
import { GRAFANA_RULES_SOURCE_NAME, getDataSourceByName, getRulesDataSourceByUID } from '../utils/datasource';
import {
GRAFANA_RULES_SOURCE_NAME,
SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES,
SupportedExternalRulesSourceType,
getDataSourceByName,
getRulesDataSourceByUID,
isSupportedExternalRulesSourceType,
} from '../utils/datasource';
import { fetchRules } from './prometheus';
import { fetchTestRulerRulesGroup } from './ruler';
@ -34,8 +41,10 @@ export async function discoverFeaturesByUid(dataSourceUid: string): Promise<Prom
throw new Error(`The data source url cannot be empty.`);
}
if (type !== 'prometheus' && type !== 'loki') {
throw new Error(`The build info request is not available for ${type}. Only 'prometheus' and 'loki' are supported`);
if (!isSupportedExternalRulesSourceType(type)) {
throw new Error(
`The build info request is not available for ${type}. Supported values are ${SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES.join()}.`
);
}
return discoverDataSourceFeatures({ name, url, type });
@ -52,7 +61,7 @@ export async function discoverFeaturesByUid(dataSourceUid: string): Promise<Prom
export async function discoverDataSourceFeatures(dsSettings: {
url: string;
name: string;
type: 'prometheus' | 'loki';
type: SupportedExternalRulesSourceType;
}): Promise<PromApiFeatures> {
const { url, name, type } = dsSettings;

@ -9,6 +9,8 @@ import { getDataSourceSrv } from '@grafana/runtime';
import { Alert, Button, useStyles2 } from '@grafana/ui';
import { LokiQuery } from 'app/plugins/datasource/loki/types';
import { isSupportedExternalRulesSourceType } from '../../utils/datasource';
import { CloudAlertPreview } from './CloudAlertPreview';
import { usePreview } from './PreviewRule';
@ -122,16 +124,17 @@ type QueryMappers<T extends DataQuery = DataQuery> = {
export function useQueryMappers(dataSourceName: string): QueryMappers {
return useMemo(() => {
const settings = getDataSourceSrv().getInstanceSettings(dataSourceName);
if (!settings) {
throw new Error(`Datasource ${dataSourceName} not found`);
}
switch (settings?.type) {
case 'loki':
case 'prometheus':
return {
mapToValue: (query: DataQuery) => (query as PromQuery | LokiQuery).expr,
mapToQuery: (existing: DataQuery, value: string | undefined) => ({ ...existing, expr: value }),
};
default:
throw new Error(`${dataSourceName} is not supported as an expression editor`);
if (!isSupportedExternalRulesSourceType(settings.type)) {
throw new Error(`${settings.type} is not supported as an expression editor`);
}
return {
mapToValue: (query: DataQuery) => (query as PromQuery | LokiQuery).expr,
mapToQuery: (existing: DataQuery, value: string | undefined) => ({ ...existing, expr: value }),
};
}, [dataSourceName]);
}

@ -4,7 +4,7 @@ import * as React from 'react';
import { DataSourceInstanceSettings } from '@grafana/data';
import { AlertDataQuery } from 'app/types/unified-alerting-dto';
import { DataSourceType } from '../../../../utils/datasource';
import { DataSourceType, isSupportedExternalPrometheusFlavoredRulesSourceType } from '../../../../utils/datasource';
import { isPromOrLokiQuery } from '../../../../utils/rule-form';
import { SQLQueryPreview, isSQLLikeQuery } from './SQLQueryPreview';
@ -18,7 +18,7 @@ interface DatasourceModelPreviewProps {
}
function DatasourceModelPreview({ model, dataSource: datasource }: DatasourceModelPreviewProps): React.ReactNode {
if (datasource.type === DataSourceType.Prometheus && isPromOrLokiQuery(model)) {
if (isSupportedExternalPrometheusFlavoredRulesSourceType(datasource.type) && isPromOrLokiQuery(model)) {
return <PrometheusQueryPreview query={model.expr} />;
}

@ -10,7 +10,10 @@ import { MANUAL_ROUTING_KEY, getDefaultQueries } from '../utils/rule-form';
import { formValuesFromQueryParams, getDefaultFormValues, getDefautManualRouting } from './formDefaults';
import { isAlertQueryOfAlertData } from './formProcessing';
jest.mock('../utils/datasource');
jest.mock('../utils/datasource', () => ({
...jest.requireActual('../utils/datasource'),
getDefaultOrFirstCompatibleDataSource: jest.fn(),
}));
const mocks = {
getDefaultOrFirstCompatibleDataSource: jest.mocked(getDefaultOrFirstCompatibleDataSource),

@ -8,7 +8,7 @@ import { AlertDataQuery, AlertQuery } from 'app/types/unified-alerting-dto';
import { SimpleConditionIdentifier } from '../components/rule-editor/query-and-alert-condition/SimpleCondition';
import { KVObject, RuleFormValues } from '../types/rule-form';
import { defaultAnnotations } from '../utils/constants';
import { DataSourceType } from '../utils/datasource';
import { isSupportedExternalRulesSourceType } from '../utils/datasource';
export function setQueryEditorSettings(values: RuleFormValues): RuleFormValues {
const isQuerySwitchModeEnabled = config.featureToggles.alertingQueryAndExpressionsStepMode ?? false;
@ -47,9 +47,9 @@ export function setInstantOrRange(values: RuleFormValues): RuleFormValues {
return query;
}
// data query
const defaultToInstant =
query.model.datasource?.type === DataSourceType.Loki ||
query.model.datasource?.type === DataSourceType.Prometheus;
const defaultToInstant = query.model.datasource?.type
? isSupportedExternalRulesSourceType(query.model.datasource.type)
: false;
const isInstant =
'instant' in query.model && query.model.instant !== undefined ? query.model.instant : defaultToInstant;
return {

@ -35,10 +35,15 @@ export const GrafanaRulesSource: GrafanaRulesSourceIdentifier = {
ruleSourceType: 'grafana',
};
/**
* @deprecated use "SupportedRulesSourceType" and related types instead
*/
export enum DataSourceType {
Alertmanager = 'alertmanager',
Loki = 'loki',
Prometheus = 'prometheus',
AmazonPrometheus = 'grafana-amazonprometheus-datasource',
AzurePrometheus = 'grafana-azureprometheus-datasource',
}
export interface AlertManagerDataSource {
@ -49,8 +54,6 @@ export interface AlertManagerDataSource {
handleGrafanaManagedAlerts?: boolean;
}
export const RulesDataSourceTypes: string[] = [DataSourceType.Loki, DataSourceType.Prometheus];
export function getRulesDataSources() {
const hasReadPermission = contextSrv.hasPermission(AccessControlAction.AlertingRuleExternalRead);
const hasWritePermission = contextSrv.hasPermission(AccessControlAction.AlertingRuleExternalWrite);
@ -59,7 +62,7 @@ export function getRulesDataSources() {
}
return getAllDataSources()
.filter((ds) => RulesDataSourceTypes.includes(ds.type))
.filter((ds) => isSupportedExternalRulesSourceType(ds.type))
.filter((ds) => isDataSourceManagingAlerts(ds))
.sort((a, b) => a.name.localeCompare(b.name));
}
@ -200,17 +203,6 @@ export function getAlertManagerDataSourcesByPermission(permission: 'instance' |
return { availableInternalDataSources, availableExternalDataSources };
}
export function getLotexDataSourceByName(dataSourceName: string): DataSourceInstanceSettings {
const dataSource = getDataSourceByName(dataSourceName);
if (!dataSource) {
throw new Error(`Data source ${dataSourceName} not found`);
}
if (dataSource.type !== DataSourceType.Loki && dataSource.type !== DataSourceType.Prometheus) {
throw new Error(`Unexpected data source type ${dataSource.type}`);
}
return dataSource;
}
export function getAllRulesSourceNames(): string[] {
const availableRulesSources: string[] = getRulesDataSources().map((r) => r.name);
@ -341,3 +333,43 @@ export function getDefaultOrFirstCompatibleDataSource(): DataSourceInstanceSetti
export function isDataSourceManagingAlerts(ds: DataSourceInstanceSettings<DataSourceJsonData>) {
return ds.jsonData.manageAlerts !== false; //if this prop is undefined it defaults to true
}
/**
* Check if the given type is a supported external Prometheus flavored rules source type.
*/
export function isSupportedExternalPrometheusFlavoredRulesSourceType(
type: string
): type is SupportedExternalPrometheusFlavoredRulesSourceType {
return SUPPORTED_EXTERNAL_PROMETHEUS_FLAVORED_RULE_SOURCE_TYPES.find((t) => t === type) !== undefined;
}
export const SUPPORTED_EXTERNAL_PROMETHEUS_FLAVORED_RULE_SOURCE_TYPES = [
'prometheus',
'grafana-amazonprometheus-datasource',
'grafana-azureprometheus-datasource',
] as const;
export type SupportedExternalPrometheusFlavoredRulesSourceType =
(typeof SUPPORTED_EXTERNAL_PROMETHEUS_FLAVORED_RULE_SOURCE_TYPES)[number]; // infer the type from the tuple above so we can maintain a single source of truth
/**
* Check if the given type is a supported external rules source type. Includes Loki and Prometheus flavored types.
*/
export function isSupportedExternalRulesSourceType(type: string): type is SupportedExternalRulesSourceType {
return SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES.find((t) => t === type) !== undefined;
}
export type SupportedExternalRulesSourceType = 'loki' | SupportedExternalPrometheusFlavoredRulesSourceType;
export const SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES = [
'loki',
...SUPPORTED_EXTERNAL_PROMETHEUS_FLAVORED_RULE_SOURCE_TYPES,
] as const;
/**
* Check if the given type is a supported rules source type. Includes "grafana" for Grafana Managed Rules.
*/
export function isSupportedRulesSourceType(type: string): type is SupportedRulesSourceType {
return type === GRAFANA_RULES_SOURCE_NAME || isSupportedExternalRulesSourceType(type);
}
export type SupportedRulesSourceType = 'grafana' | SupportedExternalRulesSourceType;
export const SUPPORTED_RULE_SOURCE_TYPES = [
GRAFANA_RULES_SOURCE_NAME,
...SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES,
] as const satisfies string[];
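
A hedged sketch of how the new guards and constants are meant to be consumed; the describeRulesSource function is hypothetical, and only the imported names come from the file above:

// Hypothetical consumer; the import path mirrors the file location shown in this diff.
import {
  SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES,
  isSupportedExternalPrometheusFlavoredRulesSourceType,
  isSupportedExternalRulesSourceType,
} from 'app/features/alerting/unified/utils/datasource';

function describeRulesSource(dsType: string): string {
  if (!isSupportedExternalRulesSourceType(dsType)) {
    return `unsupported (expected one of ${SUPPORTED_EXTERNAL_RULE_SOURCE_TYPES.join(', ')})`;
  }
  // dsType is narrowed to SupportedExternalRulesSourceType here
  return isSupportedExternalPrometheusFlavoredRulesSourceType(dsType)
    ? 'prometheus-flavored rules source'
    : 'loki rules source';
}

describeRulesSource('grafana-amazonprometheus-datasource'); // 'prometheus-flavored rules source'
describeRulesSource('influxdb'); // 'unsupported (...)'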

@ -7,7 +7,7 @@ import { LokiQuery } from 'app/plugins/datasource/loki/types';
import { CombinedRule } from 'app/types/unified-alerting';
import { AlertQuery } from 'app/types/unified-alerting-dto';
import { isCloudRulesSource } from './datasource';
import { isCloudRulesSource, isSupportedExternalRulesSourceType } from './datasource';
import { rulerRuleType } from './rules';
import { safeParsePrometheusDuration } from './time';
@ -77,28 +77,14 @@ export function dataQueryToAlertQuery(dataQuery: DataQuery, dataSourceUid: strin
}
function cloudAlertRuleToModel(dsSettings: DataSourceInstanceSettings, rule: CombinedRule): DataQuery {
const refId = 'A';
switch (dsSettings.type) {
case 'prometheus': {
const query: PromQuery = {
refId,
expr: rule.query,
};
return query;
}
case 'loki': {
const query: LokiQuery = {
refId,
expr: rule.query,
};
if (!isSupportedExternalRulesSourceType(dsSettings.type)) {
throw new Error(`Query for datasource type ${dsSettings.type} is currently not supported by cloud alert rules.`);
}
return query;
}
const query: LokiQuery | PromQuery = {
refId: 'A',
expr: rule.query,
};
default:
throw new Error(`Query for datasource type ${dsSettings.type} is currently not supported by cloud alert rules.`);
}
return query;
}

@ -52,10 +52,11 @@ import {
import { Annotation } from './constants';
import {
DataSourceType,
GRAFANA_RULES_SOURCE_NAME,
getDefaultOrFirstCompatibleDataSource,
isGrafanaRulesSource,
isSupportedExternalPrometheusFlavoredRulesSourceType,
isSupportedExternalRulesSourceType,
} from './datasource';
import { arrayToRecord, recordToArray } from './misc';
import { isGrafanaAlertingRuleByType, isGrafanaRecordingRuleByType, rulerRuleType } from './rules';
@ -416,7 +417,7 @@ export const getDefaultQueries = (isRecordingRule = false): AlertQuery[] => {
const relativeTimeRange = getDefaultRelativeTimeRange();
const expressions = isRecordingRule ? getDefaultExpressionsForRecording('B') : getDefaultExpressions('B', 'C');
const isLokiOrPrometheus = dataSource?.type === DataSourceType.Prometheus || dataSource?.type === DataSourceType.Loki;
const isLokiOrPrometheus = dataSource ? isSupportedExternalRulesSourceType(dataSource.type) : false;
return [
{
refId: 'A',
@ -803,22 +804,19 @@ export function isPromOrLokiQuery(model: AlertDataQuery): model is PromOrLokiQue
}
export function getInstantFromDataQuery(model: AlertDataQuery, type: string): boolean | undefined {
// if the datasource is not prometheus or loki, instant is defined in the model or defaults to undefined
if (type !== DataSourceType.Prometheus && type !== DataSourceType.Loki) {
if ('instant' in model) {
return model.instant;
} else {
if ('queryType' in model) {
return model.queryType === 'instant';
} else {
return undefined;
}
}
if (!type) {
return undefined;
}
// if the datasource is not a supported prometheus flavor or loki, return "undefined"
if (!isSupportedExternalRulesSourceType(type)) {
return undefined;
}
// if the datasource is prometheus or loki, instant is defined in the model, or defaults to true
const isInstantForPrometheus = 'instant' in model && model.instant !== undefined ? model.instant : true;
const isInstantForLoki = 'queryType' in model && model.queryType !== undefined ? model.queryType === 'instant' : true;
const isInstant = type === DataSourceType.Prometheus ? isInstantForPrometheus : isInstantForLoki;
const isPrometheusFlavoredDataSourceType = isSupportedExternalPrometheusFlavoredRulesSourceType(type);
const isInstant = isPrometheusFlavoredDataSourceType ? isInstantForPrometheus : isInstantForLoki;
return isInstant;
}
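
As a sanity check on the rewritten defaulting rules, a standalone sketch; SketchModel and instantDefault are illustrative names, not part of the change:

// Mirrors the logic above with a simplified model type.
type SketchModel = { instant?: boolean; queryType?: string };

const PROM_FLAVORS = ['prometheus', 'grafana-amazonprometheus-datasource', 'grafana-azureprometheus-datasource'];

function instantDefault(model: SketchModel, dsType: string): boolean | undefined {
  if (![...PROM_FLAVORS, 'loki'].includes(dsType)) {
    return undefined; // not a supported external rules source type
  }
  if (PROM_FLAVORS.includes(dsType)) {
    return model.instant ?? true; // prometheus flavors default to instant
  }
  return model.queryType !== undefined ? model.queryType === 'instant' : true; // loki
}

instantDefault({}, 'grafana-azureprometheus-datasource'); // true
instantDefault({ instant: false }, 'prometheus'); // false
instantDefault({ queryType: 'range' }, 'loki'); // false
instantDefault({}, 'influxdb'); // undefined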

@ -1189,7 +1189,14 @@ export class DashboardModel implements TimeModel {
toggleExemplarsForAll() {
for (const panel of this.panels) {
for (const target of panel.targets) {
if (!(target.datasource && target.datasource.type === 'prometheus')) {
if (
!(
target.datasource &&
(target.datasource.type === 'prometheus' ||
target.datasource.type === 'grafana-amazonprometheus-datasource' ||
target.datasource.type === 'grafana-azureprometheus-datasource')
)
) {
continue;
}
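
DashboardModel.ts, DrilldownAlertBox.tsx, and the trails files below all spell the three type IDs out inline. A hypothetical shared guard, sketched only to name the repeated check (it is not part of this commit):

// Hypothetical helper; shown only to make the repeated check explicit.
const PROMETHEUS_FLAVORED_TYPES = [
  'prometheus',
  'grafana-amazonprometheus-datasource',
  'grafana-azureprometheus-datasource',
] as const;

function isPrometheusFlavoredType(type: string | undefined): boolean {
  return type !== undefined && PROMETHEUS_FLAVORED_TYPES.some((t) => t === type);
}

// e.g. the toggleExemplarsForAll loop above would reduce to:
// if (!isPrometheusFlavoredType(target.datasource?.type)) { continue; }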

@ -9,9 +9,14 @@ type Props = {
};
export function DrilldownAlertBox(props: Props) {
const isDsCompatibleWithDrilldown = ['prometheus', 'loki', 'tempo', 'grafana-pyroscope-datasource'].includes(
props.datasourceType
);
const isDsCompatibleWithDrilldown = [
'prometheus',
'grafana-amazonprometheus-datasource',
'grafana-azureprometheus-datasource',
'loki',
'tempo',
'grafana-pyroscope-datasource',
].includes(props.datasourceType);
const [dismissed, setDismissed] = useLocalStorage('grafana.explore.drilldownsBoxDismissed', false);

@ -146,6 +146,24 @@ describe('datasource_srv', () => {
name: 'TestData',
meta: { metrics: true, id: 'grafana-testdata-datasource', aliasIDs: ['testdata'] },
},
Prometheus: {
type: 'prometheus',
name: 'Prometheus',
uid: 'uid-code-prometheus',
meta: { metrics: true, id: 'prometheus' },
},
AmazonPrometheus: {
type: 'grafana-amazonprometheus-datasource',
name: 'Amazon Prometheus',
uid: 'uid-code-amp',
meta: { metrics: true, id: 'grafana-amazonprometheus-datasource' },
},
AzurePrometheus: {
type: 'grafana-azureprometheus-datasource',
name: 'Azure Prometheus',
uid: 'uid-code-azp',
meta: { metrics: true, id: 'grafana-azureprometheus-datasource' },
},
};
describe('Given a list of data sources', () => {
@ -308,7 +326,7 @@ describe('datasource_srv', () => {
describe('when getting external metric sources', () => {
it('should return list of explore sources', () => {
const externalSources = dataSourceSrv.getExternal();
expect(externalSources.length).toBe(8);
expect(externalSources.length).toBe(11);
});
});
@ -348,6 +366,28 @@ describe('datasource_srv', () => {
expect(list[0].name).toBe('Jaeger');
});
it('should include Prometheus flavor data sources when pluginId is prometheus', () => {
const list = dataSourceSrv.getList({ pluginId: 'prometheus' });
expect(list.length).toBe(3);
expect(list[0].name).toBe('Amazon Prometheus');
expect(list[0].type).toBe('grafana-amazonprometheus-datasource');
expect(list[1].name).toBe('Azure Prometheus');
expect(list[1].type).toBe('grafana-azureprometheus-datasource');
expect(list[2].name).toBe('Prometheus');
expect(list[2].type).toBe('prometheus');
});
it('should include compatible Prometheus data sources when pluginId is a flavor of prometheus', () => {
const list = dataSourceSrv.getList({ pluginId: 'grafana-amazonprometheus-datasource' });
expect(list.length).toBe(3);
expect(list[0].name).toBe('Amazon Prometheus');
expect(list[0].type).toBe('grafana-amazonprometheus-datasource');
expect(list[1].name).toBe('Azure Prometheus');
expect(list[1].type).toBe('grafana-azureprometheus-datasource');
expect(list[2].name).toBe('Prometheus');
expect(list[2].type).toBe('prometheus');
});
it('should not include runtime datasources in list', () => {
const list = dataSourceSrv.getList({ pluginId: 'grafana-runtime-datasource' });
expect(list.length).toBe(0);
@ -371,6 +411,24 @@ describe('datasource_srv', () => {
"type": "test-db",
"uid": "uid-code-aaa",
},
{
"meta": {
"id": "grafana-amazonprometheus-datasource",
"metrics": true,
},
"name": "Amazon Prometheus",
"type": "grafana-amazonprometheus-datasource",
"uid": "uid-code-amp",
},
{
"meta": {
"id": "grafana-azureprometheus-datasource",
"metrics": true,
},
"name": "Azure Prometheus",
"type": "grafana-azureprometheus-datasource",
"uid": "uid-code-azp",
},
{
"isDefault": true,
"meta": {
@ -397,6 +455,15 @@ describe('datasource_srv', () => {
"type": "test-db",
"uid": "uid-code-mmm",
},
{
"meta": {
"id": "prometheus",
"metrics": true,
},
"name": "Prometheus",
"type": "prometheus",
"uid": "uid-code-prometheus",
},
{
"meta": {
"aliasIDs": [

@ -30,7 +30,11 @@ export async function addDataTrailPanelAction(dashboard: DashboardScene, panel:
return;
}
if (datasource.type !== 'prometheus') {
if (
datasource.type !== 'prometheus' &&
datasource.type !== 'grafana-amazonprometheus-datasource' &&
datasource.type !== 'grafana-azureprometheus-datasource'
) {
return;
}

@ -117,7 +117,9 @@ export function getDatasourceForNewTrail(): string | undefined {
return prevDataSource;
}
}
const promDatasources = getDatasourceSrv().getList({ type: 'prometheus' });
const promDatasources = getDatasourceSrv().getList({
type: ['prometheus', 'grafana-amazonprometheus-datasource', 'grafana-azureprometheus-datasource'],
});
if (promDatasources.length > 0) {
const defaultDatasource = promDatasources.find((mds) => mds.isDefault);

@ -4,7 +4,10 @@ import { OldFolderPicker } from 'app/core/components/Select/OldFolderPicker';
import { DataSourcePicker } from 'app/features/datasources/components/picker/DataSourcePicker';
import { PermissionLevelString } from 'app/types';
import { GRAFANA_DATASOURCE_NAME } from '../../../features/alerting/unified/utils/datasource';
import {
GRAFANA_DATASOURCE_NAME,
SUPPORTED_RULE_SOURCE_TYPES,
} from '../../../features/alerting/unified/utils/datasource';
import { GroupBy } from './GroupByWithLoading';
import { UnifiedAlertListPanel } from './UnifiedAlertList';
@ -112,7 +115,7 @@ const unifiedAlertList = new PanelPlugin<UnifiedAlertListOptions>(UnifiedAlertLi
<Stack gap={1}>
<DataSourcePicker
{...props}
type={['prometheus', 'loki', 'grafana']}
type={SUPPORTED_RULE_SOURCE_TYPES}
noDefault
current={props.value}
onChange={(ds: DataSourceInstanceSettings) => props.onChange(ds.name)}
