mirror of https://github.com/grafana/grafana
Prometheus: Implement Streaming JSON Parser (#48477)
use the `prometheusStreamingJSONParser` feature toggle to enable (pull/48978/head)
parent
e528f2e430
commit
87e8521591
@ -0,0 +1,55 @@ |
|||||||
|
package client |
||||||
|
|
||||||
|
import ( |
||||||
|
"sort" |
||||||
|
"strings" |
||||||
|
|
||||||
|
lru "github.com/hashicorp/golang-lru" |
||||||
|
) |
||||||
|
|
||||||
|
// ProviderCache wraps a promClientProvider with an LRU cache so that
// repeated requests carrying the same auth headers reuse one *Client.
type ProviderCache struct {
	provider promClientProvider // delegate that builds clients on a cache miss
	cache    *lru.Cache         // header-derived key -> *Client
}

// promClientProvider is the consumer-side seam for anything that can
// build a Prometheus *Client from a set of request headers.
type promClientProvider interface {
	GetClient(map[string]string) (*Client, error)
}
||||||
|
|
||||||
|
func NewProviderCache(p promClientProvider) (*ProviderCache, error) { |
||||||
|
cache, err := lru.New(500) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
return &ProviderCache{ |
||||||
|
provider: p, |
||||||
|
cache: cache, |
||||||
|
}, nil |
||||||
|
} |
||||||
|
|
||||||
|
func (c *ProviderCache) GetClient(headers map[string]string) (*Client, error) { |
||||||
|
key := c.key(headers) |
||||||
|
if client, ok := c.cache.Get(key); ok { |
||||||
|
return client.(*Client), nil |
||||||
|
} |
||||||
|
|
||||||
|
client, err := c.provider.GetClient(headers) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
c.cache.Add(key, client) |
||||||
|
return client, nil |
||||||
|
} |
||||||
|
|
||||||
|
func (c *ProviderCache) key(headers map[string]string) string { |
||||||
|
vals := make([]string, len(headers)) |
||||||
|
var i int |
||||||
|
for _, v := range headers { |
||||||
|
vals[i] = v |
||||||
|
i++ |
||||||
|
} |
||||||
|
sort.Strings(vals) |
||||||
|
return strings.Join(vals, "") |
||||||
|
} |
||||||
@ -0,0 +1,135 @@ |
|||||||
|
package client_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"errors" |
||||||
|
"io/ioutil" |
||||||
|
"net/http" |
||||||
|
"sort" |
||||||
|
"strings" |
||||||
|
"testing" |
||||||
|
|
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/client" |
||||||
|
|
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// TestCache_GetClient exercises ProviderCache's caching behavior:
// hits, misses and error pass-through.
func TestCache_GetClient(t *testing.T) {
	t.Run("it caches the client for a set of auth headers", func(t *testing.T) {
		tc := setupCacheContext()

		c, err := tc.providerCache.GetClient(headers)
		require.Nil(t, err)

		c2, err := tc.providerCache.GetClient(headers)
		require.Nil(t, err)

		// Same headers -> same cached client; provider consulted only once.
		require.Equal(t, c, c2)
		require.Equal(t, 1, tc.clientProvider.numCalls)
	})

	t.Run("it returns different clients when the headers differ", func(t *testing.T) {
		tc := setupCacheContext()
		h1 := map[string]string{"Authorization": "token", "X-ID-Token": "id-token"}
		h2 := map[string]string{"Authorization": "token2", "X-ID-Token": "id-token"}

		c, err := tc.providerCache.GetClient(h1)
		require.Nil(t, err)

		c2, err := tc.providerCache.GetClient(h2)
		require.Nil(t, err)

		// Distinct headers miss the cache and reach the provider each time.
		require.NotEqual(t, c, c2)
		require.Equal(t, 2, tc.clientProvider.numCalls)
	})

	t.Run("it returns from the cache when headers are the same", func(t *testing.T) {
		tc := setupCacheContext()
		h1 := map[string]string{"Authorization": "token", "X-ID-Token": "id-token"}
		h2 := map[string]string{"Authorization": "token", "X-ID-Token": "id-token"}

		c, err := tc.providerCache.GetClient(h1)
		require.Nil(t, err)

		c2, err := tc.providerCache.GetClient(h2)
		require.Nil(t, err)

		// Equal (but not identical) maps must hit the same cache entry.
		require.Equal(t, c, c2)
		require.Equal(t, 1, tc.clientProvider.numCalls)
	})

	t.Run("it doesn't cache anything when an error occurs", func(t *testing.T) {
		tc := setupCacheContext()
		tc.clientProvider.errors <- errors.New("something bad")

		_, err := tc.providerCache.GetClient(headers)
		require.EqualError(t, err, "something bad")

		// The failed lookup must not poison the cache: the retry succeeds
		// and reaches the provider a second time.
		c, err := tc.providerCache.GetClient(headers)
		require.Nil(t, err)

		require.NotNil(t, c)
		require.Equal(t, 2, tc.clientProvider.numCalls)
	})
}
||||||
|
|
||||||
|
// cacheTestContext bundles the cache under test with the fake provider
// behind it, so tests can assert on both sides.
type cacheTestContext struct {
	providerCache *client.ProviderCache
	clientProvider *fakeClientProvider
}
||||||
|
|
||||||
|
func setupCacheContext() *cacheTestContext { |
||||||
|
fp := newFakePromClientProvider() |
||||||
|
p, err := client.NewProviderCache(fp) |
||||||
|
if err != nil { |
||||||
|
panic(err) |
||||||
|
} |
||||||
|
|
||||||
|
return &cacheTestContext{ |
||||||
|
providerCache: p, |
||||||
|
clientProvider: fp, |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
func newFakePromClientProvider() *fakeClientProvider { |
||||||
|
return &fakeClientProvider{ |
||||||
|
errors: make(chan error, 1), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// fakeClientProvider records GetClient calls and can be primed with a
// single error via the buffered errors channel.
type fakeClientProvider struct {
	headers  map[string]string // headers from the most recent call
	numCalls int               // total number of GetClient invocations
	errors   chan error        // buffered (1); drained non-blockingly per call
}
||||||
|
|
||||||
|
// GetClient records the call and fabricates a client whose response body
// encodes the sorted header values, so tests can tell clients apart.
// If an error has been queued on p.errors it is returned alongside.
func (p *fakeClientProvider) GetClient(h map[string]string) (*client.Client, error) {
	p.headers = h
	p.numCalls++

	// Non-blocking drain of at most one queued error.
	var err error
	select {
	case err = <-p.errors:
	default:
	}

	var config []string
	for _, v := range h {
		config = append(config, v)
	}
	sort.Strings(config) // because map iteration order is random

	res := &http.Response{
		StatusCode: 200,
		Header:     http.Header{},
		Body:       ioutil.NopCloser(strings.NewReader(strings.Join(config, ","))),
	}
	c := &fakeClient{res: res}
	return client.NewClient(c, "GET", "http://localhost:9090/"), err
}
||||||
|
|
||||||
|
// fakeClient is a doer that always replies with a canned response.
type fakeClient struct {
	res *http.Response
}

// Do ignores the request and returns the canned response with a nil error.
func (c *fakeClient) Do(req *http.Request) (*http.Response, error) {
	return c.res, nil
}
||||||
@ -0,0 +1,103 @@ |
|||||||
|
package client |
||||||
|
|
||||||
|
import ( |
||||||
|
"context" |
||||||
|
"io/ioutil" |
||||||
|
"net/http" |
||||||
|
"net/url" |
||||||
|
"path" |
||||||
|
"strconv" |
||||||
|
"strings" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
) |
||||||
|
|
||||||
|
// doer is the minimal HTTP execution seam the client needs; satisfied by
// *http.Client and easily faked in tests.
type doer interface {
	Do(req *http.Request) (*http.Response, error)
}

// Client issues Prometheus HTTP API requests against a base URL using a
// fixed HTTP method.
type Client struct {
	doer    doer   // transport used for every request
	method  string // HTTP method; controls query encoding in fetch (GET vs POST)
	baseUrl string // Prometheus server root, e.g. http://localhost:9090/
}
||||||
|
|
||||||
|
func NewClient(d doer, method, baseUrl string) *Client { |
||||||
|
return &Client{doer: d, method: method, baseUrl: baseUrl} |
||||||
|
} |
||||||
|
|
||||||
|
func (c *Client) QueryRange(ctx context.Context, q *models.Query) (*http.Response, error) { |
||||||
|
u, err := url.ParseRequestURI(c.baseUrl) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
u.Path = path.Join(u.Path, "api/v1/query_range") |
||||||
|
|
||||||
|
qs := u.Query() |
||||||
|
qs.Set("query", q.Expr) |
||||||
|
tr := q.TimeRange() |
||||||
|
qs.Set("start", formatTime(tr.Start)) |
||||||
|
qs.Set("end", formatTime(tr.End)) |
||||||
|
qs.Set("step", strconv.FormatFloat(tr.Step.Seconds(), 'f', -1, 64)) |
||||||
|
|
||||||
|
return c.fetch(ctx, u, qs) |
||||||
|
} |
||||||
|
|
||||||
|
func (c *Client) QueryInstant(ctx context.Context, q *models.Query) (*http.Response, error) { |
||||||
|
u, err := url.ParseRequestURI(c.baseUrl) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
u.Path = path.Join(u.Path, "api/v1/query") |
||||||
|
|
||||||
|
qs := u.Query() |
||||||
|
qs.Set("query", q.Expr) |
||||||
|
tr := q.TimeRange() |
||||||
|
if !tr.End.IsZero() { |
||||||
|
qs.Set("time", formatTime(tr.End)) |
||||||
|
} |
||||||
|
|
||||||
|
return c.fetch(ctx, u, qs) |
||||||
|
} |
||||||
|
|
||||||
|
func (c *Client) QueryExemplars(ctx context.Context, q *models.Query) (*http.Response, error) { |
||||||
|
u, err := url.ParseRequestURI(c.baseUrl) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
u.Path = path.Join(u.Path, "api/v1/query_exemplars") |
||||||
|
|
||||||
|
qs := u.Query() |
||||||
|
tr := q.TimeRange() |
||||||
|
qs.Set("query", q.Expr) |
||||||
|
qs.Set("start", formatTime(tr.Start)) |
||||||
|
qs.Set("end", formatTime(tr.End)) |
||||||
|
|
||||||
|
return c.fetch(ctx, u, qs) |
||||||
|
} |
||||||
|
|
||||||
|
func (c *Client) fetch(ctx context.Context, u *url.URL, qs url.Values) (*http.Response, error) { |
||||||
|
if strings.ToUpper(c.method) == http.MethodGet { |
||||||
|
u.RawQuery = qs.Encode() |
||||||
|
} |
||||||
|
|
||||||
|
r, err := http.NewRequestWithContext(ctx, c.method, u.String(), nil) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
if strings.ToUpper(c.method) == http.MethodPost { |
||||||
|
r.Body = ioutil.NopCloser(strings.NewReader(qs.Encode())) |
||||||
|
r.Header.Set("Content-Type", "application/x-www-form-urlencoded") |
||||||
|
} |
||||||
|
|
||||||
|
return c.doer.Do(r) |
||||||
|
} |
||||||
|
|
||||||
|
func formatTime(t time.Time) string { |
||||||
|
return strconv.FormatFloat(float64(t.Unix())+float64(t.Nanosecond())/1e9, 'f', -1, 64) |
||||||
|
} |
||||||
@ -0,0 +1,88 @@ |
|||||||
|
package client |
||||||
|
|
||||||
|
import ( |
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana/pkg/services/featuremgmt" |
||||||
|
"github.com/grafana/grafana/pkg/setting" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/middleware" |
||||||
|
"github.com/grafana/grafana/pkg/util/maputil" |
||||||
|
|
||||||
|
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/infra/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/infra/log" |
||||||
|
) |
||||||
|
|
||||||
|
// Provider builds Prometheus API clients from datasource settings,
// layering middlewares, auth headers, SigV4 and (optionally) Azure auth.
type Provider struct {
	settings       backend.DataSourceInstanceSettings // raw datasource instance settings
	jsonData       map[string]interface{}             // decoded datasource JSON options
	httpMethod     string                             // method used for queries, read from jsonData
	clientProvider httpclient.Provider                // factory for the underlying *http.Client
	cfg            *setting.Cfg
	features       featuremgmt.FeatureToggles
	log            log.Logger
}
||||||
|
|
||||||
|
// NewProvider captures everything needed to build per-request clients.
// The httpMethod option is read from jsonData up front; the error is
// deliberately ignored so a missing or malformed value degrades to the
// empty string rather than failing construction.
func NewProvider(
	settings backend.DataSourceInstanceSettings,
	jsonData map[string]interface{},
	clientProvider httpclient.Provider,
	cfg *setting.Cfg,
	features featuremgmt.FeatureToggles,
	log log.Logger,
) *Provider {
	httpMethod, _ := maputil.GetStringOptional(jsonData, "httpMethod")
	return &Provider{
		settings:       settings,
		jsonData:       jsonData,
		httpMethod:     httpMethod,
		clientProvider: clientProvider,
		cfg:            cfg,
		features:       features,
		log:            log,
	}
}
||||||
|
|
||||||
|
// GetClient builds a Prometheus client for one request, applying the
// standard middlewares, the caller's auth headers, SigV4 and Azure auth
// to the datasource's HTTP options.
func (p *Provider) GetClient(headers map[string]string) (*Client, error) {
	opts, err := p.settings.HTTPClientOptions()
	if err != nil {
		return nil, err
	}

	opts.Middlewares = p.middlewares()
	// Headers are copied so later mutation of opts can't leak into the
	// caller's map.
	opts.Headers = reqHeaders(headers)

	// Set SigV4 service namespace
	if opts.SigV4 != nil {
		opts.SigV4.Service = "aps"
	}

	// Azure authentication (feature-flagged; no-op when disabled)
	err = p.configureAzureAuthentication(&opts)
	if err != nil {
		return nil, err
	}

	httpClient, err := p.clientProvider.New(opts)
	if err != nil {
		return nil, err
	}

	return NewClient(httpClient, p.httpMethod, p.settings.URL), nil
}
||||||
|
|
||||||
|
func (p *Provider) middlewares() []sdkhttpclient.Middleware { |
||||||
|
middlewares := []sdkhttpclient.Middleware{ |
||||||
|
middleware.CustomQueryParameters(p.log), |
||||||
|
sdkhttpclient.CustomHeadersMiddleware(), |
||||||
|
} |
||||||
|
return middlewares |
||||||
|
} |
||||||
|
|
||||||
|
// reqHeaders returns a shallow copy of headers so that mutation of the
// result cannot change the caller's original map.
func reqHeaders(headers map[string]string) map[string]string {
	copied := make(map[string]string, len(headers))
	for name, value := range headers {
		copied[name] = value
	}
	return copied
}
||||||
@ -0,0 +1,50 @@ |
|||||||
|
package client |
||||||
|
|
||||||
|
import ( |
||||||
|
"fmt" |
||||||
|
"net/url" |
||||||
|
"path" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-azure-sdk-go/azcredentials" |
||||||
|
"github.com/grafana/grafana-azure-sdk-go/azhttpclient" |
||||||
|
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||||
|
|
||||||
|
"github.com/grafana/grafana/pkg/services/featuremgmt" |
||||||
|
"github.com/grafana/grafana/pkg/util/maputil" |
||||||
|
) |
||||||
|
|
||||||
|
func (p *Provider) configureAzureAuthentication(opts *sdkhttpclient.Options) error { |
||||||
|
// Azure authentication is experimental (#35857)
|
||||||
|
if !p.features.IsEnabled(featuremgmt.FlagPrometheusAzureAuth) { |
||||||
|
return nil |
||||||
|
} |
||||||
|
|
||||||
|
credentials, err := azcredentials.FromDatasourceData(p.jsonData, p.settings.DecryptedSecureJSONData) |
||||||
|
if err != nil { |
||||||
|
err = fmt.Errorf("invalid Azure credentials: %s", err) |
||||||
|
return err |
||||||
|
} |
||||||
|
|
||||||
|
if credentials != nil { |
||||||
|
resourceIdStr, err := maputil.GetStringOptional(p.jsonData, "azureEndpointResourceId") |
||||||
|
if err != nil { |
||||||
|
return err |
||||||
|
} else if resourceIdStr == "" { |
||||||
|
err := fmt.Errorf("endpoint resource ID (audience) not provided") |
||||||
|
return err |
||||||
|
} |
||||||
|
|
||||||
|
resourceId, err := url.Parse(resourceIdStr) |
||||||
|
if err != nil || resourceId.Scheme == "" || resourceId.Host == "" { |
||||||
|
err := fmt.Errorf("endpoint resource ID (audience) '%s' invalid", resourceIdStr) |
||||||
|
return err |
||||||
|
} |
||||||
|
|
||||||
|
resourceId.Path = path.Join(resourceId.Path, ".default") |
||||||
|
scopes := []string{resourceId.String()} |
||||||
|
|
||||||
|
azhttpclient.AddAzureAuthentication(opts, p.cfg.Azure, credentials, scopes) |
||||||
|
} |
||||||
|
|
||||||
|
return nil |
||||||
|
} |
||||||
@ -0,0 +1,150 @@ |
|||||||
|
package client |
||||||
|
|
||||||
|
import ( |
||||||
|
"testing" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/services/featuremgmt" |
||||||
|
"github.com/grafana/grafana/pkg/setting" |
||||||
|
"github.com/stretchr/testify/assert" |
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// TestConfigureAzureAuthentication covers the interaction between the
// prometheusAzureAuth feature flag and the Azure fields in JsonData.
func TestConfigureAzureAuthentication(t *testing.T) {
	cfg := &setting.Cfg{}
	settings := backend.DataSourceInstanceSettings{}

	t.Run("given feature flag enabled", func(t *testing.T) {
		features := featuremgmt.WithFeatures(featuremgmt.FlagPrometheusAzureAuth)

		t.Run("should set Azure middleware when JsonData contains valid credentials", func(t *testing.T) {
			jsonData := map[string]interface{}{
				"httpMethod": "POST",
				"azureCredentials": map[string]interface{}{
					"authType": "msi",
				},
				"azureEndpointResourceId": "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			require.NoError(t, err)

			// Exactly one middleware: the Azure auth one.
			require.NotNil(t, opts.Middlewares)
			assert.Len(t, opts.Middlewares, 1)
		})

		t.Run("should not set Azure middleware when JsonData doesn't contain valid credentials", func(t *testing.T) {
			jsonData := map[string]interface{}{
				"httpMethod": "POST",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			require.NoError(t, err)

			assert.NotContains(t, opts.CustomOptions, "_azureCredentials")
		})

		t.Run("should return error when JsonData contains invalid credentials", func(t *testing.T) {
			// azureCredentials must be an object, not a string.
			jsonData := map[string]interface{}{
				"httpMethod":       "POST",
				"azureCredentials": "invalid",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			assert.Error(t, err)
		})

		t.Run("should set Azure middleware when JsonData contains credentials and valid audience", func(t *testing.T) {
			jsonData := map[string]interface{}{
				"httpMethod": "POST",
				"azureCredentials": map[string]interface{}{
					"authType": "msi",
				},
				"azureEndpointResourceId": "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			require.NoError(t, err)

			require.NotNil(t, opts.Middlewares)
			assert.Len(t, opts.Middlewares, 1)
		})

		t.Run("should not set Azure middleware when JsonData doesn't contain credentials", func(t *testing.T) {
			jsonData := map[string]interface{}{
				"httpMethod":              "POST",
				"azureEndpointResourceId": "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			require.NoError(t, err)

			if opts.Middlewares != nil {
				assert.Len(t, opts.Middlewares, 0)
			}
		})

		t.Run("should return error when JsonData contains invalid audience", func(t *testing.T) {
			// "invalid" parses but has no scheme/host, so it must be rejected.
			jsonData := map[string]interface{}{
				"httpMethod": "POST",
				"azureCredentials": map[string]interface{}{
					"authType": "msi",
				},
				"azureEndpointResourceId": "invalid",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			assert.Error(t, err)
		})
	})

	t.Run("given feature flag not enabled", func(t *testing.T) {
		features := featuremgmt.WithFeatures()

		t.Run("should not set Azure Credentials even when JsonData contains credentials", func(t *testing.T) {
			jsonData := map[string]interface{}{
				"httpMethod": "POST",
				"azureCredentials": map[string]interface{}{
					"authType": "msi",
				},
				"azureEndpointResourceId": "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5",
			}

			var p = NewProvider(settings, jsonData, nil, cfg, features, nil)

			var opts = &sdkhttpclient.Options{CustomOptions: map[string]interface{}{}}

			err := p.configureAzureAuthentication(opts)
			require.NoError(t, err)

			// Flag off: configuration is a no-op.
			if opts.Middlewares != nil {
				assert.Len(t, opts.Middlewares, 0)
			}
		})
	})
}
||||||
@ -0,0 +1,125 @@ |
|||||||
|
package client_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"encoding/json" |
||||||
|
"net/http" |
||||||
|
"testing" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/infra/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/services/featuremgmt" |
||||||
|
"github.com/grafana/grafana/pkg/setting" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/client" |
||||||
|
|
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// headers is the default set of auth headers shared across these tests.
var headers = map[string]string{"Authorization": "token", "X-ID-Token": "id-token"}
||||||
|
|
||||||
|
// TestGetClient verifies the options the Provider hands to the HTTP
// client provider: SigV4 service, middleware chain and header forwarding.
func TestGetClient(t *testing.T) {
	t.Run("it sets the SigV4 service if it exists", func(t *testing.T) {
		tc := setup(`{"sigV4Auth":true}`)

		// SigV4 options are only populated when globally enabled.
		setting.SigV4AuthEnabled = true
		defer func() { setting.SigV4AuthEnabled = false }()

		_, err := tc.clientProvider.GetClient(headers)
		require.Nil(t, err)

		require.Equal(t, "aps", tc.httpProvider.opts.SigV4.Service)
	})

	t.Run("it always uses the custom params and custom headers middlewares", func(t *testing.T) {
		tc := setup()

		_, err := tc.clientProvider.GetClient(headers)
		require.Nil(t, err)

		require.Len(t, tc.httpProvider.middlewares(), 2)
		require.Contains(t, tc.httpProvider.middlewares(), "prom-custom-query-parameters")
		require.Contains(t, tc.httpProvider.middlewares(), "CustomHeaders")
	})

	t.Run("extra headers", func(t *testing.T) {
		t.Run("it sets the headers when 'oauthPassThru' is true and auth headers are passed", func(t *testing.T) {
			tc := setup(`{"oauthPassThru":true}`)
			_, err := tc.clientProvider.GetClient(headers)
			require.Nil(t, err)

			require.Equal(t, headers, tc.httpProvider.opts.Headers)
		})

		t.Run("it sets all headers", func(t *testing.T) {
			// Non-auth headers are forwarded too, not filtered out.
			withNonAuth := map[string]string{"X-Not-Auth": "stuff"}

			tc := setup(`{"oauthPassThru":true}`)
			_, err := tc.clientProvider.GetClient(withNonAuth)
			require.Nil(t, err)

			require.Equal(t, map[string]string{"X-Not-Auth": "stuff"}, tc.httpProvider.opts.Headers)
		})

		t.Run("it does not error when headers are nil", func(t *testing.T) {
			tc := setup(`{"oauthPassThru":true}`)

			_, err := tc.clientProvider.GetClient(nil)
			require.Nil(t, err)
		})
	})
}
||||||
|
|
||||||
|
// setup builds a Provider wired to a recording fake HTTP client provider.
// An optional JSON blob configures the datasource's JSONData.
func setup(jsonData ...string) *testContext {
	var rawData []byte
	if len(jsonData) > 0 {
		rawData = []byte(jsonData[0])
	}

	var jd map[string]interface{}
	// error deliberately ignored: nil/invalid JSON leaves jd nil
	_ = json.Unmarshal(rawData, &jd)

	cfg := &setting.Cfg{}
	settings := backend.DataSourceInstanceSettings{URL: "test-url", JSONData: rawData}
	features := featuremgmt.WithFeatures()
	hp := &fakeHttpClientProvider{}
	p := client.NewProvider(settings, jd, hp, cfg, features, nil)

	return &testContext{
		httpProvider:   hp,
		clientProvider: p,
	}
}
||||||
|
|
||||||
|
// testContext pairs the Provider under test with the fake HTTP provider
// that records the options it receives.
type testContext struct {
	httpProvider   *fakeHttpClientProvider
	clientProvider *client.Provider
}
||||||
|
|
||||||
|
// fakeHttpClientProvider embeds the real provider interface and records
// the last options passed to New/GetTransport for assertions.
type fakeHttpClientProvider struct {
	httpclient.Provider

	opts sdkhttpclient.Options // last options seen
}
||||||
|
|
||||||
|
// New records the first options value for later inspection and builds a
// real SDK HTTP client from it.
func (p *fakeHttpClientProvider) New(opts ...sdkhttpclient.Options) (*http.Client, error) {
	p.opts = opts[0] // assumes callers always pass at least one options value
	return sdkhttpclient.New(opts[0])
}
||||||
|
|
||||||
|
// GetTransport records the first options value and returns the default
// transport; no real client configuration happens in tests.
func (p *fakeHttpClientProvider) GetTransport(opts ...sdkhttpclient.Options) (http.RoundTripper, error) {
	p.opts = opts[0] // assumes callers always pass at least one options value
	return http.DefaultTransport, nil
}
||||||
|
|
||||||
|
func (p *fakeHttpClientProvider) middlewares() []string { |
||||||
|
var middlewareNames []string |
||||||
|
for _, m := range p.opts.Middlewares { |
||||||
|
mw, ok := m.(sdkhttpclient.MiddlewareName) |
||||||
|
if !ok { |
||||||
|
panic("unexpected middleware type") |
||||||
|
} |
||||||
|
|
||||||
|
middlewareNames = append(middlewareNames, mw.MiddlewareName()) |
||||||
|
} |
||||||
|
return middlewareNames |
||||||
|
} |
||||||
@ -0,0 +1,230 @@ |
|||||||
|
package models |
||||||
|
|
||||||
|
import ( |
||||||
|
"encoding/json" |
||||||
|
"math" |
||||||
|
"strconv" |
||||||
|
"strings" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/intervalv2" |
||||||
|
) |
||||||
|
|
||||||
|
// Internal interval and range variables understood inside a Prometheus
// expression; each is substituted before the query is sent.
const (
	varInterval     = "$__interval"
	varIntervalMs   = "$__interval_ms"
	varRange        = "$__range"
	varRangeS       = "$__range_s"
	varRangeMs      = "$__range_ms"
	varRateInterval = "$__rate_interval"
)

// The same variables in the ${...} spelling.
// Repetitive code, we should have functionality to unify these.
const (
	varIntervalAlt     = "${__interval}"
	varIntervalMsAlt   = "${__interval_ms}"
	varRangeAlt        = "${__range}"
	varRangeSAlt       = "${__range_s}"
	varRangeMsAlt      = "${__range_ms}"
	varRateIntervalAlt = "${__rate_interval}"
)
||||||
|
|
||||||
|
// TimeSeriesQueryType labels which Prometheus API endpoint a query targets.
type TimeSeriesQueryType string

const (
	RangeQueryType    TimeSeriesQueryType = "range"
	InstantQueryType  TimeSeriesQueryType = "instant"
	ExemplarQueryType TimeSeriesQueryType = "exemplar"
	UnknownQueryType  TimeSeriesQueryType = "unknown"
)

// safeResolution caps the number of steps a query may resolve to;
// intervals are widened so a range never exceeds this many points.
var safeResolution = 11000
||||||
|
|
||||||
|
// QueryModel mirrors the JSON payload the frontend sends for a
// Prometheus query.
type QueryModel struct {
	Expr           string `json:"expr"` // PromQL expression, possibly containing $__ variables
	LegendFormat   string `json:"legendFormat"`
	Interval       string `json:"interval"` // user-specified step; may itself be a $__ variable
	IntervalMS     int64  `json:"intervalMS"`
	StepMode       string `json:"stepMode"`
	RangeQuery     bool   `json:"range"`
	InstantQuery   bool   `json:"instant"`
	ExemplarQuery  bool   `json:"exemplar"`
	IntervalFactor int64  `json:"intervalFactor"` // multiplier for the calculated step; 0 is treated as 1
	UtcOffsetSec   int64  `json:"utcOffsetSec"`   // timezone offset used when aligning the time range
}
||||||
|
|
||||||
|
// TimeRange is a resolved, step-aligned query window.
type TimeRange struct {
	Start time.Time
	End   time.Time
	Step  time.Duration
}
||||||
|
|
||||||
|
// Query is the backend's resolved form of a Prometheus query: expression
// interpolated, step calculated, and query kinds decided.
type Query struct {
	Expr          string        // PromQL with all $__ variables substituted
	Step          time.Duration // effective query step
	LegendFormat  string
	Start         time.Time
	End           time.Time
	RefId         string
	InstantQuery  bool
	RangeQuery    bool
	ExemplarQuery bool
	UtcOffsetSec  int64 // offset applied during step alignment
}
||||||
|
|
||||||
|
// Parse decodes a backend data query into a *Query, resolving the
// effective step and interpolating interval/range variables in the
// expression. fromAlert suppresses exemplar queries.
func Parse(query backend.DataQuery, timeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool) (*Query, error) {
	model := &QueryModel{}
	if err := json.Unmarshal(query.JSON, model); err != nil {
		return nil, err
	}

	// Final interval value
	interval, err := calculatePrometheusInterval(model, timeInterval, query, intervalCalculator)
	if err != nil {
		return nil, err
	}

	// Interpolate variables in expr
	timeRange := query.TimeRange.To.Sub(query.TimeRange.From)
	expr := interpolateVariables(model, interval, timeRange, intervalCalculator, timeInterval)
	rangeQuery := model.RangeQuery
	if !model.InstantQuery && !model.RangeQuery {
		// In older dashboards, we were not setting range query param and !range && !instant was run as range query
		rangeQuery = true
	}

	// We never want to run exemplar query for alerting
	exemplarQuery := model.ExemplarQuery
	if fromAlert {
		exemplarQuery = false
	}

	return &Query{
		Expr:          expr,
		Step:          interval,
		LegendFormat:  model.LegendFormat,
		Start:         query.TimeRange.From,
		End:           query.TimeRange.To,
		RefId:         query.RefID,
		InstantQuery:  model.InstantQuery,
		RangeQuery:    rangeQuery,
		ExemplarQuery: exemplarQuery,
		UtcOffsetSec:  model.UtcOffsetSec,
	}, nil
}
||||||
|
|
||||||
|
func (query *Query) Type() TimeSeriesQueryType { |
||||||
|
if query.InstantQuery { |
||||||
|
return InstantQueryType |
||||||
|
} |
||||||
|
if query.RangeQuery { |
||||||
|
return RangeQueryType |
||||||
|
} |
||||||
|
if query.ExemplarQuery { |
||||||
|
return ExemplarQueryType |
||||||
|
} |
||||||
|
return UnknownQueryType |
||||||
|
} |
||||||
|
|
||||||
|
// TimeRange returns the query window with start and end aligned to the
// step (alignTimeRange — defined elsewhere — presumably also accounts
// for the UTC offset; verify against its implementation).
func (query *Query) TimeRange() TimeRange {
	return TimeRange{
		Step: query.Step,
		// Align query range to step. It rounds start and end down to a multiple of step.
		Start: alignTimeRange(query.Start, query.Step, query.UtcOffsetSec),
		End:   alignTimeRange(query.End, query.Step, query.UtcOffsetSec),
	}
}
||||||
|
|
||||||
|
func calculatePrometheusInterval(model *QueryModel, timeInterval string, query backend.DataQuery, intervalCalculator intervalv2.Calculator) (time.Duration, error) { |
||||||
|
queryInterval := model.Interval |
||||||
|
|
||||||
|
//If we are using variable for interval/step, we will replace it with calculated interval
|
||||||
|
if isVariableInterval(queryInterval) { |
||||||
|
queryInterval = "" |
||||||
|
} |
||||||
|
|
||||||
|
minInterval, err := intervalv2.GetIntervalFrom(timeInterval, queryInterval, model.IntervalMS, 15*time.Second) |
||||||
|
if err != nil { |
||||||
|
return time.Duration(0), err |
||||||
|
} |
||||||
|
calculatedInterval := intervalCalculator.Calculate(query.TimeRange, minInterval, query.MaxDataPoints) |
||||||
|
safeInterval := intervalCalculator.CalculateSafeInterval(query.TimeRange, int64(safeResolution)) |
||||||
|
|
||||||
|
adjustedInterval := safeInterval.Value |
||||||
|
if calculatedInterval.Value > safeInterval.Value { |
||||||
|
adjustedInterval = calculatedInterval.Value |
||||||
|
} |
||||||
|
|
||||||
|
if model.Interval == varRateInterval || model.Interval == varRateIntervalAlt { |
||||||
|
// Rate interval is final and is not affected by resolution
|
||||||
|
return calculateRateInterval(adjustedInterval, timeInterval, intervalCalculator), nil |
||||||
|
} else { |
||||||
|
intervalFactor := model.IntervalFactor |
||||||
|
if intervalFactor == 0 { |
||||||
|
intervalFactor = 1 |
||||||
|
} |
||||||
|
return time.Duration(int64(adjustedInterval) * intervalFactor), nil |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
func calculateRateInterval(interval time.Duration, scrapeInterval string, intervalCalculator intervalv2.Calculator) time.Duration { |
||||||
|
scrape := scrapeInterval |
||||||
|
if scrape == "" { |
||||||
|
scrape = "15s" |
||||||
|
} |
||||||
|
|
||||||
|
scrapeIntervalDuration, err := intervalv2.ParseIntervalStringToTimeDuration(scrape) |
||||||
|
if err != nil { |
||||||
|
return time.Duration(0) |
||||||
|
} |
||||||
|
|
||||||
|
rateInterval := time.Duration(int(math.Max(float64(interval+scrapeIntervalDuration), float64(4)*float64(scrapeIntervalDuration)))) |
||||||
|
return rateInterval |
||||||
|
} |
||||||
|
|
||||||
|
func interpolateVariables(model *QueryModel, interval time.Duration, timeRange time.Duration, intervalCalculator intervalv2.Calculator, timeInterval string) string { |
||||||
|
expr := model.Expr |
||||||
|
rangeMs := timeRange.Milliseconds() |
||||||
|
rangeSRounded := int64(math.Round(float64(rangeMs) / 1000.0)) |
||||||
|
|
||||||
|
var rateInterval time.Duration |
||||||
|
if model.Interval == varRateInterval || model.Interval == varRateIntervalAlt { |
||||||
|
rateInterval = interval |
||||||
|
} else { |
||||||
|
rateInterval = calculateRateInterval(interval, timeInterval, intervalCalculator) |
||||||
|
} |
||||||
|
|
||||||
|
expr = strings.ReplaceAll(expr, varIntervalMs, strconv.FormatInt(int64(interval/time.Millisecond), 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varInterval, intervalv2.FormatDuration(interval)) |
||||||
|
expr = strings.ReplaceAll(expr, varRangeMs, strconv.FormatInt(rangeMs, 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varRangeS, strconv.FormatInt(rangeSRounded, 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varRange, strconv.FormatInt(rangeSRounded, 10)+"s") |
||||||
|
expr = strings.ReplaceAll(expr, varRateInterval, rateInterval.String()) |
||||||
|
|
||||||
|
// Repetitive code, we should have functionality to unify these
|
||||||
|
expr = strings.ReplaceAll(expr, varIntervalMsAlt, strconv.FormatInt(int64(interval/time.Millisecond), 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varIntervalAlt, intervalv2.FormatDuration(interval)) |
||||||
|
expr = strings.ReplaceAll(expr, varRangeMsAlt, strconv.FormatInt(rangeMs, 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varRangeSAlt, strconv.FormatInt(rangeSRounded, 10)) |
||||||
|
expr = strings.ReplaceAll(expr, varRangeAlt, strconv.FormatInt(rangeSRounded, 10)+"s") |
||||||
|
expr = strings.ReplaceAll(expr, varRateIntervalAlt, rateInterval.String()) |
||||||
|
return expr |
||||||
|
} |
||||||
|
|
||||||
|
func isVariableInterval(interval string) bool { |
||||||
|
if interval == varInterval || interval == varIntervalMs || interval == varRateInterval { |
||||||
|
return true |
||||||
|
} |
||||||
|
//Repetitive code, we should have functionality to unify these
|
||||||
|
if interval == varIntervalAlt || interval == varIntervalMsAlt || interval == varRateIntervalAlt { |
||||||
|
return true |
||||||
|
} |
||||||
|
return false |
||||||
|
} |
||||||
|
|
||||||
|
func alignTimeRange(t time.Time, step time.Duration, offset int64) time.Time { |
||||||
|
return time.Unix(int64(math.Floor((float64(t.Unix()+offset)/step.Seconds()))*step.Seconds()-float64(offset)), 0) |
||||||
|
} |
||||||
@ -0,0 +1,451 @@ |
|||||||
|
package models_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"testing" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/intervalv2" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// Shared fixtures for the parse tests below: a fixed reference time and the
// default interval calculator.
var (
	now                = time.Now()
	intervalCalculator = intervalv2.NewCalculator()
)
||||||
|
|
||||||
|
// TestPrometheus_timeSeriesQuery_parseTimeSeriesQuery covers models.Parse:
// step calculation (intervalFactor, data source scrape interval, safe
// resolution), interpolation of the $__interval / $__range /
// $__rate_interval template variables in both spellings, and the
// range/instant/exemplar query-type flags.
func TestPrometheus_timeSeriesQuery_parseTimeSeriesQuery(t *testing.T) {
	t.Run("parsing query from unified alerting", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(12 * time.Hour),
		}

		queryJson := `{
			"expr": "go_goroutines",
			"refId": "A",
			"exemplar": true
		}`

		q := backend.DataQuery{
			JSON:      []byte(queryJson),
			TimeRange: timeRange,
			RefID:     "A",
		}

		// fromAlert=true: exemplar queries must be disabled for alerting.
		res, err := models.Parse(q, "15s", intervalCalculator, true)
		require.NoError(t, err)
		require.Equal(t, false, res.ExemplarQuery)
	})

	t.Run("parsing query model with step", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(12 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, time.Second*30, res.Step)
	})

	t.Run("parsing query model without step parameter", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(1 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, time.Second*15, res.Step)
	})

	t.Run("parsing query model with high intervalFactor", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 10,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*20, res.Step)
	})

	t.Run("parsing query model with low intervalFactor", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*2, res.Step)
	})

	t.Run("parsing query model specified scrape-interval in the data source", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		// The data source scrape interval (240s) wins over the default 15s.
		res, err := models.Parse(q, "240s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, time.Minute*4, res.Step)
	})

	t.Run("parsing query model with $__interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})

	t.Run("parsing query model with ${__interval} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__interval}]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})

	t.Run("parsing query model with $__interval_ms variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]})", res.Expr)
	})

	t.Run("parsing query model with $__interval_ms and $__interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__interval_ms]}) + rate(ALERTS{job=\"test\" [$__interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})

	t.Run("parsing query model with ${__interval_ms} and ${__interval} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__interval_ms}]}) + rate(ALERTS{job=\"test\" [${__interval}]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
	})

	t.Run("parsing query model with $__range variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
	})

	t.Run("parsing query model with $__range_s variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800]})", res.Expr)
	})

	t.Run("parsing query model with ${__range_s} variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [${__range_s}s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
	})

	t.Run("parsing query model with $__range_s variable below 0.5s", func(t *testing.T) {
		// Sub-half-second ranges round down to 0.
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(40 * time.Millisecond),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [0]})", res.Expr)
	})

	t.Run("parsing query model with $__range_s variable between 1-0.5s", func(t *testing.T) {
		// Ranges of at least half a second round up to 1.
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(800 * time.Millisecond),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_s]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [1]})", res.Expr)
	})

	t.Run("parsing query model with $__range_ms variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [172800000]})", res.Expr)
	})

	t.Run("parsing query model with $__range_ms variable below 1s", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(20 * time.Millisecond),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__range_ms]})",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [20]})", res.Expr)
	})

	t.Run("parsing query model with $__rate_interval variable", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(5 * time.Minute),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"interval": "5m",
			"refId": "A"
		}`, timeRange)

		// interval (5m) + one scrape interval (15s) = 5m15s.
		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [5m15s]})", res.Expr)
	})

	t.Run("parsing query model with $__rate_interval variable in expr and interval", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(5 * time.Minute),
		}

		q := queryContext(`{
			"expr": "rate(ALERTS{job=\"test\" [$__rate_interval]})",
			"format": "time_series",
			"intervalFactor": 1,
			"interval": "$__rate_interval",
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "rate(ALERTS{job=\"test\" [1m0s]})", res.Expr)
		require.Equal(t, 1*time.Minute, res.Step)
	})

	t.Run("parsing query model of range query", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A",
			"range": true
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
	})

	t.Run("parsing query model of range and instant query", func(t *testing.T) {
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A",
			"range": true,
			"instant": true
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
		require.Equal(t, true, res.InstantQuery)
	})

	t.Run("parsing query model of with no query type", func(t *testing.T) {
		// A query with no explicit type defaults to a range query.
		timeRange := backend.TimeRange{
			From: now,
			To:   now.Add(48 * time.Hour),
		}

		q := queryContext(`{
			"expr": "go_goroutines",
			"format": "time_series",
			"intervalFactor": 1,
			"refId": "A"
		}`, timeRange)

		res, err := models.Parse(q, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, true, res.RangeQuery)
	})
}
||||||
|
|
||||||
|
func queryContext(json string, timeRange backend.TimeRange) backend.DataQuery { |
||||||
|
return backend.DataQuery{ |
||||||
|
JSON: []byte(json), |
||||||
|
TimeRange: timeRange, |
||||||
|
RefID: "A", |
||||||
|
} |
||||||
|
} |
||||||
@ -0,0 +1,143 @@ |
|||||||
|
package querydata_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"bytes" |
||||||
|
"context" |
||||||
|
"encoding/json" |
||||||
|
"fmt" |
||||||
|
"io/ioutil" |
||||||
|
"net/http" |
||||||
|
"os" |
||||||
|
"path/filepath" |
||||||
|
"testing" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/experimental" |
||||||
|
"github.com/grafana/grafana/pkg/infra/log" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
) |
||||||
|
|
||||||
|
// update regenerates the golden files when set to true; commit it as false.
var update = false
||||||
|
|
||||||
|
func TestMatrixResponses(t *testing.T) { |
||||||
|
tt := []struct { |
||||||
|
name string |
||||||
|
filepath string |
||||||
|
}{ |
||||||
|
{name: "parse a simple matrix response", filepath: "range_simple"}, |
||||||
|
{name: "parse a simple matrix response with value missing steps", filepath: "range_missing"}, |
||||||
|
{name: "parse a response with Infinity", filepath: "range_infinity"}, |
||||||
|
{name: "parse a response with NaN", filepath: "range_nan"}, |
||||||
|
} |
||||||
|
|
||||||
|
for _, test := range tt { |
||||||
|
t.Run(test.name, func(t *testing.T) { |
||||||
|
queryFileName := filepath.Join("../testdata", test.filepath+".query.json") |
||||||
|
responseFileName := filepath.Join("../testdata", test.filepath+".result.json") |
||||||
|
goldenFileName := filepath.Join("../testdata", test.filepath+".result.streaming.golden") |
||||||
|
|
||||||
|
query, err := loadStoredQuery(queryFileName) |
||||||
|
require.NoError(t, err) |
||||||
|
|
||||||
|
responseBytes, err := os.ReadFile(responseFileName) |
||||||
|
require.NoError(t, err) |
||||||
|
|
||||||
|
result, err := runQuery(responseBytes, query) |
||||||
|
require.NoError(t, err) |
||||||
|
require.Len(t, result.Responses, 1) |
||||||
|
|
||||||
|
dr, found := result.Responses["A"] |
||||||
|
require.True(t, found) |
||||||
|
|
||||||
|
actual, err := json.MarshalIndent(&dr, "", " ") |
||||||
|
require.NoError(t, err) |
||||||
|
|
||||||
|
// nolint:gosec
|
||||||
|
// We can ignore the gosec G304 because this is a test with static defined paths
|
||||||
|
expected, err := ioutil.ReadFile(goldenFileName + ".json") |
||||||
|
if err != nil || update { |
||||||
|
err = os.WriteFile(goldenFileName+".json", actual, 0600) |
||||||
|
require.NoError(t, err) |
||||||
|
} |
||||||
|
|
||||||
|
require.JSONEq(t, string(expected), string(actual)) |
||||||
|
|
||||||
|
require.NoError(t, experimental.CheckGoldenDataResponse(goldenFileName+".txt", &dr, update)) |
||||||
|
}) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// we store the prometheus query data in a json file, here is some minimal code
// to be able to read it back. unfortunately we cannot use the models.Query
// struct here, because it has `time.time` and `time.duration` fields that
// cannot be unmarshalled from JSON automatically.
//
// Start and End are unix seconds; Step is in seconds.
type storedPrometheusQuery struct {
	RefId      string
	RangeQuery bool
	Start      int64
	End        int64
	Step       int64
	Expr       string
}
||||||
|
|
||||||
|
func loadStoredQuery(fileName string) (*backend.QueryDataRequest, error) { |
||||||
|
bytes, err := os.ReadFile(fileName) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
var sq storedPrometheusQuery |
||||||
|
|
||||||
|
err = json.Unmarshal(bytes, &sq) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
qm := models.QueryModel{ |
||||||
|
RangeQuery: sq.RangeQuery, |
||||||
|
Expr: sq.Expr, |
||||||
|
Interval: fmt.Sprintf("%ds", sq.Step), |
||||||
|
IntervalMS: sq.Step * 1000, |
||||||
|
} |
||||||
|
|
||||||
|
data, err := json.Marshal(&qm) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
return &backend.QueryDataRequest{ |
||||||
|
Queries: []backend.DataQuery{ |
||||||
|
{ |
||||||
|
TimeRange: backend.TimeRange{ |
||||||
|
From: time.Unix(sq.Start, 0), |
||||||
|
To: time.Unix(sq.End, 0), |
||||||
|
}, |
||||||
|
RefID: sq.RefId, |
||||||
|
Interval: time.Second * time.Duration(sq.Step), |
||||||
|
JSON: json.RawMessage(data), |
||||||
|
}, |
||||||
|
}, |
||||||
|
}, nil |
||||||
|
} |
||||||
|
|
||||||
|
// runQuery stubs the HTTP layer with the given raw Prometheus response body
// (always status 200) and executes the request through the query data path.
func runQuery(response []byte, q *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	tCtx := setup()
	res := &http.Response{
		StatusCode: 200,
		Body:       ioutil.NopCloser(bytes.NewReader(response)),
	}
	tCtx.httpProvider.setResponse(res)
	return tCtx.queryData.Execute(context.Background(), q)
}
||||||
|
|
||||||
|
// fakeLogger silences all log output in tests. It embeds log.Logger so any
// methods not overridden below still satisfy the interface.
type fakeLogger struct {
	log.Logger
}

func (fl *fakeLogger) Debug(testMessage string, ctx ...interface{}) {}
func (fl *fakeLogger) Info(testMessage string, ctx ...interface{})  {}
func (fl *fakeLogger) Warn(testMessage string, ctx ...interface{})  {}
func (fl *fakeLogger) Error(testMessage string, ctx ...interface{}) {}
||||||
@ -0,0 +1,100 @@ |
|||||||
|
package querydata_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"bytes" |
||||||
|
"context" |
||||||
|
"encoding/json" |
||||||
|
"fmt" |
||||||
|
"io/ioutil" |
||||||
|
"math/rand" |
||||||
|
"net/http" |
||||||
|
"strings" |
||||||
|
"testing" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// when memory-profiling this benchmark, these commands are recommended:
// - go test -benchmem -run=^$ -benchtime 1x -memprofile memprofile.out -memprofilerate 1 -bench ^BenchmarkJson$ github.com/grafana/grafana/pkg/tsdb/prometheus
// - go tool pprof -http=localhost:6061 memprofile.out
//
// BenchmarkJson measures parsing/executing a synthetic 400-series matrix
// response; the payload is built once, outside the timed loop.
func BenchmarkJson(b *testing.B) {
	body, q := createJsonTestData(1642000000, 1, 300, 400)
	tCtx := setup()
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		// A fresh Body reader per iteration, since Execute consumes it.
		res := http.Response{
			StatusCode: 200,
			Body:       ioutil.NopCloser(bytes.NewReader(body)),
		}
		tCtx.httpProvider.setResponse(&res)
		_, err := tCtx.queryData.Execute(context.Background(), q)
		require.NoError(b, err)
	}
}
||||||
|
|
||||||
|
const nanRate = 0.002 |
||||||
|
|
||||||
|
// we build the JSON file from strings,
|
||||||
|
// it was easier to write it this way.
|
||||||
|
func makeJsonTestMetric(index int) string { |
||||||
|
return fmt.Sprintf(`{"server":"main","category":"maintenance","case":"%v"}`, index) |
||||||
|
} |
||||||
|
|
||||||
|
// return a value between -100 and +100, sometimes NaN, in string
|
||||||
|
func makeJsonTestValue(r *rand.Rand) string { |
||||||
|
if r.Float64() < nanRate { |
||||||
|
return "NaN" |
||||||
|
} else { |
||||||
|
return fmt.Sprintf("%f", (r.Float64()*200)-100) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// create one time-series
|
||||||
|
func makeJsonTestSeries(start int64, step int64, timestampCount int, r *rand.Rand, seriesIndex int) string { |
||||||
|
var values []string |
||||||
|
for i := 0; i < timestampCount; i++ { |
||||||
|
value := fmt.Sprintf(`[%d,"%v"]`, start+(int64(i)*step), makeJsonTestValue(r)) |
||||||
|
values = append(values, value) |
||||||
|
} |
||||||
|
return fmt.Sprintf(`{"metric":%v,"values":[%v]}`, makeJsonTestMetric(seriesIndex), strings.Join(values, ",")) |
||||||
|
} |
||||||
|
|
||||||
|
func createJsonTestData(start int64, step int64, timestampCount int, seriesCount int) ([]byte, *backend.QueryDataRequest) { |
||||||
|
// we use random numbers as values, but they have to be the same numbers
|
||||||
|
// every time we call this, so we create a random source.
|
||||||
|
r := rand.New(rand.NewSource(42)) |
||||||
|
var allSeries []string |
||||||
|
for i := 0; i < seriesCount; i++ { |
||||||
|
allSeries = append(allSeries, makeJsonTestSeries(start, step, timestampCount, r, i)) |
||||||
|
} |
||||||
|
bytes := []byte(fmt.Sprintf(`{"status":"success","data":{"resultType":"matrix","result":[%v]}}`, strings.Join(allSeries, ","))) |
||||||
|
|
||||||
|
qm := models.QueryModel{ |
||||||
|
RangeQuery: true, |
||||||
|
Expr: "test", |
||||||
|
} |
||||||
|
|
||||||
|
data, err := json.Marshal(&qm) |
||||||
|
if err != nil { |
||||||
|
panic(err) |
||||||
|
} |
||||||
|
|
||||||
|
res := backend.QueryDataRequest{ |
||||||
|
Queries: []backend.DataQuery{ |
||||||
|
{ |
||||||
|
RefID: "A", |
||||||
|
TimeRange: backend.TimeRange{ |
||||||
|
From: time.Unix(start, 0), |
||||||
|
To: time.Unix(start+((int64(timestampCount)-1)*step), 0), |
||||||
|
}, |
||||||
|
Interval: time.Second * time.Duration(step), |
||||||
|
JSON: data, |
||||||
|
}, |
||||||
|
}, |
||||||
|
} |
||||||
|
|
||||||
|
return bytes, &res |
||||||
|
} |
||||||
@ -0,0 +1,183 @@ |
|||||||
|
package querydata |
||||||
|
|
||||||
|
import ( |
||||||
|
"context" |
||||||
|
"encoding/json" |
||||||
|
"fmt" |
||||||
|
"regexp" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||||
|
"github.com/grafana/grafana/pkg/infra/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/infra/log" |
||||||
|
"github.com/grafana/grafana/pkg/infra/tracing" |
||||||
|
"github.com/grafana/grafana/pkg/services/featuremgmt" |
||||||
|
"github.com/grafana/grafana/pkg/setting" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/intervalv2" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/client" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
"github.com/grafana/grafana/pkg/util/maputil" |
||||||
|
"go.opentelemetry.io/otel/attribute" |
||||||
|
) |
||||||
|
|
||||||
|
// legendFormatAuto asks the backend to derive the legend automatically.
const legendFormatAuto = "__auto"

// legendFormatRegexp matches {{ label }} placeholders in a legend format
// string (compiled once at package scope).
var legendFormatRegexp = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// clientGetter returns a Prometheus client for the given request headers;
// presumably headers influence the client (e.g. auth), so clients are
// provided per header set — confirm against client.ProviderCache.
type clientGetter func(map[string]string) (*client.Client, error)

// ExemplarEvent is a single exemplar sample: timestamp, value and labels.
type ExemplarEvent struct {
	Time   time.Time
	Value  float64
	Labels map[string]string
}

// QueryData executes Prometheus queries for one data source instance.
type QueryData struct {
	intervalCalculator intervalv2.Calculator
	tracer             tracing.Tracer
	getClient          clientGetter
	log                log.Logger
	ID                 int64  // data source instance ID
	URL                string // data source URL
	TimeInterval       string // "timeInterval" option from the data source settings
}
||||||
|
|
||||||
|
// New builds a QueryData for a single Prometheus data source instance: it
// decodes the data source JSON settings, reads the optional "timeInterval"
// option, and wires up the client provider wrapped in a per-header cache.
func New(
	httpClientProvider httpclient.Provider,
	cfg *setting.Cfg,
	features featuremgmt.FeatureToggles,
	tracer tracing.Tracer,
	settings backend.DataSourceInstanceSettings,
	plog log.Logger,
) (*QueryData, error) {
	var jsonData map[string]interface{}
	if err := json.Unmarshal(settings.JSONData, &jsonData); err != nil {
		return nil, fmt.Errorf("error reading settings: %w", err)
	}

	// Optional data source scrape interval; empty when not configured.
	timeInterval, err := maputil.GetStringOptional(jsonData, "timeInterval")
	if err != nil {
		return nil, err
	}

	p := client.NewProvider(settings, jsonData, httpClientProvider, cfg, features, plog)
	pc, err := client.NewProviderCache(p)
	if err != nil {
		return nil, err
	}

	return &QueryData{
		intervalCalculator: intervalv2.NewCalculator(),
		tracer:             tracer,
		log:                plog,
		getClient:          pc.GetClient,
		TimeInterval:       timeInterval,
		ID:                 settings.ID,
		URL:                settings.URL,
	}, nil
}
||||||
|
|
||||||
|
func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) { |
||||||
|
fromAlert := req.Headers["FromAlert"] == "true" |
||||||
|
result := backend.QueryDataResponse{ |
||||||
|
Responses: backend.Responses{}, |
||||||
|
} |
||||||
|
|
||||||
|
client, err := s.getClient(req.Headers) |
||||||
|
if err != nil { |
||||||
|
return &result, err |
||||||
|
} |
||||||
|
|
||||||
|
for _, q := range req.Queries { |
||||||
|
query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert) |
||||||
|
if err != nil { |
||||||
|
return &result, err |
||||||
|
} |
||||||
|
r, err := s.fetch(ctx, client, query) |
||||||
|
if err != nil { |
||||||
|
return &result, err |
||||||
|
} |
||||||
|
if r == nil { |
||||||
|
s.log.Debug("Received nilresponse from runQuery", "query", query.Expr) |
||||||
|
continue |
||||||
|
} |
||||||
|
result.Responses[q.RefID] = *r |
||||||
|
} |
||||||
|
|
||||||
|
return &result, nil |
||||||
|
} |
||||||
|
|
||||||
|
// fetch runs the range, instant and exemplar variants of a single parsed
// query, as selected by the boolean flags on q, and merges all resulting
// frames into one DataResponse.
//
// Range and instant failures abort the fetch with an error; exemplar
// failures are only logged so the primary results still reach the caller.
func (s *QueryData) fetch(ctx context.Context, client *client.Client, q *models.Query) (*backend.DataResponse, error) {
	s.log.Debug("Sending query", "start", q.Start, "end", q.End, "step", q.Step, "query", q.Expr)

	// A single span covers all sub-queries issued for this query; it is
	// ended when fetch returns.
	traceCtx, span := s.trace(ctx, q)
	defer span.End()

	response := &backend.DataResponse{
		Frames: data.Frames{},
		Error:  nil,
	}

	if q.RangeQuery {
		res, err := s.rangeQuery(traceCtx, client, q)
		if err != nil {
			return nil, err
		}
		// Range results replace the (empty) frame list.
		response.Frames = res.Frames
	}

	if q.InstantQuery {
		res, err := s.instantQuery(traceCtx, client, q)
		if err != nil {
			return nil, err
		}
		response.Frames = append(response.Frames, res.Frames...)
	}

	if q.ExemplarQuery {
		res, err := s.exemplarQuery(traceCtx, client, q)
		if err != nil {
			// If exemplar query returns error, we want to only log it and
			// continue with other results processing
			s.log.Error("Exemplar query failed", "query", q.Expr, "err", err)
		}
		if res != nil {
			response.Frames = append(response.Frames, res.Frames...)
		}
	}

	return response, nil
}
||||||
|
|
||||||
|
func (s *QueryData) rangeQuery(ctx context.Context, c *client.Client, q *models.Query) (*backend.DataResponse, error) { |
||||||
|
res, err := c.QueryRange(ctx, q) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
return s.parseResponse(ctx, q, res) |
||||||
|
} |
||||||
|
|
||||||
|
func (s *QueryData) instantQuery(ctx context.Context, c *client.Client, q *models.Query) (*backend.DataResponse, error) { |
||||||
|
res, err := c.QueryInstant(ctx, q) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
return s.parseResponse(ctx, q, res) |
||||||
|
} |
||||||
|
|
||||||
|
func (s *QueryData) exemplarQuery(ctx context.Context, c *client.Client, q *models.Query) (*backend.DataResponse, error) { |
||||||
|
res, err := c.QueryExemplars(ctx, q) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
return s.parseResponse(ctx, q, res) |
||||||
|
} |
||||||
|
|
||||||
|
// trace starts a tracing span for a single query execution and annotates
// it with the expression and the query time range in unix nanoseconds.
// The caller is responsible for ending the returned span.
func (s *QueryData) trace(ctx context.Context, q *models.Query) (context.Context, tracing.Span) {
	traceCtx, span := s.tracer.Start(ctx, "datasource.prometheus")
	// NOTE(review): each attribute is supplied twice — as a raw value and as
	// an OTel attribute.KeyValue. This looks like the tracing.Span interface
	// accepts both forms; confirm against the tracing package before changing.
	span.SetAttributes("expr", q.Expr, attribute.Key("expr").String(q.Expr))
	span.SetAttributes("start_unixnano", q.Start, attribute.Key("start_unixnano").Int64(q.Start.UnixNano()))
	span.SetAttributes("stop_unixnano", q.End, attribute.Key("stop_unixnano").Int64(q.End.UnixNano()))
	return traceCtx, span
}
||||||
@ -0,0 +1,464 @@ |
|||||||
|
package querydata_test |
||||||
|
|
||||||
|
import ( |
||||||
|
"bytes" |
||||||
|
"context" |
||||||
|
"encoding/json" |
||||||
|
"io/ioutil" |
||||||
|
"math" |
||||||
|
"net/http" |
||||||
|
"testing" |
||||||
|
"time" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||||
|
"github.com/grafana/grafana/pkg/infra/httpclient" |
||||||
|
"github.com/grafana/grafana/pkg/infra/tracing" |
||||||
|
"github.com/grafana/grafana/pkg/setting" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/querydata" |
||||||
|
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1" |
||||||
|
p "github.com/prometheus/common/model" |
||||||
|
"github.com/stretchr/testify/require" |
||||||
|
) |
||||||
|
|
||||||
|
// TestPrometheus_parseTimeSeriesResponse exercises the full query path
// (Execute -> fetch -> parseResponse) against canned Prometheus API
// responses served by the fake HTTP provider — one subtest per Prometheus
// result type (exemplar, matrix, vector, scalar).
func TestPrometheus_parseTimeSeriesResponse(t *testing.T) {
	t.Run("exemplars response should be sampled and parsed normally", func(t *testing.T) {
		t.Skip()
		exemplars := []apiv1.ExemplarQueryResult{
			{
				SeriesLabels: p.LabelSet{
					"__name__": "tns_request_duration_seconds_bucket",
					"instance": "app:80",
					"job":      "tns/app",
				},
				Exemplars: []apiv1.Exemplar{
					{
						Labels:    p.LabelSet{"traceID": "test1"},
						Value:     0.003535405,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-2 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test2"},
						Value:     0.005555605,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-4 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test3"},
						Value:     0.007545445,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-6 * time.Minute).UnixNano()),
					},
					{
						Labels:    p.LabelSet{"traceID": "test4"},
						Value:     0.009545445,
						Timestamp: p.TimeFromUnixNano(time.Now().Add(-7 * time.Minute).UnixNano()),
					},
				},
			},
		}

		tctx := setup()

		qm := models.QueryModel{
			LegendFormat:  "legend {{app}}",
			UtcOffsetSec:  0,
			ExemplarQuery: true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			RefID: "A",
			JSON:  b,
		}
		res, err := execute(tctx, query, exemplars)
		require.NoError(t, err)

		// Test fields
		require.Len(t, res, 1)
		// require.Equal(t, res[0].Name, "exemplar")
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.Len(t, res[0].Fields, 6)

		// Test correct values (sampled to 2)
		require.Equal(t, res[0].Fields[1].Len(), 2)
		require.Equal(t, res[0].Fields[1].At(0), 0.009545445)
		require.Equal(t, res[0].Fields[1].At(1), 0.003535405)
	})

	t.Run("matrix response should be parsed normally", func(t *testing.T) {
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 2, Timestamp: 2000},
			{Value: 3, Timestamp: 3000},
			{Value: 4, Timestamp: 4000},
			{Value: 5, Timestamp: 5000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}

		qm := models.QueryModel{
			LegendFormat: "legend {{app}}",
			UtcOffsetSec: 0,
			RangeQuery:   true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(5, 0).UTC(),
			},
			JSON: b,
		}
		tctx := setup()
		res, err := execute(tctx, query, result)
		require.NoError(t, err)

		require.Len(t, res, 1)
		//require.Equal(t, "legend Application", res[0].Name)
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, "Time", res[0].Fields[0].Name)
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, "app=Application, tag2=tag2", res[0].Fields[1].Labels.String())
		require.Equal(t, "Value", res[0].Fields[1].Name)
		require.Equal(t, "legend Application", res[0].Fields[1].Config.DisplayNameFromDS)

		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
	})

	t.Run("matrix response with missed data points should be parsed correctly", func(t *testing.T) {
		// Only two of the four steps in the 1s..4s window carry samples.
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 4, Timestamp: 4000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}

		qm := models.QueryModel{
			LegendFormat: "",
			UtcOffsetSec: 0,
			RangeQuery:   true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}
		tctx := setup()
		res, err := execute(tctx, query, result)

		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Equal(t, res[0].Fields[0].Len(), 2)
		require.Equal(t, time.Unix(1, 0).UTC(), res[0].Fields[0].At(0))
		require.Equal(t, time.Unix(4, 0).UTC(), res[0].Fields[0].At(1))
		require.Equal(t, res[0].Fields[1].Len(), 2)
		require.Equal(t, float64(1), res[0].Fields[1].At(0).(float64))
		require.Equal(t, float64(4), res[0].Fields[1].At(1).(float64))
	})

	t.Run("matrix response with from alerting missed data points should be parsed correctly", func(t *testing.T) {
		values := []p.SamplePair{
			{Value: 1, Timestamp: 1000},
			{Value: 4, Timestamp: 4000},
		}
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application", "tag2": "tag2"},
					Values: values,
				},
			},
		}

		qm := models.QueryModel{
			LegendFormat: "",
			UtcOffsetSec: 0,
			RangeQuery:   true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}
		tctx := setup()
		res, err := execute(tctx, query, result)

		require.NoError(t, err)
		require.Len(t, res, 1)
		require.Equal(t, res[0].Name, "{app=\"Application\", tag2=\"tag2\"}")
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.Equal(t, res[0].Fields[1].Config.DisplayNameFromDS, "{app=\"Application\", tag2=\"tag2\"}")
	})

	t.Run("matrix response with NaN value should be changed to null", func(t *testing.T) {
		result := queryResult{
			Type: p.ValMatrix,
			Result: p.Matrix{
				&p.SampleStream{
					Metric: p.Metric{"app": "Application"},
					Values: []p.SamplePair{
						{Value: p.SampleValue(math.NaN()), Timestamp: 1000},
					},
				},
			},
		}

		qm := models.QueryModel{
			LegendFormat: "",
			UtcOffsetSec: 0,
			RangeQuery:   true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			TimeRange: backend.TimeRange{
				From: time.Unix(1, 0).UTC(),
				To:   time.Unix(4, 0).UTC(),
			},
			JSON: b,
		}

		tctx := setup()
		res, err := execute(tctx, query, result)
		require.NoError(t, err)

		// NOTE(review): despite the subtest name, the assertion checks the
		// in-memory value is still NaN (NaN serializes to null on the wire).
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.True(t, math.IsNaN(res[0].Fields[1].At(0).(float64)))
	})

	t.Run("vector response should be parsed normally", func(t *testing.T) {
		qr := queryResult{
			Type: p.ValVector,
			Result: p.Vector{
				&p.Sample{
					Metric:    p.Metric{"app": "Application", "tag2": "tag2"},
					Value:     1,
					Timestamp: 123,
				},
			},
		}
		qm := models.QueryModel{
			LegendFormat: "legend {{app}}",
			UtcOffsetSec: 0,
			InstantQuery: true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			JSON: b,
		}
		tctx := setup()
		res, err := execute(tctx, query, qr)
		require.NoError(t, err)

		require.Len(t, res, 1)
		require.Equal(t, res[0].Name, "legend Application")
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Len(t, res[0].Fields[1].Labels, 2)
		require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.Equal(t, res[0].Fields[1].Config.DisplayNameFromDS, "legend Application")

		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
		require.Equal(t, int64(123), testValue.(time.Time).UnixMilli())
	})

	t.Run("scalar response should be parsed normally", func(t *testing.T) {
		t.Skip("TODO: implement scalar responses")
		qr := queryResult{
			Type: p.ValScalar,
			Result: &p.Scalar{
				Value:     1,
				Timestamp: 123,
			},
		}
		qm := models.QueryModel{
			LegendFormat: "",
			UtcOffsetSec: 0,
			InstantQuery: true,
		}
		b, err := json.Marshal(&qm)
		require.NoError(t, err)
		query := backend.DataQuery{
			JSON: b,
		}
		tctx := setup()
		res, err := execute(tctx, query, qr)
		require.NoError(t, err)

		require.Len(t, res, 1)
		require.Equal(t, res[0].Name, "1")
		require.Len(t, res[0].Fields, 2)
		require.Len(t, res[0].Fields[0].Labels, 0)
		require.Equal(t, res[0].Fields[0].Name, "Time")
		require.Equal(t, res[0].Fields[1].Name, "Value")
		require.Equal(t, res[0].Fields[1].Config.DisplayNameFromDS, "1")

		// Ensure the timestamps are UTC zoned
		testValue := res[0].Fields[0].At(0)
		require.Equal(t, "UTC", testValue.(time.Time).Location().String())
		require.Equal(t, int64(123), testValue.(time.Time).UnixMilli())
	})
}
||||||
|
|
||||||
|
// queryResult mirrors the "data" payload of a Prometheus API response:
// the result-type discriminator plus the typed result value.
type queryResult struct {
	Type   p.ValueType `json:"resultType"`
	Result interface{} `json:"result"`
}
||||||
|
|
||||||
|
func execute(tctx *testContext, query backend.DataQuery, qr interface{}) (data.Frames, error) { |
||||||
|
req := backend.QueryDataRequest{ |
||||||
|
Queries: []backend.DataQuery{query}, |
||||||
|
Headers: map[string]string{}, |
||||||
|
} |
||||||
|
|
||||||
|
promRes, err := toAPIResponse(qr) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
tctx.httpProvider.setResponse(promRes) |
||||||
|
|
||||||
|
res, err := tctx.queryData.Execute(context.Background(), &req) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
return res.Responses[req.Queries[0].RefID].Frames, nil |
||||||
|
} |
||||||
|
|
||||||
|
// apiResponse is the minimal envelope of a Prometheus HTTP API response.
type apiResponse struct {
	Status string          `json:"status"`
	Data   json.RawMessage `json:"data"`
}
||||||
|
|
||||||
|
func toAPIResponse(d interface{}) (*http.Response, error) { |
||||||
|
b, err := json.Marshal(d) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
res := apiResponse{ |
||||||
|
Status: "success", |
||||||
|
Data: json.RawMessage(b), |
||||||
|
} |
||||||
|
|
||||||
|
raw, err := json.Marshal(&res) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
|
||||||
|
return &http.Response{ |
||||||
|
StatusCode: 200, |
||||||
|
Body: ioutil.NopCloser(bytes.NewReader(raw)), |
||||||
|
}, nil |
||||||
|
} |
||||||
|
|
||||||
|
// testContext bundles the fake HTTP layer with the QueryData under test.
type testContext struct {
	// httpProvider serves the canned response set per subtest.
	httpProvider *fakeHttpClientProvider
	// queryData is the real querydata.QueryData wired to the fake provider.
	queryData *querydata.QueryData
}
||||||
|
|
||||||
|
func setup() *testContext { |
||||||
|
tracer, err := tracing.InitializeTracerForTest() |
||||||
|
if err != nil { |
||||||
|
panic(err) |
||||||
|
} |
||||||
|
httpProvider := &fakeHttpClientProvider{ |
||||||
|
opts: sdkhttpclient.Options{ |
||||||
|
Timeouts: &sdkhttpclient.DefaultTimeoutOptions, |
||||||
|
}, |
||||||
|
res: &http.Response{ |
||||||
|
StatusCode: 200, |
||||||
|
Body: ioutil.NopCloser(bytes.NewReader([]byte(`{}`))), |
||||||
|
}, |
||||||
|
} |
||||||
|
queryData, _ := querydata.New( |
||||||
|
httpProvider, |
||||||
|
setting.NewCfg(), |
||||||
|
&fakeFeatureToggles{enabled: true}, |
||||||
|
tracer, |
||||||
|
backend.DataSourceInstanceSettings{URL: "http://localhost:9090", JSONData: json.RawMessage(`{"timeInterval": "15s"}`)}, |
||||||
|
&fakeLogger{}, |
||||||
|
) |
||||||
|
|
||||||
|
return &testContext{ |
||||||
|
httpProvider: httpProvider, |
||||||
|
queryData: queryData, |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// fakeFeatureToggles reports the same enabled state for every feature name.
type fakeFeatureToggles struct {
	enabled bool
}
||||||
|
|
||||||
|
// IsEnabled returns the fixed toggle state regardless of the feature name.
func (f *fakeFeatureToggles) IsEnabled(feature string) bool {
	return f.enabled
}
||||||
|
|
||||||
|
// fakeHttpClientProvider embeds the real provider interface and answers
// every HTTP request with a canned response by acting as its own transport.
type fakeHttpClientProvider struct {
	httpclient.Provider
	// opts captures the last options passed to New/GetTransport.
	opts sdkhttpclient.Options
	// res is the canned response returned by RoundTrip.
	res *http.Response
}
||||||
|
|
||||||
|
func (p *fakeHttpClientProvider) New(opts ...sdkhttpclient.Options) (*http.Client, error) { |
||||||
|
p.opts = opts[0] |
||||||
|
c, err := sdkhttpclient.New(opts[0]) |
||||||
|
if err != nil { |
||||||
|
return nil, err |
||||||
|
} |
||||||
|
c.Transport = p |
||||||
|
return c, nil |
||||||
|
} |
||||||
|
|
||||||
|
// GetTransport records the first options value and returns the default
// transport.
func (p *fakeHttpClientProvider) GetTransport(opts ...sdkhttpclient.Options) (http.RoundTripper, error) {
	p.opts = opts[0]
	return http.DefaultTransport, nil
}
||||||
|
|
||||||
|
// setResponse swaps the canned response returned to subsequent requests.
func (p *fakeHttpClientProvider) setResponse(res *http.Response) {
	p.res = res
}
||||||
|
|
||||||
|
// RoundTrip satisfies http.RoundTripper by returning the canned response
// regardless of the request.
func (p *fakeHttpClientProvider) RoundTrip(req *http.Request) (*http.Response, error) {
	return p.res, nil
}
||||||
@ -0,0 +1,111 @@ |
|||||||
|
package querydata |
||||||
|
|
||||||
|
import ( |
||||||
|
"context" |
||||||
|
"fmt" |
||||||
|
"net/http" |
||||||
|
"sort" |
||||||
|
"strings" |
||||||
|
|
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||||
|
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||||
|
"github.com/grafana/grafana/pkg/tsdb/prometheus/models" |
||||||
|
"github.com/grafana/grafana/pkg/util/converter" |
||||||
|
jsoniter "github.com/json-iterator/go" |
||||||
|
) |
||||||
|
|
||||||
|
// parseResponse streams the Prometheus HTTP response body through the
// jsoniter-based converter and decorates the resulting frames with query
// metadata. The response body is always closed before returning; a close
// failure is only logged.
func (s *QueryData) parseResponse(ctx context.Context, q *models.Query, res *http.Response) (*backend.DataResponse, error) {
	defer func() {
		if err := res.Body.Close(); err != nil {
			s.log.Error("Failed to close response body", "err", err)
		}
	}()

	// Parse directly off the wire (1024-byte read buffer) instead of
	// buffering the whole body in memory.
	iter := jsoniter.Parse(jsoniter.ConfigDefault, res.Body, 1024)
	r := converter.ReadPrometheusStyleResult(iter)
	if r == nil {
		return nil, fmt.Errorf("received empty response from prometheus")
	}

	// The ExecutedQueryString can be viewed in QueryInspector in UI
	for _, frame := range r.Frames {
		addMetadataToFrame(q, frame)
	}

	return r, nil
}
||||||
|
|
||||||
|
// addMetadataToFrame attaches the executed-query string, frame name, step
// interval and display name to a frame produced by the converter. Frames
// without the expected time/value field pair only get the executed-query
// metadata.
func addMetadataToFrame(q *models.Query, frame *data.Frame) {
	if frame.Meta == nil {
		frame.Meta = &data.FrameMeta{}
	}
	frame.Meta.ExecutedQueryString = executedQueryString(q)
	if len(frame.Fields) < 2 {
		return
	}
	frame.Name = getName(q, frame)
	// Interval is expressed in milliseconds on the time field's config.
	frame.Fields[0].Config = &data.FieldConfig{Interval: float64(q.Step.Milliseconds())}
	if frame.Name != "" {
		frame.Fields[1].Config = &data.FieldConfig{DisplayNameFromDS: frame.Name}
	}
}
||||||
|
|
||||||
|
// this is based on the logic from the String() function in github.com/prometheus/common/model.go
|
||||||
|
func metricNameFromLabels(f *data.Frame) string { |
||||||
|
labels := f.Fields[1].Labels |
||||||
|
metricName, hasName := labels["__name__"] |
||||||
|
numLabels := len(labels) - 1 |
||||||
|
if !hasName { |
||||||
|
numLabels = len(labels) |
||||||
|
} |
||||||
|
labelStrings := make([]string, 0, numLabels) |
||||||
|
for label, value := range labels { |
||||||
|
if label != "__name__" { |
||||||
|
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value)) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
switch numLabels { |
||||||
|
case 0: |
||||||
|
if hasName { |
||||||
|
return metricName |
||||||
|
} |
||||||
|
return "{}" |
||||||
|
default: |
||||||
|
sort.Strings(labelStrings) |
||||||
|
return fmt.Sprintf("%s{%s}", metricName, strings.Join(labelStrings, ", ")) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
func executedQueryString(q *models.Query) string { |
||||||
|
return "Expr: " + q.Expr + "\n" + "Step: " + q.Step.String() |
||||||
|
} |
||||||
|
|
||||||
|
// getName derives the display name for a frame from the query's legend
// format. An empty return value means the name is left to be chosen
// downstream (the auto legend mode with labels present).
func getName(q *models.Query, frame *data.Frame) string {
	labels := frame.Fields[1].Labels
	legend := metricNameFromLabels(frame)

	if q.LegendFormat == legendFormatAuto && len(labels) > 0 {
		return ""
	}

	if q.LegendFormat != "" {
		// Substitute each {{label}} placeholder with the label's value;
		// placeholders for missing labels become the empty string.
		result := legendFormatRegexp.ReplaceAllFunc([]byte(q.LegendFormat), func(in []byte) []byte {
			labelName := strings.Replace(string(in), "{{", "", 1)
			labelName = strings.Replace(labelName, "}}", "", 1)
			labelName = strings.TrimSpace(labelName)
			if val, exists := labels[labelName]; exists {
				return []byte(val)
			}
			return []byte{}
		})
		legend = string(result)
	}

	// If legend is empty brackets, use query expression
	if legend == "{}" {
		return q.Expr
	}

	return legend
}
||||||
@ -0,0 +1,63 @@ |
|||||||
|
{ |
||||||
|
"frames": [ |
||||||
|
{ |
||||||
|
"schema": { |
||||||
|
"name": "1 / 0", |
||||||
|
"meta": { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: 1 / 0\nStep: 1s" |
||||||
|
}, |
||||||
|
"fields": [ |
||||||
|
{ |
||||||
|
"name": "Time", |
||||||
|
"type": "time", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "time.Time" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"interval": 1000 |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"name": "Value", |
||||||
|
"type": "number", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "float64" |
||||||
|
}, |
||||||
|
"labels": {}, |
||||||
|
"config": { |
||||||
|
"displayNameFromDS": "1 / 0" |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
}, |
||||||
|
"data": { |
||||||
|
"values": [ |
||||||
|
[ |
||||||
|
1641889530000, |
||||||
|
1641889531000, |
||||||
|
1641889532000 |
||||||
|
], |
||||||
|
[ |
||||||
|
null, |
||||||
|
null, |
||||||
|
null |
||||||
|
] |
||||||
|
], |
||||||
|
"entities": [ |
||||||
|
null, |
||||||
|
{ |
||||||
|
"Inf": [ |
||||||
|
0, |
||||||
|
1, |
||||||
|
2 |
||||||
|
] |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
@ -0,0 +1,24 @@ |
|||||||
|
🌟 This was machine generated. Do not edit. 🌟 |
||||||
|
|
||||||
|
Frame[0] { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: 1 / 0\nStep: 1s" |
||||||
|
} |
||||||
|
Name: 1 / 0 |
||||||
|
Dimensions: 2 Fields by 3 Rows |
||||||
|
+-------------------------------+-----------------+ |
||||||
|
| Name: Time | Name: Value | |
||||||
|
| Labels: | Labels: | |
||||||
|
| Type: []time.Time | Type: []float64 | |
||||||
|
+-------------------------------+-----------------+ |
||||||
|
| 2022-01-11 08:25:30 +0000 UTC | +Inf | |
||||||
|
| 2022-01-11 08:25:31 +0000 UTC | +Inf | |
||||||
|
| 2022-01-11 08:25:32 +0000 UTC | +Inf | |
||||||
|
+-------------------------------+-----------------+ |
||||||
|
|
||||||
|
|
||||||
|
====== TEST DATA RESPONSE (arrow base64) ====== |
||||||
|
FRAME=QVJST1cxAAD/////kAIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAAOAAAAADAAAAUAAAACgAAAAEAAAAEP7//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAAAw/v//CAAAABAAAAAFAAAAMSAvIDAAAAAEAAAAbmFtZQAAAABU/v//CAAAAHQAAABpAAAAeyJ0eXBlIjoidGltZXNlcmllcy1tYW55IiwiY3VzdG9tIjp7InJlc3VsdFR5cGUiOiJtYXRyaXgifSwiZXhlY3V0ZWRRdWVyeVN0cmluZyI6IkV4cHI6IDEgLyAwXG5TdGVwOiAxcyJ9AAAABAAAAG1ldGEAAAAAAgAAANgAAAAEAAAAQv///xQAAACgAAAAoAAAAAAAAAOgAAAAAwAAAFAAAAAsAAAABAAAABD///8IAAAAEAAAAAUAAABWYWx1ZQAAAAQAAABuYW1lAAAAADT///8IAAAADAAAAAIAAAB7fQAABgAAAGxhYmVscwAAVP///wgAAAAoAAAAHQAAAHsiZGlzcGxheU5hbWVGcm9tRFMiOiIxIC8gMCJ9AAAABgAAAGNvbmZpZwAAAAAAAFb///8AAAIABQAAAFZhbHVlABIAGAAUAAAAEwAMAAAACAAEABIAAAAUAAAAeAAAAIAAAAAAAAAKgAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAgADAAIAAQACAAAAAgAAAAcAAAAEQAAAHsiaW50ZXJ2YWwiOjEwMDB9AAAABgAAAGNvbmZpZwAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAAAwAAAAAAAAABQAAAAAAAADBAAKABgADAAIAAQACgAAABQAAABYAAAAAwAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAGAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAGAAAAAAAAAAAAAAAAgAAAAMAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAAAAAAAAAAAAEQVFNQpyRYADrBP1CnJFgDYSovUKckWAAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/EAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAQAAQAAAKACAAAAAAAAwAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAADgAAAAAwAAAFAAAAAoAAAABAAAABD+//8IAAAADAAAAAAAAAAAAAAABQAAAHJlZklkAAAAMP7//wgAAAAQAAAABQAAADEgLyAwAAAABAAAAG5hbWUAAAAAVP7//wgAAAB0AAAAaQAAAHsidHlwZSI6InRpbWVzZXJpZXMtbWFueSIsImN1c3RvbSI6eyJyZXN1bHRUeXBlIjoibWF0cml4In0sImV4ZWN1dGVkUXVlcnlTdHJpbmciOiJFeHByOiAxIC8gMFxuU3RlcDogMXMifQAAAAQAAABtZXRhAAAAAAIAAADYAAAABAAAAEL///8UAAAAoAAAAKAAAAAAAAADoAAAAAMAAABQAAAALAAAAAQAAAAQ////CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAAA0////CAAAAAwAAAACAAAAe30AAAYAAABsYWJlbHMAAFT///8IAAAAKAAAAB0AAAB7ImRpc3BsYXlOYW1lRnJvbURTIjoiMSAvIDAifQAAAAYAAABjb25maWcAAAAAAABW////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAHgAAACAAAAAAAAACoAAAAACAAAANAAAAAQAAADc////CAAAABAAAA
AEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAIAAwACAAEAAgAAAAIAAAAHAAAABEAAAB7ImludGVydmFsIjoxMDAwfQAAAAYAAABjb25maWcAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAuAIAAEFSUk9XMQ== |
||||||
@ -0,0 +1,56 @@ |
|||||||
|
{ |
||||||
|
"frames": [ |
||||||
|
{ |
||||||
|
"schema": { |
||||||
|
"name": "go_goroutines{job=\"prometheus\"}", |
||||||
|
"meta": { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: test1\nStep: 1s" |
||||||
|
}, |
||||||
|
"fields": [ |
||||||
|
{ |
||||||
|
"name": "Time", |
||||||
|
"type": "time", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "time.Time" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"interval": 1000 |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"name": "Value", |
||||||
|
"type": "number", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "float64" |
||||||
|
}, |
||||||
|
"labels": { |
||||||
|
"__name__": "go_goroutines", |
||||||
|
"job": "prometheus" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"displayNameFromDS": "go_goroutines{job=\"prometheus\"}" |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
}, |
||||||
|
"data": { |
||||||
|
"values": [ |
||||||
|
[ |
||||||
|
1641889533000, |
||||||
|
1641889534000, |
||||||
|
1641889537000 |
||||||
|
], |
||||||
|
[ |
||||||
|
21, |
||||||
|
32, |
||||||
|
43 |
||||||
|
] |
||||||
|
] |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
@ -0,0 +1,24 @@ |
|||||||
|
🌟 This was machine generated. Do not edit. 🌟 |
||||||
|
|
||||||
|
Frame[0] { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: test1\nStep: 1s" |
||||||
|
} |
||||||
|
Name: go_goroutines{job="prometheus"} |
||||||
|
Dimensions: 2 Fields by 3 Rows |
||||||
|
+-------------------------------+------------------------------------------------+ |
||||||
|
| Name: Time | Name: Value | |
||||||
|
| Labels: | Labels: __name__=go_goroutines, job=prometheus | |
||||||
|
| Type: []time.Time | Type: []float64 | |
||||||
|
+-------------------------------+------------------------------------------------+ |
||||||
|
| 2022-01-11 08:25:33 +0000 UTC | 21 | |
||||||
|
| 2022-01-11 08:25:34 +0000 UTC | 32 | |
||||||
|
| 2022-01-11 08:25:37 +0000 UTC | 43 | |
||||||
|
+-------------------------------+------------------------------------------------+ |
||||||
|
|
||||||
|
|
||||||
|
====== TEST DATA RESPONSE (arrow base64) ====== |
||||||
|
FRAME=QVJST1cxAAD/////8AIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAAPgAAAADAAAAaAAAACgAAAAEAAAAsP3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAADQ/f//CAAAACgAAAAfAAAAZ29fZ29yb3V0aW5lc3tqb2I9InByb21ldGhldXMifQAEAAAAbmFtZQAAAAAM/v//CAAAAHQAAABpAAAAeyJ0eXBlIjoidGltZXNlcmllcy1tYW55IiwiY3VzdG9tIjp7InJlc3VsdFR5cGUiOiJtYXRyaXgifSwiZXhlY3V0ZWRRdWVyeVN0cmluZyI6IkV4cHI6IHRlc3QxXG5TdGVwOiAxcyJ9AAAABAAAAG1ldGEAAAAAAgAAACABAAAEAAAA+v7//xQAAADoAAAA6AAAAAAAAAPoAAAAAwAAAHwAAAAsAAAABAAAAMj+//8IAAAAEAAAAAUAAABWYWx1ZQAAAAQAAABuYW1lAAAAAOz+//8IAAAAOAAAAC8AAAB7Il9fbmFtZV9fIjoiZ29fZ29yb3V0aW5lcyIsImpvYiI6InByb21ldGhldXMifQAGAAAAbGFiZWxzAAA4////CAAAAEQAAAA5AAAAeyJkaXNwbGF5TmFtZUZyb21EUyI6ImdvX2dvcm91dGluZXN7am9iPVwicHJvbWV0aGV1c1wifSJ9AAAABgAAAGNvbmZpZwAAAAAAAFb///8AAAIABQAAAFZhbHVlABIAGAAUAAAAEwAMAAAACAAEABIAAAAUAAAAeAAAAIAAAAAAAAAKgAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAgADAAIAAQACAAAAAgAAAAcAAAAEQAAAHsiaW50ZXJ2YWwiOjEwMDB9AAAABgAAAGNvbmZpZwAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAAAwAAAAAAAAABQAAAAAAAADBAAKABgADAAIAAQACgAAABQAAABYAAAAAwAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAGAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAGAAAAAAAAAAAAAAAAgAAAAMAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAAAAAAAAAAAAKLlxtQpyRYAbIAC1SnJFgDKULXVKckWAAAAAAAANUAAAAAAAABAQAAAAAAAgEVAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAQAAQAAAAADAAAAAAAAwAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAAD4AAAAAwAAAGgAAAAoAAAABAAAALD9//8IAAAADAAAAAAAAAAAAAAABQAAAHJlZklkAAAA0P3//wgAAAAoAAAAHwAAAGdvX2dvcm91dGluZXN7am9iPSJwcm9tZXRoZXVzIn0ABAAAAG5hbWUAAAAADP7//wgAAAB0AAAAaQAAAHsidHlwZSI6InRpbWVzZXJpZXMtbWFueSIsImN1c3RvbSI6eyJyZXN1bHRUeXBlIjoibWF0cml4In0sImV4ZWN1dGVkUXVlcnlTdHJpbmciOiJFeHByOiB0ZXN0MVxuU3RlcDogMXMifQAAAAQAAABtZXRhAAAAAAIAAAAgAQAABAAAAPr+//8UAAAA6AAAAOgAAAAAAAAD6AAAAAMAAAB8AAAALAAAAAQAAADI/v//CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAADs/v//CAAAADgAAAAvAAAAeyJfX25hbWVfXyI6ImdvX2dvcm91dGluZXMiLCJqb2IiOiJwcm9tZXRoZX
VzIn0ABgAAAGxhYmVscwAAOP///wgAAABEAAAAOQAAAHsiZGlzcGxheU5hbWVGcm9tRFMiOiJnb19nb3JvdXRpbmVze2pvYj1cInByb21ldGhldXNcIn0ifQAAAAYAAABjb25maWcAAAAAAABW////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAHgAAACAAAAAAAAACoAAAAACAAAANAAAAAQAAADc////CAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAIAAwACAAEAAgAAAAIAAAAHAAAABEAAAB7ImludGVydmFsIjoxMDAwfQAAAAYAAABjb25maWcAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAGAMAAEFSUk9XMQ== |
||||||
@ -0,0 +1,66 @@ |
|||||||
|
{ |
||||||
|
"frames": [ |
||||||
|
{ |
||||||
|
"schema": { |
||||||
|
"name": "{handler=\"/api/v1/query_range\", job=\"prometheus\"}", |
||||||
|
"meta": { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
}, |
||||||
|
"fields": [ |
||||||
|
{ |
||||||
|
"name": "Time", |
||||||
|
"type": "time", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "time.Time" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"interval": 1000 |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"name": "Value", |
||||||
|
"type": "number", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "float64" |
||||||
|
}, |
||||||
|
"labels": { |
||||||
|
"handler": "/api/v1/query_range", |
||||||
|
"job": "prometheus" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"displayNameFromDS": "{handler=\"/api/v1/query_range\", job=\"prometheus\"}" |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
}, |
||||||
|
"data": { |
||||||
|
"values": [ |
||||||
|
[ |
||||||
|
1641889530000, |
||||||
|
1641889531000, |
||||||
|
1641889532000 |
||||||
|
], |
||||||
|
[ |
||||||
|
null, |
||||||
|
null, |
||||||
|
null |
||||||
|
] |
||||||
|
], |
||||||
|
"entities": [ |
||||||
|
null, |
||||||
|
{ |
||||||
|
"NaN": [ |
||||||
|
0, |
||||||
|
1, |
||||||
|
2 |
||||||
|
] |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
@ -0,0 +1,24 @@ |
|||||||
|
🌟 This was machine generated. Do not edit. 🌟 |
||||||
|
|
||||||
|
Frame[0] { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
} |
||||||
|
Name: {handler="/api/v1/query_range", job="prometheus"} |
||||||
|
Dimensions: 2 Fields by 3 Rows |
||||||
|
+-------------------------------+-----------------------------------------------------+ |
||||||
|
| Name: Time | Name: Value | |
||||||
|
| Labels: | Labels: handler=/api/v1/query_range, job=prometheus | |
||||||
|
| Type: []time.Time | Type: []float64 | |
||||||
|
+-------------------------------+-----------------------------------------------------+ |
||||||
|
| 2022-01-11 08:25:30 +0000 UTC | NaN | |
||||||
|
| 2022-01-11 08:25:31 +0000 UTC | NaN | |
||||||
|
| 2022-01-11 08:25:32 +0000 UTC | NaN | |
||||||
|
+-------------------------------+-----------------------------------------------------+ |
||||||
|
|
||||||
|
|
||||||
|
====== TEST DATA RESPONSE (arrow base64) ====== |
||||||
|
FRAME=QVJST1cxAAD/////GAMAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAAAgBAAADAAAAfAAAACgAAAAEAAAAhP3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAACk/f//CAAAADwAAAAxAAAAe2hhbmRsZXI9Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCBqb2I9InByb21ldGhldXMifQAAAAQAAABuYW1lAAAAAPT9//8IAAAAcAAAAGQAAAB7InR5cGUiOiJ0aW1lc2VyaWVzLW1hbnkiLCJjdXN0b20iOnsicmVzdWx0VHlwZSI6Im1hdHJpeCJ9LCJleGVjdXRlZFF1ZXJ5U3RyaW5nIjoiRXhwcjogXG5TdGVwOiAxcyJ9AAAAAAQAAABtZXRhAAAAAAIAAAA8AQAABAAAAN7+//8UAAAABAEAAAQBAAAAAAADBAEAAAMAAACEAAAALAAAAAQAAACs/v//CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAADQ/v//CAAAAEAAAAA0AAAAeyJoYW5kbGVyIjoiL2FwaS92MS9xdWVyeV9yYW5nZSIsImpvYiI6InByb21ldGhldXMifQAAAAAGAAAAbGFiZWxzAAAk////CAAAAFgAAABNAAAAeyJkaXNwbGF5TmFtZUZyb21EUyI6IntoYW5kbGVyPVwiL2FwaS92MS9xdWVyeV9yYW5nZVwiLCBqb2I9XCJwcm9tZXRoZXVzXCJ9In0AAAAGAAAAY29uZmlnAAAAAAAAVv///wAAAgAFAAAAVmFsdWUAEgAYABQAAAATAAwAAAAIAAQAEgAAABQAAAB4AAAAgAAAAAAAAAqAAAAAAgAAADQAAAAEAAAA3P///wgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAACAAMAAgABAAIAAAACAAAABwAAAARAAAAeyJpbnRlcnZhbCI6MTAwMH0AAAAGAAAAY29uZmlnAAAAAAAAAAAGAAgABgAGAAAAAAADAAQAAABUaW1lAAAAAP////+4AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAAMAAAAAAAAAAUAAAAAAAAAwQACgAYAAwACAAEAAoAAAAUAAAAWAAAAAMAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAIAAAADAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAABEFRTUKckWAA6wT9QpyRYA2EqL1CnJFgEAAAAAAPh/AQAAAAAA+H8BAAAAAAD4fxAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA8AAAAAAAEAAEAAAAoAwAAAAAAAMAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAoADAAAAAgABAAKAAAACAAAAAgBAAADAAAAfAAAACgAAAAEAAAAhP3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAACk/f//CAAAADwAAAAxAAAAe2hhbmRsZXI9Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCBqb2I9InByb21ldGhldXMifQAAAAQAAABuYW1lAAAAAPT9//8IAAAAcAAAAGQAAAB7InR5cGUiOiJ0aW1lc2VyaWVzLW1hbnkiLCJjdXN0b20iOnsicmVzdWx0VHlwZSI6Im1hdHJpeCJ9LCJleGVjdXRlZFF1ZXJ5U3RyaW5nIjoiRXhwcjogXG5TdGVwOiAxcyJ9AAAAAAQAAABtZXRhAAAAAAIAAAA8AQAABAAAAN7+//8UAAAABAEAAAQBAAAAAAADBAEAAAMAAACEAAAALAAAAAQAAACs/v//CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAA
DQ/v//CAAAAEAAAAA0AAAAeyJoYW5kbGVyIjoiL2FwaS92MS9xdWVyeV9yYW5nZSIsImpvYiI6InByb21ldGhldXMifQAAAAAGAAAAbGFiZWxzAAAk////CAAAAFgAAABNAAAAeyJkaXNwbGF5TmFtZUZyb21EUyI6IntoYW5kbGVyPVwiL2FwaS92MS9xdWVyeV9yYW5nZVwiLCBqb2I9XCJwcm9tZXRoZXVzXCJ9In0AAAAGAAAAY29uZmlnAAAAAAAAVv///wAAAgAFAAAAVmFsdWUAEgAYABQAAAATAAwAAAAIAAQAEgAAABQAAAB4AAAAgAAAAAAAAAqAAAAAAgAAADQAAAAEAAAA3P///wgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAACAAMAAgABAAIAAAACAAAABwAAAARAAAAeyJpbnRlcnZhbCI6MTAwMH0AAAAGAAAAY29uZmlnAAAAAAAAAAAGAAgABgAGAAAAAAADAAQAAABUaW1lAAAAAEgDAABBUlJPVzE= |
||||||
@ -0,0 +1,112 @@ |
|||||||
|
{ |
||||||
|
"frames": [ |
||||||
|
{ |
||||||
|
"schema": { |
||||||
|
"name": "prometheus_http_requests_total{code=\"200\", handler=\"/api/v1/query_range\", job=\"prometheus\"}", |
||||||
|
"meta": { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
}, |
||||||
|
"fields": [ |
||||||
|
{ |
||||||
|
"name": "Time", |
||||||
|
"type": "time", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "time.Time" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"interval": 1000 |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"name": "Value", |
||||||
|
"type": "number", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "float64" |
||||||
|
}, |
||||||
|
"labels": { |
||||||
|
"__name__": "prometheus_http_requests_total", |
||||||
|
"code": "200", |
||||||
|
"handler": "/api/v1/query_range", |
||||||
|
"job": "prometheus" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"displayNameFromDS": "prometheus_http_requests_total{code=\"200\", handler=\"/api/v1/query_range\", job=\"prometheus\"}" |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
}, |
||||||
|
"data": { |
||||||
|
"values": [ |
||||||
|
[ |
||||||
|
1641889530123, |
||||||
|
1641889531123, |
||||||
|
1641889532123 |
||||||
|
], |
||||||
|
[ |
||||||
|
21, |
||||||
|
32, |
||||||
|
43 |
||||||
|
] |
||||||
|
] |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"schema": { |
||||||
|
"name": "prometheus_http_requests_total{code=\"400\", handler=\"/api/v1/query_range\", job=\"prometheus\"}", |
||||||
|
"meta": { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
}, |
||||||
|
"fields": [ |
||||||
|
{ |
||||||
|
"name": "Time", |
||||||
|
"type": "time", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "time.Time" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"interval": 1000 |
||||||
|
} |
||||||
|
}, |
||||||
|
{ |
||||||
|
"name": "Value", |
||||||
|
"type": "number", |
||||||
|
"typeInfo": { |
||||||
|
"frame": "float64" |
||||||
|
}, |
||||||
|
"labels": { |
||||||
|
"__name__": "prometheus_http_requests_total", |
||||||
|
"code": "400", |
||||||
|
"handler": "/api/v1/query_range", |
||||||
|
"job": "prometheus" |
||||||
|
}, |
||||||
|
"config": { |
||||||
|
"displayNameFromDS": "prometheus_http_requests_total{code=\"400\", handler=\"/api/v1/query_range\", job=\"prometheus\"}" |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
}, |
||||||
|
"data": { |
||||||
|
"values": [ |
||||||
|
[ |
||||||
|
1641889530123, |
||||||
|
1641889531123, |
||||||
|
1641889532123 |
||||||
|
], |
||||||
|
[ |
||||||
|
54, |
||||||
|
65, |
||||||
|
76 |
||||||
|
] |
||||||
|
] |
||||||
|
} |
||||||
|
} |
||||||
|
] |
||||||
|
} |
||||||
@ -0,0 +1,46 @@ |
|||||||
|
🌟 This was machine generated. Do not edit. 🌟 |
||||||
|
|
||||||
|
Frame[0] { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
} |
||||||
|
Name: prometheus_http_requests_total{code="200", handler="/api/v1/query_range", job="prometheus"} |
||||||
|
Dimensions: 2 Fields by 3 Rows |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
| Name: Time | Name: Value | |
||||||
|
| Labels: | Labels: __name__=prometheus_http_requests_total, code=200, handler=/api/v1/query_range, job=prometheus | |
||||||
|
| Type: []time.Time | Type: []float64 | |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
| 2022-01-11 08:25:30.123 +0000 UTC | 21 | |
||||||
|
| 2022-01-11 08:25:31.123 +0000 UTC | 32 | |
||||||
|
| 2022-01-11 08:25:32.123 +0000 UTC | 43 | |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Frame[1] { |
||||||
|
"type": "timeseries-many", |
||||||
|
"custom": { |
||||||
|
"resultType": "matrix" |
||||||
|
}, |
||||||
|
"executedQueryString": "Expr: \nStep: 1s" |
||||||
|
} |
||||||
|
Name: prometheus_http_requests_total{code="400", handler="/api/v1/query_range", job="prometheus"} |
||||||
|
Dimensions: 2 Fields by 3 Rows |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
| Name: Time | Name: Value | |
||||||
|
| Labels: | Labels: __name__=prometheus_http_requests_total, code=400, handler=/api/v1/query_range, job=prometheus | |
||||||
|
| Type: []time.Time | Type: []float64 | |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
| 2022-01-11 08:25:30.123 +0000 UTC | 54 | |
||||||
|
| 2022-01-11 08:25:31.123 +0000 UTC | 65 | |
||||||
|
| 2022-01-11 08:25:32.123 +0000 UTC | 76 | |
||||||
|
+-----------------------------------+--------------------------------------------------------------------------------------------------------+ |
||||||
|
|
||||||
|
|
||||||
|
====== TEST DATA RESPONSE (arrow base64) ====== |
||||||
|
FRAME=QVJST1cxAAD/////qAMAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAADABAAADAAAApAAAACgAAAAEAAAA+Pz//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAAAY/f//CAAAAGQAAABbAAAAcHJvbWV0aGV1c19odHRwX3JlcXVlc3RzX3RvdGFse2NvZGU9IjIwMCIsIGhhbmRsZXI9Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCBqb2I9InByb21ldGhldXMifQAEAAAAbmFtZQAAAACQ/f//CAAAAHAAAABkAAAAeyJ0eXBlIjoidGltZXNlcmllcy1tYW55IiwiY3VzdG9tIjp7InJlc3VsdFR5cGUiOiJtYXRyaXgifSwiZXhlY3V0ZWRRdWVyeVN0cmluZyI6IkV4cHI6IFxuU3RlcDogMXMifQAAAAAEAAAAbWV0YQAAAAACAAAAoAEAAAQAAAB6/v//FAAAAGgBAABoAQAAAAAAA2gBAAADAAAAvAAAACwAAAAEAAAASP7//wgAAAAQAAAABQAAAFZhbHVlAAAABAAAAG5hbWUAAAAAbP7//wgAAAB4AAAAbQAAAHsiX19uYW1lX18iOiJwcm9tZXRoZXVzX2h0dHBfcmVxdWVzdHNfdG90YWwiLCJjb2RlIjoiMjAwIiwiaGFuZGxlciI6Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCJqb2IiOiJwcm9tZXRoZXVzIn0AAAAGAAAAbGFiZWxzAAD4/v//CAAAAIQAAAB5AAAAeyJkaXNwbGF5TmFtZUZyb21EUyI6InByb21ldGhldXNfaHR0cF9yZXF1ZXN0c190b3RhbHtjb2RlPVwiMjAwXCIsIGhhbmRsZXI9XCIvYXBpL3YxL3F1ZXJ5X3JhbmdlXCIsIGpvYj1cInByb21ldGhldXNcIn0ifQAAAAYAAABjb25maWcAAAAAAABW////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAHgAAACAAAAAAAAACoAAAAACAAAANAAAAAQAAADc////CAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAIAAwACAAEAAgAAAAIAAAAHAAAABEAAAB7ImludGVydmFsIjoxMDAwfQAAAAYAAABjb25maWcAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAAAAAAP////+4AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAAMAAAAAAAAAAUAAAAAAAAAwQACgAYAAwACAAEAAoAAAAUAAAAWAAAAAMAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAIAAAADAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAMAYahvUKckWwOIEV9QpyRbArJ+S1CnJFgAAAAAAADVAAAAAAAAAQEAAAAAAAIBFQBAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA4AAAAAAAEAAEAAAC4AwAAAAAAAMAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAACgAMAAAACAAEAAoAAAAIAAAAMAEAAAMAAACkAAAAKAAAAAQAAAD4/P//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAABj9//8IAAAAZAAAAFsAAABwcm9tZXRoZXVzX2h0dHBfcmVxdWVzdHNfdG90YWx7Y29kZT0iMjAwIiwgaGFuZGxlcj0iL2FwaS92MS9xdWVyeV9yYW5nZSIsIGpvYj0icHJvbWV0aGV1cyJ9AAQAAABuYW1lAAAAAJD9//8IAAAAcAAAAGQAAAB7InR5cGUiOiJ0aW1lc2VyaWVzLW1hbn
kiLCJjdXN0b20iOnsicmVzdWx0VHlwZSI6Im1hdHJpeCJ9LCJleGVjdXRlZFF1ZXJ5U3RyaW5nIjoiRXhwcjogXG5TdGVwOiAxcyJ9AAAAAAQAAABtZXRhAAAAAAIAAACgAQAABAAAAHr+//8UAAAAaAEAAGgBAAAAAAADaAEAAAMAAAC8AAAALAAAAAQAAABI/v//CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAABs/v//CAAAAHgAAABtAAAAeyJfX25hbWVfXyI6InByb21ldGhldXNfaHR0cF9yZXF1ZXN0c190b3RhbCIsImNvZGUiOiIyMDAiLCJoYW5kbGVyIjoiL2FwaS92MS9xdWVyeV9yYW5nZSIsImpvYiI6InByb21ldGhldXMifQAAAAYAAABsYWJlbHMAAPj+//8IAAAAhAAAAHkAAAB7ImRpc3BsYXlOYW1lRnJvbURTIjoicHJvbWV0aGV1c19odHRwX3JlcXVlc3RzX3RvdGFse2NvZGU9XCIyMDBcIiwgaGFuZGxlcj1cIi9hcGkvdjEvcXVlcnlfcmFuZ2VcIiwgam9iPVwicHJvbWV0aGV1c1wifSJ9AAAABgAAAGNvbmZpZwAAAAAAAFb///8AAAIABQAAAFZhbHVlABIAGAAUAAAAEwAMAAAACAAEABIAAAAUAAAAeAAAAIAAAAAAAAAKgAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAgADAAIAAQACAAAAAgAAAAcAAAAEQAAAHsiaW50ZXJ2YWwiOjEwMDB9AAAABgAAAGNvbmZpZwAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAADQAwAAQVJST1cx |
||||||
|
FRAME=QVJST1cxAAD/////qAMAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEEAAoADAAAAAgABAAKAAAACAAAADABAAADAAAApAAAACgAAAAEAAAA+Pz//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAAAY/f//CAAAAGQAAABbAAAAcHJvbWV0aGV1c19odHRwX3JlcXVlc3RzX3RvdGFse2NvZGU9IjQwMCIsIGhhbmRsZXI9Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCBqb2I9InByb21ldGhldXMifQAEAAAAbmFtZQAAAACQ/f//CAAAAHAAAABkAAAAeyJ0eXBlIjoidGltZXNlcmllcy1tYW55IiwiY3VzdG9tIjp7InJlc3VsdFR5cGUiOiJtYXRyaXgifSwiZXhlY3V0ZWRRdWVyeVN0cmluZyI6IkV4cHI6IFxuU3RlcDogMXMifQAAAAAEAAAAbWV0YQAAAAACAAAAoAEAAAQAAAB6/v//FAAAAGgBAABoAQAAAAAAA2gBAAADAAAAvAAAACwAAAAEAAAASP7//wgAAAAQAAAABQAAAFZhbHVlAAAABAAAAG5hbWUAAAAAbP7//wgAAAB4AAAAbQAAAHsiX19uYW1lX18iOiJwcm9tZXRoZXVzX2h0dHBfcmVxdWVzdHNfdG90YWwiLCJjb2RlIjoiNDAwIiwiaGFuZGxlciI6Ii9hcGkvdjEvcXVlcnlfcmFuZ2UiLCJqb2IiOiJwcm9tZXRoZXVzIn0AAAAGAAAAbGFiZWxzAAD4/v//CAAAAIQAAAB5AAAAeyJkaXNwbGF5TmFtZUZyb21EUyI6InByb21ldGhldXNfaHR0cF9yZXF1ZXN0c190b3RhbHtjb2RlPVwiNDAwXCIsIGhhbmRsZXI9XCIvYXBpL3YxL3F1ZXJ5X3JhbmdlXCIsIGpvYj1cInByb21ldGhldXNcIn0ifQAAAAYAAABjb25maWcAAAAAAABW////AAACAAUAAABWYWx1ZQASABgAFAAAABMADAAAAAgABAASAAAAFAAAAHgAAACAAAAAAAAACoAAAAACAAAANAAAAAQAAADc////CAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAIAAwACAAEAAgAAAAIAAAAHAAAABEAAAB7ImludGVydmFsIjoxMDAwfQAAAAYAAABjb25maWcAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAAAAAAP////+4AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAAMAAAAAAAAAAUAAAAAAAAAwQACgAYAAwACAAEAAoAAAAUAAAAWAAAAAMAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAAAAAAAIAAAADAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAMAYahvUKckWwOIEV9QpyRbArJ+S1CnJFgAAAAAAAEtAAAAAAABAUEAAAAAAAABTQBAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA4AAAAAAAEAAEAAAC4AwAAAAAAAMAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAACgAMAAAACAAEAAoAAAAIAAAAMAEAAAMAAACkAAAAKAAAAAQAAAD4/P//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAABj9//8IAAAAZAAAAFsAAABwcm9tZXRoZXVzX2h0dHBfcmVxdWVzdHNfdG90YWx7Y29kZT0iNDAwIiwgaGFuZGxlcj0iL2FwaS92MS9xdWVyeV9yYW5nZSIsIGpvYj0icHJvbWV0aGV1cyJ9AAQAAABuYW1lAAAAAJD9//8IAAAAcAAAAGQAAAB7InR5cGUiOiJ0aW1lc2VyaWVzLW1hbn
kiLCJjdXN0b20iOnsicmVzdWx0VHlwZSI6Im1hdHJpeCJ9LCJleGVjdXRlZFF1ZXJ5U3RyaW5nIjoiRXhwcjogXG5TdGVwOiAxcyJ9AAAAAAQAAABtZXRhAAAAAAIAAACgAQAABAAAAHr+//8UAAAAaAEAAGgBAAAAAAADaAEAAAMAAAC8AAAALAAAAAQAAABI/v//CAAAABAAAAAFAAAAVmFsdWUAAAAEAAAAbmFtZQAAAABs/v//CAAAAHgAAABtAAAAeyJfX25hbWVfXyI6InByb21ldGhldXNfaHR0cF9yZXF1ZXN0c190b3RhbCIsImNvZGUiOiI0MDAiLCJoYW5kbGVyIjoiL2FwaS92MS9xdWVyeV9yYW5nZSIsImpvYiI6InByb21ldGhldXMifQAAAAYAAABsYWJlbHMAAPj+//8IAAAAhAAAAHkAAAB7ImRpc3BsYXlOYW1lRnJvbURTIjoicHJvbWV0aGV1c19odHRwX3JlcXVlc3RzX3RvdGFse2NvZGU9XCI0MDBcIiwgaGFuZGxlcj1cIi9hcGkvdjEvcXVlcnlfcmFuZ2VcIiwgam9iPVwicHJvbWV0aGV1c1wifSJ9AAAABgAAAGNvbmZpZwAAAAAAAFb///8AAAIABQAAAFZhbHVlABIAGAAUAAAAEwAMAAAACAAEABIAAAAUAAAAeAAAAIAAAAAAAAAKgAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAgADAAIAAQACAAAAAgAAAAcAAAAEQAAAHsiaW50ZXJ2YWwiOjEwMDB9AAAABgAAAGNvbmZpZwAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAADQAwAAQVJST1cx |
||||||
Loading…
Reference in new issue