mirror of https://github.com/grafana/grafana
azuremonitor: port azure log analytics query function to the backend (#23839)
* azuremonitor: add support for log analytics macros. Also adds tests for the KQL macros.
* azuremonitor: backend implementation for Log Analytics.
* azuremonitor: remove gzip header from plugin route. The Go net/http library adds an Accept-Encoding gzip header automatically (https://golang.org/src/net/http/transport.go#L2454), so there is no need to specify it manually.
* azuremonitor: parses log analytics time series.
* azuremonitor: support for table data for Log Analytics.
* azuremonitor: for log analytics, switch to calling the API from the backend for time series and table queries.
* azuremonitor: fix missing err check.
* azuremonitor: support Azure China and Azure Gov for log analytics on the backend.
* azuremonitor: review fixes.
* azuremonitor: rename test files folder to testdata, to follow Go conventions for test data.
* azuremonitor: review fixes.
* azuremonitor: better error message for http requests.
* azuremonitor: fix for load workspaces on config page.
* azuremonitor: strict null check fixes.

Co-authored-by: bergquist <carl.bergquist@gmail.com>
parent 458f6bdb87
commit c05049f395
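For context, the KQL macros described in the commit message above are expanded on the backend by the package-level KqlInterpolate function, which is exercised by the macro tests further down in this change. The following is a minimal sketch only, not part of the commit: the raw query string and interval value are hypothetical, and the time range reuses the same 34-minute window the tests use.

```go
package azuremonitor

import (
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
)

// exampleKqlInterpolation is a hypothetical sketch showing how a raw KQL query
// containing Grafana macros is expanded server-side, mirroring the macro tests below.
func exampleKqlInterpolation() (string, error) {
	query := &tsdb.Query{
		// "interval" drives the $__interval macro; "5m" expands to 300000ms in the tests.
		Model: simplejson.NewFromAny(map[string]interface{}{"interval": "5m"}),
	}
	// 2018-03-15T13:00:00Z .. 2018-03-15T13:34:00Z, the same range used by the tests.
	timeRange := &tsdb.TimeRange{From: "1521118800000", To: "1521120840000"}

	raw := "Perf | where $__timeFilter() | summarize avg(CounterValue) by bin(TimeGenerated, $__interval)"

	// $__timeFilter() becomes TimeGenerated >= datetime(...) and <= datetime(...),
	// and $__interval becomes a concrete bin size such as 300000ms.
	return KqlInterpolate(query, timeRange, raw, "TimeGenerated")
}
```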
@ -0,0 +1,354 @@
package azuremonitor

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"time"

	"github.com/grafana/grafana/pkg/api/pluginproxy"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/util/errutil"
	"github.com/opentracing/opentracing-go"
	"golang.org/x/net/context/ctxhttp"
)

// AzureLogAnalyticsDatasource calls the Azure Log Analytics API.
type AzureLogAnalyticsDatasource struct {
	httpClient *http.Client
	dsInfo     *models.DataSource
}

// AzureLogAnalyticsQuery is the query request that is built from the saved values from the UI.
type AzureLogAnalyticsQuery struct {
	RefID        string
	ResultFormat string
	URL          string
	Params       url.Values
	Target       string
}

// executeTimeSeriesQuery does the following:
// 1. builds the Azure Monitor URL and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into the time series format
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
	result := &tsdb.Response{
		Results: map[string]*tsdb.QueryResult{},
	}

	queries, err := e.buildQueries(originalQueries, timeRange)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		queryRes, err := e.executeQuery(ctx, query, originalQueries, timeRange)
		if err != nil {
			queryRes.Error = err
		}
		result.Results[query.RefID] = queryRes
	}

	return result, nil
}

func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureLogAnalyticsQuery, error) {
	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}

	for _, query := range queries {
		azureLogAnalyticsTarget := query.Model.Get("azureLogAnalytics").MustMap()
		azlog.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)

		resultFormat := fmt.Sprintf("%v", azureLogAnalyticsTarget["resultFormat"])
		if resultFormat == "" {
			resultFormat = "time_series"
		}

		urlComponents := map[string]string{}
		urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
		urlComponents["workspace"] = fmt.Sprintf("%v", azureLogAnalyticsTarget["workspace"])
		apiURL := fmt.Sprintf("%s/query", urlComponents["workspace"])

		params := url.Values{}
		rawQuery, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", azureLogAnalyticsTarget["query"]), "TimeGenerated")
		if err != nil {
			return nil, err
		}
		params.Add("query", rawQuery)

		azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, &AzureLogAnalyticsQuery{
			RefID:        query.RefId,
			ResultFormat: resultFormat,
			URL:          apiURL,
			Params:       params,
			Target:       params.Encode(),
		})
	}

	return azureLogAnalyticsQueries, nil
}

func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, error) {
	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

	req, err := e.createRequest(ctx, e.dsInfo)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	req.URL.Path = path.Join(req.URL.Path, query.URL)
	req.URL.RawQuery = query.Params.Encode()

	span, ctx := opentracing.StartSpanFromContext(ctx, "azure log analytics query")
	span.SetTag("target", query.Target)
	span.SetTag("from", timeRange.From)
	span.SetTag("until", timeRange.To)
	span.SetTag("datasource_id", e.dsInfo.Id)
	span.SetTag("org_id", e.dsInfo.OrgId)

	defer span.Finish()

	if err := opentracing.GlobalTracer().Inject(
		span.Context(),
		opentracing.HTTPHeaders,
		opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
	res, err := ctxhttp.Do(ctx, e.httpClient, req)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	data, err := e.unmarshalResponse(res)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	azlog.Debug("AzureLogsAnalytics", "Response", queryResult)

	if query.ResultFormat == "table" {
		queryResult.Tables, queryResult.Meta, err = e.parseToTables(data, query.Params.Get("query"))
		if err != nil {
			return nil, err
		}
	} else {
		queryResult.Series, queryResult.Meta, err = e.parseToTimeSeries(data, query.Params.Get("query"))
		if err != nil {
			return nil, err
		}
	}

	return queryResult, nil
}

func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
	u, _ := url.Parse(dsInfo.Url)
	u.Path = path.Join(u.Path, "render")

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		azlog.Debug("Failed to create request", "error", err)
		return nil, errutil.Wrap("Failed to create request", err)
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

	// find plugin
	plugin, ok := plugins.DataSources[dsInfo.Type]
	if !ok {
		return nil, errors.New("Unable to find datasource plugin Azure Monitor")
	}
	cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor")

	logAnalyticsRoute, proxypass, err := e.getPluginRoute(plugin, cloudName)
	if err != nil {
		return nil, err
	}
	pluginproxy.ApplyRoute(ctx, req, proxypass, logAnalyticsRoute, dsInfo)

	return req, nil
}

func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) (*plugins.AppPluginRoute, string, error) {
	pluginRouteName := "loganalyticsazure"

	switch cloudName {
	case "chinaazuremonitor":
		pluginRouteName = "chinaloganalyticsazure"
	case "govazuremonitor":
		pluginRouteName = "govloganalyticsazure"
	}

	var logAnalyticsRoute *plugins.AppPluginRoute

	for _, route := range plugin.Routes {
		if route.Path == pluginRouteName {
			logAnalyticsRoute = route
			break
		}
	}

	return logAnalyticsRoute, pluginRouteName, nil
}

func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (AzureLogAnalyticsResponse, error) {
	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()

	if err != nil {
		return AzureLogAnalyticsResponse{}, err
	}

	if res.StatusCode/100 != 2 {
		azlog.Debug("Request failed", "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, fmt.Errorf("Request failed status: %v", res.Status)
	}

	var data AzureLogAnalyticsResponse
	err = json.Unmarshal(body, &data)
	if err != nil {
		azlog.Debug("Failed to unmarshal Azure Log Analytics response", "error", err, "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, err
	}

	return data, nil
}

func (e *AzureLogAnalyticsDatasource) parseToTables(data AzureLogAnalyticsResponse, query string) ([]*tsdb.Table, *simplejson.Json, error) {
	meta := metadata{
		Query: query,
	}

	tables := make([]*tsdb.Table, 0)
	for _, t := range data.Tables {
		if t.Name == "PrimaryResult" {
			table := tsdb.Table{
				Columns: make([]tsdb.TableColumn, 0),
				Rows:    make([]tsdb.RowValues, 0),
			}

			meta.Columns = make([]column, 0)
			for _, v := range t.Columns {
				meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})
				table.Columns = append(table.Columns, tsdb.TableColumn{Text: v.Name})
			}

			for _, r := range t.Rows {
				values := make([]interface{}, len(table.Columns))
				for i := 0; i < len(table.Columns); i++ {
					values[i] = r[i]
				}
				table.Rows = append(table.Rows, values)
			}
			tables = append(tables, &table)
			return tables, simplejson.NewFromAny(meta), nil
		}
	}

	return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
}

func (e *AzureLogAnalyticsDatasource) parseToTimeSeries(data AzureLogAnalyticsResponse, query string) (tsdb.TimeSeriesSlice, *simplejson.Json, error) {
	meta := metadata{
		Query: query,
	}

	for _, t := range data.Tables {
		if t.Name == "PrimaryResult" {
			timeIndex, metricIndex, valueIndex := -1, -1, -1
			meta.Columns = make([]column, 0)
			for i, v := range t.Columns {
				meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})

				if timeIndex == -1 && v.Type == "datetime" {
					timeIndex = i
				}

				if metricIndex == -1 && v.Type == "string" {
					metricIndex = i
				}

				if valueIndex == -1 && (v.Type == "int" || v.Type == "long" || v.Type == "real" || v.Type == "double") {
					valueIndex = i
				}
			}

			if timeIndex == -1 {
				azlog.Info("No time column specified. Returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			if valueIndex == -1 {
				azlog.Info("No value column specified. Returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			slice := tsdb.TimeSeriesSlice{}
			buckets := map[string]*tsdb.TimeSeriesPoints{}

			getSeriesBucket := func(metricName string) *tsdb.TimeSeriesPoints {
				if points, ok := buckets[metricName]; ok {
					return points
				}

				series := tsdb.NewTimeSeries(metricName, []tsdb.TimePoint{})
				slice = append(slice, series)
				buckets[metricName] = &series.Points

				return &series.Points
			}

			for _, r := range t.Rows {
				timeStr, ok := r[timeIndex].(string)
				if !ok {
					return nil, simplejson.NewFromAny(meta), errors.New("invalid time value")
				}
				timeValue, err := time.Parse(time.RFC3339Nano, timeStr)
				if err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				var value float64
				if value, err = getFloat(r[valueIndex]); err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				var metricName string
				if metricIndex == -1 {
					metricName = t.Columns[valueIndex].Name
				} else {
					metricName, ok = r[metricIndex].(string)
					if !ok {
						return nil, simplejson.NewFromAny(meta), err
					}
				}

				points := getSeriesBucket(metricName)
				*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timeValue.Unix()*1000)))
			}

			return slice, simplejson.NewFromAny(meta), nil
		}
	}

	return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
}
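The doc comment on executeTimeSeriesQuery above describes the build/execute/parse flow. The next sketch, which is not part of the commit, shows how that entry point might be driven inside the azuremonitor package; the datasource wiring and all field values are hypothetical and mirror the shape of the query model used in the tests below.

```go
package azuremonitor

import (
	"context"
	"net/http"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
)

// exampleRunLogAnalyticsQuery is a hypothetical driver for the datasource defined above.
// In Grafana itself the plugin executor builds these values from the saved datasource
// and the dashboard panel query; everything here is illustrative only.
func exampleRunLogAnalyticsQuery(ctx context.Context, ds *models.DataSource, client *http.Client) (*tsdb.Response, error) {
	e := &AzureLogAnalyticsDatasource{httpClient: client, dsInfo: ds}

	queries := []*tsdb.Query{{
		RefId:      "A",
		DataSource: ds,
		Model: simplejson.NewFromAny(map[string]interface{}{
			"queryType": "Azure Log Analytics",
			"azureLogAnalytics": map[string]interface{}{
				"workspace":    "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
				"query":        "Perf | where $__timeFilter() | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
				"resultFormat": "time_series",
			},
		}),
	}}

	// Same 34-minute window the tests use, expressed as epoch milliseconds.
	timeRange := &tsdb.TimeRange{From: "1521118800000", To: "1521120840000"}

	return e.executeTimeSeriesQuery(ctx, queries, timeRange)
}
```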
@ -0,0 +1,380 @@
package azuremonitor

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/url"
	"path/filepath"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/stretchr/testify/require"
)

func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

	tests := []struct {
		name                     string
		queryModel               []*tsdb.Query
		timeRange                *tsdb.TimeRange
		azureLogAnalyticsQueries []*AzureLogAnalyticsQuery
		Err                      require.ErrorAssertionFunc
	}{
		{
			name: "Query with macros should be interpolated",
			timeRange: &tsdb.TimeRange{
				From: fmt.Sprintf("%v", fromStart.Unix()*1000),
				To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
			},
			queryModel: []*tsdb.Query{
				{
					DataSource: &models.DataSource{
						JsonData: simplejson.NewFromAny(map[string]interface{}{}),
					},
					Model: simplejson.NewFromAny(map[string]interface{}{
						"queryType": "Azure Log Analytics",
						"azureLogAnalytics": map[string]interface{}{
							"workspace":    "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
							"query":        "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
							"resultFormat": "time_series",
						},
					}),
					RefId: "A",
				},
			},
			azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
				{
					RefID:        "A",
					ResultFormat: "time_series",
					URL:          "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
					Params:       url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
					Target:       "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
				},
			},
			Err: require.NoError,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			queries, err := datasource.buildQueries(tt.queryModel, tt.timeRange)
			tt.Err(t, err)
			if diff := cmp.Diff(tt.azureLogAnalyticsQueries, queries, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestParsingAzureLogAnalyticsResponses(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	tests := []struct {
		name     string
		testFile string
		query    string
		series   tsdb.TimeSeriesSlice
		meta     string
		Err      require.ErrorAssertionFunc
	}{
		{
			name:     "Response with single series should be parsed into the Grafana time series format",
			testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "grafana-vm",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(1.1), null.FloatFrom(1587323766000)},
						{null.FloatFrom(2.2), null.FloatFrom(1587323776000)},
						{null.FloatFrom(3.3), null.FloatFrom(1587323786000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with multiple series should be parsed into the Grafana time series format",
			testFile: "loganalytics/2-log-analytics-response-metrics-multiple-series.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "Processor",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(0.75), null.FloatFrom(1587418800000)},
						{null.FloatFrom(1.0055555555555555), null.FloatFrom(1587419100000)},
						{null.FloatFrom(0.7407407407407407), null.FloatFrom(1587419400000)},
					},
				},
				&tsdb.TimeSeries{
					Name: "Logical Disk",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(16090.551851851851), null.FloatFrom(1587418800000)},
						{null.FloatFrom(16090.537037037036), null.FloatFrom(1587419100000)},
						{null.FloatFrom(16090.586419753086), null.FloatFrom(1587419400000)},
					},
				},
				&tsdb.TimeSeries{
					Name: "Memory",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(702.0666666666667), null.FloatFrom(1587418800000)},
						{null.FloatFrom(700.5888888888888), null.FloatFrom(1587419100000)},
						{null.FloatFrom(703.1111111111111), null.FloatFrom(1587419400000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"ObjectName","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with no metric name column should use the value column name as the series name",
			testFile: "loganalytics/3-log-analytics-response-metrics-no-metric-column.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "avg_CounterValue",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(1), null.FloatFrom(1587323766000)},
						{null.FloatFrom(2), null.FloatFrom(1587323776000)},
						{null.FloatFrom(3), null.FloatFrom(1587323786000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"avg_CounterValue","type":"int"}],"query":"test query"}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with no time column should return no data",
			testFile: "loganalytics/4-log-analytics-response-metrics-no-time-column.json",
			query:    "test query",
			series:   nil,
			meta:     `{"columns":[{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
			Err:      require.NoError,
		},
		{
			name:     "Response with no value column should return no data",
			testFile: "loganalytics/5-log-analytics-response-metrics-no-value-column.json",
			query:    "test query",
			series:   nil,
			meta:     `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"}],"query":"test query"}`,
			Err:      require.NoError,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			data, _ := loadLogAnalyticsTestFile(tt.testFile)

			series, meta, err := datasource.parseToTimeSeries(data, tt.query)
			tt.Err(t, err)

			if diff := cmp.Diff(tt.series, series, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}

			json, _ := json.Marshal(meta)
			cols := string(json)

			if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestParsingAzureLogAnalyticsTableResponses(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	tests := []struct {
		name     string
		testFile string
		query    string
		tables   []*tsdb.Table
		meta     string
		Err      require.ErrorAssertionFunc
	}{
		{
			name:     "Table data should be parsed into the table format Response",
			testFile: "loganalytics/6-log-analytics-response-table.json",
			query:    "test query",
			tables: []*tsdb.Table{
				{
					Columns: []tsdb.TableColumn{
						{Text: "TenantId"},
						{Text: "Computer"},
						{Text: "ObjectName"},
						{Text: "CounterName"},
						{Text: "InstanceName"},
						{Text: "Min"},
						{Text: "Max"},
						{Text: "SampleCount"},
						{Text: "CounterValue"},
						{Text: "TimeGenerated"},
					},
					Rows: []tsdb.RowValues{
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2040),
							string("2020-04-23T11:46:03.857Z"),
						},
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2066),
							string("2020-04-23T11:46:13.857Z"),
						},
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2066),
							string("2020-04-23T11:46:23.857Z"),
						},
					},
				},
			},
			meta: `{"columns":[{"name":"TenantId","type":"string"},{"name":"Computer","type":"string"},{"name":"ObjectName","type":"string"},{"name":"CounterName","type":"string"},` +
				`{"name":"InstanceName","type":"string"},{"name":"Min","type":"real"},{"name":"Max","type":"real"},{"name":"SampleCount","type":"int"},{"name":"CounterValue","type":"real"},` +
				`{"name":"TimeGenerated","type":"datetime"}],"query":"test query"}`,
			Err: require.NoError,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			data, _ := loadLogAnalyticsTestFile(tt.testFile)

			tables, meta, err := datasource.parseToTables(data, tt.query)
			tt.Err(t, err)

			if diff := cmp.Diff(tt.tables, tables, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}

			json, _ := json.Marshal(meta)
			cols := string(json)

			if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestPluginRoutes(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	plugin := &plugins.DataSourcePlugin{
		Routes: []*plugins.AppPluginRoute{
			{
				Path:   "loganalyticsazure",
				Method: "GET",
				URL:    "https://api.loganalytics.io/v1/workspaces",
				Headers: []plugins.AppPluginRouteHeader{
					{Name: "x-ms-app", Content: "Grafana"},
				},
			},
			{
				Path:   "chinaloganalyticsazure",
				Method: "GET",
				URL:    "https://api.loganalytics.azure.cn/v1/workspaces",
				Headers: []plugins.AppPluginRouteHeader{
					{Name: "x-ms-app", Content: "Grafana"},
				},
			},
			{
				Path:   "govloganalyticsazure",
				Method: "GET",
				URL:    "https://api.loganalytics.us/v1/workspaces",
				Headers: []plugins.AppPluginRouteHeader{
					{Name: "x-ms-app", Content: "Grafana"},
				},
			},
		},
	}

	tests := []struct {
		name              string
		cloudName         string
		expectedProxypass string
		expectedRouteURL  string
		Err               require.ErrorAssertionFunc
	}{
		{
			name:              "plugin proxy route for the Azure public cloud",
			cloudName:         "azuremonitor",
			expectedProxypass: "loganalyticsazure",
			expectedRouteURL:  "https://api.loganalytics.io/v1/workspaces",
			Err:               require.NoError,
		},
		{
			name:              "plugin proxy route for the Azure China cloud",
			cloudName:         "chinaazuremonitor",
			expectedProxypass: "chinaloganalyticsazure",
			expectedRouteURL:  "https://api.loganalytics.azure.cn/v1/workspaces",
			Err:               require.NoError,
		},
		{
			name:              "plugin proxy route for the Azure Gov cloud",
			cloudName:         "govazuremonitor",
			expectedProxypass: "govloganalyticsazure",
			expectedRouteURL:  "https://api.loganalytics.us/v1/workspaces",
			Err:               require.NoError,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			route, proxypass, err := datasource.getPluginRoute(plugin, tt.cloudName)
			tt.Err(t, err)

			if diff := cmp.Diff(tt.expectedRouteURL, route.URL, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}

			if diff := cmp.Diff(tt.expectedProxypass, proxypass, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func loadLogAnalyticsTestFile(name string) (AzureLogAnalyticsResponse, error) {
	var data AzureLogAnalyticsResponse

	path := filepath.Join("testdata", name)
	jsonBody, err := ioutil.ReadFile(path)
	if err != nil {
		return data, err
	}
	err = json.Unmarshal(jsonBody, &data)
	return data, err
}
@ -0,0 +1,144 @@
package azuremonitor

import (
	"fmt"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/stretchr/testify/require"
)

func TestAzureLogAnalyticsMacros(t *testing.T) {
	fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
	timeRange := &tsdb.TimeRange{
		From: fmt.Sprintf("%v", fromStart.Unix()*1000),
		To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
	}

	tests := []struct {
		name      string
		query     *tsdb.Query
		timeRange *tsdb.TimeRange
		kql       string
		expected  string
		Err       require.ErrorAssertionFunc
	}{
		{
			name:     "invalid macro should throw error",
			query:    &tsdb.Query{},
			kql:      "$__invalid()",
			expected: "",
			Err:      require.Error,
		},
		{
			name:     "$__contains macro with a multi template variable that has multiple selected values as a parameter should build in clause",
			query:    &tsdb.Query{},
			kql:      "$__contains(col, 'val1','val2')",
			expected: "['col'] in ('val1','val2')",
			Err:      require.NoError,
		},
		{
			name:     "$__contains macro with a multi template variable that has a single selected value as a parameter should build in clause",
			query:    &tsdb.Query{},
			kql:      "$__contains(col, 'val1' )",
			expected: "['col'] in ('val1')",
			Err:      require.NoError,
		},
		{
			name:     "$__contains macro with multi template variable has custom All value as a parameter should return a true expression",
			query:    &tsdb.Query{},
			kql:      "$__contains(col, all)",
			expected: "1 == 1",
			Err:      require.NoError,
		},
		{
			name:     "$__timeFilter has no column parameter should use default time field",
			query:    &tsdb.Query{},
			kql:      "$__timeFilter()",
			expected: "['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z')",
			Err:      require.NoError,
		},
		{
			name:     "$__timeFilter has time field parameter",
			query:    &tsdb.Query{},
			kql:      "$__timeFilter(myTimeField)",
			expected: "['myTimeField'] >= datetime('2018-03-15T13:00:00Z') and ['myTimeField'] <= datetime('2018-03-15T13:34:00Z')",
			Err:      require.NoError,
		},
		{
			name:     "$__timeFrom and $__timeTo is in the query and range is a specific interval",
			query:    &tsdb.Query{},
			kql:      "myTimeField >= $__timeFrom() and myTimeField <= $__timeTo()",
			expected: "myTimeField >= datetime('2018-03-15T13:00:00Z') and myTimeField <= datetime('2018-03-15T13:34:00Z')",
			Err:      require.NoError,
		},
		{
			name:      "$__interval should use the defined interval from the query",
			timeRange: timeRange,
			query: &tsdb.Query{
				Model: simplejson.NewFromAny(map[string]interface{}{
					"interval": "5m",
				}),
			},
			kql:      "bin(TimeGenerated, $__interval)",
			expected: "bin(TimeGenerated, 300000ms)",
			Err:      require.NoError,
		},
		{
			name: "$__interval should use the default interval if none is specified",
			query: &tsdb.Query{
				DataSource: &models.DataSource{},
				Model:      simplejson.NewFromAny(map[string]interface{}{}),
			},
			kql:      "bin(TimeGenerated, $__interval)",
			expected: "bin(TimeGenerated, 34000ms)",
			Err:      require.NoError,
		},
		{
			name: "$__escapeMulti with multi template variable should replace values with KQL style escaped strings",
			query: &tsdb.Query{
				DataSource: &models.DataSource{},
				Model:      simplejson.NewFromAny(map[string]interface{}{}),
			},
			kql:      `CounterPath in ($__escapeMulti('\\grafana-vm\Network(eth0)\Total','\\grafana-vm\Network(eth1)\Total'))`,
			expected: `CounterPath in (@'\\grafana-vm\Network(eth0)\Total', @'\\grafana-vm\Network(eth1)\Total')`,
			Err:      require.NoError,
		},
		{
			name: "$__escapeMulti with multi template variable and has one selected value that contains comma",
			query: &tsdb.Query{
				DataSource: &models.DataSource{},
				Model:      simplejson.NewFromAny(map[string]interface{}{}),
			},
			kql:      `$__escapeMulti('\\grafana-vm,\Network(eth0)\Total Bytes Received')`,
			expected: `@'\\grafana-vm,\Network(eth0)\Total Bytes Received'`,
			Err:      require.NoError,
		},
		{
			name: "$__escapeMulti with multi template variable and is not wrapped in single quotes should fail",
			query: &tsdb.Query{
				DataSource: &models.DataSource{},
				Model:      simplejson.NewFromAny(map[string]interface{}{}),
			},
			kql:      `$__escapeMulti(\\grafana-vm,\Network(eth0)\Total Bytes Received)`,
			expected: "",
			Err:      require.Error,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			defaultTimeField := "TimeGenerated"
			rawQuery, err := KqlInterpolate(tt.query, timeRange, tt.kql, defaultTimeField)
			tt.Err(t, err)
			if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
@ -1,9 +1,9 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T02:02:03.456789Z",
    "value": {
      "avg": 1.2
    }
  }
}
@ -1,23 +1,23 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "value": {
          "avg": 1
        }
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "value": {
          "avg": 2
        }
      }
    ]
  }
}
@ -1,45 +1,45 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 1
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 3
            },
            "blob": "b"
          }
        ]
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 2
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 4
            },
            "blob": "b"
          }
        ]
      }
    ]
  }
}
@ -0,0 +1,38 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "TimeGenerated",
          "type": "datetime"
        },
        {
          "name": "Computer",
          "type": "string"
        },
        {
          "name": "avg_CounterValue",
          "type": "real"
        }
      ],
      "rows": [
        [
          "2020-04-19T19:16:06.5Z",
          "grafana-vm",
          1.1
        ],
        [
          "2020-04-19T19:16:16.5Z",
          "grafana-vm",
          2.2
        ],
        [
          "2020-04-19T19:16:26.5Z",
          "grafana-vm",
          3.3
        ]
      ]
    }
  ]
}
@ -0,0 +1,68 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "TimeGenerated",
          "type": "datetime"
        },
        {
          "name": "ObjectName",
          "type": "string"
        },
        {
          "name": "avg_CounterValue",
          "type": "real"
        }
      ],
      "rows": [
        [
          "2020-04-20T21:40:00Z",
          "Processor",
          0.75
        ],
        [
          "2020-04-20T21:40:00Z",
          "Logical Disk",
          16090.551851851851
        ],
        [
          "2020-04-20T21:40:00Z",
          "Memory",
          702.0666666666667
        ],
        [
          "2020-04-20T21:45:00Z",
          "Memory",
          700.5888888888888
        ],
        [
          "2020-04-20T21:45:00Z",
          "Processor",
          1.0055555555555555
        ],
        [
          "2020-04-20T21:45:00Z",
          "Logical Disk",
          16090.537037037036
        ],
        [
          "2020-04-20T21:50:00Z",
          "Logical Disk",
          16090.586419753086
        ],
        [
          "2020-04-20T21:50:00Z",
          "Processor",
          0.7407407407407407
        ],
        [
          "2020-04-20T21:50:00Z",
          "Memory",
          703.1111111111111
        ]
      ]
    }
  ]
}
@ -0,0 +1,31 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "TimeGenerated",
          "type": "datetime"
        },
        {
          "name": "avg_CounterValue",
          "type": "int"
        }
      ],
      "rows": [
        [
          "2020-04-19T19:16:06.5Z",
          1
        ],
        [
          "2020-04-19T19:16:16.5Z",
          2
        ],
        [
          "2020-04-19T19:16:26.5Z",
          3
        ]
      ]
    }
  ]
}
@ -0,0 +1,31 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "Computer",
          "type": "string"
        },
        {
          "name": "avg_CounterValue",
          "type": "real"
        }
      ],
      "rows": [
        [
          "grafana-vm",
          1.1
        ],
        [
          "grafana-vm",
          2.2
        ],
        [
          "grafana-vm",
          3.3
        ]
      ]
    }
  ]
}
@ -0,0 +1,31 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "TimeGenerated",
          "type": "datetime"
        },
        {
          "name": "Computer",
          "type": "string"
        }
      ],
      "rows": [
        [
          "2020-04-19T19:16:06.5Z",
          "grafana-vm"
        ],
        [
          "2020-04-19T19:16:16.5Z",
          "grafana-vm"
        ],
        [
          "2020-04-19T19:16:26.5Z",
          "grafana-vm"
        ]
      ]
    }
  ]
}
@ -0,0 +1,87 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "TenantId",
          "type": "string"
        },
        {
          "name": "Computer",
          "type": "string"
        },
        {
          "name": "ObjectName",
          "type": "string"
        },
        {
          "name": "CounterName",
          "type": "string"
        },
        {
          "name": "InstanceName",
          "type": "string"
        },
        {
          "name": "Min",
          "type": "real"
        },
        {
          "name": "Max",
          "type": "real"
        },
        {
          "name": "SampleCount",
          "type": "int"
        },
        {
          "name": "CounterValue",
          "type": "real"
        },
        {
          "name": "TimeGenerated",
          "type": "datetime"
        }
      ],
      "rows": [
        [
          "a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
          "grafana-vm",
          "Memory",
          "Available MBytes Memory",
          "Memory",
          null,
          null,
          null,
          2040,
          "2020-04-23T11:46:03.857Z"
        ],
        [
          "a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
          "grafana-vm",
          "Memory",
          "Available MBytes Memory",
          "Memory",
          null,
          null,
          null,
          2066,
          "2020-04-23T11:46:13.857Z"
        ],
        [
          "a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
          "grafana-vm",
          "Memory",
          "Available MBytes Memory",
          "Memory",
          null,
          null,
          null,
          2066,
          "2020-04-23T11:46:23.857Z"
        ]
      ]
    }
  ]
}