azuremonitor: port azure log analytics query function to the backend (#23839)

* azuremonitor: add support for log analytics macros

Also adds tests for the KQL macros

* azuremonitor: backend implementation for Log Analytics

* azuremonitor: remove gzip header from plugin route

The Go net/http library adds an Accept-Encoding header
for gzip automatically:

https://golang.org/src/net/http/transport.go#L2454

So there is no need to specify it manually.
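A minimal sketch of that behavior (standard library only; the echo server is a stand-in, not part of this change):

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Stand-in server that echoes back the Accept-Encoding header it received.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, r.Header.Get("Accept-Encoding"))
	}))
	defer srv.Close()

	res, err := http.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	body, _ := ioutil.ReadAll(res.Body)
	// Prints "gzip": the default http.Transport added the header by itself,
	// so the plugin route does not need to set it.
	fmt.Println(string(body))
}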

* azuremonitor: parses log analytics time series

* azuremonitor: support for table data for Log Analytics

* azuremonitor: for log analytics switch to calling the API...

...from the backend for time series and table queries.

* azuremonitor: fix missing err check

* azuremonitor: support Azure China, Azure Gov...

for log analytics on the backend.

* azuremonitor: review fixes

* azuremonitor: rename test files folder to testdata

To follow the Go convention for test data directories
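For reference, the go tool skips directories named testdata when building packages, and go test runs with the package directory as the working directory, so fixtures resolve with a bare relative path. A minimal sketch of a loader in that style (loadFixture and example.json are illustrative, not part of this commit):

package azuremonitor

import (
	"io/ioutil"
	"path/filepath"
	"testing"
)

// loadFixture reads a JSON fixture from the conventional testdata directory,
// e.g. loadFixture(t, "example.json").
func loadFixture(t *testing.T, name string) []byte {
	t.Helper()
	data, err := ioutil.ReadFile(filepath.Join("testdata", name))
	if err != nil {
		t.Fatal(err)
	}
	return data
}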

* azuremonitor: review fixes

* azuremonitor: better error message for http requests

* azuremonitor: fix for load workspaces on config page

* azuremonitor: strict null check fixes

Co-authored-by: bergquist <carl.bergquist@gmail.com>
32 changed files (lines changed):

  11  pkg/tsdb/azuremonitor/applicationinsights-datasource.go
  16  pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go
 354  pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go
 380  pkg/tsdb/azuremonitor/azure-log-analytics-datasource_test.go
  13  pkg/tsdb/azuremonitor/azuremonitor-datasource.go
  20  pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go
  17  pkg/tsdb/azuremonitor/azuremonitor.go
  37  pkg/tsdb/azuremonitor/macros.go
 144  pkg/tsdb/azuremonitor/macros_test.go
   0  pkg/tsdb/azuremonitor/testdata/applicationinsights/1-application-insights-response-raw-query.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/applicationinsights/2-application-insights-response-raw-query-segmented.json (renamed)
  18  pkg/tsdb/azuremonitor/testdata/applicationinsights/3-application-insights-response-metrics-single-value.json
  46  pkg/tsdb/azuremonitor/testdata/applicationinsights/4-application-insights-response-metrics-no-segment.json
  90  pkg/tsdb/azuremonitor/testdata/applicationinsights/4-application-insights-response-metrics-segmented.json
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/1-azure-monitor-response-avg.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/2-azure-monitor-response-total.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/3-azure-monitor-response-maximum.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/4-azure-monitor-response-minimum.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/5-azure-monitor-response-count.json (renamed)
   0  pkg/tsdb/azuremonitor/testdata/azuremonitor/6-azure-monitor-response-multi-dimension.json (renamed)
  38  pkg/tsdb/azuremonitor/testdata/loganalytics/1-log-analytics-response-metrics-single-series.json
  68  pkg/tsdb/azuremonitor/testdata/loganalytics/2-log-analytics-response-metrics-multiple-series.json
  31  pkg/tsdb/azuremonitor/testdata/loganalytics/3-log-analytics-response-metrics-no-metric-column.json
  31  pkg/tsdb/azuremonitor/testdata/loganalytics/4-log-analytics-response-metrics-no-time-column.json
  31  pkg/tsdb/azuremonitor/testdata/loganalytics/5-log-analytics-response-metrics-no-value-column.json
  87  pkg/tsdb/azuremonitor/testdata/loganalytics/6-log-analytics-response-table.json
  47  pkg/tsdb/azuremonitor/types.go
   6  public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts
 120  public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_log_analytics/azure_log_analytics_datasource.test.ts
  72  public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_log_analytics/azure_log_analytics_datasource.ts
   2  public/app/plugins/datasource/grafana-azure-monitor-datasource/components/ConfigEditor.tsx
  39  public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json

@@ -19,6 +19,7 @@ import (
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil"
"github.com/opentracing/opentracing-go"
"golang.org/x/net/context/ctxhttp"
)
@@ -210,8 +211,8 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
}
if res.StatusCode/100 != 2 {
azlog.Error("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf(string(body))
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("Request failed status: %v", res.Status)
}
if query.IsRaw {
@@ -252,8 +253,8 @@ func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInf
req, err := http.NewRequest(http.MethodGet, u.String(), nil)
if err != nil {
azlog.Error("Failed to create request", "error", err)
return nil, fmt.Errorf("Failed to create request. error: %v", err)
azlog.Debug("Failed to create request", "error", err)
return nil, errutil.Wrap("Failed to create request", err)
}
req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
@@ -286,7 +287,7 @@ func (e *ApplicationInsightsDatasource) parseTimeSeriesFromQuery(body []byte, qu
var data ApplicationInsightsQueryResponse
err := json.Unmarshal(body, &data)
if err != nil {
azlog.Error("Failed to unmarshal Application Insights response", "error", err, "body", string(body))
azlog.Debug("Failed to unmarshal Application Insights response", "error", err, "body", string(body))
return nil, nil, err
}

@@ -163,7 +163,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
Convey("Parse Application Insights query API response in the time series format", func() {
Convey("no segments", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/1-application-insights-response-raw-query.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/1-application-insights-response-raw-query.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
@@ -186,7 +186,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
})
Convey("with segments", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/2-application-insights-response-raw-query-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
@@ -216,7 +216,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568426523000))
Convey("with alias", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/2-application-insights-response-raw-query-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
@@ -239,7 +239,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
Convey("Parse Application Insights metrics API", func() {
Convey("single value", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/3-application-insights-response-metrics-single-value.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/3-application-insights-response-metrics-single-value.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
@@ -256,7 +256,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
})
Convey("1H separation", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-no-segment.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-no-segment.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
@@ -274,7 +274,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))
Convey("with segmentation", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
@@ -300,7 +300,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568343723000))
Convey("with alias", func() {
data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
@@ -319,7 +319,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
})
}
func TestPluginRoutes(t *testing.T) {
func TestAppInsightsPluginRoutes(t *testing.T) {
datasource := &ApplicationInsightsDatasource{}
plugin := &plugins.DataSourcePlugin{
Routes: []*plugins.AppPluginRoute{

@@ -0,0 +1,354 @@
package azuremonitor
import (
"context"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"time"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil"
"github.com/opentracing/opentracing-go"
"golang.org/x/net/context/ctxhttp"
)
// AzureLogAnalyticsDatasource calls the Azure Log Analytics APIs
type AzureLogAnalyticsDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
}
// AzureLogAnalyticsQuery is the query request that is built from the saved values from the UI
type AzureLogAnalyticsQuery struct {
RefID string
ResultFormat string
URL string
Params url.Values
Target string
}
// executeTimeSeriesQuery does the following:
// 1. builds the Azure Log Analytics URL and query string for each query
// 2. executes each query by calling the Azure Log Analytics API
// 3. parses the responses for each query into the timeseries format
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
result := &tsdb.Response{
Results: map[string]*tsdb.QueryResult{},
}
queries, err := e.buildQueries(originalQueries, timeRange)
if err != nil {
return nil, err
}
for _, query := range queries {
queryRes, err := e.executeQuery(ctx, query, originalQueries, timeRange)
if err != nil {
queryRes.Error = err
}
result.Results[query.RefID] = queryRes
}
return result, nil
}
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureLogAnalyticsQuery, error) {
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
for _, query := range queries {
azureLogAnalyticsTarget := query.Model.Get("azureLogAnalytics").MustMap()
azlog.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)
resultFormat := fmt.Sprintf("%v", azureLogAnalyticsTarget["resultFormat"])
if resultFormat == "" {
resultFormat = "time_series"
}
urlComponents := map[string]string{}
urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
urlComponents["workspace"] = fmt.Sprintf("%v", azureLogAnalyticsTarget["workspace"])
apiURL := fmt.Sprintf("%s/query", urlComponents["workspace"])
params := url.Values{}
rawQuery, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", azureLogAnalyticsTarget["query"]), "TimeGenerated")
if err != nil {
return nil, err
}
params.Add("query", rawQuery)
azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, &AzureLogAnalyticsQuery{
RefID: query.RefId,
ResultFormat: resultFormat,
URL: apiURL,
Params: params,
Target: params.Encode(),
})
}
return azureLogAnalyticsQueries, nil
}
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, error) {
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}
req, err := e.createRequest(ctx, e.dsInfo)
if err != nil {
queryResult.Error = err
return queryResult, nil
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
span, ctx := opentracing.StartSpanFromContext(ctx, "azure log analytics query")
span.SetTag("target", query.Target)
span.SetTag("from", timeRange.From)
span.SetTag("until", timeRange.To)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
defer span.Finish()
if err := opentracing.GlobalTracer().Inject(
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
queryResult.Error = err
return queryResult, nil
}
azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
if err != nil {
queryResult.Error = err
return queryResult, nil
}
data, err := e.unmarshalResponse(res)
if err != nil {
queryResult.Error = err
return queryResult, nil
}
azlog.Debug("AzureLogsAnalytics", "Response", queryResult)
if query.ResultFormat == "table" {
queryResult.Tables, queryResult.Meta, err = e.parseToTables(data, query.Params.Get("query"))
if err != nil {
return nil, err
}
} else {
queryResult.Series, queryResult.Meta, err = e.parseToTimeSeries(data, query.Params.Get("query"))
if err != nil {
return nil, err
}
}
return queryResult, nil
}
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render")
req, err := http.NewRequest(http.MethodGet, u.String(), nil)
if err != nil {
azlog.Debug("Failed to create request", "error", err)
return nil, errutil.Wrap("Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
// find plugin
plugin, ok := plugins.DataSources[dsInfo.Type]
if !ok {
return nil, errors.New("Unable to find datasource plugin Azure Monitor")
}
cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor")
logAnalyticsRoute, proxypass, err := e.getPluginRoute(plugin, cloudName)
if err != nil {
return nil, err
}
pluginproxy.ApplyRoute(ctx, req, proxypass, logAnalyticsRoute, dsInfo)
return req, nil
}
func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) (*plugins.AppPluginRoute, string, error) {
pluginRouteName := "loganalyticsazure"
switch cloudName {
case "chinaazuremonitor":
pluginRouteName = "chinaloganalyticsazure"
case "govazuremonitor":
pluginRouteName = "govloganalyticsazure"
}
var logAnalyticsRoute *plugins.AppPluginRoute
for _, route := range plugin.Routes {
if route.Path == pluginRouteName {
logAnalyticsRoute = route
break
}
}
return logAnalyticsRoute, pluginRouteName, nil
}
func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (AzureLogAnalyticsResponse, error) {
body, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return AzureLogAnalyticsResponse{}, err
}
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return AzureLogAnalyticsResponse{}, fmt.Errorf("Request failed status: %v", res.Status)
}
var data AzureLogAnalyticsResponse
err = json.Unmarshal(body, &data)
if err != nil {
azlog.Debug("Failed to unmarshal Azure Log Analytics response", "error", err, "status", res.Status, "body", string(body))
return AzureLogAnalyticsResponse{}, err
}
return data, nil
}
func (e *AzureLogAnalyticsDatasource) parseToTables(data AzureLogAnalyticsResponse, query string) ([]*tsdb.Table, *simplejson.Json, error) {
meta := metadata{
Query: query,
}
tables := make([]*tsdb.Table, 0)
for _, t := range data.Tables {
if t.Name == "PrimaryResult" {
table := tsdb.Table{
Columns: make([]tsdb.TableColumn, 0),
Rows: make([]tsdb.RowValues, 0),
}
meta.Columns = make([]column, 0)
for _, v := range t.Columns {
meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})
table.Columns = append(table.Columns, tsdb.TableColumn{Text: v.Name})
}
for _, r := range t.Rows {
values := make([]interface{}, len(table.Columns))
for i := 0; i < len(table.Columns); i++ {
values[i] = r[i]
}
table.Rows = append(table.Rows, values)
}
tables = append(tables, &table)
return tables, simplejson.NewFromAny(meta), nil
}
}
return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
}
func (e *AzureLogAnalyticsDatasource) parseToTimeSeries(data AzureLogAnalyticsResponse, query string) (tsdb.TimeSeriesSlice, *simplejson.Json, error) {
meta := metadata{
Query: query,
}
for _, t := range data.Tables {
if t.Name == "PrimaryResult" {
timeIndex, metricIndex, valueIndex := -1, -1, -1
meta.Columns = make([]column, 0)
for i, v := range t.Columns {
meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})
if timeIndex == -1 && v.Type == "datetime" {
timeIndex = i
}
if metricIndex == -1 && v.Type == "string" {
metricIndex = i
}
if valueIndex == -1 && (v.Type == "int" || v.Type == "long" || v.Type == "real" || v.Type == "double") {
valueIndex = i
}
}
if timeIndex == -1 {
azlog.Info("No time column specified. Returning existing columns, no data")
return nil, simplejson.NewFromAny(meta), nil
}
if valueIndex == -1 {
azlog.Info("No value column specified. Returning existing columns, no data")
return nil, simplejson.NewFromAny(meta), nil
}
slice := tsdb.TimeSeriesSlice{}
buckets := map[string]*tsdb.TimeSeriesPoints{}
getSeriesBucket := func(metricName string) *tsdb.TimeSeriesPoints {
if points, ok := buckets[metricName]; ok {
return points
}
series := tsdb.NewTimeSeries(metricName, []tsdb.TimePoint{})
slice = append(slice, series)
buckets[metricName] = &series.Points
return &series.Points
}
for _, r := range t.Rows {
timeStr, ok := r[timeIndex].(string)
if !ok {
return nil, simplejson.NewFromAny(meta), errors.New("invalid time value")
}
timeValue, err := time.Parse(time.RFC3339Nano, timeStr)
if err != nil {
return nil, simplejson.NewFromAny(meta), err
}
var value float64
if value, err = getFloat(r[valueIndex]); err != nil {
return nil, simplejson.NewFromAny(meta), err
}
var metricName string
if metricIndex == -1 {
metricName = t.Columns[valueIndex].Name
} else {
metricName, ok = r[metricIndex].(string)
if !ok {
return nil, simplejson.NewFromAny(meta), err
}
}
points := getSeriesBucket(metricName)
*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timeValue.Unix()*1000)))
}
return slice, simplejson.NewFromAny(meta), nil
}
}
return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
}
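The row parsing above hinges on one conversion: each datetime cell is parsed with time.Parse(time.RFC3339Nano) and the Unix timestamp is multiplied by 1000 to get the epoch milliseconds Grafana points use. A standalone sketch (note that Unix() truncates sub-second precision, which is why the tests expect whole-second timestamps):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Same conversion parseToTimeSeries applies to each row's time cell.
	timeValue, err := time.Parse(time.RFC3339Nano, "2020-04-19T19:16:06.5Z")
	if err != nil {
		panic(err)
	}
	ms := float64(timeValue.Unix() * 1000)
	fmt.Printf("%.0f\n", ms) // 1587323766000; the trailing .5s is truncated
}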

@@ -0,0 +1,380 @@
package azuremonitor
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/url"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/require"
)
func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tests := []struct {
name string
queryModel []*tsdb.Query
timeRange *tsdb.TimeRange
azureLogAnalyticsQueries []*AzureLogAnalyticsQuery
Err require.ErrorAssertionFunc
}{
{
name: "Query with macros should be interpolated",
timeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
queryModel: []*tsdb.Query{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": "time_series",
},
}),
RefId: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: "time_series",
URL: "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
},
},
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, tt.timeRange)
tt.Err(t, err)
if diff := cmp.Diff(tt.azureLogAnalyticsQueries, queries, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func TestParsingAzureLogAnalyticsResponses(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
tests := []struct {
name string
testFile string
query string
series tsdb.TimeSeriesSlice
meta string
Err require.ErrorAssertionFunc
}{
{
name: "Response with single series should be parsed into the Grafana time series format",
testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
query: "test query",
series: tsdb.TimeSeriesSlice{
&tsdb.TimeSeries{
Name: "grafana-vm",
Points: tsdb.TimeSeriesPoints{
{null.FloatFrom(1.1), null.FloatFrom(1587323766000)},
{null.FloatFrom(2.2), null.FloatFrom(1587323776000)},
{null.FloatFrom(3.3), null.FloatFrom(1587323786000)},
},
},
},
meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
Err: require.NoError,
},
{
name: "Response with multiple series should be parsed into the Grafana time series format",
testFile: "loganalytics/2-log-analytics-response-metrics-multiple-series.json",
query: "test query",
series: tsdb.TimeSeriesSlice{
&tsdb.TimeSeries{
Name: "Processor",
Points: tsdb.TimeSeriesPoints{
{null.FloatFrom(0.75), null.FloatFrom(1587418800000)},
{null.FloatFrom(1.0055555555555555), null.FloatFrom(1587419100000)},
{null.FloatFrom(0.7407407407407407), null.FloatFrom(1587419400000)},
},
},
&tsdb.TimeSeries{
Name: "Logical Disk",
Points: tsdb.TimeSeriesPoints{
{null.FloatFrom(16090.551851851851), null.FloatFrom(1587418800000)},
{null.FloatFrom(16090.537037037036), null.FloatFrom(1587419100000)},
{null.FloatFrom(16090.586419753086), null.FloatFrom(1587419400000)},
},
},
&tsdb.TimeSeries{
Name: "Memory",
Points: tsdb.TimeSeriesPoints{
{null.FloatFrom(702.0666666666667), null.FloatFrom(1587418800000)},
{null.FloatFrom(700.5888888888888), null.FloatFrom(1587419100000)},
{null.FloatFrom(703.1111111111111), null.FloatFrom(1587419400000)},
},
},
},
meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"ObjectName","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
Err: require.NoError,
},
{
name: "Response with no metric name column should use the value column name as the series name",
testFile: "loganalytics/3-log-analytics-response-metrics-no-metric-column.json",
query: "test query",
series: tsdb.TimeSeriesSlice{
&tsdb.TimeSeries{
Name: "avg_CounterValue",
Points: tsdb.TimeSeriesPoints{
{null.FloatFrom(1), null.FloatFrom(1587323766000)},
{null.FloatFrom(2), null.FloatFrom(1587323776000)},
{null.FloatFrom(3), null.FloatFrom(1587323786000)},
},
},
},
meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"avg_CounterValue","type":"int"}],"query":"test query"}`,
Err: require.NoError,
},
{
name: "Response with no time column should return no data",
testFile: "loganalytics/4-log-analytics-response-metrics-no-time-column.json",
query: "test query",
series: nil,
meta: `{"columns":[{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"query":"test query"}`,
Err: require.NoError,
},
{
name: "Response with no value column should return no data",
testFile: "loganalytics/5-log-analytics-response-metrics-no-value-column.json",
query: "test query",
series: nil,
meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"}],"query":"test query"}`,
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
data, _ := loadLogAnalyticsTestFile(tt.testFile)
series, meta, err := datasource.parseToTimeSeries(data, tt.query)
tt.Err(t, err)
if diff := cmp.Diff(tt.series, series, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
json, _ := json.Marshal(meta)
cols := string(json)
if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func TestParsingAzureLogAnalyticsTableResponses(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
tests := []struct {
name string
testFile string
query string
tables []*tsdb.Table
meta string
Err require.ErrorAssertionFunc
}{
{
name: "Table data should be parsed into the table format Response",
testFile: "loganalytics/6-log-analytics-response-table.json",
query: "test query",
tables: []*tsdb.Table{
{
Columns: []tsdb.TableColumn{
{Text: "TenantId"},
{Text: "Computer"},
{Text: "ObjectName"},
{Text: "CounterName"},
{Text: "InstanceName"},
{Text: "Min"},
{Text: "Max"},
{Text: "SampleCount"},
{Text: "CounterValue"},
{Text: "TimeGenerated"},
},
Rows: []tsdb.RowValues{
{
string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
string("grafana-vm"),
string("Memory"),
string("Available MBytes Memory"),
string("Memory"),
nil,
nil,
nil,
float64(2040),
string("2020-04-23T11:46:03.857Z"),
},
{
string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
string("grafana-vm"),
string("Memory"),
string("Available MBytes Memory"),
string("Memory"),
nil,
nil,
nil,
float64(2066),
string("2020-04-23T11:46:13.857Z"),
},
{
string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
string("grafana-vm"),
string("Memory"),
string("Available MBytes Memory"),
string("Memory"),
nil,
nil,
nil,
float64(2066),
string("2020-04-23T11:46:23.857Z"),
},
},
},
},
meta: `{"columns":[{"name":"TenantId","type":"string"},{"name":"Computer","type":"string"},{"name":"ObjectName","type":"string"},{"name":"CounterName","type":"string"},` +
`{"name":"InstanceName","type":"string"},{"name":"Min","type":"real"},{"name":"Max","type":"real"},{"name":"SampleCount","type":"int"},{"name":"CounterValue","type":"real"},` +
`{"name":"TimeGenerated","type":"datetime"}],"query":"test query"}`,
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
data, _ := loadLogAnalyticsTestFile(tt.testFile)
tables, meta, err := datasource.parseToTables(data, tt.query)
tt.Err(t, err)
if diff := cmp.Diff(tt.tables, tables, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
json, _ := json.Marshal(meta)
cols := string(json)
if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func TestPluginRoutes(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
plugin := &plugins.DataSourcePlugin{
Routes: []*plugins.AppPluginRoute{
{
Path: "loganalyticsazure",
Method: "GET",
URL: "https://api.loganalytics.io/v1/workspaces",
Headers: []plugins.AppPluginRouteHeader{
{Name: "x-ms-app", Content: "Grafana"},
},
},
{
Path: "chinaloganalyticsazure",
Method: "GET",
URL: "https://api.loganalytics.azure.cn/v1/workspaces",
Headers: []plugins.AppPluginRouteHeader{
{Name: "x-ms-app", Content: "Grafana"},
},
},
{
Path: "govloganalyticsazure",
Method: "GET",
URL: "https://api.loganalytics.us/v1/workspaces",
Headers: []plugins.AppPluginRouteHeader{
{Name: "x-ms-app", Content: "Grafana"},
},
},
},
}
tests := []struct {
name string
cloudName string
expectedProxypass string
expectedRouteURL string
Err require.ErrorAssertionFunc
}{
{
name: "plugin proxy route for the Azure public cloud",
cloudName: "azuremonitor",
expectedProxypass: "loganalyticsazure",
expectedRouteURL: "https://api.loganalytics.io/v1/workspaces",
Err: require.NoError,
},
{
name: "plugin proxy route for the Azure China cloud",
cloudName: "chinaazuremonitor",
expectedProxypass: "chinaloganalyticsazure",
expectedRouteURL: "https://api.loganalytics.azure.cn/v1/workspaces",
Err: require.NoError,
},
{
name: "plugin proxy route for the Azure Gov cloud",
cloudName: "govazuremonitor",
expectedProxypass: "govloganalyticsazure",
expectedRouteURL: "https://api.loganalytics.us/v1/workspaces",
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
route, proxypass, err := datasource.getPluginRoute(plugin, tt.cloudName)
tt.Err(t, err)
if diff := cmp.Diff(tt.expectedRouteURL, route.URL, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
if diff := cmp.Diff(tt.expectedProxypass, proxypass, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func loadLogAnalyticsTestFile(name string) (AzureLogAnalyticsResponse, error) {
var data AzureLogAnalyticsResponse
path := filepath.Join("testdata", name)
jsonBody, err := ioutil.ReadFile(path)
if err != nil {
return data, err
}
err = json.Unmarshal(jsonBody, &data)
return data, err
}
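These are plain table-driven Go tests, so they can be run in isolation with standard tooling, for example:

go test ./pkg/tsdb/azuremonitor -run 'TestBuildingAzureLogAnalyticsQueries|TestParsingAzureLogAnalytics' -v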

@@ -16,6 +16,7 @@ import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/errutil"
opentracing "github.com/opentracing/opentracing-go"
"golang.org/x/net/context/ctxhttp"
@@ -82,7 +83,6 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
var target string
azureMonitorTarget := query.Model.Get("azureMonitor").MustMap()
azlog.Debug("AzureMonitor", "target", azureMonitorTarget)
urlComponents := map[string]string{}
urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
@@ -178,6 +178,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
}
azlog.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
azlog.Debug("AzureMonitor", "Target", query.Target)
res, err := ctxhttp.Do(ctx, e.httpClient, req)
if err != nil {
queryResult.Error = err
@@ -216,8 +217,8 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *mode
req, err := http.NewRequest(http.MethodGet, u.String(), nil)
if err != nil {
azlog.Error("Failed to create request", "error", err)
return nil, fmt.Errorf("Failed to create request. error: %v", err)
azlog.Debug("Failed to create request", "error", err)
return nil, errutil.Wrap("Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
@@ -236,14 +237,14 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMon
}
if res.StatusCode/100 != 2 {
azlog.Error("Request failed", "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, fmt.Errorf(string(body))
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, fmt.Errorf("Request failed status: %v", res.Status)
}
var data AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
azlog.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, err
}

@@ -5,6 +5,7 @@ import (
"fmt"
"io/ioutil"
"net/url"
"path/filepath"
"testing"
"time"
@@ -167,7 +168,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
Convey("Parse AzureMonitor API response in the time series format", func() {
Convey("when data from query aggregated as average to one time series", func() {
data, err := loadTestFile("./test-data/azuremonitor/1-azure-monitor-response-avg.json")
data, err := loadTestFile("azuremonitor/1-azure-monitor-response-avg.json")
So(err, ShouldBeNil)
So(data.Interval, ShouldEqual, "PT1M")
@@ -204,7 +205,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query aggregated as total to one time series", func() {
data, err := loadTestFile("./test-data/azuremonitor/2-azure-monitor-response-total.json")
data, err := loadTestFile("azuremonitor/2-azure-monitor-response-total.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -224,7 +225,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query aggregated as maximum to one time series", func() {
data, err := loadTestFile("./test-data/azuremonitor/3-azure-monitor-response-maximum.json")
data, err := loadTestFile("azuremonitor/3-azure-monitor-response-maximum.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -244,7 +245,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query aggregated as minimum to one time series", func() {
data, err := loadTestFile("./test-data/azuremonitor/4-azure-monitor-response-minimum.json")
data, err := loadTestFile("azuremonitor/4-azure-monitor-response-minimum.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -264,7 +265,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query aggregated as Count to one time series", func() {
data, err := loadTestFile("./test-data/azuremonitor/5-azure-monitor-response-count.json")
data, err := loadTestFile("azuremonitor/5-azure-monitor-response-count.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -284,7 +285,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query aggregated as total and has dimension filter", func() {
data, err := loadTestFile("./test-data/azuremonitor/6-azure-monitor-response-multi-dimension.json")
data, err := loadTestFile("azuremonitor/6-azure-monitor-response-multi-dimension.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -311,7 +312,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data from query has alias patterns", func() {
data, err := loadTestFile("./test-data/azuremonitor/2-azure-monitor-response-total.json")
data, err := loadTestFile("azuremonitor/2-azure-monitor-response-total.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -331,7 +332,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
Convey("when data has dimension filters and alias patterns", func() {
data, err := loadTestFile("./test-data/azuremonitor/6-azure-monitor-response-multi-dimension.json")
data, err := loadTestFile("azuremonitor/6-azure-monitor-response-multi-dimension.json")
So(err, ShouldBeNil)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -378,9 +379,10 @@ func TestAzureMonitorDatasource(t *testing.T) {
})
}
func loadTestFile(path string) (AzureMonitorResponse, error) {
func loadTestFile(name string) (AzureMonitorResponse, error) {
var data AzureMonitorResponse
path := filepath.Join("testdata", name)
jsonBody, err := ioutil.ReadFile(path)
if err != nil {
return data, err

@@ -50,6 +50,7 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
var azureMonitorQueries []*tsdb.Query
var applicationInsightsQueries []*tsdb.Query
var azureLogAnalyticsQueries []*tsdb.Query
for _, query := range tsdbQuery.Queries {
queryType := query.Model.Get("queryType").MustString("")
@@ -59,6 +60,8 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
azureMonitorQueries = append(azureMonitorQueries, query)
case "Application Insights":
applicationInsightsQueries = append(applicationInsightsQueries, query)
case "Azure Log Analytics":
azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, query)
default:
return nil, fmt.Errorf("Alerting not supported for %s", queryType)
}
@@ -74,6 +77,11 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
dsInfo: e.dsInfo,
}
alaDatasource := &AzureLogAnalyticsDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
}
azResult, err := azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange)
if err != nil {
return nil, err
@@ -84,9 +92,18 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
return nil, err
}
alaResult, err := alaDatasource.executeTimeSeriesQuery(ctx, azureLogAnalyticsQueries, tsdbQuery.TimeRange)
if err != nil {
return nil, err
}
for k, v := range aiResult.Results {
azResult.Results[k] = v
}
for k, v := range alaResult.Results {
azResult.Results[k] = v
}
return azResult, nil
}
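The dispatch above fans queries out to three executors and folds their per-RefID result maps back into a single response, which is safe because RefIDs are unique within a request. A trimmed sketch of that merge, with plain strings standing in for *tsdb.QueryResult:

package main

import "fmt"

// merge folds per-datasource result maps into one response, keyed by RefID.
func merge(dst map[string]string, srcs ...map[string]string) map[string]string {
	for _, src := range srcs {
		for refID, res := range src {
			dst[refID] = res
		}
	}
	return dst
}

func main() {
	azResult := map[string]string{"A": "azure monitor series"}
	aiResult := map[string]string{"B": "app insights series"}
	alaResult := map[string]string{"C": "log analytics series"}
	fmt.Println(merge(azResult, aiResult, alaResult))
}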

@@ -11,23 +11,45 @@ import (
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?`
const escapeMultiExpr = `\$__escapeMulti\(('.*')\)`
type kqlMacroEngine struct {
timeRange *tsdb.TimeRange
query *tsdb.Query
}
func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
//KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string, defaultTimeField ...string) (string, error) {
engine := kqlMacroEngine{}
return engine.Interpolate(query, timeRange, kql)
defaultTimeFieldForAllDatasources := "timestamp"
if len(defaultTimeField) > 0 {
defaultTimeFieldForAllDatasources = defaultTimeField[0]
}
return engine.Interpolate(query, timeRange, kql, defaultTimeFieldForAllDatasources)
}
func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string, defaultTimeField string) (string, error) {
m.timeRange = timeRange
m.query = query
rExp, _ := regexp.Compile(sExpr)
escapeMultiRegex, _ := regexp.Compile(escapeMultiExpr)
var macroError error
//First pass for the escapeMulti macro
kql = m.ReplaceAllStringSubmatchFunc(escapeMultiRegex, kql, func(groups []string) string {
args := []string{}
if len(groups) > 1 {
args = strings.Split(groups[1], "','")
}
expr := strings.Join(args, "', @'")
return fmt.Sprintf("@%s", expr)
})
//second pass for all the other macros
kql = m.ReplaceAllStringSubmatchFunc(rExp, kql, func(groups []string) string {
args := []string{}
if len(groups) > 2 {
@@ -37,7 +59,7 @@ func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRang
for i, arg := range args {
args[i] = strings.Trim(arg, " ")
}
res, err := m.evaluateMacro(groups[1], args)
res, err := m.evaluateMacro(groups[1], defaultTimeField, args)
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
@@ -52,10 +74,10 @@ func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRang
return kql, nil
}
func (m *kqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string) (string, error) {
switch name {
case "__timeFilter":
timeColumn := "timestamp"
timeColumn := defaultTimeField
if len(args) > 0 && args[0] != "" {
timeColumn = args[0]
}
@@ -90,7 +112,8 @@ func (m *kqlMacroEngine) evaluateMacro(name string, args []string) (string, erro
return "1 == 1", nil
}
return fmt.Sprintf("['%s'] in ('%s')", args[0], args[1]), nil
expression := strings.Join(args[1:], ",")
return fmt.Sprintf("['%s'] in (%s)", args[0], expression), nil
default:
return "", fmt.Errorf("Unknown macro %v", name)
}
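A sketch of how the new variadic default-time-field parameter is meant to be called (exampleKqlInterpolate is illustrative, not part of the commit): Log Analytics passes TimeGenerated, while existing callers that omit the argument keep the old timestamp default.

package azuremonitor

import (
	"fmt"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
)

func exampleKqlInterpolate() {
	query := &tsdb.Query{
		DataSource: &models.DataSource{},
		Model:      simplejson.NewFromAny(map[string]interface{}{}),
	}
	timeRange := &tsdb.TimeRange{From: "1521118800000", To: "1521120840000"}
	kql := "Perf | where $__timeFilter() | summarize avg(CounterValue) by bin(TimeGenerated, $__interval)"

	rawQuery, err := KqlInterpolate(query, timeRange, kql, "TimeGenerated")
	if err != nil {
		fmt.Println(err)
		return
	}
	// $__timeFilter() expands to a ['TimeGenerated'] >= ... and <= ... clause
	// and $__interval to the computed interval in milliseconds.
	fmt.Println(rawQuery)
}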

@@ -0,0 +1,144 @@
package azuremonitor
import (
"fmt"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/require"
)
func TestAzureLogAnalyticsMacros(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
timeRange := &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
}
tests := []struct {
name string
query *tsdb.Query
timeRange *tsdb.TimeRange
kql string
expected string
Err require.ErrorAssertionFunc
}{
{
name: "invalid macro should throw error",
query: &tsdb.Query{},
kql: "$__invalid()",
expected: "",
Err: require.Error,
},
{
name: "$__contains macro with a multi template variable that has multiple selected values as a parameter should build in clause",
query: &tsdb.Query{},
kql: "$__contains(col, 'val1','val2')",
expected: "['col'] in ('val1','val2')",
Err: require.NoError,
},
{
name: "$__contains macro with a multi template variable that has a single selected value as a parameter should build in clause",
query: &tsdb.Query{},
kql: "$__contains(col, 'val1' )",
expected: "['col'] in ('val1')",
Err: require.NoError,
},
{
name: "$__contains macro with multi template variable has custom All value as a parameter should return a true expression",
query: &tsdb.Query{},
kql: "$__contains(col, all)",
expected: "1 == 1",
Err: require.NoError,
},
{
name: "$__timeFilter has no column parameter should use default time field",
query: &tsdb.Query{},
kql: "$__timeFilter()",
expected: "['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__timeFilter has time field parameter",
query: &tsdb.Query{},
kql: "$__timeFilter(myTimeField)",
expected: "['myTimeField'] >= datetime('2018-03-15T13:00:00Z') and ['myTimeField'] <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__timeFrom and $__timeTo is in the query and range is a specific interval",
query: &tsdb.Query{},
kql: "myTimeField >= $__timeFrom() and myTimeField <= $__timeTo()",
expected: "myTimeField >= datetime('2018-03-15T13:00:00Z') and myTimeField <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__interval should use the defined interval from the query",
timeRange: timeRange,
query: &tsdb.Query{
Model: simplejson.NewFromAny(map[string]interface{}{
"interval": "5m",
}),
},
kql: "bin(TimeGenerated, $__interval)",
expected: "bin(TimeGenerated, 300000ms)",
Err: require.NoError,
},
{
name: "$__interval should use the default interval if none is specified",
query: &tsdb.Query{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
kql: "bin(TimeGenerated, $__interval)",
expected: "bin(TimeGenerated, 34000ms)",
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable should replace values with KQL style escaped strings",
query: &tsdb.Query{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
kql: `CounterPath in ($__escapeMulti('\\grafana-vm\Network(eth0)\Total','\\grafana-vm\Network(eth1)\Total'))`,
expected: `CounterPath in (@'\\grafana-vm\Network(eth0)\Total', @'\\grafana-vm\Network(eth1)\Total')`,
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable and has one selected value that contains comma",
query: &tsdb.Query{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
kql: `$__escapeMulti('\\grafana-vm,\Network(eth0)\Total Bytes Received')`,
expected: `@'\\grafana-vm,\Network(eth0)\Total Bytes Received'`,
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable and is not wrapped in single quotes should fail",
query: &tsdb.Query{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
kql: `$__escapeMulti(\\grafana-vm,\Network(eth0)\Total Bytes Received)`,
expected: "",
Err: require.Error,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
defaultTimeField := "TimeGenerated"
rawQuery, err := KqlInterpolate(tt.query, timeRange, tt.kql, defaultTimeField)
tt.Err(t, err)
if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}

@@ -1,9 +1,9 @@
{
"value": {
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T02:02:03.456789Z",
"value": {
"avg": 1.2
}
}
}

@@ -1,23 +1,23 @@
{
"value": {
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T03:02:03.456789Z",
"interval": "PT1H",
"segments": [
{
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T02:02:03.456789Z",
"value": {
"avg": 1
}
},
{
"start": "2019-09-13T02:02:03.456789Z",
"end": "2019-09-13T03:02:03.456789Z",
"value": {
"avg": 2
}
}
]
}
}

@@ -1,45 +1,45 @@
{
"value": {
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T03:02:03.456789Z",
"interval": "PT1H",
"segments": [
{
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T02:02:03.456789Z",
"segments": [
{
"value": {
"avg": 1
},
"blob": "a"
},
{
"value": {
"avg": 3
},
"blob": "b"
}
]
},
{
"start": "2019-09-13T02:02:03.456789Z",
"end": "2019-09-13T03:02:03.456789Z",
"segments": [
{
"value": {
"avg": 2
},
"blob": "a"
},
{
"value": {
"avg": 4
},
"blob": "b"
}
]
}
]
}
}

@@ -0,0 +1,38 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "TimeGenerated",
"type": "datetime"
},
{
"name": "Computer",
"type": "string"
},
{
"name": "avg_CounterValue",
"type": "real"
}
],
"rows": [
[
"2020-04-19T19:16:06.5Z",
"grafana-vm",
1.1
],
[
"2020-04-19T19:16:16.5Z",
"grafana-vm",
2.2
],
[
"2020-04-19T19:16:26.5Z",
"grafana-vm",
3.3
]
]
}
]
}

@@ -0,0 +1,68 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "TimeGenerated",
"type": "datetime"
},
{
"name": "ObjectName",
"type": "string"
},
{
"name": "avg_CounterValue",
"type": "real"
}
],
"rows": [
[
"2020-04-20T21:40:00Z",
"Processor",
0.75
],
[
"2020-04-20T21:40:00Z",
"Logical Disk",
16090.551851851851
],
[
"2020-04-20T21:40:00Z",
"Memory",
702.0666666666667
],
[
"2020-04-20T21:45:00Z",
"Memory",
700.5888888888888
],
[
"2020-04-20T21:45:00Z",
"Processor",
1.0055555555555555
],
[
"2020-04-20T21:45:00Z",
"Logical Disk",
16090.537037037036
],
[
"2020-04-20T21:50:00Z",
"Logical Disk",
16090.586419753086
],
[
"2020-04-20T21:50:00Z",
"Processor",
0.7407407407407407
],
[
"2020-04-20T21:50:00Z",
"Memory",
703.1111111111111
]
]
}
]
}

@@ -0,0 +1,31 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "TimeGenerated",
"type": "datetime"
},
{
"name": "avg_CounterValue",
"type": "int"
}
],
"rows": [
[
"2020-04-19T19:16:06.5Z",
1
],
[
"2020-04-19T19:16:16.5Z",
2
],
[
"2020-04-19T19:16:26.5Z",
3
]
]
}
]
}

@@ -0,0 +1,31 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "Computer",
"type": "string"
},
{
"name": "avg_CounterValue",
"type": "real"
}
],
"rows": [
[
"grafana-vm",
1.1
],
[
"grafana-vm",
2.2
],
[
"grafana-vm",
3.3
]
]
}
]
}

@@ -0,0 +1,31 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "TimeGenerated",
"type": "datetime"
},
{
"name": "Computer",
"type": "string"
}
],
"rows": [
[
"2020-04-19T19:16:06.5Z",
"grafana-vm"
],
[
"2020-04-19T19:16:16.5Z",
"grafana-vm"
],
[
"2020-04-19T19:16:26.5Z",
"grafana-vm"
]
]
}
]
}

@@ -0,0 +1,87 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "TenantId",
"type": "string"
},
{
"name": "Computer",
"type": "string"
},
{
"name": "ObjectName",
"type": "string"
},
{
"name": "CounterName",
"type": "string"
},
{
"name": "InstanceName",
"type": "string"
},
{
"name": "Min",
"type": "real"
},
{
"name": "Max",
"type": "real"
},
{
"name": "SampleCount",
"type": "int"
},
{
"name": "CounterValue",
"type": "real"
},
{
"name": "TimeGenerated",
"type": "datetime"
}
],
"rows": [
[
"a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
"grafana-vm",
"Memory",
"Available MBytes Memory",
"Memory",
null,
null,
null,
2040,
"2020-04-23T11:46:03.857Z"
],
[
"a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
"grafana-vm",
"Memory",
"Available MBytes Memory",
"Memory",
null,
null,
null,
2066,
"2020-04-23T11:46:13.857Z"
],
[
"a2c1b44e-3e57-4410-b027-6cc0ae6dee67",
"grafana-vm",
"Memory",
"Available MBytes Memory",
"Memory",
null,
null,
null,
2066,
"2020-04-23T11:46:23.857Z"
]
]
}
]
}

@@ -51,12 +51,7 @@ type AzureMonitorResponse struct {
Resourceregion string `json:"resourceregion"`
}
type ApplicationInsightsResponse struct {
MetricResponse *ApplicationInsightsMetricsResponse
QueryResponse *ApplicationInsightsQueryResponse
}
// ApplicationInsightsResponse is the json response from the Application Insights API
//ApplicationInsightsQueryResponse is the json response from the Application Insights API
type ApplicationInsightsQueryResponse struct {
Tables []struct {
Name string `json:"name"`
@@ -68,25 +63,27 @@ type ApplicationInsightsQueryResponse struct {
} `json:"tables"`
}
// ApplicationInsightsMetricsResponse is the json response from the Application Insights API
type ApplicationInsightsMetricsResponse struct {
Name string
Segments []struct {
Start time.Time
End time.Time
Segmented map[string]float64
Value float64
}
}
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables []struct {
Name string `json:"name"`
Columns []struct {
Name string `json:"name"`
Type string `json:"type"`
} `json:"columns"`
Rows [][]interface{} `json:"rows"`
} `json:"tables"`
Tables []AzureLogAnalyticsTable `json:"tables"`
}
//AzureLogAnalyticsTable is the table format for Log Analytics responses
type AzureLogAnalyticsTable struct {
Name string `json:"name"`
Columns []struct {
Name string `json:"name"`
Type string `json:"type"`
} `json:"columns"`
Rows [][]interface{} `json:"rows"`
}
type metadata struct {
Columns []column `json:"columns"`
Query string `json:"query"`
}
type column struct {
Name string `json:"name"`
Type string `json:"type"`
}
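A hypothetical snippet (not in the commit) showing how these types map onto the Log Analytics payload: the API returns a tables array, normally containing a single PrimaryResult table.

package azuremonitor

import (
	"encoding/json"
	"fmt"
)

func exampleUnmarshalResponse() error {
	payload := []byte(`{
	  "tables": [
	    {
	      "name": "PrimaryResult",
	      "columns": [
	        {"name": "TimeGenerated", "type": "datetime"},
	        {"name": "avg_CounterValue", "type": "real"}
	      ],
	      "rows": [["2020-04-19T19:16:06.5Z", 1.1]]
	    }
	  ]
	}`)

	var data AzureLogAnalyticsResponse
	if err := json.Unmarshal(payload, &data); err != nil {
		return err
	}
	// Prints: PrimaryResult datetime
	fmt.Println(data.Tables[0].Name, data.Tables[0].Columns[0].Type)
	return nil
}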

@@ -23,9 +23,9 @@ export default class AppInsightsDatasource {
/** @ngInject */
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>, private templateSrv: TemplateSrv) {
this.id = instanceSettings.id;
this.applicationId = instanceSettings.jsonData.appInsightsAppId;
this.applicationId = instanceSettings.jsonData.appInsightsAppId || '';
switch (instanceSettings.jsonData.cloudName) {
switch (instanceSettings.jsonData?.cloudName) {
// Azure US Government
case 'govazuremonitor':
break;
@@ -41,7 +41,7 @@ export default class AppInsightsDatasource {
this.baseUrl = `/appinsights/${this.version}/apps/${this.applicationId}`;
}
this.url = instanceSettings.url;
this.url = instanceSettings.url || '';
}
isConfigured(): boolean {

@@ -1,6 +1,5 @@
import AzureMonitorDatasource from '../datasource';
import FakeSchemaData from './__mocks__/schema';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { KustoSchema, AzureLogsVariable } from '../types';
import { toUtc } from '@grafana/data';
@@ -147,113 +146,54 @@ describe('AzureLogAnalyticsDatasource', () => {
};
const response = {
tables: [
{
name: 'PrimaryResult',
columns: [
{
name: 'TimeGenerated',
type: 'datetime',
},
{
name: 'Category',
type: 'string',
},
results: {
A: {
refId: 'A',
meta: {
columns: ['TimeGenerated', 'Computer', 'avg_CounterValue'],
query:
'Perf\r\n| where ObjectName == "Memory" and CounterName == "Available MBytes Memory"\n| where TimeGenerated >= datetime(\'2020-04-23T09:15:20Z\') and TimeGenerated <= datetime(\'2020-04-23T09:20:20Z\')\n| where 1 == 1\n| summarize avg(CounterValue) by bin(TimeGenerated, 1m), Computer \n| order by TimeGenerated asc',
},
series: [
{
name: 'count_',
type: 'long',
name: 'grafana-vm',
points: [
[2017.25, 1587633300000],
[2048, 1587633360000],
[2048.3333333333335, 1587633420000],
[2049, 1587633480000],
[2049, 1587633540000],
[2049, 1587633600000],
],
},
],
rows: [
['2018-06-02T20:20:00Z', 'Administrative', 2],
['2018-06-02T20:25:00Z', 'Administrative', 22],
['2018-06-02T20:30:00Z', 'Policy', 20],
],
},
],
},
};
describe('in time series format', () => {
describe('and the data is valid (has time, metric and value columns)', () => {
beforeEach(() => {
datasourceRequestMock.mockImplementation((options: { url: string }) => {
expect(options.url).toContain('query=AzureActivity');
expect(options.url).toContain('/api/tsdb/query');
return Promise.resolve({ data: response, status: 200 });
});
});
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data.length).toBe(2);
expect(results.data[0].datapoints.length).toBe(2);
expect(results.data[0].target).toEqual('Administrative');
expect(results.data[0].datapoints[0][1]).toEqual(1527970800000);
expect(results.data[0].datapoints[0][0]).toEqual(2);
expect(results.data[0].datapoints[1][1]).toEqual(1527971100000);
expect(results.data[0].datapoints[1][0]).toEqual(22);
});
});
});
describe('and the data has no time column)', () => {
beforeEach(() => {
const invalidResponse = {
tables: [
{
name: 'PrimaryResult',
columns: [
{
name: 'Category',
type: 'string',
},
{
name: 'count_',
type: 'long',
},
],
rows: [['Administrative', 2]],
},
],
};
datasourceRequestMock.mockImplementation((options: { url: string }) => {
expect(options.url).toContain('query=AzureActivity');
return Promise.resolve({ data: invalidResponse, status: 200 });
expect(results.data.length).toBe(1);
expect(results.data[0].name).toEqual('grafana-vm');
expect(results.data[0].fields.length).toBe(2);
expect(results.data[0].fields[0].name).toBe('Time');
expect(results.data[0].fields[1].name).toBe('grafana-vm');
expect(results.data[0].fields[0].values.toArray().length).toBe(6);
expect(results.data[0].fields[0].values.get(0)).toEqual(1587633300000);
expect(results.data[0].fields[1].values.get(0)).toEqual(2017.25);
expect(results.data[0].fields[0].values.get(1)).toEqual(1587633360000);
expect(results.data[0].fields[1].values.get(1)).toEqual(2048);
});
});
it('should throw an exception', () => {
ctx.ds.query(options).catch((err: any) => {
expect(err.message).toContain('The Time Series format requires a time column.');
});
});
});
});
describe('in tableformat', () => {
beforeEach(() => {
options.targets[0].azureLogAnalytics.resultFormat = 'table';
datasourceRequestMock.mockImplementation((options: { url: string }) => {
expect(options.url).toContain('query=AzureActivity');
return Promise.resolve({ data: response, status: 200 });
});
});
it('should return a list of columns and rows', () => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data[0].type).toBe('table');
expect(results.data[0].columns.length).toBe(3);
expect(results.data[0].rows.length).toBe(3);
expect(results.data[0].columns[0].text).toBe('TimeGenerated');
expect(results.data[0].columns[0].type).toBe('datetime');
expect(results.data[0].columns[1].text).toBe('Category');
expect(results.data[0].columns[1].type).toBe('string');
expect(results.data[0].columns[2].text).toBe('count_');
expect(results.data[0].columns[2].type).toBe('long');
expect(results.data[0].rows[0][0]).toEqual('2018-06-02T20:20:00Z');
expect(results.data[0].rows[0][1]).toEqual('Administrative');
expect(results.data[0].rows[0][2]).toEqual(2);
});
});
});
});

@@ -2,7 +2,8 @@ import _ from 'lodash';
import LogAnalyticsQuerystringBuilder from '../log_analytics/querystring_builder';
import ResponseParser from './response_parser';
import { AzureMonitorQuery, AzureDataSourceJsonData, AzureLogsVariable } from '../types';
import { DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
import { TimeSeries, toDataFrame } from '@grafana/data';
import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
@@ -24,10 +25,11 @@ export default class AzureLogAnalyticsDatasource {
switch (this.instanceSettings.jsonData.cloudName) {
case 'govazuremonitor': // Azure US Government
this.baseUrl = '/govloganalyticsazure';
break;
case 'germanyazuremonitor': // Azure Germany
break;
case 'chinaazuremonitor': // Azue China
case 'chinaazuremonitor': // Azure China
this.baseUrl = '/chinaloganalyticsazure';
break;
default:
@@ -35,8 +37,8 @@ export default class AzureLogAnalyticsDatasource {
this.baseUrl = '/loganalyticsazure';
}
this.url = instanceSettings.url;
this.defaultOrFirstWorkspace = this.instanceSettings.jsonData.logAnalyticsDefaultWorkspace;
this.url = instanceSettings.url || '';
this.defaultOrFirstWorkspace = this.instanceSettings.jsonData.logAnalyticsDefaultWorkspace || '';
this.setWorkspaceUrl();
}
@@ -59,10 +61,11 @@ export default class AzureLogAnalyticsDatasource {
switch (this.instanceSettings.jsonData.cloudName) {
case 'govazuremonitor': // Azure US Government
this.azureMonitorUrl = `/govworkspacesloganalytics/subscriptions`;
break;
case 'germanyazuremonitor': // Azure Germany
break;
case 'chinaazuremonitor': // Azue China
case 'chinaazuremonitor': // Azure China
this.azureMonitorUrl = `/chinaworkspacesloganalytics/subscriptions`;
break;
default:
@@ -91,7 +94,7 @@ export default class AzureLogAnalyticsDatasource {
if (!workspace) {
return Promise.resolve();
}
const url = `${this.baseUrl}/${workspace}/metadata`;
const url = `${this.baseUrl}/${this.templateSrv.replace(workspace, {})}/metadata`;
return this.doRequest(url).then((response: any) => {
return new ResponseParser(response.data).parseSchemaResult();
@@ -104,42 +107,65 @@ export default class AzureLogAnalyticsDatasource {
}).map(target => {
const item = target.azureLogAnalytics;
const querystringBuilder = new LogAnalyticsQuerystringBuilder(
this.templateSrv.replace(item.query, options.scopedVars, this.interpolateVariable),
options,
'TimeGenerated'
);
const generated = querystringBuilder.generate();
let workspace = this.templateSrv.replace(item.workspace, options.scopedVars);
if (!workspace && this.defaultOrFirstWorkspace) {
workspace = this.defaultOrFirstWorkspace;
}
const url = `${this.baseUrl}/${workspace}/query?${generated.uriString}`;
const subscriptionId = this.templateSrv.replace(target.subscription || this.subscriptionId, options.scopedVars);
const query = this.templateSrv.replace(item.query, options.scopedVars, this.interpolateVariable);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
url: url,
query: generated.rawQuery,
format: target.format,
resultFormat: item.resultFormat,
queryType: 'Azure Log Analytics',
subscriptionId: subscriptionId,
azureLogAnalytics: {
resultFormat: item.resultFormat,
query: query,
workspace: workspace,
},
};
});
if (!queries || queries.length === 0) {
return;
return [];
}
const promises = this.doQueries(queries);
return Promise.all(promises).then(results => {
return new ResponseParser(results).parseQueryResult();
const { data } = await getBackendSrv().datasourceRequest({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
},
});
const result: DataQueryResponseData[] = [];
if (data.results) {
Object.values(data.results).forEach((queryRes: any) => {
queryRes.series?.forEach((series: any) => {
const timeSeries: TimeSeries = {
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
};
result.push(toDataFrame(timeSeries));
});
queryRes.tables?.forEach((table: any) => {
result.push(toDataFrame(table));
});
});
}
return result;
}
metricFindQuery(query: string) {
@@ -363,7 +389,7 @@ export default class AzureLogAnalyticsDatasource {
return undefined;
}
isValidConfigField(field: string) {
isValidConfigField(field: string | undefined) {
return field && field.length > 0;
}
}
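With this change the frontend stops querying the Log Analytics API directly: query() now posts the targets to Grafana's /api/tsdb/query backend endpoint and converts the returned series and tables to data frames. The request body assembled above has roughly this shape (all values illustrative):

{
  "from": "1587633300000",
  "to": "1587633600000",
  "queries": [
    {
      "refId": "A",
      "intervalMs": 60000,
      "maxDataPoints": 100,
      "datasourceId": 42,
      "queryType": "Azure Log Analytics",
      "subscriptionId": "...",
      "azureLogAnalytics": {
        "resultFormat": "time_series",
        "query": "Perf | where $__timeFilter() | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
        "workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
      }
    }
  ]
}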

@@ -170,7 +170,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
let azureMonitorUrl = '',
subscriptionId = this.templateSrv.replace(subscription || this.props.options.jsonData.subscriptionId);
if (!!subscriptionId || !!azureLogAnalyticsSameAs) {
if (azureLogAnalyticsSameAs) {
const azureCloud = cloudName || 'azuremonitor';
azureMonitorUrl = `/${azureCloud}/subscriptions`;
} else {

@@ -137,6 +137,21 @@
},
"headers": [{ "name": "x-ms-app", "content": "Grafana" }]
},
{
"path": "govworkspacesloganalytics",
"method": "GET",
"url": "https://management.usgovcloudapi.net",
"tokenAuth": {
"url": "https://login.microsoftonline.us/{{.JsonData.logAnalyticsTenantId}}/oauth2/token",
"params": {
"grant_type": "client_credentials",
"client_id": "{{.JsonData.logAnalyticsClientId}}",
"client_secret": "{{.SecureJsonData.logAnalyticsClientSecret}}",
"resource": "https://management.usgovcloudapi.net/"
}
},
"headers": [{ "name": "x-ms-app", "content": "Grafana" }]
},
{
"path": "loganalyticsazure",
"method": "GET",
@@ -152,8 +167,7 @@
},
"headers": [
{ "name": "x-ms-app", "content": "Grafana" },
{ "name": "Cache-Control", "content": "public, max-age=60" },
{ "name": "Accept-Encoding", "content": "gzip" }
{ "name": "Cache-Control", "content": "public, max-age=60" }
]
},
{
@@ -171,8 +185,25 @@
},
"headers": [
{ "name": "x-ms-app", "content": "Grafana" },
{ "name": "Cache-Control", "content": "public, max-age=60" },
{ "name": "Accept-Encoding", "content": "gzip" }
{ "name": "Cache-Control", "content": "public, max-age=60" }
]
},
{
"path": "govloganalyticsazure",
"method": "GET",
"url": "https://api.loganalytics.us/v1/workspaces",
"tokenAuth": {
"url": "https://login.microsoftonline.us/{{.JsonData.logAnalyticsTenantId}}/oauth2/token",
"params": {
"grant_type": "client_credentials",
"client_id": "{{.JsonData.logAnalyticsClientId}}",
"client_secret": "{{.SecureJsonData.logAnalyticsClientSecret}}",
"resource": "https://api.loganalytics.us"
}
},
"headers": [
{ "name": "x-ms-app", "content": "Grafana" },
{ "name": "Cache-Control", "content": "public, max-age=60" }
]
}
],
