mirror of https://github.com/grafana/grafana

AzureMonitor: Alerting for Azure Application Insights (#19381)

* Convert Azure Application Insights datasource to Go

  Allows for alerting on the Application Insights data source

  Closes: #15153

* Fix timeGrainReset
* Default time interval for queries for alerts
* Fix a few rename-related bugs
* Update readme to indicate App Insights alerting
* Fix typo and add tests to ensure migration is happening
* Address code review feedback (mostly typos and unintended changes)

parent 92765a6c6f
commit 20faef8de5
@@ -0,0 +1,592 @@
package azuremonitor

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"github.com/grafana/grafana/pkg/api/pluginproxy"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/opentracing/opentracing-go"
	"golang.org/x/net/context/ctxhttp"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"strings"
	"time"
)

// ApplicationInsightsDatasource calls the Application Insights query APIs
type ApplicationInsightsDatasource struct {
	httpClient *http.Client
	dsInfo     *models.DataSource
}

type ApplicationInsightsQuery struct {
	RefID string

	IsRaw bool

	// Text based raw query options
	ApiURL            string
	Params            url.Values
	Alias             string
	Target            string
	TimeColumnName    string
	ValueColumnName   string
	SegmentColumnName string
}

func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
	result := &tsdb.Response{
		Results: map[string]*tsdb.QueryResult{},
	}

	queries, err := e.buildQueries(originalQueries, timeRange)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		queryRes, err := e.executeQuery(ctx, query)
		if err != nil {
			return nil, err
		}
		result.Results[query.RefID] = queryRes
	}

	return result, nil
}

func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*ApplicationInsightsQuery, error) {
	applicationInsightsQueries := []*ApplicationInsightsQuery{}
	startTime, err := timeRange.ParseFrom()
	if err != nil {
		return nil, err
	}

	endTime, err := timeRange.ParseTo()
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		applicationInsightsTarget := query.Model.Get("appInsights").MustMap()
		azlog.Debug("Application Insights", "target", applicationInsightsTarget)

		rawQuery := false
		if asInterface, ok := applicationInsightsTarget["rawQuery"]; ok {
			if asBool, ok := asInterface.(bool); ok {
				rawQuery = asBool
			} else {
				return nil, errors.New("'rawQuery' should be a boolean")
			}
		} else {
			return nil, errors.New("missing 'rawQuery' property")
		}

		if rawQuery {
			var rawQueryString string
			if asInterface, ok := applicationInsightsTarget["rawQueryString"]; ok {
				if asString, ok := asInterface.(string); ok {
					rawQueryString = asString
				}
			}
			if rawQueryString == "" {
				return nil, errors.New("rawQuery requires rawQueryString")
			}

			rawQueryString, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", rawQueryString))
			if err != nil {
				return nil, err
			}

			params := url.Values{}
			params.Add("query", rawQueryString)

			applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
				RefID:             query.RefId,
				IsRaw:             true,
				ApiURL:            "query",
				Params:            params,
				TimeColumnName:    fmt.Sprintf("%v", applicationInsightsTarget["timeColumn"]),
				ValueColumnName:   fmt.Sprintf("%v", applicationInsightsTarget["valueColumn"]),
				SegmentColumnName: fmt.Sprintf("%v", applicationInsightsTarget["segmentColumn"]),
				Target:            params.Encode(),
			})
		} else {
			alias := ""
			if val, ok := applicationInsightsTarget["alias"]; ok {
				alias = fmt.Sprintf("%v", val)
			}

			azureURL := fmt.Sprintf("metrics/%s", fmt.Sprintf("%v", applicationInsightsTarget["metricName"]))
			timeGrain := fmt.Sprintf("%v", applicationInsightsTarget["timeGrain"])
			timeGrains := applicationInsightsTarget["allowedTimeGrainsMs"]
			if timeGrain == "auto" {
				timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
				if err != nil {
					return nil, err
				}
			}

			params := url.Values{}
			params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
			if timeGrain != "none" {
				params.Add("interval", timeGrain)
			}
			params.Add("aggregation", fmt.Sprintf("%v", applicationInsightsTarget["aggregation"]))

			dimension := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimension"]))
			if applicationInsightsTarget["dimension"] != nil && len(dimension) > 0 && !strings.EqualFold(dimension, "none") {
				params.Add("segment", dimension)
			}

			dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimensionFilter"]))
			if applicationInsightsTarget["dimensionFilter"] != nil && len(dimensionFilter) > 0 {
				params.Add("filter", fmt.Sprintf("%v", dimensionFilter))
			}

			applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
				RefID:  query.RefId,
				IsRaw:  false,
				ApiURL: azureURL,
				Params: params,
				Alias:  alias,
				Target: params.Encode(),
			})
		}
	}

	return applicationInsightsQueries, nil
}
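
// A minimal sketch of the "appInsights" query model that buildQueries consumes,
// assuming the field shapes exercised in the tests below; the metric, dimension
// and filter values are illustrative, not prescriptive:
//
//	{
//	  "appInsights": {
//	    "rawQuery":        false,
//	    "timeGrain":       "PT1M",           // or "auto", resolved via setAutoTimeGrain
//	    "aggregation":     "Average",
//	    "metricName":      "server/exceptions",
//	    "alias":           "testalias",
//	    "dimension":       "blob",           // optional, becomes the "segment" parameter
//	    "dimensionFilter": "blob eq '*'"     // optional, becomes the "filter" parameter
//	  }
//	}
//
// With "rawQuery": true the model instead carries "rawQueryString" (KQL with
// $__ macros), plus "timeColumn", "valueColumn" and optionally "segmentColumn".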

func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery) (*tsdb.QueryResult, error) {
	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

	req, err := e.createRequest(ctx, e.dsInfo)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	req.URL.Path = path.Join(req.URL.Path, query.ApiURL)
	req.URL.RawQuery = query.Params.Encode()

	span, ctx := opentracing.StartSpanFromContext(ctx, "application insights query")
	span.SetTag("target", query.Target)
	span.SetTag("datasource_id", e.dsInfo.Id)
	span.SetTag("org_id", e.dsInfo.OrgId)

	defer span.Finish()

	err = opentracing.GlobalTracer().Inject(
		span.Context(),
		opentracing.HTTPHeaders,
		opentracing.HTTPHeadersCarrier(req.Header))

	if err != nil {
		azlog.Warn("failed to inject global tracer")
	}

	azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
	res, err := ctxhttp.Do(ctx, e.httpClient, req)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()
	if err != nil {
		return nil, err
	}

	if res.StatusCode/100 != 2 {
		azlog.Error("Request failed", "status", res.Status, "body", string(body))
		return nil, fmt.Errorf(string(body))
	}

	if query.IsRaw {
		queryResult.Series, queryResult.Meta, err = e.parseTimeSeriesFromQuery(body, query)
		if err != nil {
			queryResult.Error = err
			return queryResult, nil
		}
	} else {
		queryResult.Series, err = e.parseTimeSeriesFromMetrics(body, query)
		if err != nil {
			queryResult.Error = err
			return queryResult, nil
		}
	}

	return queryResult, nil
}

func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
	// find plugin
	plugin, ok := plugins.DataSources[dsInfo.Type]
	if !ok {
		return nil, errors.New("Unable to find datasource plugin Azure Application Insights")
	}

	var appInsightsRoute *plugins.AppPluginRoute
	for _, route := range plugin.Routes {
		if route.Path == "appinsights" {
			appInsightsRoute = route
			break
		}
	}

	appInsightsAppId := dsInfo.JsonData.Get("appInsightsAppId").MustString()
	proxyPass := fmt.Sprintf("appinsights/v1/apps/%s", appInsightsAppId)

	u, _ := url.Parse(dsInfo.Url)
	u.Path = path.Join(u.Path, fmt.Sprintf("/v1/apps/%s", appInsightsAppId))

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		azlog.Error("Failed to create request", "error", err)
		return nil, fmt.Errorf("Failed to create request. error: %v", err)
	}

	req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

	pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, dsInfo)

	return req, nil
}
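
// A rough sketch of the URL that executeQuery ends up sending, assuming an
// illustrative app id of "abc123"; the upstream Application Insights host comes
// from the plugin's "appinsights" route configuration via pluginproxy.ApplyRoute,
// not from this file:
//
//	createRequest:   {dsInfo.Url}/v1/apps/abc123
//	metrics query:   .../v1/apps/abc123/metrics/server/exceptions?aggregation=Average&interval=PT1M&timespan=...
//	raw KQL query:   .../v1/apps/abc123/query?query=<interpolated KQL>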

func (e *ApplicationInsightsDatasource) parseTimeSeriesFromQuery(body []byte, query *ApplicationInsightsQuery) (tsdb.TimeSeriesSlice, *simplejson.Json, error) {
	var data ApplicationInsightsQueryResponse
	err := json.Unmarshal(body, &data)
	if err != nil {
		azlog.Error("Failed to unmarshal Application Insights response", "error", err, "body", string(body))
		return nil, nil, err
	}

	type Metadata struct {
		Columns []string `json:"columns"`
	}

	meta := Metadata{}

	for _, t := range data.Tables {
		if t.Name == "PrimaryResult" {
			timeIndex, valueIndex, segmentIndex := -1, -1, -1
			meta.Columns = make([]string, 0)
			for i, v := range t.Columns {
				meta.Columns = append(meta.Columns, v.Name)
				switch v.Name {
				case query.TimeColumnName:
					timeIndex = i
				case query.ValueColumnName:
					valueIndex = i
				case query.SegmentColumnName:
					segmentIndex = i
				}
			}

			if timeIndex == -1 {
				azlog.Info("no time column specified, returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			if valueIndex == -1 {
				azlog.Info("no value column specified, returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			var getPoints func([]interface{}) *tsdb.TimeSeriesPoints
			slice := tsdb.TimeSeriesSlice{}
			if segmentIndex == -1 {
				legend := formatApplicationInsightsLegendKey(query.Alias, query.ValueColumnName, "", "")
				series := tsdb.NewTimeSeries(legend, []tsdb.TimePoint{})
				slice = append(slice, series)
				getPoints = func(row []interface{}) *tsdb.TimeSeriesPoints {
					return &series.Points
				}
			} else {
				mapping := map[string]*tsdb.TimeSeriesPoints{}
				getPoints = func(row []interface{}) *tsdb.TimeSeriesPoints {
					segment := fmt.Sprintf("%v", row[segmentIndex])
					if points, ok := mapping[segment]; ok {
						return points
					}
					legend := formatApplicationInsightsLegendKey(query.Alias, query.ValueColumnName, query.SegmentColumnName, segment)
					series := tsdb.NewTimeSeries(legend, []tsdb.TimePoint{})
					slice = append(slice, series)
					mapping[segment] = &series.Points
					return &series.Points
				}
			}

			for _, r := range t.Rows {
				timeStr, ok := r[timeIndex].(string)
				if !ok {
					return nil, simplejson.NewFromAny(meta), errors.New("invalid time value")
				}
				timeValue, err := time.Parse(time.RFC3339Nano, timeStr)
				if err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				var value float64
				if value, err = getFloat(r[valueIndex]); err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				points := getPoints(r)
				*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timeValue.Unix()*1000)))
			}

			return slice, simplejson.NewFromAny(meta), nil
		}
	}

	return nil, nil, errors.New("could not find table")
}

func (e *ApplicationInsightsDatasource) parseTimeSeriesFromMetrics(body []byte, query *ApplicationInsightsQuery) (tsdb.TimeSeriesSlice, error) {
	doc, err := simplejson.NewJson(body)
	if err != nil {
		return nil, err
	}

	value := doc.Get("value").MustMap()

	if value == nil {
		return nil, errors.New("could not find value element")
	}

	endStr, ok := value["end"].(string)
	if !ok {
		return nil, errors.New("missing 'end' value in response")
	}
	endTime, err := time.Parse(time.RFC3339Nano, endStr)
	if err != nil {
		return nil, fmt.Errorf("bad 'end' value: %v", err)
	}

	for k, v := range value {
		switch k {
		case "start":
		case "end":
		case "interval":
		case "segments":
			// we have segments!
			return parseSegmentedValueTimeSeries(query, endTime, v)
		default:
			return parseSingleValueTimeSeries(query, k, endTime, v)
		}
	}

	azlog.Error("Bad response from application insights/metrics", "body", string(body))
	return nil, errors.New("could not find expected values in response")
}

func parseSegmentedValueTimeSeries(query *ApplicationInsightsQuery, endTime time.Time, segmentsJson interface{}) (tsdb.TimeSeriesSlice, error) {
	segments, ok := segmentsJson.([]interface{})
	if !ok {
		return nil, errors.New("bad segments value")
	}

	slice := tsdb.TimeSeriesSlice{}
	seriesMap := map[string]*tsdb.TimeSeriesPoints{}

	for _, segment := range segments {
		segmentMap, ok := segment.(map[string]interface{})
		if !ok {
			return nil, errors.New("bad segments value")
		}
		err := processSegment(&slice, segmentMap, query, endTime, seriesMap)
		if err != nil {
			return nil, err
		}
	}

	return slice, nil
}

func processSegment(slice *tsdb.TimeSeriesSlice, segment map[string]interface{}, query *ApplicationInsightsQuery, endTime time.Time, pointMap map[string]*tsdb.TimeSeriesPoints) error {
	var segmentName string
	var segmentValue string
	var childSegments []interface{}
	hasChildren := false
	var value float64
	var valueName string
	var ok bool
	var err error
	for k, v := range segment {
		switch k {
		case "start":
		case "end":
			endStr, ok := v.(string)
			if !ok {
				return errors.New("missing 'end' value in response")
			}
			endTime, err = time.Parse(time.RFC3339Nano, endStr)
			if err != nil {
				return fmt.Errorf("bad 'end' value: %v", err)
			}
		case "segments":
			childSegments, ok = v.([]interface{})
			if !ok {
				return errors.New("invalid format segments")
			}
			hasChildren = true
		default:
			mapping, hasValues := v.(map[string]interface{})
			if hasValues {
				valueName = k
				value, err = getAggregatedValue(mapping, valueName)
				if err != nil {
					return err
				}
			} else {
				segmentValue, ok = v.(string)
				if !ok {
					return fmt.Errorf("invalid mapping for key %v", k)
				}
				segmentName = k
			}
		}
	}

	if hasChildren {
		for _, s := range childSegments {
			segmentMap, ok := s.(map[string]interface{})
			if !ok {
				return errors.New("invalid format segments")
			}
			if err := processSegment(slice, segmentMap, query, endTime, pointMap); err != nil {
				return err
			}
		}
	} else {
		aliased := formatApplicationInsightsLegendKey(query.Alias, valueName, segmentName, segmentValue)

		if segmentValue == "" {
			segmentValue = valueName
		}

		points, ok := pointMap[segmentValue]

		if !ok {
			series := tsdb.NewTimeSeries(aliased, tsdb.TimeSeriesPoints{})
			points = &series.Points
			*slice = append(*slice, series)
			pointMap[segmentValue] = points
		}

		*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(endTime.Unix()*1000)))
	}

	return nil
}

func parseSingleValueTimeSeries(query *ApplicationInsightsQuery, metricName string, endTime time.Time, valueJson interface{}) (tsdb.TimeSeriesSlice, error) {
	legend := formatApplicationInsightsLegendKey(query.Alias, metricName, "", "")

	valueMap, ok := valueJson.(map[string]interface{})
	if !ok {
		return nil, errors.New("bad value aggregation")
	}

	metricValue, err := getAggregatedValue(valueMap, metricName)
	if err != nil {
		return nil, err
	}

	return []*tsdb.TimeSeries{
		tsdb.NewTimeSeries(
			legend,
			tsdb.TimeSeriesPoints{
				tsdb.NewTimePoint(
					null.FloatFrom(metricValue),
					float64(endTime.Unix()*1000)),
			},
		),
	}, nil
}

func getAggregatedValue(valueMap map[string]interface{}, valueName string) (float64, error) {
	aggValue := ""
	var metricValue float64
	var err error
	for k, v := range valueMap {
		if aggValue != "" {
			return 0, fmt.Errorf("found multiple aggregations, %v, %v", aggValue, k)
		}
		if k == "" {
			return 0, errors.New("found no aggregation name")
		}
		aggValue = k
		metricValue, err = getFloat(v)

		if err != nil {
			return 0, fmt.Errorf("bad value: %v", err)
		}
	}

	if aggValue == "" {
		return 0, fmt.Errorf("no aggregation value found for %v", valueName)
	}

	return metricValue, nil
}

func getFloat(in interface{}) (float64, error) {
	if out, ok := in.(float32); ok {
		return float64(out), nil
	} else if out, ok := in.(int32); ok {
		return float64(out), nil
	} else if out, ok := in.(json.Number); ok {
		return out.Float64()
	} else if out, ok := in.(int64); ok {
		return float64(out), nil
	} else if out, ok := in.(float64); ok {
		return out, nil
	}

	return 0, fmt.Errorf("cannot convert '%v' to float64", in)
}

// formatApplicationInsightsLegendKey builds the legend key or timeseries name.
// Alias patterns like {{metric}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, dimensionName string, dimensionValue string) string {
	if alias == "" {
		if len(dimensionName) > 0 {
			return fmt.Sprintf("{%s=%s}.%s", dimensionName, dimensionValue, metricName)
		}
		return metricName
	}

	result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		metaPartName := strings.Replace(string(in), "{{", "", 1)
		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
		metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

		switch metaPartName {
		case "metric":
			return []byte(metricName)
		case "dimensionname", "groupbyname":
			return []byte(dimensionName)
		case "dimensionvalue", "groupbyvalue":
			return []byte(dimensionValue)
		}

		return in
	})

	return string(result)
}
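
// Legend naming, as exercised by the tests below: with no alias a segmented series
// is named "{segment=a}.value" and an unsegmented one is just "value"; with the alias
// "{{metric}} {{dimensionname}} {{dimensionvalue}}" the same series becomes
// "value segment a". A short sketch of the substitution, assuming those inputs:
//
//	formatApplicationInsightsLegendKey("", "value", "segment", "a")
//	// -> "{segment=a}.value"
//	formatApplicationInsightsLegendKey("{{metric}} {{dimensionname}} {{dimensionvalue}}", "value", "segment", "a")
//	// -> "value segment a"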
@@ -0,0 +1,316 @@
package azuremonitor

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"testing"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"

	. "github.com/smartystreets/goconvey/convey"
)

func TestApplicationInsightsDatasource(t *testing.T) {
	Convey("ApplicationInsightsDatasource", t, func() {
		datasource := &ApplicationInsightsDatasource{}

		Convey("Parse queries from frontend and build AzureMonitor API queries", func() {
			fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
			tsdbQuery := &tsdb.TsdbQuery{
				TimeRange: &tsdb.TimeRange{
					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
				},
				Queries: []*tsdb.Query{
					{
						DataSource: &models.DataSource{
							JsonData: simplejson.NewFromAny(map[string]interface{}{}),
						},
						Model: simplejson.NewFromAny(map[string]interface{}{
							"appInsights": map[string]interface{}{
								"rawQuery":    false,
								"timeGrain":   "PT1M",
								"aggregation": "Average",
								"metricName":  "server/exceptions",
								"alias":       "testalias",
								"queryType":   "Application Insights",
							},
						}),
						RefId:      "A",
						IntervalMs: 1234,
					},
				},
			}
			Convey("and is a normal query", func() {
				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(len(queries), ShouldEqual, 1)
				So(queries[0].RefID, ShouldEqual, "A")
				So(queries[0].ApiURL, ShouldEqual, "metrics/server/exceptions")
				So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				So(len(queries[0].Params), ShouldEqual, 3)
				So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
				So(queries[0].Params["aggregation"][0], ShouldEqual, "Average")
				So(queries[0].Params["interval"][0], ShouldEqual, "PT1M")
				So(queries[0].Alias, ShouldEqual, "testalias")
			})

			Convey("and has a time grain set to auto", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":    false,
						"timeGrain":   "auto",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
					},
				})
				tsdbQuery.Queries[0].IntervalMs = 400000

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Params["interval"][0], ShouldEqual, "PT15M")
			})

			Convey("and has a time grain set to auto and the metric has a limited list of allowed time grains", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":            false,
						"timeGrain":           "auto",
						"aggregation":         "Average",
						"metricName":          "Percentage CPU",
						"alias":               "testalias",
						"queryType":           "Application Insights",
						"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
					},
				})
				tsdbQuery.Queries[0].IntervalMs = 400000

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Params["interval"][0], ShouldEqual, "PT5M")
			})

			Convey("and has a dimension filter", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":        false,
						"timeGrain":       "PT1M",
						"aggregation":     "Average",
						"metricName":      "Percentage CPU",
						"alias":           "testalias",
						"queryType":       "Application Insights",
						"dimension":       "blob",
						"dimensionFilter": "blob eq '*'",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Target, ShouldEqual, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				So(queries[0].Params["filter"][0], ShouldEqual, "blob eq '*'")
			})

			Convey("and has a dimension filter set to None", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":    false,
						"timeGrain":   "PT1M",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
						"dimension":   "None",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
			})

Convey("id a raw query", func() { |
||||
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":       true,
						"rawQueryString": "exceptions | where $__timeFilter(timestamp) | summarize count=count() by bin(timestamp, $__interval)",
						"timeColumn":     "timestamp",
						"valueColumn":    "count",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)
				So(queries[0].Params["query"][0], ShouldEqual, "exceptions | where ['timestamp'] >= datetime('2018-03-15T13:00:00Z') and ['timestamp'] <= datetime('2018-03-15T13:34:00Z') | summarize count=count() by bin(timestamp, 1234ms)")
				So(queries[0].Target, ShouldEqual, "query=exceptions+%7C+where+%5B%27timestamp%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27timestamp%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+summarize+count%3Dcount%28%29+by+bin%28timestamp%2C+1234ms%29")
			})
		})

		Convey("Parse Application Insights query API response in the time series format", func() {
			Convey("no segments", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/1-application-insights-response-raw-query.json")
				So(err, ShouldBeNil)

				query := &ApplicationInsightsQuery{
					IsRaw:           true,
					TimeColumnName:  "timestamp",
					ValueColumnName: "value",
				}
				series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[0].Points[1][0].Float64, ShouldEqual, 2)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568340123000))
			})

			Convey("with segments", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
				So(err, ShouldBeNil)

				query := &ApplicationInsightsQuery{
					IsRaw:             true,
					TimeColumnName:    "timestamp",
					ValueColumnName:   "value",
					SegmentColumnName: "segment",
				}
				series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 2)
				So(series[0].Name, ShouldEqual, "{segment=a}.value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[0].Points[1][0].Float64, ShouldEqual, 3)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568426523000))

				So(series[1].Name, ShouldEqual, "{segment=b}.value")
				So(series[1].Points[0][0].Float64, ShouldEqual, 2)
				So(series[1].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[1].Points[1][0].Float64, ShouldEqual, 4)
				So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568426523000))

				Convey("with alias", func() {
					data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
					So(err, ShouldBeNil)

					query := &ApplicationInsightsQuery{
						IsRaw:             true,
						TimeColumnName:    "timestamp",
						ValueColumnName:   "value",
						SegmentColumnName: "segment",
						Alias:             "{{metric}} {{dimensionname}} {{dimensionvalue}}",
					}
					series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
					So(err, ShouldBeNil)

					So(len(series), ShouldEqual, 2)
					So(series[0].Name, ShouldEqual, "value segment a")
					So(series[1].Name, ShouldEqual, "value segment b")
				})
			})
		})

		Convey("Parse Application Insights metrics API", func() {
			Convey("single value", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/3-application-insights-response-metrics-single-value.json")
				So(err, ShouldBeNil)
				query := &ApplicationInsightsQuery{
					IsRaw: false,
				}
				series, err := datasource.parseTimeSeriesFromMetrics(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 1)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1.2)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
			})

			Convey("1H separation", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-no-segment.json")
				So(err, ShouldBeNil)
				query := &ApplicationInsightsQuery{
					IsRaw: false,
				}
				series, err := datasource.parseTimeSeriesFromMetrics(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
				So(series[0].Points[1][0].Float64, ShouldEqual, 2)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

				Convey("with segmentation", func() {
					data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
					So(err, ShouldBeNil)
					query := &ApplicationInsightsQuery{
						IsRaw: false,
					}
					series, err := datasource.parseTimeSeriesFromMetrics(data, query)
					So(err, ShouldBeNil)

					So(len(series), ShouldEqual, 2)
					So(series[0].Name, ShouldEqual, "{blob=a}.value")
					So(len(series[0].Points), ShouldEqual, 2)

					So(series[0].Points[0][0].Float64, ShouldEqual, 1)
					So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
					So(series[0].Points[1][0].Float64, ShouldEqual, 2)
					So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

					So(series[1].Name, ShouldEqual, "{blob=b}.value")
					So(len(series[1].Points), ShouldEqual, 2)

					So(series[1].Points[0][0].Float64, ShouldEqual, 3)
					So(series[1].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
					So(series[1].Points[1][0].Float64, ShouldEqual, 4)
					So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

					Convey("with alias", func() {
						data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
						So(err, ShouldBeNil)
						query := &ApplicationInsightsQuery{
							IsRaw: false,
							Alias: "{{metric}} {{dimensionname}} {{dimensionvalue}}",
						}
						series, err := datasource.parseTimeSeriesFromMetrics(data, query)
						So(err, ShouldBeNil)

						So(len(series), ShouldEqual, 2)
						So(series[0].Name, ShouldEqual, "value blob a")
						So(series[1].Name, ShouldEqual, "value blob b")
					})
				})
			})
		})
	})
}
@@ -0,0 +1,58 @@
package azuremonitor

import "encoding/json"

// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value.
// If the metric has a limited set of possible intervals/time grains then use those
// instead of the default list of intervals.
func setAutoTimeGrain(intervalMs int64, timeGrains interface{}) (string, error) {
	// parses array of numbers from the timeGrains json field
	allowedTimeGrains := []int64{}
	tgs, ok := timeGrains.([]interface{})
	if ok {
		for _, v := range tgs {
			jsonNumber, ok := v.(json.Number)
			if ok {
				tg, err := jsonNumber.Int64()
				if err == nil {
					allowedTimeGrains = append(allowedTimeGrains, tg)
				}
			}
		}
	}

	autoInterval := findClosestAllowedIntervalMS(intervalMs, allowedTimeGrains)
	tg := &TimeGrain{}
	autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
	if err != nil {
		return "", err
	}

	return autoTimeGrain, nil
}

// findClosestAllowedIntervalMS is used for the auto time grain setting.
// It finds the closest time grain from the list of allowed time grains for Azure Monitor
// using the Grafana interval in milliseconds.
// Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter
// allows overriding the default list of allowed time grains.
func findClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
	allowedIntervals := defaultAllowedIntervalsMS

	if len(allowedTimeGrains) > 0 {
		allowedIntervals = allowedTimeGrains
	}

	closest := allowedIntervals[0]

	for i, allowed := range allowedIntervals {
		if intervalMs > allowed {
			if i+1 < len(allowedIntervals) {
				closest = allowedIntervals[i+1]
			} else {
				closest = allowed
			}
		}
	}
	return closest
}
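
// Worked example taken from the tests: with a Grafana interval of 400000ms (~6.7m)
// and no per-metric restriction, setAutoTimeGrain falls back to the default interval
// list and returns "PT15M"; with allowedTimeGrainsMs of [60000, 300000] the closest
// allowed grain is 300000ms, i.e. "PT5M".
//
//	grain, _ := setAutoTimeGrain(400000, nil)                                                        // "PT15M"
//	grain, _ = setAutoTimeGrain(400000, []interface{}{json.Number("60000"), json.Number("300000")})  // "PT5M"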
@@ -0,0 +1,118 @@
package azuremonitor

import (
	"fmt"
	"regexp"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/tsdb"
)

const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?`

type kqlMacroEngine struct {
	timeRange *tsdb.TimeRange
	query     *tsdb.Query
}

func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
	engine := kqlMacroEngine{}
	return engine.Interpolate(query, timeRange, kql)
}

func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
	m.timeRange = timeRange
	m.query = query
	rExp, _ := regexp.Compile(sExpr)
	var macroError error

	kql = m.ReplaceAllStringSubmatchFunc(rExp, kql, func(groups []string) string {
		args := []string{}
		if len(groups) > 2 {
			args = strings.Split(groups[2], ",")
		}

		for i, arg := range args {
			args[i] = strings.Trim(arg, " ")
		}
		res, err := m.evaluateMacro(groups[1], args)
		if err != nil && macroError == nil {
			macroError = err
			return "macro_error()"
		}
		return res
	})

	if macroError != nil {
		return "", macroError
	}

	return kql, nil
}

func (m *kqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
	switch name {
	case "__timeFilter":
		timeColumn := "timestamp"
		if len(args) > 0 && args[0] != "" {
			timeColumn = args[0]
		}
		return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn, m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), timeColumn, m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
	case "__from":
		return fmt.Sprintf("datetime('%s')", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
	case "__to":
		return fmt.Sprintf("datetime('%s')", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
	case "__interval":
		var interval time.Duration
		if m.query.IntervalMs == 0 {
			to := m.timeRange.MustGetTo().UnixNano()
			from := m.timeRange.MustGetFrom().UnixNano()
			// default to roughly 60 data points across the range if nothing in the query is more specific
			defaultInterval := time.Duration((to - from) / 60)
			var err error
			interval, err = tsdb.GetIntervalFrom(m.query.DataSource, m.query.Model, defaultInterval)
			if err != nil {
				azlog.Warn("Unable to get interval from query", "datasource", m.query.DataSource, "model", m.query.Model)
				interval = defaultInterval
			}
		} else {
			interval = time.Millisecond * time.Duration(m.query.IntervalMs)
		}
		return fmt.Sprintf("%dms", int(interval/time.Millisecond)), nil
	case "__contains":
		if len(args) < 2 || args[0] == "" || args[1] == "" {
			return "", fmt.Errorf("macro %v needs colName and variableSet", name)
		}

		if args[1] == "all" {
			return "1 == 1", nil
		}

		return fmt.Sprintf("['%s'] in ('%s')", args[0], args[1]), nil
	default:
		return "", fmt.Errorf("Unknown macro %v", name)
	}
}

func (m *kqlMacroEngine) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
	result := ""
	lastIndex := 0

	for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
		groups := []string{}
		for i := 0; i < len(v); i += 2 {
			if v[i] < 0 {
				groups = append(groups, "")
			} else {
				groups = append(groups, str[v[i]:v[i+1]])
			}
		}

		result += str[lastIndex:v[0]] + repl(groups)
		lastIndex = v[1]
	}

	return result + str[lastIndex:]
}
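
// Macro expansion as exercised by the raw-query test above: for the 2018-03-15
// 13:00-13:34 UTC range and a 1234ms query interval,
//
//	$__timeFilter(timestamp) -> ['timestamp'] >= datetime('2018-03-15T13:00:00Z') and ['timestamp'] <= datetime('2018-03-15T13:34:00Z')
//	$__interval              -> 1234ms
//
// $__from and $__to expand to datetime('...') literals, and $__contains(col, all)
// short-circuits to "1 == 1".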
@@ -0,0 +1,27 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "timestamp",
          "type": "datetime"
        },
        {
          "name": "value",
          "type": "int"
        }
      ],
      "rows": [
        [
          "2019-09-13T01:02:03.456789Z",
          1
        ],
        [
          "2019-09-13T02:02:03.456789Z",
          2
        ]
      ]
    }
  ]
}
@@ -0,0 +1,43 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "timestamp",
          "type": "datetime"
        },
        {
          "name": "value",
          "type": "int"
        },
        {
          "name": "segment",
          "type": "string"
        }
      ],
      "rows": [
        [
          "2019-09-13T01:02:03.456789Z",
          1,
          "a"
        ],
        [
          "2019-09-13T01:02:03.456789Z",
          2,
          "b"
        ],
        [
          "2019-09-14T02:02:03.456789Z",
          3,
          "a"
        ],
        [
          "2019-09-14T02:02:03.456789Z",
          4,
          "b"
        ]
      ]
    }
  ]
}
@@ -0,0 +1,9 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T02:02:03.456789Z",
    "value": {
      "avg": 1.2
    }
  }
}
@@ -0,0 +1,23 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "value": {
          "avg": 1
        }
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "value": {
          "avg": 2
        }
      }
    ]
  }
}
@@ -0,0 +1,45 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 1
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 3
            },
            "blob": "b"
          }
        ]
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 2
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 4
            },
            "blob": "b"
          }
        ]
      }
    ]
  }
}
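
For reference, the segmented metrics document above is what parseTimeSeriesFromMetrics and processSegment walk recursively; per the tests it yields one series per "blob" value, with point timestamps taken from each segment's "end" time:

	{blob=a}.value: 1 @ 2019-09-13T02:02:03Z, 2 @ 2019-09-13T03:02:03Z
	{blob=b}.value: 3 @ 2019-09-13T02:02:03Z, 4 @ 2019-09-13T03:02:03Z

With the alias pattern {{metric}} {{dimensionname}} {{dimensionvalue}}, the same series are named "value blob a" and "value blob b".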
@@ -1,72 +0,0 @@
import AppInsightsQuerystringBuilder from './app_insights_querystring_builder';
import { toUtc } from '@grafana/data';

describe('AppInsightsQuerystringBuilder', () => {
  let builder: AppInsightsQuerystringBuilder;

  beforeEach(() => {
    builder = new AppInsightsQuerystringBuilder(toUtc('2017-08-22 06:00'), toUtc('2017-08-22 07:00'), '1h');
  });

  describe('with only from/to date range', () => {
    it('should always add datetime filtering to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and aggregation type', () => {
    beforeEach(() => {
      builder.setAggregation('avg');
    });

    it('should add datetime filtering and aggregation to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&aggregation=avg`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and group by segment', () => {
    beforeEach(() => {
      builder.setGroupBy('client/city');
    });

    it('should add datetime filtering and segment to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&segment=client/city`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and specific group by interval', () => {
    beforeEach(() => {
      builder.setInterval('specific', 1, 'hour');
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&interval=PT1H`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and auto group by interval', () => {
    beforeEach(() => {
      builder.setInterval('auto', '', '');
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&interval=PT1H`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with filter', () => {
    beforeEach(() => {
      builder.setFilter(`client/city eq 'Boydton'`);
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&filter=client/city eq 'Boydton'`;
      expect(builder.generate()).toEqual(querystring);
    });
  });
});
@@ -1,56 +0,0 @@
import TimeGrainConverter from '../time_grain_converter';

export default class AppInsightsQuerystringBuilder {
  aggregation = '';
  groupBy = '';
  timeGrainType = '';
  timeGrain = '';
  timeGrainUnit = '';
  filter = '';

  constructor(private from: any, private to: any, public grafanaInterval: any) {}

  setAggregation(aggregation: string) {
    this.aggregation = aggregation;
  }

  setGroupBy(groupBy: string) {
    this.groupBy = groupBy;
  }

  setInterval(timeGrainType: string, timeGrain: any, timeGrainUnit: string) {
    this.timeGrainType = timeGrainType;
    this.timeGrain = timeGrain;
    this.timeGrainUnit = timeGrainUnit;
  }

  setFilter(filter: string) {
    this.filter = filter;
  }

  generate() {
    let querystring = `timespan=${this.from.utc().format()}/${this.to.utc().format()}`;

    if (this.aggregation && this.aggregation.length > 0) {
      querystring += `&aggregation=${this.aggregation}`;
    }

    if (this.groupBy && this.groupBy.length > 0) {
      querystring += `&segment=${this.groupBy}`;
    }

    if (this.timeGrainType === 'specific' && this.timeGrain && this.timeGrainUnit) {
      querystring += `&interval=${TimeGrainConverter.createISO8601Duration(this.timeGrain, this.timeGrainUnit)}`;
    }

    if (this.timeGrainType === 'auto') {
      querystring += `&interval=${TimeGrainConverter.createISO8601DurationFromInterval(this.grafanaInterval)}`;
    }

    if (this.filter) {
      querystring += `&filter=${this.filter}`;
    }

    return querystring;
  }
}