mirror of https://github.com/grafana/grafana
AzureMonitor: Remove deprecated code (#48328)
parent
07bd261cff
commit
6edefe5147
@ -1,269 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"context" |
||||
"encoding/json" |
||||
"fmt" |
||||
"io/ioutil" |
||||
"net/http" |
||||
"net/url" |
||||
"path" |
||||
"sort" |
||||
"strings" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||
"go.opentelemetry.io/otel/attribute" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/tracing" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog" |
||||
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types" |
||||
"github.com/grafana/grafana/pkg/util/errutil" |
||||
) |
||||
|
||||
// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct {
	// Proxy forwards resource HTTP requests to the upstream service
	// (see ResourceRequest).
	Proxy types.ServiceProxy
}
||||
|
||||
// ApplicationInsightsQuery is the model that holds the information
// needed to make a metrics query to Application Insights, and the information
// used to parse the response.
type ApplicationInsightsQuery struct {
	// RefID identifies the query; query responses are keyed by it.
	RefID string
	// TimeRange is the window the query covers.
	TimeRange backend.TimeRange

	// Text based raw query options.
	ApiURL string     // relative API path, e.g. "metrics/<metric name>"
	Params url.Values // URL query parameters sent with the request
	Alias  string     // legend alias pattern applied to result fields
	Target string     // encoded form of Params (recorded on the trace span)

	// These fields are used when parsing the response.
	metricName  string
	dimensions  []string
	aggregation string
}
||||
|
||||
// ResourceRequest proxies a resource HTTP request to Application Insights
// using the provided client, writing the upstream response to rw.
func (e *ApplicationInsightsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
	e.Proxy.Do(rw, req, cli)
}
||||
|
||||
func (e *ApplicationInsightsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, |
||||
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, |
||||
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { |
||||
result := backend.NewQueryDataResponse() |
||||
|
||||
queries, err := e.buildQueries(originalQueries) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
for _, query := range queries { |
||||
queryRes, err := e.executeQuery(ctx, query, dsInfo, client, url, tracer) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
result.Responses[query.RefID] = queryRes |
||||
} |
||||
|
||||
return result, nil |
||||
} |
||||
|
||||
// buildQueries maps raw frontend queries into ApplicationInsightsQuery models:
// it decodes the JSON query model, resolves the time grain (falling back to an
// automatically chosen grain when it is "auto" or unset), and assembles the
// API query parameters (timespan, interval, aggregation, filter, segment).
func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery) ([]*ApplicationInsightsQuery, error) {
	applicationInsightsQueries := []*ApplicationInsightsQuery{}

	for _, query := range queries {
		queryBytes, err := query.JSON.MarshalJSON()
		if err != nil {
			return nil, fmt.Errorf("failed to re-encode the Azure Application Insights query into JSON: %w", err)
		}
		queryJSONModel := insightsJSONQuery{}
		err = json.Unmarshal(queryBytes, &queryJSONModel)
		if err != nil {
			return nil, fmt.Errorf("failed to decode the Azure Application Insights query object from JSON: %w", err)
		}

		insightsJSONModel := queryJSONModel.AppInsights
		azlog.Debug("Application Insights", "target", insightsJSONModel)

		azureURL := fmt.Sprintf("metrics/%s", insightsJSONModel.MetricName)
		timeGrain := insightsJSONModel.TimeGrain
		timeGrains := insightsJSONModel.AllowedTimeGrainsMs

		// Previous versions of the query model don't specify a time grain, so we
		// need to fallback to a default value
		if timeGrain == "auto" || timeGrain == "" {
			timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
			if err != nil {
				return nil, err
			}
		}

		params := url.Values{}
		// timespan is sent as "<from>/<to>" in RFC3339, normalized to UTC.
		params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
		if timeGrain != "none" {
			params.Add("interval", timeGrain)
		}
		params.Add("aggregation", insightsJSONModel.Aggregation)

		dimensionFilter := strings.TrimSpace(insightsJSONModel.DimensionFilter)
		if dimensionFilter != "" {
			params.Add("filter", dimensionFilter)
		}

		if len(insightsJSONModel.Dimensions) != 0 {
			params.Add("segment", strings.Join(insightsJSONModel.Dimensions, ","))
		}
		applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
			RefID:       query.RefID,
			TimeRange:   query.TimeRange,
			ApiURL:      azureURL,
			Params:      params,
			Alias:       insightsJSONModel.Alias,
			Target:      params.Encode(),
			metricName:  insightsJSONModel.MetricName,
			aggregation: insightsJSONModel.Aggregation,
			dimensions:  insightsJSONModel.Dimensions,
		})
	}

	return applicationInsightsQueries, nil
}
||||
|
||||
// executeQuery runs a single Application Insights metrics query: it builds the
// HTTP request, records a trace span, performs the call, and converts the JSON
// metrics result into a data frame. Note the two error channels: failures tied
// to this particular query are reported via dataResponse.Error with a nil
// error return, while read/decode failures use the error return.
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (
	backend.DataResponse, error) {
	dataResponse := backend.DataResponse{}

	req, err := e.createRequest(ctx, dsInfo, url)
	if err != nil {
		dataResponse.Error = err
		return dataResponse, nil
	}

	// Append the per-query relative path and parameters onto the base app URL.
	req.URL.Path = path.Join(req.URL.Path, query.ApiURL)
	req.URL.RawQuery = query.Params.Encode()

	ctx, span := tracer.Start(ctx, "application insights query")
	// Millisecond timestamps and datasource/org identifiers on the span.
	span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
	span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
	span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))

	defer span.End()

	// Propagate trace context on the outgoing request headers.
	tracer.Inject(ctx, req.Header, span)

	azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
	res, err := client.Do(req)
	if err != nil {
		dataResponse.Error = err
		return dataResponse, nil
	}

	body, err := ioutil.ReadAll(res.Body)
	defer func() {
		if err := res.Body.Close(); err != nil {
			azlog.Warn("Failed to close response body", "err", err)
		}
	}()
	if err != nil {
		return backend.DataResponse{}, err
	}

	// Any non-2xx status is treated as a failed request.
	if res.StatusCode/100 != 2 {
		azlog.Debug("Request failed", "status", res.Status, "body", string(body))
		return backend.DataResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
	}

	mr := MetricsResult{}
	err = json.Unmarshal(body, &mr)
	if err != nil {
		return backend.DataResponse{}, err
	}

	frame, err := InsightsMetricsResultToFrame(mr, query.metricName, query.aggregation, query.dimensions)
	if err != nil {
		dataResponse.Error = err
		return dataResponse, nil
	}

	applyInsightsMetricAlias(frame, query.Alias)

	dataResponse.Frames = data.Frames{frame}
	return dataResponse, nil
}
||||
|
||||
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) { |
||||
appInsightsAppID := dsInfo.Settings.AppInsightsAppId |
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) |
||||
if err != nil { |
||||
azlog.Debug("Failed to create request", "error", err) |
||||
return nil, errutil.Wrap("Failed to create request", err) |
||||
} |
||||
req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID) |
||||
|
||||
return req, nil |
||||
} |
||||
|
||||
// formatApplicationInsightsLegendKey builds the legend key or timeseries name
|
||||
// Alias patterns like {{metric}} are replaced with the appropriate data values.
|
||||
func formatApplicationInsightsLegendKey(alias string, metricName string, labels data.Labels) string { |
||||
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
|
||||
lowerLabels := data.Labels{} |
||||
for k, v := range labels { |
||||
lowerLabels[strings.ToLower(k)] = v |
||||
} |
||||
keys := make([]string, 0, len(labels)) |
||||
for k := range lowerLabels { |
||||
keys = append(keys, k) |
||||
} |
||||
keys = sort.StringSlice(keys) |
||||
|
||||
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte { |
||||
metaPartName := strings.Replace(string(in), "{{", "", 1) |
||||
metaPartName = strings.Replace(metaPartName, "}}", "", 1) |
||||
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName)) |
||||
|
||||
switch metaPartName { |
||||
case "metric": |
||||
return []byte(metricName) |
||||
case "dimensionname", "groupbyname": |
||||
return []byte(keys[0]) |
||||
case "dimensionvalue", "groupbyvalue": |
||||
return []byte(lowerLabels[keys[0]]) |
||||
} |
||||
|
||||
if v, ok := lowerLabels[metaPartName]; ok { |
||||
return []byte(v) |
||||
} |
||||
|
||||
return in |
||||
}) |
||||
|
||||
return string(result) |
||||
} |
||||
|
||||
func applyInsightsMetricAlias(frame *data.Frame, alias string) { |
||||
if alias == "" { |
||||
return |
||||
} |
||||
|
||||
for _, field := range frame.Fields { |
||||
if field.Type() == data.FieldTypeTime || field.Type() == data.FieldTypeNullableTime { |
||||
continue |
||||
} |
||||
|
||||
displayName := formatApplicationInsightsLegendKey(alias, field.Name, field.Labels) |
||||
|
||||
if field.Config == nil { |
||||
field.Config = &data.FieldConfig{} |
||||
} |
||||
|
||||
field.Config.DisplayName = displayName |
||||
} |
||||
} |
@ -1,237 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"context" |
||||
"encoding/json" |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types" |
||||
"github.com/stretchr/testify/require" |
||||
) |
||||
|
||||
// TestApplicationInsightsDatasource exercises buildQueries: decoding the
// frontend JSON model, time-grain resolution (explicit, "auto", empty, and
// restricted allowed grains), and dimension/filter parameter handling.
func TestApplicationInsightsDatasource(t *testing.T) {
	t.Run("ApplicationInsightsDatasource", func(t *testing.T) {
		datasource := &ApplicationInsightsDatasource{}

		t.Run("Parse queries from frontend and build AzureMonitor API queries", func(t *testing.T) {
			// 34-minute window starting 2018-03-15T13:00:00Z.
			fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
			tsdbQuery := []backend.DataQuery{
				{
					TimeRange: backend.TimeRange{
						From: fromStart,
						To:   fromStart.Add(34 * time.Minute),
					},
					JSON: []byte(`{
						"appInsights": {
							"rawQuery":    false,
							"timeGrain":   "PT1M",
							"aggregation": "Average",
							"metricName":  "server/exceptions",
							"alias":       "testalias",
							"queryType":   "Application Insights"
						}
					}`),
					RefID:    "A",
					Interval: 1234,
				},
			}
			t.Run("and is a normal query", func(t *testing.T) {
				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, len(queries), 1)
				require.Equal(t, queries[0].RefID, "A")
				require.Equal(t, queries[0].ApiURL, "metrics/server/exceptions")
				require.Equal(t, queries[0].Target, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				require.Equal(t, len(queries[0].Params), 3)
				require.Equal(t, queries[0].Params["timespan"][0], "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
				require.Equal(t, queries[0].Params["aggregation"][0], "Average")
				require.Equal(t, queries[0].Params["interval"][0], "PT1M")
				require.Equal(t, queries[0].Alias, "testalias")
			})

			// With timeGrain "auto", the grain is derived from the interval.
			t.Run("and has a time grain set to auto", func(t *testing.T) {
				tsdbQuery[0].JSON = []byte(`{
					"appInsights": {
						"rawQuery":    false,
						"timeGrain":   "auto",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights"
					}
				}`)
				var err error
				tsdbQuery[0].Interval, err = time.ParseDuration("400s")
				require.NoError(t, err)

				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, queries[0].Params["interval"][0], "PT15M")
			})

			// An empty timeGrain takes the same auto fallback path.
			t.Run("and has an empty time grain", func(t *testing.T) {
				tsdbQuery[0].JSON = []byte(`{
					"appInsights": {
						"rawQuery":    false,
						"timeGrain":   "",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights"
					}
				}`)
				tsdbQuery[0].Interval, _ = time.ParseDuration("400s")

				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, queries[0].Params["interval"][0], "PT15M")
			})

			t.Run("and has a time grain set to auto and the metric has a limited list of allowed time grains", func(t *testing.T) {
				tsdbQuery[0].JSON = []byte(`{
					"appInsights": {
						"rawQuery":    false,
						"timeGrain":   "auto",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
						"allowedTimeGrainsMs": [60000, 300000]
					}
				}`)
				tsdbQuery[0].Interval, _ = time.ParseDuration("400s")

				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, queries[0].Params["interval"][0], "PT5M")
			})

			// dimension + dimensionFilter produce segment and filter params.
			t.Run("and has a dimension filter", func(t *testing.T) {
				tsdbQuery[0].JSON = []byte(`{
					"appInsights": {
						"rawQuery":    false,
						"timeGrain":   "PT1M",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
						"dimension":   "blob",
						"dimensionFilter": "blob eq '*'"
					}
				}`)

				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, queries[0].Target, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				require.Equal(t, queries[0].Params["filter"][0], "blob eq '*'")
			})

			// "None" dimension is treated as no segmentation.
			t.Run("and has a dimension filter set to None", func(t *testing.T) {
				tsdbQuery[0].JSON = []byte(`{
					"appInsights": {
						"rawQuery":    false,
						"timeGrain":   "PT1M",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
						"dimension":   "None"
					}
				}`)

				queries, err := datasource.buildQueries(tsdbQuery)
				require.NoError(t, err)

				require.Equal(t, queries[0].Target, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
			})
		})
	})
}
||||
|
||||
func TestInsightsDimensionsUnmarshalJSON(t *testing.T) { |
||||
a := []byte(`"foo"`) |
||||
b := []byte(`["foo"]`) |
||||
c := []byte(`["none"]`) |
||||
d := []byte(`["None"]`) |
||||
e := []byte("null") |
||||
f := []byte(`""`) |
||||
g := []byte(`"none"`) |
||||
|
||||
var as InsightsDimensions |
||||
var bs InsightsDimensions |
||||
err := json.Unmarshal(a, &as) |
||||
|
||||
require.NoError(t, err) |
||||
require.Equal(t, []string{"foo"}, []string(as)) |
||||
|
||||
err = json.Unmarshal(b, &bs) |
||||
require.NoError(t, err) |
||||
|
||||
require.Equal(t, []string{"foo"}, []string(bs)) |
||||
|
||||
var cs InsightsDimensions |
||||
err = json.Unmarshal(c, &cs) |
||||
require.NoError(t, err) |
||||
require.Empty(t, cs) |
||||
|
||||
var ds InsightsDimensions |
||||
err = json.Unmarshal(d, &ds) |
||||
require.NoError(t, err) |
||||
require.Empty(t, ds) |
||||
|
||||
var es InsightsDimensions |
||||
err = json.Unmarshal(e, &es) |
||||
require.NoError(t, err) |
||||
require.Empty(t, es) |
||||
|
||||
var fs InsightsDimensions |
||||
err = json.Unmarshal(f, &fs) |
||||
require.NoError(t, err) |
||||
require.Empty(t, fs) |
||||
|
||||
var gs InsightsDimensions |
||||
err = json.Unmarshal(g, &gs) |
||||
require.NoError(t, err) |
||||
require.Empty(t, gs) |
||||
} |
||||
|
||||
func TestAppInsightsCreateRequest(t *testing.T) { |
||||
ctx := context.Background() |
||||
url := "http://ds" |
||||
dsInfo := types.DatasourceInfo{ |
||||
Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"}, |
||||
DecryptedSecureJSONData: map[string]string{ |
||||
"appInsightsApiKey": "key", |
||||
}, |
||||
} |
||||
|
||||
tests := []struct { |
||||
name string |
||||
expectedURL string |
||||
Err require.ErrorAssertionFunc |
||||
}{ |
||||
{ |
||||
name: "creates a request", |
||||
expectedURL: "http://ds/v1/apps/foo", |
||||
Err: require.NoError, |
||||
}, |
||||
} |
||||
|
||||
for _, tt := range tests { |
||||
t.Run(tt.name, func(t *testing.T) { |
||||
ds := ApplicationInsightsDatasource{} |
||||
req, err := ds.createRequest(ctx, dsInfo, url) |
||||
tt.Err(t, err) |
||||
if req.URL.String() != tt.expectedURL { |
||||
t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String()) |
||||
} |
||||
}) |
||||
} |
||||
} |
@ -1,314 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||
) |
||||
|
||||
// InsightsMetricsResultToFrame converts a MetricsResult (an Application Insights metrics query response) to a dataframe.
// Due to the dynamic nature of the MetricsResult object, the name of the metric, aggregation,
// and requested dimensions are used to determine the expected shape of the object.
// This builds all series into a single data.Frame with one time index (a wide formatted time series frame).
func InsightsMetricsResultToFrame(mr MetricsResult, metric, agg string, dimensions []string) (*data.Frame, error) {
	dimLen := len(dimensions)

	// The Response has both Start and End times, so we name the column "StartTime".
	frame := data.NewFrame("", data.NewField("StartTime", nil, []time.Time{}))

	fieldIdxMap := map[string]int{} // a map of a string representation of the labels to the Field index in the frame.

	rowCounter := 0 // row in the resulting frame

	if mr.Value == nil { // never seen this response, but to ensure there is no panic
		return nil, fmt.Errorf("unexpected nil response or response value in metrics result")
	}

	for _, seg := range *mr.Value.Segments { // each top level segment in the response shares timestamps.
		frame.Extend(1)
		frame.Set(0, rowCounter, seg.Start) // field 0 is the time field
		labels := data.Labels{}

		// handleLeafSegment is for the leaf MetricsSegmentInfo nodes in the response.
		// A leaf node contains an aggregated value, and when there are multiple dimensions, a label key/value pair.
		handleLeafSegment := func(s MetricsSegmentInfo) error {
			// since this is a dynamic response, everything we are interested in here from JSON
			// is Marshalled (mapped) into the AdditionalProperties property.
			v, err := valFromLeafAP(s.AdditionalProperties, metric, agg)
			if err != nil {
				return err
			}

			if dimLen != 0 { // when there are dimensions, the final dimension is in this inner segment.
				dimension := dimensions[dimLen-1]
				dimVal, err := dimValueFromAP(s.AdditionalProperties, dimension)
				if err != nil {
					return err
				}
				labels[dimension] = dimVal
			}

			if _, ok := fieldIdxMap[labels.String()]; !ok {
				// When we find a new combination of labels for the metric, a new Field is appended.
				frame.Fields = append(frame.Fields, data.NewField(metric, labels.Copy(), make([]*float64, rowCounter+1)))
				fieldIdxMap[labels.String()] = len(frame.Fields) - 1
			}

			frame.Set(fieldIdxMap[labels.String()], rowCounter, v)

			return nil
		}

		// Simple case with no segments/dimensions
		if dimLen == 0 {
			if err := handleLeafSegment(seg); err != nil {
				return nil, err
			}
			rowCounter++
			continue
		}

		// Multiple dimension case
		var traverse func(segments *[]MetricsSegmentInfo, depth int) error

		// traverse walks segments collecting dimensions into labels until leaf segments are
		// reached, and then handleInnerSegment is called. The final k/v label pair is
		// in the leaf segment.
		// A non-recursive implementation would probably be better.
		traverse = func(segments *[]MetricsSegmentInfo, depth int) error {
			if segments == nil {
				return nil
			}
			for _, seg := range *segments {
				if seg.Segments == nil {
					// No deeper segments: this is a leaf carrying the value.
					if err := handleLeafSegment(seg); err != nil {
						return err
					}
					continue
				}
				dimension := dimensions[depth]
				dimVal, err := dimValueFromAP(seg.AdditionalProperties, dimension)
				if err != nil {
					return err
				}
				labels[dimension] = dimVal
				if err := traverse(seg.Segments, depth+1); err != nil {
					return err
				}
			}
			return nil
		}

		if err := traverse(seg.Segments, 0); err != nil {
			return nil, err
		}
		rowCounter++
	}

	if len(frame.Fields) == 1 { // No data, only a time column, no sort
		return frame, nil
	}

	// Sort value fields by their dimension labels for stable output ordering.
	if err := data.SortWideFrameFields(frame, dimensions...); err != nil {
		return nil, err
	}

	return frame, nil
}
||||
|
||||
// valFromLeafAP extracts value for the given metric and aggregation (agg)
// from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function. A present but non-float value yields a nil pointer with no error.
func valFromLeafAP(ap map[string]interface{}, metric, agg string) (*float64, error) {
	if ap == nil {
		return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
	}

	rawMetric, found := ap[metric]
	if !found {
		return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
	}

	aggMap, isMap := rawMetric.(map[string]interface{})
	if !isMap {
		return nil, fmt.Errorf("unexpected type for additional properties not found in leaf segment, want map[string]interface{}, but got %T", rawMetric)
	}

	rawVal, hasAgg := aggMap[agg]
	if !hasAgg {
		return nil, fmt.Errorf("expected value for aggregation %v not found in leaf segment", agg)
	}

	var result *float64
	if f, isFloat := rawVal.(float64); isFloat {
		result = &f
	}
	return result, nil
}
||||
|
||||
// dimValueFromAP fetches the value as a string for the corresponding dimension from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function. It errors when the key is missing or its value is not a string.
func dimValueFromAP(ap map[string]interface{}, dimension string) (string, error) {
	rawDimValue, ok := ap[dimension]
	if !ok {
		return "", fmt.Errorf("expected dimension key %v not found in response", dimension)
	}
	dimValue, ok := rawDimValue.(string)
	if !ok {
		// BUG FIX: the message previously printed dimValue, which is always
		// the zero value ("") after a failed type assertion; report the
		// actual raw value instead.
		return "", fmt.Errorf("unexpected non-string value for the value for dimension %v, got type %T with a value of %v", dimension, rawDimValue, rawDimValue)
	}
	return dimValue, nil
}
||||
|
||||
// MetricsResult a metric result.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights.
type MetricsResult struct {
	// Value holds the (possibly segmented) metric data; nil when absent.
	Value *MetricsResultInfo `json:"value,omitempty"`
}
||||
|
||||
// MetricsResultInfo a metric result data.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsResultInfo struct {
	// AdditionalProperties - Unmatched properties from the message are deserialized this collection
	AdditionalProperties map[string]interface{} `json:""`
	// Start - Start time of the metric.
	Start time.Time `json:"start,omitempty"`
	// End - Start time of the metric.
	End time.Time `json:"end,omitempty"`
	// Interval - The interval used to segment the metric data.
	Interval *string `json:"interval,omitempty"`
	// Segments - Segmented metric data (if segmented).
	Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
||||
|
||||
// MetricsSegmentInfo is a metric segment.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsSegmentInfo struct {
	// AdditionalProperties - Unmatched properties from the message are deserialized this collection
	AdditionalProperties map[string]interface{} `json:""`
	// Start - Start time of the metric segment (only when an interval was specified).
	Start time.Time `json:"start,omitempty"`
	// End - Start time of the metric segment (only when an interval was specified).
	End time.Time `json:"end,omitempty"`
	// Segments - Segmented metric data (if further segmented).
	Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
||||
|
||||
// UnmarshalJSON is the custom unmarshaler for MetricsSegmentInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
// Known keys (start, end, segments) are decoded into their typed fields;
// every other key is collected into the AdditionalProperties map.
func (mri *MetricsSegmentInfo) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		default:
			// Unrecognized key: stash the decoded value in AdditionalProperties.
			if v != nil {
				var additionalProperties interface{}
				err = json.Unmarshal(*v, &additionalProperties)
				if err != nil {
					return err
				}
				if mri.AdditionalProperties == nil {
					mri.AdditionalProperties = make(map[string]interface{})
				}
				mri.AdditionalProperties[k] = additionalProperties
			}
		case "start":
			if v != nil {
				var start time.Time
				err = json.Unmarshal(*v, &start)
				if err != nil {
					return err
				}
				mri.Start = start
			}
		case "end":
			if v != nil {
				var end time.Time
				err = json.Unmarshal(*v, &end)
				if err != nil {
					return err
				}
				mri.End = end
			}
		case "segments":
			if v != nil {
				var segments []MetricsSegmentInfo
				err = json.Unmarshal(*v, &segments)
				if err != nil {
					return err
				}
				mri.Segments = &segments
			}
		}
	}

	return nil
}
||||
|
||||
// UnmarshalJSON is the custom unmarshaler for MetricsResultInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
// Known keys (start, end, interval, segments) are decoded into their typed
// fields; every other key is collected into the AdditionalProperties map.
func (mri *MetricsResultInfo) UnmarshalJSON(body []byte) error {
	var m map[string]*json.RawMessage
	err := json.Unmarshal(body, &m)
	if err != nil {
		return err
	}
	for k, v := range m {
		switch k {
		default:
			// Unrecognized key: stash the decoded value in AdditionalProperties.
			if v != nil {
				var additionalProperties interface{}
				err = json.Unmarshal(*v, &additionalProperties)
				if err != nil {
					return err
				}
				if mri.AdditionalProperties == nil {
					mri.AdditionalProperties = make(map[string]interface{})
				}
				mri.AdditionalProperties[k] = additionalProperties
			}
		case "start":
			if v != nil {
				var start time.Time
				err = json.Unmarshal(*v, &start)
				if err != nil {
					return err
				}
				mri.Start = start
			}
		case "end":
			if v != nil {
				var end time.Time
				err = json.Unmarshal(*v, &end)
				if err != nil {
					return err
				}
				mri.End = end
			}
		case "interval":
			if v != nil {
				var interval string
				err = json.Unmarshal(*v, &interval)
				if err != nil {
					return err
				}
				mri.Interval = &interval
			}
		case "segments":
			if v != nil {
				var segments []MetricsSegmentInfo
				err = json.Unmarshal(*v, &segments)
				if err != nil {
					return err
				}
				mri.Segments = &segments
			}
		}
	}

	return nil
}
@ -1,192 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"os" |
||||
"path/filepath" |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/google/go-cmp/cmp" |
||||
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||
"github.com/stretchr/testify/require" |
||||
"github.com/xorcare/pointer" |
||||
) |
||||
|
||||
func TestInsightsMetricsResultToFrame(t *testing.T) { |
||||
tests := []struct { |
||||
name string |
||||
testFile string |
||||
metric string |
||||
alias string |
||||
agg string |
||||
dimensions []string |
||||
expectedFrame func() *data.Frame |
||||
}{ |
||||
{ |
||||
name: "single series", |
||||
testFile: "applicationinsights/4-application-insights-response-metrics-no-segment.json", |
||||
metric: "value", |
||||
agg: "avg", |
||||
expectedFrame: func() *data.Frame { |
||||
frame := data.NewFrame("", |
||||
data.NewField("StartTime", nil, []time.Time{ |
||||
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC), |
||||
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC), |
||||
}), |
||||
data.NewField("value", nil, []*float64{ |
||||
pointer.Float64(1), |
||||
pointer.Float64(2), |
||||
}), |
||||
) |
||||
return frame |
||||
}, |
||||
}, |
||||
{ |
||||
name: "empty response", |
||||
testFile: "applicationinsights/5-application-insights-empty-response.json", |
||||
metric: "value", |
||||
agg: "avg", |
||||
expectedFrame: func() *data.Frame { |
||||
frame := data.NewFrame("", data.NewField("StartTime", nil, []time.Time{})) |
||||
return frame |
||||
}, |
||||
}, |
||||
{ |
||||
name: "segmented series", |
||||
testFile: "applicationinsights/4-application-insights-response-metrics-segmented.json", |
||||
metric: "value", |
||||
agg: "avg", |
||||
dimensions: []string{"blob"}, |
||||
expectedFrame: func() *data.Frame { |
||||
frame := data.NewFrame("", |
||||
data.NewField("StartTime", nil, []time.Time{ |
||||
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC), |
||||
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC), |
||||
}), |
||||
data.NewField("value", data.Labels{"blob": "a"}, []*float64{ |
||||
pointer.Float64(1), |
||||
pointer.Float64(2), |
||||
}), |
||||
data.NewField("value", data.Labels{"blob": "b"}, []*float64{ |
||||
pointer.Float64(3), |
||||
pointer.Float64(4), |
||||
}), |
||||
) |
||||
return frame |
||||
}, |
||||
}, |
||||
{ |
||||
name: "multi segmented series", |
||||
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json", |
||||
metric: "traces/count", |
||||
agg: "sum", |
||||
dimensions: []string{"client/countryOrRegion", "client/city"}, |
||||
expectedFrame: func() *data.Frame { |
||||
frame := data.NewFrame("", |
||||
data.NewField("StartTime", nil, []time.Time{ |
||||
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC), |
||||
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(1), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(11), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(3), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{ |
||||
pointer.Float64(2), |
||||
pointer.Float64(1), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{ |
||||
pointer.Float64(2), |
||||
nil, |
||||
}), |
||||
) |
||||
|
||||
return frame |
||||
}, |
||||
}, |
||||
{ |
||||
name: "segmented series with alias", |
||||
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json", |
||||
metric: "traces/count", |
||||
alias: "{{ metric }}: Country,City: {{ client/countryOrRegion }},{{ client/city }}", |
||||
agg: "sum", |
||||
dimensions: []string{"client/countryOrRegion", "client/city"}, |
||||
expectedFrame: func() *data.Frame { |
||||
frame := data.NewFrame("", |
||||
data.NewField("StartTime", nil, []time.Time{ |
||||
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC), |
||||
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC), |
||||
}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(1), |
||||
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: Japan,Tokyo"}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(11), |
||||
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,"}), |
||||
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{ |
||||
nil, |
||||
pointer.Float64(3), |
||||
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Chicago"}), |
||||
|
||||
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{ |
||||
pointer.Float64(2), |
||||
pointer.Float64(1), |
||||
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Des Moines"}), |
||||
|
||||
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{ |
||||
pointer.Float64(2), |
||||
nil, |
||||
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Washington"}), |
||||
) |
||||
|
||||
return frame |
||||
}, |
||||
}, |
||||
} |
||||
for _, tt := range tests { |
||||
t.Run(tt.name, func(t *testing.T) { |
||||
res := loadInsightsMetricsResponse(t, tt.testFile) |
||||
|
||||
frame, err := InsightsMetricsResultToFrame(res, tt.metric, tt.agg, tt.dimensions) |
||||
require.NoError(t, err) |
||||
|
||||
applyInsightsMetricAlias(frame, tt.alias) |
||||
|
||||
if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" { |
||||
t.Errorf("Result mismatch (-want +got):\n%s", diff) |
||||
} |
||||
}) |
||||
} |
||||
} |
||||
|
||||
func loadInsightsMetricsResponse(t *testing.T, name string) MetricsResult { |
||||
t.Helper() |
||||
|
||||
path := filepath.Join("../testdata", name) |
||||
// Ignore gosec warning G304 since it's a test
|
||||
// nolint:gosec
|
||||
f, err := os.Open(path) |
||||
require.NoError(t, err) |
||||
defer func() { |
||||
err := f.Close() |
||||
require.NoError(t, err) |
||||
}() |
||||
|
||||
d := json.NewDecoder(f) |
||||
var mr MetricsResult |
||||
err = d.Decode(&mr) |
||||
require.NoError(t, err) |
||||
|
||||
return mr |
||||
} |
@ -1,20 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"net/http" |
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" |
||||
) |
||||
|
||||
func GetAppInsightsMiddleware(url, appInsightsApiKey string) httpclient.Middleware { |
||||
if appInsightsApiKey != "" && url == AzAppInsights.URL || url == AzChinaAppInsights.URL { |
||||
// Inject API-Key for AppInsights
|
||||
return httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper { |
||||
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) { |
||||
req.Header.Set("X-API-Key", appInsightsApiKey) |
||||
return next.RoundTrip(req) |
||||
}) |
||||
}) |
||||
} |
||||
return nil |
||||
} |
@ -1,190 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"bytes" |
||||
"context" |
||||
"encoding/json" |
||||
"fmt" |
||||
"io/ioutil" |
||||
"net/http" |
||||
"net/url" |
||||
"path" |
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend" |
||||
"github.com/grafana/grafana-plugin-sdk-go/data" |
||||
"go.opentelemetry.io/otel/attribute" |
||||
|
||||
"github.com/grafana/grafana/pkg/infra/tracing" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros" |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types" |
||||
"github.com/grafana/grafana/pkg/util/errutil" |
||||
) |
||||
|
||||
// InsightsAnalyticsDatasource executes Application Insights Analytics (Kusto)
// queries and proxies resource requests through the configured service proxy.
type InsightsAnalyticsDatasource struct {
	// Proxy forwards resource calls to the Application Insights API.
	Proxy types.ServiceProxy
}

// InsightsAnalyticsQuery is the model for a single Insights Analytics query:
// the raw and macro-interpolated query text plus the pre-encoded request
// parameters used to call the API and parse the response.
type InsightsAnalyticsQuery struct {
	// RefID identifies the query within a batch request.
	RefID string

	// RawQuery is the query text as sent by the frontend.
	RawQuery string
	// InterpolatedQuery is RawQuery after Kusto macro expansion.
	InterpolatedQuery string

	// ResultFormat controls table vs. time-series conversion of the response.
	ResultFormat string

	// Params holds the URL query parameters (the "query" parameter).
	Params url.Values
	// Target is the encoded form of Params, used for tracing/labels.
	Target string
}

// ResourceRequest proxies an arbitrary resource call to the Application
// Insights API using the provided HTTP client.
func (e *InsightsAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
	e.Proxy.Do(rw, req, cli)
}
||||
|
||||
func (e *InsightsAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, |
||||
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, |
||||
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { |
||||
result := backend.NewQueryDataResponse() |
||||
|
||||
queries, err := e.buildQueries(originalQueries, dsInfo) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
for _, query := range queries { |
||||
result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url, tracer) |
||||
} |
||||
|
||||
return result, nil |
||||
} |
||||
|
||||
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*InsightsAnalyticsQuery, error) { |
||||
iaQueries := []*InsightsAnalyticsQuery{} |
||||
|
||||
for _, query := range queries { |
||||
qm := InsightsAnalyticsQuery{} |
||||
queryJSONModel := insightsAnalyticsJSONQuery{} |
||||
err := json.Unmarshal(query.JSON, &queryJSONModel) |
||||
if err != nil { |
||||
return nil, fmt.Errorf("failed to decode the Azure Application Insights Analytics query object from JSON: %w", err) |
||||
} |
||||
|
||||
qm.RawQuery = queryJSONModel.InsightsAnalytics.Query |
||||
qm.ResultFormat = queryJSONModel.InsightsAnalytics.ResultFormat |
||||
qm.RefID = query.RefID |
||||
|
||||
if qm.RawQuery == "" { |
||||
return nil, fmt.Errorf("query is missing query string property") |
||||
} |
||||
|
||||
qm.InterpolatedQuery, err = macros.KqlInterpolate(query, dsInfo, qm.RawQuery) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
qm.Params = url.Values{} |
||||
qm.Params.Add("query", qm.InterpolatedQuery) |
||||
|
||||
qm.Target = qm.Params.Encode() |
||||
iaQueries = append(iaQueries, &qm) |
||||
} |
||||
|
||||
return iaQueries, nil |
||||
} |
||||
|
||||
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client, |
||||
url string, tracer tracing.Tracer) backend.DataResponse { |
||||
dataResponse := backend.DataResponse{} |
||||
|
||||
dataResponseError := func(err error) backend.DataResponse { |
||||
dataResponse.Error = err |
||||
return dataResponse |
||||
} |
||||
|
||||
req, err := e.createRequest(ctx, dsInfo, url) |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
req.URL.Path = path.Join(req.URL.Path, "query") |
||||
req.URL.RawQuery = query.Params.Encode() |
||||
|
||||
ctx, span := tracer.Start(ctx, "application insights analytics query") |
||||
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target)) |
||||
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID)) |
||||
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID)) |
||||
|
||||
defer span.End() |
||||
tracer.Inject(ctx, req.Header, span) |
||||
|
||||
if err != nil { |
||||
azlog.Warn("failed to inject global tracer") |
||||
} |
||||
|
||||
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String()) |
||||
res, err := client.Do(req) |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
|
||||
body, err := ioutil.ReadAll(res.Body) |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
defer func() { |
||||
if err := res.Body.Close(); err != nil { |
||||
azlog.Warn("Failed to close response body", "err", err) |
||||
} |
||||
}() |
||||
|
||||
if res.StatusCode/100 != 2 { |
||||
azlog.Debug("Request failed", "status", res.Status, "body", string(body)) |
||||
return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body)) |
||||
} |
||||
var logResponse loganalytics.AzureLogAnalyticsResponse |
||||
d := json.NewDecoder(bytes.NewReader(body)) |
||||
d.UseNumber() |
||||
err = d.Decode(&logResponse) |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
|
||||
t, err := logResponse.GetPrimaryResultTable() |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
|
||||
frame, err := loganalytics.ResponseTableToFrame(t) |
||||
if err != nil { |
||||
return dataResponseError(err) |
||||
} |
||||
|
||||
if query.ResultFormat == types.TimeSeries { |
||||
tsSchema := frame.TimeSeriesSchema() |
||||
if tsSchema.Type == data.TimeSeriesTypeLong { |
||||
wideFrame, err := data.LongToWide(frame, nil) |
||||
if err == nil { |
||||
frame = wideFrame |
||||
} else { |
||||
frame.AppendNotices(data.Notice{ |
||||
Severity: data.NoticeSeverityWarning, |
||||
Text: "could not convert frame to time series, returning raw table: " + err.Error(), |
||||
}) |
||||
} |
||||
} |
||||
} |
||||
dataResponse.Frames = data.Frames{frame} |
||||
|
||||
return dataResponse |
||||
} |
||||
|
||||
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) { |
||||
appInsightsAppID := dsInfo.Settings.AppInsightsAppId |
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) |
||||
if err != nil { |
||||
azlog.Debug("Failed to create request", "error", err) |
||||
return nil, errutil.Wrap("Failed to create request", err) |
||||
} |
||||
req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID) |
||||
return req, nil |
||||
} |
@ -1,45 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"context" |
||||
"net/http" |
||||
"testing" |
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types" |
||||
"github.com/stretchr/testify/require" |
||||
) |
||||
|
||||
// TestInsightsAnalyticsCreateRequest verifies that createRequest targets the
// /v1/apps/{appId} endpoint of the datasource URL.
func TestInsightsAnalyticsCreateRequest(t *testing.T) {
	ctx := context.Background()
	url := "http://ds"
	dsInfo := types.DatasourceInfo{
		Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
		DecryptedSecureJSONData: map[string]string{
			"appInsightsApiKey": "key",
		},
	}

	// Table-driven for symmetry with sibling tests; currently a single case.
	tests := []struct {
		name            string
		expectedURL     string
		expectedHeaders http.Header // NOTE(review): declared but never asserted — confirm whether header checks were intended
		Err             require.ErrorAssertionFunc
	}{
		{
			name:        "creates a request",
			expectedURL: "http://ds/v1/apps/foo",
			Err:         require.NoError,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ds := InsightsAnalyticsDatasource{}
			req, err := ds.createRequest(ctx, dsInfo, url)
			tt.Err(t, err)
			if req.URL.String() != tt.expectedURL {
				t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
			}
		})
	}
}
@ -1,23 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types" |
||||
) |
||||
|
||||
// Azure cloud query types
const (
	// AppInsights identifies classic Application Insights metric queries.
	AppInsights = "Application Insights"
	// InsightsAnalytics identifies raw Insights Analytics (Kusto) queries.
	InsightsAnalytics = "Insights Analytics"
)

// AzAppInsights is the route for the public Azure Application Insights API.
var AzAppInsights = types.AzRoute{
	URL:     "https://api.applicationinsights.io",
	Scopes:  []string{},
	Headers: map[string]string{"x-ms-app": "Grafana"},
}

// AzChinaAppInsights is the route for the Azure China Application Insights API.
var AzChinaAppInsights = types.AzRoute{
	URL:     "https://api.applicationinsights.azure.cn",
	Scopes:  []string{},
	Headers: map[string]string{"x-ms-app": "Grafana"},
}
@ -1,72 +0,0 @@ |
||||
package deprecated |
||||
|
||||
import ( |
||||
"encoding/json" |
||||
"fmt" |
||||
"strings" |
||||
) |
||||
|
||||
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
	AppInsights struct {
		// Aggregation is the metric aggregation to request (e.g. sum, avg).
		Aggregation string `json:"aggregation"`
		// Alias is the user-supplied display-name template for result series.
		Alias string `json:"alias"`
		// AllowedTimeGrainsMs lists the selectable time grains in milliseconds.
		AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
		// Dimensions accepts either a single string or an array of strings
		// (see InsightsDimensions for the legacy-format handling).
		Dimensions InsightsDimensions `json:"dimension"`
		// DimensionFilter is an optional filter expression over the dimensions.
		DimensionFilter string `json:"dimensionFilter"`
		// MetricName is the Application Insights metric to query.
		MetricName string `json:"metricName"`
		// TimeGrain is the requested bucket size (e.g. "PT1M", "auto").
		TimeGrain string `json:"timeGrain"`
	} `json:"appInsights"`
	Raw *bool `json:"raw"`
}
||||
|
||||
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format which is updated
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string

// UnmarshalJSON fulfills the json.Unmarshaler interface type.
// Accepted forms: empty input, JSON null, or the literal string "none" (any
// case) decode to an empty slice; a JSON array of strings decodes to the
// slice with "none"/"None" entries dropped; a non-empty JSON string decodes
// to a one-element slice. Anything else is an error.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
	*s = InsightsDimensions{}

	raw := string(data)
	if raw == "null" || raw == "" {
		return nil
	}
	if strings.EqualFold(raw, `"none"`) {
		return nil
	}

	if data[0] == '[' {
		var items []string
		if err := json.Unmarshal(data, &items); err != nil {
			return err
		}
		// Drop legacy "none" placeholder entries from the array form.
		kept := []string{}
		for _, item := range items {
			if item == "none" || item == "None" {
				continue
			}
			kept = append(kept, item)
		}
		*s = InsightsDimensions(kept)
		return nil
	}

	var single string
	if err := json.Unmarshal(data, &single); err != nil {
		return fmt.Errorf("could not parse %q as string or array: %w", raw, err)
	}
	if single != "" {
		*s = InsightsDimensions{single}
	}
	return nil
}
||||
|
||||
// insightsAnalyticsJSONQuery is the frontend JSON query model for an Azure
// Application Insights Analytics (Kusto) query.
type insightsAnalyticsJSONQuery struct {
	InsightsAnalytics struct {
		// Query is the raw Kusto query text.
		Query string `json:"query"`
		// ResultFormat selects table vs. time-series output.
		ResultFormat string `json:"resultFormat"`
	} `json:"insightsAnalytics"`
}
@ -1,463 +0,0 @@ |
||||
import { lastValueFrom, of } from 'rxjs'; |
||||
|
||||
import { DataFrame, getFrameDisplayName, toUtc } from '@grafana/data'; |
||||
import { setBackendSrv } from '@grafana/runtime'; |
||||
import { backendSrv } from 'app/core/services/backend_srv'; |
||||
import { TemplateSrv } from 'app/features/templating/template_srv'; |
||||
|
||||
import AppInsightsDatasource from './app_insights_datasource'; |
||||
|
||||
const templateSrv = new TemplateSrv(); |
||||
|
||||
jest.mock('app/core/services/backend_srv'); |
||||
jest.mock('@grafana/runtime', () => ({ |
||||
...(jest.requireActual('@grafana/runtime') as unknown as object), |
||||
getBackendSrv: () => backendSrv, |
||||
getTemplateSrv: () => templateSrv, |
||||
})); |
||||
|
||||
describe('AppInsightsDatasource', () => { |
||||
const fetchMock = jest.spyOn(backendSrv, 'fetch'); |
||||
|
||||
const ctx: any = {}; |
||||
|
||||
beforeEach(() => { |
||||
jest.clearAllMocks(); |
||||
setBackendSrv(backendSrv); |
||||
|
||||
ctx.instanceSettings = { |
||||
jsonData: { appInsightsAppId: '3ad4400f-ea7d-465d-a8fb-43fb20555d85' }, |
||||
url: 'http://appinsightsapi', |
||||
}; |
||||
|
||||
ctx.ds = new AppInsightsDatasource(ctx.instanceSettings); |
||||
}); |
||||
|
||||
describe('When performing testDatasource', () => { |
||||
describe('and a list of metrics is returned', () => { |
||||
const response = { |
||||
metrics: { |
||||
'requests/count': { |
||||
displayName: 'Server requests', |
||||
defaultAggregation: 'sum', |
||||
}, |
||||
'requests/duration': { |
||||
displayName: 'Server requests', |
||||
defaultAggregation: 'sum', |
||||
}, |
||||
}, |
||||
dimensions: { |
||||
'request/source': { |
||||
displayName: 'Request source', |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
ctx.ds.getResource = jest.fn().mockImplementation(() => { |
||||
return Promise.resolve(response); |
||||
}); |
||||
}); |
||||
|
||||
it('should return success status', () => { |
||||
return ctx.ds.testDatasource().then((results: any) => { |
||||
expect(results.status).toEqual('success'); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('and a PathNotFoundError error is returned', () => { |
||||
const error = { |
||||
data: { |
||||
error: { |
||||
code: 'PathNotFoundError', |
||||
message: `An error message.`, |
||||
}, |
||||
}, |
||||
status: 404, |
||||
statusText: 'Not Found', |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
ctx.ds.getResource = jest.fn().mockImplementation(() => { |
||||
return Promise.reject(error); |
||||
}); |
||||
}); |
||||
|
||||
it.skip('should return error status and a detailed error message', () => { |
||||
return ctx.ds.testDatasource().then((results: any) => { |
||||
expect(results.status).toEqual('error'); |
||||
expect(results.message).toEqual( |
||||
'1. Application Insights: Not Found: Invalid Application Id for Application Insights service. ' |
||||
); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('and an error is returned', () => { |
||||
const error = { |
||||
data: { |
||||
error: { |
||||
code: 'SomeOtherError', |
||||
message: `An error message.`, |
||||
}, |
||||
}, |
||||
status: 500, |
||||
statusText: 'Error', |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
ctx.ds.getResource = jest.fn().mockImplementation(() => { |
||||
return Promise.reject(error); |
||||
}); |
||||
}); |
||||
|
||||
it.skip('should return error status and a detailed error message', () => { |
||||
return ctx.ds.testDatasource().then((results: any) => { |
||||
expect(results.status).toEqual('error'); |
||||
expect(results.message).toEqual('1. Application Insights: Error: SomeOtherError. An error message. '); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('When performing raw query', () => { |
||||
const queryString = |
||||
'metrics ' + |
||||
'| where $__timeFilter(timestamp) ' + |
||||
'| where name == "testMetrics" ' + |
||||
'| summarize max=max(valueMax) by bin(timestamp, $__interval), partition'; |
||||
|
||||
const options = { |
||||
range: { |
||||
from: toUtc('2017-08-22T20:00:00Z'), |
||||
to: toUtc('2017-08-22T23:59:00Z'), |
||||
}, |
||||
targets: [ |
||||
{ |
||||
apiVersion: '2016-09-01', |
||||
refId: 'A', |
||||
queryType: 'Application Insights', |
||||
appInsights: { |
||||
rawQuery: true, |
||||
rawQueryString: queryString, |
||||
timeColumn: 'timestamp', |
||||
valueColumn: 'max', |
||||
segmentColumn: undefined as unknown as string, |
||||
}, |
||||
}, |
||||
], |
||||
}; |
||||
|
||||
describe('with no grouping', () => { |
||||
const response: any = { |
||||
results: { |
||||
A: { |
||||
refId: 'A', |
||||
meta: {}, |
||||
series: [ |
||||
{ |
||||
name: 'PrimaryResult', |
||||
points: [[2.2075, 1558278660000]], |
||||
}, |
||||
], |
||||
tables: null, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
fetchMock.mockImplementation((options: any) => { |
||||
expect(options.url).toContain('/api/ds/query'); |
||||
expect(options.data.queries.length).toBe(1); |
||||
expect(options.data.queries[0].refId).toBe('A'); |
||||
return of({ data: response, status: 200 } as any); |
||||
}); |
||||
}); |
||||
|
||||
it('should return a list of datapoints', () => { |
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => { |
||||
expect(results.data.length).toBe(1); |
||||
const data = results.data[0] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('PrimaryResult'); |
||||
expect(data.fields[0].values.length).toEqual(1); |
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000); |
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('with grouping', () => { |
||||
const response: any = { |
||||
results: { |
||||
A: { |
||||
refId: 'A', |
||||
meta: {}, |
||||
series: [ |
||||
{ |
||||
name: 'paritionA', |
||||
points: [[2.2075, 1558278660000]], |
||||
}, |
||||
], |
||||
tables: null, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
options.targets[0].appInsights.segmentColumn = 'partition'; |
||||
fetchMock.mockImplementation((options: any) => { |
||||
expect(options.url).toContain('/api/ds/query'); |
||||
expect(options.data.queries.length).toBe(1); |
||||
expect(options.data.queries[0].refId).toBe('A'); |
||||
return of({ data: response, status: 200 } as any); |
||||
}); |
||||
}); |
||||
|
||||
it('should return a list of datapoints', () => { |
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => { |
||||
expect(results.data.length).toBe(1); |
||||
const data = results.data[0] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('paritionA'); |
||||
expect(data.fields[0].values.length).toEqual(1); |
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000); |
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('When performing metric query', () => { |
||||
const options = { |
||||
range: { |
||||
from: toUtc('2017-08-22T20:00:00Z'), |
||||
to: toUtc('2017-08-22T23:59:00Z'), |
||||
}, |
||||
targets: [ |
||||
{ |
||||
apiVersion: '2016-09-01', |
||||
refId: 'A', |
||||
queryType: 'Application Insights', |
||||
appInsights: { |
||||
metricName: 'exceptions/server', |
||||
dimension: '', |
||||
timeGrain: 'none', |
||||
}, |
||||
}, |
||||
], |
||||
}; |
||||
|
||||
describe('and with a single value', () => { |
||||
const response: any = { |
||||
results: { |
||||
A: { |
||||
refId: 'A', |
||||
meta: {}, |
||||
series: [ |
||||
{ |
||||
name: 'exceptions/server', |
||||
points: [[2.2075, 1558278660000]], |
||||
}, |
||||
], |
||||
tables: null, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
fetchMock.mockImplementation((options: any) => { |
||||
expect(options.url).toContain('/api/ds/query'); |
||||
expect(options.data.queries.length).toBe(1); |
||||
expect(options.data.queries[0].refId).toBe('A'); |
||||
expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined(); |
||||
expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server'); |
||||
return of({ data: response, status: 200 } as any); |
||||
}); |
||||
}); |
||||
|
||||
it('should return a single datapoint', () => { |
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => { |
||||
expect(results.data.length).toBe(1); |
||||
const data = results.data[0] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server'); |
||||
expect(data.fields[0].values.get(0)).toEqual(1558278660000); |
||||
expect(data.fields[1].values.get(0)).toEqual(2.2075); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('and with an interval group and without a segment group by', () => { |
||||
const response: any = { |
||||
results: { |
||||
A: { |
||||
refId: 'A', |
||||
meta: {}, |
||||
series: [ |
||||
{ |
||||
name: 'exceptions/server', |
||||
points: [ |
||||
[3, 1504108800000], |
||||
[6, 1504112400000], |
||||
], |
||||
}, |
||||
], |
||||
tables: null, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
options.targets[0].appInsights.timeGrain = 'PT30M'; |
||||
fetchMock.mockImplementation((options: any) => { |
||||
expect(options.url).toContain('/api/ds/query'); |
||||
expect(options.data.queries[0].refId).toBe('A'); |
||||
expect(options.data.queries[0].appInsights.query).toBeUndefined(); |
||||
expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server'); |
||||
expect(options.data.queries[0].appInsights.timeGrain).toBe('PT30M'); |
||||
return of({ data: response, status: 200 } as any); |
||||
}); |
||||
}); |
||||
|
||||
it('should return a list of datapoints', () => { |
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => { |
||||
expect(results.data.length).toBe(1); |
||||
const data = results.data[0] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server'); |
||||
expect(data.fields[0].values.length).toEqual(2); |
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000); |
||||
expect(data.fields[1].values.get(0)).toEqual(3); |
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000); |
||||
expect(data.fields[1].values.get(1)).toEqual(6); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('and with a group by', () => { |
||||
const response: any = { |
||||
results: { |
||||
A: { |
||||
refId: 'A', |
||||
meta: {}, |
||||
series: [ |
||||
{ |
||||
name: 'exceptions/server{client/city="Miami"}', |
||||
points: [ |
||||
[10, 1504108800000], |
||||
[20, 1504112400000], |
||||
], |
||||
}, |
||||
{ |
||||
name: 'exceptions/server{client/city="San Antonio"}', |
||||
points: [ |
||||
[1, 1504108800000], |
||||
[2, 1504112400000], |
||||
], |
||||
}, |
||||
], |
||||
tables: null, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
describe('and with no alias specified', () => { |
||||
beforeEach(() => { |
||||
options.targets[0].appInsights.dimension = 'client/city'; |
||||
|
||||
fetchMock.mockImplementation((options: any) => { |
||||
expect(options.url).toContain('/api/ds/query'); |
||||
expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined(); |
||||
expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server'); |
||||
expect([...options.data.queries[0].appInsights.dimension]).toMatchObject(['client/city']); |
||||
return of({ data: response, status: 200 } as any); |
||||
}); |
||||
}); |
||||
|
||||
it('should return a list of datapoints', () => { |
||||
return lastValueFrom(ctx.ds.query(options)).then((results: any) => { |
||||
expect(results.data.length).toBe(2); |
||||
let data = results.data[0] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="Miami"}'); |
||||
expect(data.fields[1].values.length).toEqual(2); |
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000); |
||||
expect(data.fields[1].values.get(0)).toEqual(10); |
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000); |
||||
expect(data.fields[1].values.get(1)).toEqual(20); |
||||
data = results.data[1] as DataFrame; |
||||
expect(getFrameDisplayName(data)).toEqual('exceptions/server{client/city="San Antonio"}'); |
||||
expect(data.fields[1].values.length).toEqual(2); |
||||
expect(data.fields[0].values.get(0)).toEqual(1504108800000); |
||||
expect(data.fields[1].values.get(0)).toEqual(1); |
||||
expect(data.fields[0].values.get(1)).toEqual(1504112400000); |
||||
expect(data.fields[1].values.get(1)).toEqual(2); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('When getting Metric Names', () => { |
||||
const response = { |
||||
metrics: { |
||||
'exceptions/server': {}, |
||||
'requests/count': {}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
ctx.ds.getResource = jest.fn().mockImplementation((path) => { |
||||
expect(path).toContain('/metrics/metadata'); |
||||
return Promise.resolve({ data: response, status: 200 }); |
||||
}); |
||||
}); |
||||
|
||||
it.skip('should return a list of metric names', () => { |
||||
return ctx.ds.getAppInsightsMetricNames().then((results: any) => { |
||||
expect(results.length).toBe(2); |
||||
expect(results[0].text).toBe('exceptions/server'); |
||||
expect(results[0].value).toBe('exceptions/server'); |
||||
expect(results[1].text).toBe('requests/count'); |
||||
expect(results[1].value).toBe('requests/count'); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
describe('When getting Metric Metadata', () => { |
||||
const response = { |
||||
metrics: { |
||||
'exceptions/server': { |
||||
supportedAggregations: ['sum'], |
||||
supportedGroupBy: { |
||||
all: ['client/os', 'client/city', 'client/browser'], |
||||
}, |
||||
defaultAggregation: 'sum', |
||||
}, |
||||
'requests/count': { |
||||
supportedAggregations: ['avg', 'sum', 'total'], |
||||
supportedGroupBy: { |
||||
all: ['client/os', 'client/city', 'client/browser'], |
||||
}, |
||||
defaultAggregation: 'avg', |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
beforeEach(() => { |
||||
ctx.ds.getResource = jest.fn().mockImplementation((path) => { |
||||
expect(path).toContain('/metrics/metadata'); |
||||
return Promise.resolve({ data: response, status: 200 }); |
||||
}); |
||||
}); |
||||
|
||||
it.skip('should return a list of group bys', () => { |
||||
return ctx.ds.getAppInsightsMetricMetadata('requests/count').then((results: any) => { |
||||
expect(results.primaryAggType).toEqual('avg'); |
||||
expect(results.supportedAggTypes).toContain('avg'); |
||||
expect(results.supportedAggTypes).toContain('sum'); |
||||
expect(results.supportedAggTypes).toContain('total'); |
||||
expect(results.supportedGroupBy).toContain('client/os'); |
||||
expect(results.supportedGroupBy).toContain('client/city'); |
||||
expect(results.supportedGroupBy).toContain('client/browser'); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
@ -1,171 +0,0 @@ |
||||
import { isString } from 'lodash'; |
||||
|
||||
import { DataQueryRequest, DataSourceInstanceSettings, ScopedVars } from '@grafana/data'; |
||||
import { DataSourceWithBackend, getTemplateSrv } from '@grafana/runtime'; |
||||
|
||||
import TimegrainConverter from '../../../time_grain_converter'; |
||||
import { |
||||
AzureDataSourceJsonData, |
||||
AzureMonitorQuery, |
||||
DatasourceValidationResult, |
||||
DeprecatedAzureQueryType, |
||||
} from '../../../types'; |
||||
import { routeNames } from '../../../utils/common'; |
||||
|
||||
import ResponseParser from './response_parser'; |
||||
|
||||
export interface LogAnalyticsColumn { |
||||
text: string; |
||||
value: string; |
||||
} |
||||
|
||||
export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMonitorQuery, AzureDataSourceJsonData> { |
||||
resourcePath: string; |
||||
version = 'beta'; |
||||
applicationId: string; |
||||
logAnalyticsColumns: { [key: string]: LogAnalyticsColumn[] } = {}; |
||||
|
||||
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) { |
||||
super(instanceSettings); |
||||
this.applicationId = instanceSettings.jsonData.appInsightsAppId || ''; |
||||
|
||||
this.resourcePath = `${routeNames.appInsights}/${this.version}/apps/${this.applicationId}`; |
||||
} |
||||
|
||||
isConfigured(): boolean { |
||||
return !!this.applicationId && this.applicationId.length > 0; |
||||
} |
||||
|
||||
createRawQueryRequest(item: any, options: DataQueryRequest<AzureMonitorQuery>, target: AzureMonitorQuery) { |
||||
if (item.xaxis && !item.timeColumn) { |
||||
item.timeColumn = item.xaxis; |
||||
} |
||||
|
||||
if (item.yaxis && !item.valueColumn) { |
||||
item.valueColumn = item.yaxis; |
||||
} |
||||
|
||||
if (item.spliton && !item.segmentColumn) { |
||||
item.segmentColumn = item.spliton; |
||||
} |
||||
|
||||
return { |
||||
type: 'timeSeriesQuery', |
||||
raw: false, |
||||
appInsights: { |
||||
rawQuery: true, |
||||
rawQueryString: getTemplateSrv().replace(item.rawQueryString, options.scopedVars), |
||||
timeColumn: item.timeColumn, |
||||
valueColumn: item.valueColumn, |
||||
segmentColumn: item.segmentColumn, |
||||
}, |
||||
}; |
||||
} |
||||
|
||||
applyTemplateVariables(target: AzureMonitorQuery, scopedVars: ScopedVars): AzureMonitorQuery { |
||||
const item = target.appInsights; |
||||
|
||||
if (!item) { |
||||
return target; |
||||
} |
||||
|
||||
const old: any = item; |
||||
// fix for timeGrainUnit which is a deprecated/removed field name
|
||||
if (old.timeGrainCount) { |
||||
item.timeGrain = TimegrainConverter.createISO8601Duration(old.timeGrainCount, item.timeGrainUnit); |
||||
} else if (item.timeGrain && item.timeGrainUnit && item.timeGrain !== 'auto') { |
||||
item.timeGrain = TimegrainConverter.createISO8601Duration(item.timeGrain, item.timeGrainUnit); |
||||
} |
||||
|
||||
// migration for non-standard names
|
||||
if (old.groupBy && !item.dimension) { |
||||
item.dimension = [old.groupBy]; |
||||
} |
||||
if (old.filter && !item.dimensionFilter) { |
||||
item.dimensionFilter = old.filter; |
||||
} |
||||
|
||||
// Migrate single dimension string to array
|
||||
if (isString(item.dimension)) { |
||||
if (item.dimension === 'None') { |
||||
item.dimension = []; |
||||
} else { |
||||
item.dimension = [item.dimension as string]; |
||||
} |
||||
} |
||||
if (!item.dimension) { |
||||
item.dimension = []; |
||||
} |
||||
|
||||
const templateSrv = getTemplateSrv(); |
||||
|
||||
return { |
||||
refId: target.refId, |
||||
queryType: DeprecatedAzureQueryType.ApplicationInsights, |
||||
appInsights: { |
||||
timeGrain: templateSrv.replace((item.timeGrain || '').toString(), scopedVars), |
||||
metricName: templateSrv.replace(item.metricName, scopedVars), |
||||
aggregation: templateSrv.replace(item.aggregation, scopedVars), |
||||
dimension: item.dimension.map((d) => templateSrv.replace(d, scopedVars)), |
||||
dimensionFilter: templateSrv.replace(item.dimensionFilter, scopedVars), |
||||
alias: item.alias, |
||||
}, |
||||
}; |
||||
} |
||||
|
||||
testDatasource(): Promise<DatasourceValidationResult> { |
||||
const path = `${this.resourcePath}/metrics/metadata`; |
||||
return this.getResource(path) |
||||
.then<DatasourceValidationResult>((response: any) => { |
||||
return { |
||||
status: 'success', |
||||
message: 'Successfully queried the Application Insights service.', |
||||
title: 'Success', |
||||
}; |
||||
}) |
||||
.catch((error: any) => { |
||||
let message = 'Application Insights: '; |
||||
message += error.statusText ? error.statusText + ': ' : ''; |
||||
|
||||
if (error.data && error.data.error && error.data.error.code === 'PathNotFoundError') { |
||||
message += 'Invalid Application Id for Application Insights service.'; |
||||
} else if (error.data && error.data.error) { |
||||
message += error.data.error.code + '. ' + error.data.error.message; |
||||
} else { |
||||
message += 'Cannot connect to Application Insights REST API.'; |
||||
} |
||||
|
||||
return { |
||||
status: 'error', |
||||
message: message, |
||||
}; |
||||
}); |
||||
} |
||||
|
||||
getMetricNames() { |
||||
const path = `${this.resourcePath}/metrics/metadata`; |
||||
return this.getResource(path).then(ResponseParser.parseMetricNames); |
||||
} |
||||
|
||||
getMetricMetadata(metricName: string) { |
||||
const path = `${this.resourcePath}/metrics/metadata`; |
||||
return this.getResource(path).then((result: any) => { |
||||
return new ResponseParser(result).parseMetadata(metricName); |
||||
}); |
||||
} |
||||
|
||||
getGroupBys(metricName: string) { |
||||
return this.getMetricMetadata(metricName).then((result: any) => { |
||||
return new ResponseParser(result).parseGroupBys(); |
||||
}); |
||||
} |
||||
|
||||
getQuerySchema() { |
||||
const path = `${this.resourcePath}/query/schema`; |
||||
return this.getResource(path).then((result: any) => { |
||||
const schema = new ResponseParser(result).parseQuerySchema(); |
||||
// console.log(schema);
|
||||
return schema; |
||||
}); |
||||
} |
||||
} |
@ -1,236 +0,0 @@ |
||||
import { concat, filter, find, forEach, indexOf, intersection, isObject, map, without, keys as _keys } from 'lodash'; |
||||
|
||||
import { dateTime } from '@grafana/data'; |
||||
|
||||
export default class ResponseParser { |
||||
constructor(private results: any) {} |
||||
|
||||
parseQueryResult() { |
||||
let data: any = []; |
||||
let columns: any = []; |
||||
for (let i = 0; i < this.results.length; i++) { |
||||
if (this.results[i].query.raw) { |
||||
const xaxis = this.results[i].query.xaxis; |
||||
const yaxises = this.results[i].query.yaxis; |
||||
const spliton = this.results[i].query.spliton; |
||||
columns = this.results[i].result.Tables[0].Columns; |
||||
const rows = this.results[i].result.Tables[0].Rows; |
||||
data = concat(data, this.parseRawQueryResultRow(this.results[i].query, columns, rows, xaxis, yaxises, spliton)); |
||||
} else { |
||||
const value = this.results[i].result.value; |
||||
const alias = this.results[i].query.alias; |
||||
data = concat(data, this.parseQueryResultRow(this.results[i].query, value, alias)); |
||||
} |
||||
} |
||||
return data; |
||||
} |
||||
|
||||
parseRawQueryResultRow(query: any, columns: any, rows: any, xaxis: string, yaxises: string, spliton: string) { |
||||
const data: any[] = []; |
||||
const columnsForDropdown = map(columns, (column) => ({ text: column.ColumnName, value: column.ColumnName })); |
||||
|
||||
const xaxisColumn = columns.findIndex((column: any) => column.ColumnName === xaxis); |
||||
const yaxisesSplit = yaxises.split(','); |
||||
const yaxisColumns: any = {}; |
||||
forEach(yaxisesSplit, (yaxis) => { |
||||
yaxisColumns[yaxis] = columns.findIndex((column: any) => column.ColumnName === yaxis); |
||||
}); |
||||
const splitonColumn = columns.findIndex((column: any) => column.ColumnName === spliton); |
||||
const convertTimestamp = xaxis === 'timestamp'; |
||||
|
||||
forEach(rows, (row) => { |
||||
forEach(yaxisColumns, (yaxisColumn, yaxisName) => { |
||||
const bucket = |
||||
splitonColumn === -1 |
||||
? ResponseParser.findOrCreateBucket(data, yaxisName) |
||||
: ResponseParser.findOrCreateBucket(data, row[splitonColumn]); |
||||
const epoch = convertTimestamp ? ResponseParser.dateTimeToEpoch(row[xaxisColumn]) : row[xaxisColumn]; |
||||
bucket.datapoints.push([row[yaxisColumn], epoch]); |
||||
bucket.refId = query.refId; |
||||
bucket.query = query.query; |
||||
bucket.columnsForDropdown = columnsForDropdown; |
||||
}); |
||||
}); |
||||
|
||||
return data; |
||||
} |
||||
|
||||
parseQueryResultRow(query: any, value: any, alias: string) { |
||||
const data: any[] = []; |
||||
|
||||
if (ResponseParser.isSingleValue(value)) { |
||||
const metricName = ResponseParser.getMetricFieldKey(value); |
||||
const aggField = ResponseParser.getKeyForAggregationField(value[metricName]); |
||||
const epoch = ResponseParser.dateTimeToEpoch(value.end); |
||||
data.push({ |
||||
target: metricName, |
||||
datapoints: [[value[metricName][aggField], epoch]], |
||||
refId: query.refId, |
||||
query: query.query, |
||||
}); |
||||
return data; |
||||
} |
||||
|
||||
const groupedBy = ResponseParser.hasSegmentsField(value.segments[0]); |
||||
if (!groupedBy) { |
||||
const metricName = ResponseParser.getMetricFieldKey(value.segments[0]); |
||||
const dataTarget = ResponseParser.findOrCreateBucket(data, metricName); |
||||
|
||||
for (let i = 0; i < value.segments.length; i++) { |
||||
const epoch = ResponseParser.dateTimeToEpoch(value.segments[i].end); |
||||
const aggField: string = ResponseParser.getKeyForAggregationField(value.segments[i][metricName]); |
||||
|
||||
dataTarget.datapoints.push([value.segments[i][metricName][aggField], epoch]); |
||||
} |
||||
dataTarget.refId = query.refId; |
||||
dataTarget.query = query.query; |
||||
} else { |
||||
for (let i = 0; i < value.segments.length; i++) { |
||||
const epoch = ResponseParser.dateTimeToEpoch(value.segments[i].end); |
||||
|
||||
for (let j = 0; j < value.segments[i].segments.length; j++) { |
||||
const metricName = ResponseParser.getMetricFieldKey(value.segments[i].segments[j]); |
||||
const aggField = ResponseParser.getKeyForAggregationField(value.segments[i].segments[j][metricName]); |
||||
const target = this.getTargetName(value.segments[i].segments[j], alias); |
||||
|
||||
const bucket = ResponseParser.findOrCreateBucket(data, target); |
||||
bucket.datapoints.push([value.segments[i].segments[j][metricName][aggField], epoch]); |
||||
bucket.refId = query.refId; |
||||
bucket.meta = { |
||||
query: query.query, |
||||
}; |
||||
} |
||||
} |
||||
} |
||||
|
||||
return data; |
||||
} |
||||
|
||||
getTargetName(segment: { [x: string]: string }, alias: string) { |
||||
let metric = ''; |
||||
let segmentName = ''; |
||||
let segmentValue = ''; |
||||
for (const prop in segment) { |
||||
if (isObject(segment[prop])) { |
||||
metric = prop; |
||||
} else { |
||||
segmentName = prop; |
||||
segmentValue = segment[prop]; |
||||
} |
||||
} |
||||
|
||||
if (alias) { |
||||
const regex = /\{\{([\s\S]+?)\}\}/g; |
||||
return alias.replace(regex, (match, g1, g2) => { |
||||
const group = g1 || g2; |
||||
|
||||
if (group === 'metric') { |
||||
return metric; |
||||
} else if (group === 'groupbyname') { |
||||
return segmentName; |
||||
} else if (group === 'groupbyvalue') { |
||||
return segmentValue; |
||||
} |
||||
|
||||
return match; |
||||
}); |
||||
} |
||||
|
||||
return metric + `{${segmentName}="${segmentValue}"}`; |
||||
} |
||||
|
||||
static isSingleValue(value: any) { |
||||
return !ResponseParser.hasSegmentsField(value); |
||||
} |
||||
|
||||
static findOrCreateBucket(data: any[], target: string) { |
||||
let dataTarget: any = find(data, ['target', target]); |
||||
if (!dataTarget) { |
||||
dataTarget = { target: target, datapoints: [] }; |
||||
data.push(dataTarget); |
||||
} |
||||
|
||||
return dataTarget; |
||||
} |
||||
|
||||
static hasSegmentsField(obj: any) { |
||||
const keys = _keys(obj); |
||||
return indexOf(keys, 'segments') > -1; |
||||
} |
||||
|
||||
static getMetricFieldKey(segment: { [x: string]: any }) { |
||||
const keys = _keys(segment); |
||||
|
||||
return filter(without(keys, 'start', 'end'), (key) => { |
||||
return isObject(segment[key]); |
||||
})[0]; |
||||
} |
||||
|
||||
static getKeyForAggregationField(dataObj: any): string { |
||||
const keys = _keys(dataObj); |
||||
return intersection(keys, ['sum', 'avg', 'min', 'max', 'count', 'unique'])[0]; |
||||
} |
||||
|
||||
static dateTimeToEpoch(dateTimeValue: any) { |
||||
return dateTime(dateTimeValue).valueOf(); |
||||
} |
||||
|
||||
static parseMetricNames(result: { metrics: any }) { |
||||
const keys = _keys(result.metrics); |
||||
|
||||
return ResponseParser.toTextValueList(keys); |
||||
} |
||||
|
||||
parseMetadata(metricName: string) { |
||||
const metric = this.results.metrics[metricName]; |
||||
|
||||
if (!metric) { |
||||
throw Error('No data found for metric: ' + metricName); |
||||
} |
||||
|
||||
return { |
||||
primaryAggType: metric.defaultAggregation, |
||||
supportedAggTypes: metric.supportedAggregations, |
||||
supportedGroupBy: metric.supportedGroupBy.all, |
||||
}; |
||||
} |
||||
|
||||
parseGroupBys() { |
||||
return ResponseParser.toTextValueList(this.results.supportedGroupBy); |
||||
} |
||||
|
||||
parseQuerySchema() { |
||||
const result: any = { |
||||
Type: 'AppInsights', |
||||
Tables: {}, |
||||
}; |
||||
if (this.results && this.results && this.results.Tables) { |
||||
for (let i = 0; i < this.results.Tables[0].Rows.length; i++) { |
||||
const column = this.results.Tables[0].Rows[i]; |
||||
const columnTable = column[0]; |
||||
const columnName = column[1]; |
||||
const columnType = column[2]; |
||||
if (result.Tables[columnTable]) { |
||||
result.Tables[columnTable].OrderedColumns.push({ Name: columnName, Type: columnType }); |
||||
} else { |
||||
result.Tables[columnTable] = { |
||||
Name: columnTable, |
||||
OrderedColumns: [{ Name: columnName, Type: columnType }], |
||||
}; |
||||
} |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
static toTextValueList(values: any) { |
||||
const list: any[] = []; |
||||
for (let i = 0; i < values.length; i++) { |
||||
list.push({ |
||||
text: values[i], |
||||
value: values[i], |
||||
}); |
||||
} |
||||
return list; |
||||
} |
||||
} |
@ -1,99 +0,0 @@ |
||||
import { render, screen } from '@testing-library/react'; |
||||
import userEvent from '@testing-library/user-event'; |
||||
import React from 'react'; |
||||
|
||||
import AnalyticsConfig, { Props } from './AnalyticsConfig'; |
||||
|
||||
const setup = (propsFunc?: (props: Props) => Props) => { |
||||
let props: Props = { |
||||
options: { |
||||
id: 21, |
||||
uid: 'x', |
||||
orgId: 1, |
||||
name: 'Azure Monitor-10-10', |
||||
type: 'grafana-azure-monitor-datasource', |
||||
typeName: 'Azure', |
||||
typeLogoUrl: '', |
||||
access: 'proxy', |
||||
url: '', |
||||
password: '', |
||||
user: '', |
||||
database: '', |
||||
basicAuth: false, |
||||
basicAuthUser: '', |
||||
basicAuthPassword: '', |
||||
withCredentials: false, |
||||
isDefault: false, |
||||
secureJsonFields: {}, |
||||
jsonData: { |
||||
cloudName: '', |
||||
subscriptionId: '', |
||||
}, |
||||
version: 1, |
||||
readOnly: false, |
||||
}, |
||||
updateOptions: jest.fn(), |
||||
}; |
||||
|
||||
if (propsFunc) { |
||||
props = propsFunc(props); |
||||
} |
||||
|
||||
return render(<AnalyticsConfig {...props} />); |
||||
}; |
||||
|
||||
describe('Render', () => { |
||||
it('should disable log analytics credentials form', () => { |
||||
setup((props) => ({ |
||||
...props, |
||||
options: { |
||||
...props.options, |
||||
jsonData: { |
||||
...props.options.jsonData, |
||||
azureLogAnalyticsSameAs: true, |
||||
}, |
||||
}, |
||||
})); |
||||
expect(screen.queryByText('Azure Monitor Logs')).not.toBeInTheDocument(); |
||||
}); |
||||
|
||||
it('should not render the Switch to use different creds for log analytics by default', () => { |
||||
setup(); |
||||
expect(screen.queryByText('is no longer supported', { exact: false })).not.toBeInTheDocument(); |
||||
}); |
||||
|
||||
// Remove this test with deprecated code
|
||||
it('should not render the Switch if different creds for log analytics were set from before', () => { |
||||
setup((props) => ({ |
||||
...props, |
||||
options: { |
||||
...props.options, |
||||
jsonData: { |
||||
...props.options.jsonData, |
||||
azureLogAnalyticsSameAs: false, |
||||
}, |
||||
}, |
||||
})); |
||||
expect(screen.queryByText('is no longer supported', { exact: false })).toBeInTheDocument(); |
||||
}); |
||||
|
||||
it('should clean up the error when resetting the credentials', async () => { |
||||
const onUpdate = jest.fn(); |
||||
setup((props) => ({ |
||||
...props, |
||||
options: { |
||||
...props.options, |
||||
jsonData: { |
||||
...props.options.jsonData, |
||||
azureLogAnalyticsSameAs: false, |
||||
}, |
||||
}, |
||||
updateOptions: onUpdate, |
||||
})); |
||||
expect(screen.queryByText('is no longer supported', { exact: false })).toBeInTheDocument(); |
||||
await userEvent.click(screen.getByText('Clear Azure Monitor Logs Credentials')); |
||||
expect(onUpdate).toHaveBeenCalled(); |
||||
const newOpts = onUpdate.mock.calls[0][0]({}); |
||||
expect(newOpts).toEqual({ jsonData: { azureLogAnalyticsSameAs: true } }); |
||||
}); |
||||
}); |
@ -1,64 +0,0 @@ |
||||
import React, { FunctionComponent, useMemo } from 'react'; |
||||
|
||||
import { Alert, Button } from '@grafana/ui'; |
||||
|
||||
import { getCredentials } from '../../../credentials'; |
||||
import { AzureDataSourceSettings } from '../../../types'; |
||||
import { AzureCredentialsForm } from '../../AzureCredentialsForm'; |
||||
|
||||
export interface Props { |
||||
options: AzureDataSourceSettings; |
||||
updateOptions: (optionsFunc: (options: AzureDataSourceSettings) => AzureDataSourceSettings) => void; |
||||
} |
||||
|
||||
export const AnalyticsConfig: FunctionComponent<Props> = (props: Props) => { |
||||
const { updateOptions } = props; |
||||
const primaryCredentials = useMemo(() => getCredentials(props.options), [props.options]); |
||||
|
||||
// Only show a section for setting LogAnalytics credentials if
|
||||
// they were set from before with different values and the
|
||||
// authType is supported
|
||||
const logCredentialsEnabled = |
||||
primaryCredentials.authType === 'clientsecret' && props.options.jsonData.azureLogAnalyticsSameAs === false; |
||||
|
||||
const onClearAzLogsCreds = () => { |
||||
updateOptions((options) => { |
||||
return { |
||||
...options, |
||||
jsonData: { |
||||
...options.jsonData, |
||||
azureLogAnalyticsSameAs: true, |
||||
}, |
||||
}; |
||||
}); |
||||
}; |
||||
|
||||
return logCredentialsEnabled ? ( |
||||
<> |
||||
<h3 className="page-heading">Azure Monitor Logs</h3> |
||||
<> |
||||
<Alert severity="error" title="Deprecated"> |
||||
Using different credentials for Azure Monitor Logs is no longer supported. Authentication information above |
||||
will be used instead. Please create a new data source with the credentials below. |
||||
</Alert> |
||||
|
||||
<AzureCredentialsForm |
||||
managedIdentityEnabled={false} |
||||
credentials={{ |
||||
...primaryCredentials, |
||||
authType: 'clientsecret', |
||||
// Use deprecated Log Analytics credentials read-only
|
||||
// to help with a possible migration
|
||||
tenantId: props.options.jsonData.logAnalyticsTenantId, |
||||
clientId: props.options.jsonData.logAnalyticsClientId, |
||||
}} |
||||
disabled={true} |
||||
> |
||||
<Button onClick={onClearAzLogsCreds}>Clear Azure Monitor Logs Credentials</Button> |
||||
</AzureCredentialsForm> |
||||
</> |
||||
</> |
||||
) : null; |
||||
}; |
||||
|
||||
export default AnalyticsConfig; |
@ -1,73 +0,0 @@ |
||||
import React from 'react'; |
||||
|
||||
import { Alert, Input } from '@grafana/ui'; |
||||
|
||||
import { Field } from '../../../Field'; |
||||
import { DeprecatedAzureMonitorQuery } from '../../types'; |
||||
|
||||
const ReadOnlyTimeGrain = ({ |
||||
timeGrainCount, |
||||
timeGrainType, |
||||
timeGrainUnit, |
||||
}: { |
||||
timeGrainCount: string; |
||||
timeGrainType: string; |
||||
timeGrainUnit: string; |
||||
}) => { |
||||
const timeFields = timeGrainType === 'specific' ? ['specific', timeGrainCount, timeGrainUnit] : [timeGrainType]; |
||||
|
||||
return ( |
||||
<Field label="Timegrain"> |
||||
<> |
||||
{timeFields.map((timeField) => ( |
||||
<Input value={timeField} disabled={true} onChange={() => {}} key={timeField} width={10} /> |
||||
))} |
||||
</> |
||||
</Field> |
||||
); |
||||
}; |
||||
|
||||
const ApplicationInsightsEditor = ({ query }: { query: DeprecatedAzureMonitorQuery }) => { |
||||
const groupBy = query.appInsights?.dimension || []; |
||||
|
||||
return ( |
||||
<div data-testid="azure-monitor-application-insights-query-editor"> |
||||
<Field label="Metric" disabled={true}> |
||||
<Input |
||||
value={query.appInsights?.metricName} |
||||
disabled={true} |
||||
onChange={() => {}} |
||||
id="azure-monitor-application-insights-metric" |
||||
/> |
||||
</Field> |
||||
<Field label="Aggregation" disabled={true}> |
||||
<Input value={query.appInsights?.aggregation} disabled={true} onChange={() => {}} /> |
||||
</Field> |
||||
{groupBy.length > 0 && ( |
||||
<Field label="Group by"> |
||||
<> |
||||
{groupBy.map((dimension) => ( |
||||
<Input value={dimension} disabled={true} onChange={() => {}} key={dimension} /> |
||||
))} |
||||
</> |
||||
</Field> |
||||
)} |
||||
<Field label="Filter" disabled={true}> |
||||
<Input value={query.appInsights?.dimensionFilter} disabled={true} onChange={() => {}} /> |
||||
</Field> |
||||
<ReadOnlyTimeGrain |
||||
timeGrainCount={query.appInsights?.timeGrainCount || ''} |
||||
timeGrainType={query.appInsights?.timeGrainType || 'auto'} |
||||
timeGrainUnit={query.appInsights?.timeGrainUnit || 'minute'} |
||||
/> |
||||
<Field label="Legend format" disabled={true}> |
||||
<Input placeholder="Alias patterns" value={query.appInsights?.alias} onChange={() => {}} disabled={true} /> |
||||
</Field> |
||||
<Alert severity="info" title="Deprecated"> |
||||
Application Insights is deprecated and is now read only. Migrate your queries to Metrics to make changes. |
||||
</Alert> |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
export default ApplicationInsightsEditor; |
@ -1,52 +0,0 @@ |
||||
import React from 'react'; |
||||
|
||||
import { Alert, CodeEditor, Select } from '@grafana/ui'; |
||||
|
||||
import { AzureMonitorOption } from '../../../../types'; |
||||
import { Field } from '../../../Field'; |
||||
import { Space } from '../../../Space'; |
||||
import { DeprecatedAzureMonitorQuery } from '../../types'; |
||||
|
||||
interface InsightsAnalyticsEditorProps { |
||||
query: DeprecatedAzureMonitorQuery; |
||||
} |
||||
|
||||
const FORMAT_OPTIONS: Array<AzureMonitorOption<string>> = [ |
||||
{ label: 'Time series', value: 'time_series' }, |
||||
{ label: 'Table', value: 'table' }, |
||||
]; |
||||
|
||||
const InsightsAnalyticsEditor: React.FC<InsightsAnalyticsEditorProps> = ({ query }) => { |
||||
return ( |
||||
<div data-testid="azure-monitor-insights-analytics-query-editor"> |
||||
<CodeEditor |
||||
language="kusto" |
||||
value={query.insightsAnalytics?.query ?? ''} |
||||
height={200} |
||||
width="100%" |
||||
readOnly={true} |
||||
showMiniMap={false} |
||||
/> |
||||
|
||||
<Field label="Format as"> |
||||
<Select |
||||
menuShouldPortal |
||||
inputId="azure-monitor-logs-workspaces-field" |
||||
value={query.insightsAnalytics?.resultFormat} |
||||
disabled={true} |
||||
options={FORMAT_OPTIONS} |
||||
onChange={() => {}} |
||||
width={38} |
||||
/> |
||||
</Field> |
||||
|
||||
<Space v={2} /> |
||||
|
||||
<Alert severity="info" title="Deprecated"> |
||||
Insights Analytics is deprecated and is now read only. Migrate your queries to Logs to make changes. |
||||
</Alert> |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
export default InsightsAnalyticsEditor; |
@ -1,110 +0,0 @@ |
||||
import { shallow } from 'enzyme'; |
||||
import React from 'react'; |
||||
|
||||
import { Button, LegacyForms } from '@grafana/ui'; |
||||
|
||||
import { Props } from '../../ConfigEditor'; |
||||
|
||||
import InsightsConfig from './InsightsConfig'; |
||||
|
||||
const { Input } = LegacyForms; |
||||
|
||||
const setup = (propOverrides?: object) => { |
||||
const props: Props = { |
||||
options: { |
||||
id: 21, |
||||
uid: 'x', |
||||
orgId: 1, |
||||
name: 'Azure Monitor-10-10', |
||||
type: 'grafana-azure-monitor-datasource', |
||||
typeLogoUrl: '', |
||||
typeName: 'Azure', |
||||
access: 'proxy', |
||||
url: '', |
||||
password: '', |
||||
user: '', |
||||
database: '', |
||||
basicAuth: false, |
||||
basicAuthUser: '', |
||||
basicAuthPassword: '', |
||||
withCredentials: false, |
||||
isDefault: false, |
||||
secureJsonFields: {}, |
||||
jsonData: { |
||||
cloudName: '', |
||||
subscriptionId: '', |
||||
}, |
||||
secureJsonData: {}, |
||||
version: 1, |
||||
readOnly: false, |
||||
}, |
||||
onOptionsChange: jest.fn(), |
||||
}; |
||||
|
||||
Object.assign(props, propOverrides); |
||||
|
||||
return shallow(<InsightsConfig {...props} />); |
||||
}; |
||||
|
||||
describe('Render', () => { |
||||
it('should render component', () => { |
||||
const wrapper = setup(); |
||||
|
||||
expect(wrapper).toMatchSnapshot(); |
||||
}); |
||||
|
||||
it('should disable insights api key input', () => { |
||||
const wrapper = setup({ |
||||
options: { |
||||
secureJsonFields: { |
||||
appInsightsApiKey: true, |
||||
}, |
||||
jsonData: { |
||||
appInsightsAppId: 'cddcc020-2c94-460a-a3d0-df3147ffa792', |
||||
}, |
||||
secureJsonData: { |
||||
appInsightsApiKey: 'e7f3f661-a933-4b3f-8176-51c4f982ec48', |
||||
}, |
||||
}, |
||||
}); |
||||
expect(wrapper).toMatchSnapshot(); |
||||
}); |
||||
|
||||
it('should enable insights api key input', () => { |
||||
const wrapper = setup({ |
||||
options: { |
||||
secureJsonFields: { |
||||
appInsightsApiKey: false, |
||||
}, |
||||
jsonData: { |
||||
appInsightsAppId: 'cddcc020-2c94-460a-a3d0-df3147ffa792', |
||||
}, |
||||
secureJsonData: { |
||||
appInsightsApiKey: 'e7f3f661-a933-4b3f-8176-51c4f982ec48', |
||||
}, |
||||
}, |
||||
}); |
||||
expect(wrapper).toMatchSnapshot(); |
||||
}); |
||||
|
||||
it('should disable buttons and inputs', () => { |
||||
const wrapper = setup({ |
||||
options: { |
||||
secureJsonFields: { |
||||
appInsightsApiKey: true, |
||||
}, |
||||
jsonData: { |
||||
appInsightsAppId: 'cddcc020-2c94-460a-a3d0-df3147ffa792', |
||||
}, |
||||
secureJsonData: { |
||||
appInsightsApiKey: 'e7f3f661-a933-4b3f-8176-51c4f982ec48', |
||||
}, |
||||
readOnly: true, |
||||
}, |
||||
}); |
||||
const buttons = wrapper.find(Button); |
||||
const inputs = wrapper.find(Input); |
||||
buttons.forEach((b) => expect(b.prop('disabled')).toBe(true)); |
||||
inputs.forEach((i) => expect(i.prop('disabled')).toBe(true)); |
||||
}); |
||||
}); |
@ -1,98 +0,0 @@ |
||||
import React, { PureComponent } from 'react'; |
||||
|
||||
import { |
||||
updateDatasourcePluginJsonDataOption, |
||||
updateDatasourcePluginResetOption, |
||||
updateDatasourcePluginSecureJsonDataOption, |
||||
} from '@grafana/data'; |
||||
import { Alert, Button, InlineFormLabel, LegacyForms } from '@grafana/ui'; |
||||
|
||||
import { AzureDataSourceJsonData, AzureDataSourceSecureJsonData } from '../../../types'; |
||||
import { Props } from '../../ConfigEditor'; |
||||
|
||||
const { Input } = LegacyForms; |
||||
|
||||
export class InsightsConfig extends PureComponent<Props> { |
||||
private onAppInsightsResetApiKey = () => { |
||||
this.resetSecureKey('appInsightsApiKey'); |
||||
}; |
||||
|
||||
private onUpdateJsonDataOption = |
||||
(key: keyof AzureDataSourceJsonData) => (event: React.SyntheticEvent<HTMLInputElement | HTMLSelectElement>) => { |
||||
updateDatasourcePluginJsonDataOption(this.props, key, event.currentTarget.value); |
||||
}; |
||||
|
||||
private onUpdateSecureJsonDataOption = |
||||
(key: keyof AzureDataSourceSecureJsonData) => |
||||
(event: React.SyntheticEvent<HTMLInputElement | HTMLSelectElement>) => { |
||||
updateDatasourcePluginSecureJsonDataOption(this.props, key, event.currentTarget.value); |
||||
}; |
||||
|
||||
private resetSecureKey = (key: keyof AzureDataSourceSecureJsonData) => { |
||||
updateDatasourcePluginResetOption(this.props, key); |
||||
}; |
||||
|
||||
render() { |
||||
const { options } = this.props; |
||||
return ( |
||||
<> |
||||
<h3 className="page-heading">Azure Application Insights</h3> |
||||
<Alert severity="info" title="Application Insights credentials are deprecated"> |
||||
Configure using Azure AD App Registration above and update existing queries to use Metrics or Logs. |
||||
</Alert> |
||||
<div className="gf-form-group"> |
||||
{options.secureJsonFields.appInsightsApiKey ? ( |
||||
<div className="gf-form-inline"> |
||||
<div className="gf-form"> |
||||
<InlineFormLabel className="width-12">API Key</InlineFormLabel> |
||||
<Input className="width-25" placeholder="configured" disabled={true} /> |
||||
</div> |
||||
<div className="gf-form"> |
||||
<div className="max-width-30 gf-form-inline"> |
||||
<Button |
||||
variant="secondary" |
||||
type="button" |
||||
onClick={this.onAppInsightsResetApiKey} |
||||
disabled={this.props.options.readOnly} |
||||
> |
||||
reset |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
) : ( |
||||
<div className="gf-form-inline"> |
||||
<div className="gf-form"> |
||||
<InlineFormLabel className="width-12">API Key</InlineFormLabel> |
||||
<div className="width-15"> |
||||
<Input |
||||
className="width-30" |
||||
placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" |
||||
value={options.secureJsonData!.appInsightsApiKey || ''} |
||||
onChange={this.onUpdateSecureJsonDataOption('appInsightsApiKey')} |
||||
disabled={this.props.options.readOnly} |
||||
/> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
)} |
||||
<div className="gf-form-inline"> |
||||
<div className="gf-form"> |
||||
<InlineFormLabel className="width-12">Application ID</InlineFormLabel> |
||||
<div className="width-15"> |
||||
<Input |
||||
className="width-30" |
||||
value={options.jsonData.appInsightsAppId || ''} |
||||
onChange={this.onUpdateJsonDataOption('appInsightsAppId')} |
||||
disabled={this.props.options.readOnly} |
||||
/> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
</> |
||||
); |
||||
} |
||||
} |
||||
|
||||
export default InsightsConfig; |
@ -1,208 +0,0 @@ |
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP |
||||
|
||||
// Snapshot of InsightsConfig when an API key is already stored: the key is
// shown as a disabled, masked ("configured") Input next to a reset Button.
// Auto-generated by Jest — regenerate with `jest -u`, do not edit by hand.
exports[`Render should disable insights api key input 1`] = `
<Fragment>
  <h3
    className="page-heading"
  >
    Azure Application Insights
  </h3>
  <Alert
    severity="info"
    title="Application Insights credentials are deprecated"
  >
    Configure using Azure AD App Registration above and update existing queries to use Metrics or Logs.
  </Alert>
  <div
    className="gf-form-group"
  >
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          API Key
        </FormLabel>
        <Input
          className="width-25"
          disabled={true}
          placeholder="configured"
        />
      </div>
      <div
        className="gf-form"
      >
        <div
          className="max-width-30 gf-form-inline"
        >
          <Button
            onClick={[Function]}
            type="button"
            variant="secondary"
          >
            reset
          </Button>
        </div>
      </div>
    </div>
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          Application ID
        </FormLabel>
        <div
          className="width-15"
        >
          <Input
            className="width-30"
            onChange={[Function]}
            value="cddcc020-2c94-460a-a3d0-df3147ffa792"
          />
        </div>
      </div>
    </div>
  </div>
</Fragment>
`;
||||
|
||||
// Snapshot of InsightsConfig when no key is stored yet: the API key field is
// an editable Input with a masked-GUID placeholder and a populated value.
// Auto-generated by Jest — regenerate with `jest -u`, do not edit by hand.
exports[`Render should enable insights api key input 1`] = `
<Fragment>
  <h3
    className="page-heading"
  >
    Azure Application Insights
  </h3>
  <Alert
    severity="info"
    title="Application Insights credentials are deprecated"
  >
    Configure using Azure AD App Registration above and update existing queries to use Metrics or Logs.
  </Alert>
  <div
    className="gf-form-group"
  >
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          API Key
        </FormLabel>
        <div
          className="width-15"
        >
          <Input
            className="width-30"
            onChange={[Function]}
            placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
            value="e7f3f661-a933-4b3f-8176-51c4f982ec48"
          />
        </div>
      </div>
    </div>
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          Application ID
        </FormLabel>
        <div
          className="width-15"
        >
          <Input
            className="width-30"
            onChange={[Function]}
            value="cddcc020-2c94-460a-a3d0-df3147ffa792"
          />
        </div>
      </div>
    </div>
  </div>
</Fragment>
`;
||||
|
||||
// Snapshot of InsightsConfig with empty, editable fields (default render:
// both Inputs enabled with value="" and disabled={false}).
// Auto-generated by Jest — regenerate with `jest -u`, do not edit by hand.
exports[`Render should render component 1`] = `
<Fragment>
  <h3
    className="page-heading"
  >
    Azure Application Insights
  </h3>
  <Alert
    severity="info"
    title="Application Insights credentials are deprecated"
  >
    Configure using Azure AD App Registration above and update existing queries to use Metrics or Logs.
  </Alert>
  <div
    className="gf-form-group"
  >
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          API Key
        </FormLabel>
        <div
          className="width-15"
        >
          <Input
            className="width-30"
            disabled={false}
            onChange={[Function]}
            placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
            value=""
          />
        </div>
      </div>
    </div>
    <div
      className="gf-form-inline"
    >
      <div
        className="gf-form"
      >
        <FormLabel
          className="width-12"
        >
          Application ID
        </FormLabel>
        <div
          className="width-15"
        >
          <Input
            className="width-30"
            disabled={false}
            onChange={[Function]}
            value=""
          />
        </div>
      </div>
    </div>
  </div>
</Fragment>
`;
@ -1,30 +0,0 @@ |
||||
import { DataSourceInstanceSettings, ScopedVars } from '@grafana/data'; |
||||
import { getTemplateSrv } from '@grafana/runtime'; |
||||
|
||||
import { AzureDataSourceJsonData, DeprecatedAzureQueryType } from '../../../types'; |
||||
import AppInsightsDatasource from '../app_insights/app_insights_datasource'; |
||||
import { DeprecatedAzureMonitorQuery } from '../types'; |
||||
|
||||
export default class InsightsAnalyticsDatasource extends AppInsightsDatasource { |
||||
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) { |
||||
super(instanceSettings); |
||||
} |
||||
|
||||
applyTemplateVariables(target: DeprecatedAzureMonitorQuery, scopedVars: ScopedVars): DeprecatedAzureMonitorQuery { |
||||
const item = target.insightsAnalytics; |
||||
if (!item) { |
||||
return target; |
||||
} |
||||
|
||||
const query = item.rawQueryString && !item.query ? item.rawQueryString : item.query; |
||||
|
||||
return { |
||||
refId: target.refId, |
||||
queryType: DeprecatedAzureQueryType.InsightsAnalytics, |
||||
insightsAnalytics: { |
||||
query: getTemplateSrv().replace(query, scopedVars), |
||||
resultFormat: item.resultFormat, |
||||
}, |
||||
}; |
||||
} |
||||
} |
@ -1 +0,0 @@ |
||||
export * from './query'; |
@ -1,61 +0,0 @@ |
||||
import { DataQuery } from '@grafana/data'; |
||||
|
||||
import { |
||||
AzureLogsQuery, |
||||
AzureMetricQuery, |
||||
AzureQueryType, |
||||
AzureResourceGraphQuery, |
||||
DeprecatedAzureQueryType, |
||||
} from '../../../types'; |
||||
import { GrafanaTemplateVariableQuery } from '../../../types/templateVariables'; |
||||
|
||||
/**
 * Azure Monitor query model that, alongside the standard sub-queries
 * (Metrics, Logs, Resource Graph, template variables), still carries the
 * App Insights and Insights Analytics sub-queries deprecated in v8, so
 * legacy dashboards remain deserializable.
 */
export interface DeprecatedAzureMonitorQuery extends DataQuery {
  // Accepts both current and deprecated query types.
  queryType?: AzureQueryType | DeprecatedAzureQueryType;

  subscription?: string;

  /** ARG uses multiple subscriptions */
  subscriptions?: string[];

  azureMonitor?: AzureMetricQuery;
  azureLogAnalytics?: AzureLogsQuery;
  azureResourceGraph?: AzureResourceGraphQuery;
  grafanaTemplateVariableFn?: GrafanaTemplateVariableQuery;

  /** @deprecated App Insights/Insights Analytics deprecated in v8 */
  appInsights?: ApplicationInsightsQuery;

  /** @deprecated App Insights/Insights Analytics deprecated in v8 */
  insightsAnalytics?: InsightsAnalyticsQuery;
}
||||
|
||||
/**
 * Azure Monitor App Insights sub-query properties.
 * @deprecated App Insights deprecated in v8 in favor of Metrics queries
 */
export interface ApplicationInsightsQuery {
  metricName?: string;
  // NOTE(review): the time grain appears to be persisted both whole and as
  // count/type/unit parts — confirm against the query editor before relying
  // on any single field being set.
  timeGrain?: string;
  timeGrainCount?: string;
  timeGrainType?: string;
  timeGrainUnit?: string;
  aggregation?: string;
  dimension?: string[]; // Was string before 7.1
  dimensionFilter?: string;
  // Presumably a custom display name for the resulting series — verify
  // against the response parser.
  alias?: string;

  /** @deprecated Migrated to Insights Analytics query */
  rawQuery?: string;
}
||||
|
||||
/**
 * Azure Monitor Insights Analytics sub-query properties.
 * @deprecated Insights Analytics deprecated in v8 in favor of Logs queries
 */
export interface InsightsAnalyticsQuery {
  // Query text; preferred over rawQueryString when set (see
  // InsightsAnalyticsDatasource.applyTemplateVariables).
  query?: string;
  resultFormat?: string;

  /** @deprecated Migrate field to query */
  rawQueryString?: string;
}
@ -1,14 +0,0 @@ |
||||
import { gt, valid } from 'semver'; |
||||
|
||||
import { config } from '@grafana/runtime'; |
||||
|
||||
import { AzureDataSourceSettings } from '../../types'; |
||||
|
||||
export function isAppInsightsConfigured(options: AzureDataSourceSettings) { |
||||
return !!(options.jsonData.appInsightsAppId && options.secureJsonFields.appInsightsApiKey); |
||||
} |
||||
|
||||
export function gtGrafana9() { |
||||
// AppInsights configuration will be removed with Grafana 9
|
||||
return valid(config.buildInfo.version) && gt(config.buildInfo.version, '9.0.0-beta1'); |
||||
} |
Loading…
Reference in new issue