mirror of https://github.com/grafana/grafana
parent
73295ab3ed
commit
39607d09d7
@ -0,0 +1,350 @@ |
||||
package cloudwatch |
||||
|
||||
import ( |
||||
"context" |
||||
"errors" |
||||
"regexp" |
||||
"sort" |
||||
"strconv" |
||||
"strings" |
||||
"time" |
||||
|
||||
"github.com/grafana/grafana/pkg/log" |
||||
"github.com/grafana/grafana/pkg/models" |
||||
"github.com/grafana/grafana/pkg/tsdb" |
||||
|
||||
"github.com/aws/aws-sdk-go/aws" |
||||
"github.com/aws/aws-sdk-go/aws/request" |
||||
"github.com/aws/aws-sdk-go/aws/session" |
||||
"github.com/aws/aws-sdk-go/service/cloudwatch" |
||||
cwapi "github.com/grafana/grafana/pkg/api/cloudwatch" |
||||
"github.com/grafana/grafana/pkg/components/null" |
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
) |
||||
|
||||
// CloudWatchExecutor executes CloudWatch metric queries for a single
// Grafana data source. It embeds the data source so connection settings
// (region profile, credentials, assume-role ARN) are available when
// building AWS API clients.
type CloudWatchExecutor struct {
	*models.DataSource
}
||||
|
||||
func NewCloudWatchExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { |
||||
return &CloudWatchExecutor{ |
||||
DataSource: dsInfo, |
||||
}, nil |
||||
} |
||||
|
||||
var (
	// plog is the package logger, initialized in init.
	plog log.Logger
	// standardStatistics holds the CloudWatch statistics that are sent via
	// the Statistics request field; anything else (percentiles such as
	// "p90.00") is treated as an extended statistic.
	standardStatistics map[string]bool
	// aliasFormat matches "{{label}}" placeholders in series alias patterns.
	aliasFormat *regexp.Regexp
)
||||
|
||||
func init() { |
||||
plog = log.New("tsdb.cloudwatch") |
||||
tsdb.RegisterExecutor("cloudwatch", NewCloudWatchExecutor) |
||||
standardStatistics = map[string]bool{ |
||||
"Average": true, |
||||
"Maximum": true, |
||||
"Minimum": true, |
||||
"Sum": true, |
||||
"SampleCount": true, |
||||
} |
||||
aliasFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) |
||||
} |
||||
|
||||
func (e *CloudWatchExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { |
||||
result := &tsdb.BatchResult{ |
||||
QueryResults: make(map[string]*tsdb.QueryResult), |
||||
} |
||||
|
||||
errCh := make(chan error, 1) |
||||
resCh := make(chan *tsdb.QueryResult, 1) |
||||
|
||||
currentlyExecuting := 0 |
||||
for _, model := range queries { |
||||
currentlyExecuting++ |
||||
go func(refId string) { |
||||
queryRes, err := e.executeQuery(ctx, model, queryContext) |
||||
currentlyExecuting-- |
||||
if err != nil { |
||||
errCh <- err |
||||
} else { |
||||
queryRes.RefId = refId |
||||
resCh <- queryRes |
||||
} |
||||
}(model.RefId) |
||||
} |
||||
|
||||
for currentlyExecuting != 0 { |
||||
select { |
||||
case res := <-resCh: |
||||
result.QueryResults[res.RefId] = res |
||||
case err := <-errCh: |
||||
return result.WithError(err) |
||||
case <-ctx.Done(): |
||||
return result.WithError(ctx.Err()) |
||||
} |
||||
} |
||||
|
||||
return result |
||||
} |
||||
|
||||
func (e *CloudWatchExecutor) getClient(region string) (*cloudwatch.CloudWatch, error) { |
||||
assumeRoleArn := e.DataSource.JsonData.Get("assumeRoleArn").MustString() |
||||
|
||||
accessKey := "" |
||||
secretKey := "" |
||||
for key, value := range e.DataSource.SecureJsonData.Decrypt() { |
||||
if key == "accessKey" { |
||||
accessKey = value |
||||
} |
||||
if key == "secretKey" { |
||||
secretKey = value |
||||
} |
||||
} |
||||
|
||||
datasourceInfo := &cwapi.DatasourceInfo{ |
||||
Region: region, |
||||
Profile: e.DataSource.Database, |
||||
AssumeRoleArn: assumeRoleArn, |
||||
AccessKey: accessKey, |
||||
SecretKey: secretKey, |
||||
} |
||||
|
||||
credentials, err := cwapi.GetCredentials(datasourceInfo) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
cfg := &aws.Config{ |
||||
Region: aws.String(region), |
||||
Credentials: credentials, |
||||
} |
||||
|
||||
sess, err := session.NewSession(cfg) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
client := cloudwatch.New(sess, cfg) |
||||
return client, nil |
||||
} |
||||
|
||||
func (e *CloudWatchExecutor) executeQuery(ctx context.Context, model *tsdb.Query, queryContext *tsdb.QueryContext) (*tsdb.QueryResult, error) { |
||||
query, err := parseQuery(model.Model) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
client, err := e.getClient(query.Region) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
startTime, err := queryContext.TimeRange.ParseFrom() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
endTime, err := queryContext.TimeRange.ParseTo() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
params := &cloudwatch.GetMetricStatisticsInput{ |
||||
Namespace: aws.String(query.Namespace), |
||||
MetricName: aws.String(query.MetricName), |
||||
Dimensions: query.Dimensions, |
||||
Period: aws.Int64(int64(query.Period)), |
||||
StartTime: aws.Time(startTime.Add(-time.Minute * 15)), |
||||
EndTime: aws.Time(endTime), |
||||
} |
||||
if len(query.Statistics) > 0 { |
||||
params.Statistics = query.Statistics |
||||
} |
||||
if len(query.ExtendedStatistics) > 0 { |
||||
params.ExtendedStatistics = query.ExtendedStatistics |
||||
} |
||||
|
||||
resp, err := client.GetMetricStatisticsWithContext(ctx, params, request.WithResponseReadTimeout(10*time.Second)) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
queryRes, err := parseResponse(resp, query) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
return queryRes, nil |
||||
} |
||||
|
||||
func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) { |
||||
var result []*cloudwatch.Dimension |
||||
|
||||
for k, v := range model.Get("dimensions").MustMap() { |
||||
kk := k |
||||
if vv, ok := v.(string); ok { |
||||
result = append(result, &cloudwatch.Dimension{ |
||||
Name: &kk, |
||||
Value: &vv, |
||||
}) |
||||
} else { |
||||
return nil, errors.New("failed to parse") |
||||
} |
||||
} |
||||
|
||||
sort.Slice(result, func(i, j int) bool { |
||||
return *result[i].Name < *result[j].Name |
||||
}) |
||||
return result, nil |
||||
} |
||||
|
||||
func parseStatistics(model *simplejson.Json) ([]*string, []*string, error) { |
||||
var statistics []*string |
||||
var extendedStatistics []*string |
||||
|
||||
for _, s := range model.Get("statistics").MustArray() { |
||||
if ss, ok := s.(string); ok { |
||||
if _, isStandard := standardStatistics[ss]; isStandard { |
||||
statistics = append(statistics, &ss) |
||||
} else { |
||||
extendedStatistics = append(extendedStatistics, &ss) |
||||
} |
||||
} else { |
||||
return nil, nil, errors.New("failed to parse") |
||||
} |
||||
} |
||||
|
||||
return statistics, extendedStatistics, nil |
||||
} |
||||
|
||||
func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { |
||||
region, err := model.Get("region").String() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
namespace, err := model.Get("namespace").String() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
metricName, err := model.Get("metricName").String() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
dimensions, err := parseDimensions(model) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
statistics, extendedStatistics, err := parseStatistics(model) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
p := model.Get("period").MustString("") |
||||
if p == "" { |
||||
if namespace == "AWS/EC2" { |
||||
p = "300" |
||||
} else { |
||||
p = "60" |
||||
} |
||||
} |
||||
period, err := strconv.Atoi(p) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
alias := model.Get("alias").MustString("{{metric}}_{{stat}}") |
||||
|
||||
return &CloudWatchQuery{ |
||||
Region: region, |
||||
Namespace: namespace, |
||||
MetricName: metricName, |
||||
Dimensions: dimensions, |
||||
Statistics: statistics, |
||||
ExtendedStatistics: extendedStatistics, |
||||
Period: period, |
||||
Alias: alias, |
||||
}, nil |
||||
} |
||||
|
||||
func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string { |
||||
data := map[string]string{} |
||||
data["region"] = query.Region |
||||
data["namespace"] = query.Namespace |
||||
data["metric"] = query.MetricName |
||||
data["stat"] = stat |
||||
for k, v := range dimensions { |
||||
data[k] = v |
||||
} |
||||
|
||||
result := aliasFormat.ReplaceAllFunc([]byte(query.Alias), func(in []byte) []byte { |
||||
labelName := strings.Replace(string(in), "{{", "", 1) |
||||
labelName = strings.Replace(labelName, "}}", "", 1) |
||||
labelName = strings.TrimSpace(labelName) |
||||
if val, exists := data[labelName]; exists { |
||||
return []byte(val) |
||||
} |
||||
|
||||
return in |
||||
}) |
||||
|
||||
return string(result) |
||||
} |
||||
|
||||
func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) { |
||||
queryRes := tsdb.NewQueryResult() |
||||
|
||||
var value float64 |
||||
for _, s := range append(query.Statistics, query.ExtendedStatistics...) { |
||||
series := tsdb.TimeSeries{ |
||||
Tags: map[string]string{}, |
||||
} |
||||
for _, d := range query.Dimensions { |
||||
series.Tags[*d.Name] = *d.Value |
||||
} |
||||
series.Name = formatAlias(query, *s, series.Tags) |
||||
|
||||
lastTimestamp := make(map[string]time.Time) |
||||
sort.Slice(resp.Datapoints, func(i, j int) bool { |
||||
return (*resp.Datapoints[i].Timestamp).Before(*resp.Datapoints[j].Timestamp) |
||||
}) |
||||
for _, v := range resp.Datapoints { |
||||
switch *s { |
||||
case "Average": |
||||
value = *v.Average |
||||
case "Maximum": |
||||
value = *v.Maximum |
||||
case "Minimum": |
||||
value = *v.Minimum |
||||
case "Sum": |
||||
value = *v.Sum |
||||
case "SampleCount": |
||||
value = *v.SampleCount |
||||
default: |
||||
if strings.Index(*s, "p") == 0 && v.ExtendedStatistics[*s] != nil { |
||||
value = *v.ExtendedStatistics[*s] |
||||
} |
||||
} |
||||
|
||||
// terminate gap of data points
|
||||
timestamp := *v.Timestamp |
||||
if _, ok := lastTimestamp[*s]; ok { |
||||
nextTimestampFromLast := lastTimestamp[*s].Add(time.Duration(query.Period) * time.Second) |
||||
if timestamp.After(nextTimestampFromLast) { |
||||
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(nextTimestampFromLast.Unix()*1000))) |
||||
} |
||||
} |
||||
lastTimestamp[*s] = timestamp |
||||
|
||||
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timestamp.Unix()*1000))) |
||||
} |
||||
|
||||
queryRes.Series = append(queryRes.Series, &series) |
||||
} |
||||
|
||||
return queryRes, nil |
||||
} |
@ -0,0 +1,181 @@ |
||||
package cloudwatch |
||||
|
||||
import ( |
||||
"testing" |
||||
"time" |
||||
|
||||
"github.com/aws/aws-sdk-go/aws" |
||||
"github.com/aws/aws-sdk-go/service/cloudwatch" |
||||
"github.com/grafana/grafana/pkg/components/null" |
||||
"github.com/grafana/grafana/pkg/components/simplejson" |
||||
. "github.com/smartystreets/goconvey/convey" |
||||
) |
||||
|
||||
// TestCloudWatch covers the pure parsing/formatting helpers of the
// CloudWatch executor: query-model parsing, response conversion, and
// gap termination between datapoints.
func TestCloudWatch(t *testing.T) {
	Convey("CloudWatch", t, func() {

		Convey("can parse cloudwatch json model", func() {
			json := `
			{
				"region": "us-east-1",
				"namespace": "AWS/ApplicationELB",
				"metricName": "TargetResponseTime",
				"dimensions": {
					"LoadBalancer": "lb",
					"TargetGroup": "tg"
				},
				"statistics": [
					"Average",
					"Maximum",
					"p50.00",
					"p90.00"
				],
				"period": "60",
				"alias": "{{metric}}_{{stat}}"
			}
			`
			modelJson, err := simplejson.NewJson([]byte(json))
			So(err, ShouldBeNil)

			res, err := parseQuery(modelJson)
			So(err, ShouldBeNil)
			So(res.Region, ShouldEqual, "us-east-1")
			So(res.Namespace, ShouldEqual, "AWS/ApplicationELB")
			So(res.MetricName, ShouldEqual, "TargetResponseTime")
			// Dimensions come back sorted by name.
			So(len(res.Dimensions), ShouldEqual, 2)
			So(*res.Dimensions[0].Name, ShouldEqual, "LoadBalancer")
			So(*res.Dimensions[0].Value, ShouldEqual, "lb")
			So(*res.Dimensions[1].Name, ShouldEqual, "TargetGroup")
			So(*res.Dimensions[1].Value, ShouldEqual, "tg")
			// Standard statistics and percentiles are split apart.
			So(len(res.Statistics), ShouldEqual, 2)
			So(*res.Statistics[0], ShouldEqual, "Average")
			So(*res.Statistics[1], ShouldEqual, "Maximum")
			So(len(res.ExtendedStatistics), ShouldEqual, 2)
			So(*res.ExtendedStatistics[0], ShouldEqual, "p50.00")
			So(*res.ExtendedStatistics[1], ShouldEqual, "p90.00")
			So(res.Period, ShouldEqual, 60)
			So(res.Alias, ShouldEqual, "{{metric}}_{{stat}}")
		})

		Convey("can parse cloudwatch response", func() {
			timestamp := time.Unix(0, 0)
			resp := &cloudwatch.GetMetricStatisticsOutput{
				Label: aws.String("TargetResponseTime"),
				Datapoints: []*cloudwatch.Datapoint{
					{
						Timestamp: aws.Time(timestamp),
						Average:   aws.Float64(10.0),
						Maximum:   aws.Float64(20.0),
						ExtendedStatistics: map[string]*float64{
							"p50.00": aws.Float64(30.0),
							"p90.00": aws.Float64(40.0),
						},
					},
				},
			}
			query := &CloudWatchQuery{
				Region:     "us-east-1",
				Namespace:  "AWS/ApplicationELB",
				MetricName: "TargetResponseTime",
				Dimensions: []*cloudwatch.Dimension{
					{
						Name:  aws.String("LoadBalancer"),
						Value: aws.String("lb"),
					},
					{
						Name:  aws.String("TargetGroup"),
						Value: aws.String("tg"),
					},
				},
				Statistics:         []*string{aws.String("Average"), aws.String("Maximum")},
				ExtendedStatistics: []*string{aws.String("p50.00"), aws.String("p90.00")},
				Period:             60,
				Alias:              "{{namespace}}_{{metric}}_{{stat}}",
			}

			queryRes, err := parseResponse(resp, query)
			So(err, ShouldBeNil)
			// One series per requested statistic, in Statistics-then-
			// ExtendedStatistics order; alias placeholders are expanded.
			So(queryRes.Series[0].Name, ShouldEqual, "AWS/ApplicationELB_TargetResponseTime_Average")
			So(queryRes.Series[0].Tags["LoadBalancer"], ShouldEqual, "lb")
			So(queryRes.Series[0].Tags["TargetGroup"], ShouldEqual, "tg")
			So(queryRes.Series[0].Points[0][0].String(), ShouldEqual, null.FloatFrom(10.0).String())
			So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
			So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
			So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
		})

		Convey("terminate gap of data points", func() {
			// Three datapoints at t=0s, t=60s, t=180s with a 60s period:
			// the 60s..180s gap must be terminated by a null point.
			timestamp := time.Unix(0, 0)
			resp := &cloudwatch.GetMetricStatisticsOutput{
				Label: aws.String("TargetResponseTime"),
				Datapoints: []*cloudwatch.Datapoint{
					{
						Timestamp: aws.Time(timestamp),
						Average:   aws.Float64(10.0),
						Maximum:   aws.Float64(20.0),
						ExtendedStatistics: map[string]*float64{
							"p50.00": aws.Float64(30.0),
							"p90.00": aws.Float64(40.0),
						},
					},
					{
						Timestamp: aws.Time(timestamp.Add(60 * time.Second)),
						Average:   aws.Float64(20.0),
						Maximum:   aws.Float64(30.0),
						ExtendedStatistics: map[string]*float64{
							"p50.00": aws.Float64(40.0),
							"p90.00": aws.Float64(50.0),
						},
					},
					{
						Timestamp: aws.Time(timestamp.Add(180 * time.Second)),
						Average:   aws.Float64(30.0),
						Maximum:   aws.Float64(40.0),
						ExtendedStatistics: map[string]*float64{
							"p50.00": aws.Float64(50.0),
							"p90.00": aws.Float64(60.0),
						},
					},
				},
			}
			query := &CloudWatchQuery{
				Region:     "us-east-1",
				Namespace:  "AWS/ApplicationELB",
				MetricName: "TargetResponseTime",
				Dimensions: []*cloudwatch.Dimension{
					{
						Name:  aws.String("LoadBalancer"),
						Value: aws.String("lb"),
					},
					{
						Name:  aws.String("TargetGroup"),
						Value: aws.String("tg"),
					},
				},
				Statistics:         []*string{aws.String("Average"), aws.String("Maximum")},
				ExtendedStatistics: []*string{aws.String("p50.00"), aws.String("p90.00")},
				Period:             60,
				Alias:              "{{namespace}}_{{metric}}_{{stat}}",
			}

			queryRes, err := parseResponse(resp, query)
			So(err, ShouldBeNil)
			// Points 0 and 1 carry the first two datapoints for each series.
			So(queryRes.Series[0].Points[0][0].String(), ShouldEqual, null.FloatFrom(10.0).String())
			So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
			So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
			So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
			So(queryRes.Series[0].Points[1][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
			So(queryRes.Series[1].Points[1][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
			So(queryRes.Series[2].Points[1][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
			So(queryRes.Series[3].Points[1][0].String(), ShouldEqual, null.FloatFrom(50.0).String())
			// Point 2 is the inserted null terminating the gap.
			So(queryRes.Series[0].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
			So(queryRes.Series[1].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
			So(queryRes.Series[2].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
			So(queryRes.Series[3].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
			// Point 3 is the datapoint after the gap.
			So(queryRes.Series[0].Points[3][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
			So(queryRes.Series[1].Points[3][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
			So(queryRes.Series[2].Points[3][0].String(), ShouldEqual, null.FloatFrom(50.0).String())
			So(queryRes.Series[3].Points[3][0].String(), ShouldEqual, null.FloatFrom(60.0).String())
		})
	})
}
@ -0,0 +1,16 @@ |
||||
package cloudwatch |
||||
|
||||
import ( |
||||
"github.com/aws/aws-sdk-go/service/cloudwatch" |
||||
) |
||||
|
||||
// CloudWatchQuery is the parsed form of a CloudWatch metric query model,
// produced by parseQuery and consumed when building the
// GetMetricStatistics request and formatting the response.
type CloudWatchQuery struct {
	Region             string                  // AWS region to query, e.g. "us-east-1"
	Namespace          string                  // metric namespace, e.g. "AWS/EC2"
	MetricName         string                  // CloudWatch metric name
	Dimensions         []*cloudwatch.Dimension // dimension filters, sorted by name
	Statistics         []*string               // standard statistics (Average, Sum, ...)
	ExtendedStatistics []*string               // percentile statistics (e.g. "p90.00")
	Period             int                     // aggregation period in seconds
	Alias              string                  // series alias pattern with {{label}} placeholders
}
Loading…
Reference in new issue