Cloudwatch: Do not parse log query grouping field to float (#102244)

Branch: pull/102368/head
Author: Isabella Siu, committed by GitHub 2 months ago
Parent: ff6a97f1a1
Commit: 8c5a4591fd
Files changed:
  1. pkg/tsdb/cloudwatch/log_actions.go (2 lines changed)
  2. pkg/tsdb/cloudwatch/log_query.go (37 lines changed)
  3. pkg/tsdb/cloudwatch/log_query_test.go (190 lines changed)
  4. pkg/tsdb/cloudwatch/log_sync_query.go (2 lines changed)

@@ -376,7 +376,7 @@ func (e *cloudWatchExecutor) handleGetQueryResults(ctx context.Context, logsClie
return nil, err
}
dataFrame, err := logsResultsToDataframes(getQueryResultsOutput)
dataFrame, err := logsResultsToDataframes(getQueryResultsOutput, logsQuery.StatsGroups)
if err != nil {
return nil, err
}
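
Both conversion call sites in this commit (here and in log_sync_query.go below) now forward logsQuery.StatsGroups, the grouping field names the frontend derives from the query's "stats ... by ..." clause. A minimal sketch of what that argument carries, using a made-up query and field names that are not taken from this commit:

```go
package main

import "fmt"

func main() {
	// Hypothetical Logs Insights query; the "by" expressions are what end up
	// in logsQuery.StatsGroups and are now handed to logsResultsToDataframes
	// as groupingFieldNames (the exact strings here are assumptions).
	query := "fields @timestamp | stats count(*) by @logStream, statusCode"
	statsGroups := []string{"@logStream", "statusCode"}
	fmt.Println(query)
	fmt.Println("grouping fields forwarded to logsResultsToDataframes:", statsGroups)
}
```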

@@ -2,18 +2,18 @@ package cloudwatch
import (
"fmt"
"slices"
"sort"
"strconv"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
const cloudWatchTSFormat = "2006-01-02 15:04:05.000"
func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*data.Frame, error) {
func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput, groupingFieldNames []string) (*data.Frame, error) {
if response == nil {
return nil, fmt.Errorf("response is nil, cannot convert log results to data frames")
}
@@ -59,6 +59,8 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d
// which can be in a millisecond format as well as cloudWatchTSFormat string format
if _, err := time.Parse(cloudWatchTSFormat, *resultField.Value); err == nil || isTimestampField(*resultField.Field) {
fieldValues[*resultField.Field] = make([]*time.Time, rowCount)
} else if slices.Contains[[]string, string](groupingFieldNames, *resultField.Field) {
fieldValues[*resultField.Field] = make([]*string, rowCount)
} else if _, err := strconv.ParseFloat(*resultField.Value, 64); err == nil {
fieldValues[*resultField.Field] = make([]*float64, rowCount)
} else {
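
The branch added above slots into the per-field type selection. Here is a self-contained sketch of the resulting precedence; columnKind is a hypothetical helper (the real function allocates typed backing slices rather than returning a label):

```go
package main

import (
	"fmt"
	"slices"
	"strconv"
	"time"
)

const cloudWatchTSFormat = "2006-01-02 15:04:05.000"

// columnKind mirrors the order of the checks: timestamp fields first, then
// declared grouping fields (always kept as strings after this change), then
// float64 for numeric-looking values, and finally a string fallback.
func columnKind(fieldName, sampleValue string, groupingFieldNames []string) string {
	if _, err := time.Parse(cloudWatchTSFormat, sampleValue); err == nil ||
		fieldName == "@timestamp" || fieldName == "@ingestionTime" {
		return "*time.Time"
	}
	if slices.Contains(groupingFieldNames, fieldName) {
		return "*string" // new: grouping fields are never parsed to float
	}
	if _, err := strconv.ParseFloat(sampleValue, 64); err == nil {
		return "*float64"
	}
	return "*string"
}

func main() {
	groups := []string{"groupingNumber"}
	fmt.Println(columnKind("@timestamp", "1732749534876", groups)) // *time.Time
	fmt.Println(columnKind("numberField", "8", groups))            // *float64
	fmt.Println(columnKind("groupingNumber", "100", groups))       // *string
	fmt.Println(columnKind("@message", "hello", groups))           // *string
}
```

Because grouping columns now leave logsResultsToDataframes as string fields, the numeric-to-string conversion that groupResults performed on them is no longer needed, which is what the next two hunks remove.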
@@ -176,15 +178,6 @@ func groupResults(results *data.Frame, groupingFieldNames []string, fromSyncQuer
for i, field := range results.Fields {
for _, groupingField := range groupingFieldNames {
if field.Name == groupingField {
// convert numeric grouping field to string field
if field.Type().Numeric() {
newField, err := numericFieldToStringField(field)
if err != nil {
return nil, err
}
results.Fields[i] = newField
field = newField
}
// For expressions and alerts to work properly we need to remove non-time grouping fields
if fromSyncQuery && !field.Type().Time() {
removeFieldIndices = append(removeFieldIndices, i)
@@ -298,28 +291,6 @@ func generateLabels(fields []*data.Field, row int) data.Labels {
return labels
}
func numericFieldToStringField(field *data.Field) (*data.Field, error) {
if !field.Type().Numeric() {
return nil, fmt.Errorf("field is not numeric")
}
strings := make([]*string, field.Len())
for i := 0; i < field.Len(); i++ {
floatVal, err := field.FloatAt(i)
if err != nil {
return nil, err
}
strVal := fmt.Sprintf("%g", floatVal)
strings[i] = aws.String(strVal)
}
newField := data.NewField(field.Name, field.Labels, strings)
newField.Config = field.Config
return newField, nil
}
func isTimestampField(fieldName string) bool {
return fieldName == "@timestamp" || fieldName == "@ingestionTime"
}
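
The deleted numericFieldToStringField helper re-rendered parsed grouping values with fmt.Sprintf("%g", ...), so a group key that had first been parsed to float64 could come back in a different textual form. Whether a user ever saw this depends on their data; the sketch below (illustrative values) only shows the mechanism the commit removes:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Round-tripping a grouping value through float64 and "%g", as the removed
	// numericFieldToStringField helper did, can change its textual form.
	original := "1732749534876" // e.g. a large numeric ID used as a group key
	f, _ := strconv.ParseFloat(original, 64)
	fmt.Printf("%g\n", f) // prints 1.732749534876e+12, not 1732749534876
	// A value like "007" would similarly come back as "7". Keeping the raw
	// string, as the new code does, preserves the original text.
}
```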

@@ -128,7 +128,7 @@ func TestLogsResultsToDataframes(t *testing.T) {
},
}
dataframes, err := logsResultsToDataframes(fakeCloudwatchResponse)
dataframes, err := logsResultsToDataframes(fakeCloudwatchResponse, []string{})
require.NoError(t, err)
timeA, err := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 15:04:05.000")
require.NoError(t, err)
@@ -251,7 +251,7 @@ func TestLogsResultsToDataframes_MixedTypes_NumericValuesMixedWithStringFallBack
},
},
Status: aws.String("ok"),
})
}, []string{})
require.NoError(t, err)
expectedDataframe := &data.Frame{
@@ -305,7 +305,7 @@ func TestLogsResultsToDataframes_With_Millisecond_Timestamps(t *testing.T) {
},
},
Status: aws.String("ok"),
})
}, []string{})
require.NoError(t, err)
timeStampResult := time.Unix(timestampField/1000, (timestampField%1000)*int64(time.Millisecond))
@@ -344,6 +344,59 @@ func TestLogsResultsToDataframes_With_Millisecond_Timestamps(t *testing.T) {
assert.ElementsMatch(t, expectedDataframe.Fields, dataframes.Fields)
}
func TestLogsResultsToDataframes_With_Int_Grouping_Field(t *testing.T) {
timestampField := int64(1732749534876)
dataframes, err := logsResultsToDataframes(&cloudwatchlogs.GetQueryResultsOutput{
Results: [][]*cloudwatchlogs.ResultField{
{
&cloudwatchlogs.ResultField{
Field: aws.String("@timestamp"),
Value: aws.String(fmt.Sprintf("%d", timestampField)),
},
&cloudwatchlogs.ResultField{
Field: aws.String("numberField"),
Value: aws.String("8"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("groupingNumber"),
Value: aws.String("100"),
},
},
},
Status: aws.String("ok"),
}, []string{"groupingNumber"})
require.NoError(t, err)
timeStampResult := time.Unix(timestampField/1000, (timestampField%1000)*int64(time.Millisecond))
require.NoError(t, err)
expectedDataframe := &data.Frame{
Name: "CloudWatchLogsResponse",
Fields: []*data.Field{
data.NewField("@timestamp", nil, []*time.Time{
&timeStampResult,
}),
data.NewField("numberField", nil, []*float64{aws.Float64(8)}),
data.NewField("groupingNumber", nil, []*string{
aws.String("100"),
}),
},
RefID: "",
Meta: &data.FrameMeta{
Custom: map[string]any{
"Status": "ok",
},
},
}
expectedDataframe.Fields[0].SetConfig(&data.FieldConfig{DisplayName: "Time"})
assert.Equal(t, expectedDataframe.Name, dataframes.Name)
assert.Equal(t, expectedDataframe.RefID, dataframes.RefID)
assert.Equal(t, expectedDataframe.Meta, dataframes.Meta)
assert.ElementsMatch(t, expectedDataframe.Fields, dataframes.Fields)
}
func TestGroupKeyGeneration(t *testing.T) {
logField := data.NewField("@log", data.Labels{}, []*string{
aws.String("fakelog-a"),
@@ -489,127 +542,6 @@ func TestGroupingResults(t *testing.T) {
assert.ElementsMatch(t, expectedGroupedFrames, groupedResults)
}
func TestGroupingResultsWithNumericField(t *testing.T) {
timeA, err := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 15:04:05.000")
require.NoError(t, err)
timeB, err := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 16:04:05.000")
require.NoError(t, err)
timeC, err := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 17:04:05.000")
require.NoError(t, err)
timeVals := []*time.Time{
&timeA, &timeA, &timeA, &timeB, &timeB, &timeB, &timeC, &timeC, &timeC,
}
timeField := data.NewField("@timestamp", data.Labels{}, timeVals)
httpResponseField := data.NewField("httpresponse", data.Labels{}, []*float64{
aws.Float64(400),
aws.Float64(404),
aws.Float64(500),
aws.Float64(400),
aws.Float64(404),
aws.Float64(500),
aws.Float64(400),
aws.Float64(404),
aws.Float64(500),
})
countField := data.NewField("count", data.Labels{}, []*string{
aws.String("100"),
aws.String("150"),
aws.String("20"),
aws.String("34"),
aws.String("57"),
aws.String("62"),
aws.String("105"),
aws.String("200"),
aws.String("99"),
})
fakeDataFrame := &data.Frame{
Name: "CloudWatchLogsResponse",
Fields: []*data.Field{
timeField,
httpResponseField,
countField,
},
RefID: "",
}
groupedTimeVals := []*time.Time{
&timeA, &timeB, &timeC,
}
groupedTimeField := data.NewField("@timestamp", data.Labels{}, groupedTimeVals)
groupedHttpResponseFieldA := data.NewField("httpresponse", data.Labels{}, []*string{
aws.String("400"),
aws.String("400"),
aws.String("400"),
})
groupedCountFieldA := data.NewField("count", data.Labels{}, []*string{
aws.String("100"),
aws.String("34"),
aws.String("105"),
})
groupedHttpResponseFieldB := data.NewField("httpresponse", data.Labels{}, []*string{
aws.String("404"),
aws.String("404"),
aws.String("404"),
})
groupedCountFieldB := data.NewField("count", data.Labels{}, []*string{
aws.String("150"),
aws.String("57"),
aws.String("200"),
})
groupedHttpResponseFieldC := data.NewField("httpresponse", data.Labels{}, []*string{
aws.String("500"),
aws.String("500"),
aws.String("500"),
})
groupedCountFieldC := data.NewField("count", data.Labels{}, []*string{
aws.String("20"),
aws.String("62"),
aws.String("99"),
})
expectedGroupedFrames := []*data.Frame{
{
Name: "400",
Fields: []*data.Field{
groupedTimeField,
groupedHttpResponseFieldA,
groupedCountFieldA,
},
RefID: "",
},
{
Name: "404",
Fields: []*data.Field{
groupedTimeField,
groupedHttpResponseFieldB,
groupedCountFieldB,
},
RefID: "",
},
{
Name: "500",
Fields: []*data.Field{
groupedTimeField,
groupedHttpResponseFieldC,
groupedCountFieldC,
},
RefID: "",
},
}
groupedResults, err := groupResults(fakeDataFrame, []string{"httpresponse"}, false)
require.NoError(t, err)
assert.ElementsMatch(t, expectedGroupedFrames, groupedResults)
}
func TestGroupingResultsWithFromSyncQueryTrue(t *testing.T) {
logField := data.NewField("@log", data.Labels{}, []*string{
aws.String("fakelog-a"),
@@ -618,11 +550,11 @@ func TestGroupingResultsWithFromSyncQueryTrue(t *testing.T) {
aws.String("fakelog-b"),
})
streamField := data.NewField("stream", data.Labels{}, []*int32{
aws.Int32(1),
aws.Int32(1),
aws.Int32(1),
aws.Int32(1),
streamField := data.NewField("stream", data.Labels{}, []*string{
aws.String("1"),
aws.String("1"),
aws.String("1"),
aws.String("1"),
})
countField := data.NewField("count", data.Labels{}, []*string{

@@ -63,7 +63,7 @@ var executeSyncLogQuery = func(ctx context.Context, e *cloudWatchExecutor, req *
return nil, err
}
dataframe, err := logsResultsToDataframes(getQueryResultsOutput)
dataframe, err := logsResultsToDataframes(getQueryResultsOutput, logsQuery.StatsGroups)
if err != nil {
return nil, err
}
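
The synchronous query path gets the same one-line change as handleGetQueryResults above, so both conversion call sites now pass logsQuery.StatsGroups. The comment retained in groupResults ("For expressions and alerts to work properly...") suggests this is the path whose grouped output feeds expressions and alerting, so its grouping columns now stay consistently typed as strings from conversion through grouping.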
