elastic: backend-mode: fix trimEdges functionality (#56985)

Branch: pull/57256/head
Gábor Farkas authored 3 years ago, committed by GitHub
Parent: f0b882ebad
Commit: 0491c19712
  1. pkg/tsdb/elasticsearch/response_parser.go (12 lines changed)
  2. pkg/tsdb/elasticsearch/response_parser_test.go (50 lines changed)
  3. pkg/tsdb/elasticsearch/testdata/trimedges_string.golden.jsonc (68 lines changed)

@@ -537,10 +537,14 @@ func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, targe
 	for _, frame := range frames {
 		for _, field := range frame.Fields {
 			if field.Len() > trimEdges*2 {
-				for i := 0; i < field.Len(); i++ {
-					if i < trimEdges || i >= field.Len()-trimEdges {
-						field.Delete(i)
-					}
+				// first we delete the first "trim" items
+				for i := 0; i < trimEdges; i++ {
+					field.Delete(0)
+				}
+
+				// then we delete the last "trim" items
+				for i := 0; i < trimEdges; i++ {
+					field.Delete(field.Len() - 1)
 				}
 			}
 		}

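Reviewer note (not part of the commit): the old loop deleted elements while iterating by index, so every `Delete` shifted the remaining items left and the index test no longer lined up with the datapoints it was meant to trim. A minimal, runnable sketch of the same idea on a plain slice is below; the helper names are invented for illustration and are not the SDK's Field API.

```go
package main

import "fmt"

// trimEdgesOld mimics the buggy approach: deleting by index while iterating.
// Each deletion shifts later elements left, so the wrong items get removed.
func trimEdgesOld(values []int, trim int) []int {
	for i := 0; i < len(values); i++ {
		if i < trim || i >= len(values)-trim {
			values = append(values[:i], values[i+1:]...) // delete element i
		}
	}
	return values
}

// trimEdgesNew mirrors the committed fix: drop the first `trim` items,
// then the last `trim` items, with no index bookkeeping.
func trimEdgesNew(values []int, trim int) []int {
	if len(values) <= trim*2 {
		return values
	}
	for i := 0; i < trim; i++ {
		values = values[1:] // delete the first element
	}
	for i := 0; i < trim; i++ {
		values = values[:len(values)-1] // delete the last element
	}
	return values
}

func main() {
	data := []int{10, 20, 30, 40, 50, 60, 70, 80, 90}
	fmt.Println(trimEdgesOld(append([]int(nil), data...), 3)) // [20 40 60 80], not the intended middle
	fmt.Println(trimEdgesNew(append([]int(nil), data...), 3)) // [40 50 60]
}
```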
@@ -7,6 +7,7 @@ import (
 	"time"

 	"github.com/grafana/grafana-plugin-sdk-go/backend"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental"
 	es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -776,6 +777,55 @@ func TestResponseParser(t *testing.T) {
 		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 	})

+	t.Run("Larger trimEdges value", func(t *testing.T) {
+		targets := map[string]string{
+			"A": `{
+				"timeField": "@timestamp",
+				"metrics": [{ "type": "count" }],
+				"bucketAggs": [
+					{
+						"type": "date_histogram",
+						"field": "@timestamp",
+						"id": "2",
+						"settings": { "trimEdges": "3" }
+					}
+				]
+			}`,
+		}
+
+		response := `{
+			"responses": [
+				{
+					"aggregations": {
+						"2": {
+							"buckets": [
+								{ "key": 1000, "doc_count": 10},
+								{ "key": 2000, "doc_count": 20},
+								{ "key": 3000, "doc_count": 30},
+								{ "key": 4000, "doc_count": 40},
+								{ "key": 5000, "doc_count": 50},
+								{ "key": 6000, "doc_count": 60},
+								{ "key": 7000, "doc_count": 70},
+								{ "key": 8000, "doc_count": 80},
+								{ "key": 9000, "doc_count": 90}
+							]
+						}
+					}
+				}
+			]
+		}`
+
+		rp, err := newResponseParserForTest(targets, response)
+		require.NoError(t, err)
+		result, err := rp.getTimeSeries()
+		require.NoError(t, err)
+		require.Len(t, result.Responses, 1)
+		queryRes := result.Responses["A"]
+		require.NotNil(t, queryRes)
+
+		experimental.CheckGoldenJSONResponse(t, "testdata", "trimedges_string.golden", &queryRes, false)
+	})
+
 	t.Run("No group by time", func(t *testing.T) {
 		targets := map[string]string{
 			"A": `{

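A quick sanity check on the test's expectation (illustrative only, not part of the commit): with nine buckets and trimEdges set to 3, trimming applies because 9 > 3*2, so only the middle three datapoints (keys 4000 through 6000) should survive. That is exactly what the new golden file below records.

```go
package main

import "fmt"

func main() {
	keys := []int{1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000}
	trim := 3
	// Trimming only applies when there are more than trim*2 datapoints.
	if len(keys) > trim*2 {
		keys = keys[trim : len(keys)-trim]
	}
	fmt.Println(keys) // [4000 5000 6000]
}
```

The final boolean passed to CheckGoldenJSONResponse appears to be the golden-file update flag (an assumption about the experimental package's API at this version): flipping it to true for a single local run regenerates testdata/trimedges_string.golden.jsonc, and it should be set back to false before committing.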
@@ -0,0 +1,68 @@
+// 🌟 This was machine generated. Do not edit. 🌟
+//
+// Frame[0] {
+//   "custom": null
+// }
+// Name:
+// Dimensions: 2 Fields by 3 Rows
+// +-------------------------------+------------------+
+// | Name: time                    | Name: value      |
+// | Labels:                       | Labels:          |
+// | Type: []time.Time             | Type: []*float64 |
+// +-------------------------------+------------------+
+// | 1970-01-01 00:00:04 +0000 UTC | 40               |
+// | 1970-01-01 00:00:05 +0000 UTC | 50               |
+// | 1970-01-01 00:00:06 +0000 UTC | 60               |
+// +-------------------------------+------------------+
+//
+//
+// 🌟 This was machine generated. Do not edit. 🌟
+{
+  "frames": [
+    {
+      "schema": {
+        "meta": {
+          "custom": null
+        },
+        "fields": [
+          {
+            "name": "time",
+            "type": "time",
+            "typeInfo": {
+              "frame": "time.Time"
+            },
+            "config": {
+              "displayNameFromDS": "Count"
+            }
+          },
+          {
+            "name": "value",
+            "type": "number",
+            "typeInfo": {
+              "frame": "float64",
+              "nullable": true
+            },
+            "labels": {},
+            "config": {
+              "displayNameFromDS": "Count"
+            }
+          }
+        ]
+      },
+      "data": {
+        "values": [
+          [
+            4000,
+            5000,
+            6000
+          ],
+          [
+            40,
+            50,
+            60
+          ]
+        ]
+      }
+    }
+  ]
+}