From d4f785d20a66c93d3da4c9ecefab78bf0c0ca366 Mon Sep 17 00:00:00 2001 From: Giordano Ricci Date: Thu, 24 Sep 2020 18:06:19 +0100 Subject: [PATCH] Elasticsearch: Add query's refId to each series returned by a query (#27614) --- .../datasource/elasticsearch/bucket_agg.ts | 2 +- .../elasticsearch/elastic_response.ts | 25 +- .../specs/elastic_response.test.ts | 248 +++++++++++++++++- 3 files changed, 266 insertions(+), 9 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/bucket_agg.ts b/public/app/plugins/datasource/elasticsearch/bucket_agg.ts index be7573007a4..e2a754ae859 100644 --- a/public/app/plugins/datasource/elasticsearch/bucket_agg.ts +++ b/public/app/plugins/datasource/elasticsearch/bucket_agg.ts @@ -32,7 +32,7 @@ export class ElasticBucketAggCtrl { ); $scope.init = () => { - $scope.agg = bucketAggs[$scope.index]; + $scope.agg = bucketAggs[$scope.index] || {}; $scope.validateModel(); }; diff --git a/public/app/plugins/datasource/elasticsearch/elastic_response.ts b/public/app/plugins/datasource/elasticsearch/elastic_response.ts index 9d9c1dff0aa..7b10665d271 100644 --- a/public/app/plugins/datasource/elasticsearch/elastic_response.ts +++ b/public/app/plugins/datasource/elasticsearch/elastic_response.ts @@ -30,7 +30,7 @@ export class ElasticResponse { switch (metric.type) { case 'count': { - newSeries = { datapoints: [], metric: 'count', props: props }; + newSeries = { datapoints: [], metric: 'count', props: props, refId: target.refId }; for (i = 0; i < esAgg.buckets.length; i++) { bucket = esAgg.buckets[i]; value = bucket.doc_count; @@ -53,6 +53,7 @@ export class ElasticResponse { metric: 'p' + percentileName, props: props, field: metric.field, + refId: target.refId, }; for (i = 0; i < esAgg.buckets.length; i++) { @@ -76,6 +77,7 @@ export class ElasticResponse { metric: statName, props: props, field: metric.field, + refId: target.refId, }; for (i = 0; i < esAgg.buckets.length; i++) { @@ -101,6 +103,7 @@ export class ElasticResponse { field: metric.field, metricId: metric.id, props: props, + refId: target.refId, }; for (i = 0; i < esAgg.buckets.length; i++) { bucket = esAgg.buckets[i]; @@ -200,7 +203,7 @@ export class ElasticResponse { // This is quite complex // need to recurse down the nested buckets to build series - processBuckets(aggs: any, target: any, seriesList: any, table: any, props: any, depth: any) { + processBuckets(aggs: any, target: any, seriesList: any, table: TableModel, props: any, depth: any) { let bucket, aggDef: any, esAgg, aggId; const maxDepth = target.bucketAggs.length - 1; @@ -324,12 +327,13 @@ export class ElasticResponse { } } - processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[]) { + processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[], target: any) { const hitsTotal = typeof hits.total === 'number' ? 
hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+ const series: any = { - target: 'docs', + target: target.refId, type: 'docs', + refId: target.refId, datapoints: [], total: hitsTotal, filterable: true, @@ -438,6 +442,8 @@ export class ElasticResponse { if (isLogsRequest) { series = addPreferredVisualisationType(series, 'logs'); } + const target = this.targets[n]; + series.refId = target.refId; dataFrame.push(series); } } @@ -453,7 +459,9 @@ export class ElasticResponse { this.nameSeries(tmpSeriesList, target); if (table.rows.length > 0) { - dataFrame.push(toDataFrame(table)); + const series = toDataFrame(table); + series.refId = target.refId; + dataFrame.push(series); } for (let y = 0; y < tmpSeriesList.length; y++) { @@ -464,6 +472,7 @@ export class ElasticResponse { series = addPreferredVisualisationType(series, 'graph'); } + series.refId = target.refId; dataFrame.push(series); } } @@ -477,19 +486,21 @@ export class ElasticResponse { for (let i = 0; i < this.response.responses.length; i++) { const response = this.response.responses[i]; + const target = this.targets[i]; + if (response.error) { throw this.getErrorFromElasticResponse(this.response, response.error); } if (response.hits && response.hits.hits.length > 0) { - this.processHits(response.hits, seriesList); + this.processHits(response.hits, seriesList, target); } if (response.aggregations) { const aggregations = response.aggregations; - const target = this.targets[i]; const tmpSeriesList: any[] = []; const table = new TableModel(); + table.refId = target.refId; this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0); this.trimDatapoints(tmpSeriesList, target); diff --git a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts index 4ec0dd8c8cc..5d14b69445c 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts @@ -3,10 +3,256 @@ import { ElasticResponse } from '../elastic_response'; import flatten from 'app/core/utils/flatten'; describe('ElasticResponse', () => { - let targets; + let targets: any; let response: any; let result: any; + describe('refId matching', () => { + // We default to the old table structure to ensure backward compatibility, + // therefore we only process responses as DataFrames when there's at least one + // raw_data (new) query type. 
+    // We should test that refId gets populated whether or not such a query type is present
+    const countQuery = {
+      target: {
+        refId: 'COUNT_GROUPBY_DATE_HISTOGRAM',
+        metrics: [{ type: 'count', id: 'c_1' }],
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'c_2' }],
+      },
+      response: {
+        aggregations: {
+          c_2: {
+            buckets: [
+              {
+                doc_count: 10,
+                key: 1000,
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const countGroupByHistogramQuery = {
+      target: {
+        refId: 'COUNT_GROUPBY_HISTOGRAM',
+        metrics: [{ type: 'count', id: 'h_3' }],
+        bucketAggs: [{ type: 'histogram', field: 'bytes', id: 'h_4' }],
+      },
+      response: {
+        aggregations: {
+          h_4: {
+            buckets: [{ doc_count: 1, key: 1000 }],
+          },
+        },
+      },
+    };
+
+    const rawDocumentQuery = {
+      target: {
+        refId: 'RAW_DOC',
+        metrics: [{ type: 'raw_document', id: 'r_5' }],
+        bucketAggs: [],
+      },
+      response: {
+        hits: {
+          total: 2,
+          hits: [
+            {
+              _id: '5',
+              _type: 'type',
+              _index: 'index',
+              _source: { sourceProp: 'asd' },
+              fields: { fieldProp: 'field' },
+            },
+            {
+              _source: { sourceProp: 'asd2' },
+              fields: { fieldProp: 'field2' },
+            },
+          ],
+        },
+      },
+    };
+
+    const percentilesQuery = {
+      target: {
+        refId: 'PERCENTILE',
+        metrics: [{ type: 'percentiles', settings: { percents: [75, 90] }, id: 'p_1' }],
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'p_3' }],
+      },
+      response: {
+        aggregations: {
+          p_3: {
+            buckets: [
+              {
+                p_1: { values: { '75': 3.3, '90': 5.5 } },
+                doc_count: 10,
+                key: 1000,
+              },
+              {
+                p_1: { values: { '75': 2.3, '90': 4.5 } },
+                doc_count: 15,
+                key: 2000,
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const extendedStatsQuery = {
+      target: {
+        refId: 'EXTENDEDSTATS',
+        metrics: [
+          {
+            type: 'extended_stats',
+            meta: { max: true, std_deviation_bounds_upper: true },
+            id: 'e_1',
+          },
+        ],
+        bucketAggs: [
+          { type: 'terms', field: 'host', id: 'e_3' },
+          { type: 'date_histogram', id: 'e_4' },
+        ],
+      },
+      response: {
+        aggregations: {
+          e_3: {
+            buckets: [
+              {
+                key: 'server1',
+                e_4: {
+                  buckets: [
+                    {
+                      e_1: {
+                        max: 10.2,
+                        min: 5.5,
+                        std_deviation_bounds: { upper: 3, lower: -2 },
+                      },
+                      doc_count: 10,
+                      key: 1000,
+                    },
+                  ],
+                },
+              },
+              {
+                key: 'server2',
+                e_4: {
+                  buckets: [
+                    {
+                      e_1: {
+                        max: 10.2,
+                        min: 5.5,
+                        std_deviation_bounds: { upper: 3, lower: -2 },
+                      },
+                      doc_count: 10,
+                      key: 1000,
+                    },
+                  ],
+                },
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const commonTargets = [
+      { ...countQuery.target },
+      { ...countGroupByHistogramQuery.target },
+      { ...rawDocumentQuery.target },
+      { ...percentilesQuery.target },
+      { ...extendedStatsQuery.target },
+    ];
+
+    const commonResponses = [
+      { ...countQuery.response },
+      { ...countGroupByHistogramQuery.response },
+      { ...rawDocumentQuery.response },
+      { ...percentilesQuery.response },
+      { ...extendedStatsQuery.response },
+    ];
+
+    describe('When processing responses as DataFrames (raw_data query present)', () => {
+      beforeEach(() => {
+        targets = [
+          ...commonTargets,
+          // Raw Data Query
+          {
+            refId: 'D',
+            metrics: [{ type: 'raw_data', id: '6' }],
+            bucketAggs: [],
+          },
+        ];
+
+        response = {
+          responses: [
+            ...commonResponses,
+            // Raw Data Query
+            {
+              hits: {
+                total: {
+                  relation: 'eq',
+                  value: 1,
+                },
+                hits: [
+                  {
+                    _id: '6',
+                    _type: '_doc',
+                    _index: 'index',
+                    _source: { sourceProp: 'asd' },
+                  },
+                ],
+              },
+            },
+          ],
+        };
+
+        result = new ElasticResponse(targets, response).getTimeSeries();
+      });
+
+      it('should add the correct refId to each returned series', () => {
+        expect(result.data[0].refId).toBe(countQuery.target.refId);
+
+        expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId);
+
+        expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId);
+
+        expect(result.data[3].refId).toBe(percentilesQuery.target.refId);
+        expect(result.data[4].refId).toBe(percentilesQuery.target.refId);
+
+        expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId);
+
+        // Raw Data query
+        expect(result.data[result.data.length - 1].refId).toBe('D');
+      });
+    });
+
+    describe('When NOT processing responses as DataFrames (raw_data query NOT present)', () => {
+      beforeEach(() => {
+        targets = [...commonTargets];
+
+        response = {
+          responses: [...commonResponses],
+        };
+
+        result = new ElasticResponse(targets, response).getTimeSeries();
+      });
+
+      it('should add the correct refId to each returned series', () => {
+        expect(result.data[0].refId).toBe(countQuery.target.refId);
+
+        expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId);
+
+        expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId);
+
+        expect(result.data[3].refId).toBe(percentilesQuery.target.refId);
+        expect(result.data[4].refId).toBe(percentilesQuery.target.refId);
+
+        expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId);
+      });
+    });
+  });
+
   describe('simple query and count', () => {
     beforeEach(() => {
       targets = [