From 45bd08baf50dcd4bb64b8ddfd838bd9e63ff2c08 Mon Sep 17 00:00:00 2001 From: Florian Verdonck Date: Tue, 17 Jun 2025 21:20:28 +0200 Subject: [PATCH] ElasticSearch: Remove frontend response parsing (#104148) * chore: Remove ElasticResponse * Ran betterer --- .betterer.results | 34 - .../elasticsearch/ElasticResponse.test.ts | 1558 ----------------- .../elasticsearch/ElasticResponse.ts | 807 --------- 3 files changed, 2399 deletions(-) delete mode 100644 public/app/plugins/datasource/elasticsearch/ElasticResponse.test.ts delete mode 100644 public/app/plugins/datasource/elasticsearch/ElasticResponse.ts diff --git a/.betterer.results b/.betterer.results index 6072c56899c..c766d6f60e8 100644 --- a/.betterer.results +++ b/.betterer.results @@ -3383,40 +3383,6 @@ exports[`better eslint`] = { [0, 0, 0, "Do not use any type assertions.", "0"], [0, 0, 0, "Do not use any type assertions.", "1"] ], - "public/app/plugins/datasource/elasticsearch/ElasticResponse.ts:5381": [ - [0, 0, 0, "Do not use any type assertions.", "0"], - [0, 0, 0, "Do not use any type assertions.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Unexpected any. Specify a different type.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"], - [0, 0, 0, "Unexpected any. Specify a different type.", "5"], - [0, 0, 0, "Unexpected any. Specify a different type.", "6"], - [0, 0, 0, "Unexpected any. Specify a different type.", "7"], - [0, 0, 0, "Unexpected any. Specify a different type.", "8"], - [0, 0, 0, "Unexpected any. Specify a different type.", "9"], - [0, 0, 0, "Unexpected any. Specify a different type.", "10"], - [0, 0, 0, "Unexpected any. Specify a different type.", "11"], - [0, 0, 0, "Unexpected any. Specify a different type.", "12"], - [0, 0, 0, "Unexpected any. Specify a different type.", "13"], - [0, 0, 0, "Unexpected any. Specify a different type.", "14"], - [0, 0, 0, "Unexpected any. Specify a different type.", "15"], - [0, 0, 0, "Unexpected any. Specify a different type.", "16"], - [0, 0, 0, "Unexpected any. Specify a different type.", "17"], - [0, 0, 0, "Unexpected any. Specify a different type.", "18"], - [0, 0, 0, "Unexpected any. Specify a different type.", "19"], - [0, 0, 0, "Unexpected any. Specify a different type.", "20"], - [0, 0, 0, "Unexpected any. Specify a different type.", "21"], - [0, 0, 0, "Unexpected any. Specify a different type.", "22"], - [0, 0, 0, "Unexpected any. Specify a different type.", "23"], - [0, 0, 0, "Unexpected any. Specify a different type.", "24"], - [0, 0, 0, "Unexpected any. Specify a different type.", "25"], - [0, 0, 0, "Unexpected any. Specify a different type.", "26"], - [0, 0, 0, "Unexpected any. Specify a different type.", "27"], - [0, 0, 0, "Unexpected any. Specify a different type.", "28"], - [0, 0, 0, "Unexpected any. Specify a different type.", "29"], - [0, 0, 0, "Unexpected any. Specify a different type.", "30"], - [0, 0, 0, "Unexpected any. Specify a different type.", "31"] - ], "public/app/plugins/datasource/elasticsearch/LanguageProvider.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], [0, 0, 0, "Unexpected any. 
Specify a different type.", "1"], diff --git a/public/app/plugins/datasource/elasticsearch/ElasticResponse.test.ts b/public/app/plugins/datasource/elasticsearch/ElasticResponse.test.ts deleted file mode 100644 index 0f8a9aa059d..00000000000 --- a/public/app/plugins/datasource/elasticsearch/ElasticResponse.test.ts +++ /dev/null @@ -1,1558 +0,0 @@ -import { DataFrame, DataFrameView, Field, FieldCache, FieldType, KeyValue, MutableDataFrame } from '@grafana/data'; - -import { ElasticResponse } from './ElasticResponse'; -import { highlightTags } from './queryDef'; -import { ElasticsearchQuery } from './types'; -import { flattenObject } from './utils'; - -function getTimeField(frame: DataFrame): Field { - const field = frame.fields[0]; - if (field.type !== FieldType.time) { - throw new Error('first field should be the time-field'); - } - return field; -} - -function getValueField(frame: DataFrame): Field { - const field = frame.fields[1]; - if (field.type !== FieldType.number) { - throw new Error('second field should be the number-field'); - } - return field; -} - -describe('ElasticResponse', () => { - let targets: ElasticsearchQuery[]; - let response: { - responses: unknown[]; - }; - let result: { - data: DataFrame[]; - }; - - describe('refId matching', () => { - // We default to the old table structure to ensure backward compatibility, - // therefore we only process responses as DataFrames when there's at least one - // raw_data (new) query type. - // We should test that refId gets populated whether or not there's such a query type - - const countQuery: MockedQueryData = { - target: { - refId: 'COUNT_GROUPBY_DATE_HISTOGRAM', - metrics: [{ type: 'count', id: 'c_1' }], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'c_2' }], - } as ElasticsearchQuery, - response: { - aggregations: { - c_2: { - buckets: [ - { - doc_count: 10, - key: 1000, - }, - ], - }, - }, - }, - }; - - const countGroupByHistogramQuery: MockedQueryData = { - target: { - refId: 'COUNT_GROUPBY_HISTOGRAM', - metrics: [{ type: 'count', id: 'h_3' }], - bucketAggs: [{ type: 'histogram', field: 'bytes', id: 'h_4' }], - }, - response: { - aggregations: { - h_4: { - buckets: [{ doc_count: 1, key: 1000 }], - }, - }, - }, - }; - - const rawDocumentQuery: MockedQueryData = { - target: { - refId: 'RAW_DOC', - metrics: [{ type: 'raw_document', id: 'r_5' }], - bucketAggs: [], - }, - response: { - hits: { - total: 2, - hits: [ - { - _id: '5', - _type: 'type', - _index: 'index', - _source: { sourceProp: 'asd' }, - fields: { fieldProp: 'field' }, - }, - { - _source: { sourceProp: 'asd2' }, - fields: { fieldProp: 'field2' }, - }, - ], - }, - }, - }; - - const percentilesQuery: MockedQueryData = { - target: { - refId: 'PERCENTILE', - metrics: [{ type: 'percentiles', settings: { percents: ['75', '90'] }, id: 'p_1' }], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'p_3' }], - }, - response: { - aggregations: { - p_3: { - buckets: [ - { - p_1: { values: { '75': 3.3, '90': 5.5 } }, - doc_count: 10, - key: 1000, - }, - { - p_1: { values: { '75': 2.3, '90': 4.5 } }, - doc_count: 15, - key: 2000, - }, - ], - }, - }, - }, - }; - - const extendedStatsQuery: MockedQueryData = { - target: { - refId: 'EXTENDEDSTATS', - metrics: [ - { - type: 'extended_stats', - meta: { max: true, std_deviation_bounds_upper: true }, - id: 'e_1', - }, - ], - bucketAggs: [ - { type: 'terms', field: 'host', id: 'e_3' }, - { type: 'date_histogram', id: 'e_4' }, - ], - }, - response: { - aggregations: { - e_3: { - buckets: [ - { - key: 
'server1', - e_4: { - buckets: [ - { - e_1: { - max: 10.2, - min: 5.5, - std_deviation_bounds: { upper: 3, lower: -2 }, - }, - doc_count: 10, - key: 1000, - }, - ], - }, - }, - { - key: 'server2', - e_4: { - buckets: [ - { - e_1: { - max: 10.2, - min: 5.5, - std_deviation_bounds: { upper: 3, lower: -2 }, - }, - doc_count: 10, - key: 1000, - }, - ], - }, - }, - ], - }, - }, - }, - }; - - const commonTargets = [ - { ...countQuery.target }, - { ...countGroupByHistogramQuery.target }, - { ...rawDocumentQuery.target }, - { ...percentilesQuery.target }, - { ...extendedStatsQuery.target }, - ]; - - const commonResponses = [ - { ...countQuery.response }, - { ...countGroupByHistogramQuery.response }, - { ...rawDocumentQuery.response }, - { ...percentilesQuery.response }, - { ...extendedStatsQuery.response }, - ]; - - describe('When processing responses as DataFrames (raw_data query present)', () => { - beforeEach(() => { - targets = [ - ...commonTargets, - // Raw Data Query - { - refId: 'D', - metrics: [{ type: 'raw_data', id: '6' }], - bucketAggs: [], - }, - ]; - - response = { - responses: [ - ...commonResponses, - // Raw Data Query - { - hits: { - total: { - relation: 'eq', - value: 1, - }, - hits: [ - { - _id: '6', - _type: '_doc', - _index: 'index', - _source: { sourceProp: 'asd' }, - }, - ], - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should add the correct refId to each returned series', () => { - expect(result.data[0].refId).toBe(countQuery.target.refId); - - expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId); - - expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId); - - expect(result.data[3].refId).toBe(percentilesQuery.target.refId); - expect(result.data[4].refId).toBe(percentilesQuery.target.refId); - - expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId); - - // Raw Data query - expect(result.data[result.data.length - 1].refId).toBe('D'); - }); - }); - - describe('When NOT processing responses as DataFrames (raw_data query NOT present)', () => { - beforeEach(() => { - targets = [...commonTargets]; - - response = { - responses: [...commonResponses], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should add the correct refId to each returned series', () => { - expect(result.data[0].refId).toBe(countQuery.target.refId); - - expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId); - - expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId); - - expect(result.data[3].refId).toBe(percentilesQuery.target.refId); - expect(result.data[4].refId).toBe(percentilesQuery.target.refId); - - expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId); - }); - }); - }); - - describe('simple query and count', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - doc_count: 10, - key: 1000, - }, - { - doc_count: 15, - key: 2000, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 1 series', () => { - expect(result.data.length).toBe(1); - const frame = result.data[0]; - expect(frame.name).toBe('Count'); - expect(frame.length).toBe(2); - expect(getTimeField(frame).values[0]).toBe(1000); - expect(getValueField(frame).values[0]).toBe(10); - 
}); - }); - - describe('simple query count & avg aggregation', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { type: 'count', id: '1' }, - { type: 'avg', field: 'value', id: '2' }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '3': { - buckets: [ - { - '2': { value: 88 }, - doc_count: 10, - key: 1000, - }, - { - '2': { value: 99 }, - doc_count: 15, - key: 2000, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 2 series', () => { - expect(result.data.length).toBe(2); - const frame1 = result.data[0]; - const frame2 = result.data[1]; - expect(frame1.length).toBe(2); - expect(getValueField(frame1).values[0]).toBe(10); - expect(getTimeField(frame1).values[0]).toBe(1000); - - expect(frame2.name).toBe('Average value'); - expect(getValueField(frame2).values).toEqual([88, 99]); - }); - }); - - describe('single group by query one metric', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [ - { type: 'terms', field: 'host', id: '2' }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '3': { - buckets: [ - { doc_count: 1, key: 1000 }, - { doc_count: 3, key: 2000 }, - ], - }, - doc_count: 4, - key: 'server1', - }, - { - '3': { - buckets: [ - { doc_count: 2, key: 1000 }, - { doc_count: 8, key: 2000 }, - ], - }, - doc_count: 10, - key: 'server2', - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 2 series', () => { - expect(result.data.length).toBe(2); - const frame1 = result.data[0]; - const frame2 = result.data[1]; - expect(frame1.length).toBe(2); - expect(frame1.name).toBe('server1'); - expect(frame2.name).toBe('server2'); - }); - }); - - describe('single group by query two metrics', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { type: 'count', id: '1' }, - { type: 'avg', field: '@value', id: '4' }, - ], - bucketAggs: [ - { type: 'terms', field: 'host', id: '2' }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '3': { - buckets: [ - { '4': { value: 10 }, doc_count: 1, key: 1000 }, - { '4': { value: 12 }, doc_count: 3, key: 2000 }, - ], - }, - doc_count: 4, - key: 'server1', - }, - { - '3': { - buckets: [ - { '4': { value: 20 }, doc_count: 1, key: 1000 }, - { '4': { value: 32 }, doc_count: 3, key: 2000 }, - ], - }, - doc_count: 10, - key: 'server2', - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 4 series', () => { - expect(result.data.length).toBe(4); - expect(result.data[0].length).toBe(2); - expect(result.data[0].name).toBe('server1 Count'); - expect(result.data[1].name).toBe('server1 Average @value'); - expect(result.data[2].name).toBe('server2 Count'); - expect(result.data[3].name).toBe('server2 Average @value'); - }); - }); - - describe('with percentiles ', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'percentiles', 
settings: { percents: ['75', '90'] }, id: '1', field: '@value' }], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '3': { - buckets: [ - { - '1': { values: { '75': 3.3, '90': 5.5 } }, - doc_count: 10, - key: 1000, - }, - { - '1': { values: { '75': 2.3, '90': 4.5 } }, - doc_count: 15, - key: 2000, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 2 series', () => { - expect(result.data.length).toBe(2); - expect(result.data[0].length).toBe(2); - expect(result.data[0].name).toBe('p75 @value'); - expect(result.data[1].name).toBe('p90 @value'); - expect(getValueField(result.data[0]).values[0]).toBe(3.3); - expect(getTimeField(result.data[0]).values[0]).toBe(1000); - expect(getValueField(result.data[1]).values[1]).toBe(4.5); - }); - }); - - describe('with extended_stats', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { - type: 'extended_stats', - meta: { max: true, std_deviation_bounds_upper: true }, - id: '1', - field: '@value', - }, - ], - bucketAggs: [ - { type: 'terms', field: 'host', id: '3' }, - { type: 'date_histogram', id: '4' }, - ], - }, - ]; - response = { - responses: [ - { - aggregations: { - '3': { - buckets: [ - { - key: 'server1', - '4': { - buckets: [ - { - '1': { - max: 10.2, - min: 5.5, - std_deviation_bounds: { upper: 3, lower: -2 }, - }, - doc_count: 10, - key: 1000, - }, - ], - }, - }, - { - key: 'server2', - '4': { - buckets: [ - { - '1': { - max: 10.2, - min: 5.5, - std_deviation_bounds: { upper: 3, lower: -2 }, - }, - doc_count: 10, - key: 1000, - }, - ], - }, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 4 series', () => { - expect(result.data.length).toBe(4); - expect(result.data[0].length).toBe(1); - expect(result.data[0].name).toBe('server1 Max @value'); - expect(result.data[1].name).toBe('server1 Std Dev Upper @value'); - - expect(getValueField(result.data[0]).values[0]).toBe(10.2); - expect(getValueField(result.data[1]).values[0]).toBe(3); - }); - }); - - describe('with top_metrics', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { - type: 'top_metrics', - settings: { - order: 'top', - orderBy: '@timestamp', - metrics: ['@value', '@anotherValue'], - }, - id: '1', - }, - ], - bucketAggs: [{ type: 'date_histogram', id: '2' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - key: new Date('2021-01-01T00:00:00.000Z').valueOf(), - key_as_string: '2021-01-01T00:00:00.000Z', - '1': { - top: [{ sort: ['2021-01-01T00:00:00.000Z'], metrics: { '@value': 1, '@anotherValue': 2 } }], - }, - }, - { - key: new Date('2021-01-01T00:00:10.000Z').valueOf(), - key_as_string: '2021-01-01T00:00:10.000Z', - '1': { - top: [{ sort: ['2021-01-01T00:00:10.000Z'], metrics: { '@value': 1, '@anotherValue': 2 } }], - }, - }, - ], - }, - }, - }, - ], - }; - }); - - it('should return 2 series', () => { - const result = new ElasticResponse(targets, response).getTimeSeries(); - expect(result.data.length).toBe(2); - - const firstSeries = result.data[0]; - expect(firstSeries.name).toBe('Top Metrics @value'); - expect(firstSeries.length).toBe(2); - expect(getTimeField(firstSeries).values).toEqual([ - new Date('2021-01-01T00:00:00.000Z').valueOf(), - new Date('2021-01-01T00:00:10.000Z').valueOf(), - ]); - 
expect(getValueField(firstSeries).values).toEqual([1, 1]); - - const secondSeries = result.data[1]; - expect(secondSeries.name).toBe('Top Metrics @anotherValue'); - expect(secondSeries.length).toBe(2); - expect(getTimeField(secondSeries).values).toEqual([ - new Date('2021-01-01T00:00:00.000Z').valueOf(), - new Date('2021-01-01T00:00:10.000Z').valueOf(), - ]); - expect(getValueField(secondSeries).values).toEqual([2, 2]); - }); - }); - - describe('single group by with alias pattern', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - alias: '{{term @host}} {{metric}} and {{not_exist}} {{@host}}', - bucketAggs: [ - { type: 'terms', field: '@host', id: '2' }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '3': { - buckets: [ - { doc_count: 1, key: 1000 }, - { doc_count: 3, key: 2000 }, - ], - }, - doc_count: 4, - key: 'server1', - }, - { - '3': { - buckets: [ - { doc_count: 2, key: 1000 }, - { doc_count: 8, key: 2000 }, - ], - }, - doc_count: 10, - key: 'server2', - }, - { - '3': { - buckets: [ - { doc_count: 2, key: 1000 }, - { doc_count: 8, key: 2000 }, - ], - }, - doc_count: 10, - key: 0, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 3 series', () => { - expect(result.data.length).toBe(3); - expect(result.data[0].length).toBe(2); - expect(result.data[0].name).toBe('server1 Count and {{not_exist}} server1'); - expect(result.data[1].name).toBe('server2 Count and {{not_exist}} server2'); - expect(result.data[2].name).toBe('0 Count and {{not_exist}} 0'); - }); - }); - - describe('histogram response', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [{ type: 'histogram', field: 'bytes', id: '3' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '3': { - buckets: [ - { doc_count: 1, key: 1000 }, - { doc_count: 3, key: 2000 }, - { doc_count: 2, key: 1000 }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return dataframe with bytes and count', () => { - expect(result.data[0].length).toBe(3); - const { fields } = result.data[0]; - expect(fields.length).toBe(2); - expect(fields[0].name).toBe('bytes'); - expect(fields[0].config).toEqual({ filterable: true }); - expect(fields[1].name).toBe('Count'); - expect(fields[1].config).toEqual({}); - }); - }); - - describe('with two filters agg', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [ - { - id: '2', - type: 'filters', - settings: { - filters: [ - { query: '@metric:cpu', label: '' }, - { query: '@metric:logins.count', label: '' }, - ], - }, - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: { - '@metric:cpu': { - '3': { - buckets: [ - { doc_count: 1, key: 1000 }, - { doc_count: 3, key: 2000 }, - ], - }, - }, - '@metric:logins.count': { - '3': { - buckets: [ - { doc_count: 2, key: 1000 }, - { doc_count: 8, key: 2000 }, - ], - }, - }, - }, - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 2 
series', () => { - expect(result.data.length).toBe(2); - expect(result.data[0].length).toBe(2); - expect(result.data[0].name).toBe('@metric:cpu'); - expect(result.data[1].name).toBe('@metric:logins.count'); - }); - }); - - describe('with dropfirst and last aggregation', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { type: 'avg', id: '1', field: '@value' }, - { type: 'count', id: '3' }, - ], - bucketAggs: [ - { - id: '2', - type: 'date_histogram', - field: 'host', - settings: { trimEdges: '1' }, - }, - ], - }, - ]; - - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '1': { value: 1000 }, - key: 1, - doc_count: 369, - }, - { - '1': { value: 2000 }, - key: 2, - doc_count: 200, - }, - { - '1': { value: 2000 }, - key: 3, - doc_count: 200, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should remove first and last value', () => { - expect(result.data.length).toBe(2); - expect(result.data[0].length).toBe(1); - }); - }); - - describe('No group by time', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { type: 'avg', id: '1', field: '@value' }, - { type: 'count', id: '3' }, - ], - bucketAggs: [{ id: '2', type: 'terms', field: 'host' }], - }, - ]; - - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '1': { value: 1000 }, - key: 'server-1', - doc_count: 369, - }, - { - '1': { value: 2000 }, - key: 'server-2', - doc_count: 200, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return dataframe', () => { - expect(result.data.length).toBe(1); - expect(result.data[0].length).toBe(2); - expect(result.data[0].fields.length).toBe(3); - const field1 = result.data[0].fields[0]; - const field2 = result.data[0].fields[1]; - const field3 = result.data[0].fields[2]; - - expect(field1.values).toEqual(['server-1', 'server-2']); - expect(field2.values).toEqual([1000, 2000]); - expect(field3.values).toEqual([369, 200]); - }); - }); - - describe('No group by time with percentiles ', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'percentiles', field: 'value', settings: { percents: ['75', '90'] }, id: '1' }], - bucketAggs: [{ type: 'terms', field: 'id', id: '3' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '3': { - buckets: [ - { - '1': { values: { '75': 3.3, '90': 5.5 } }, - doc_count: 10, - key: 'id1', - }, - { - '1': { values: { '75': 2.3, '90': 4.5 } }, - doc_count: 15, - key: 'id2', - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return dataframe', () => { - expect(result.data.length).toBe(1); - expect(result.data[0].length).toBe(2); - const field1 = result.data[0].fields[0]; - const field2 = result.data[0].fields[1]; - const field3 = result.data[0].fields[2]; - expect(field1.name).toBe('id'); - expect(field2.name).toBe('p75 value'); - expect(field3.name).toBe('p90 value'); - - expect(field1.values).toEqual(['id1', 'id2']); - expect(field2.values).toEqual([3.3, 2.3]); - expect(field3.values).toEqual([5.5, 4.5]); - }); - }); - - describe('Multiple metrics of same type', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { type: 'avg', id: '1', field: 'test' }, - { type: 'avg', id: '2', field: 'test2' }, - ], - bucketAggs: [{ id: '2', type: 'terms', field: 
'host' }], - }, - ]; - - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - '1': { value: 1000 }, - '2': { value: 3000 }, - key: 'server-1', - doc_count: 369, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should include field in metric name', () => { - expect(result.data[0].length).toBe(1); - expect(result.data[0].fields.length).toBe(3); - expect(result.data[0].fields[0].values).toEqual(['server-1']); - expect(result.data[0].fields[1].values).toEqual([1000]); - expect(result.data[0].fields[2].values).toEqual([3000]); - }); - }); - - describe('Raw documents query', () => { - let result: { - data: DataFrame[]; - }; - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'raw_document', id: '1' }], - bucketAggs: [], - }, - ]; - response = { - responses: [ - { - hits: { - total: 100, - hits: [ - { - _id: '1', - _type: 'type', - _index: 'index', - _source: { sourceProp: 'asd' }, - fields: { fieldProp: 'field' }, - }, - { - _source: { sourceProp: 'asd2' }, - fields: { fieldProp: 'field2' }, - }, - ], - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return raw_document formatted data', () => { - expect(result.data.length).toBe(1); - const frame = result.data[0]; - const { fields } = frame; - expect(fields.length).toBe(1); - const field = fields[0]; - expect(field.type).toBe(FieldType.other); - const values = field.values; - expect(values.length).toBe(2); - expect(values[0].sourceProp).toBe('asd'); - expect(values[0].fieldProp).toBe('field'); - }); - }); - - describe('with bucket_script ', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { id: '1', type: 'sum', field: '@value' }, - { id: '3', type: 'max', field: '@value' }, - { - id: '4', - pipelineVariables: [ - { name: 'var1', pipelineAgg: '1' }, - { name: 'var2', pipelineAgg: '3' }, - ], - settings: { script: 'params.var1 * params.var2' }, - type: 'bucket_script', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - 1: { value: 2 }, - 3: { value: 3 }, - 4: { value: 6 }, - doc_count: 60, - key: 1000, - }, - { - 1: { value: 3 }, - 3: { value: 4 }, - 4: { value: 12 }, - doc_count: 60, - key: 2000, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - it('should return 3 series', () => { - expect(result.data.length).toBe(3); - expect(result.data[0].length).toBe(2); - expect(result.data[0].name).toBe('Sum @value'); - expect(result.data[1].name).toBe('Max @value'); - expect(result.data[2].name).toBe('Sum @value * Max @value'); - expect(getValueField(result.data[0]).values[0]).toBe(2); - expect(getValueField(result.data[1]).values[0]).toBe(3); - expect(getValueField(result.data[2]).values[0]).toBe(6); - expect(getValueField(result.data[0]).values[1]).toBe(3); - expect(getValueField(result.data[1]).values[1]).toBe(4); - expect(getValueField(result.data[2]).values[1]).toBe(12); - }); - }); - - describe('terms with bucket_script and two scripts', () => { - let result: { - data: DataFrame[]; - }; - - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [ - { id: '1', type: 'sum', field: '@value' }, - { id: '3', type: 'max', field: '@value' }, - { - id: '4', - pipelineVariables: [ - { name: 'var1', pipelineAgg: '1' }, - { name: 
'var2', pipelineAgg: '3' }, - ], - settings: { script: 'params.var1 * params.var2' }, - type: 'bucket_script', - }, - { - id: '5', - pipelineVariables: [ - { name: 'var1', pipelineAgg: '1' }, - { name: 'var2', pipelineAgg: '3' }, - ], - settings: { script: 'params.var1 * params.var2 * 4' }, - type: 'bucket_script', - }, - ], - bucketAggs: [{ type: 'terms', field: '@timestamp', id: '2' }], - }, - ]; - response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - 1: { value: 2 }, - 3: { value: 3 }, - 4: { value: 6 }, - 5: { value: 24 }, - doc_count: 60, - key: 1000, - }, - { - 1: { value: 3 }, - 3: { value: 4 }, - 4: { value: 12 }, - 5: { value: 48 }, - doc_count: 60, - key: 2000, - }, - ], - }, - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should return 2 rows with 5 columns', () => { - const frame = result.data[0]; - expect(frame.length).toBe(2); - const { fields } = frame; - expect(fields.length).toBe(5); - expect(fields[0].values).toEqual([1000, 2000]); - expect(fields[1].values).toEqual([2, 3]); - expect(fields[2].values).toEqual([3, 4]); - expect(fields[3].values).toEqual([6, 12]); - expect(fields[4].values).toEqual([24, 48]); - }); - }); - - describe('Raw Data Query', () => { - beforeEach(() => { - targets = [ - { - refId: 'A', - metrics: [{ type: 'raw_data', id: '1' }], - bucketAggs: [], - timeField: '@timestamp', - }, - ]; - - response = { - responses: [ - { - hits: { - total: { - relation: 'eq', - value: 1, - }, - hits: [ - { - _id: '1', - _type: '_doc', - _index: 'index', - _source: { sourceProp: 'asd', '@timestamp': '2019-01-01T00:00:00Z' }, - }, - ], - }, - }, - ], - }; - - result = new ElasticResponse(targets, response).getTimeSeries(); - }); - - it('should create dataframes with filterable fields', () => { - for (const field of result.data[0].fields) { - expect(field.config.filterable).toBe(true); - } - }); - - it('should have time field values in DateTime format', () => { - const timeField = result.data[0].fields.find((field) => field.name === '@timestamp'); - expect(timeField).toBeDefined(); - expect(timeField?.values[0]).toBe(1546300800000); - }); - }); - - describe('simple logs query and count', () => { - const targets: ElasticsearchQuery[] = [ - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }], - key: 'Q-1561369883389-0.7611823271062786-0', - query: 'hello AND message', - timeField: '@timestamp', - }, - ]; - const response = { - responses: [ - { - aggregations: { - '2': { - buckets: [ - { - doc_count: 10, - key: 1000, - }, - { - doc_count: 15, - key: 2000, - }, - ], - }, - }, - hits: { - hits: [ - { - _id: 'fdsfs', - _type: '_doc', - _index: 'mock-index', - _source: { - '@timestamp': '2019-06-24T09:51:19.765Z', - host: 'djisaodjsoad', - number: 1, - message: 'hello, i am a message', - level: 'debug', - fields: { - lvl: 'debug', - }, - }, - highlight: { - message: [ - `${highlightTags.pre}hello${highlightTags.post}, i am a ${highlightTags.pre}message${highlightTags.post}`, - ], - }, - }, - { - _id: 'kdospaidopa', - _type: '_doc', - _index: 'mock-index', - _source: { - '@timestamp': '2019-06-24T09:52:19.765Z', - host: 'dsalkdakdop', - number: 2, - message: 'hello, i am also message', - level: 'error', - fields: { - lvl: 'info', - }, - }, - highlight: { - message: [ - `${highlightTags.pre}hello${highlightTags.post}, i am a ${highlightTags.pre}message${highlightTags.post}`, - ], - }, - }, - ], - }, - }, - ], - }; 
- - it('should return histogram aggregation and documents', () => { - const result = new ElasticResponse(targets, response).getLogs(); - expect(result.data.length).toBe(2); - const logResults = result.data[0] as MutableDataFrame; - expect(logResults).toHaveProperty('meta'); - expect(logResults.meta).toEqual({ - searchWords: ['hello', 'message'], - preferredVisualisationType: 'logs', - }); - - const fields = logResults.fields.map((f) => { - return { - name: f.name, - type: f.type, - }; - }); - - expect(fields).toContainEqual({ name: '@timestamp', type: 'time' }); - expect(fields).toContainEqual({ name: 'host', type: 'string' }); - expect(fields).toContainEqual({ name: 'message', type: 'string' }); - - let rows = new DataFrameView(logResults); - for (let i = 0; i < rows.length; i++) { - const r = rows.get(i); - expect(r._id).toEqual(response.responses[0].hits.hits[i]._id); - expect(r._type).toEqual(response.responses[0].hits.hits[i]._type); - expect(r._index).toEqual(response.responses[0].hits.hits[i]._index); - expect(r._source).toEqual(flattenObject(response.responses[0].hits.hits[i]._source)); - } - - // Make a map from the histogram results - const hist: KeyValue = {}; - const histogramResults = new MutableDataFrame(result.data[1]); - rows = new DataFrameView(histogramResults); - - for (let i = 0; i < rows.length; i++) { - const row = rows.get(i); - hist[row.Time] = row.Value; - } - - response.responses[0].aggregations['2'].buckets.forEach((bucket) => { - expect(hist[bucket.key]).toEqual(bucket.doc_count); - }); - }); - - it('should map levels field', () => { - const result = new ElasticResponse(targets, response).getLogs(undefined, 'level'); - const fieldCache = new FieldCache(result.data[0]); - const field = fieldCache.getFieldByName('level'); - expect(field?.values).toEqual(['debug', 'error']); - }); - - it('should re map levels field to new field', () => { - const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl'); - const fieldCache = new FieldCache(result.data[0]); - const field = fieldCache.getFieldByName('level'); - expect(field?.values).toEqual(['debug', 'info']); - }); - - it('should correctly guess field types', () => { - const result = new ElasticResponse(targets, response).getLogs(); - const logResults = result.data[0] as MutableDataFrame; - - const fields = logResults.fields.map((f) => { - return { - name: f.name, - type: f.type, - }; - }); - - expect(fields).toContainEqual({ name: '@timestamp', type: 'time' }); - expect(fields).toContainEqual({ name: 'number', type: 'number' }); - expect(fields).toContainEqual({ name: 'message', type: 'string' }); - }); - }); - - describe('logs query with empty response', () => { - const targets: ElasticsearchQuery[] = [ - { - refId: 'A', - metrics: [{ type: 'logs', id: '2' }], - bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '1' }], - key: 'Q-1561369883389-0.7611823271062786-0', - query: 'hello AND message', - timeField: '@timestamp', - }, - ]; - const response = { - responses: [ - { - hits: { hits: [] }, - aggregations: { - '1': { - buckets: [ - { key_as_string: '1633676760000', key: 1633676760000, doc_count: 0 }, - { key_as_string: '1633676770000', key: 1633676770000, doc_count: 0 }, - { key_as_string: '1633676780000', key: 1633676780000, doc_count: 0 }, - ], - }, - }, - status: 200, - }, - ], - }; - - it('should return histogram aggregation and documents', () => { - const result = new ElasticResponse(targets, response).getLogs('message', 'level'); - 
expect(result.data.length).toBe(2); - }); - }); -}); - -interface MockedElasticResponse { - aggregations?: { - [key: string]: { - buckets: Array<{ - doc_count?: number; - key: string | number; - [key: string]: unknown; - }>; - }; - }; - hits?: { - total: number; - hits: Array<{ - _id?: string; - _type?: string; - _index?: string; - _source: { sourceProp: string }; - fields: { fieldProp: string }; - }>; - }; -} - -interface MockedQueryData { - target: ElasticsearchQuery; - response: MockedElasticResponse; } diff --git a/public/app/plugins/datasource/elasticsearch/ElasticResponse.ts b/public/app/plugins/datasource/elasticsearch/ElasticResponse.ts deleted file mode 100644 index 103a571b8a6..00000000000 --- a/public/app/plugins/datasource/elasticsearch/ElasticResponse.ts +++ /dev/null @@ -1,807 +0,0 @@ -import { clone, filter, find, identity, isArray, keys, map, uniq, values as _values } from 'lodash'; - -import { - DataQueryResponse, - DataFrame, - toDataFrame, - FieldType, - MutableDataFrame, - PreferredVisualisationType, -} from '@grafana/data'; -import { convertFieldType } from '@grafana/data/internal'; -import TableModel from 'app/core/TableModel'; - -import { isMetricAggregationWithField } from './components/QueryEditor/MetricAggregationsEditor/aggregations'; -import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils'; -import * as queryDef from './queryDef'; -import { ElasticsearchAggregation, ElasticsearchQuery, TopMetrics, ExtendedStatMetaType } from './types'; -import { describeMetric, flattenObject, getScriptValue } from './utils'; - -const HIGHLIGHT_TAGS_EXP = `${queryDef.highlightTags.pre}([^@]+)${queryDef.highlightTags.post}`; -type TopMetricMetric = Record<string, number>; -interface TopMetricBucket { - top: Array<{ - metrics: TopMetricMetric; - }>; -} - -export class ElasticResponse { - constructor( - private targets: ElasticsearchQuery[], - private response: any - ) { - this.targets = targets; - this.response = response; - } - - processMetrics(esAgg: any, target: ElasticsearchQuery, seriesList: any, props: any) { - let newSeries: any; - - for (let y = 0; y < target.metrics!.length; y++) { - const metric = target.metrics![y]; - if (metric.hide) { - continue; - } - - switch (metric.type) { - case 'count': { - newSeries = { datapoints: [], metric: 'count', props, refId: target.refId }; - for (let i = 0; i < esAgg.buckets.length; i++) { - const bucket = esAgg.buckets[i]; - const value = bucket.doc_count; - newSeries.datapoints.push([value, bucket.key]); - } - seriesList.push(newSeries); - break; - } - case 'percentiles': { - if (esAgg.buckets.length === 0) { - break; - } - - const firstBucket = esAgg.buckets[0]; - const percentiles = firstBucket[metric.id].values; - - for (const percentileName in percentiles) { - newSeries = { - datapoints: [], - metric: 'p' + percentileName, - props: props, - field: metric.field, - refId: target.refId, - }; - - for (let i = 0; i < esAgg.buckets.length; i++) { - const bucket = esAgg.buckets[i]; - const values = bucket[metric.id].values; - newSeries.datapoints.push([values[percentileName], bucket.key]); - } - seriesList.push(newSeries); - } - - break; - } - case 'extended_stats': { - for (const statName in metric.meta) { - if (!metric.meta[statName]) { - continue; - } - - newSeries = { - datapoints: [], - metric: statName, - props: props, - field: metric.field, - refId: target.refId, - }; - - for (let i = 0; i < esAgg.buckets.length; i++) { - const bucket = esAgg.buckets[i]; - const stats = bucket[metric.id]; - - // add 
stats that are in nested obj to top level obj - stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper; - stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower; - - newSeries.datapoints.push([stats[statName], bucket.key]); - } - - seriesList.push(newSeries); - } - - break; - } - case 'top_metrics': { - if (metric.settings?.metrics?.length) { - for (const metricField of metric.settings?.metrics) { - newSeries = { - datapoints: [], - metric: metric.type, - props: props, - refId: target.refId, - field: metricField, - }; - for (let i = 0; i < esAgg.buckets.length; i++) { - const bucket = esAgg.buckets[i]; - const stats: TopMetricBucket = bucket[metric.id]; - const values = stats.top.map((hit) => { - if (hit.metrics[metricField]) { - return hit.metrics[metricField]; - } - return null; - }); - const point = [values[values.length - 1], bucket.key]; - newSeries.datapoints.push(point); - } - seriesList.push(newSeries); - } - } - break; - } - default: { - newSeries = { - datapoints: [], - metric: metric.type, - metricId: metric.id, - props: props, - refId: target.refId, - }; - - if (isMetricAggregationWithField(metric)) { - newSeries.field = metric.field; - } - - for (let i = 0; i < esAgg.buckets.length; i++) { - const bucket = esAgg.buckets[i]; - const value = bucket[metric.id]; - - if (value !== undefined) { - if (value.normalized_value) { - newSeries.datapoints.push([value.normalized_value, bucket.key]); - } else { - newSeries.datapoints.push([value.value, bucket.key]); - } - } - } - seriesList.push(newSeries); - break; - } - } - } - } - - processAggregationDocs( - esAgg: any, - aggDef: ElasticsearchAggregation, - target: ElasticsearchQuery, - table: any, - props: any - ) { - // add columns - if (table.columns.length === 0) { - for (const propKey of keys(props)) { - table.addColumn({ text: propKey, filterable: true }); - } - table.addColumn({ text: aggDef.field, filterable: true }); - } - - // helper func to add values to value array - const addMetricValue = (values: unknown[], metricName: string, value: unknown) => { - table.addColumn({ text: metricName }); - values.push(value); - }; - const buckets = isArray(esAgg.buckets) ? 
esAgg.buckets : [esAgg.buckets]; - for (const bucket of buckets) { - const values = []; - - for (const propValues of _values(props)) { - values.push(propValues); - } - - // add bucket key (value) - values.push(bucket.key); - - for (const metric of target.metrics || []) { - switch (metric.type) { - case 'count': { - addMetricValue(values, this.getMetricName(metric.type), bucket.doc_count); - break; - } - case 'extended_stats': { - for (const statName in metric.meta) { - if (!metric.meta[statName]) { - continue; - } - - const stats = bucket[metric.id]; - // add stats that are in nested obj to top level obj - stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper; - stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower; - - addMetricValue(values, this.getMetricName(statName as ExtendedStatMetaType), stats[statName]); - } - break; - } - case 'percentiles': { - const percentiles = bucket[metric.id].values; - - for (const percentileName in percentiles) { - addMetricValue(values, `p${percentileName} ${metric.field}`, percentiles[percentileName]); - } - break; - } - case 'top_metrics': { - const baseName = this.getMetricName(metric.type); - - if (metric.settings?.metrics) { - for (const metricField of metric.settings.metrics) { - // If we selected more than one metric we also add each metric name - const metricName = metric.settings.metrics.length > 1 ? `${baseName} ${metricField}` : baseName; - - const stats: TopMetricBucket = bucket[metric.id]; - - // Size of top_metrics is fixed to 1. - addMetricValue(values, metricName, stats.top[0].metrics[metricField]); - } - } - - break; - } - default: { - let metricName = this.getMetricName(metric.type); - const otherMetrics = filter(target.metrics, { type: metric.type }); - - // if there is more than one metric of the same type, include the field name in the metric name - if (otherMetrics.length > 1) { - if (isMetricAggregationWithField(metric)) { - metricName += ' ' + metric.field; - } - - if (metric.type === 'bucket_script') { - // Use the formula in the column name - metricName = getScriptValue(metric); - } - } - - addMetricValue(values, metricName, bucket[metric.id].value); - break; - } - } - } - - table.rows.push(values); - } - } - - // This is quite complex - // need to recurse down the nested buckets to build series - processBuckets(aggs: any, target: ElasticsearchQuery, seriesList: any, table: TableModel, props: any, depth: number) { - let bucket, aggDef: any, esAgg, aggId; - const maxDepth = target.bucketAggs!.length - 1; - - for (aggId in aggs) { - aggDef = find(target.bucketAggs, { id: aggId }); - esAgg = aggs[aggId]; - - if (!aggDef) { - continue; - } - - if (aggDef.type === 'nested') { - this.processBuckets(esAgg, target, seriesList, table, props, depth + 1); - continue; - } - - if (depth === maxDepth) { - if (aggDef.type === 'date_histogram') { - this.processMetrics(esAgg, target, seriesList, props); - } else { - this.processAggregationDocs(esAgg, aggDef, target, table, props); - } - } else { - for (const nameIndex in esAgg.buckets) { - bucket = esAgg.buckets[nameIndex]; - props = clone(props); - if (bucket.key !== void 0) { - props[aggDef.field] = bucket.key; - } else { - props['filter'] = nameIndex; - } - if (bucket.key_as_string) { - props[aggDef.field] = bucket.key_as_string; - } - this.processBuckets(bucket, target, seriesList, table, props, depth + 1); - } - } - } - } - - private getMetricName(metric: string): string { - const metricDef = Object.entries(metricAggregationConfig) - .filter(([key]) => key === metric) - .map(([_, value]) 
=> value)[0]; - - if (metricDef) { - return metricDef.label; - } - - const extendedStat = queryDef.extendedStats.find((e) => e.value === metric); - if (extendedStat) { - return extendedStat.label; - } - - return metric; - } - - private getSeriesName(series: any, target: ElasticsearchQuery, dedup: boolean) { - let metricName = this.getMetricName(series.metric); - - if (target.alias) { - const regex = /\{\{([\s\S]+?)\}\}/g; - - return target.alias.replace(regex, (match, g1, g2) => { - const group = g1 || g2; - - if (group.indexOf('term ') === 0) { - return series.props[group.substring(5)]; - } - if (series.props[group] !== void 0) { - return series.props[group]; - } - if (group === 'metric') { - return metricName; - } - if (group === 'field') { - return series.field || ''; - } - - return match; - }); - } - - if (queryDef.isPipelineAgg(series.metric)) { - if (series.metric && queryDef.isPipelineAggWithMultipleBucketPaths(series.metric)) { - const agg: any = find(target.metrics, { id: series.metricId }); - if (agg && agg.settings.script) { - metricName = getScriptValue(agg); - - for (const pv of agg.pipelineVariables) { - const appliedAgg = find(target.metrics, { id: pv.pipelineAgg }); - if (appliedAgg) { - metricName = metricName.replace('params.' + pv.name, describeMetric(appliedAgg)); - } - } - } else { - metricName = 'Unset'; - } - } else { - const appliedAgg = find(target.metrics, { id: series.field }); - if (appliedAgg) { - metricName += ' ' + describeMetric(appliedAgg); - } else { - metricName = 'Unset'; - } - } - } else if (series.field) { - metricName += ' ' + series.field; - } - - const propKeys = keys(series.props); - if (propKeys.length === 0) { - return metricName; - } - - let name = ''; - for (const propName in series.props) { - name += series.props[propName] + ' '; - } - - if (dedup) { - return name.trim() + ' ' + metricName; - } - - return name.trim(); - } - - nameSeries(seriesList: any, target: ElasticsearchQuery) { - const metricTypeCount = uniq(map(seriesList, 'metric')).length; - const hasTopMetricWithMultipleMetrics = ( - target.metrics?.filter((m) => m.type === 'top_metrics') as TopMetrics[] - ).some((m) => (m?.settings?.metrics?.length || 0) > 1); - - for (let i = 0; i < seriesList.length; i++) { - const series = seriesList[i]; - series.target = this.getSeriesName(series, target, metricTypeCount > 1 || hasTopMetricWithMultipleMetrics); - } - } - - processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[], target: ElasticsearchQuery) { - const hitsTotal = typeof hits.total === 'number' ? 
hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+ - - const series: any = { - target: target.refId, - type: 'docs', - refId: target.refId, - datapoints: [], - total: hitsTotal, - filterable: true, - }; - let propName, hit, doc: any, i; - - for (i = 0; i < hits.hits.length; i++) { - hit = hits.hits[i]; - doc = { - _id: hit._id, - _type: hit._type, - _index: hit._index, - sort: hit.sort, - highlight: hit.highlight, - }; - - if (hit._source) { - for (propName in hit._source) { - doc[propName] = hit._source[propName]; - } - } - - for (propName in hit.fields) { - doc[propName] = hit.fields[propName]; - } - series.datapoints.push(doc); - } - - seriesList.push(series); - } - - trimDatapoints(aggregations: any, target: ElasticsearchQuery) { - const histogram: any = find(target.bucketAggs, { type: 'date_histogram' }); - - const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges; - if (shouldDropFirstAndLast) { - const trim = histogram.settings.trimEdges; - for (const prop in aggregations) { - const points = aggregations[prop]; - if (points.datapoints.length > trim * 2) { - points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim); - } - } - } - } - - getErrorFromElasticResponse(response: any, err: any) { - const result: any = {}; - result.data = JSON.stringify(err, null, 4); - if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) { - result.message = err.root_cause[0].reason; - } else { - result.message = err.reason || 'Unknown elastic error response'; - } - - if (response.$$config) { - result.config = response.$$config; - } - - return result; - } - - getTimeSeries() { - if (this.targets.some((target) => queryDef.hasMetricOfType(target, 'raw_data'))) { - return this.processResponseToDataFrames(false); - } - const result = this.processResponseToSeries(); - return { - ...result, - data: result.data.map((item) => toDataFrame(item)), - }; - } - - getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse { - return this.processResponseToDataFrames(true, logMessageField, logLevelField); - } - - private processResponseToDataFrames( - isLogsRequest: boolean, - logMessageField?: string, - logLevelField?: string - ): DataQueryResponse { - const dataFrame: DataFrame[] = []; - for (let n = 0; n < this.response.responses.length; n++) { - const response = this.response.responses[n]; - if (response.error) { - throw this.getErrorFromElasticResponse(this.response, response.error); - } - - if (response.hits) { - const { propNames, docs } = flattenHits(response.hits.hits); - - const series = docs.length - ? createEmptyDataFrame( - propNames.map(toNameTypePair(docs)), - isLogsRequest, - this.targets[0].timeField, - logMessageField, - logLevelField - ) - : createEmptyDataFrame([], isLogsRequest); - - if (isLogsRequest) { - addPreferredVisualisationType(series, 'logs'); - } - - // Add a row for each document - for (const doc of docs) { - if (logLevelField) { - // Remap level field based on the datasource config. This field is - // then used in explore to figure out the log level. We may rewrite - // some actual data in the level field if they are different. - doc['level'] = doc[logLevelField]; - } - // When highlighting exists, we need to collect all the highlighted - // phrases and add them to the DataFrame's meta.searchWords array. - if (doc.highlight) { - // There might be multiple words so we need two versions of the - // regular expression. 
One to match globally, when used with part.match, - it returns an array of matches. The second one is used to capture the - values between the tags. - const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g'); - const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP); - const newSearchWords = Object.keys(doc.highlight) - .flatMap((key) => { - return doc.highlight[key].flatMap((line: string) => { - const matchedPhrases = line.match(globalHighlightWordRegex); - if (!matchedPhrases) { - return []; - } - return matchedPhrases.map((part) => { - const matches = part.match(highlightWordRegex); - return (matches && matches[1]) || null; - }); - }); - }) - .filter(identity); - // If meta and searchWords already exist, add the words and - // deduplicate; otherwise create a new set of search words. - const searchWords = series.meta?.searchWords - ? uniq([...series.meta.searchWords, ...newSearchWords]) - : [...newSearchWords]; - series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords }; - } - series.add(doc); - } - - const target = this.targets[n]; - series.refId = target.refId; - dataFrame.push(series); - } - - if (response.aggregations) { - const aggregations = response.aggregations; - const target = this.targets[n]; - const tmpSeriesList: any[] = []; - const table = new TableModel(); - - this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0); - this.trimDatapoints(tmpSeriesList, target); - this.nameSeries(tmpSeriesList, target); - - if (table.rows.length > 0) { - const series = toDataFrame(table); - series.refId = target.refId; - dataFrame.push(series); - } - - for (let y = 0; y < tmpSeriesList.length; y++) { - let series = toDataFrame(tmpSeriesList[y]); - - // For log results, show aggregations only in the graph. Log fields are then shown in the table. - if (isLogsRequest) { - addPreferredVisualisationType(series, 'graph'); - } - - series.refId = target.refId; - dataFrame.push(series); - } - } - } - - for (let frame of dataFrame) { - for (let field of frame.fields) { - if (field.type === FieldType.time && typeof field.values[0] !== 'number') { - field.values = convertFieldType(field, { destinationType: FieldType.time }).values; - } - } - } - - return { data: dataFrame }; - } - - processResponseToSeries = () => { - const seriesList = []; - - for (let i = 0; i < this.response.responses.length; i++) { - const response = this.response.responses[i]; - const target = this.targets[i]; - - if (response.error) { - throw this.getErrorFromElasticResponse(this.response, response.error); - } - - if (response.hits && response.hits.hits.length > 0) { - this.processHits(response.hits, seriesList, target); - } - - if (response.aggregations) { - const aggregations = response.aggregations; - const target = this.targets[i]; - const tmpSeriesList: any[] = []; - const table = new TableModel(); - table.refId = target.refId; - - this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0); - this.trimDatapoints(tmpSeriesList, target); - this.nameSeries(tmpSeriesList, target); - - for (let y = 0; y < tmpSeriesList.length; y++) { - seriesList.push(tmpSeriesList[y]); - } - - if (table.rows.length > 0) { - seriesList.push(table); - } - } - } - - return { data: seriesList }; - }; -} - -type Doc = { - _id: string; - _type: string; - _index: string; - _source?: any; - sort?: Array<string | number>; - highlight?: Record<string, string[]>; -}; - -/** - * Flatten the docs from the response, mainly the _source part, which can be nested. 
This flattens it so that it is one level - deep and the keys are: `level1Name.level2Name...`. Also returns a list of all properties from all the docs (not all - docs have to have the same keys). - * @param hits - */ -const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames: string[] } => { - const docs: any[] = []; - // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead - // to wide sparse dataframes in case the schema is different per document. - let propNames: string[] = []; - - for (const hit of hits) { - const flattened = hit._source ? flattenObject(hit._source) : {}; - const doc = { - _id: hit._id, - _type: hit._type, - _index: hit._index, - sort: hit.sort, - highlight: hit.highlight, - _source: { ...flattened }, - ...flattened, - }; - - for (const propName of Object.keys(doc)) { - if (propNames.indexOf(propName) === -1) { - propNames.push(propName); - } - } - - docs.push(doc); - } - - propNames.sort(); - return { docs, propNames }; -}; - -/** - * Create an empty dataframe, but with its fields already created. Fields are based on propNames (which should come from the response) and - * also on the configured fields for message, time, and level. - * @param propNames - * @param timeField - * @param logMessageField - * @param logLevelField - */ -const createEmptyDataFrame = ( - props: Array<[string, FieldType]>, - isLogsRequest: boolean, - timeField?: string, - logMessageField?: string, - logLevelField?: string -): MutableDataFrame => { - const series = new MutableDataFrame({ fields: [] }); - - if (timeField) { - series.addField({ - config: { - filterable: true, - }, - name: timeField, - type: FieldType.time, - }); - } - - if (logMessageField) { - const f = series.addField({ - name: logMessageField, - type: FieldType.string, - }); - series.setParser(f, (v) => { - return v || ''; - }); - } - - if (logLevelField) { - const f = series.addField({ - name: 'level', - type: FieldType.string, - }); - series.setParser(f, (v) => { - return v || ''; - }); - } - - const fieldNames = series.fields.map((field) => field.name); - - for (const [name, type] of props) { - // Do not duplicate fields. This can mean that we will shadow some fields. - if (fieldNames.includes(name)) { - continue; - } - // Do not add the _source field (except for logs requests) as we are showing each _source field in the table instead. - if (!isLogsRequest && name === '_source') { - continue; - } - - const f = series.addField({ - config: { - filterable: true, - }, - name, - type, - }); - series.setParser(f, (v) => { - return v || ''; - }); - } - - return series; -}; - -const addPreferredVisualisationType = (series: DataFrame, type: PreferredVisualisationType) => { - let s = series; - s.meta - ? (s.meta.preferredVisualisationType = type) - : (s.meta = { - preferredVisualisationType: type, - }); -}; - -const toNameTypePair = - (docs: Array<Record<string, any>>) => - (propName: string): [string, FieldType] => [ - propName, - guessType(docs.find((doc) => doc[propName] !== undefined)?.[propName]), - ]; - -/** - * Tries to guess the data type from its value. This is far from perfect, as in order to have an accurate guess - * we would need access to the Elasticsearch mapping, but it covers the most common use cases for numbers, strings & arrays. - */ -const guessType = (value: unknown): FieldType => { - switch (typeof value) { - case 'number': - return FieldType.number; - case 'string': - return FieldType.string; - default: - return FieldType.other; - } -};
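
For reference, the dot-joined flattening that the deleted flattenHits relies on is implemented by the flattenObject util in ./utils, which is not part of this patch. A minimal sketch of that behavior, assuming nested objects are collapsed into `level1Name.level2Name` keys; flattenSource is a hypothetical stand-in, not the actual util:

// Sketch only: collapse a nested _source object into a one-level record with
// dot-joined keys, e.g. { host: { name: 'srv1' }, level: 'info' }
// becomes { 'host.name': 'srv1', level: 'info' }.
function flattenSource(obj: Record<string, unknown>, prefix = ''): Record<string, unknown> {
  const out: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(obj)) {
    const name = prefix ? `${prefix}.${key}` : key;
    if (value && typeof value === 'object' && !Array.isArray(value)) {
      // Recurse into nested objects, extending the dotted prefix.
      Object.assign(out, flattenSource(value as Record<string, unknown>, name));
    } else {
      out[name] = value;
    }
  }
  return out;
}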
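
The hits.total handling in the deleted processHits (the "Works with Elasticsearch 7.0+" comment) reflects the search API change where hits.total went from a plain number to an object; both shapes appear in the test fixtures above (total: 100 vs. total: { relation: 'eq', value: 1 }). A standalone sketch of that normalization:

// hits.total is a number before Elasticsearch 7.0 and { value, relation } from 7.0 on.
type HitsTotal = number | { value: number; relation: string };

// Mirrors the deleted ternary: typeof hits.total === 'number' ? hits.total : hits.total.value
function totalHits(total: HitsTotal): number {
  return typeof total === 'number' ? total : total.value;
}

// totalHits(100) === 100
// totalHits({ value: 1, relation: 'eq' }) === 1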
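
The searchWords extraction in the deleted processResponseToDataFrames pairs a global regex (to find every tagged phrase in a highlighted line) with a non-global one (to capture the text between the tags). A self-contained sketch of that two-step match; the tag literals below are illustrative stand-ins for the real queryDef.highlightTags values:

// Assumed tag values for illustration; the datasource imports the real ones from ./queryDef.
const PRE_TAG = '@HIGHLIGHT@';
const POST_TAG = '@/HIGHLIGHT@';
const TAGS_EXP = `${PRE_TAG}([^@]+)${POST_TAG}`;

function extractSearchWords(line: string): string[] {
  const globalRegex = new RegExp(TAGS_EXP, 'g'); // finds every tagged phrase
  const captureRegex = new RegExp(TAGS_EXP); // captures the text between the tags
  return (line.match(globalRegex) ?? [])
    .map((part) => part.match(captureRegex)?.[1])
    .filter((word): word is string => Boolean(word));
}

// extractSearchWords(`${PRE_TAG}hello${POST_TAG}, i am a ${PRE_TAG}message${POST_TAG}`)
// yields ['hello', 'message']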