diff --git a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/BucketAggregationEditor.tsx b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/BucketAggregationEditor.tsx index f026d5715f1..0ddd713cd42 100644 --- a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/BucketAggregationEditor.tsx +++ b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/BucketAggregationEditor.tsx @@ -48,7 +48,7 @@ export const BucketAggregationEditor: FunctionComponent } }; - return (await get()).map(toSelectableValue); + return (await get().toPromise()).map(toSelectableValue); }; return ( diff --git a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx index c55287ced5b..34e3eeb4015 100644 --- a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx +++ b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx @@ -83,7 +83,7 @@ export const MetricEditor: FunctionComponent = ({ value }) => { return datasource.getFields('number'); }; - return (await get()).map(toSelectableValue); + return (await get().toPromise()).map(toSelectableValue); }; return ( diff --git a/public/app/plugins/datasource/elasticsearch/datasource.test.ts b/public/app/plugins/datasource/elasticsearch/datasource.test.ts index 2baf8844013..fde216dc480 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.test.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.test.ts @@ -1,23 +1,26 @@ +import _ from 'lodash'; +import { Observable, of, throwError } from 'rxjs'; import { ArrayVector, CoreApp, DataQueryRequest, DataSourceInstanceSettings, + DataSourcePluginMeta, 
dateMath, DateTime, dateTime, Field, - MetricFindValue, MutableDataFrame, TimeRange, toUtc, } from '@grafana/data'; -import _ from 'lodash'; +import { BackendSrvRequest, FetchResponse } from '@grafana/runtime'; + import { ElasticDatasource, enhanceDataFrame } from './datasource'; import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__ -import { TemplateSrv } from 'app/features/templating/template_srv'; import { ElasticsearchOptions, ElasticsearchQuery } from './types'; import { Filters } from './components/QueryEditor/BucketAggregationsEditor/aggregations'; +import { createFetchResponse } from '../../../../test/helpers/createFetchResponse'; const ELASTICSEARCH_MOCK_URL = 'http://elasticsearch.local'; @@ -42,12 +45,27 @@ const createTimeRange = (from: DateTime, to: DateTime): TimeRange => ({ }, }); -describe('ElasticDatasource', function(this: any) { - const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest'); +interface Args { + data?: any; + from?: string; + jsonData?: any; + database?: string; + mockImplementation?: (options: BackendSrvRequest) => Observable; +} - beforeEach(() => { - jest.clearAllMocks(); - }); +function getTestContext({ + data = {}, + from = 'now-5m', + jsonData = {}, + database = '[asd-]YYYY.MM.DD', + mockImplementation = undefined, +}: Args = {}) { + jest.clearAllMocks(); + + const defaultMock = (options: BackendSrvRequest) => of(createFetchResponse(data)); + + const fetchMock = jest.spyOn(backendSrv, 'fetch'); + fetchMock.mockImplementation(mockImplementation ?? 
defaultMock); const templateSrv: any = { replace: jest.fn(text => { @@ -60,144 +78,139 @@ describe('ElasticDatasource', function(this: any) { getAdhocFilters: jest.fn(() => []), }; - interface TestContext { - ds: ElasticDatasource; - } - const ctx = {} as TestContext; + const timeSrv: any = { + time: { from, to: 'now' }, + }; - function createTimeSrv(from: string) { - const srv: any = { - time: { from: from, to: 'now' }, + timeSrv.timeRange = jest.fn(() => { + return { + from: dateMath.parse(timeSrv.time.from, false), + to: dateMath.parse(timeSrv.time.to, true), }; + }); - srv.timeRange = jest.fn(() => { - return { - from: dateMath.parse(srv.time.from, false), - to: dateMath.parse(srv.time.to, true), - }; - }); + timeSrv.setTime = jest.fn(time => { + timeSrv.time = time; + }); - srv.setTime = jest.fn(time => { - srv.time = time; - }); + const instanceSettings: DataSourceInstanceSettings = { + id: 1, + meta: {} as DataSourcePluginMeta, + name: 'test-elastic', + type: 'type', + uid: 'uid', + url: ELASTICSEARCH_MOCK_URL, + database, + jsonData, + }; - return srv; - } + const ds = new ElasticDatasource(instanceSettings, templateSrv); - function createDatasource(instanceSettings: DataSourceInstanceSettings) { - instanceSettings.jsonData = instanceSettings.jsonData || ({} as ElasticsearchOptions); - ctx.ds = new ElasticDatasource(instanceSettings, templateSrv as TemplateSrv); - } + return { timeSrv, ds, fetchMock }; +} +describe('ElasticDatasource', function(this: any) { describe('When testing datasource with index pattern', () => { - beforeEach(() => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: '[asd-]YYYY.MM.DD', - jsonData: { interval: 'Daily', esVersion: 2 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - }); - it('should translate index pattern to current day', () => { - let requestOptions: any; - datasourceRequestMock.mockImplementation(options => { - requestOptions = options; - return Promise.resolve({ data: {} }); - }); + const 
{ ds, fetchMock } = getTestContext({ jsonData: { interval: 'Daily', esVersion: 2 } }); - ctx.ds.testDatasource(); + ds.testDatasource(); const today = toUtc().format('YYYY.MM.DD'); - expect(requestOptions.url).toBe(`${ELASTICSEARCH_MOCK_URL}/asd-${today}/_mapping`); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock.mock.calls[0][0].url).toBe(`${ELASTICSEARCH_MOCK_URL}/asd-${today}/_mapping`); }); }); describe('When issuing metric query with interval pattern', () => { - let requestOptions: any, parts: any, header: any, query: any, result: any; - - beforeEach(async () => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: '[asd-]YYYY.MM.DD', - jsonData: { interval: 'Daily', esVersion: 2 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - requestOptions = options; - return Promise.resolve({ - data: { - responses: [ - { - aggregations: { - '1': { - buckets: [ - { - doc_count: 10, - key: 1000, - }, - ], - }, - }, - }, - ], - }, - }); - }); - - query = { - range: { - from: toUtc([2015, 4, 30, 10]), - to: toUtc([2015, 5, 1, 10]), + async function runScenario() { + const range = { from: toUtc([2015, 4, 30, 10]), to: toUtc([2015, 5, 1, 10]) }; + const targets = [ + { + alias: '$varAlias', + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }], + metrics: [{ type: 'count', id: '1' }], + query: 'escape\\:test', }, - targets: [ + ]; + const query: any = { range, targets }; + const data = { + responses: [ { - alias: '$varAlias', - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }], - metrics: [{ type: 'count', id: '1' }], - query: 'escape\\:test', + aggregations: { + '1': { + buckets: [ + { + doc_count: 10, + key: 1000, + }, + ], + }, + }, }, ], }; + const { ds, fetchMock } = getTestContext({ jsonData: { interval: 'Daily', esVersion: 2 }, data }); - result = await ctx.ds.query(query); + let result: any = {}; + await 
expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toEqual({ + data: [ + { + datapoints: [[10, 1000]], + metric: 'count', + props: {}, + refId: undefined, + target: 'resolvedVariable', + }, + ], + }); + result = received[0]; + }); - parts = requestOptions.data.split('\n'); - header = JSON.parse(parts[0]); - }); + expect(fetchMock).toHaveBeenCalledTimes(1); + const requestOptions = fetchMock.mock.calls[0][0]; + const parts = requestOptions.data.split('\n'); + const header = JSON.parse(parts[0]); + const body = JSON.parse(parts[1]); - it('should translate index pattern to current day', () => { + return { result, body, header, query }; + } + + it('should translate index pattern to current day', async () => { + const { header } = await runScenario(); expect(header.index).toEqual(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']); }); - it('should not resolve the variable in the original alias field in the query', () => { + it('should not resolve the variable in the original alias field in the query', async () => { + const { query } = await runScenario(); expect(query.targets[0].alias).toEqual('$varAlias'); }); - it('should resolve the alias variable for the alias/target in the result', () => { + it('should resolve the alias variable for the alias/target in the result', async () => { + const { result } = await runScenario(); expect(result.data[0].target).toEqual('resolvedVariable'); }); - it('should json escape lucene query', () => { - const body = JSON.parse(parts[1]); + it('should json escape lucene query', async () => { + const { body } = await runScenario(); expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test'); }); }); describe('When issuing logs query with interval pattern', () => { async function setupDataSource(jsonData?: Partial) { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, + jsonData = { + interval: 'Daily', + esVersion: 2, + timeField: '@timestamp', + ...(jsonData 
|| {}), + }; + const { ds } = getTestContext({ + jsonData, + data: logsResponse.data, database: 'mock-index', - jsonData: { - interval: 'Daily', - esVersion: 2, - timeField: '@timestamp', - ...(jsonData || {}), - } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - return Promise.resolve(logsResponse); }); const query: DataQueryRequest = { @@ -206,7 +219,13 @@ describe('ElasticDatasource', function(this: any) { { alias: '$varAlias', refId: 'A', - bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }], + bucketAggs: [ + { + type: 'date_histogram', + settings: { interval: 'auto' }, + id: '2', + }, + ], metrics: [{ type: 'count', id: '1' }], query: 'escape\\:test', isLogsQuery: true, @@ -215,8 +234,14 @@ describe('ElasticDatasource', function(this: any) { ], } as DataQueryRequest; - const queryBuilderSpy = jest.spyOn(ctx.ds.queryBuilder, 'getLogsQuery'); - const response = await ctx.ds.query(query); + const queryBuilderSpy = jest.spyOn(ds.queryBuilder, 'getLogsQuery'); + let response: any = {}; + + await expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + response = received[0]; + }); + return { queryBuilderSpy, response }; } @@ -243,43 +268,35 @@ describe('ElasticDatasource', function(this: any) { }); describe('When issuing document query', () => { - let requestOptions: any, parts: any, header: any; - - beforeEach(() => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: 'test', - jsonData: { esVersion: 2 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - requestOptions = options; - return Promise.resolve({ data: { responses: [] } }); - }); + async function runScenario() { + const range = createTimeRange(dateTime([2015, 4, 30, 10]), dateTime([2015, 5, 1, 10])); + const targets = [{ refId: 'A', metrics: [{ type: 'raw_document', id: '1' }], query: 
'test' }]; + const query: any = { range, targets }; + const data = { responses: [] }; - const query: DataQueryRequest = { - range: createTimeRange(dateTime([2015, 4, 30, 10]), dateTime([2015, 5, 1, 10])), - targets: [ - { - refId: 'A', - metrics: [{ type: 'raw_document', id: '1' }], - query: 'test', - }, - ], - } as DataQueryRequest; + const { ds, fetchMock } = getTestContext({ jsonData: { esVersion: 2 }, data, database: 'test' }); - ctx.ds.query(query); + await expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toEqual({ data: [] }); + }); - parts = requestOptions.data.split('\n'); - header = JSON.parse(parts[0]); - }); + expect(fetchMock).toHaveBeenCalledTimes(1); + const requestOptions = fetchMock.mock.calls[0][0]; + const parts = requestOptions.data.split('\n'); + const header = JSON.parse(parts[0]); + const body = JSON.parse(parts[1]); - it('should set search type to query_then_fetch', () => { + return { body, header }; + } + + it('should set search type to query_then_fetch', async () => { + const { header } = await runScenario(); expect(header.search_type).toEqual('query_then_fetch'); }); - it('should set size', () => { - const body = JSON.parse(parts[1]); + it('should set size', async () => { + const { body } = await runScenario(); expect(body.size).toBe(500); }); }); @@ -298,180 +315,172 @@ describe('ElasticDatasource', function(this: any) { ], } as DataQueryRequest; - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: '[asd-]YYYY.MM.DD', - jsonData: { interval: 'Daily', esVersion: 7 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - it('should process it properly', async () => { - datasourceRequestMock.mockImplementation(() => { - return Promise.resolve({ - data: { - took: 1, - responses: [ - { - error: { - reason: 'all shards failed', - }, - status: 400, + const { ds } = getTestContext({ + jsonData: { interval: 'Daily', esVersion: 7 }, + data: { + took: 1, + responses: [ 
+ { + error: { + reason: 'all shards failed', }, - ], - }, - }); + status: 400, + }, + ], + }, }); const errObject = { data: '{\n "reason": "all shards failed"\n}', message: 'all shards failed', + config: { + url: 'http://localhost:3000/api/tsdb/query', + }, }; - try { - await ctx.ds.query(query); - } catch (err) { - expect(err).toEqual(errObject); - } + await expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toEqual(errObject); + }); }); it('should properly throw an unknown error', async () => { - datasourceRequestMock.mockImplementation(() => { - return Promise.resolve({ - data: { - took: 1, - responses: [ - { - error: {}, - status: 400, - }, - ], - }, - }); + const { ds } = getTestContext({ + jsonData: { interval: 'Daily', esVersion: 7 }, + data: { + took: 1, + responses: [ + { + error: {}, + status: 400, + }, + ], + }, }); const errObject = { data: '{}', message: 'Unknown elastic error response', + config: { + url: 'http://localhost:3000/api/tsdb/query', + }, }; - try { - await ctx.ds.query(query); - } catch (err) { - expect(err).toEqual(errObject); - } + await expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toEqual(errObject); + }); }); }); describe('When getting fields', () => { - beforeEach(() => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: 'metricbeat', - jsonData: { esVersion: 50 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - return Promise.resolve({ - data: { - metricbeat: { - mappings: { - metricsets: { - _all: {}, - _meta: { - test: 'something', + const data = { + metricbeat: { + mappings: { + metricsets: { + _all: {}, + _meta: { + test: 'something', + }, + properties: { + '@timestamp': { type: 'date' }, + __timestamp: { type: 'date' }, + '@timestampnano': { type: 'date_nanos' }, + beat: { + properties: { + name: { + fields: { raw: { 
type: 'keyword' } }, + type: 'string', }, - properties: { - '@timestamp': { type: 'date' }, - __timestamp: { type: 'date' }, - '@timestampnano': { type: 'date_nanos' }, - beat: { - properties: { - name: { - fields: { raw: { type: 'keyword' } }, - type: 'string', - }, - hostname: { type: 'string' }, - }, + hostname: { type: 'string' }, + }, + }, + system: { + properties: { + cpu: { + properties: { + system: { type: 'float' }, + user: { type: 'float' }, }, - system: { - properties: { - cpu: { - properties: { - system: { type: 'float' }, - user: { type: 'float' }, - }, - }, - process: { - properties: { - cpu: { - properties: { - total: { type: 'float' }, - }, - }, - name: { type: 'string' }, - }, + }, + process: { + properties: { + cpu: { + properties: { + total: { type: 'float' }, }, }, + name: { type: 'string' }, }, }, }, }, }, }, - }); - }); - }); + }, + }, + }; it('should return nested fields', async () => { - const fieldObjects = await ctx.ds.getFields(); - - const fields = _.map(fieldObjects, 'text'); - - expect(fields).toEqual([ - '@timestamp', - '__timestamp', - '@timestampnano', - 'beat.name.raw', - 'beat.name', - 'beat.hostname', - 'system.cpu.system', - 'system.cpu.user', - 'system.process.cpu.total', - 'system.process.name', - ]); + const { ds } = getTestContext({ data, jsonData: { esVersion: 50 }, database: 'metricbeat' }); + + await expect(ds.getFields()).toEmitValuesWith(received => { + expect(received.length).toBe(1); + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); + + expect(fields).toEqual([ + '@timestamp', + '__timestamp', + '@timestampnano', + 'beat.name.raw', + 'beat.name', + 'beat.hostname', + 'system.cpu.system', + 'system.cpu.user', + 'system.process.cpu.total', + 'system.process.name', + ]); + }); }); it('should return number fields', async () => { - const fieldObjects = await ctx.ds.getFields('number'); + const { ds } = getTestContext({ data, jsonData: { esVersion: 50 }, database: 'metricbeat' }); - const 
fields = _.map(fieldObjects, 'text'); + await expect(ds.getFields('number')).toEmitValuesWith(received => { + expect(received.length).toBe(1); + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); - expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']); + expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']); + }); }); it('should return date fields', async () => { - const fieldObjects = await ctx.ds.getFields('date'); + const { ds } = getTestContext({ data, jsonData: { esVersion: 50 }, database: 'metricbeat' }); - const fields = _.map(fieldObjects, 'text'); + await expect(ds.getFields('date')).toEmitValuesWith(received => { + expect(received.length).toBe(1); + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); - expect(fields).toEqual(['@timestamp', '__timestamp', '@timestampnano']); + expect(fields).toEqual(['@timestamp', '__timestamp', '@timestampnano']); + }); }); }); describe('When getting field mappings on indices with gaps', () => { - const twoWeekTimeSrv: any = createTimeSrv('now-2w'); - const basicResponse = { - data: { - metricbeat: { - mappings: { - metricsets: { - _all: {}, - properties: { - '@timestamp': { type: 'date' }, - beat: { - properties: { - hostname: { type: 'string' }, - }, + metricbeat: { + mappings: { + metricsets: { + _all: {}, + properties: { + '@timestamp': { type: 'date' }, + beat: { + properties: { + hostname: { type: 'string' }, }, }, }, @@ -481,51 +490,49 @@ describe('ElasticDatasource', function(this: any) { }; const alternateResponse = { - data: { - metricbeat: { - mappings: { - metricsets: { - _all: {}, - properties: { - '@timestamp': { type: 'date' }, - }, + metricbeat: { + mappings: { + metricsets: { + _all: {}, + properties: { + '@timestamp': { type: 'date' }, }, }, }, }, }; - beforeEach(() => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: '[asd-]YYYY.MM.DD', - jsonData: { 
interval: 'Daily', esVersion: 50 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - }); - it('should return fields of the newest available index', async () => { const twoDaysBefore = toUtc() .subtract(2, 'day') .format('YYYY.MM.DD'); - const threeDaysBefore = toUtc() .subtract(3, 'day') .format('YYYY.MM.DD'); - - datasourceRequestMock.mockImplementation(options => { - if (options.url === `${ELASTICSEARCH_MOCK_URL}/asd-${twoDaysBefore}/_mapping`) { - return Promise.resolve(basicResponse); - } else if (options.url === `${ELASTICSEARCH_MOCK_URL}/asd-${threeDaysBefore}/_mapping`) { - return Promise.resolve(alternateResponse); - } - return Promise.reject({ status: 404 }); + const baseUrl = `${ELASTICSEARCH_MOCK_URL}/asd-${twoDaysBefore}/_mapping`; + const alternateUrl = `${ELASTICSEARCH_MOCK_URL}/asd-${threeDaysBefore}/_mapping`; + + const { ds, timeSrv } = getTestContext({ + from: 'now-2w', + jsonData: { interval: 'Daily', esVersion: 50 }, + mockImplementation: options => { + if (options.url === baseUrl) { + return of(createFetchResponse(basicResponse)); + } else if (options.url === alternateUrl) { + return of(createFetchResponse(alternateResponse)); + } + return throwError({ status: 404 }); + }, }); - const range = twoWeekTimeSrv.timeRange(); - const fieldObjects = await ctx.ds.getFields(undefined, range); + const range = timeSrv.timeRange(); - const fields = _.map(fieldObjects, 'text'); - expect(fields).toEqual(['@timestamp', 'beat.hostname']); + await expect(ds.getFields(undefined, range)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); + expect(fields).toEqual(['@timestamp', 'beat.hostname']); + }); }); it('should not retry when ES is down', async () => { @@ -533,108 +540,101 @@ describe('ElasticDatasource', function(this: any) { .subtract(2, 'day') .format('YYYY.MM.DD'); - const range = twoWeekTimeSrv.timeRange(); - 
datasourceRequestMock.mockImplementation(options => { - if (options.url === `${ELASTICSEARCH_MOCK_URL}/asd-${twoDaysBefore}/_mapping`) { - return Promise.resolve(basicResponse); - } - return Promise.reject({ status: 500 }); + const { ds, timeSrv, fetchMock } = getTestContext({ + from: 'now-2w', + jsonData: { interval: 'Daily', esVersion: 50 }, + mockImplementation: options => { + if (options.url === `${ELASTICSEARCH_MOCK_URL}/asd-${twoDaysBefore}/_mapping`) { + return of(createFetchResponse(basicResponse)); + } + return throwError({ status: 500 }); + }, }); - expect.assertions(2); - try { - await ctx.ds.getFields(undefined, range); - } catch (e) { - expect(e).toStrictEqual({ status: 500 }); - expect(datasourceRequestMock).toBeCalledTimes(1); - } + const range = timeSrv.timeRange(); + + await expect(ds.getFields(undefined, range)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toStrictEqual({ status: 500 }); + expect(fetchMock).toBeCalledTimes(1); + }); }); it('should not retry more than 7 indices', async () => { - const range = twoWeekTimeSrv.timeRange(); - datasourceRequestMock.mockImplementation(() => { - return Promise.reject({ status: 404 }); + const { ds, timeSrv, fetchMock } = getTestContext({ + from: 'now-2w', + jsonData: { interval: 'Daily', esVersion: 50 }, + mockImplementation: options => { + return throwError({ status: 404 }); + }, }); + const range = timeSrv.timeRange(); - expect.assertions(2); - try { - await ctx.ds.getFields(undefined, range); - } catch (e) { - expect(e).toStrictEqual({ status: 404 }); - expect(datasourceRequestMock).toBeCalledTimes(7); - } + await expect(ds.getFields(undefined, range)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toStrictEqual('Could not find an available index for this time range.'); + expect(fetchMock).toBeCalledTimes(7); + }); }); }); describe('When getting fields from ES 7.0', () => { - beforeEach(() => { - createDatasource({ - 
url: ELASTICSEARCH_MOCK_URL, - database: 'genuine.es7._mapping.response', - jsonData: { esVersion: 70 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - return Promise.resolve({ - data: { - 'genuine.es7._mapping.response': { - mappings: { - properties: { - '@timestamp_millis': { - type: 'date', - format: 'epoch_millis', - }, - classification_terms: { - type: 'keyword', - }, - domains: { - type: 'keyword', - }, - ip_address: { - type: 'ip', + const data = { + 'genuine.es7._mapping.response': { + mappings: { + properties: { + '@timestamp_millis': { + type: 'date', + format: 'epoch_millis', + }, + classification_terms: { + type: 'keyword', + }, + domains: { + type: 'keyword', + }, + ip_address: { + type: 'ip', + }, + justification_blob: { + properties: { + criterion: { + type: 'text', + fields: { + keyword: { + type: 'keyword', + ignore_above: 256, + }, }, - justification_blob: { - properties: { - criterion: { - type: 'text', - fields: { - keyword: { - type: 'keyword', - ignore_above: 256, - }, - }, - }, - overall_vote_score: { - type: 'float', - }, - shallow: { - properties: { - jsi: { - properties: { - sdb: { - properties: { - dsel2: { - properties: { - 'bootlegged-gille': { - properties: { - botness: { - type: 'float', - }, - general_algorithm_score: { - type: 'float', - }, - }, - }, - 'uncombed-boris': { - properties: { - botness: { - type: 'float', - }, - general_algorithm_score: { - type: 'float', - }, - }, - }, + }, + overall_vote_score: { + type: 'float', + }, + shallow: { + properties: { + jsi: { + properties: { + sdb: { + properties: { + dsel2: { + properties: { + 'bootlegged-gille': { + properties: { + botness: { + type: 'float', + }, + general_algorithm_score: { + type: 'float', + }, + }, + }, + 'uncombed-boris': { + properties: { + botness: { + type: 'float', + }, + general_algorithm_score: { + type: 'float', }, }, }, @@ -645,202 +645,201 @@ describe('ElasticDatasource', 
function(this: any) { }, }, }, - overall_vote_score: { - type: 'float', - }, - ua_terms_long: { - type: 'keyword', - }, - ua_terms_short: { - type: 'keyword', - }, }, }, }, + overall_vote_score: { + type: 'float', + }, + ua_terms_long: { + type: 'keyword', + }, + ua_terms_short: { + type: 'keyword', + }, }, - }); - }); - }); + }, + }, + }; it('should return nested fields', async () => { - const fieldObjects = await ctx.ds.getFields(); - - const fields = _.map(fieldObjects, 'text'); - - expect(fields).toEqual([ - '@timestamp_millis', - 'classification_terms', - 'domains', - 'ip_address', - 'justification_blob.criterion.keyword', - 'justification_blob.criterion', - 'justification_blob.overall_vote_score', - 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness', - 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score', - 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness', - 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score', - 'overall_vote_score', - 'ua_terms_long', - 'ua_terms_short', - ]); + const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } }); + + await expect(ds.getFields()).toEmitValuesWith(received => { + expect(received.length).toBe(1); + + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); + expect(fields).toEqual([ + '@timestamp_millis', + 'classification_terms', + 'domains', + 'ip_address', + 'justification_blob.criterion.keyword', + 'justification_blob.criterion', + 'justification_blob.overall_vote_score', + 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness', + 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score', + 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness', + 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score', + 'overall_vote_score', + 'ua_terms_long', + 'ua_terms_short', + ]); + 
}); }); it('should return number fields', async () => { - const fieldObjects = await ctx.ds.getFields('number'); - - const fields = _.map(fieldObjects, 'text'); - - expect(fields).toEqual([ - 'justification_blob.overall_vote_score', - 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness', - 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score', - 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness', - 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score', - 'overall_vote_score', - ]); + const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } }); + + await expect(ds.getFields('number')).toEmitValuesWith(received => { + expect(received.length).toBe(1); + + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); + expect(fields).toEqual([ + 'justification_blob.overall_vote_score', + 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.botness', + 'justification_blob.shallow.jsi.sdb.dsel2.bootlegged-gille.general_algorithm_score', + 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.botness', + 'justification_blob.shallow.jsi.sdb.dsel2.uncombed-boris.general_algorithm_score', + 'overall_vote_score', + ]); + }); }); it('should return date fields', async () => { - const fieldObjects = await ctx.ds.getFields('date'); + const { ds } = getTestContext({ data, database: 'genuine.es7._mapping.response', jsonData: { esVersion: 70 } }); - const fields = _.map(fieldObjects, 'text'); + await expect(ds.getFields('date')).toEmitValuesWith(received => { + expect(received.length).toBe(1); - expect(fields).toEqual(['@timestamp_millis']); + const fieldObjects = received[0]; + const fields = _.map(fieldObjects, 'text'); + expect(fields).toEqual(['@timestamp_millis']); + }); }); }); describe('When issuing aggregation query on es5.x', () => { - let requestOptions: any, parts: any, header: any; - - beforeEach(() => { - 
createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: 'test', - jsonData: { esVersion: 5 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - requestOptions = options; - return Promise.resolve({ data: { responses: [] } }); - }); + async function runScenario() { + const range = createTimeRange(dateTime([2015, 4, 30, 10]), dateTime([2015, 5, 1, 10])); + const targets = [ + { + refId: 'A', + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], + metrics: [{ type: 'count', id: '1' }], + query: 'test', + }, + ]; + const query: any = { range, targets }; + const data = { responses: [] }; - const query: DataQueryRequest = { - range: createTimeRange(dateTime([2015, 4, 30, 10]), dateTime([2015, 5, 1, 10])), - targets: [ - { - refId: 'A', - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], - metrics: [{ type: 'count', id: '1' }], - query: 'test', - }, - ], - } as DataQueryRequest; + const { ds, fetchMock } = getTestContext({ jsonData: { esVersion: 5 }, data, database: 'test' }); - ctx.ds.query(query); + await expect(ds.query(query)).toEmitValuesWith(received => { + expect(received.length).toBe(1); + expect(received[0]).toEqual({ data: [] }); + }); - parts = requestOptions.data.split('\n'); - header = JSON.parse(parts[0]); - }); + expect(fetchMock).toHaveBeenCalledTimes(1); + const requestOptions = fetchMock.mock.calls[0][0]; + const parts = requestOptions.data.split('\n'); + const header = JSON.parse(parts[0]); + const body = JSON.parse(parts[1]); - it('should not set search type to count', () => { + return { body, header }; + } + + it('should not set search type to count', async () => { + const { header } = await runScenario(); expect(header.search_type).not.toEqual('count'); }); - it('should set size to 0', () => { - const body = JSON.parse(parts[1]); + it('should set size to 0', async () => { + const { body } = await runScenario(); 
expect(body.size).toBe(0); }); }); describe('When issuing metricFind query on es5.x', () => { - let requestOptions: any, parts, header: any, body: any; - let results: MetricFindValue[]; - - beforeEach(() => { - createDatasource({ - url: ELASTICSEARCH_MOCK_URL, - database: 'test', - jsonData: { esVersion: 5 } as ElasticsearchOptions, - } as DataSourceInstanceSettings); - - datasourceRequestMock.mockImplementation(options => { - requestOptions = options; - return Promise.resolve({ - data: { - responses: [ - { - aggregations: { - '1': { - buckets: [ - { doc_count: 1, key: 'test' }, - { - doc_count: 2, - key: 'test2', - key_as_string: 'test2_as_string', - }, - ], + async function runScenario() { + const data = { + responses: [ + { + aggregations: { + '1': { + buckets: [ + { doc_count: 1, key: 'test' }, + { + doc_count: 2, + key: 'test2', + key_as_string: 'test2_as_string', }, - }, + ], }, - ], + }, }, - }); - }); + ], + }; - ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then(res => { - results = res; - }); + const { ds, fetchMock } = getTestContext({ jsonData: { esVersion: 5 }, data, database: 'test' }); - parts = requestOptions.data.split('\n'); - header = JSON.parse(parts[0]); - body = JSON.parse(parts[1]); - }); + const results = await ds.metricFindQuery('{"find": "terms", "field": "test"}'); + + expect(fetchMock).toHaveBeenCalledTimes(1); + const requestOptions = fetchMock.mock.calls[0][0]; + const parts = requestOptions.data.split('\n'); + const header = JSON.parse(parts[0]); + const body = JSON.parse(parts[1]); - it('should get results', () => { + return { results, body, header }; + } + + it('should get results', async () => { + const { results } = await runScenario(); expect(results.length).toEqual(2); }); - it('should use key or key_as_string', () => { + it('should use key or key_as_string', async () => { + const { results } = await runScenario(); expect(results[0].text).toEqual('test'); expect(results[1].text).toEqual('test2_as_string'); }); - 
it('should not set search type to count', () => { + it('should not set search type to count', async () => { + const { header } = await runScenario(); expect(header.search_type).not.toEqual('count'); }); - it('should set size to 0', () => { + it('should set size to 0', async () => { + const { body } = await runScenario(); expect(body.size).toBe(0); }); - it('should not set terms aggregation size to 0', () => { + it('should not set terms aggregation size to 0', async () => { + const { body } = await runScenario(); expect(body['aggs']['1']['terms'].size).not.toBe(0); }); }); describe('query', () => { - it('should replace range as integer not string', () => { - const dataSource = new ElasticDatasource( - { - url: ELASTICSEARCH_MOCK_URL, - database: '[asd-]YYYY.MM.DD', - jsonData: { - interval: 'Daily', - esVersion: 2, - timeField: '@time', - }, - } as DataSourceInstanceSettings, - templateSrv as TemplateSrv - ); - (dataSource as any).post = jest.fn(() => Promise.resolve({ responses: [] })); - dataSource.query(createElasticQuery()); - - const query = ((dataSource as any).post as jest.Mock).mock.calls[0][1]; - expect(typeof JSON.parse(query.split('\n')[1]).query.bool.filter[0].range['@time'].gte).toBe('number'); + it('should replace range as integer not string', async () => { + const { ds } = getTestContext({ jsonData: { interval: 'Daily', esVersion: 2, timeField: '@time' } }); + const postMock = jest.fn((url: string, data: any) => of(createFetchResponse({ responses: [] }))); + ds['post'] = postMock; + + await expect(ds.query(createElasticQuery())).toEmitValuesWith(received => { + expect(postMock).toHaveBeenCalledTimes(1); + + const query = postMock.mock.calls[0][1]; + expect(typeof JSON.parse(query.split('\n')[1]).query.bool.filter[0].range['@time'].gte).toBe('number'); + }); }); }); it('should correctly interpolate variables in query', () => { + const { ds } = getTestContext(); const query: ElasticsearchQuery = { refId: 'A', bucketAggs: [{ type: 'filters', settings: { 
filters: [{ query: '$var', label: '' }] }, id: '1' }], @@ -848,13 +847,14 @@ describe('ElasticDatasource', function(this: any) { query: '$var', }; - const interpolatedQuery = ctx.ds.interpolateVariablesInQueries([query], {})[0]; + const interpolatedQuery = ds.interpolateVariablesInQueries([query], {})[0]; expect(interpolatedQuery.query).toBe('resolvedVariable'); expect((interpolatedQuery.bucketAggs![0] as Filters).settings!.filters![0].query).toBe('resolvedVariable'); }); it('should correctly handle empty query strings', () => { + const { ds } = getTestContext(); const query: ElasticsearchQuery = { refId: 'A', bucketAggs: [{ type: 'filters', settings: { filters: [{ query: '', label: '' }] }, id: '1' }], @@ -862,7 +862,7 @@ describe('ElasticDatasource', function(this: any) { query: '', }; - const interpolatedQuery = ctx.ds.interpolateVariablesInQueries([query], {})[0]; + const interpolatedQuery = ds.interpolateVariablesInQueries([query], {})[0]; expect(interpolatedQuery.query).toBe('*'); expect((interpolatedQuery.bucketAggs![0] as Filters).settings!.filters![0].query).toBe('*'); diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts index 8e831e8dc6d..a5bd552765f 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.ts @@ -34,6 +34,8 @@ import { } from './components/QueryEditor/MetricAggregationsEditor/aggregations'; import { bucketAggregationConfig } from './components/QueryEditor/BucketAggregationsEditor/utils'; import { isBucketAggregationWithField } from './components/QueryEditor/BucketAggregationsEditor/aggregations'; +import { generate, Observable, of, throwError } from 'rxjs'; +import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators'; // Those are metadata fields as defined in 
https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-fields.html#_identity_metadata_fields. // custom fields can start with underscores, therefore is not safe to exclude anything that starts with one. @@ -101,7 +103,7 @@ export class ElasticDatasource extends DataSourceApi { const options: any = { url: this.url + '/' + url, method: method, @@ -118,16 +120,23 @@ export class ElasticDatasource extends DataSourceApi { - if (err.data && err.data.error) { - throw { - message: 'Elasticsearch error: ' + err.data.error.reason, - error: err.data.error, - }; - } - throw err; - }); + .fetch(options) + .pipe( + map(results => { + results.data.$$config = results.config; + return results.data; + }), + catchError(err => { + if (err.data && err.data.error) { + return throwError({ + message: 'Elasticsearch error: ' + err.data.error.reason, + error: err.data.error, + }); + } + + return throwError(err); + }) + ); } async importQueries(queries: DataQuery[], originMeta: PluginMeta): Promise { @@ -142,40 +151,45 @@ export class ElasticDatasource extends DataSourceApi { - results.data.$$config = results.config; - return results.data; - }); - } else { - return this.request('GET', this.indexPattern.getIndexForToday() + url).then((results: any) => { - results.data.$$config = results.config; - return results.data; - }); + private get(url: string, range = getDefaultTimeRange()): Observable { + let indexList = this.indexPattern.getIndexList(range.from, range.to); + if (!Array.isArray(indexList)) { + indexList = [this.indexPattern.getIndexForToday()]; } + + const indexUrlList = indexList.map(index => index + url); + + return this.requestAllIndices(indexUrlList); } - private async requestAllIndices(indexList: string[], url: string): Promise { + private requestAllIndices(indexList: string[]): Observable { const maxTraversals = 7; // do not go beyond one week (for a daily pattern) const listLen = indexList.length; - for (let i = 0; i < Math.min(listLen, maxTraversals); i++) { - 
try { - return await this.request('GET', indexList[listLen - i - 1] + url); - } catch (err) { - if (err.status !== 404 || i === maxTraversals - 1) { - throw err; + + return generate( + 0, + i => i < Math.min(listLen, maxTraversals), + i => i + 1 + ).pipe( + mergeMap(index => { + // catch all errors and emit an object with an err property to simplify checks later in the pipeline + return this.request('GET', indexList[listLen - index - 1]).pipe(catchError(err => of({ err }))); + }), + skipWhile(resp => resp.err && resp.err.status === 404), // skip all requests that fail because missing Elastic index + throwIfEmpty(() => 'Could not find an available index for this time range.'), // when i === Math.min(listLen, maxTraversals) generate will complete but without emitting any values which means we didn't find a valid index + first(), // take the first value that isn't skipped + map(resp => { + if (resp.err) { + throw resp.err; // if there is some other error except 404 then we must throw it } - } - } + + return resp; + }) + ); } - private post(url: string, data: any) { - return this.request('POST', url, data).then((results: any) => { - results.data.$$config = results.config; - return results.data; - }); + private post(url: string, data: any): Observable { + return this.request('POST', url, data); } annotationQuery(options: any): Promise { @@ -248,75 +262,79 @@ export class ElasticDatasource extends DataSourceApi { - const list = []; - const hits = res.responses[0].hits.hits; + return this.post('_msearch', payload) + .pipe( + map(res => { + const list = []; + const hits = res.responses[0].hits.hits; - const getFieldFromSource = (source: any, fieldName: any) => { - if (!fieldName) { - return; - } - - const fieldNames = fieldName.split('.'); - let fieldValue = source; + const getFieldFromSource = (source: any, fieldName: any) => { + if (!fieldName) { + return; + } - for (let i = 0; i < fieldNames.length; i++) { - fieldValue = fieldValue[fieldNames[i]]; - if (!fieldValue) { - 
console.log('could not find field in annotation: ', fieldName); - return ''; - } - } + const fieldNames = fieldName.split('.'); + let fieldValue = source; - return fieldValue; - }; + for (let i = 0; i < fieldNames.length; i++) { + fieldValue = fieldValue[fieldNames[i]]; + if (!fieldValue) { + console.log('could not find field in annotation: ', fieldName); + return ''; + } + } - for (let i = 0; i < hits.length; i++) { - const source = hits[i]._source; - let time = getFieldFromSource(source, timeField); - if (typeof hits[i].fields !== 'undefined') { - const fields = hits[i].fields; - if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) { - time = fields[timeField]; - } - } + return fieldValue; + }; - const event: { - annotation: any; - time: number; - timeEnd?: number; - text: string; - tags: string | string[]; - } = { - annotation: annotation, - time: toUtc(time).valueOf(), - text: getFieldFromSource(source, textField), - tags: getFieldFromSource(source, tagsField), - }; + for (let i = 0; i < hits.length; i++) { + const source = hits[i]._source; + let time = getFieldFromSource(source, timeField); + if (typeof hits[i].fields !== 'undefined') { + const fields = hits[i].fields; + if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) { + time = fields[timeField]; + } + } - if (timeEndField) { - const timeEnd = getFieldFromSource(source, timeEndField); - if (timeEnd) { - event.timeEnd = toUtc(timeEnd).valueOf(); - } - } + const event: { + annotation: any; + time: number; + timeEnd?: number; + text: string; + tags: string | string[]; + } = { + annotation: annotation, + time: toUtc(time).valueOf(), + text: getFieldFromSource(source, textField), + tags: getFieldFromSource(source, tagsField), + }; + + if (timeEndField) { + const timeEnd = getFieldFromSource(source, timeEndField); + if (timeEnd) { + event.timeEnd = toUtc(timeEnd).valueOf(); + } + } - // legacy support for title tield - if (annotation.titleField) { - const title = 
getFieldFromSource(source, annotation.titleField); - if (title) { - event.text = title + '\n' + event.text; - } - } + // legacy support for title field + if (annotation.titleField) { + const title = getFieldFromSource(source, annotation.titleField); + if (title) { + event.text = title + '\n' + event.text; + } + } - if (typeof event.tags === 'string') { - event.tags = event.tags.split(','); - } + if (typeof event.tags === 'string') { + event.tags = event.tags.split(','); - list.push(event); - } - return list; - }); + list.push(event); + } + return list; + }) + ) + .toPromise(); } private interpolateLuceneQuery(queryString: string, scopedVars: ScopedVars) { @@ -349,26 +367,25 @@ export class ElasticDatasource extends DataSourceApi { - const timeField: any = _.find(dateFields, { text: this.timeField }); - if (!timeField) { - return { - status: 'error', - message: 'No date field named ' + this.timeField + ' found', - }; - } - return { status: 'success', message: 'Index OK. Time field name OK.' }; - }, - (err: any) => { - console.error(err); - if (err.message) { - return { status: 'error', message: err.message }; - } else { - return { status: 'error', message: err.status }; - } - } - ); + return this.getFields('date') + .pipe( + mergeMap(dateFields => { + const timeField: any = _.find(dateFields, { text: this.timeField }); + if (!timeField) { + return of({ status: 'error', message: 'No date field named ' + this.timeField + ' found' }); + } + return of({ status: 'success', message: 'Index OK. Time field name OK.' 
}); + }), + catchError(err => { + console.error(err); + if (err.message) { + return of({ status: 'error', message: err.message }); + } else { + return of({ status: 'error', message: err.status }); + } + }) + ) + .toPromise(); } getQueryHeader(searchType: any, timeFrom?: DateTime, timeTo?: DateTime): string { @@ -507,7 +524,7 @@ export class ElasticDatasource extends DataSourceApi): Promise { + query(options: DataQueryRequest): Observable { let payload = ''; const targets = this.interpolateVariablesInQueries(_.cloneDeep(options.targets), options.scopedVars); const sentTargets: ElasticsearchQuery[] = []; @@ -547,7 +564,7 @@ export class ElasticDatasource extends DataSourceApi { - const er = new ElasticResponse(sentTargets, res); + return this.post(url, payload).pipe( + map(res => { + const er = new ElasticResponse(sentTargets, res); - if (sentTargets.some(target => target.isLogsQuery)) { - const response = er.getLogs(this.logMessageField, this.logLevelField); - for (const dataFrame of response.data) { - enhanceDataFrame(dataFrame, this.dataLinks); + if (sentTargets.some(target => target.isLogsQuery)) { + const response = er.getLogs(this.logMessageField, this.logLevelField); + for (const dataFrame of response.data) { + enhanceDataFrame(dataFrame, this.dataLinks); + } + return response; } - return response; - } - return er.getTimeSeries(); - }); + return er.getTimeSeries(); + }) + ); } isMetadataField(fieldName: string) { @@ -580,94 +599,96 @@ export class ElasticDatasource extends DataSourceApi { + getFields(type?: string, range?: TimeRange): Observable { const configuredEsVersion = this.esVersion; - return this.get('/_mapping', range).then((result: any) => { - const typeMap: any = { - float: 'number', - double: 'number', - integer: 'number', - long: 'number', - date: 'date', - date_nanos: 'date', - string: 'string', - text: 'string', - scaled_float: 'number', - nested: 'nested', - }; + return this.get('/_mapping', range).pipe( + map(result => { + const typeMap: any = 
{ + float: 'number', + double: 'number', + integer: 'number', + long: 'number', + date: 'date', + date_nanos: 'date', + string: 'string', + text: 'string', + scaled_float: 'number', + nested: 'nested', + }; - const shouldAddField = (obj: any, key: string) => { - if (this.isMetadataField(key)) { - return false; - } + const shouldAddField = (obj: any, key: string) => { + if (this.isMetadataField(key)) { + return false; + } - if (!type) { - return true; - } + if (!type) { + return true; + } - // equal query type filter, or via typemap translation - return type === obj.type || type === typeMap[obj.type]; - }; + // equal query type filter, or via typemap translation + return type === obj.type || type === typeMap[obj.type]; + }; - // Store subfield names: [system, process, cpu, total] -> system.process.cpu.total - const fieldNameParts: any = []; - const fields: any = {}; + // Store subfield names: [system, process, cpu, total] -> system.process.cpu.total + const fieldNameParts: any = []; + const fields: any = {}; - function getFieldsRecursively(obj: any) { - for (const key in obj) { - const subObj = obj[key]; + function getFieldsRecursively(obj: any) { + for (const key in obj) { + const subObj = obj[key]; - // Check mapping field for nested fields - if (_.isObject(subObj.properties)) { - fieldNameParts.push(key); - getFieldsRecursively(subObj.properties); - } + // Check mapping field for nested fields + if (_.isObject(subObj.properties)) { + fieldNameParts.push(key); + getFieldsRecursively(subObj.properties); + } - if (_.isObject(subObj.fields)) { - fieldNameParts.push(key); - getFieldsRecursively(subObj.fields); - } + if (_.isObject(subObj.fields)) { + fieldNameParts.push(key); + getFieldsRecursively(subObj.fields); + } - if (_.isString(subObj.type)) { - const fieldName = fieldNameParts.concat(key).join('.'); + if (_.isString(subObj.type)) { + const fieldName = fieldNameParts.concat(key).join('.'); - // Hide meta-fields and check field type - if (shouldAddField(subObj, 
key)) { - fields[fieldName] = { - text: fieldName, - type: subObj.type, - }; + // Hide meta-fields and check field type + if (shouldAddField(subObj, key)) { + fields[fieldName] = { + text: fieldName, + type: subObj.type, + }; + } } } + fieldNameParts.pop(); } - fieldNameParts.pop(); - } - - for (const indexName in result) { - const index = result[indexName]; - if (index && index.mappings) { - const mappings = index.mappings; - if (configuredEsVersion < 70) { - for (const typeName in mappings) { - const properties = mappings[typeName].properties; + for (const indexName in result) { + const index = result[indexName]; + if (index && index.mappings) { + const mappings = index.mappings; + + if (configuredEsVersion < 70) { + for (const typeName in mappings) { + const properties = mappings[typeName].properties; + getFieldsRecursively(properties); + } + } else { + const properties = mappings.properties; getFieldsRecursively(properties); } - } else { - const properties = mappings.properties; - getFieldsRecursively(properties); } } - } - // transform to array - return _.map(fields, value => { - return value; - }); - }); + // transform to array + return _.map(fields, value => { + return value; + }); + }) + ); } - getTerms(queryDef: any, range = getDefaultTimeRange()) { + getTerms(queryDef: any, range = getDefaultTimeRange()): Observable { const searchType = this.esVersion >= 5 ? 
'query_then_fetch' : 'count'; const header = this.getQueryHeader(searchType, range.from, range.to); let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef)); @@ -678,19 +699,21 @@ export class ElasticDatasource extends DataSourceApi { - if (!res.responses[0].aggregations) { - return []; - } + return this.post(url, esQuery).pipe( + map(res => { + if (!res.responses[0].aggregations) { + return []; + } - const buckets = res.responses[0].aggregations['1'].buckets; - return _.map(buckets, bucket => { - return { - text: bucket.key_as_string || bucket.key, - value: bucket.key, - }; - }); - }); + const buckets = res.responses[0].aggregations['1'].buckets; + return _.map(buckets, bucket => { + return { + text: bucket.key_as_string || bucket.key, + value: bucket.key, + }; + }); + }) + ); } getMultiSearchUrl() { @@ -707,13 +730,13 @@ export class ElasticDatasource extends DataSourceApi