mirror of https://github.com/grafana/grafana
Explore: Transform prometheus query to elasticsearch query (#23670)
Explore: Transform prometheus query to elasticsearch query (#23670)
parent
561920f18b
commit
dfc78d0979
@ -0,0 +1,119 @@ |
|||||||
|
import LanguageProvider from './language_provider'; |
||||||
|
import { PromQuery } from '../prometheus/types'; |
||||||
|
import { ElasticDatasource } from './datasource'; |
||||||
|
import { DataSourceInstanceSettings, dateTime } from '@grafana/data'; |
||||||
|
import { ElasticsearchOptions } from './types'; |
||||||
|
import { TemplateSrv } from '../../../features/templating/template_srv'; |
||||||
|
import { getTimeSrv, TimeSrv } from '../../../features/dashboard/services/TimeSrv'; |
||||||
|
import { getTemplateSrv } from '@grafana/runtime'; |
||||||
|
|
||||||
|
jest.mock('app/features/templating/template_srv', () => { |
||||||
|
return { |
||||||
|
getAdhocFilters: jest.fn(() => [] as any[]), |
||||||
|
replace: jest.fn((a: string) => a), |
||||||
|
}; |
||||||
|
}); |
||||||
|
|
||||||
|
jest.mock('app/features/dashboard/services/TimeSrv', () => ({ |
||||||
|
__esModule: true, |
||||||
|
getTimeSrv: jest.fn().mockReturnValue({ |
||||||
|
timeRange(): any { |
||||||
|
return { |
||||||
|
from: dateTime(1531468681), |
||||||
|
to: dateTime(1531489712), |
||||||
|
}; |
||||||
|
}, |
||||||
|
}), |
||||||
|
})); |
||||||
|
|
||||||
|
const dataSource = new ElasticDatasource( |
||||||
|
{ |
||||||
|
url: 'http://es.com', |
||||||
|
database: '[asd-]YYYY.MM.DD', |
||||||
|
jsonData: { |
||||||
|
interval: 'Daily', |
||||||
|
esVersion: 2, |
||||||
|
timeField: '@time', |
||||||
|
}, |
||||||
|
} as DataSourceInstanceSettings<ElasticsearchOptions>, |
||||||
|
getTemplateSrv() as TemplateSrv, |
||||||
|
getTimeSrv() as TimeSrv |
||||||
|
); |
||||||
|
describe('transform prometheus query to elasticsearch query', () => { |
||||||
|
it('Prometheus query with exact equals labels ( 2 labels ) and metric __name__', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{label1="value1",label2="value2"}' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([ |
||||||
|
{ isLogsQuery: true, query: '__name__:"my_metric" AND label1:"value1" AND label2:"value2"', refId: 'bar' }, |
||||||
|
]); |
||||||
|
}); |
||||||
|
it('Prometheus query with exact equals labels ( 1 labels ) and metric __name__', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{label1="value1"}' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '__name__:"my_metric" AND label1:"value1"', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('Prometheus query with exact equals labels ( 1 labels )', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: '{label1="value1"}' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: 'label1:"value1"', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('Prometheus query with no label and metric __name__', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{}' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '__name__:"my_metric"', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('Prometheus query with no label and metric __name__ without bracket', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: 'my_metric' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '__name__:"my_metric"', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('Prometheus query with rate function and exact equals labels ( 2 labels ) and metric __name__', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: 'rate(my_metric{label1="value1",label2="value2"}[5m])' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([ |
||||||
|
{ isLogsQuery: true, query: '__name__:"my_metric" AND label1:"value1" AND label2:"value2"', refId: 'bar' }, |
||||||
|
]); |
||||||
|
}); |
||||||
|
it('Prometheus query with rate function and exact equals labels not equals labels regex and not regex labels and metric __name__', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { |
||||||
|
refId: 'bar', |
||||||
|
expr: 'rate(my_metric{label1="value1",label2!="value2",label3=~"value.+",label4!~".*tothemoon"}[5m])', |
||||||
|
}; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([ |
||||||
|
{ |
||||||
|
isLogsQuery: true, |
||||||
|
query: |
||||||
|
'__name__:"my_metric" AND label1:"value1" AND NOT label2:"value2" AND label3:/value.+/ AND NOT label4:/.*tothemoon/', |
||||||
|
refId: 'bar', |
||||||
|
}, |
||||||
|
]); |
||||||
|
}); |
||||||
|
}); |
||||||
|
describe('transform prometheus query to elasticsearch query errors', () => { |
||||||
|
it('bad prometheus query with only bracket', () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: '{' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('bad prometheus empty query', async () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: '' }; |
||||||
|
const result = instance.importQueries([promQuery], 'prometheus'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
it('graphite query not handle', async () => { |
||||||
|
const instance = new LanguageProvider(dataSource); |
||||||
|
var promQuery: PromQuery = { refId: 'bar', expr: '' }; |
||||||
|
const result = instance.importQueries([promQuery], 'graphite'); |
||||||
|
expect(result).toEqual([{ isLogsQuery: true, query: '', refId: 'bar' }]); |
||||||
|
}); |
||||||
|
}); |
@ -0,0 +1,127 @@ |
|||||||
|
import { ElasticsearchQuery } from './types'; |
||||||
|
import { DataQuery, LanguageProvider } from '@grafana/data'; |
||||||
|
|
||||||
|
import { ElasticDatasource } from './datasource'; |
||||||
|
|
||||||
|
import { PromQuery } from '../prometheus/types'; |
||||||
|
|
||||||
|
import Prism, { Token } from 'prismjs'; |
||||||
|
import grammar from '../prometheus/promql'; |
||||||
|
|
||||||
|
function getNameLabelValue(promQuery: string, tokens: any): string { |
||||||
|
let nameLabelValue = ''; |
||||||
|
for (let prop in tokens) { |
||||||
|
if (typeof tokens[prop] === 'string') { |
||||||
|
nameLabelValue = tokens[prop] as string; |
||||||
|
break; |
||||||
|
} |
||||||
|
} |
||||||
|
return nameLabelValue; |
||||||
|
} |
||||||
|
|
||||||
|
function extractPrometheusLabels(promQuery: string): string[][] { |
||||||
|
const labels: string[][] = []; |
||||||
|
if (!promQuery || promQuery.length === 0) { |
||||||
|
return labels; |
||||||
|
} |
||||||
|
const tokens = Prism.tokenize(promQuery, grammar); |
||||||
|
const nameLabelValue = getNameLabelValue(promQuery, tokens); |
||||||
|
if (nameLabelValue && nameLabelValue.length > 0) { |
||||||
|
labels.push(['__name__', '=', '"' + nameLabelValue + '"']); |
||||||
|
} |
||||||
|
|
||||||
|
for (let prop in tokens) { |
||||||
|
if (tokens[prop] instanceof Token) { |
||||||
|
let token: Token = tokens[prop] as Token; |
||||||
|
if (token.type === 'context-labels') { |
||||||
|
let labelKey = ''; |
||||||
|
let labelValue = ''; |
||||||
|
let labelOperator = ''; |
||||||
|
let contentTokens: any[] = token.content as any[]; |
||||||
|
for (let currentToken in contentTokens) { |
||||||
|
if (typeof contentTokens[currentToken] === 'string') { |
||||||
|
let currentStr: string; |
||||||
|
currentStr = contentTokens[currentToken] as string; |
||||||
|
if (currentStr === '=' || currentStr === '!=' || currentStr === '=~' || currentStr === '!~') { |
||||||
|
labelOperator = currentStr; |
||||||
|
} |
||||||
|
} else if (contentTokens[currentToken] instanceof Token) { |
||||||
|
switch (contentTokens[currentToken].type) { |
||||||
|
case 'label-key': |
||||||
|
labelKey = contentTokens[currentToken].content as string; |
||||||
|
break; |
||||||
|
case 'label-value': |
||||||
|
labelValue = contentTokens[currentToken].content as string; |
||||||
|
labels.push([labelKey, labelOperator, labelValue]); |
||||||
|
break; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
return labels; |
||||||
|
} |
||||||
|
|
||||||
|
function getElasticsearchQuery(prometheusLabels: string[][]): string { |
||||||
|
let elasticsearchLuceneLabels = []; |
||||||
|
for (let keyOperatorValue of prometheusLabels) { |
||||||
|
switch (keyOperatorValue[1]) { |
||||||
|
case '=': { |
||||||
|
elasticsearchLuceneLabels.push(keyOperatorValue[0] + ':' + keyOperatorValue[2]); |
||||||
|
break; |
||||||
|
} |
||||||
|
case '!=': { |
||||||
|
elasticsearchLuceneLabels.push('NOT ' + keyOperatorValue[0] + ':' + keyOperatorValue[2]); |
||||||
|
break; |
||||||
|
} |
||||||
|
case '=~': { |
||||||
|
elasticsearchLuceneLabels.push( |
||||||
|
keyOperatorValue[0] + ':/' + keyOperatorValue[2].substring(1, keyOperatorValue[2].length - 1) + '/' |
||||||
|
); |
||||||
|
break; |
||||||
|
} |
||||||
|
case '!~': { |
||||||
|
elasticsearchLuceneLabels.push( |
||||||
|
'NOT ' + keyOperatorValue[0] + ':/' + keyOperatorValue[2].substring(1, keyOperatorValue[2].length - 1) + '/' |
||||||
|
); |
||||||
|
break; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
return elasticsearchLuceneLabels.join(' AND '); |
||||||
|
} |
||||||
|
|
||||||
|
export default class ElasticsearchLanguageProvider extends LanguageProvider { |
||||||
|
request: (url: string, params?: any) => Promise<any>; |
||||||
|
start: () => Promise<any[]>; |
||||||
|
datasource: ElasticDatasource; |
||||||
|
|
||||||
|
constructor(datasource: ElasticDatasource, initialValues?: any) { |
||||||
|
super(); |
||||||
|
this.datasource = datasource; |
||||||
|
|
||||||
|
Object.assign(this, initialValues); |
||||||
|
} |
||||||
|
|
||||||
|
importQueries(queries: DataQuery[], datasourceType: string): ElasticsearchQuery[] { |
||||||
|
if (datasourceType === 'prometheus' || datasourceType === 'loki') { |
||||||
|
return queries.map(query => { |
||||||
|
let prometheusQuery: PromQuery = query as PromQuery; |
||||||
|
const expr = getElasticsearchQuery(extractPrometheusLabels(prometheusQuery.expr)); |
||||||
|
return { |
||||||
|
isLogsQuery: true, |
||||||
|
query: expr, |
||||||
|
refId: query.refId, |
||||||
|
}; |
||||||
|
}); |
||||||
|
} |
||||||
|
return queries.map(query => { |
||||||
|
return { |
||||||
|
isLogsQuery: true, |
||||||
|
query: '', |
||||||
|
refId: query.refId, |
||||||
|
}; |
||||||
|
}); |
||||||
|
} |
||||||
|
} |
Loading…
Reference in new issue