mirror of https://github.com/grafana/grafana
AzureMonitor: Frontend cleanup (#66871)
* Remove unused mocks
* Remove time grain converter anys
* Improve mocks
  - Add context mock
  - Update datasource mock
  - Add util functions
* Remove anys from log_analytics_test
* Improve response typing
* Remove redundant angular code
* Remove more anys
  - Add Resource type
* More type updates
* Remove unused code and update arg ds test
* Remove old annotations test
* Remove unused code and update some more types
* Fix lint
* Fix lint
parent c9adcc1e97
commit 6b67bade55
@@ -1,29 +1,71 @@
-import { DataSourceInstanceSettings, DataSourcePluginMeta } from '@grafana/data';
+import { DataSourceInstanceSettings, PluginType } from '@grafana/data';

-import { AzureDataSourceInstanceSettings, AzureDataSourceJsonData } from '../types';
+import { AzureDataSourceInstanceSettings } from '../types';
+import { DeepPartial, mapPartialArrayObject } from './utils';

-export const createMockInstanceSetttings = (
-  overrides?: Partial<DataSourceInstanceSettings>,
-  jsonDataOverrides?: Partial<AzureDataSourceJsonData>
-): AzureDataSourceInstanceSettings => ({
-  url: '/ds/1',
-  id: 1,
-  uid: 'abc',
-  type: 'azuremonitor',
-  access: 'proxy',
-  meta: {} as DataSourcePluginMeta,
-  name: 'azure',
-  readOnly: false,
-  ...overrides,
-
-  jsonData: {
-    cloudName: 'azuremonitor',
-    azureAuthType: 'clientsecret',
-
-    // monitor
-    tenantId: 'abc-123',
-    clientId: 'def-456',
-    subscriptionId: 'ghi-789',
-    ...jsonDataOverrides,
-  },
-});
+export const createMockInstanceSetttings = (
+  overrides?: DeepPartial<DataSourceInstanceSettings>
+): AzureDataSourceInstanceSettings => {
+  const metaOverrides = overrides?.meta;
+  return {
+    url: '/ds/1',
+    id: 1,
+    uid: 'abc',
+    type: 'azuremonitor',
+    access: 'proxy',
+    name: 'azure',
+    readOnly: false,
+    ...overrides,
+    meta: {
+      id: 'grafana-azure-monitor-datasource',
+      name: 'Azure Monitor',
+      type: PluginType.datasource,
+      module: 'path_to_module',
+      baseUrl: 'base_url',
+      ...metaOverrides,
+      info: {
+        description: 'Azure Monitor',
+        updated: 'updated',
+        version: '1.0.0',
+        ...metaOverrides?.info,
+        screenshots: mapPartialArrayObject(
+          { name: 'Azure Screenshot', path: 'path_to_screenshot' },
+          metaOverrides?.info?.screenshots
+        ),
+        links: mapPartialArrayObject(
+          { name: 'Azure Link', url: 'link_url', target: '_blank' },
+          metaOverrides?.info?.links
+        ),
+        author: {
+          name: 'test',
+          ...metaOverrides?.info?.author,
+        },
+        logos: {
+          large: 'large.logo',
+          small: 'small.logo',
+          ...metaOverrides?.info?.logos,
+        },
+        build: {
+          time: 0,
+          repo: 'repo',
+          branch: 'branch',
+          hash: 'hash',
+          number: 1,
+          pr: 1,
+          ...metaOverrides?.info?.build,
+        },
+      },
+    },
+    jsonData: {
+      cloudName: 'azuremonitor',
+      azureAuthType: 'clientsecret',
+
+      // monitor
+      tenantId: 'abc-123',
+      clientId: 'def-456',
+      subscriptionId: 'ghi-789',
+      ...overrides?.jsonData,
+    },
+  };
+};
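A minimal Jest-style sketch of how the reworked factory above can be exercised; the test name, import path, and override values are illustrative assumptions, not part of this commit:

// Illustrative only: deep overrides merge over the defaults defined in the mock above.
import { createMockInstanceSetttings } from './instanceSettings'; // assumed module path

describe('createMockInstanceSetttings (sketch)', () => {
  it('merges nested overrides over the defaults', () => {
    const settings = createMockInstanceSetttings({
      jsonData: { subscriptionId: 'xyz-000' },
      meta: { info: { author: { name: 'someone else' } } },
    });

    expect(settings.jsonData.subscriptionId).toBe('xyz-000');
    expect(settings.jsonData.cloudName).toBe('azuremonitor'); // default preserved
    expect(settings.meta.info.author.name).toBe('someone else');
    expect(settings.meta.info.logos.small).toBe('small.logo'); // default preserved
  });
});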
@@ -1,16 +0,0 @@
export class QueryCtrl {
  target: any;
  datasource: any;
  panelCtrl: any;
  panel: any;
  hasRawMode = false;
  error = '';

  constructor(public $scope: any) {
    this.panelCtrl = this.panelCtrl || { panel: {} };
    this.target = this.target || { target: '' };
    this.panel = this.panelCtrl.panel;
  }

  refresh() {}
}
@@ -1,3 +0,0 @@
import { QueryCtrl } from './query_ctrl';

export { QueryCtrl };
@@ -1,303 +0,0 @@
import { concat, find, flattenDeep, forEach, get, map } from 'lodash';

import { AnnotationEvent, dateTime, TimeSeries, VariableModel } from '@grafana/data';

import { AzureLogsTableData, AzureLogsVariable } from '../types';
import { AzureLogAnalyticsMetadata } from '../types/logAnalyticsMetadata';

export default class ResponseParser {
  declare columns: string[];
  constructor(private results: any) {}

  parseQueryResult(): any {
    let data: any[] = [];
    let columns: any[] = [];
    for (let i = 0; i < this.results.length; i++) {
      if (this.results[i].result.tables.length === 0) {
        continue;
      }
      columns = this.results[i].result.tables[0].columns;
      const rows = this.results[i].result.tables[0].rows;

      if (this.results[i].query.resultFormat === 'time_series') {
        data = concat(data, this.parseTimeSeriesResult(this.results[i].query, columns, rows));
      } else {
        data = concat(data, this.parseTableResult(this.results[i].query, columns, rows));
      }
    }

    return data;
  }

  parseTimeSeriesResult(query: { refId: string; query: any }, columns: any[], rows: any): TimeSeries[] {
    const data: TimeSeries[] = [];
    let timeIndex = -1;
    let metricIndex = -1;
    let valueIndex = -1;

    for (let i = 0; i < columns.length; i++) {
      if (timeIndex === -1 && columns[i].type === 'datetime') {
        timeIndex = i;
      }

      if (metricIndex === -1 && columns[i].type === 'string') {
        metricIndex = i;
      }

      if (valueIndex === -1 && ['int', 'long', 'real', 'double'].indexOf(columns[i].type) > -1) {
        valueIndex = i;
      }
    }

    if (timeIndex === -1) {
      throw new Error('No datetime column found in the result. The Time Series format requires a time column.');
    }

    forEach(rows, (row) => {
      const epoch = ResponseParser.dateTimeToEpoch(row[timeIndex]);
      const metricName = metricIndex > -1 ? row[metricIndex] : columns[valueIndex].name;
      const bucket = ResponseParser.findOrCreateBucket(data, metricName);
      bucket.datapoints.push([row[valueIndex], epoch]);
      bucket.refId = query.refId;
      bucket.meta = {
        executedQueryString: query.query,
      };
    });

    return data;
  }

  parseTableResult(query: { refId: string; query: string }, columns: any[], rows: any[]): AzureLogsTableData {
    const tableResult: AzureLogsTableData = {
      type: 'table',
      columns: map(columns, (col) => {
        return { text: col.name, type: col.type };
      }),
      rows: rows,
      refId: query.refId,
      meta: {
        executedQueryString: query.query,
      },
    };

    return tableResult;
  }

  parseToVariables(): AzureLogsVariable[] {
    const queryResult = this.parseQueryResult();

    const variables: AzureLogsVariable[] = [];
    forEach(queryResult, (result) => {
      forEach(flattenDeep(result.rows), (row) => {
        variables.push({
          text: row,
          value: row,
        } as AzureLogsVariable);
      });
    });

    return variables;
  }

  transformToAnnotations(options: any) {
    const queryResult = this.parseQueryResult();

    const list: AnnotationEvent[] = [];

    forEach(queryResult, (result) => {
      let timeIndex = -1;
      let textIndex = -1;
      let tagsIndex = -1;

      for (let i = 0; i < result.columns.length; i++) {
        if (timeIndex === -1 && result.columns[i].type === 'datetime') {
          timeIndex = i;
        }

        if (textIndex === -1 && result.columns[i].text.toLowerCase() === 'text') {
          textIndex = i;
        }

        if (tagsIndex === -1 && result.columns[i].text.toLowerCase() === 'tags') {
          tagsIndex = i;
        }
      }

      forEach(result.rows, (row) => {
        list.push({
          annotation: options.annotation,
          time: Math.floor(ResponseParser.dateTimeToEpoch(row[timeIndex])),
          text: row[textIndex] ? row[textIndex].toString() : '',
          tags: row[tagsIndex] ? row[tagsIndex].trim().split(/\s*,\s*/) : [],
        });
      });
    });

    return list;
  }

  static findOrCreateBucket(data: TimeSeries[], target: any): TimeSeries {
    let dataTarget: any = find(data, ['target', target]);
    if (!dataTarget) {
      dataTarget = { target: target, datapoints: [], refId: '', query: '' };
      data.push(dataTarget);
    }

    return dataTarget;
  }

  static dateTimeToEpoch(dateTimeValue: any) {
    return dateTime(dateTimeValue).valueOf();
  }

  static parseSubscriptions(result: any): Array<{ text: string; value: string }> {
    const list: Array<{ text: string; value: string }> = [];

    if (!result) {
      return list;
    }

    const valueFieldName = 'subscriptionId';
    const textFieldName = 'displayName';
    for (let i = 0; i < result.value.length; i++) {
      if (!find(list, ['value', get(result.value[i], valueFieldName)])) {
        list.push({
          text: `${get(result.value[i], textFieldName)}`,
          value: get(result.value[i], valueFieldName),
        });
      }
    }

    return list;
  }
}

// matches (name):(type) = (defaultValue)
// e.g. fromRangeStart:datetime = datetime(null)
// - name: fromRangeStart
// - type: datetime
// - defaultValue: datetime(null)
const METADATA_FUNCTION_PARAMS = /([\w\W]+):([\w]+)(?:\s?=\s?([\w\W]+))?/;

function transformMetadataFunction(sourceSchema: AzureLogAnalyticsMetadata) {
  if (!sourceSchema.functions) {
    return [];
  }

  return sourceSchema.functions.map((fn) => {
    const params =
      fn.parameters &&
      fn.parameters
        .split(', ')
        .map((arg) => {
          const match = arg.match(METADATA_FUNCTION_PARAMS);
          if (!match) {
            return;
          }

          const [, name, type, defaultValue] = match;

          return {
            name,
            type,
            defaultValue,
            cslDefaultValue: defaultValue,
          };
        })
        .filter(<T>(v: T): v is Exclude<T, undefined> => !!v);

    return {
      name: fn.name,
      body: fn.body,
      inputParameters: params || [],
    };
  });
}

export function transformMetadataToKustoSchema(
  sourceSchema: AzureLogAnalyticsMetadata,
  nameOrIdOrSomething: string,
  templateVariables: VariableModel[]
) {
  const database = {
    name: nameOrIdOrSomething,
    tables: sourceSchema.tables,
    functions: transformMetadataFunction(sourceSchema),
    majorVersion: 0,
    minorVersion: 0,
  };

  // Adding macros as known functions
  database.functions.push(
    {
      name: '$__timeFilter',
      body: '{ true }',
      inputParameters: [
        {
          name: 'timeColumn',
          type: 'System.String',
          defaultValue: '""',
          cslDefaultValue: '""',
        },
      ],
    },
    {
      name: '$__timeFrom',
      body: '{ datetime(2018-06-05T18:09:58.907Z) }',
      inputParameters: [],
    },
    {
      name: '$__timeTo',
      body: '{ datetime(2018-06-05T20:09:58.907Z) }',
      inputParameters: [],
    },
    {
      name: '$__escapeMulti',
      body: `{ @'\\grafana-vm\Network(eth0)\Total', @'\\hello!'}`,
      inputParameters: [
        {
          name: '$myVar',
          type: 'System.String',
          defaultValue: '$myVar',
          cslDefaultValue: '$myVar',
        },
      ],
    },
    {
      name: '$__contains',
      body: `{ colName in ('value1','value2') }`,
      inputParameters: [
        {
          name: 'colName',
          type: 'System.String',
          defaultValue: 'colName',
          cslDefaultValue: 'colName',
        },
        {
          name: '$myVar',
          type: 'System.String',
          defaultValue: '$myVar',
          cslDefaultValue: '$myVar',
        },
      ],
    }
  );

  // Adding macros as global parameters
  const globalParameters = templateVariables.map((v) => {
    return {
      name: `$${v.name}`,
      type: 'dynamic',
    };
  });

  return {
    clusterType: 'Engine',
    cluster: {
      connectionString: nameOrIdOrSomething,
      databases: [database],
    },
    database: database,
    globalParameters,
  };
}
@@ -0,0 +1,133 @@
import { VariableModel } from '@grafana/data';

import { AzureLogAnalyticsMetadata } from '../types/logAnalyticsMetadata';

// matches (name):(type) = (defaultValue)
// e.g. fromRangeStart:datetime = datetime(null)
// - name: fromRangeStart
// - type: datetime
// - defaultValue: datetime(null)
const METADATA_FUNCTION_PARAMS = /([\w\W]+):([\w]+)(?:\s?=\s?([\w\W]+))?/;

function transformMetadataFunction(sourceSchema: AzureLogAnalyticsMetadata) {
  if (!sourceSchema.functions) {
    return [];
  }

  return sourceSchema.functions.map((fn) => {
    const params =
      fn.parameters &&
      fn.parameters
        .split(', ')
        .map((arg) => {
          const match = arg.match(METADATA_FUNCTION_PARAMS);
          if (!match) {
            return;
          }

          const [, name, type, defaultValue] = match;

          return {
            name,
            type,
            defaultValue,
            cslDefaultValue: defaultValue,
          };
        })
        .filter(<T>(v: T): v is Exclude<T, undefined> => !!v);

    return {
      name: fn.name,
      body: fn.body,
      inputParameters: params || [],
    };
  });
}

export function transformMetadataToKustoSchema(
  sourceSchema: AzureLogAnalyticsMetadata,
  nameOrIdOrSomething: string,
  templateVariables: VariableModel[]
) {
  const database = {
    name: nameOrIdOrSomething,
    tables: sourceSchema.tables,
    functions: transformMetadataFunction(sourceSchema),
    majorVersion: 0,
    minorVersion: 0,
  };

  // Adding macros as known functions
  database.functions.push(
    {
      name: '$__timeFilter',
      body: '{ true }',
      inputParameters: [
        {
          name: 'timeColumn',
          type: 'System.String',
          defaultValue: '""',
          cslDefaultValue: '""',
        },
      ],
    },
    {
      name: '$__timeFrom',
      body: '{ datetime(2018-06-05T18:09:58.907Z) }',
      inputParameters: [],
    },
    {
      name: '$__timeTo',
      body: '{ datetime(2018-06-05T20:09:58.907Z) }',
      inputParameters: [],
    },
    {
      name: '$__escapeMulti',
      body: `{ @'\\grafana-vm\Network(eth0)\Total', @'\\hello!'}`,
      inputParameters: [
        {
          name: '$myVar',
          type: 'System.String',
          defaultValue: '$myVar',
          cslDefaultValue: '$myVar',
        },
      ],
    },
    {
      name: '$__contains',
      body: `{ colName in ('value1','value2') }`,
      inputParameters: [
        {
          name: 'colName',
          type: 'System.String',
          defaultValue: 'colName',
          cslDefaultValue: 'colName',
        },
        {
          name: '$myVar',
          type: 'System.String',
          defaultValue: '$myVar',
          cslDefaultValue: '$myVar',
        },
      ],
    }
  );

  // Adding macros as global parameters
  const globalParameters = templateVariables.map((v) => {
    return {
      name: `$${v.name}`,
      type: 'dynamic',
    };
  });

  return {
    clusterType: 'Engine',
    cluster: {
      connectionString: nameOrIdOrSomething,
      databases: [database],
    },
    database: database,
    globalParameters,
  };
}
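As a quick orientation, a minimal sketch (not part of the commit) of what the extracted helper above produces; the sample schema values, import paths, and casts are assumptions for illustration:

// Illustrative only: feeding a tiny metadata payload through transformMetadataToKustoSchema.
import { VariableModel } from '@grafana/data';

import { AzureLogAnalyticsMetadata } from '../types/logAnalyticsMetadata';
import { transformMetadataToKustoSchema } from './utils'; // assumed relative path

const schema = transformMetadataToKustoSchema(
  {
    tables: [],
    functions: [{ name: 'MyFunc', body: '{ print 1 }', parameters: 'from:datetime = datetime(null)' }],
  } as unknown as AzureLogAnalyticsMetadata, // minimal payload, cast for the sketch
  'workspace-id-or-name',
  [{ name: 'resourceGroup' }] as VariableModel[] // only `name` is read by the helper
);

// schema.database.functions now contains MyFunc (with its parsed `from` parameter)
// plus the Grafana macros ($__timeFilter, $__timeFrom, $__timeTo, $__escapeMulti, $__contains),
// and schema.globalParameters is [{ name: '$resourceGroup', type: 'dynamic' }].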
@@ -1,186 +0,0 @@
import { dateTime } from '@grafana/data';

import LogAnalyticsQuerystringBuilder from './querystring_builder';

describe('LogAnalyticsDatasource', () => {
  let builder: LogAnalyticsQuerystringBuilder;

  beforeEach(() => {
    builder = new LogAnalyticsQuerystringBuilder(
      'query=Tablename | where $__timeFilter()',
      {
        interval: '5m',
        range: {
          from: dateTime().subtract(24, 'hours'),
          to: dateTime(),
        },
        rangeRaw: {
          from: 'now-24h',
          to: 'now',
        },
      },
      'TimeGenerated'
    );
  });

  describe('when $__timeFilter has no column parameter', () => {
    it('should generate a time filter condition with TimeGenerated as the datetime field', () => {
      const query = builder.generate().uriString;

      expect(query).toContain('where%20TimeGenerated%20%3E%3D%20datetime(');
    });
  });

  describe('when $__timeFilter has a column parameter', () => {
    beforeEach(() => {
      builder.rawQueryString = 'query=Tablename | where $__timeFilter(myTime)';
    });

    it('should generate a time filter condition with myTime as the datetime field', () => {
      const query = builder.generate().uriString;

      expect(query).toContain('where%20myTime%20%3E%3D%20datetime(');
    });
  });

  describe('when $__contains and multi template variable has custom All value', () => {
    beforeEach(() => {
      builder.rawQueryString = 'query=Tablename | where $__contains(col, all)';
    });

    it('should generate a where..in clause', () => {
      const query = builder.generate().rawQuery;

      expect(query).toContain(`where 1 == 1`);
    });
  });

  describe('when $__contains and multi template variable has one selected value', () => {
    beforeEach(() => {
      builder.rawQueryString = `query=Tablename | where $__contains(col, 'val1')`;
    });

    it('should generate a where..in clause', () => {
      const query = builder.generate().rawQuery;

      expect(query).toContain(`where col in ('val1')`);
    });
  });

  describe('when $__contains and multi template variable has multiple selected values', () => {
    beforeEach(() => {
      builder.rawQueryString = `query=Tablename | where $__contains(col, 'val1','val2')`;
    });

    it('should generate a where..in clause', () => {
      const query = builder.generate().rawQuery;

      expect(query).toContain(`where col in ('val1','val2')`);
    });
  });

  describe('when $__interval is in the query', () => {
    beforeEach(() => {
      builder.rawQueryString = 'query=Tablename | summarize count() by Category, bin(TimeGenerated, $__interval)';
    });

    it('should replace $__interval with the inbuilt interval option', () => {
      const query = builder.generate().uriString;

      expect(query).toContain('bin(TimeGenerated%2C%205m');
    });
  });

  describe('when using $__timeFrom and $__timeTo is in the query and range is until now', () => {
    beforeEach(() => {
      builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()';
    });

    it('should replace $__timeFrom and $__timeTo with a datetime and the now() function', () => {
      const query = builder.generate().uriString;

      expect(query).toContain('where%20myTime%20%3E%3D%20datetime(');
      expect(query).toContain('myTime%20%3C%3D%20datetime(');
    });
  });

  describe('when using $__timeFrom and $__timeTo is in the query and range is a specific interval', () => {
    beforeEach(() => {
      builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()';
      builder.options.range.to = dateTime().subtract(1, 'hour');
      builder.options.rangeRaw.to = 'now-1h';
    });

    it('should replace $__timeFrom and $__timeTo with datetimes', () => {
      const query = builder.generate().uriString;

      expect(query).toContain('where%20myTime%20%3E%3D%20datetime(');
      expect(query).toContain('myTime%20%3C%3D%20datetime(');
    });
  });

  describe('when using $__escape and multi template variable has one selected value', () => {
    beforeEach(() => {
      builder.rawQueryString = `$__escapeMulti('\\grafana-vm\Network(eth0)\Total Bytes Received')`;
    });

    it('should replace $__escape(val) with KQL style escaped string', () => {
      const query = builder.generate().uriString;
      expect(query).toContain(`%40'%5Cgrafana-vmNetwork(eth0)Total%20Bytes%20Received'`);
    });
  });

  describe('when using $__escape and multi template variable has multiple selected values', () => {
    beforeEach(() => {
      builder.rawQueryString = `CounterPath in ($__escapeMulti('\\grafana-vm\Network(eth0)\Total','\\grafana-vm\Network(eth0)\Total'))`;
    });

    it('should replace $__escape(val) with multiple KQL style escaped string', () => {
      const query = builder.generate().uriString;
      expect(query).toContain(
        `CounterPath%20in%20(%40'%5Cgrafana-vmNetwork(eth0)Total'%2C%20%40'%5Cgrafana-vmNetwork(eth0)Total')`
      );
    });
  });

  describe('when using $__escape and multi template variable has one selected value that contains comma', () => {
    beforeEach(() => {
      builder.rawQueryString = `$__escapeMulti('\\grafana-vm,\Network(eth0)\Total Bytes Received')`;
    });

    it('should replace $__escape(val) with KQL style escaped string', () => {
      const query = builder.generate().uriString;
      expect(query).toContain(`%40'%5Cgrafana-vm%2CNetwork(eth0)Total%20Bytes%20Received'`);
    });
  });

  describe(`when using $__escape and multi template variable value is not wrapped in single '`, () => {
    beforeEach(() => {
      builder.rawQueryString = `$__escapeMulti(\\grafana-vm,\Network(eth0)\Total Bytes Received)`;
    });

    it('should not replace macro', () => {
      const query = builder.generate().uriString;
      expect(query).toContain(`%24__escapeMulti(%5Cgrafana-vm%2CNetwork(eth0)Total%20Bytes%20Received)`);
    });
  });

  describe('when there is no raw range', () => {
    it('should still generate a time filter condition', () => {
      builder = new LogAnalyticsQuerystringBuilder(
        'query=Tablename | where $__timeFilter()',
        {
          interval: '5m',
          range: {
            from: dateTime().subtract(24, 'hours'),
            to: dateTime(),
          },
        },
        'TimeGenerated'
      );
      const query = builder.generate().uriString;

      expect(query).toContain('where%20TimeGenerated%20%20%3E%3D%20datetime(');
    });
  });
});
@@ -1,85 +0,0 @@
import { dateTime } from '@grafana/data';

export default class LogAnalyticsQuerystringBuilder {
  constructor(public rawQueryString: string, public options: any, public defaultTimeField: any) {}

  generate() {
    let queryString = this.rawQueryString;
    const macroRegexp = /\$__([_a-zA-Z0-9]+)\(([^()]*)\)/gi;
    queryString = queryString.replace(macroRegexp, (match, p1, p2) => {
      if (p1 === 'contains') {
        return this.getMultiContains(p2);
      }

      return match;
    });

    queryString = queryString.replace(/\$__escapeMulti\(('[^]*')\)/gi, (match, p1) => this.escape(p1));

    if (this.options) {
      queryString = queryString.replace(macroRegexp, (match, p1, p2) => {
        if (p1 === 'timeFilter') {
          return this.getTimeFilter(p2, this.options);
        }
        if (p1 === 'timeFrom') {
          return this.getFrom(this.options);
        }
        if (p1 === 'timeTo') {
          return this.getUntil(this.options);
        }

        return match;
      });
      queryString = queryString.replace(/\$__interval/gi, this.options.interval);
    }
    const rawQuery = queryString;
    queryString = encodeURIComponent(queryString);
    const uriString = `query=${queryString}`;

    return { uriString, rawQuery };
  }

  getFrom(options: any) {
    const from = options.range.from;
    return `datetime(${dateTime(from).startOf('minute').toISOString()})`;
  }

  getUntil(options: any) {
    if (options.rangeRaw?.to === 'now') {
      const now = Date.now();
      return `datetime(${dateTime(now).startOf('minute').toISOString()})`;
    } else {
      const until = options.range.to;
      return `datetime(${dateTime(until).startOf('minute').toISOString()})`;
    }
  }

  getTimeFilter(timeFieldArg: any, options: any) {
    const timeField = timeFieldArg || this.defaultTimeField;
    if (options.rangeRaw?.to === 'now') {
      return `${timeField} >= ${this.getFrom(options)}`;
    } else {
      return `${timeField} >= ${this.getFrom(options)} and ${timeField} <= ${this.getUntil(options)}`;
    }
  }

  getMultiContains(inputs: string) {
    const firstCommaIndex = inputs.indexOf(',');
    const field = inputs.substring(0, firstCommaIndex);
    const templateVar = inputs.substring(inputs.indexOf(',') + 1);

    if (templateVar && templateVar.toLowerCase().trim() === 'all') {
      return '1 == 1';
    }

    return `${field.trim()} in (${templateVar.trim()})`;
  }

  escape(inputs: string) {
    return inputs
      .substring(1, inputs.length - 1)
      .split(`','`)
      .map((v) => `@'${v}'`)
      .join(', ');
  }
}