mirror of https://github.com/grafana/grafana
Chore: InfluxDB unit test overhaul (#75436)
* Rename the mock function
* Move tests
* Refactor existing tests
* add influxql_metadata_query tests
* move to root
* remove unnecessary file
* adhoc test
* Remove unused parameter
* tests for future
* fix mocks
* betterer
* changes after review

pull/75724/head
parent a2ee9833dc
commit 9d44fefe82

@@ -1,55 +0,0 @@
type FieldsDefinition = {
  name: string;
  // String type, usually something like 'string' or 'float'.
  type: string;
};
type Measurements = { [measurement: string]: FieldsDefinition[] };
type FieldReturnValue = { text: string };

/**
 * Datasource mock for influx. At the moment this only works for queries that should return measurements or their
 * fields and no other functionality is implemented.
 */
export class InfluxDatasourceMock {
  constructor(private measurements: Measurements) {}

  metricFindQuery(query: string) {
    if (isMeasurementsQuery(query)) {
      return this.getMeasurements();
    } else {
      return this.getMeasurementFields(query);
    }
  }

  private getMeasurements(): FieldReturnValue[] {
    return Object.keys(this.measurements).map((key) => ({ text: key }));
  }

  private getMeasurementFields(query: string): FieldReturnValue[] {
    const match = query.match(/SHOW FIELD KEYS FROM \"(.+)\"/);
    if (!match) {
      throw new Error(`Failed to match query="${query}"`);
    }
    const measurementName = match[1];
    if (!measurementName) {
      throw new Error(`Failed to match measurement name from query="${query}"`);
    }

    const fields = this.measurements[measurementName];
    if (!fields) {
      throw new Error(
        `Failed to find measurement with name="${measurementName}" in measurements="[${Object.keys(
          this.measurements
        ).join(', ')}]"`
      );
    }

    return fields.map((field) => ({
      text: field.name,
    }));
  }
}

function isMeasurementsQuery(query: string) {
  return /SHOW MEASUREMENTS/.test(query);
}
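
For context, a hedged usage sketch of the mock deleted above: the measurement data, test names, and the import path are illustrative only (the real file name is not shown in this diff), but the behavior follows directly from the class as written.

// Illustrative usage of the removed InfluxDatasourceMock; import path is hypothetical.
import { InfluxDatasourceMock } from './datasource_mock';

describe('InfluxDatasourceMock (illustrative)', () => {
  const ds = new InfluxDatasourceMock({
    cpu: [
      { name: 'usage_idle', type: 'float' },
      { name: 'usage_user', type: 'float' },
    ],
  });

  it('returns measurement names for SHOW MEASUREMENTS', () => {
    expect(ds.metricFindQuery('SHOW MEASUREMENTS')).toEqual([{ text: 'cpu' }]);
  });

  it('returns field names for SHOW FIELD KEYS', () => {
    expect(ds.metricFindQuery('SHOW FIELD KEYS FROM "cpu"')).toEqual([
      { text: 'usage_idle' },
      { text: 'usage_user' },
    ]);
  });
});
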
@@ -0,0 +1,318 @@
import { lastValueFrom, of } from 'rxjs';

import { ScopedVars } from '@grafana/data';
import { BackendSrvRequest } from '@grafana/runtime/';
import config from 'app/core/config';

import { TemplateSrv } from '../../../features/templating/template_srv';

import { BROWSER_MODE_DISABLED_MESSAGE } from './constants';
import InfluxDatasource from './datasource';
import {
  getMockDSInstanceSettings,
  getMockInfluxDS,
  mockBackendService,
  mockInfluxFetchResponse,
  mockInfluxQueryRequest,
  mockInfluxQueryWithTemplateVars,
  mockTemplateSrv,
} from './mocks';
import { InfluxQuery, InfluxVersion } from './types';

// we want only frontend mode in this file
config.featureToggles.influxdbBackendMigration = false;
const fetchMock = mockBackendService(mockInfluxFetchResponse());

describe('InfluxDataSource Frontend Mode', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should throw an error if there is 200 response with error', async () => {
    const ds = getMockInfluxDS();
    fetchMock.mockImplementation(() => {
      return of({
        data: {
          results: [
            {
              error: 'Query timeout',
            },
          ],
        },
      });
    });

    try {
      await lastValueFrom(ds.query(mockInfluxQueryRequest()));
    } catch (err) {
      if (err instanceof Error) {
        expect(err.message).toBe('InfluxDB Error: Query timeout');
      }
    }
  });

  describe('outdated browser mode', () => {
    it('should throw an error when querying data', async () => {
      expect.assertions(1);
      const instanceSettings = getMockDSInstanceSettings();
      instanceSettings.access = 'direct';
      const ds = getMockInfluxDS(instanceSettings);
      try {
        await lastValueFrom(ds.query(mockInfluxQueryRequest()));
      } catch (err) {
        if (err instanceof Error) {
          expect(err.message).toBe(BROWSER_MODE_DISABLED_MESSAGE);
        }
      }
    });
  });

  describe('metricFindQuery with HTTP GET', () => {
    let ds: InfluxDatasource;
    const query = 'SELECT max(value) FROM measurement WHERE $timeFilter';
    const queryOptions = {
      range: {
        from: '2018-01-01T00:00:00Z',
        to: '2018-01-02T00:00:00Z',
      },
    };

    let requestQuery: string;
    let requestMethod: string | undefined;
    let requestData: string | null;
    const fetchMockImpl = (req: BackendSrvRequest) => {
      requestMethod = req.method;
      requestQuery = req.params?.q;
      requestData = req.data;
      return of({
        data: {
          status: 'success',
          results: [
            {
              series: [
                {
                  name: 'measurement',
                  columns: ['name'],
                  values: [['cpu']],
                },
              ],
            },
          ],
        },
      });
    };

    beforeEach(async () => {
      jest.clearAllMocks();
      fetchMock.mockImplementation(fetchMockImpl);
    });

    it('should read the http method from jsonData', async () => {
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestMethod).toBe('GET');
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestMethod).toBe('POST');
    });

    it('should replace $timefilter', async () => {
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestQuery).toMatch('time >= 1514764800000ms and time <= 1514851200000ms');
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestQuery).toBeFalsy();
      expect(requestData).toMatch('time%20%3E%3D%201514764800000ms%20and%20time%20%3C%3D%201514851200000ms');
    });

    it('should not have any data in request body if http mode is GET', async () => {
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestData).toBeNull();
    });

    it('should have data in request body if http mode is POST', async () => {
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
      await ds.metricFindQuery(query, queryOptions);
      expect(requestData).not.toBeNull();
      expect(requestData).toMatch('q=SELECT');
    });

    it('parse response correctly', async () => {
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
      let responseGet = await ds.metricFindQuery(query, queryOptions);
      expect(responseGet).toEqual([{ text: 'cpu' }]);
      ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
      let responsePost = await ds.metricFindQuery(query, queryOptions);
      expect(responsePost).toEqual([{ text: 'cpu' }]);
    });
  });

  describe('adhoc variables', () => {
    const adhocFilters = [
      {
        key: 'adhoc_key',
        operator: '=',
        value: 'adhoc_val',
        condition: '',
      },
    ];
    const mockTemplateService = new TemplateSrv();
    mockTemplateService.getAdhocFilters = jest.fn((_: string) => adhocFilters);
    let ds = getMockInfluxDS(getMockDSInstanceSettings(), mockTemplateService);
    it('query should contain the ad-hoc variable', () => {
      ds.query(mockInfluxQueryRequest());
      const expected = encodeURIComponent(
        'SELECT mean("value") FROM "cpu" WHERE time >= 0ms and time <= 10ms AND "adhoc_key" = \'adhoc_val\' GROUP BY time($__interval) fill(null)'
      );
      expect(fetchMock.mock.calls[0][0].data).toBe(`q=${expected}`);
    });
  });

  describe('datasource contract', () => {
    let ds: InfluxDatasource;
    const metricFindQueryMock = jest.fn();
    beforeEach(() => {
      jest.clearAllMocks();
      ds = getMockInfluxDS();
      ds.metricFindQuery = metricFindQueryMock;
    });

    afterEach(() => {
      jest.clearAllMocks();
    });

    it('should check the datasource has "getTagKeys" function defined', () => {
      expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ds))).toContain('getTagKeys');
    });

    it('should check the datasource has "getTagValues" function defined', () => {
      expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ds))).toContain('getTagValues');
    });

    it('should be able to call getTagKeys without specifying any parameter', () => {
      ds.getTagKeys();
      expect(metricFindQueryMock).toHaveBeenCalled();
    });

    it('should be able to call getTagValues without specifying anything but key', () => {
      ds.getTagValues({ key: 'test', filters: [] });
      expect(metricFindQueryMock).toHaveBeenCalled();
    });
  });

  describe('variable interpolation', () => {
    const text = 'interpolationText';
    const text2 = 'interpolationText2';
    const textWithoutFormatRegex = 'interpolationText,interpolationText2';
    const textWithFormatRegex = 'interpolationText|interpolationText2';
    const variableMap: Record<string, string> = {
      $interpolationVar: text,
      $interpolationVar2: text2,
    };
    const adhocFilters = [
      {
        key: 'adhoc',
        operator: '=',
        value: 'val',
        condition: '',
      },
    ];
    const templateSrv = mockTemplateSrv(
      jest.fn((_: string) => adhocFilters),
      jest.fn((target?: string, scopedVars?: ScopedVars, format?: string | Function): string => {
        if (!format) {
          return variableMap[target!] || '';
        }
        if (format === 'regex') {
          return textWithFormatRegex;
        }
        return textWithoutFormatRegex;
      })
    );
    const ds = new InfluxDatasource(getMockDSInstanceSettings(), templateSrv);

    function influxChecks(query: InfluxQuery) {
      expect(templateSrv.replace).toBeCalledTimes(10);
      expect(query.alias).toBe(text);
      expect(query.measurement).toBe(textWithFormatRegex);
      expect(query.policy).toBe(textWithFormatRegex);
      expect(query.limit).toBe(textWithFormatRegex);
      expect(query.slimit).toBe(textWithFormatRegex);
      expect(query.tz).toBe(text);
      expect(query.tags![0].value).toBe(textWithFormatRegex);
      expect(query.groupBy![0].params![0]).toBe(textWithFormatRegex);
      expect(query.select![0][0].params![0]).toBe(textWithFormatRegex);
      expect(query.adhocFilters?.[0].key).toBe(adhocFilters[0].key);
    }

    describe('when interpolating query variables for dashboard->explore', () => {
      it('should interpolate all variables with Flux mode', () => {
        ds.version = InfluxVersion.Flux;
        const fluxQuery = {
          refId: 'x',
          query: '$interpolationVar,$interpolationVar2',
        };
        const queries = ds.interpolateVariablesInQueries([fluxQuery], {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: text2, value: text2 },
        });
        expect(templateSrv.replace).toBeCalledTimes(1);
        expect(queries[0].query).toBe(textWithFormatRegex);
      });

      it('should interpolate all variables with InfluxQL mode', () => {
        ds.version = InfluxVersion.InfluxQL;
        const queries = ds.interpolateVariablesInQueries([mockInfluxQueryWithTemplateVars(adhocFilters)], {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: text2, value: text2 },
        });
        influxChecks(queries[0]);
      });
    });

    describe('when interpolating template variables', () => {
      it('should apply all template variables with Flux mode', () => {
        ds.version = InfluxVersion.Flux;
        const fluxQuery = {
          refId: 'x',
          query: '$interpolationVar',
        };
        const query = ds.applyTemplateVariables(fluxQuery, {
          interpolationVar: {
            text: text,
            value: text,
          },
        });
        expect(templateSrv.replace).toBeCalledTimes(1);
        expect(query.query).toBe(text);
      });

      it('should apply all template variables with InfluxQL mode', () => {
        ds.version = InfluxVersion.InfluxQL;
        ds.access = 'proxy';
        config.featureToggles.influxdbBackendMigration = true;
        const query = ds.applyTemplateVariables(mockInfluxQueryWithTemplateVars(adhocFilters), {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
        });
        influxChecks(query);
      });

      it('should apply all scopedVars to tags', () => {
        ds.version = InfluxVersion.InfluxQL;
        ds.access = 'proxy';
        config.featureToggles.influxdbBackendMigration = true;
        const query = ds.applyTemplateVariables(mockInfluxQueryWithTemplateVars(adhocFilters), {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
        });
        expect(query.tags?.length).toBeGreaterThan(0);
        const value = query.tags?.[0].value;
        const scopedVars = 'interpolationText|interpolationText2';
        expect(value).toBe(scopedVars);
      });
    });
  });
});
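
The helpers imported from './mocks' above (getMockInfluxDS, mockBackendService, and friends) are not part of this diff. Purely as a hedged sketch, assuming mockBackendService wraps the jest.spyOn(backendSrv, 'fetch') pattern used by the legacy test file removed further down, it might look roughly like this; the name, signature, and file location here are assumptions, not the repository's actual implementation.

// Sketch only: a backend-service mock in the style of the removed legacy test below.
// The consuming test file would still need the hoisted jest.mock('@grafana/runtime', ...)
// from that legacy file so getBackendSrv() resolves to this backendSrv instance.
import { of } from 'rxjs';
import { FetchResponse } from '@grafana/runtime';
import { backendSrv } from 'app/core/services/backend_srv';

export function mockBackendServiceSketch(response: FetchResponse) {
  const fetchMock = jest.spyOn(backendSrv, 'fetch');
  // Default implementation; individual tests can override with fetchMock.mockImplementation(...)
  fetchMock.mockImplementation(() => of(response));
  return fetchMock;
}
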
@@ -1,6 +1,6 @@
import { TemplateSrv } from 'app/features/templating/template_srv';

-import InfluxQueryModel from '../influx_query_model';
+import InfluxQueryModel from './influx_query_model';

describe('InfluxQuery', () => {
  const templateSrv = { replace: (val) => val } as TemplateSrv;

@@ -1,6 +1,6 @@
import { produce } from 'immer';

-import InfluxSeries from '../influx_series';
+import InfluxSeries from './influx_series';

describe('when generating timeseries from influxdb response', () => {
  describe('given multiple fields for series', () => {

@@ -0,0 +1,273 @@
import config from 'app/core/config';

import { getAllMeasurements, getAllPolicies, getFieldKeys, getTagKeys, getTagValues } from './influxql_metadata_query';
import { getMockInfluxDS } from './mocks';
import { InfluxQuery } from './types';

describe('influx_metadata_query', () => {
  let query: string | undefined;
  let target: InfluxQuery;
  const mockMetricFindQuery = jest.fn();
  const mockRunMetadataQuery = jest.fn();
  mockMetricFindQuery.mockImplementation((q: string) => {
    query = q;
    return Promise.resolve([]);
  });
  mockRunMetadataQuery.mockImplementation((t: InfluxQuery) => {
    target = t;
    query = t.query;
    return Promise.resolve([]);
  });

  const ds = getMockInfluxDS();
  ds.metricFindQuery = mockMetricFindQuery;
  ds.runMetadataQuery = mockRunMetadataQuery;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  // This should be removed when backend mode is default
  describe('backend mode disabled', () => {
    beforeEach(() => {
      config.featureToggles.influxdbBackendMigration = false;
    });

    function frontendModeChecks() {
      expect(mockRunMetadataQuery).not.toHaveBeenCalled();
      expect(mockMetricFindQuery).toHaveBeenCalled();
    }

    describe('getAllPolicies', () => {
      it('should call metricFindQuery with SHOW RETENTION POLICIES', () => {
        getAllPolicies(ds);
        frontendModeChecks();
        expect(query).toMatch('SHOW RETENTION POLICIES');
      });
    });

    describe('getAllMeasurements', () => {
      it('no tags specified', () => {
        getAllMeasurements(ds, []);
        frontendModeChecks();
        expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
      });

      it('with tags', () => {
        getAllMeasurements(ds, [{ key: 'key', value: 'val' }]);
        frontendModeChecks();
        expect(query).toMatch('SHOW MEASUREMENTS WHERE "key"');
      });

      it('with measurement filter', () => {
        getAllMeasurements(ds, [{ key: 'key', value: 'val' }], 'measurementFilter');
        frontendModeChecks();
        expect(query).toMatch('SHOW MEASUREMENTS WITH MEASUREMENT =~ /(?i)measurementFilter/ WHERE "key"');
      });
    });

    describe('getTagKeys', () => {
      it('no tags specified', () => {
        getTagKeys(ds);
        frontendModeChecks();
        expect(query).toBe('SHOW TAG KEYS');
      });

      it('with measurement', () => {
        getTagKeys(ds, 'test_measurement');
        frontendModeChecks();
        expect(query).toBe('SHOW TAG KEYS FROM "test_measurement"');
      });

      it('with retention policy', () => {
        getTagKeys(ds, 'test_measurement', 'rp');
        frontendModeChecks();
        expect(query).toBe('SHOW TAG KEYS FROM "rp"."test_measurement"');
      });
    });

    describe('getTagValues', () => {
      it('with key', () => {
        getTagValues(ds, [], 'test_key');
        frontendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
      });

      it('with key ends with ::tag', () => {
        getTagValues(ds, [], 'test_key::tag');
        frontendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
      });

      it('with key ends with ::field', async () => {
        const result = await getTagValues(ds, [], 'test_key::field');
        expect(result.length).toBe(0);
      });

      it('with tags', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key');
        frontendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\'');
      });

      it('with measurement', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement');
        frontendModeChecks();
        expect(query).toBe(
          'SHOW TAG VALUES FROM "test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
        );
      });

      it('with retention policy', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement', 'rp');
        frontendModeChecks();
        expect(query).toBe(
          'SHOW TAG VALUES FROM "rp"."test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
        );
      });
    });

    describe('getFieldKeys', () => {
      it('with no retention policy', () => {
        getFieldKeys(ds, 'test_measurement');
        frontendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS FROM "test_measurement"');
      });

      it('with empty measurement', () => {
        getFieldKeys(ds, '');
        frontendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS');
      });

      it('with retention policy', () => {
        getFieldKeys(ds, 'test_measurement', 'rp');
        frontendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS FROM "rp"."test_measurement"');
      });
    });
  });

  describe('backend mode enabled', () => {
    beforeEach(() => {
      config.featureToggles.influxdbBackendMigration = true;
    });

    function backendModeChecks() {
      expect(mockMetricFindQuery).not.toHaveBeenCalled();
      expect(mockRunMetadataQuery).toHaveBeenCalled();
      expect(target).toBeDefined();
      expect(target.refId).toBe('metadataQuery');
      expect(target.rawQuery).toBe(true);
    }

    describe('getAllPolicies', () => {
      it('should call runMetadataQuery with SHOW RETENTION POLICIES', () => {
        getAllPolicies(ds);
        backendModeChecks();
        expect(query).toMatch('SHOW RETENTION POLICIES');
      });
    });

    describe('getAllMeasurements', () => {
      it('no tags specified', () => {
        getAllMeasurements(ds, []);
        backendModeChecks();
        expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
      });

      it('with tags', () => {
        getAllMeasurements(ds, [{ key: 'key', value: 'val' }]);
        backendModeChecks();
        expect(query).toMatch('SHOW MEASUREMENTS WHERE "key"');
      });

      it('with measurement filter', () => {
        getAllMeasurements(ds, [{ key: 'key', value: 'val' }], 'measurementFilter');
        backendModeChecks();
        expect(query).toMatch('SHOW MEASUREMENTS WITH MEASUREMENT =~ /(?i)measurementFilter/ WHERE "key"');
      });
    });

    describe('getTagKeys', () => {
      it('no tags specified', () => {
        getTagKeys(ds);
        backendModeChecks();
        expect(query).toBe('SHOW TAG KEYS');
      });

      it('with measurement', () => {
        getTagKeys(ds, 'test_measurement');
        backendModeChecks();
        expect(query).toBe('SHOW TAG KEYS FROM "test_measurement"');
      });

      it('with retention policy', () => {
        getTagKeys(ds, 'test_measurement', 'rp');
        backendModeChecks();
        expect(query).toBe('SHOW TAG KEYS FROM "rp"."test_measurement"');
      });
    });

    describe('getTagValues', () => {
      it('with key', () => {
        getTagValues(ds, [], 'test_key');
        backendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
      });

      it('with key ends with ::tag', () => {
        getTagValues(ds, [], 'test_key::tag');
        backendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
      });

      it('with key ends with ::field', async () => {
        const result = await getTagValues(ds, [], 'test_key::field');
        expect(result.length).toBe(0);
      });

      it('with tags', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key');
        backendModeChecks();
        expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\'');
      });

      it('with measurement', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement');
        backendModeChecks();
        expect(query).toBe(
          'SHOW TAG VALUES FROM "test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
        );
      });

      it('with retention policy', () => {
        getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement', 'rp');
        backendModeChecks();
        expect(query).toBe(
          'SHOW TAG VALUES FROM "rp"."test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
        );
      });
    });

    describe('getFieldKeys', () => {
      it('with no retention policy', () => {
        getFieldKeys(ds, 'test_measurement');
        backendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS FROM "test_measurement"');
      });

      it('with empty measurement', () => {
        getFieldKeys(ds, '');
        backendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS');
      });

      it('with retention policy', () => {
        getFieldKeys(ds, 'test_measurement', 'rp');
        backendModeChecks();
        expect(query).toBe('SHOW FIELD KEYS FROM "rp"."test_measurement"');
      });
    });
  });
});
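
These tests pin down a contract rather than an implementation: with influxdbBackendMigration disabled the helpers must route the generated InfluxQL statement through metricFindQuery, and with it enabled they must wrap the statement in an InfluxQuery (refId 'metadataQuery', rawQuery true) and hand it to runMetadataQuery. A minimal sketch of that dispatch follows, using a loosely typed structural interface so it does not assume the datasource's exact method signatures; the real influxql_metadata_query.ts is not shown in this diff and may be structured differently.

// Sketch only: the frontend/backend dispatch implied by the assertions above.
import config from 'app/core/config';
import { InfluxQuery } from './types';

// Structural type for the sketch; the real code operates on InfluxDatasource.
interface MetadataRunner {
  metricFindQuery(query: string): Promise<unknown>;
  runMetadataQuery(target: InfluxQuery): Promise<unknown>;
}

function runMetadataStatement(ds: MetadataRunner, statement: string) {
  if (config.featureToggles.influxdbBackendMigration) {
    // Backend mode: wrap the raw statement in an InfluxQuery target.
    const target: InfluxQuery = { refId: 'metadataQuery', rawQuery: true, query: statement };
    return ds.runMetadataQuery(target);
  }
  // Frontend mode: fall back to the classic metricFindQuery path.
  return ds.metricFindQuery(statement);
}

// e.g. runMetadataStatement(ds, 'SHOW TAG KEYS FROM "rp"."test_measurement"');
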
@@ -1,4 +1,4 @@
-import queryPart from '../query_part';
+import queryPart from './query_part';

describe('InfluxQueryPart', () => {
  describe('series with measurement only', () => {

@@ -1,393 +0,0 @@
import { lastValueFrom, of } from 'rxjs';
import { TemplateSrvStub } from 'test/specs/helpers';

import { ScopedVars } from '@grafana/data/src';
import { FetchResponse } from '@grafana/runtime';
import config from 'app/core/config';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__

import { BROWSER_MODE_DISABLED_MESSAGE } from '../constants';
import InfluxDatasource from '../datasource';
import { InfluxQuery, InfluxVersion } from '../types';

//@ts-ignore
const templateSrv = new TemplateSrvStub();

jest.mock('@grafana/runtime', () => ({
  ...(jest.requireActual('@grafana/runtime') as unknown as object),
  getBackendSrv: () => backendSrv,
}));

describe('InfluxDataSource', () => {
  const ctx: any = {
    instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'GET' } },
  };

  const fetchMock = jest.spyOn(backendSrv, 'fetch');

  beforeEach(() => {
    jest.clearAllMocks();
    ctx.instanceSettings.url = '/api/datasources/proxy/1';
    ctx.instanceSettings.access = 'proxy';
    ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
  });

  describe('When issuing metricFindQuery', () => {
    const query = 'SELECT max(value) FROM measurement WHERE $timeFilter';
    const queryOptions = {
      range: {
        from: '2018-01-01T00:00:00Z',
        to: '2018-01-02T00:00:00Z',
      },
    };
    let requestQuery: any;
    let requestMethod: string | undefined;
    let requestData: any;
    let response: any;

    beforeEach(async () => {
      fetchMock.mockImplementation((req) => {
        requestMethod = req.method;
        requestQuery = req.params?.q;
        requestData = req.data;
        return of({
          data: {
            status: 'success',
            results: [
              {
                series: [
                  {
                    name: 'measurement',
                    columns: ['name'],
                    values: [['cpu']],
                  },
                ],
              },
            ],
          },
        } as FetchResponse);
      });

      response = await ctx.ds.metricFindQuery(query, queryOptions);
    });

    it('should replace $timefilter', () => {
      expect(requestQuery).toMatch('time >= 1514764800000ms and time <= 1514851200000ms');
    });

    it('should use the HTTP GET method', () => {
      expect(requestMethod).toBe('GET');
    });

    it('should not have any data in request body', () => {
      expect(requestData).toBeNull();
    });

    it('parse response correctly', () => {
      expect(response).toEqual([{ text: 'cpu' }]);
    });
  });

  describe('When getting error on 200 after issuing a query', () => {
    const queryOptions = {
      range: {
        from: '2018-01-01T00:00:00Z',
        to: '2018-01-02T00:00:00Z',
      },
      rangeRaw: {
        from: '2018-01-01T00:00:00Z',
        to: '2018-01-02T00:00:00Z',
      },
      targets: [{}],
      timezone: 'UTC',
      scopedVars: {
        interval: { text: '1m', value: '1m' },
        __interval: { text: '1m', value: '1m' },
        __interval_ms: { text: 60000, value: 60000 },
      },
    };

    it('throws an error', async () => {
      fetchMock.mockImplementation(() => {
        return of({
          data: {
            results: [
              {
                error: 'Query timeout',
              },
            ],
          },
        } as FetchResponse);
      });

      ctx.ds.retentionPolicies = [''];

      try {
        await lastValueFrom(ctx.ds.query(queryOptions));
      } catch (err) {
        if (err instanceof Error) {
          expect(err.message).toBe('InfluxDB Error: Query timeout');
        }
      }
    });
  });

  describe('When getting a request after issuing a query using outdated Browser Mode', () => {
    beforeEach(() => {
      jest.clearAllMocks();
      ctx.instanceSettings.url = '/api/datasources/proxy/1';
      ctx.instanceSettings.access = 'direct';
      ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
    });

    it('throws an error', async () => {
      try {
        await lastValueFrom(ctx.ds.query({}));
      } catch (err) {
        if (err instanceof Error) {
          expect(err.message).toBe(BROWSER_MODE_DISABLED_MESSAGE);
        }
      }
    });
  });

  describe('InfluxDataSource in POST query mode', () => {
    const ctx: any = {
      instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'POST' } },
    };

    beforeEach(() => {
      ctx.instanceSettings.url = '/api/datasources/proxy/1';
      ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
    });

    describe('When issuing metricFindQuery', () => {
      const query = 'SELECT max(value) FROM measurement';
      const queryOptions = {};
      let requestMethod: string | undefined;
      let requestQueryParameter: Record<string, any> | undefined;
      let queryEncoded: any;
      let requestQuery: any;

      beforeEach(async () => {
        fetchMock.mockImplementation((req) => {
          requestMethod = req.method;
          requestQueryParameter = req.params;
          requestQuery = req.data;
          return of({
            data: {
              results: [
                {
                  series: [
                    {
                      name: 'measurement',
                      columns: ['max'],
                      values: [[1]],
                    },
                  ],
                },
              ],
            },
          } as FetchResponse);
        });

        queryEncoded = await ctx.ds.serializeParams({ q: query });
        await ctx.ds.metricFindQuery(query, queryOptions).then(() => {});
      });

      it('should have the query form urlencoded', () => {
        expect(requestQuery).toBe(queryEncoded);
      });

      it('should use the HTTP POST method', () => {
        expect(requestMethod).toBe('POST');
      });

      it('should not have q as a query parameter', () => {
        expect(requestQueryParameter).not.toHaveProperty('q');
      });
    });
  });

  // Some functions are required by the parent datasource class to provide functionality
  // such as ad-hoc filters, which requires the definition of the getTagKeys, and getTagValues
  describe('Datasource contract', () => {
    const metricFindQueryMock = jest.fn();
    beforeEach(() => {
      ctx.ds.metricFindQuery = metricFindQueryMock;
    });

    afterEach(() => {
      jest.clearAllMocks();
    });

    it('has function called getTagKeys', () => {
      expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ctx.ds))).toContain('getTagKeys');
    });

    it('has function called getTagValues', () => {
      expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ctx.ds))).toContain('getTagValues');
    });

    it('should be able to call getTagKeys without specifying any parameter', () => {
      ctx.ds.getTagKeys();
      expect(metricFindQueryMock).toHaveBeenCalled();
    });

    it('should be able to call getTagValues without specifying anything but key', () => {
      ctx.ds.getTagValues({ key: 'test' });
      expect(metricFindQueryMock).toHaveBeenCalled();
    });
  });

  describe('Variables should be interpolated correctly', () => {
    const instanceSettings: any = {};
    const text = 'interpolationText';
    const text2 = 'interpolationText2';
    const textWithoutFormatRegex = 'interpolationText,interpolationText2';
    const textWithFormatRegex = 'interpolationText|interpolationText2';
    const variableMap: Record<string, string> = {
      $interpolationVar: text,
      $interpolationVar2: text2,
    };
    const adhocFilters = [
      {
        key: 'adhoc',
        operator: '=',
        value: 'val',
        condition: '',
      },
    ];
    const templateSrv: any = {
      getAdhocFilters: jest.fn((name: string) => {
        return adhocFilters;
      }),
      replace: jest.fn((target?: string, scopedVars?: ScopedVars, format?: string | Function): string => {
        if (!format) {
          return variableMap[target!] || '';
        }
        if (format === 'regex') {
          return textWithFormatRegex;
        }
        return textWithoutFormatRegex;
      }),
    };
    const ds = new InfluxDatasource(instanceSettings, templateSrv);

    const influxQuery = {
      refId: 'x',
      alias: '$interpolationVar',
      measurement: '$interpolationVar',
      policy: '$interpolationVar',
      limit: '$interpolationVar',
      slimit: '$interpolationVar',
      tz: '$interpolationVar',
      tags: [
        {
          key: 'cpu',
          operator: '=~',
          value: '/^$interpolationVar,$interpolationVar2$/',
        },
      ],
      groupBy: [
        {
          params: ['$interpolationVar'],
          type: 'tag',
        },
      ],
      select: [
        [
          {
            params: ['$interpolationVar'],
            type: 'field',
          },
        ],
      ],
      adhocFilters,
    };

    function influxChecks(query: InfluxQuery) {
      expect(templateSrv.replace).toBeCalledTimes(10);
      expect(query.alias).toBe(text);
      expect(query.measurement).toBe(textWithFormatRegex);
      expect(query.policy).toBe(textWithFormatRegex);
      expect(query.limit).toBe(textWithFormatRegex);
      expect(query.slimit).toBe(textWithFormatRegex);
      expect(query.tz).toBe(text);
      expect(query.tags![0].value).toBe(textWithFormatRegex);
      expect(query.groupBy![0].params![0]).toBe(textWithFormatRegex);
      expect(query.select![0][0].params![0]).toBe(textWithFormatRegex);
      expect(query.adhocFilters?.[0].key).toBe(adhocFilters[0].key);
    }

    describe('when interpolating query variables for dashboard->explore', () => {
      it('should interpolate all variables with Flux mode', () => {
        ds.version = InfluxVersion.Flux;
        const fluxQuery = {
          refId: 'x',
          query: '$interpolationVar,$interpolationVar2',
        };
        const queries = ds.interpolateVariablesInQueries([fluxQuery], {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: text2, value: text2 },
        });
        expect(templateSrv.replace).toBeCalledTimes(1);
        expect(queries[0].query).toBe(textWithFormatRegex);
      });

      it('should interpolate all variables with InfluxQL mode', () => {
        ds.version = InfluxVersion.InfluxQL;
        const queries = ds.interpolateVariablesInQueries([influxQuery], {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: text2, value: text2 },
        });
        influxChecks(queries[0]);
      });
    });

    describe('when interpolating template variables', () => {
      it('should apply all template variables with Flux mode', () => {
        ds.version = InfluxVersion.Flux;
        const fluxQuery = {
          refId: 'x',
          query: '$interpolationVar',
        };
        const query = ds.applyTemplateVariables(fluxQuery, {
          interpolationVar: {
            text: text,
            value: text,
          },
        });
        expect(templateSrv.replace).toBeCalledTimes(1);
        expect(query.query).toBe(text);
      });

      it('should apply all template variables with InfluxQL mode', () => {
        ds.version = InfluxVersion.InfluxQL;
        ds.access = 'proxy';
        config.featureToggles.influxdbBackendMigration = true;
        const query = ds.applyTemplateVariables(influxQuery, {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
        });
        influxChecks(query);
      });

      it('should apply all scopedVars to tags', () => {
        ds.version = InfluxVersion.InfluxQL;
        ds.access = 'proxy';
        config.featureToggles.influxdbBackendMigration = true;
        const query = ds.applyTemplateVariables(influxQuery, {
          interpolationVar: { text: text, value: text },
          interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
        });
        if (!query.tags?.length) {
          throw new Error('Tags are not defined');
        }
        const value = query.tags[0].value;
        const scopedVars = 'interpolationText|interpolationText2';
        expect(value).toBe(scopedVars);
      });
    });
  });
});