mirror of https://github.com/grafana/grafana
More work on restoring features after moving to plugin model for datasources; now annotations work again #1276 #1472
parent
4a72c37fc1
commit
232f980c72
@ -1,78 +1,78 @@ |
||||
// Unit tests for the influxdb_08 InfluxQueryBuilder, updated for the
// datasource plugin model (module moved from features/influxdb to
// plugins/datasource/influxdb_08).
define([
  'plugins/datasource/influxdb_08/queryBuilder'
], function(InfluxQueryBuilder) {
  'use strict';

  describe('InfluxQueryBuilder', function() {

    describe('series with condition and group by', function() {
      var builder = new InfluxQueryBuilder({
        series: 'google.test',
        column: 'value',
        function: 'mean',
        condition: "code=1",
        groupby_field: 'code'
      });

      var query = builder.build();

      it('should generate correct query', function() {
        expect(query).to.be('select code, mean(value) from "google.test" where $timeFilter and code=1 ' +
          'group by time($interval), code order asc');
      });

      it('should expose groupByField', function() {
        expect(builder.groupByField).to.be('code');
      });

    });

    describe('series with fill and minimum group by time', function() {
      var builder = new InfluxQueryBuilder({
        series: 'google.test',
        column: 'value',
        function: 'mean',
        fill: '0',
      });

      var query = builder.build();

      it('should generate correct query', function() {
        expect(query).to.be('select mean(value) from "google.test" where $timeFilter ' +
          'group by time($interval) fill(0) order asc');
      });

    });

    describe('merge function detection', function() {
      it('should not quote wrap regex merged series', function() {
        var builder = new InfluxQueryBuilder({
          series: 'merge(/^google.test/)',
          column: 'value',
          function: 'mean'
        });

        var query = builder.build();

        expect(query).to.be('select mean(value) from merge(/^google.test/) where $timeFilter ' +
          'group by time($interval) order asc');
      });

      it('should quote wrap series names that start with "merge"', function() {
        var builder = new InfluxQueryBuilder({
          series: 'merge.google.test',
          column: 'value',
          function: 'mean'
        });

        var query = builder.build();

        expect(query).to.be('select mean(value) from "merge.google.test" where $timeFilter ' +
          'group by time($interval) order asc');
      });

    });

  });

});
||||
|
||||
@ -1,220 +1,220 @@ |
||||
// Unit tests for the influxdb_08 InfluxSeries response transformer:
// time-series generation (aliases, group-by) and annotation extraction.
// Updated for the datasource plugin model (module moved from
// features/influxdb to plugins/datasource/influxdb_08).
define([
  'plugins/datasource/influxdb_08/influxSeries'
], function(InfluxSeries) {
  'use strict';

  describe('when generating timeseries from influxdb response', function() {

    describe('given two series', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'sequence_number'],
            name: 'prod.server1.cpu',
            points: [[1402596000, 10, 1], [1402596001, 12, 2]]
          },
          {
            columns: ['time', 'mean', 'sequence_number'],
            name: 'prod.server2.cpu',
            points: [[1402596000, 15, 1], [1402596001, 16, 2]]
          }
        ]
      });

      var result = series.getTimeSeries();

      it('should generate two time series', function() {
        expect(result.length).to.be(2);
        expect(result[0].target).to.be('prod.server1.cpu.mean');
        expect(result[0].datapoints[0][0]).to.be(10);
        expect(result[0].datapoints[0][1]).to.be(1402596000);
        expect(result[0].datapoints[1][0]).to.be(12);
        expect(result[0].datapoints[1][1]).to.be(1402596001);

        expect(result[1].target).to.be('prod.server2.cpu.mean');
        expect(result[1].datapoints[0][0]).to.be(15);
        expect(result[1].datapoints[0][1]).to.be(1402596000);
        expect(result[1].datapoints[1][0]).to.be(16);
        expect(result[1].datapoints[1][1]).to.be(1402596001);
      });

    });

    describe('given an alias format', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'sequence_number'],
            name: 'prod.server1.cpu',
            points: [[1402596000, 10, 1], [1402596001, 12, 2]]
          }
        ],
        alias: '$s.testing'
      });

      var result = series.getTimeSeries();

      it('should generate correct series name', function() {
        expect(result[0].target).to.be('prod.server1.cpu.testing');
      });

    });

    describe('given an alias format with segment numbers', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'sequence_number'],
            name: 'prod.server1.cpu',
            points: [[1402596000, 10, 1], [1402596001, 12, 2]]
          }
        ],
        alias: '$1.mean'
      });

      var result = series.getTimeSeries();

      it('should generate correct series name', function() {
        expect(result[0].target).to.be('server1.mean');
      });

    });

    describe('given an alias format and many segments', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'sequence_number'],
            name: 'a0.a1.a2.a3.a4.a5.a6.a7.a8.a9.a10.a11.a12',
            points: [[1402596000, 10, 1], [1402596001, 12, 2]]
          }
        ],
        alias: '$5.$11.mean'
      });

      var result = series.getTimeSeries();

      it('should generate correct series name', function() {
        expect(result[0].target).to.be('a5.a11.mean');
      });

    });

    describe('given an alias format with group by field', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'host'],
            name: 'prod.cpu',
            points: [[1402596000, 10, 'A']]
          }
        ],
        groupByField: 'host',
        alias: '$g.$1'
      });

      var result = series.getTimeSeries();

      it('should generate correct series name', function() {
        expect(result[0].target).to.be('A.cpu');
      });

    });

    describe('given group by column', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'mean', 'host'],
            name: 'prod.cpu',
            points: [
              [1402596000, 10, 'A'],
              [1402596001, 11, 'A'],
              [1402596000, 5, 'B'],
              [1402596001, 6, 'B'],
            ]
          }
        ],
        groupByField: 'host'
      });

      var result = series.getTimeSeries();

      it('should generate two time series', function() {
        expect(result.length).to.be(2);
        expect(result[0].target).to.be('prod.cpu.A');
        expect(result[0].datapoints[0][0]).to.be(10);
        expect(result[0].datapoints[0][1]).to.be(1402596000);
        expect(result[0].datapoints[1][0]).to.be(11);
        expect(result[0].datapoints[1][1]).to.be(1402596001);

        expect(result[1].target).to.be('prod.cpu.B');
        expect(result[1].datapoints[0][0]).to.be(5);
        expect(result[1].datapoints[0][1]).to.be(1402596000);
        expect(result[1].datapoints[1][0]).to.be(6);
        expect(result[1].datapoints[1][1]).to.be(1402596001);
      });

    });

  });

  describe("when creating annotations from influxdb response", function() {
    describe('given column mapping for all columns', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'text', 'sequence_number', 'title', 'tags'],
            name: 'events1',
            points: [[1402596000000, 'some text', 1, 'Hello', 'B'], [1402596001000, 'asd', 2, 'Hello2', 'B']]
          }
        ],
        annotation: {
          query: 'select',
          titleColumn: 'title',
          tagsColumn: 'tags',
          textColumn: 'text',
        }
      });

      var result = series.getAnnotations();

      it('should generate 2 annotations', function() {
        expect(result.length).to.be(2);
        expect(result[0].annotation.query).to.be('select');
        expect(result[0].title).to.be('Hello');
        expect(result[0].time).to.be(1402596000000);
        expect(result[0].tags).to.be('B');
        expect(result[0].text).to.be('some text');
      });

    });

    describe('given no column mapping', function() {
      var series = new InfluxSeries({
        seriesList: [
          {
            columns: ['time', 'text', 'sequence_number'],
            name: 'events1',
            points: [[1402596000000, 'some text', 1]]
          }
        ],
        annotation: { query: 'select' }
      });

      var result = series.getAnnotations();

      it('should generate 1 annotation', function() {
        expect(result.length).to.be(1);
        expect(result[0].title).to.be('some text');
        expect(result[0].time).to.be(1402596000000);
        expect(result[0].tags).to.be(undefined);
        expect(result[0].text).to.be(undefined);
      });

    });

  });

});
||||
|
||||
@ -1,100 +1,100 @@ |
||||
// Unit tests for the influxdb_08 datasource service: metric queries built
// from editor targets, raw queries, and annotation queries, all verified
// against $httpBackend URL expectations. Updated for the datasource plugin
// model (service renamed to InfluxDatasource_08, module moved to
// plugins/datasource/influxdb_08).
define([
  'helpers',
  'plugins/datasource/influxdb_08/datasource'
], function(helpers) {
  'use strict';

  describe('InfluxDatasource', function() {
    var ctx = new helpers.ServiceTestContext();

    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase(['templateSrv']));
    beforeEach(ctx.createService('InfluxDatasource_08'));
    beforeEach(function() {
      ctx.ds = new ctx.service({ url: '', user: 'test', password: 'mupp' });
    });

    describe('When querying influxdb with one target using query editor target spec', function() {
      var results;
      var urlExpected = "/series?p=mupp&q=select+mean(value)+from+%22test%22"+
                        "+where+time+%3E+now()-1h+group+by+time(1s)+order+asc";
      var query = {
        range: { from: 'now-1h', to: 'now' },
        targets: [{ series: 'test', column: 'value', function: 'mean' }],
        interval: '1s'
      };

      var response = [{
        columns: ["time", "sequence_nr", "value"],
        name: 'test',
        points: [[10, 1, 1]],
      }];

      beforeEach(function() {
        ctx.$httpBackend.expect('GET', urlExpected).respond(response);
        ctx.ds.query(query).then(function(data) { results = data; });
        ctx.$httpBackend.flush();
      });

      it('should generate the correct query', function() {
        ctx.$httpBackend.verifyNoOutstandingExpectation();
      });

      it('should return series list', function() {
        expect(results.data.length).to.be(1);
        expect(results.data[0].target).to.be('test.value');
      });

    });

    describe('When querying influxdb with one raw query', function() {
      var results;
      var urlExpected = "/series?p=mupp&q=select+value+from+series"+
                        "+where+time+%3E+now()-1h";
      var query = {
        range: { from: 'now-1h', to: 'now' },
        targets: [{ query: "select value from series where $timeFilter", rawQuery: true }]
      };

      var response = [];

      beforeEach(function() {
        ctx.$httpBackend.expect('GET', urlExpected).respond(response);
        ctx.ds.query(query).then(function(data) { results = data; });
        ctx.$httpBackend.flush();
      });

      it('should generate the correct query', function() {
        ctx.$httpBackend.verifyNoOutstandingExpectation();
      });

    });

    describe('When issuing annotation query', function() {
      var results;
      var urlExpected = "/series?p=mupp&q=select+title+from+events.backend_01"+
                        "+where+time+%3E+now()-1h";

      var range = { from: 'now-1h', to: 'now' };
      var annotation = { query: 'select title from events.$server where $timeFilter' };
      var response = [];

      beforeEach(function() {
        // Stub template interpolation so $server resolves to a concrete host.
        ctx.templateSrv.replace = function(str) {
          return str.replace('$server', 'backend_01');
        };
        ctx.$httpBackend.expect('GET', urlExpected).respond(response);
        ctx.ds.annotationQuery(annotation, range).then(function(data) { results = data; });
        ctx.$httpBackend.flush();
      });

      it('should generate the correct query', function() {
        ctx.$httpBackend.verifyNoOutstandingExpectation();
      });

    });

  });

});
||||
|
||||
|
||||
Loading…
Reference in new issue