mirror of https://github.com/grafana/grafana
feat(elasticsearch): extended stats like std deviation now work, and the sigma option as well; added unique count (cardinality) as well, #1034
parent
efc3def7f2
commit
3999a3caa2
@ -0,0 +1,103 @@ |
||||
define([
],
function () {
  'use strict';

  /**
   * Parses an Elasticsearch _msearch response into Grafana time series.
   *
   * @param {Array}  targets  - query targets, parallel to response.responses;
   *                            each has refId, metrics[] and bucketAggs[].
   * @param {Object} response - raw multi-search body; expects a `responses`
   *                            array of per-query results.
   */
  function ElasticResponse(targets, response) {
    this.targets = targets;
    this.response = response;
  }

  // Recurses down the nested bucket aggregations to build series.
  // `series` is a dict keyed by series name, `level` indexes into
  // target.bucketAggs, and `parentName` accumulates the bucket-key path
  // (starting from the target's refId).
  ElasticResponse.prototype.processBuckets = function(aggs, target, series, level, parentName) {
    var seriesName, value, metric, i, y, bucket, aggDef, esAgg;

    // Appends one [value, time] datapoint, creating the series on first use.
    function addMetricPoint(seriesName, value, time) {
      var current = series[seriesName];
      if (!current) {
        current = series[seriesName] = {target: seriesName, datapoints: []};
      }
      current.datapoints.push([value, time]);
    }

    aggDef = target.bucketAggs[level];
    esAgg = aggs[aggDef.id];

    for (i = 0; i < esAgg.buckets.length; i++) {
      bucket = esAgg.buckets[i];

      // if last agg collect series
      if (level === target.bucketAggs.length - 1) {
        for (y = 0; y < target.metrics.length; y++) {
          metric = target.metrics[y];
          seriesName = parentName;

          switch(metric.type) {
            case 'count': {
              seriesName += ' count';
              value = bucket.doc_count;
              addMetricPoint(seriesName, value, bucket.key);
              break;
            }
            case 'percentiles': {
              // one series per requested percentile, e.g. "A 75", "A 90"
              var values = bucket[metric.id].values;
              for (var prop in values) {
                // own-property guard, consistent with getTimeSeries
                if (values.hasOwnProperty(prop)) {
                  addMetricPoint(seriesName + ' ' + prop, values[prop], bucket.key);
                }
              }
              break;
            }
            case 'extended_stats': {
              var stats = bucket[metric.id];
              // flatten std_deviation_bounds so meta flags can address
              // them by plain name below
              stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
              stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;

              for (var statName in metric.meta) {
                // only emit the stats the user enabled in the metric editor
                if (metric.meta.hasOwnProperty(statName) && metric.meta[statName]) {
                  addMetricPoint(seriesName + ' ' + statName, stats[statName], bucket.key);
                }
              }
              break;
            }
            default: {
              // single-value metrics (avg, sum, min, max, cardinality, ...)
              seriesName += ' ' + metric.field + ' ' + metric.type;
              value = bucket[metric.id].value;
              addMetricPoint(seriesName, value, bucket.key);
              break;
            }
          }
        }
      }
      else {
        // not the innermost agg yet: recurse into this bucket's sub-aggs
        this.processBuckets(bucket, target, series, level + 1, parentName + ' ' + bucket.key);
      }
    }
  };

  /**
   * Converts every sub-response into the shape the graph panel expects.
   *
   * @returns {Object} {data: series[]} where each series is
   *                   {target: string, datapoints: [[value, time], ...]}.
   * @throws {Object}  {message: ...} when any sub-response reports an error
   *                   (plain object kept for compatibility with callers
   *                   reading err.message).
   */
  ElasticResponse.prototype.getTimeSeries = function() {
    var series = [];

    for (var i = 0; i < this.response.responses.length; i++) {
      var response = this.response.responses[i];
      if (response.error) {
        throw { message: response.error };
      }

      var aggregations = response.aggregations;
      var target = this.targets[i];
      var querySeries = {};

      this.processBuckets(aggregations, target, querySeries, 0, target.refId);

      for (var prop in querySeries) {
        if (querySeries.hasOwnProperty(prop)) {
          series.push(querySeries[prop]);
        }
      }
    }

    return { data: series };
  };

  return ElasticResponse;
});
||||
@ -0,0 +1,239 @@ |
||||
define([
  'plugins/datasource/elasticsearch/elasticResponse',
], function(ElasticResponse) {
  'use strict';

  describe('ElasticResponse', function() {
    var targets;
    var response;
    var result;

    // Helper-free fixtures: each describe builds targets/response inline
    // and converts them in beforeEach.

    describe('simple query and count', function() {

      beforeEach(function() {
        // one count metric over a single date histogram
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {doc_count: 10, key: 1000},
                  {doc_count: 15, key: 2000}
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 1 series', function() {
        expect(result.data.length).to.be(1);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].datapoints[0][0]).to.be(10);
        expect(result.data[0].datapoints[0][1]).to.be(1000);
      });

    });

    describe('simple query count & avg aggregation', function() {

      beforeEach(function() {
        // count plus an avg metric -> expect two series out
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {"2": {value: 88}, doc_count: 10, key: 1000},
                  {"2": {value: 99}, doc_count: 15, key: 2000}
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].datapoints[0][0]).to.be(10);
        expect(result.data[0].datapoints[0][1]).to.be(1000);

        expect(result.data[1].target).to.be("A value avg");
        expect(result.data[1].datapoints[0][0]).to.be(88);
        expect(result.data[1].datapoints[1][0]).to.be(99);
      });

    });

    describe('single group by query', function() {

      beforeEach(function() {
        // terms agg on host, date histogram nested inside it
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {
                    "3": {
                      buckets: [
                        {doc_count: 1, key: 1000},
                        {doc_count: 3, key: 2000}
                      ]
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    "3": {
                      buckets: [
                        {doc_count: 2, key: 1000},
                        {doc_count: 8, key: 2000}
                      ]
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('A server1 count');
        expect(result.data[1].target).to.be('A server2 count');
      });
    });

    describe('with percentiles ', function() {

      beforeEach(function() {
        // percentiles metric -> one series per percentile
        targets = [{
          refId: 'A',
          metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {"1": {values: {"75": 3.3, "90": 5.5}}, doc_count: 10, key: 1000},
                  {"1": {values: {"75": 2.3, "90": 4.5}}, doc_count: 15, key: 2000}
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('A 75');
        expect(result.data[1].target).to.be('A 90');
        expect(result.data[0].datapoints[0][0]).to.be(3.3);
        expect(result.data[0].datapoints[0][1]).to.be(1000);
        expect(result.data[1].datapoints[1][0]).to.be(4.5);
      });
    });

    describe('with extended_stats ', function() {

      beforeEach(function() {
        // extended_stats with only max and std_deviation_bounds_upper enabled
        targets = [{
          refId: 'A',
          metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
          bucketAggs: [{type: 'date_histogram', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {
                    "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
                    doc_count: 10,
                    key: 1000
                  },
                  {
                    "1": {max: 7.2, min: 3.5, std_deviation_bounds: {upper: 4, lower: -1}},
                    doc_count: 15,
                    key: 2000
                  }
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('A max');
        expect(result.data[1].target).to.be('A std_deviation_bounds_upper');

        expect(result.data[0].datapoints[0][0]).to.be(10.2);
        expect(result.data[0].datapoints[1][0]).to.be(7.2);

        expect(result.data[1].datapoints[0][0]).to.be(3);
        expect(result.data[1].datapoints[1][0]).to.be(4);
      });
    });

  });
});
||||
Loading…
Reference in new issue