mirror of https://github.com/grafana/grafana
Prometheus: Incremental querying option for `to: now` dashboards (#62932)
Adds a new feature to Prometheus dashboards that caches queried time series data and trims previously cached ranges from outgoing requests, improving dashboard refresh performance. Co-authored-by: Galen <galen.kistler@grafana.com> Co-authored-by: Leon <leon.sorokin@grafana.com>
parent c5172247a9
commit 2b2a4e13e5
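In short: for `to: now` dashboards, only the window since the previous request's end is re-queried (padded by an overlap window, 10m by default), and the partial response is merged into the cached frames. A minimal sketch of the range arithmetic, mirroring `requestInfo` in QueryCache.ts below (timestamps are illustrative, not from this diff):

// Example timestamps (ms); prevTo is the end of the previous request,
// [newFrom, newTo] is the new dashboard range.
const prevTo = 1675110781332;
const newFrom = prevTo - 6 * 60 * 60 * 1000; // now-6h
const overlapWindowMs = 10 * 60 * 1000; // default '10m' re-query overlap

// Re-query only the uncached tail, padded by the overlap window; the clamp
// keeps a short range with frequent refresh from querying before newFrom.
const newFromPartial = Math.max(prevTo - overlapWindowMs, newFrom);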
@@ -0,0 +1,93 @@
import { closestIdx } from './StreamingDataFrame';

export type Table = [times: number[], ...values: any[][]];

// prevTable and nextTable are assumed sorted ASC on reference [0] arrays
// nextTable is assumed to be contiguous, only edges are checked for overlap
// ...so prev: [1,2,5] + next: [3,4,6] -> [1,2,3,4,6]
export function amendTable(prevTable: Table, nextTable: Table): Table {
  let [prevTimes] = prevTable;
  let [nextTimes] = nextTable;

  let pLen = prevTimes.length;
  let pStart = prevTimes[0];
  let pEnd = prevTimes[pLen - 1];

  let nLen = nextTimes.length;
  let nStart = nextTimes[0];
  let nEnd = nextTimes[nLen - 1];

  let outTable: Table;

  if (pLen) {
    if (nLen) {
      // append, no overlap
      if (nStart > pEnd) {
        outTable = prevTable.map((_, i) => prevTable[i].concat(nextTable[i])) as Table;
      }
      // prepend, no overlap
      else if (nEnd < pStart) {
        outTable = nextTable.map((_, i) => nextTable[i].concat(prevTable[i])) as Table;
      }
      // full replace
      else if (nStart <= pStart && nEnd >= pEnd) {
        outTable = nextTable;
      }
      // partial replace (nextTable falls strictly inside prevTable)
      // left unhandled here; assumed not to occur with incremental queries, which always extend forward
      else if (nStart > pStart && nEnd < pEnd) {
      }
      // append, with overlap
      else if (nStart >= pStart) {
        let idx = closestIdx(nStart, prevTimes);
        idx = prevTimes[idx] < nStart ? idx + 1 : idx;
        outTable = prevTable.map((_, i) => prevTable[i].slice(0, idx).concat(nextTable[i])) as Table;
      }
      // prepend, with overlap
      else if (nEnd >= pStart) {
        let idx = closestIdx(nEnd, prevTimes);
        idx = prevTimes[idx] > nEnd ? idx : idx + 1;
        outTable = nextTable.map((_, i) => nextTable[i].concat(prevTable[i].slice(idx))) as Table;
      }
    } else {
      outTable = prevTable;
    }
  } else {
    if (nLen) {
      outTable = nextTable;
    } else {
      outTable = [[]];
    }
  }

  return outTable!;
}

export function trimTable(table: Table, fromTime: number, toTime: number): Table {
  let [times, ...vals] = table;
  let fromIdx: number | undefined;
  let toIdx: number | undefined;

  // trim to bounds
  if (times[0] < fromTime) {
    fromIdx = closestIdx(fromTime, times);

    if (times[fromIdx] < fromTime) {
      fromIdx++;
    }
  }

  if (times[times.length - 1] > toTime) {
    toIdx = closestIdx(toTime, times);

    if (times[toIdx] > toTime) {
      toIdx--;
    }
  }

  if (fromIdx != null || toIdx != null) {
    times = times.slice(fromIdx ?? 0, toIdx);
    vals = vals.map((vals2) => vals2.slice(fromIdx ?? 0, toIdx));
  }

  return [times, ...vals];
}
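A small usage sketch of the two helpers above, using the numbers from the merge comment (the value row is illustrative, not from this diff):

const prev: Table = [[1, 2, 5], [10, 20, 50]];
const next: Table = [[3, 4, 6], [30, 40, 60]];

// next overlaps prev's tail, so prev is truncated before the overlap and next wins
const merged = amendTable(prev, next);
// merged: [[1, 2, 3, 4, 6], [10, 20, 30, 40, 60]]

// keep only samples whose times fall within [2, 6]
const trimmed = trimTable(merged, 2, 6);
// trimmed: [[2, 3, 4, 6], [20, 30, 40, 60]]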
@@ -0,0 +1,490 @@
import moment from 'moment';

import { DataFrame, DataQueryRequest, DateTime, dateTime, TimeRange } from '@grafana/data/src';

import { QueryEditorMode } from '../querybuilder/shared/types';
import { PromQuery } from '../types';

import { getTargSig, QueryCache } from './QueryCache';
import { IncrementalStorageDataFrameScenarios } from './QueryCacheTestData';

const mockRequest = (request?: Partial<DataQueryRequest<PromQuery>>): DataQueryRequest<PromQuery> => {
  // Histogram
  const defaultRequest: DataQueryRequest<PromQuery> = {
    app: 'undefined',
    requestId: '',
    timezone: '',
    range: {
      from: moment('2023-01-30T19:33:01.332Z') as DateTime,
      to: moment('2023-01-30T20:33:01.332Z') as DateTime,
      raw: { from: 'now-1h', to: 'now' },
    },
    interval: '15s',
    intervalMs: 15000,
    targets: [
      {
        datasource: { type: 'prometheus', uid: 'OPQv8Kc4z' },
        editorMode: QueryEditorMode.Code,
        exemplar: false,
        expr: 'sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[$__rate_interval]))',
        format: 'heatmap',
        legendFormat: '{{le}}',
        range: true,
        refId: 'A',
        utcOffsetSec: -21600,
      },
    ],
    maxDataPoints: 871,
    scopedVars: {
      __interval: { text: '15s', value: '15s' },
      __interval_ms: { text: '15000', value: 15000 },
    },
    startTime: 1675110781332,
    rangeRaw: { from: 'now-1h', to: 'now' },
  };
  return {
    ...defaultRequest,
    ...request,
  };
};

describe('QueryCache', function () {
  it('instantiates', () => {
    const storage = new QueryCache();
    expect(storage).toBeInstanceOf(QueryCache);
  });

  it('will not modify or crash with empty response', () => {
    const storage = new QueryCache();
    const firstFrames: DataFrame[] = [];
    const secondFrames: DataFrame[] = [];

    const cache = new Map<string, string>();

    // Start time of scenario
    const firstFrom = dateTime(new Date(1675262550000));
    // End time of scenario
    const firstTo = dateTime(new Date(1675262550000)).add(6, 'hours');

    const firstRange: TimeRange = {
      from: firstFrom,
      to: firstTo,
      raw: {
        from: 'now-6h',
        to: 'now',
      },
    };

    // Same query 2 minutes later
    const numberOfSamplesLater = 4;
    const interval = 30000;

    const secondFrom = dateTime(new Date(1675262550000 + interval * numberOfSamplesLater));
    const secondTo = dateTime(new Date(1675262550000 + interval * numberOfSamplesLater)).add(6, 'hours');

    const secondRange: TimeRange = {
      from: secondFrom,
      to: secondTo,
      raw: {
        from: 'now-6h',
        to: 'now',
      },
    };

    const targetSignature = `'1=1'|${interval}|${JSON.stringify(secondRange.raw)}`;
    const dashboardId = `dashid`;
    const panelId = 2;
    const targetIdentity = `${dashboardId}|${panelId}|A`;

    cache.set(targetIdentity, targetSignature);

    const firstStoredFrames = storage.procFrames(
      mockRequest({
        range: firstRange,
        dashboardUID: dashboardId,
        panelId: panelId,
      }),
      {
        requests: [], // unused
        targSigs: cache,
        shouldCache: true,
      },
      firstFrames
    );

    const cached = storage.cache.get(targetIdentity);

    expect(cached?.frames[0].fields[0].values.length).toEqual(firstFrames[0]?.fields[0]?.values?.length);
    expect(firstStoredFrames[0]?.fields[0].values.length).toEqual(firstFrames[0]?.fields[0]?.values?.length);

    // Should return the request frames unaltered
    expect(firstStoredFrames).toEqual(firstFrames);

    const secondRequest = mockRequest({
      range: secondRange,
      dashboardUID: dashboardId,
      panelId: panelId,
    });

    const secondStoredFrames = storage.procFrames(
      secondRequest,
      {
        requests: [], // unused
        targSigs: cache,
        shouldCache: true,
      },
      secondFrames
    );

    const storageLengthAfterSubsequentQuery = storage.cache.get(targetIdentity);

    expect(secondStoredFrames).toEqual([]);

    storageLengthAfterSubsequentQuery?.frames.forEach((dataFrame, index) => {
      const secondFramesLength = secondFrames[index].fields[0].values.length;
      const firstFramesLength = firstFrames[index].fields[0].values.length;

      const cacheLength = dataFrame.fields[0].values.length;

      // Cache can contain more, but never less
      expect(cacheLength).toBeGreaterThanOrEqual(secondFramesLength + firstFramesLength - (20 + numberOfSamplesLater));

      // Fewer results are sent in the incremental response
      expect(firstFramesLength).toBeGreaterThan(secondFramesLength);
    });
  });

  it('Merges incremental queries in storage', () => {
    const scenarios = [
      IncrementalStorageDataFrameScenarios.histogram.getSeriesWithGapAtEnd(),
      IncrementalStorageDataFrameScenarios.histogram.getSeriesWithGapInMiddle(),
      IncrementalStorageDataFrameScenarios.histogram.getSeriesWithGapAtStart(),
    ];
    scenarios.forEach((scenario, index) => {
      const storage = new QueryCache();
      const firstFrames = scenario.first.dataFrames as unknown as DataFrame[];
      const secondFrames = scenario.second.dataFrames as unknown as DataFrame[];

      const targetSignatures = new Map<string, string>();

      // Start time of scenario
      const firstFrom = dateTime(new Date(1675262550000));
      // End time of scenario
      const firstTo = dateTime(new Date(1675262550000)).add(6, 'hours');

      const firstRange: TimeRange = {
        from: firstFrom,
        to: firstTo,
        raw: {
          from: 'now-6h',
          to: 'now',
        },
      };

      // Same query 2 minutes later
      const numberOfSamplesLater = 4;
      const interval = 30000;

      const secondFrom = dateTime(new Date(1675262550000 + interval * numberOfSamplesLater));
      const secondTo = dateTime(new Date(1675262550000 + interval * numberOfSamplesLater)).add(6, 'hours');

      const secondRange: TimeRange = {
        from: secondFrom,
        to: secondTo,
        raw: {
          from: 'now-6h',
          to: 'now',
        },
      };

      const dashboardId = `dashid--${index}`;
      const panelId = 2 + index;

      // This can't change
      const targetIdentity = `${dashboardId}|${panelId}|A`;

      const request = mockRequest({
        range: firstRange,
        dashboardUID: dashboardId,
        panelId: panelId,
      });

      // But the signature can, and we should clean up any non-matching signatures
      const targetSignature = getTargSig(request.targets[0].expr, request, request.targets[0]);

      targetSignatures.set(targetIdentity, targetSignature);

      const firstStoredFrames = storage.procFrames(
        request,
        {
          requests: [], // unused
          targSigs: targetSignatures,
          shouldCache: true,
        },
        firstFrames
      );

      const cached = storage.cache.get(targetIdentity);

      // The number of cached values should equal the number of values received from the API
      expect(cached?.frames[0].fields[0].values.length).toEqual(firstFrames[0].fields[0].values.length);

      // Should return the request frames unaltered
      expect(firstStoredFrames).toEqual(firstFrames);

      const secondRequest = mockRequest({
        range: secondRange,
        dashboardUID: dashboardId,
        panelId: panelId,
      });

      const secondStoredFrames = storage.procFrames(
        secondRequest,
        {
          requests: [], // unused
          targSigs: targetSignatures,
          shouldCache: true,
        },
        secondFrames
      );

      const storageLengthAfterSubsequentQuery = storage.cache.get(targetIdentity);

      storageLengthAfterSubsequentQuery?.frames.forEach((dataFrame, index) => {
        const secondFramesLength = secondFrames[index].fields[0].values.length;
        const firstFramesLength = firstFrames[index].fields[0].values.length;

        const cacheLength = dataFrame.fields[0].values.length;

        // Cache can contain more, but never less
        expect(cacheLength).toBeGreaterThanOrEqual(
          secondFramesLength + firstFramesLength - (20 + numberOfSamplesLater)
        );

        // Fewer results are sent in the incremental response
        expect(firstFramesLength).toBeGreaterThan(secondFramesLength);
      });

      // Every value from the response should be present in the stored frames (exhaustive sanity check)
      secondFrames.forEach((frame, frameIdx) => {
        frame.fields.forEach((field, fieldIdx) => {
          secondFrames[frameIdx].fields[fieldIdx].values.toArray().forEach((value) => {
            expect(secondStoredFrames[frameIdx].fields[fieldIdx].values).toContain(value);
          });
        });
      });

      const interpolateString = (s: string) => {
        return s;
      };
      const secondRequestModified = {
        ...secondRequest,
        range: {
          ...secondRequest.range,
          to: dateTime(secondRequest.range.to.valueOf() + 30000),
        },
      };
      const cacheRequest = storage.requestInfo(secondRequestModified, interpolateString);
      expect(cacheRequest.requests[0].targets).toEqual(secondRequestModified.targets);
      expect(cacheRequest.requests[0].range.to).toEqual(secondRequestModified.range.to);
      expect(cacheRequest.requests[0].range.raw).toEqual(secondRequestModified.range.raw);
      expect(cacheRequest.requests[0].range.from.valueOf() - 21000000).toEqual(
        secondRequestModified.range.from.valueOf()
      );
      expect(cacheRequest.shouldCache).toBe(true);
    });
  });

  it('Will evict old dataframes, and use stored data when user shortens query window', () => {
    const storage = new QueryCache();

    // Initial request with all data for time range
    const firstFrames = IncrementalStorageDataFrameScenarios.histogram.evictionRequests.first
      .dataFrames as unknown as DataFrame[];

    // Shortened request 30s later
    const secondFrames = IncrementalStorageDataFrameScenarios.histogram.evictionRequests.second
      .dataFrames as unknown as DataFrame[];

    // Now the user waits a minute and shortens the query window to the last 5 minutes. Luckily the interval hasn't changed, so everything in storage except the latest minute is still usable
    const thirdFrames = IncrementalStorageDataFrameScenarios.histogram.evictionRequests.second
      .dataFrames as unknown as DataFrame[];

    const cache = new Map<string, string>();
    const interval = 15000;

    // Start time of scenario
    const firstFrom = dateTime(new Date(1675107180000));
    const firstTo = dateTime(new Date(1675107180000)).add(1, 'hours');
    const firstRange: TimeRange = {
      from: firstFrom,
      to: firstTo,
      raw: {
        from: 'now-1h',
        to: 'now',
      },
    };

    // 30 seconds later
    const secondNumberOfSamplesLater = 2;
    const secondFrom = dateTime(new Date(1675107180000 + interval * secondNumberOfSamplesLater));
    const secondTo = dateTime(new Date(1675107180000 + interval * secondNumberOfSamplesLater)).add(1, 'hours');
    const secondRange: TimeRange = {
      from: secondFrom,
      to: secondTo,
      raw: {
        from: 'now-1h',
        to: 'now',
      },
    };

    // 1 minute + 30 seconds later, but 5 minute viewing window
    const thirdNumberOfSamplesLater = 6;
    const thirdFrom = dateTime(new Date(1675107180000 + interval * thirdNumberOfSamplesLater));
    const thirdTo = dateTime(new Date(1675107180000 + interval * thirdNumberOfSamplesLater)).add(5, 'minutes');
    const thirdRange: TimeRange = {
      from: thirdFrom,
      to: thirdTo,
      raw: {
        from: 'now-5m',
        to: 'now',
      },
    };

    // Signifier definition

    const dashboardId = `dashid`;
    const panelId = 200;

    const targetIdentity = `${dashboardId}|${panelId}|A`;

    const request = mockRequest({
      range: firstRange,
      dashboardUID: dashboardId,
      panelId: panelId,
    });

    const requestInfo = {
      requests: [], // unused
      targSigs: cache,
      shouldCache: true,
    };
    const targetSignature = `1=1|${interval}|${JSON.stringify(request.rangeRaw ?? '')}`;
    cache.set(targetIdentity, targetSignature);

    const firstQueryResult = storage.procFrames(request, requestInfo, firstFrames);

    const firstMergedLength = firstQueryResult[0].fields[0].values.length;

    const secondQueryResult = storage.procFrames(
      mockRequest({
        range: secondRange,
        dashboardUID: dashboardId,
        panelId: panelId,
      }),
      {
        requests: [], // unused
        targSigs: cache,
        shouldCache: true,
      },
      secondFrames
    );

    const secondMergedLength = secondQueryResult[0].fields[0].values.length;

    // Since the step is 15s and the second request came 30 seconds later, we get 2 extra samples, but the first two are evicted, so the lengths match
    expect(firstMergedLength).toEqual(secondMergedLength);
    expect(firstQueryResult[0].fields[0].values.toArray()[2]).toEqual(
      secondQueryResult[0].fields[0].values.toArray()[0]
    );
    expect(firstQueryResult[0].fields[0].values.toArray()[0] + 30000).toEqual(
      secondQueryResult[0].fields[0].values.toArray()[0]
    );

    cache.set(targetIdentity, `'1=1'|${interval}|${JSON.stringify(thirdRange.raw)}`);

    storage.procFrames(
      mockRequest({
        range: thirdRange,
        dashboardUID: dashboardId,
        panelId: panelId,
      }),
      {
        requests: [], // unused
        targSigs: cache,
        shouldCache: true,
      },
      thirdFrames
    );

    const cachedAfterThird = storage.cache.get(targetIdentity);
    const storageLengthAfterThirdQuery = cachedAfterThird?.frames[0].fields[0].values.toArray().length;
    expect(storageLengthAfterThirdQuery).toEqual(20);
  });

  it('Will build signature using target overrides', () => {
    const targetInterval = '30s';
    const requestInterval = '15s';

    const target: PromQuery = {
      datasource: { type: 'prometheus', uid: 'OPQv8Kc4z' },
      editorMode: QueryEditorMode.Code,
      exemplar: false,
      expr: 'sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[$__rate_interval]))',
      format: 'heatmap',
      interval: targetInterval,
      legendFormat: '{{le}}',
      range: true,
      refId: 'A',
      utcOffsetSec: -21600,
    };

    const request = mockRequest({
      interval: requestInterval,
      targets: [target],
    });
    const targSig = getTargSig('__EXPR__', request, target);
    expect(targSig).toContain(targetInterval);
    expect(targSig.includes(requestInterval)).toBeFalsy();
  });

  it('will not modify request with absolute duration', () => {
    const request = mockRequest({
      range: {
        from: moment('2023-01-30T19:33:01.332Z') as DateTime,
        to: moment('2023-01-30T20:33:01.332Z') as DateTime,
        raw: { from: '2023-01-30T19:33:01.332Z', to: '2023-01-30T20:33:01.332Z' },
      },
      rangeRaw: { from: '2023-01-30T19:33:01.332Z', to: '2023-01-30T20:33:01.332Z' },
    });
    const storage = new QueryCache();
    const interpolateString = (s: string) => {
      return s;
    };
    const cacheRequest = storage.requestInfo(request, interpolateString);
    expect(cacheRequest.requests[0]).toBe(request);
    expect(cacheRequest.shouldCache).toBe(false);
  });

  it('mark request as shouldCache', () => {
    const request = mockRequest();
    const storage = new QueryCache();
    const interpolateString = (s: string) => {
      return s;
    };
    const cacheRequest = storage.requestInfo(request, interpolateString);
    expect(cacheRequest.requests[0]).toBe(request);
    expect(cacheRequest.shouldCache).toBe(true);
  });

  it('Should modify request', () => {
    const request = mockRequest();
    const storage = new QueryCache();
    const interpolateString = (s: string) => {
      return s;
    };
    const cacheRequest = storage.requestInfo(request, interpolateString);
    expect(cacheRequest.requests[0]).toBe(request);
    expect(cacheRequest.shouldCache).toBe(true);
  });
});
@@ -0,0 +1,258 @@
import {
  ArrayVector,
  DataFrame,
  DataQueryRequest,
  dateTime,
  durationToMilliseconds,
  Field,
  isValidDuration,
  parseDuration,
} from '@grafana/data/src';
import { amendTable, Table, trimTable } from 'app/features/live/data/amendTimeSeries';

import { PromQuery } from '../types';

// dashboardUID + panelId + refId
// (must be stable across query changes, time range changes / interval changes / panel resizes / template variable changes)
type TargetIdent = string;

// query + template variables + interval + raw time range
// used for full target cache busting -> full range re-query
type TargetSig = string;

type TimestampMs = number;

type StringInterpolator = (expr: string) => string;

// string matching requirements defined in durationutil.ts
export const defaultPrometheusQueryOverlapWindow = '10m';

interface TargetCache {
  sig: TargetSig;
  prevTo: TimestampMs;
  frames: DataFrame[];
}

export interface CacheRequestInfo {
  requests: Array<DataQueryRequest<PromQuery>>;
  targSigs: Map<TargetIdent, TargetSig>;
  shouldCache: boolean;
}

/**
 * Get field identity
 * This is the string used to uniquely identify a field within a "target"
 * @param field
 */
export const getFieldIdent = (field: Field) => `${field.type}|${field.name}|${JSON.stringify(field.labels ?? '')}`;
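// e.g. a numeric field named 'Value' with labels { le: '0.5' } yields the
// ident 'number|Value|{"le":"0.5"}' (illustrative values, not from this diff)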

/**
 * Get target signature
 * @param targExpr
 * @param request
 * @param targ
 */
export function getTargSig(targExpr: string, request: DataQueryRequest<PromQuery>, targ: PromQuery) {
  return `${targExpr}|${targ.interval ?? request.interval}|${JSON.stringify(request.rangeRaw ?? '')}|${targ.exemplar}`;
}
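// e.g. getTargSig('up', request, targ) with a 15s interval, a now-1h raw range,
// and exemplar: false yields 'up|15s|{"from":"now-1h","to":"now"}|false'
// (illustrative values, not from this diff)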

/**
 * NOMENCLATURE
 * Target: The request target (DataQueryRequest), i.e. a specific query reference within a panel
 * Ident: Identity: the string that is not expected to change
 * Sig: Signature: the string that is expected to change, upon which we wipe the cache fields
 */
export class QueryCache {
  private overlapWindowMs: number;
  constructor(overlapString?: string) {
    const unverifiedOverlap = overlapString ?? defaultPrometheusQueryOverlapWindow;
    if (isValidDuration(unverifiedOverlap)) {
      const duration = parseDuration(unverifiedOverlap);
      this.overlapWindowMs = durationToMilliseconds(duration);
    } else {
      const duration = parseDuration(defaultPrometheusQueryOverlapWindow);
      this.overlapWindowMs = durationToMilliseconds(duration);
    }
  }
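
  // e.g. the default '10m' overlap window parses to 600000 ms:
  // durationToMilliseconds(parseDuration('10m')) === 600000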

  cache = new Map<TargetIdent, TargetCache>();

  // can be used to change full range request to partial, split into multiple requests
  requestInfo(request: DataQueryRequest<PromQuery>, interpolateString: StringInterpolator): CacheRequestInfo {
    // TODO: align from/to to interval to increase probability of hitting backend cache

    const newFrom = request.range.from.valueOf();
    const newTo = request.range.to.valueOf();

    // only cache 'now'-relative queries (that can benefit from a backfill cache)
    const shouldCache = request.rangeRaw?.to?.toString() === 'now';

    // all targets are queried together, so we check for any that causes group cache invalidation & full re-query
    let doPartialQuery = shouldCache;
    let prevTo: TimestampMs;

    // pre-compute reqTargSigs
    const reqTargSigs = new Map<TargetIdent, TargetSig>();
    request.targets.forEach((targ) => {
      let targIdent = `${request.dashboardUID}|${request.panelId}|${targ.refId}`;
      // @todo refactor getTargSig into datasource class and remove targExpr. See #65952 for a potential implementation
      let targExpr = interpolateString(targ.expr);
      let targSig = getTargSig(targExpr, request, targ);

      reqTargSigs.set(targIdent, targSig);
    });

    // figure out if new query range or new target props trigger full cache invalidation & re-query
    for (const [targIdent, targSig] of reqTargSigs) {
      let cached = this.cache.get(targIdent);
      let cachedSig = cached?.sig;

      if (cachedSig !== targSig) {
        doPartialQuery = false;
      } else {
        // only do partial queries when new request range follows prior request range (possibly with overlap)
        // e.g. now-6h with refresh <= 6h
        prevTo = cached?.prevTo ?? Infinity;
        doPartialQuery = newTo > prevTo && newFrom <= prevTo;
      }

      if (!doPartialQuery) {
        break;
      }
    }

    if (doPartialQuery) {
      // 10m re-query overlap

      // clamp to make sure we don't re-query previous 10m when newFrom is ahead of it (e.g. 5min range, 30s refresh)
      let newFromPartial = Math.max(prevTo! - this.overlapWindowMs, newFrom);

      // modify to partial query
      request = {
        ...request,
        range: {
          ...request.range,
          from: dateTime(newFromPartial),
          to: dateTime(newTo),
        },
      };
    } else {
      reqTargSigs.forEach((targSig, targIdent) => {
        this.cache.delete(targIdent);
      });
    }

    return {
      requests: [request],
      targSigs: reqTargSigs,
      shouldCache,
    };
  }

  // should amend existing cache with new frames and return full response
  procFrames(
    request: DataQueryRequest<PromQuery>,
    requestInfo: CacheRequestInfo | undefined,
    respFrames: DataFrame[]
  ): DataFrame[] {
    if (requestInfo?.shouldCache) {
      const newFrom = request.range.from.valueOf();
      const newTo = request.range.to.valueOf();

      // group frames by targets
      const respByTarget = new Map<TargetIdent, DataFrame[]>();

      respFrames.forEach((frame: DataFrame) => {
        let targIdent = `${request.dashboardUID}|${request.panelId}|${frame.refId}`;

        let frames = respByTarget.get(targIdent);

        if (!frames) {
          frames = [];
          respByTarget.set(targIdent, frames);
        }

        frames.push(frame);
      });

      let outFrames: DataFrame[] = [];

      respByTarget.forEach((respFrames, targIdent) => {
        let cachedFrames = (targIdent ? this.cache.get(targIdent)?.frames : null) ?? [];

        respFrames.forEach((respFrame: DataFrame) => {
          // skip empty frames
          if (respFrame.length === 0 || respFrame.fields.length === 0) {
            return;
          }

          // frames are identified by their second (non-time) field's name + labels
          // TODO: maybe also frame.meta.type?
          let respFrameIdent = getFieldIdent(respFrame.fields[1]);

          let cachedFrame = cachedFrames.find((cached) => getFieldIdent(cached.fields[1]) === respFrameIdent);

          if (!cachedFrame) {
            // append new unknown frames
            cachedFrames.push(respFrame);
          } else {
            // we assume that fields cannot appear/disappear and will all exist in same order

            // amend & re-cache
            // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
            let prevTable: Table = cachedFrame.fields.map((field) => field.values.toArray()) as Table;
            // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
            let nextTable: Table = respFrame.fields.map((field) => field.values.toArray()) as Table;

            let amendedTable = amendTable(prevTable, nextTable);

            for (let i = 0; i < amendedTable.length; i++) {
              cachedFrame.fields[i].values = new ArrayVector(amendedTable[i]);
            }

            cachedFrame.length = cachedFrame.fields[0].values.length;
          }
        });

        // trim all frames to in-view range, evict those that end up with 0 length
        let nonEmptyCachedFrames: DataFrame[] = [];

        cachedFrames.forEach((frame) => {
          // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
          let table: Table = frame.fields.map((field) => field.values.toArray()) as Table;

          let trimmed = trimTable(table, newFrom, newTo);

          if (trimmed[0].length > 0) {
            for (let i = 0; i < trimmed.length; i++) {
              frame.fields[i].values = new ArrayVector(trimmed[i]);
            }
            nonEmptyCachedFrames.push(frame);
          }
        });

        this.cache.set(targIdent, {
          sig: requestInfo.targSigs.get(targIdent)!,
          frames: nonEmptyCachedFrames,
          prevTo: newTo,
        });

        outFrames.push(...nonEmptyCachedFrames);
      });

      // transformV2 mutates field values for heatmap de-accum, and modifies field order, so we gotta clone here, for now :(
      respFrames = outFrames.map((frame) => ({
        ...frame,
        fields: frame.fields.map((field) => ({
          ...field,
          config: {
            ...field.config, // prevents mutative exemplar links (re)enrichment
          },
          values: new ArrayVector(field.values.toArray().slice()),
        })),
      }));
    }

    return respFrames;
  }
}
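A minimal sketch of how a datasource might wire the cache into its query path; `runRangeQuery` and the surrounding plumbing are assumptions for illustration, only `requestInfo` and `procFrames` come from this diff:

import { DataFrame, DataQueryRequest } from '@grafana/data/src';

import { PromQuery } from '../types';

import { QueryCache } from './QueryCache';

const cache = new QueryCache();

// Hypothetical wrapper: trims the outgoing request to the uncached tail,
// then amends the cached frames with the partial response before returning.
async function queryWithCache(
  request: DataQueryRequest<PromQuery>,
  runRangeQuery: (req: DataQueryRequest<PromQuery>) => Promise<DataFrame[]>,
  interpolate: (expr: string) => string
): Promise<DataFrame[]> {
  const info = cache.requestInfo(request, interpolate);
  const frames = await runRangeQuery(info.requests[0]); // possibly a shortened range
  return cache.procFrames(info.requests[0], info, frames); // full, merged response
}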
@@ -0,0 +1,864 @@
import { clone } from 'lodash';

import { ArrayVector } from '@grafana/data/src';

/**
 *
 * @param length - Number of values to add
 * @param start - First timestamp (ms)
 * @param step - step duration (ms)
 */
export const getMockTimeFrameArray = (length: number, start: number, step: number): ArrayVector => {
  let timeValues = [];
  for (let i = 0; i < length; i++) {
    timeValues.push(start + i * step);
  }

  return new ArrayVector(timeValues);
};
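
// e.g. getMockTimeFrameArray(3, 1000, 30000) yields an ArrayVector
// wrapping [1000, 31000, 61000]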

/**
 * @param length - Number of values to add
 * @param values - The value to fill each slot with
 */
export const getMockValueFrameArray = (length: number, values = 0): ArrayVector => {
  return new ArrayVector(Array(length).fill(values));
};

const timeFrameWithMissingValuesInMiddle = getMockTimeFrameArray(721, 1675262550000, 30000);
const timeFrameWithMissingValuesAtStart = getMockTimeFrameArray(721, 1675262550000, 30000);
const timeFrameWithMissingValuesAtEnd = getMockTimeFrameArray(721, 1675262550000, 30000);

// Delete some values: out of the middle, from the start, and near the end
timeFrameWithMissingValuesInMiddle.toArray().splice(360, 721 - 684);
timeFrameWithMissingValuesAtStart.toArray().splice(0, 721 - 684);
timeFrameWithMissingValuesAtEnd.toArray().splice(721 - 684, 721 - 684);

const mockLabels = {
  __name__: 'cortex_request_duration_seconds_bucket',
  cluster: 'dev-us-central-0',
  container: 'aggregator',
  instance: 'aggregator-7:aggregator:http-metrics',
  job: 'mimir-dev-11/aggregator',
  le: '0.5',
  method: 'GET',
  namespace: 'mimir-dev-11',
  pod: 'aggregator-7',
  route: 'metrics',
  status_code: '200',
  ws: 'false',
};

const twoRequestsOneCachedMissingData = {
  first: {
    request: {
      app: 'panel-viewer',
      requestId: 'Q100',
      panelId: 19,
      dashboardId: 884,
      dashboardUID: 'dtngicc4z',
      range: {
        from: '2023-02-01T14:42:54.929Z',
        to: '2023-02-01T20:42:54.929Z',
        raw: { from: 'now-6h', to: 'now' },
      },
      interval: '30s',
      intervalMs: 30000,
      targets: [
        {
          datasource: { type: 'prometheus', uid: 'OPQv8Kc4z' },
          editorMode: 'code',
          expr: '',
          legendFormat: '',
          range: true,
          refId: 'A',
          exemplar: false,
          requestId: '19A',
          utcOffsetSec: -21600,
        },
      ],
      startTime: 1675284174929,
      rangeRaw: { from: 'now-6h', to: 'now' },
    },
    dataFrames: [
      {
        name: '+Inf',
        refId: 'A',
        fields: [
          {
            name: 'Time',
            type: 'time',
            typeInfo: { frame: 'time.Time' },
            config: { interval: 30000 },
            // Delete values from the middle
            values: timeFrameWithMissingValuesInMiddle,
            entities: {},
          },
          {
            name: 'Value',
            type: 'number',
            typeInfo: { frame: 'float64' },
            labels: { ...mockLabels, le: '+Inf' },
            config: { displayNameFromDS: '+Inf' },
            values: getMockValueFrameArray(684, 1),
            entities: {},
          },
        ],
        length: 684,
      },
      {
        name: '0.5',
        refId: 'A',
        meta: {
          type: 'timeseries-multi',
          custom: { resultType: 'matrix' },
          executedQueryString:
            'Expr: {__name__="cortex_request_duration_seconds_bucket", cluster="dev-us-central-0", container="aggregator", instance=~"aggregator-7:aggregator:http-metrics|aggregator-6:aggregator:http-metrics", job="mimir-dev-11/aggregator", le=~"\\\\+Inf|0.5", method="GET", namespace="mimir-dev-11", pod="aggregator-7"}\nStep: 30s',
          preferredVisualisationType: 'graph',
        },
        fields: [
          {
            name: 'Time',
            type: 'time',
            typeInfo: { frame: 'time.Time' },
            config: { interval: 30000 },
            values: timeFrameWithMissingValuesInMiddle,
            entities: {},
          },
          {
            name: 'Value',
            type: 'number',
            typeInfo: { frame: 'float64' },
            labels: { ...mockLabels, le: '0.5' },
            config: { displayNameFromDS: '0.5' },
            values: getMockValueFrameArray(684, 25349),
            entities: {},
          },
        ],
        length: 684,
      },
    ],
    originalRange: undefined,
    timeSrv: { from: 'now-6h', to: 'now' },
  },
  second: {
    request: {
      app: 'panel-viewer',
      requestId: 'Q101',
      timezone: 'browser',
      panelId: 19,
      dashboardId: 884,
      dashboardUID: 'dtngicc4z',
      publicDashboardAccessToken: '',
      range: {
        from: '2023-02-01T14:44:01.928Z',
        to: '2023-02-01T20:44:01.928Z',
        raw: { from: 'now-6h', to: 'now' },
      },
      timeInfo: '',
      interval: '30s',
      intervalMs: 30000,
      targets: [
        {
          datasource: { type: 'prometheus', uid: 'OPQv8Kc4z' },
          editorMode: 'code',
          expr: '{__name__="cortex_request_duration_seconds_bucket", cluster="dev-us-central-0", container="aggregator", instance=~"aggregator-7:aggregator:http-metrics|aggregator-6:aggregator:http-metrics", job="mimir-dev-11/aggregator", le=~"\\\\+Inf|0.5", method="GET", namespace="mimir-dev-11", pod="aggregator-7"}',
          legendFormat: '{{le}}',
          range: true,
          refId: 'A',
          exemplar: false,
          requestId: '19A',
          utcOffsetSec: -21600,
        },
      ],
      maxDataPoints: 775,
      scopedVars: { __interval: { text: '30s', value: '30s' }, __interval_ms: { text: '30000', value: 30000 } },
      startTime: 1675284241929,
      rangeRaw: { from: 'now-6h', to: 'now' },
    },
    dataFrames: [
      {
        name: '+Inf',
        refId: 'A',
        meta: {
          type: 'timeseries-multi',
          custom: { resultType: 'matrix' },
          executedQueryString:
            'Expr: {__name__="cortex_request_duration_seconds_bucket", cluster="dev-us-central-0", container="aggregator", instance=~"aggregator-7:aggregator:http-metrics|aggregator-6:aggregator:http-metrics", job="mimir-dev-11/aggregator", le=~"\\\\+Inf|0.5", method="GET", namespace="mimir-dev-11", pod="aggregator-7"}\nStep: 30s',
          preferredVisualisationType: 'graph',
        },
        fields: [
          {
            name: 'Time',
            type: 'time',
            typeInfo: { frame: 'time.Time' },
            config: { interval: 30000 },
            values: getMockTimeFrameArray(24, 1675283550000, 30000),
            entities: {},
          },
          {
            name: 'Value',
            type: 'number',
            typeInfo: { frame: 'float64' },
            labels: { ...mockLabels, le: '+Inf' },
            config: { displayNameFromDS: '+Inf' },
            values: getMockValueFrameArray(24, 1),
            entities: {},
          },
        ],
        length: 24,
      },
      {
        name: '0.5',
        refId: 'A',
        meta: {
          type: 'timeseries-multi',
          custom: { resultType: 'matrix' },
          executedQueryString:
            'Expr: {__name__="cortex_request_duration_seconds_bucket", cluster="dev-us-central-0", container="aggregator", instance=~"aggregator-7:aggregator:http-metrics|aggregator-6:aggregator:http-metrics", job="mimir-dev-11/aggregator", le=~"\\\\+Inf|0.5", method="GET", namespace="mimir-dev-11", pod="aggregator-7"}\nStep: 30s',
          preferredVisualisationType: 'graph',
        },
        fields: [
          {
            name: 'Time',
            type: 'time',
            typeInfo: { frame: 'time.Time' },
            config: { interval: 30000 },
            values: getMockTimeFrameArray(21, 1675283550000, 30000),
            entities: {},
          },
          {
            name: 'Value',
            type: 'number',
            typeInfo: { frame: 'float64' },
            labels: {
              __name__: 'cortex_request_duration_seconds_bucket',
              cluster: 'dev-us-central-0',
              container: 'aggregator',
              instance: 'aggregator-7:aggregator:http-metrics',
              job: 'mimir-dev-11/aggregator',
              le: '0.5',
              method: 'GET',
              namespace: 'mimir-dev-11',
              pod: 'aggregator-7',
              route: 'metrics',
              status_code: '200',
              ws: 'false',
            },
            config: { displayNameFromDS: '0.5' },
            values: getMockValueFrameArray(21, 2),
            entities: {},
          },
        ],
        length: 21,
      },
    ],
    originalRange: { end: 1675284241920, start: 1675262641920 },
    timeSrv: { from: 'now-6h', to: 'now' },
  },
};

export const IncrementalStorageDataFrameScenarios = {
  histogram: {
    // Three requests: the second comes 30 seconds after the first; then the user waits a minute and shortens the query window from 1 hour to 5 minutes, forcing frames to get evicted
    evictionRequests: {
      first: {
        request: {
          range: {
            from: '2023-01-30T19:33:01.332Z',
            to: '2023-01-30T20:33:01.332Z',
            raw: { from: 'now-1h', to: 'now' },
          },
          interval: '15s',
          intervalMs: 15000,
          targets: [
            {
              datasource: { type: 'prometheus', uid: 'OPQv8Kc4z' },
              editorMode: 'code',
              exemplar: false,
              expr: 'sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[$__rate_interval]))',
              format: 'heatmap',
              legendFormat: '{{le}}',
              range: true,
              refId: 'A',
              requestId: '2A',
              utcOffsetSec: -21600,
            },
          ],
          maxDataPoints: 871,
          scopedVars: {
            __interval: { text: '15s', value: '15s' },
            __interval_ms: { text: '15000', value: 15000 },
          },
          startTime: 1675110781332,
          rangeRaw: { from: 'now-1h', to: 'now' },
        },
        dataFrames: [
          {
            name: '0.005',
            refId: 'A',
            meta: {
              type: 'heatmap-rows',
              custom: { resultType: 'matrix' },
              executedQueryString:
                'Expr: sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[1m0s]))\nStep: 15s',
            },
            fields: [
              {
                name: 'Time',
                type: 'time',
                typeInfo: { frame: 'time.Time' },
                config: { interval: 15000 },
                values: getMockTimeFrameArray(241, 1675107180000, 15000),
                entities: {},
              },
              {
                name: '0.005',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.005' },
                config: { displayNameFromDS: '0.005' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.01',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.01' },
                config: { displayNameFromDS: '0.01' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.025',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.025' },
                config: { displayNameFromDS: '0.025' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.05',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.05' },
                config: { displayNameFromDS: '0.05' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.1',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.1' },
                config: { displayNameFromDS: '0.1' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.25',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.25' },
                config: { displayNameFromDS: '0.25' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '0.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.5' },
                config: { displayNameFromDS: '0.5' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '1.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '1.0' },
                config: { displayNameFromDS: '1.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '2.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '2.5' },
                config: { displayNameFromDS: '2.5' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '5.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '5.0' },
                config: { displayNameFromDS: '5.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '10.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '10.0' },
                config: { displayNameFromDS: '10.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '25.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '25.0' },
                config: { displayNameFromDS: '25.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '50.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '50.0' },
                config: { displayNameFromDS: '50.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '100.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '100.0' },
                config: { displayNameFromDS: '100.0' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
              {
                name: '+Inf',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '+Inf' },
                config: { displayNameFromDS: '+Inf' },
                values: getMockValueFrameArray(241, 2.8),
                entities: {},
              },
            ],
            length: 241,
          },
        ],
      },
      second: {
        request: {
          range: {
            from: '2023-01-30T19:33:31.357Z',
            to: '2023-01-30T20:33:31.357Z',
            raw: { from: 'now-1h', to: 'now' },
          },
          interval: '15s',
          intervalMs: 15000,
          targets: [
            {
              datasource: { type: 'prometheus' },
              editorMode: 'code',
              exemplar: false,
              expr: 'sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[$__rate_interval]))',
              format: 'heatmap',
              legendFormat: '{{le}}',
              range: true,
              refId: 'A',
              requestId: '2A',
              utcOffsetSec: -21600,
            },
          ],
          maxDataPoints: 871,
          scopedVars: {
            __interval: { text: '15s', value: '15s' },
            __interval_ms: { text: '15000', value: 15000 },
          },
          startTime: 1675110811357,
          rangeRaw: { from: 'now-1h', to: 'now' },
        },
        dataFrames: [
          {
            name: '0.005',
            refId: 'A',
            meta: {
              type: 'heatmap-rows',
              custom: { resultType: 'matrix' },
              executedQueryString:
                'Expr: sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[1m0s]))\nStep: 15s',
            },
            fields: [
              {
                name: 'Time',
                type: 'time',
                typeInfo: { frame: 'time.Time' },
                config: { interval: 15000 },
                values: getMockTimeFrameArray(43, 1675110180000, 15000),
                entities: {},
              },
              {
                name: '0.005',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.005' },
                config: { displayNameFromDS: '0.005' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.01',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.01' },
                config: { displayNameFromDS: '0.01' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.025',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.025' },
                config: { displayNameFromDS: '0.025' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.05',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.05' },
                config: { displayNameFromDS: '0.05' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.1',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.1' },
                config: { displayNameFromDS: '0.1' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.25',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.25' },
                config: { displayNameFromDS: '0.25' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '0.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.5' },
                config: { displayNameFromDS: '0.5' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '1.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '1.0' },
                config: { displayNameFromDS: '1.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '2.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '2.5' },
                config: { displayNameFromDS: '2.5' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '5.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '5.0' },
                config: { displayNameFromDS: '5.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '10.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '10.0' },
                config: { displayNameFromDS: '10.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '25.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '25.0' },
                config: { displayNameFromDS: '25.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '50.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '50.0' },
                config: { displayNameFromDS: '50.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '100.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '100.0' },
                config: { displayNameFromDS: '100.0' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
              {
                name: '+Inf',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '+Inf' },
                config: { displayNameFromDS: '+Inf' },
                values: getMockValueFrameArray(43, 2.8),
                entities: {},
              },
            ],
            length: 43,
          },
        ],
      },
      third: {
        request: {
          range: {
            from: '2023-01-30T20:33:31.357Z',
            to: '2023-01-30T20:34:31.357Z',
            raw: { from: 'now-5m', to: 'now' },
          },
          interval: '15s',
          intervalMs: 15000,
          targets: [
            {
              datasource: { type: 'prometheus' },
              editorMode: 'code',
              exemplar: false,
              expr: 'sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[$__rate_interval]))',
              format: 'heatmap',
              legendFormat: '{{le}}',
              range: true,
              refId: 'A',
              requestId: '2A',
              utcOffsetSec: -21600,
            },
          ],
          maxDataPoints: 871,
          scopedVars: {
            __interval: { text: '15s', value: '15s' },
            __interval_ms: { text: '15000', value: 15000 },
          },
          startTime: 1675110811357,
          rangeRaw: { from: 'now-1h', to: 'now' },
        },
        dataFrames: [
          {
            name: '0.005',
            refId: 'A',
            meta: {
              type: 'heatmap-rows',
              custom: { resultType: 'matrix' },
              executedQueryString:
                'Expr: sum by(le) (rate(cortex_request_duration_seconds_bucket{cluster="dev-us-central-0", job="cortex-dev-01/cortex-gw-internal", namespace="cortex-dev-01"}[1m0s]))\nStep: 15s',
            },
            fields: [
              {
                name: 'Time',
                type: 'time',
                typeInfo: { frame: 'time.Time' },
                config: { interval: 15000 },
                values: getMockTimeFrameArray(20, 1675110810000, 15000),
                entities: {},
              },
              {
                name: '0.005',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.005' },
                config: { displayNameFromDS: '0.005' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '0.01',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.01' },
                config: { displayNameFromDS: '0.01' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '0.025',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.025' },
                config: { displayNameFromDS: '0.025' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '0.05',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.05' },
                config: { displayNameFromDS: '0.05' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '0.1',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.1' },
                config: { displayNameFromDS: '0.1' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '0.25',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.25' },
                config: { displayNameFromDS: '0.25' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },

              // Sometimes we don't get new values for every series; preprocessing will need to back-fill any missing values
              {
                name: '0.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '0.5' },
                config: { displayNameFromDS: '0.5' },
                values: getMockValueFrameArray(10, 4.3),
                entities: {},
              },
              {
                name: '1.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '1.0' },
                config: { displayNameFromDS: '1.0' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '2.5',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '2.5' },
                config: { displayNameFromDS: '2.5' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '5.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '5.0' },
                config: { displayNameFromDS: '5.0' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '10.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '10.0' },
                config: { displayNameFromDS: '10.0' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '25.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '25.0' },
                config: { displayNameFromDS: '25.0' },
                values: getMockValueFrameArray(10, 4.3),
                entities: {},
              },
              {
                name: '50.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '50.0' },
                config: { displayNameFromDS: '50.0' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '100.0',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '100.0' },
                config: { displayNameFromDS: '100.0' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
              {
                name: '+Inf',
                type: 'number',
                typeInfo: { frame: 'float64' },
                labels: { le: '+Inf' },
                config: { displayNameFromDS: '+Inf' },
                values: getMockValueFrameArray(20, 4.3),
                entities: {},
              },
            ],
            length: 43,
          },
        ],
      },
    },

    getSeriesWithGapAtEnd: (countOfSeries = 2) => {
      const templateClone = clone(twoRequestsOneCachedMissingData);
      for (let i = 0; i < countOfSeries - 1; i++) {
        templateClone.first.dataFrames[i].fields[0].values = timeFrameWithMissingValuesAtEnd;
      }
      return templateClone;
    },

    getSeriesWithGapAtStart: (countOfSeries = 2) => {
      const templateClone = clone(twoRequestsOneCachedMissingData);
      for (let i = 0; i < countOfSeries - 1; i++) {
        templateClone.first.dataFrames[i].fields[0].values = timeFrameWithMissingValuesAtStart;
      }
      return templateClone;
    },

    getSeriesWithGapInMiddle: (countOfSeries = 2) => {
      const templateClone = clone(twoRequestsOneCachedMissingData);
      for (let i = 0; i < countOfSeries - 1; i++) {
        templateClone.first.dataFrames[i].fields[0].values = timeFrameWithMissingValuesInMiddle;
      }
      return templateClone;
    },
  },
};