mirror of https://github.com/grafana/grafana
Prometheus: Introduce resource clients in language provider (#105818)
* refactor language provider * update tests * more tests * betterer and api endpoints * copilot updates * betterer * remove default value * prettier * introduce resource clients and better refactoring * prettier * type fixes * betterer * no empty matcher for series calls * better matchers * addressing the review feedbackpull/106920/head
parent
e3cbe54b45
commit
c7e338342a
@ -0,0 +1,149 @@ |
||||
import { |
||||
getCacheDurationInMinutes, |
||||
getDaysToCacheMetadata, |
||||
getDebounceTimeInMilliseconds, |
||||
buildCacheHeaders, |
||||
getDefaultCacheHeaders, |
||||
} from './caching'; |
||||
import { PrometheusCacheLevel } from './types'; |
||||
|
||||
// Unit tests for the Prometheus cache-level helpers in ./caching.
describe('caching', () => {
  describe('getDebounceTimeInMilliseconds', () => {
    it('should return 600ms for Medium cache level', () => {
      expect(getDebounceTimeInMilliseconds(PrometheusCacheLevel.Medium)).toBe(600);
    });

    it('should return 1200ms for High cache level', () => {
      expect(getDebounceTimeInMilliseconds(PrometheusCacheLevel.High)).toBe(1200);
    });

    it('should return 350ms for Low cache level', () => {
      expect(getDebounceTimeInMilliseconds(PrometheusCacheLevel.Low)).toBe(350);
    });

    it('should return 350ms for None cache level', () => {
      expect(getDebounceTimeInMilliseconds(PrometheusCacheLevel.None)).toBe(350);
    });

    it('should return default value (350ms) for unknown cache level', () => {
      // Cast forces the switch's default branch.
      expect(getDebounceTimeInMilliseconds('invalid' as PrometheusCacheLevel)).toBe(350);
    });
  });

  describe('getDaysToCacheMetadata', () => {
    it('should return 7 days for Medium cache level', () => {
      expect(getDaysToCacheMetadata(PrometheusCacheLevel.Medium)).toBe(7);
    });

    it('should return 30 days for High cache level', () => {
      expect(getDaysToCacheMetadata(PrometheusCacheLevel.High)).toBe(30);
    });

    it('should return 1 day for Low cache level', () => {
      expect(getDaysToCacheMetadata(PrometheusCacheLevel.Low)).toBe(1);
    });

    it('should return 1 day for None cache level', () => {
      expect(getDaysToCacheMetadata(PrometheusCacheLevel.None)).toBe(1);
    });

    it('should return default value (1 day) for unknown cache level', () => {
      expect(getDaysToCacheMetadata('invalid' as PrometheusCacheLevel)).toBe(1);
    });
  });

  describe('getCacheDurationInMinutes', () => {
    it('should return 10 minutes for Medium cache level', () => {
      expect(getCacheDurationInMinutes(PrometheusCacheLevel.Medium)).toBe(10);
    });

    it('should return 60 minutes for High cache level', () => {
      expect(getCacheDurationInMinutes(PrometheusCacheLevel.High)).toBe(60);
    });

    it('should return 1 minute for Low cache level', () => {
      expect(getCacheDurationInMinutes(PrometheusCacheLevel.Low)).toBe(1);
    });

    it('should return 1 minute for None cache level', () => {
      expect(getCacheDurationInMinutes(PrometheusCacheLevel.None)).toBe(1);
    });

    it('should return default value (1 minute) for unknown cache level', () => {
      expect(getCacheDurationInMinutes('invalid' as PrometheusCacheLevel)).toBe(1);
    });
  });

  describe('buildCacheHeaders', () => {
    it('should build cache headers with provided duration in seconds', () => {
      const result = buildCacheHeaders(300);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=300',
        },
      });
    });

    it('should handle zero duration', () => {
      const result = buildCacheHeaders(0);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=0',
        },
      });
    });

    it('should handle large duration values', () => {
      const oneDayInSeconds = 86400;
      const result = buildCacheHeaders(oneDayInSeconds);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=86400',
        },
      });
    });
  });

  describe('getDefaultCacheHeaders', () => {
    it('should return cache headers for Medium cache level', () => {
      const result = getDefaultCacheHeaders(PrometheusCacheLevel.Medium);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=600', // 10 minutes in seconds
        },
      });
    });

    it('should return cache headers for High cache level', () => {
      const result = getDefaultCacheHeaders(PrometheusCacheLevel.High);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=3600', // 60 minutes in seconds
        },
      });
    });

    it('should return cache headers for Low cache level', () => {
      const result = getDefaultCacheHeaders(PrometheusCacheLevel.Low);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=60', // 1 minute in seconds
        },
      });
    });

    it('should return undefined for None cache level', () => {
      // None means caching is disabled entirely — no headers at all.
      const result = getDefaultCacheHeaders(PrometheusCacheLevel.None);
      expect(result).toBeUndefined();
    });

    it('should handle unknown cache level as default (1 minute)', () => {
      const result = getDefaultCacheHeaders('invalid' as PrometheusCacheLevel);
      expect(result).toEqual({
        headers: {
          'X-Grafana-Cache': 'private, max-age=60', // 1 minute in seconds
        },
      });
    });
  });
});
@ -0,0 +1,101 @@ |
||||
import { PrometheusCacheLevel } from './types'; |
||||
|
||||
/** |
||||
* Returns the debounce time in milliseconds based on the cache level. |
||||
* Used to control the frequency of API requests. |
||||
* |
||||
* @param {PrometheusCacheLevel} cacheLevel - The cache level (None, Low, Medium, High) |
||||
* @returns {number} Debounce time in milliseconds: |
||||
* - Medium: 600ms |
||||
* - High: 1200ms |
||||
* - Default (None/Low): 350ms |
||||
*/ |
||||
export const getDebounceTimeInMilliseconds = (cacheLevel: PrometheusCacheLevel): number => { |
||||
switch (cacheLevel) { |
||||
case PrometheusCacheLevel.Medium: |
||||
return 600; |
||||
case PrometheusCacheLevel.High: |
||||
return 1200; |
||||
default: |
||||
return 350; |
||||
} |
||||
}; |
||||
|
||||
/** |
||||
* Returns the number of days to cache metadata based on the cache level. |
||||
* Used for caching Prometheus metric metadata. |
||||
* |
||||
* @param {PrometheusCacheLevel} cacheLevel - The cache level (None, Low, Medium, High) |
||||
* @returns {number} Number of days to cache: |
||||
* - Medium: 7 days |
||||
* - High: 30 days |
||||
* - Default (None/Low): 1 day |
||||
*/ |
||||
export const getDaysToCacheMetadata = (cacheLevel: PrometheusCacheLevel): number => { |
||||
switch (cacheLevel) { |
||||
case PrometheusCacheLevel.Medium: |
||||
return 7; |
||||
case PrometheusCacheLevel.High: |
||||
return 30; |
||||
default: |
||||
return 1; |
||||
} |
||||
}; |
||||
|
||||
/** |
||||
* Returns the cache duration in minutes based on the cache level. |
||||
* Used for general API response caching. |
||||
* |
||||
* @param {PrometheusCacheLevel} cacheLevel - The cache level (None, Low, Medium, High) |
||||
* @returns {number} Cache duration in minutes: |
||||
* - Medium: 10 minutes |
||||
* - High: 60 minutes |
||||
* - Default (None/Low): 1 minute |
||||
*/ |
||||
export function getCacheDurationInMinutes(cacheLevel: PrometheusCacheLevel) { |
||||
switch (cacheLevel) { |
||||
case PrometheusCacheLevel.Medium: |
||||
return 10; |
||||
case PrometheusCacheLevel.High: |
||||
return 60; |
||||
default: |
||||
return 1; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Builds cache headers for Prometheus API requests. |
||||
* Creates a standard cache control header with private scope and max-age directive. |
||||
* |
||||
* @param {number} durationInSeconds - Cache duration in seconds |
||||
* @returns {object} Object containing headers with cache control directives: |
||||
* - X-Grafana-Cache: private, max-age=<duration> |
||||
* @example |
||||
* // Returns { headers: { 'X-Grafana-Cache': 'private, max-age=300' } }
|
||||
* buildCacheHeaders(300) |
||||
*/ |
||||
export const buildCacheHeaders = (durationInSeconds: number) => { |
||||
return { |
||||
headers: { |
||||
'X-Grafana-Cache': `private, max-age=${durationInSeconds}`, |
||||
}, |
||||
}; |
||||
}; |
||||
|
||||
/** |
||||
* Gets appropriate cache headers based on the configured cache level. |
||||
* Converts cache duration from minutes to seconds and builds the headers. |
||||
* Returns undefined if caching is disabled (None level). |
||||
* |
||||
* @param {PrometheusCacheLevel} cacheLevel - Cache level (None, Low, Medium, High) |
||||
* @returns {object|undefined} Cache headers object or undefined if caching is disabled |
||||
* @example |
||||
* // For Medium level, returns { headers: { 'X-Grafana-Cache': 'private, max-age=600' } }
|
||||
* getDefaultCacheHeaders(PrometheusCacheLevel.Medium) |
||||
*/ |
||||
export const getDefaultCacheHeaders = (cacheLevel: PrometheusCacheLevel) => { |
||||
if (cacheLevel !== PrometheusCacheLevel.None) { |
||||
return buildCacheHeaders(getCacheDurationInMinutes(cacheLevel) * 60); |
||||
} |
||||
return; |
||||
}; |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,474 @@ |
||||
import { dateTime, TimeRange } from '@grafana/data'; |
||||
|
||||
import { PrometheusDatasource } from './datasource'; |
||||
import { BaseResourceClient, LabelsApiClient, processSeries, SeriesApiClient } from './resource_clients'; |
||||
import { PrometheusCacheLevel } from './types'; |
||||
|
||||
// Fixed one-second absolute time range shared by every test below.
const mockTimeRange: TimeRange = {
  from: dateTime(1681300292392),
  to: dateTime(1681300293392),
  raw: {
    from: 'now-1s',
    to: 'now',
  },
};

// Shared datasource stubs; per-test responses are queued with mockResolvedValueOnce.
const mockRequest = jest.fn().mockResolvedValue([]);
const mockGetAdjustedInterval = jest.fn().mockReturnValue({
  start: '1681300260',
  end: '1681300320',
});
const mockGetTimeRangeParams = jest.fn().mockReturnValue({
  start: '1681300260',
  end: '1681300320',
});
// Identity interpolation by default; individual tests override per call.
const mockInterpolateString = jest.fn((str) => str);
// Expected headers for PrometheusCacheLevel.Low (1 minute → 60 seconds).
const defaultCacheHeaders = { headers: { 'X-Grafana-Cache': 'private, max-age=60' } };
||||
|
||||
// Tests for the labels-endpoint-backed resource client.
describe('LabelsApiClient', () => {
  let client: LabelsApiClient;

  beforeEach(() => {
    jest.clearAllMocks();
    // Only the datasource members the client touches are stubbed.
    client = new LabelsApiClient(mockRequest, {
      cacheLevel: PrometheusCacheLevel.Low,
      getAdjustedInterval: mockGetAdjustedInterval,
      getTimeRangeParams: mockGetTimeRangeParams,
      interpolateString: mockInterpolateString,
    } as unknown as PrometheusDatasource);
  });

  describe('start', () => {
    it('should initialize metrics and label keys', async () => {
      // First queued response feeds queryMetrics, second feeds queryLabelKeys.
      mockRequest.mockResolvedValueOnce(['metric1', 'metric2']).mockResolvedValueOnce(['label1', 'label2']);

      await client.start(mockTimeRange);

      expect(client.metrics).toEqual(['metric1', 'metric2']);
      expect(client.labelKeys).toEqual(['label1', 'label2']);
    });
  });

  describe('queryMetrics', () => {
    it('should fetch metrics and process histogram metrics', async () => {
      mockRequest.mockResolvedValueOnce(['metric1_bucket', 'metric2_sum', 'metric3_count']);

      const result = await client.queryMetrics(mockTimeRange);

      expect(result.metrics).toEqual(['metric1_bucket', 'metric2_sum', 'metric3_count']);
      // Only *_bucket series count as histogram metrics.
      expect(result.histogramMetrics).toEqual(['metric1_bucket']);
    });
  });

  describe('queryLabelKeys', () => {
    it('should fetch and sort label keys', async () => {
      mockRequest.mockResolvedValueOnce(['label2', 'label1', 'label3']);

      const result = await client.queryLabelKeys(mockTimeRange);

      expect(result).toEqual(['label1', 'label2', 'label3']);
      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/labels',
        {
          limit: '40000',
          start: expect.any(String),
          end: expect.any(String),
        },
        defaultCacheHeaders
      );
    });

    it('should include match parameter when provided', async () => {
      mockRequest.mockResolvedValueOnce(['label1', 'label2']);

      await client.queryLabelKeys(mockTimeRange, '{job="grafana"}');

      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/labels',
        {
          'match[]': '{job="grafana"}',
          limit: '40000',
          start: expect.any(String),
          end: expect.any(String),
        },
        defaultCacheHeaders
      );
    });
  });

  describe('queryLabelValues', () => {
    it('should fetch label values with proper encoding', async () => {
      mockRequest.mockResolvedValueOnce(['value1', 'value2']);
      mockInterpolateString.mockImplementationOnce((str) => str);

      const result = await client.queryLabelValues(mockTimeRange, 'job');

      expect(result).toEqual(['value1', 'value2']);
      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/label/job/values',
        {
          start: expect.any(String),
          end: expect.any(String),
          limit: '40000',
        },
        defaultCacheHeaders
      );
    });

    it('should handle UTF-8 label names', async () => {
      mockRequest.mockResolvedValueOnce(['value1', 'value2']);
      mockInterpolateString.mockImplementationOnce((str) => 'http.status:sum');

      await client.queryLabelValues(mockTimeRange, '"http.status:sum"');

      // Non-legacy label names are escaped into the URL-safe U__ form.
      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/label/U__http_2e_status:sum/values',
        {
          start: expect.any(String),
          end: expect.any(String),
          limit: '40000',
        },
        defaultCacheHeaders
      );
    });
  });
});
||||
|
||||
// Tests for the series-endpoint-backed resource client.
describe('SeriesApiClient', () => {
  let client: SeriesApiClient;

  beforeEach(() => {
    jest.clearAllMocks();
    client = new SeriesApiClient(mockRequest, {
      cacheLevel: PrometheusCacheLevel.Low,
      getAdjustedInterval: mockGetAdjustedInterval,
      getTimeRangeParams: mockGetTimeRangeParams,
      interpolateString: mockInterpolateString,
    } as unknown as PrometheusDatasource);
  });

  describe('start', () => {
    it('should initialize metrics and histogram metrics', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1_bucket' }, { __name__: 'metric2_sum' }]);

      await client.start(mockTimeRange);

      expect(client.metrics).toEqual(['metric1_bucket', 'metric2_sum']);
      expect(client.histogramMetrics).toEqual(['metric1_bucket']);
    });
  });

  describe('queryMetrics', () => {
    it('should fetch and process series data', async () => {
      mockRequest.mockResolvedValueOnce([
        { __name__: 'metric1', label1: 'value1' },
        { __name__: 'metric2', label2: 'value2' },
      ]);

      const result = await client.queryMetrics(mockTimeRange);

      expect(result.metrics).toEqual(['metric1', 'metric2']);
      // Label keys are cached as a side effect of scanning the series.
      expect(client.labelKeys).toEqual(['label1', 'label2']);
    });
  });

  describe('queryLabelKeys', () => {
    it('should throw error if match parameter is not provided', async () => {
      await expect(client.queryLabelKeys(mockTimeRange)).rejects.toThrow(
        'Series endpoint always expects at least one matcher'
      );
    });

    it('should fetch and process label keys from series', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1', label1: 'value1', label2: 'value2' }]);

      const result = await client.queryLabelKeys(mockTimeRange, '{job="grafana"}');

      expect(result).toEqual(['label1', 'label2']);
    });

    it('should use MATCH_ALL_LABELS when empty matcher is provided', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1', label1: 'value1', label2: 'value2' }]);

      const result = await client.queryLabelKeys(mockTimeRange, '{}');

      // '{}' is widened to the match-all matcher, which the endpoint accepts.
      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        expect.objectContaining({
          'match[]': '{__name__!=""}',
        }),
        expect.any(Object)
      );
      expect(result).toEqual(['label1', 'label2']);
    });
  });

  describe('queryLabelValues', () => {
    it('should fetch and process label values from series', async () => {
      mockRequest.mockResolvedValueOnce([
        { __name__: 'metric1', job: 'grafana' },
        { __name__: 'metric2', job: 'prometheus' },
      ]);

      const result = await client.queryLabelValues(mockTimeRange, 'job', '{__name__="metric1"}');

      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        expect.objectContaining({
          'match[]': '{__name__="metric1"}',
        }),
        expect.any(Object)
      );
      expect(result).toEqual(['grafana', 'prometheus']);
    });

    it('should create matcher with label when no matcher is provided', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1', job: 'grafana' }]);

      await client.queryLabelValues(mockTimeRange, 'job');

      // Falls back to selecting all series that carry a non-empty 'job' label.
      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        expect.objectContaining({
          'match[]': '{job!=""}',
        }),
        expect.any(Object)
      );
    });

    it('should create matcher with label when empty matcher is provided', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1', job: 'grafana' }]);

      await client.queryLabelValues(mockTimeRange, 'job', '{}');

      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        expect.objectContaining({
          'match[]': '{job!=""}',
        }),
        expect.any(Object)
      );
    });
  });
});
||||
|
||||
// Tests for the pure series-processing helper.
describe('processSeries', () => {
  it('should extract metrics and label keys from series data', () => {
    const result = processSeries([
      {
        __name__: 'alerts',
        alertname: 'AppCrash',
        alertstate: 'firing',
        instance: 'host.docker.internal:3000',
        job: 'grafana',
        severity: 'critical',
      },
      {
        __name__: 'alerts',
        alertname: 'AppCrash',
        alertstate: 'firing',
        instance: 'prometheus-utf8:9112',
        job: 'prometheus-utf8',
        severity: 'critical',
      },
      {
        __name__: 'counters_logins',
        app: 'backend',
        geohash: '9wvfgzurfzb',
        instance: 'fake-prometheus-data:9091',
        job: 'fake-data-gen',
        server: 'backend-01',
      },
    ]);

    // Check structure
    expect(result).toHaveProperty('metrics');
    expect(result).toHaveProperty('labelKeys');

    // Verify metrics are extracted correctly
    expect(result.metrics).toEqual(['alerts', 'counters_logins']);

    // Verify all metrics are unique
    expect(result.metrics.length).toBe(new Set(result.metrics).size);

    // Verify label keys are extracted correctly and don't include __name__
    expect(result.labelKeys).toContain('instance');
    expect(result.labelKeys).toContain('job');
    expect(result.labelKeys).not.toContain('__name__');

    // Verify all label keys are unique
    expect(result.labelKeys.length).toBe(new Set(result.labelKeys).size);
  });

  it('should handle empty series data', () => {
    const result = processSeries([]);

    expect(result.metrics).toEqual([]);
    expect(result.labelKeys).toEqual([]);
  });

  it('should handle series without __name__ attribute', () => {
    const series = [
      { instance: 'localhost:9090', job: 'prometheus' },
      { instance: 'localhost:9100', job: 'node' },
    ];

    const result = processSeries(series);

    // No metric names present — only label keys can be extracted.
    expect(result.metrics).toEqual([]);
    expect(result.labelKeys).toEqual(['instance', 'job']);
  });

  it('should extract label values for a specific key when findValuesForKey is provided', () => {
    const series = [
      {
        __name__: 'alerts',
        instance: 'host.docker.internal:3000',
        job: 'grafana',
        severity: 'critical',
      },
      {
        __name__: 'alerts',
        instance: 'prometheus-utf8:9112',
        job: 'prometheus-utf8',
        severity: 'critical',
      },
      {
        __name__: 'counters_logins',
        instance: 'fake-prometheus-data:9091',
        job: 'fake-data-gen',
        severity: 'warning',
      },
    ];

    // Test finding values for 'job' label
    const jobResult = processSeries(series, 'job');
    expect(jobResult.labelValues).toEqual(['fake-data-gen', 'grafana', 'prometheus-utf8']);

    // Test finding values for 'severity' label
    const severityResult = processSeries(series, 'severity');
    expect(severityResult.labelValues).toEqual(['critical', 'warning']);

    // Test finding values for 'instance' label
    const instanceResult = processSeries(series, 'instance');
    expect(instanceResult.labelValues).toEqual([
      'fake-prometheus-data:9091',
      'host.docker.internal:3000',
      'prometheus-utf8:9112',
    ]);
  });

  it('should return empty labelValues array when findValuesForKey is not provided', () => {
    const series = [
      {
        __name__: 'alerts',
        instance: 'host.docker.internal:3000',
        job: 'grafana',
      },
    ];

    const result = processSeries(series);
    expect(result.labelValues).toEqual([]);
  });

  it('should return empty labelValues array when findValuesForKey does not match any labels', () => {
    const series = [
      {
        __name__: 'alerts',
        instance: 'host.docker.internal:3000',
        job: 'grafana',
      },
    ];

    const result = processSeries(series, 'non_existent_label');
    expect(result.labelValues).toEqual([]);
  });
});
||||
|
||||
// Tests for the shared base-class behavior (series endpoint plumbing).
describe('BaseResourceClient', () => {
  const mockRequest = jest.fn();
  const mockGetTimeRangeParams = jest.fn();
  const mockDatasource = {
    cacheLevel: PrometheusCacheLevel.Low,
    getTimeRangeParams: mockGetTimeRangeParams,
  } as unknown as PrometheusDatasource;

  // BaseResourceClient is abstract; a trivial concrete subclass is needed to test it.
  class TestBaseResourceClient extends BaseResourceClient {
    constructor() {
      super(mockRequest, mockDatasource);
    }
  }

  let client: TestBaseResourceClient;

  beforeEach(() => {
    jest.clearAllMocks();
    client = new TestBaseResourceClient();
  });

  describe('querySeries', () => {
    const mockTimeRange = {
      from: dateTime(1681300292392),
      to: dateTime(1681300293392),
      raw: {
        from: 'now-1s',
        to: 'now',
      },
    };

    beforeEach(() => {
      mockGetTimeRangeParams.mockReturnValue({ start: '1681300260', end: '1681300320' });
    });

    it('should make request with correct parameters', async () => {
      mockRequest.mockResolvedValueOnce([{ __name__: 'metric1' }]);

      const result = await client.querySeries(mockTimeRange, '{job="grafana"}');

      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        {
          start: '1681300260',
          end: '1681300320',
          'match[]': '{job="grafana"}',
          limit: '40000',
        },
        { headers: { 'X-Grafana-Cache': 'private, max-age=60' } }
      );
      expect(result).toEqual([{ __name__: 'metric1' }]);
    });

    it('should use custom limit when provided', async () => {
      mockRequest.mockResolvedValueOnce([]);

      await client.querySeries(mockTimeRange, '{job="grafana"}', '1000');

      expect(mockRequest).toHaveBeenCalledWith(
        '/api/v1/series',
        {
          start: '1681300260',
          end: '1681300320',
          'match[]': '{job="grafana"}',
          limit: '1000',
        },
        { headers: { 'X-Grafana-Cache': 'private, max-age=60' } }
      );
    });

    it('should handle empty response', async () => {
      // Nullish payloads are coerced to an empty array.
      mockRequest.mockResolvedValueOnce(null);

      const result = await client.querySeries(mockTimeRange, '{job="grafana"}');

      expect(result).toEqual([]);
    });

    it('should handle non-array response', async () => {
      // Error-shaped payloads are also coerced to an empty array.
      mockRequest.mockResolvedValueOnce({ error: 'invalid response' });

      const result = await client.querySeries(mockTimeRange, '{job="grafana"}');

      expect(result).toEqual([]);
    });
  });
});
@ -0,0 +1,239 @@ |
||||
import { TimeRange } from '@grafana/data'; |
||||
import { BackendSrvRequest } from '@grafana/runtime'; |
||||
|
||||
import { getDefaultCacheHeaders } from './caching'; |
||||
import { DEFAULT_SERIES_LIMIT } from './components/metrics-browser/types'; |
||||
import { PrometheusDatasource } from './datasource'; |
||||
import { removeQuotesIfExist } from './language_provider'; |
||||
import { getRangeSnapInterval, processHistogramMetrics } from './language_utils'; |
||||
import { escapeForUtf8Support } from './utf8_support'; |
||||
|
||||
// Shape of /api/v1/series responses: one label→value record per series.
type PrometheusSeriesResponse = Array<{ [key: string]: string }>;
// Shape of /api/v1/labels and /api/v1/label/<name>/values responses.
type PrometheusLabelsResponse = string[];

/**
 * Contract shared by the labels-based and series-based resource clients.
 * `start` warms up the cached fields; the query* methods hit the API directly.
 */
export interface ResourceApiClient {
  metrics: string[];
  histogramMetrics: string[];
  labelKeys: string[];
  cachedLabelValues: Record<string, string[]>;

  start: (timeRange: TimeRange) => Promise<void>;

  queryMetrics: (timeRange: TimeRange) => Promise<{ metrics: string[]; histogramMetrics: string[] }>;
  queryLabelKeys: (timeRange: TimeRange, match?: string, limit?: string) => Promise<string[]>;
  queryLabelValues: (timeRange: TimeRange, labelKey: string, match?: string, limit?: string) => Promise<string[]>;

  querySeries: (timeRange: TimeRange, match: string, limit?: string) => Promise<PrometheusSeriesResponse>;
}

// Transport function injected by the datasource; resolves with the raw payload.
type RequestFn = (
  url: string,
  params?: Record<string, unknown>,
  options?: Partial<BackendSrvRequest>
) => Promise<unknown>;

// Sentinel for "matcher given but empty" — the series endpoint rejects it as-is.
const EMPTY_MATCHER = '{}';
// Matcher that selects every series (any metric name).
const MATCH_ALL_LABELS = '{__name__!=""}';
// Reserved Prometheus label that carries the metric name.
const METRIC_LABEL = '__name__';
||||
|
||||
export abstract class BaseResourceClient { |
||||
constructor( |
||||
protected readonly request: RequestFn, |
||||
protected readonly datasource: PrometheusDatasource |
||||
) {} |
||||
|
||||
protected async requestLabels( |
||||
url: string, |
||||
params?: Record<string, unknown>, |
||||
options?: Partial<BackendSrvRequest> |
||||
): Promise<PrometheusLabelsResponse> { |
||||
const response = await this.request(url, params, options); |
||||
return Array.isArray(response) ? response : []; |
||||
} |
||||
|
||||
protected async requestSeries( |
||||
url: string, |
||||
params?: Record<string, unknown>, |
||||
options?: Partial<BackendSrvRequest> |
||||
): Promise<PrometheusSeriesResponse> { |
||||
const response = await this.request(url, params, options); |
||||
return Array.isArray(response) ? response : []; |
||||
} |
||||
|
||||
/** |
||||
* Validates and transforms a matcher string for Prometheus series queries. |
||||
* |
||||
* @param match - The matcher string to validate and transform. Can be undefined, a specific matcher, or '{}'. |
||||
* @returns The validated and potentially transformed matcher string. |
||||
* @throws Error if the matcher is undefined or empty (null, undefined, or empty string). |
||||
* |
||||
* @example |
||||
* // Returns '{__name__!=""}' for empty matcher
|
||||
* validateAndTransformMatcher('{}') |
||||
* |
||||
* // Returns the original matcher for specific matchers
|
||||
* validateAndTransformMatcher('{job="grafana"}') |
||||
*/ |
||||
protected validateAndTransformMatcher(match?: string): string { |
||||
if (!match) { |
||||
throw new Error('Series endpoint always expects at least one matcher'); |
||||
} |
||||
return match === '{}' ? MATCH_ALL_LABELS : match; |
||||
} |
||||
|
||||
/** |
||||
* Fetches all time series that match a specific label matcher using **series** endpoint. |
||||
* |
||||
* @param {TimeRange} timeRange - Time range to use for the query |
||||
* @param {string} match - Label matcher to filter time series |
||||
* @param {string} limit - Maximum number of series to return |
||||
*/ |
||||
public querySeries = async (timeRange: TimeRange, match: string, limit: string = DEFAULT_SERIES_LIMIT) => { |
||||
const effectiveMatch = this.validateAndTransformMatcher(match); |
||||
const timeParams = this.datasource.getTimeRangeParams(timeRange); |
||||
const searchParams = { ...timeParams, 'match[]': effectiveMatch, limit }; |
||||
return await this.requestSeries('/api/v1/series', searchParams, getDefaultCacheHeaders(this.datasource.cacheLevel)); |
||||
}; |
||||
} |
||||
|
||||
export class LabelsApiClient extends BaseResourceClient implements ResourceApiClient { |
||||
public histogramMetrics: string[] = []; |
||||
public metrics: string[] = []; |
||||
public labelKeys: string[] = []; |
||||
public cachedLabelValues: Record<string, string[]> = {}; |
||||
|
||||
start = async (timeRange: TimeRange) => { |
||||
await this.queryMetrics(timeRange); |
||||
this.labelKeys = await this.queryLabelKeys(timeRange); |
||||
}; |
||||
|
||||
public queryMetrics = async (timeRange: TimeRange): Promise<{ metrics: string[]; histogramMetrics: string[] }> => { |
||||
this.metrics = await this.queryLabelValues(timeRange, METRIC_LABEL); |
||||
this.histogramMetrics = processHistogramMetrics(this.metrics); |
||||
return { metrics: this.metrics, histogramMetrics: this.histogramMetrics }; |
||||
}; |
||||
|
||||
/** |
||||
* Fetches all available label keys from Prometheus using labels endpoint. |
||||
* Uses the labels endpoint with optional match parameter for filtering. |
||||
* |
||||
* @param {TimeRange} timeRange - Time range to use for the query |
||||
* @param {string} match - Optional label matcher to filter results |
||||
* @param {string} limit - Maximum number of results to return |
||||
* @returns {Promise<string[]>} Array of label keys sorted alphabetically |
||||
*/ |
||||
public queryLabelKeys = async ( |
||||
timeRange: TimeRange, |
||||
match?: string, |
||||
limit: string = DEFAULT_SERIES_LIMIT |
||||
): Promise<string[]> => { |
||||
let url = '/api/v1/labels'; |
||||
const timeParams = getRangeSnapInterval(this.datasource.cacheLevel, timeRange); |
||||
const searchParams = { limit, ...timeParams, ...(match ? { 'match[]': match } : {}) }; |
||||
|
||||
const res = await this.requestLabels(url, searchParams, getDefaultCacheHeaders(this.datasource.cacheLevel)); |
||||
if (Array.isArray(res)) { |
||||
this.labelKeys = res.slice().sort(); |
||||
return this.labelKeys.slice(); |
||||
} |
||||
|
||||
return []; |
||||
}; |
||||
|
||||
/** |
||||
* Fetches all values for a specific label key from Prometheus using labels values endpoint. |
||||
* |
||||
* @param {TimeRange} timeRange - Time range to use for the query |
||||
* @param {string} labelKey - The label key to fetch values for |
||||
* @param {string} match - Optional label matcher to filter results |
||||
* @param {string} limit - Maximum number of results to return |
||||
* @returns {Promise<string[]>} Array of label values |
||||
*/ |
||||
public queryLabelValues = async ( |
||||
timeRange: TimeRange, |
||||
labelKey: string, |
||||
match?: string, |
||||
limit: string = DEFAULT_SERIES_LIMIT |
||||
): Promise<string[]> => { |
||||
const timeParams = this.datasource.getAdjustedInterval(timeRange); |
||||
const searchParams = { limit, ...timeParams, ...(match ? { 'match[]': match } : {}) }; |
||||
const interpolatedName = this.datasource.interpolateString(labelKey); |
||||
const interpolatedAndEscapedName = escapeForUtf8Support(removeQuotesIfExist(interpolatedName)); |
||||
const url = `/api/v1/label/${interpolatedAndEscapedName}/values`; |
||||
const value = await this.requestLabels(url, searchParams, getDefaultCacheHeaders(this.datasource.cacheLevel)); |
||||
return value ?? []; |
||||
}; |
||||
} |
||||
|
||||
export class SeriesApiClient extends BaseResourceClient implements ResourceApiClient { |
||||
public histogramMetrics: string[] = []; |
||||
public metrics: string[] = []; |
||||
public labelKeys: string[] = []; |
||||
public cachedLabelValues: Record<string, string[]> = {}; |
||||
|
||||
start = async (timeRange: TimeRange) => { |
||||
await this.queryMetrics(timeRange); |
||||
}; |
||||
|
||||
public queryMetrics = async (timeRange: TimeRange): Promise<{ metrics: string[]; histogramMetrics: string[] }> => { |
||||
const series = await this.querySeries(timeRange, MATCH_ALL_LABELS); |
||||
const { metrics, labelKeys } = processSeries(series); |
||||
this.metrics = metrics; |
||||
this.histogramMetrics = processHistogramMetrics(this.metrics); |
||||
this.labelKeys = labelKeys; |
||||
return { metrics: this.metrics, histogramMetrics: this.histogramMetrics }; |
||||
}; |
||||
|
||||
public queryLabelKeys = async ( |
||||
timeRange: TimeRange, |
||||
match?: string, |
||||
limit: string = DEFAULT_SERIES_LIMIT |
||||
): Promise<string[]> => { |
||||
const effectiveMatch = this.validateAndTransformMatcher(match); |
||||
const series = await this.querySeries(timeRange, effectiveMatch, limit); |
||||
const { labelKeys } = processSeries(series); |
||||
return labelKeys; |
||||
}; |
||||
|
||||
public queryLabelValues = async ( |
||||
timeRange: TimeRange, |
||||
labelKey: string, |
||||
match?: string, |
||||
limit: string = DEFAULT_SERIES_LIMIT |
||||
): Promise<string[]> => { |
||||
const effectiveMatch = !match || match === EMPTY_MATCHER ? `{${labelKey}!=""}` : match; |
||||
const series = await this.querySeries(timeRange, effectiveMatch, limit); |
||||
const { labelValues } = processSeries(series, labelKey); |
||||
return labelValues; |
||||
}; |
||||
} |
||||
|
||||
export function processSeries(series: Array<{ [key: string]: string }>, findValuesForKey?: string) { |
||||
const metrics: Set<string> = new Set(); |
||||
const labelKeys: Set<string> = new Set(); |
||||
const labelValues: Set<string> = new Set(); |
||||
|
||||
// Extract metrics and label keys
|
||||
series.forEach((item) => { |
||||
// Add the __name__ value to metrics
|
||||
if (METRIC_LABEL in item) { |
||||
metrics.add(item.__name__); |
||||
} |
||||
|
||||
// Add all keys except __name__ to labelKeys
|
||||
Object.keys(item).forEach((key) => { |
||||
if (key !== METRIC_LABEL) { |
||||
labelKeys.add(key); |
||||
} |
||||
if (findValuesForKey && key === findValuesForKey) { |
||||
labelValues.add(item[key]); |
||||
} |
||||
}); |
||||
}); |
||||
|
||||
return { |
||||
metrics: Array.from(metrics).sort(), |
||||
labelKeys: Array.from(labelKeys).sort(), |
||||
labelValues: Array.from(labelValues).sort(), |
||||
}; |
||||
} |
Loading…
Reference in new issue