mirror of https://github.com/grafana/grafana
Alerting: Use runtime data source for getting events from alert state history in the bar chart (#89307)
* Use runtime data source for getting events from alert state history in the bar chart
* extract translations
* refactor
* More refactor
* Update events limit
* Add info icon with tooltip info for label querying filter
* Add translations
* Create new useRuleHistoryRecords hook skipping extraction of common labels as they are not used
* Fix test
* update limit value for the events in the api to 5000
* Use state for rows key
* remove React import
* Address review comments
* Address review comments
* run prettier
* Remove duplicated handlers
parent c3337c39b1
commit 523d999414
@@ -0,0 +1,79 @@
import { useEffect, useMemo } from 'react';

import { DataQuery, DataQueryRequest, DataQueryResponse, TestDataSourceResponse } from '@grafana/data';
import { RuntimeDataSource, sceneUtils } from '@grafana/scenes';
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { dispatch } from 'app/store/store';

import { stateHistoryApi } from '../../../api/stateHistoryApi';
import { DataSourceInformation } from '../../../home/Insights';

import { LIMIT_EVENTS } from './EventListSceneObject';
import { historyResultToDataFrame } from './utils';

const historyDataSourceUid = '__history_api_ds_uid__';
const historyDataSourcePluginId = '__history_api_ds_pluginId__';

export const alertStateHistoryDatasource: DataSourceInformation = {
  type: historyDataSourcePluginId,
  uid: historyDataSourceUid,
  settings: undefined,
};

export function useRegisterHistoryRuntimeDataSource() {
  // memoize the data source so it is not registered again on every render
  const ds = useMemo(() => new HistoryAPIDatasource(historyDataSourceUid, historyDataSourcePluginId), []);
  useEffect(() => {
    try {
      // avoid showing an error when the data source is already registered
      sceneUtils.registerRuntimeDataSource({ dataSource: ds });
    } catch (e) {}
  }, [ds]);
}

/**
 * This class is a runtime data source that fetches the events from the alert state history API.
 * The events are grouped by alert instance and then converted to a DataFrame list.
 * The DataFrame list is then grouped by time, which allows us to filter the events by labels.
 * The result is a time series panel that shows the events for the selected time range, filtered by labels.
 */
class HistoryAPIDatasource extends RuntimeDataSource {
  constructor(pluginId: string, uid: string) {
    super(uid, pluginId);
  }

  async query(request: DataQueryRequest<DataQuery>): Promise<DataQueryResponse> {
    const from = request.range.from.unix();
    const to = request.range.to.unix();

    return {
      data: historyResultToDataFrame(await getHistory(from, to)),
    };
  }

  testDatasource(): Promise<TestDataSourceResponse> {
    return Promise.resolve({ status: 'success', message: 'Data source is working', title: 'Success' });
  }
}

/**
 * Fetch the history events from the alert state history API.
 * @param from the start of the time range (unix timestamp, in seconds)
 * @param to the end of the time range (unix timestamp, in seconds)
 * @returns the history events, filtered only by time
 */
export const getHistory = (from: number, to: number) => {
  return dispatch(
    stateHistoryApi.endpoints.getRuleHistory.initiate(
      {
        from: from,
        to: to,
        limit: LIMIT_EVENTS,
      },
      {
        forceRefetch: Boolean(getTimeSrv().getAutoRefreshInteval().interval), // force a refetch when the auto-refresh option is enabled
      }
    )
  ).unwrap();
};
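For orientation, the sketch below shows how a Scenes panel might consume this runtime data source; it is not part of the diff. The component name CentralHistoryChart, the import path, the refId, and the panel title are hypothetical, and the wiring assumes the usual SceneQueryRunner/PanelBuilders API from @grafana/scenes.

import { useMemo } from 'react';

import { PanelBuilders, SceneFlexItem, SceneFlexLayout, SceneQueryRunner } from '@grafana/scenes';

// Hypothetical path: assumes the file above is saved as CentralHistoryRuntimeDataSource.tsx
import { alertStateHistoryDatasource, useRegisterHistoryRuntimeDataSource } from './CentralHistoryRuntimeDataSource';

export function CentralHistoryChart() {
  // Register the runtime data source before any scene query targets it.
  useRegisterHistoryRuntimeDataSource();

  const scene = useMemo(() => {
    // The query body is irrelevant: the runtime data source only looks at the request time range.
    const queryRunner = new SceneQueryRunner({
      datasource: alertStateHistoryDatasource,
      queries: [{ refId: 'A' }],
    });

    return new SceneFlexLayout({
      children: [
        new SceneFlexItem({
          body: PanelBuilders.timeseries().setTitle('Alert state history').setData(queryRunner).build(),
        }),
      ],
    });
  }, []);

  return <scene.Component model={scene} />;
}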
@@ -0,0 +1,30 @@
import { render, waitFor } from 'test/test-utils';
import { byLabelText, byTestId } from 'testing-library-selector';

import { setupMswServer } from '../../../mockApi';

import { HistoryEventsList } from './EventListSceneObject';

setupMswServer();
// The MSW server is set up to intercept the history API call and return mocked data by default.
// The mocked data consists of 4 rows: 2 rows for alert1 and 2 rows for alert2.

const ui = {
  rowHeader: byTestId('event-row-header'),
};
describe('HistoryEventsList', () => {
  it('should render the list correctly filtered by label in filter variable', async () => {
    render(<HistoryEventsList valueInfilterTextBox={'alertname=alert1'} />);
    await waitFor(() => {
      expect(byLabelText('Loading bar').query()).not.toBeInTheDocument();
    });
    expect(ui.rowHeader.getAll()).toHaveLength(2); // 2 events for alert1
    expect(ui.rowHeader.getAll()[0]).toHaveTextContent(
      'June 14 at 06:39:00alert1alertnamealert1grafana_folderFOLDER Ahandler/alerting/*'
    );
    expect(ui.rowHeader.getAll()[1]).toHaveTextContent(
      'June 14 at 06:38:30alert1alertnamealert1grafana_folderFOLDER Ahandler/alerting/*'
    );
  });
});
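The mocked history response itself lives in ../../../mocks/alertRuleApi and is not part of this diff. Based on how historyResultToDataFrame reads it below (a DataFrameJSON whose values[0] holds timestamps and values[1] the corresponding log lines), it is roughly of the following shape; the concrete timestamps, states, and labels here are illustrative only, not the actual mock data.

import { DataFrameJSON } from '@grafana/data';

// Illustrative shape only — not the actual mock data.
const exampleHistoryResponse: DataFrameJSON = {
  data: {
    values: [
      // values[0]: one timestamp (in ms) per state-transition event
      [1718347110000, 1718347140000, 1718347110000, 1718347140000],
      // values[1]: one "line" per event with the transition and the instance labels
      [
        { previous: 'Normal', current: 'Alerting', labels: { alertname: 'alert1', grafana_folder: 'FOLDER A' } },
        { previous: 'Alerting', current: 'Normal', labels: { alertname: 'alert1', grafana_folder: 'FOLDER A' } },
        { previous: 'Normal', current: 'Alerting', labels: { alertname: 'alert2', grafana_folder: 'FOLDER A' } },
        { previous: 'Alerting', current: 'Normal', labels: { alertname: 'alert2', grafana_folder: 'FOLDER A' } },
      ],
    ],
  },
};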
@@ -0,0 +1,35 @@
import {
  getHistoryResponse,
  time_0,
  time_plus_10,
  time_plus_15,
  time_plus_30,
  time_plus_5,
} from '../../../mocks/alertRuleApi';

import { historyResultToDataFrame } from './utils';

describe('historyResultToDataFrame', () => {
  it('should return correct result grouping by 10 seconds', async () => {
    const result = historyResultToDataFrame(getHistoryResponse([time_0, time_0, time_plus_30, time_plus_30]));
    expect(result[0].length).toBe(2);
    expect(result[0].fields[0].name).toBe('time');
    expect(result[0].fields[1].name).toBe('value');
    expect(result[0].fields[0].values).toStrictEqual([time_0, time_plus_30]);
    expect(result[0].fields[1].values).toStrictEqual([2, 2]);

    const result2 = historyResultToDataFrame(getHistoryResponse([time_0, time_plus_5, time_plus_30, time_plus_30]));
    expect(result2[0].length).toBe(2);
    expect(result2[0].fields[0].name).toBe('time');
    expect(result2[0].fields[1].name).toBe('value');
    expect(result2[0].fields[0].values).toStrictEqual([time_0, time_plus_30]);
    expect(result2[0].fields[1].values).toStrictEqual([2, 2]);

    const result3 = historyResultToDataFrame(getHistoryResponse([time_0, time_plus_15, time_plus_10, time_plus_30]));
    expect(result3[0].length).toBe(3);
    expect(result3[0].fields[0].name).toBe('time');
    expect(result3[0].fields[1].name).toBe('value');
    expect(result3[0].fields[0].values).toStrictEqual([time_0, time_plus_10, time_plus_30]);
    expect(result3[0].fields[1].values).toStrictEqual([1, 2, 1]);
  });
});
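The expected bucket counts above follow from the 10-second grouping implemented in the next file: each timestamp is floored to a multiple of the grouping interval and the events per bucket are counted. A standalone sketch of that arithmetic (the helper name bucketCounts is made up for illustration):

const GROUPING_INTERVAL = 10 * 1000; // 10 seconds, same value as in the utils file below

// Floor each timestamp (ms) to its 10-second bucket and count the events per bucket.
function bucketCounts(timestamps: number[]): Map<number, number> {
  const counts = new Map<number, number>();
  for (const ts of timestamps) {
    const bucket = Math.floor(ts / GROUPING_INTERVAL) * GROUPING_INTERVAL;
    counts.set(bucket, (counts.get(bucket) ?? 0) + 1);
  }
  return counts;
}

// Assuming the mock times are offsets of 5/10/15/30 seconds from time_0: time_0 and time_plus_5
// share a bucket (count 2), and time_plus_10 and time_plus_15 share the next one — matching the
// assertions above.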
@@ -0,0 +1,138 @@
import { groupBy } from 'lodash';

import { DataFrame, Field as DataFrameField, DataFrameJSON, Field, FieldType } from '@grafana/data';
import { fieldIndexComparer } from '@grafana/data/src/field/fieldComparers';

import { labelsMatchMatchers, parseMatchers } from '../../../utils/alertmanager';
import { LogRecord } from '../state-history/common';
import { isLine, isNumbers } from '../state-history/useRuleHistoryRecords';

import { LABELS_FILTER } from './CentralAlertHistoryScene';

const GROUPING_INTERVAL = 10 * 1000; // 10 seconds
const QUERY_PARAM_PREFIX = 'var-'; // prefix used by Grafana to sync variables in the URL

/*
 * This function converts the history response to a DataFrame list and filters the data by labels.
 * The response is a list of log records; each log record has a timestamp and a line.
 * We group all records by alert instance (unique set of labels) and create a DataFrame for each group (instance).
 * This allows us to filter by labels in the groupDataFramesByTimeAndFilterByLabels function.
 */
export function historyResultToDataFrame(data: DataFrameJSON): DataFrame[] {
  const tsValues = data?.data?.values[0] ?? [];
  const timestamps: number[] = isNumbers(tsValues) ? tsValues : [];
  const lines = data?.data?.values[1] ?? [];

  const logRecords = timestamps.reduce((acc: LogRecord[], timestamp: number, index: number) => {
    const line = lines[index];
    // values property can be undefined for some instance states (e.g. NoData)
    if (isLine(line)) {
      acc.push({ timestamp, line });
    }
    return acc;
  }, []);

  // Group log records by alert instance
  const logRecordsByInstance = groupBy(logRecords, (record: LogRecord) => {
    return JSON.stringify(record.line.labels);
  });

  // Convert each group of log records to a DataFrame
  const dataFrames: DataFrame[] = Object.entries(logRecordsByInstance).map<DataFrame>(([key, records]) => {
    // key is the stringified labels
    return logRecordsToDataFrame(key, records);
  });

  // Group DataFrames by time and filter by labels
  return groupDataFramesByTimeAndFilterByLabels(dataFrames);
}

// Scenes syncs variables in the URL, adding a prefix to the variable name.
function getFilterInQueryParams() {
  const queryParams = new URLSearchParams(window.location.search);
  return queryParams.get(`${QUERY_PARAM_PREFIX}${LABELS_FILTER}`) ?? '';
}

/*
 * This function groups the data frames by time and filters them by labels.
 * The grouping interval is set to 10 seconds.
 */
function groupDataFramesByTimeAndFilterByLabels(dataFrames: DataFrame[]): DataFrame[] {
  // Filter data frames by labels. This is used to filter out the data frames that do not match the query.
  const filterValue = getFilterInQueryParams();
  const dataframesFiltered = dataFrames.filter((frame) => {
    const labels = JSON.parse(frame.name ?? ''); // the frame name holds the stringified labels
    const matchers = Boolean(filterValue) ? parseMatchers(filterValue) : [];
    return labelsMatchMatchers(labels, matchers);
  });
  // Extract time fields from filtered data frames
  const timeFieldList = dataframesFiltered.flatMap((frame) => frame.fields.find((field) => field.name === 'time'));

  // Group time values by interval
  const groupedTimeFields = groupBy(
    timeFieldList?.flatMap((tf) => tf?.values),
    (time: number) => Math.floor(time / GROUPING_INTERVAL) * GROUPING_INTERVAL
  );

  // Create a new time field with the grouped (bucketed) time values
  const newTimeField: Field = {
    name: 'time',
    type: FieldType.time,
    values: Object.keys(groupedTimeFields).map(Number),
    config: { displayName: 'Time', custom: { fillOpacity: 100 } },
  };

  // Create a count field with the number of records in each group
  const countField: Field = {
    name: 'value',
    type: FieldType.number,
    values: Object.values(groupedTimeFields).map((group) => group.length),
    config: {},
  };

  // Return a new DataFrame with the time and count fields
  return [
    {
      fields: [newTimeField, countField],
      length: newTimeField.values.length,
    },
  ];
}

/*
 * This function converts the log records of a single alert instance to a DataFrame.
 * The DataFrame has two fields: time and value.
 * The time field holds the timestamp of each log record.
 * The value field is always 1.
 */
function logRecordsToDataFrame(instanceLabels: string, records: LogRecord[]): DataFrame {
  const timeField: DataFrameField = {
    name: 'time',
    type: FieldType.time,
    values: [...records.map((record) => record.timestamp)],
    config: { displayName: 'Time', custom: { fillOpacity: 100 } },
  };

  // Sort the time field values
  const timeIndex = timeField.values.map((_, index) => index);
  timeIndex.sort(fieldIndexComparer(timeField));

  // Create a DataFrame with the time and value fields
  const frame: DataFrame = {
    fields: [
      {
        ...timeField,
        values: timeField.values.map((_, i) => timeField.values[timeIndex[i]]),
      },
      {
        name: instanceLabels,
        type: FieldType.number,
        values: timeField.values.map(() => 1),
        config: {},
      },
    ],
    length: timeField.values.length,
    name: instanceLabels,
  };

  return frame;
}
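For reference, the label filtering in groupDataFramesByTimeAndFilterByLabels reuses the Alertmanager matcher utilities already imported at the top of the file. A minimal sketch of how a filter value like the one used in the test ('alertname=alert1') is applied to an instance's labels; the example values are made up for illustration.

import { labelsMatchMatchers, parseMatchers } from '../../../utils/alertmanager';

// Hypothetical example values: in the real code the filter comes from the 'var-'-prefixed
// query parameter and the labels are parsed from the frame name.
const filterValue = 'alertname=alert1';
const labels = { alertname: 'alert1', grafana_folder: 'FOLDER A' };

const matchers = filterValue ? parseMatchers(filterValue) : [];
const isMatch = labelsMatchMatchers(labels, matchers); // true for this example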