Infinite scroll: exclude visible range from new requests (#80638)

* Infinite scroll: exclude visible range from new requests

* Infinite scroll: revert skipping 1 millisecond on new requests

* Formatting

* Logs models: filter exact duplicates in the response

* mergeDataSeries: do not mutate currentData

* runMoreLogsQueries: return queryResponse while loading

* Infinite scrolling: use special refId for infinite scrolling queries

* Remove log

* Update public/app/features/logs/logsModel.ts

Co-authored-by: Sven Grossmann <sven.grossmann@grafana.com>

* combinePanelData: refactor and add unit test

* logsModel: export infinite scroll refid

* Formatting

* logsModel: add deduplication unit test

* Formatting

* findMatchingRow: add unit test

* Fix test title

* Fix imports order

---------

Co-authored-by: Sven Grossmann <sven.grossmann@grafana.com>
pull/80695/head^2
Matias Chomicki 1 year ago committed by GitHub
parent 2aa533c6c5
commit 88ee7a1c62
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 20
      public/app/features/explore/state/query.ts
  2. 6
      public/app/features/explore/utils/decorators.ts
  3. 48
      public/app/features/logs/components/InfiniteScroll.test.tsx
  4. 10
      public/app/features/logs/components/InfiniteScroll.tsx
  5. 49
      public/app/features/logs/logsModel.test.ts
  6. 37
      public/app/features/logs/logsModel.ts
  7. 84
      public/app/features/logs/response.test.ts
  8. 6
      public/app/features/logs/response.ts
  9. 44
      public/app/features/logs/utils.test.ts
  10. 11
      public/app/features/logs/utils.ts

@ -38,6 +38,8 @@ import {
} from 'app/core/utils/explore';
import { getShiftedTimeRange } from 'app/core/utils/timePicker';
import { getCorrelationsBySourceUIDs } from 'app/features/correlations/utils';
import { infiniteScrollRefId } from 'app/features/logs/logsModel';
import { combinePanelData } from 'app/features/logs/response';
import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors';
import { MIXED_DATASOURCE_NAME } from 'app/plugins/datasource/mixed/MixedDataSource';
import {
@ -55,7 +57,7 @@ import { notifyApp } from '../../../core/actions';
import { createErrorNotification } from '../../../core/copy/appNotification';
import { runRequest } from '../../query/state/runRequest';
import { visualisationTypeKey } from '../Logs/utils/logs';
import { decorateData, mergeDataSeries } from '../utils/decorators';
import { decorateData } from '../utils/decorators';
import {
getSupplementaryQueryProvider,
storeSupplementaryQueryEnabled,
@ -726,6 +728,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
const queries = logQueries.map((query: DataQuery) => ({
...query,
datasource: query.datasource || datasourceInstance?.getRef(),
refId: `${infiniteScrollRefId}${query.refId}`,
}));
if (!hasNonEmptyQuery(queries) || !datasourceInstance) {
@ -751,10 +754,13 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));
newQuerySource = combineLatest([runRequest(datasourceInstance, transaction.request), correlations$]).pipe(
mergeMap(([data, correlations]) =>
decorateData(
// Query splitting, otherwise duplicates results
data.state === LoadingState.Done ? mergeDataSeries(queryResponse, data) : data,
mergeMap(([data, correlations]) => {
// For query splitting, otherwise duplicates results
if (data.state !== LoadingState.Done) {
return of(queryResponse);
}
return decorateData(
combinePanelData(queryResponse, data),
queryResponse,
absoluteRange,
undefined,
@ -762,8 +768,8 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
correlations,
showCorrelationEditorLinks,
defaultCorrelationEditorDatasource
)
)
);
})
);
newQuerySource.subscribe({

@ -16,7 +16,6 @@ import {
} from '@grafana/data';
import { config } from '@grafana/runtime';
import { DataQuery } from '@grafana/schema';
import { combineResponses } from 'app/features/logs/response';
import { refreshIntervalToSortOrder } from '../../../core/utils/explore';
import { ExplorePanelData } from '../../../types';
@ -312,11 +311,6 @@ export function decorateData(
);
}
/**
 * Merges the series from `newData` into `currentData` using combineResponses().
 *
 * Fix: the previous implementation assigned the combined series back onto
 * `currentData`, mutating the caller's PanelData in place (the defect this
 * change set names: "mergeDataSeries: do not mutate currentData"). Return a
 * shallow copy carrying the merged series instead, leaving both inputs
 * untouched. Callers that use the return value are unaffected.
 *
 * @param currentData - the PanelData accumulated so far (not mutated)
 * @param newData - the PanelData whose series should be merged in (not mutated)
 * @returns a new PanelData with the combined series
 */
export function mergeDataSeries(currentData: PanelData, newData: PanelData): PanelData {
  const series = combineResponses({ data: currentData.series }, { data: newData.series }).data;
  return { ...currentData, series };
}
/**
* Check if frame contains time series, which for our purpose means 1 time column and 1 or more numeric columns.
*/

@ -152,7 +152,47 @@ describe('InfiniteScroll', () => {
expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
});
describe('With absolute range', () => {
test('Requests newer logs from the most recent timestamp', async () => {
const startPosition = order === LogsSortOrder.Descending ? 10 : 90; // Scroll top
const endPosition = order === LogsSortOrder.Descending ? 0 : 100; // Scroll bottom
const loadMoreMock = jest.fn();
const { element, events } = setup(loadMoreMock, startPosition);
expect(await screen.findByTestId('contents')).toBeInTheDocument();
element.scrollTop = endPosition;
act(() => {
events['scroll'](new Event('scroll'));
});
expect(loadMoreMock).toHaveBeenCalledWith({
from: rows[rows.length - 1].timeEpochMs,
to: absoluteRange.to,
});
});
test('Requests older logs from the oldest timestamp', async () => {
const startPosition = order === LogsSortOrder.Ascending ? 10 : 90; // Scroll top
const endPosition = order === LogsSortOrder.Ascending ? 0 : 100; // Scroll bottom
const loadMoreMock = jest.fn();
const { element, events } = setup(loadMoreMock, startPosition);
expect(await screen.findByTestId('contents')).toBeInTheDocument();
element.scrollTop = endPosition;
act(() => {
events['scroll'](new Event('scroll'));
});
expect(loadMoreMock).toHaveBeenCalledWith({
from: absoluteRange.from,
to: rows[0].timeEpochMs,
});
});
describe('With absolute range matching visible range', () => {
function setup(loadMoreMock: () => void, startPosition: number, rows: LogRowModel[]) {
const { element, events } = getMockElement(startPosition);
render(
@ -175,6 +215,7 @@ describe('InfiniteScroll', () => {
])(
'It does not request more when scrolling %s',
async (_: string, startPosition: number, endPosition: number) => {
// Visible range matches the current range
const rows = createLogRows(absoluteRange.from, absoluteRange.to);
const loadMoreMock = jest.fn();
const { element, events } = setup(loadMoreMock, startPosition, rows);
@ -188,11 +229,12 @@ describe('InfiniteScroll', () => {
expect(loadMoreMock).not.toHaveBeenCalled();
expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
expect(await screen.findByTestId('end-of-range')).toBeInTheDocument();
}
);
});
describe('With relative range', () => {
describe('With relative range matching visible range', () => {
function setup(loadMoreMock: () => void, startPosition: number, rows: LogRowModel[]) {
const { element, events } = getMockElement(startPosition);
render(
@ -215,6 +257,7 @@ describe('InfiniteScroll', () => {
])(
'It does not request more when scrolling %s',
async (_: string, startPosition: number, endPosition: number) => {
// Visible range matches the current range
const rows = createLogRows(absoluteRange.from, absoluteRange.to);
const loadMoreMock = jest.fn();
const { element, events } = setup(loadMoreMock, startPosition, rows);
@ -228,6 +271,7 @@ describe('InfiniteScroll', () => {
expect(loadMoreMock).not.toHaveBeenCalled();
expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
expect(await screen.findByTestId('end-of-range')).toBeInTheDocument();
}
);
});

@ -128,15 +128,19 @@ export const InfiniteScroll = ({
};
const styles = {
limitReached: css({
messageContainer: css({
textAlign: 'center',
padding: 0.25,
}),
};
const outOfRangeMessage = <div className={styles.limitReached}>End of the selected time range.</div>;
const outOfRangeMessage = (
<div className={styles.messageContainer} data-testid="end-of-range">
End of the selected time range.
</div>
);
const loadingMessage = (
<div className={styles.limitReached}>
<div className={styles.messageContainer}>
<Spinner />
</div>
);

@ -21,6 +21,7 @@ import {
toDataFrame,
} from '@grafana/data';
import { config } from '@grafana/runtime';
import { getMockFrames } from 'app/plugins/datasource/loki/__mocks__/frames';
import { MockObservableDataSourceApi } from '../../../test/mocks/datasource_srv';
@ -30,6 +31,7 @@ import {
dedupLogRows,
filterLogLevels,
getSeriesProperties,
infiniteScrollRefId,
LIMIT_LABEL,
logRowToSingleRowDataFrame,
logSeriesToLogsModel,
@ -231,7 +233,7 @@ const emptyLogsModel = {
describe('dataFrameToLogsModel', () => {
it('given empty series should return empty logs model', () => {
expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
expect(dataFrameToLogsModel([], 0)).toMatchObject(emptyLogsModel);
});
it('given series without correct series name should return empty logs model', () => {
@ -1016,6 +1018,51 @@ describe('dataFrameToLogsModel', () => {
const logsModel = dataFrameToLogsModel(series, 1);
expect(logsModel.rows[0].uid).toBe('A_0');
});
// Infinite-scroll requests reuse part of the visible time range, so the
// response may repeat rows already on screen. These tests cover the
// deduplication dataFrameToLogsModel applies only to such responses,
// recognized by the infiniteScrollRefId prefix on query refIds.
describe('infinite scrolling', () => {
  let frameA: DataFrame, frameB: DataFrame;
  beforeEach(() => {
    // Each mock frame carries two identical rows (same time, line, tsNs, id),
    // so every frame contains one exact duplicate.
    const { logFrameA, logFrameB } = getMockFrames();
    logFrameA.refId = `${infiniteScrollRefId}-A`;
    logFrameA.fields[0].values = [1, 1];
    logFrameA.fields[1].values = ['line', 'line'];
    logFrameA.fields[3].values = ['3000000', '3000000'];
    logFrameA.fields[4].values = ['id', 'id'];
    logFrameB.refId = `${infiniteScrollRefId}-B`;
    logFrameB.fields[0].values = [2, 2];
    logFrameB.fields[1].values = ['line 2', 'line 2'];
    logFrameB.fields[3].values = ['4000000', '4000000'];
    logFrameB.fields[4].values = ['id2', 'id2'];
    frameA = logFrameA;
    frameB = logFrameB;
  });
  it('deduplicates repeated log frames when invoked from infinite scrolling results', () => {
    // refIds carrying the infiniteScrollRefId prefix enable duplicate
    // filtering: 4 rows in, 2 unique rows out.
    const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [
      { refId: `${infiniteScrollRefId}-A` },
      { refId: `${infiniteScrollRefId}-B` },
    ]);
    expect(logsModel.rows).toHaveLength(2);
    expect(logsModel.rows[0].entry).toBe(frameA.fields[1].values[0]);
    expect(logsModel.rows[1].entry).toBe(frameB.fields[1].values[0]);
  });
  it('does not remove repeated log frames when invoked from other contexts', () => {
    // Plain refIds (no prefix): all 4 rows are kept, duplicates included.
    frameA.refId = 'A';
    frameB.refId = 'B';
    const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [
      { refId: 'A' },
      { refId: 'B' },
    ]);
    expect(logsModel.rows).toHaveLength(4);
    expect(logsModel.rows[0].entry).toBe(frameA.fields[1].values[0]);
    expect(logsModel.rows[1].entry).toBe(frameA.fields[1].values[1]);
    expect(logsModel.rows[2].entry).toBe(frameB.fields[1].values[0]);
    expect(logsModel.rows[3].entry).toBe(frameB.fields[1].values[1]);
  });
});
});
describe('logSeriesToLogsModel', () => {

@ -46,7 +46,7 @@ import { ansicolor, colors } from '@grafana/ui';
import { getThemeColor } from 'app/core/utils/colors';
import { LogsFrame, parseLogsFrame } from './logsFrame';
import { getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils';
import { findMatchingRow, getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils';
export const LIMIT_LABEL = 'Line limit';
export const COMMON_LABELS = 'Common labels';
@ -202,6 +202,8 @@ function isLogsData(series: DataFrame) {
return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
}
export const infiniteScrollRefId = 'infinite-scroll-';
/**
* Convert dataFrame into LogsModel which consists of creating separate array of log rows and metrics series. Metrics
* series can be either already included in the dataFrame or will be computed from the log rows.
@ -216,8 +218,27 @@ export function dataFrameToLogsModel(
absoluteRange?: AbsoluteTimeRange,
queries?: DataQuery[]
): LogsModel {
// Until nanosecond precision for requests is supported, we need to account for possible duplicate rows.
let infiniteScrollingResults = false;
queries = queries?.map((query) => {
if (query.refId.includes(infiniteScrollRefId)) {
infiniteScrollingResults = true;
return {
...query,
refId: query.refId.replace(infiniteScrollRefId, ''),
};
}
return query;
});
if (infiniteScrollingResults) {
dataFrame = dataFrame.map((frame) => ({
...frame,
refId: frame.refId?.replace(infiniteScrollRefId, ''),
}));
}
const { logSeries } = separateLogsAndMetrics(dataFrame);
const logsModel = logSeriesToLogsModel(logSeries, queries);
const logsModel = logSeriesToLogsModel(logSeries, queries, infiniteScrollingResults);
if (logsModel) {
// Create histogram metrics from logs using the interval as bucket size for the line count
@ -351,7 +372,11 @@ function parseTime(
* Converts dataFrames into LogsModel. This involves merging them into one list, sorting them and computing metadata
* like common labels.
*/
export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[] = []): LogsModel | undefined {
export function logSeriesToLogsModel(
logSeries: DataFrame[],
queries: DataQuery[] = [],
filterDuplicateRows = false
): LogsModel | undefined {
if (logSeries.length === 0) {
return undefined;
}
@ -387,7 +412,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
const flatAllLabels = allLabels.flat();
const commonLabels = flatAllLabels.length > 0 ? findCommonLabels(flatAllLabels) : {};
const rows: LogRowModel[] = [];
let rows: LogRowModel[] = [];
let hasUniqueLabels = false;
for (const info of allSeries) {
@ -453,6 +478,10 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
row.rowId = idField.values[j];
}
if (filterDuplicateRows && findMatchingRow(row, rows)) {
continue;
}
rows.push(row);
}
}

@ -1,7 +1,7 @@
import { DataQueryResponse, QueryResultMetaStat } from '@grafana/data';
import { DataQueryResponse, LoadingState, PanelData, QueryResultMetaStat, getDefaultTimeRange } from '@grafana/data';
import { getMockFrames } from 'app/plugins/datasource/loki/__mocks__/frames';
import { cloneQueryResponse, combineResponses } from './response';
import { cloneQueryResponse, combinePanelData, combineResponses } from './response';
describe('cloneQueryResponse', () => {
const { logFrameA } = getMockFrames();
@ -409,3 +409,83 @@ describe('combineResponses', () => {
});
});
});
// Unit test for combinePanelData() (response.ts): merging the series of two
// PanelData objects produces a single frame with concatenated field values,
// while keeping the first argument's state and timeRange.
describe('combinePanelData', () => {
  it('combines series within PanelData instances', () => {
    // Two mock Loki log frames, each wrapped in its own PanelData.
    const { logFrameA, logFrameB } = getMockFrames();
    const panelDataA: PanelData = {
      state: LoadingState.Done,
      series: [logFrameA],
      timeRange: getDefaultTimeRange(),
    };
    const panelDataB: PanelData = {
      state: LoadingState.Done,
      series: [logFrameB],
      timeRange: getDefaultTimeRange(),
    };
    // Expected: one merged frame (refId 'A', length 4) whose field values are
    // the concatenation of both frames' values, stats combined in meta.
    expect(combinePanelData(panelDataA, panelDataB)).toEqual({
      state: panelDataA.state,
      series: [
        {
          fields: [
            {
              config: {},
              name: 'Time',
              type: 'time',
              values: [1, 2, 3, 4],
            },
            {
              config: {},
              name: 'Line',
              type: 'string',
              values: ['line3', 'line4', 'line1', 'line2'],
            },
            {
              config: {},
              name: 'labels',
              type: 'other',
              values: [
                {
                  otherLabel: 'other value',
                },
                {
                  label: 'value',
                },
                {
                  otherLabel: 'other value',
                },
              ],
            },
            {
              config: {},
              name: 'tsNs',
              type: 'string',
              values: ['1000000', '2000000', '3000000', '4000000'],
            },
            {
              config: {},
              name: 'id',
              type: 'string',
              values: ['id3', 'id4', 'id1', 'id2'],
            },
          ],
          length: 4,
          meta: {
            custom: {
              frameType: 'LabeledTimeValues',
            },
            stats: [
              {
                displayName: 'Summary: total bytes processed',
                unit: 'decbytes',
                value: 33,
              },
            ],
          },
          refId: 'A',
        },
      ],
      timeRange: panelDataA.timeRange,
    });
  });
});

@ -5,10 +5,16 @@ import {
DataQueryResponseData,
Field,
FieldType,
PanelData,
QueryResultMetaStat,
shallowCompare,
} from '@grafana/data';
/**
 * Returns a copy of `currentData` whose series is the result of merging
 * `newData.series` into `currentData.series` via combineResponses().
 * Neither input PanelData is mutated.
 *
 * @param currentData - the PanelData accumulated so far
 * @param newData - the PanelData to merge in
 * @returns a new PanelData carrying the combined series
 */
export function combinePanelData(currentData: PanelData, newData: PanelData): PanelData {
  const merged = combineResponses({ data: currentData.series }, { data: newData.series });
  return { ...currentData, series: merged.data };
}
export function combineResponses(currentResult: DataQueryResponse | null, newResult: DataQueryResponse) {
if (!currentResult) {
return cloneQueryResponse(newResult);

@ -9,12 +9,15 @@ import {
MutableDataFrame,
DataFrame,
} from '@grafana/data';
import { getMockFrames } from 'app/plugins/datasource/loki/__mocks__/frames';
import { logSeriesToLogsModel } from './logsModel';
import {
calculateLogsLabelStats,
calculateStats,
checkLogsError,
escapeUnescapedString,
findMatchingRow,
getLogLevel,
getLogLevelFromKey,
getLogsVolumeMaximumRange,
@ -479,3 +482,44 @@ describe('escapeUnescapedString', () => {
expect(escapeUnescapedString(`\\r\\n|\\n|\\t|\\r`)).toBe(`\n|\n|\t|\n`);
});
});
// Unit tests for findMatchingRow() (utils.ts), used to drop exact duplicate
// rows when merging infinite-scroll responses. A match requires the same
// dataFrame.refId plus either an equal rowId or an equal entry + timeEpochNs.
describe('findMatchingRow', () => {
  // Builds a LogsModel (and thus LogRowModel[]) from raw data frames.
  function setup(frames: DataFrame[]) {
    return logSeriesToLogsModel(frames);
  }
  it('ignores rows from different queries', () => {
    const { logFrameA, logFrameB } = getMockFrames();
    logFrameA.refId = 'A';
    logFrameB.refId = 'B';
    const logsModel = setup([logFrameA, logFrameB]);
    const rows = logsModel?.rows || [];
    for (const row of rows) {
      // Same content as an existing row, but a refId ('Z') that matches
      // no source frame — must never match.
      const targetRow = { ...row, dataFrame: { ...logFrameA, refId: 'Z' } };
      expect(findMatchingRow(targetRow, rows)).toBe(undefined);
    }
  });
  it('matches rows by rowId', () => {
    const { logFrameA, logFrameB } = getMockFrames();
    const logsModel = setup([logFrameA, logFrameB]);
    const rows = logsModel?.rows || [];
    for (const row of rows) {
      // Randomized entry/timeEpochNs: only the rowId can produce the match.
      const targetRow = { ...row, entry: `${Math.random()}`, timeEpochNs: `${Math.ceil(Math.random() * 1000000)}` };
      expect(findMatchingRow(targetRow, rows)).toBeDefined();
    }
  });
  it('matches rows by entry and nanosecond time', () => {
    const { logFrameA, logFrameB } = getMockFrames();
    const logsModel = setup([logFrameA, logFrameB]);
    const rows = logsModel?.rows || [];
    for (const row of rows) {
      // No rowId: the entry + timeEpochNs signature must produce the match.
      const targetRow = { ...row, rowId: undefined };
      expect(findMatchingRow(targetRow, rows)).toBeDefined();
    }
  });
});

@ -297,3 +297,14 @@ export const copyText = async (text: string, buttonRef: React.MutableRefObject<E
export function targetIsElement(target: EventTarget | null): target is Element {
return target instanceof Element;
}
/**
 * Finds the first row in `rows` that duplicates `target`, or undefined.
 * Rows only match within the same query (same dataFrame.refId); within it,
 * a duplicate is identified either by an equal rowId (when both rows have
 * one) or by an identical entry + timeEpochNs pair.
 *
 * @param target - the row to look for
 * @param rows - the rows already collected
 * @returns the matching row, or undefined when none duplicates `target`
 */
export function findMatchingRow(target: LogRowModel, rows: LogRowModel[]) {
  return rows.find((candidate) => {
    if (candidate.dataFrame.refId !== target.dataFrame.refId) {
      return false;
    }
    const idsMatch = target.rowId && candidate.rowId && target.rowId === candidate.rowId;
    const signaturesMatch = candidate.entry === target.entry && candidate.timeEpochNs === target.timeEpochNs;
    return idsMatch || signaturesMatch;
  });
}

Loading…
Cancel
Save