Table: Keep expanded rows persistent when data changes if it has unique ID (#80031)

pull/81076/head
Andrej Ocenas 2 years ago committed by GitHub
parent 3203f1cf39
commit 59595c7318
Changed files (number of changed lines):

1. .betterer.results (3)
2. docs/sources/developers/kinds/composable/testdata/dataquery/schema-reference.md (16)
3. packages/grafana-data/src/dataframe/index.ts (1)
4. packages/grafana-data/src/dataframe/utils.test.ts (31)
5. packages/grafana-data/src/dataframe/utils.ts (25)
6. packages/grafana-data/src/types/dataFrame.ts (6)
7. packages/grafana-schema/src/raw/composable/testdata/dataquery/x/TestDataDataQuery_types.gen.ts (2)
8. packages/grafana-ui/src/components/Table/RowsList.tsx (6)
9. packages/grafana-ui/src/components/Table/Table.mdx (6)
10. packages/grafana-ui/src/components/Table/Table.tsx (40)
11. packages/grafana-ui/src/components/Table/hooks.ts (48)
12. packages/grafana-ui/src/components/Table/reducer.ts (7)
13. packages/grafana-ui/src/components/Table/types.ts (6)
14. pkg/tsdb/grafana-testdata-datasource/kinds/dataquery/types_dataquery_gen.go (1)
15. public/app/plugins/datasource/grafana-testdata-datasource/components/StreamingClientEditor.tsx (52)
16. public/app/plugins/datasource/grafana-testdata-datasource/dataquery.cue (2)
17. public/app/plugins/datasource/grafana-testdata-datasource/dataquery.gen.ts (2)
18. public/app/plugins/datasource/grafana-testdata-datasource/package.json (4)
19. public/app/plugins/datasource/grafana-testdata-datasource/runStreams.ts (103)
20. public/app/plugins/datasource/tempo/resultTransformer.ts (7)
21. yarn.lock (2)

@ -737,7 +737,8 @@ exports[`better eslint`] = {
],
"packages/grafana-ui/src/components/Table/Table.tsx:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"]
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Do not use any type assertions.", "2"]
],
"packages/grafana-ui/src/components/Table/TableCell.tsx:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],

@ -97,14 +97,14 @@ title: TestDataDataQuery kind
### StreamingQuery
| Property | Type | Required | Default | Description |
|----------|---------|----------|---------|-------------------------------------------------|
| `noise` | integer | **Yes** | | |
| `speed` | integer | **Yes** | | |
| `spread` | integer | **Yes** | | |
| `type` | string | **Yes** | | Possible values are: `signal`, `logs`, `fetch`. |
| `bands` | integer | No | | |
| `url` | string | No | | |
| Property | Type | Required | Default | Description |
|----------|---------|----------|---------|-----------------------------------------------------------|
| `noise` | integer | **Yes** | | |
| `speed` | integer | **Yes** | | |
| `spread` | integer | **Yes** | | |
| `type` | string | **Yes** | | Possible values are: `signal`, `logs`, `fetch`, `traces`. |
| `bands` | integer | No | | |
| `url` | string | No | | |
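For illustration, a test data query that exercises the new `traces` stream type could look like the sketch below (the `scenarioId`/`stream` shape follows the TestData query schema; the concrete numbers are arbitrary):

```ts
// Illustrative only: a streaming_client query using the new `traces` type.
const query = {
  refId: 'A',
  scenarioId: 'streaming_client',
  stream: {
    type: 'traces' as const,
    speed: 250, // interval in ms between pushed trace rows (see runTracesStream)
    spread: 0,
    noise: 0,
  },
};
```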
### USAQuery

@ -14,5 +14,6 @@ export {
isTimeSeriesFrames,
isTimeSeriesField,
getRowUniqueId,
addRow,
} from './utils';
export { StreamingDataFrame, StreamingFrameAction, type StreamingFrameOptions, closestIdx } from './StreamingDataFrame';

@ -1,7 +1,7 @@
import { FieldType } from '../types';
import { toDataFrame } from './processDataFrame';
import { anySeriesWithTimeField } from './utils';
import { createDataFrame, toDataFrame } from './processDataFrame';
import { anySeriesWithTimeField, addRow } from './utils';
describe('anySeriesWithTimeField', () => {
describe('single frame', () => {
@ -77,3 +77,30 @@ describe('anySeriesWithTimeField', () => {
});
});
});
describe('addRow', () => {
const frame = createDataFrame({
fields: [
{ name: 'name', type: FieldType.string },
{ name: 'date', type: FieldType.time },
{ name: 'number', type: FieldType.number },
],
});
const date = Date.now();
it('adds row to data frame as object', () => {
addRow(frame, { name: 'A', date, number: 1 });
expect(frame.fields[0].values[0]).toBe('A');
expect(frame.fields[1].values[0]).toBe(date);
expect(frame.fields[2].values[0]).toBe(1);
expect(frame.length).toBe(1);
});
it('adds row to data frame as array', () => {
addRow(frame, ['B', date, 42]);
expect(frame.fields[0].values[1]).toBe('B');
expect(frame.fields[1].values[1]).toBe(date);
expect(frame.fields[2].values[1]).toBe(42);
expect(frame.length).toBe(2);
});
});

@ -98,3 +98,28 @@ export function getRowUniqueId(dataFrame: DataFrame, rowIndex: number) {
}
return dataFrame.meta.uniqueRowIdFields.map((fieldIndex) => dataFrame.fields[fieldIndex].values[rowIndex]).join('-');
}
/**
* Simple helper to add values to a data frame. It doesn't do any validation, so make sure you are adding the right
* types of values.
* @param dataFrame
* @param row Either an array of values or an object with keys that match the field names.
*/
export function addRow(dataFrame: DataFrame, row: Record<string, unknown> | unknown[]) {
if (row instanceof Array) {
for (let i = 0; i < row.length; i++) {
dataFrame.fields[i].values.push(row[i]);
}
} else {
for (const field of dataFrame.fields) {
field.values.push(row[field.name]);
}
}
try {
dataFrame.length++;
} catch (e) {
// Unfortunate, but even though the DataFrame interface defines `length`, some implementations of DataFrame only
// have a `length` getter. In that case the assignment throws, so we just skip it and assume the getter does not
// need any external updating.
}
}
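A minimal usage sketch of the two helpers together (field names are illustrative; the imports are the `@grafana/data` exports touched in this diff):

```ts
import { FieldType, addRow, createDataFrame, getRowUniqueId } from '@grafana/data';

const frame = createDataFrame({
  fields: [
    { name: 'TraceID', type: FieldType.string },
    { name: 'Duration', type: FieldType.number },
  ],
});
// Field 0 (TraceID) identifies a row across data updates.
frame.meta = { uniqueRowIdFields: [0] };

addRow(frame, { TraceID: 'abc', Duration: 1500 }); // object keyed by field name
addRow(frame, ['def', 900]); // or a positional array

getRowUniqueId(frame, 0); // 'abc'
getRowUniqueId(frame, 1); // 'def'
```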

@ -13,13 +13,17 @@ export enum FieldType {
number = 'number',
string = 'string',
boolean = 'boolean',
// Used to detect that the value is some kind of trace data to help with the visualisation and processing.
trace = 'trace',
geo = 'geo',
enum = 'enum',
other = 'other', // Object, Array, etc
frame = 'frame', // DataFrame
nestedFrames = 'nestedFrames', // @alpha Nested DataFrames
// @alpha Nested DataFrames. This is for example used with tables where expanding a row will show a nested table.
// The value should be DataFrame[] even if it is a single frame.
nestedFrames = 'nestedFrames',
}
/**

@ -50,7 +50,7 @@ export interface StreamingQuery {
noise: number;
speed: number;
spread: number;
type: ('signal' | 'logs' | 'fetch');
type: ('signal' | 'logs' | 'fetch' | 'traces');
url?: string;
}

@ -206,7 +206,7 @@ export const RowsList = (props: RowsListProps) => {
prepareRow(row);
const expandedRowStyle = tableState.expanded[row.index] ? css({ '&:hover': { background: 'inherit' } }) : {};
const expandedRowStyle = tableState.expanded[row.id] ? css({ '&:hover': { background: 'inherit' } }) : {};
if (rowHighlightIndex !== undefined && row.index === rowHighlightIndex) {
style = { ...style, backgroundColor: theme.components.table.rowHoverBackground };
@ -220,7 +220,7 @@ export const RowsList = (props: RowsListProps) => {
onMouseLeave={onRowLeave}
>
{/*add the nested data to the DOM first to prevent a 1px border CSS issue on the last cell of the row*/}
{nestedDataField && tableState.expanded[row.index] && (
{nestedDataField && tableState.expanded[row.id] && (
<ExpandedRow
nestedData={nestedDataField}
tableStyles={tableStyles}
@ -265,7 +265,7 @@ export const RowsList = (props: RowsListProps) => {
const getItemSize = (index: number): number => {
const indexForPagination = rowIndexForPagination(index);
const row = rows[indexForPagination];
if (tableState.expanded[row.index] && nestedDataField) {
if (tableState.expanded[row.id] && nestedDataField) {
return getExpandedRowHeight(nestedDataField, index, tableStyles);
}

@ -13,6 +13,12 @@ This nested fields values can contain an array of one or more dataframes. Each o
For each dataframe and index in the nested field, the dataframe will be rendered as one or more sub-tables below the main dataframe row at that index.
### Unique rowId
In some cases it makes sense to persist the opened/closed state of the sub-tables, for example with streaming queries where a user may manipulate the state while additional data is still loading. In such cases, use the `dataframe.meta.uniqueRowIdFields` property to specify which fields make up a unique row ID; the table will then use it to persist the state across data changes.
## Custom dataframe properties
Each dataframe also supports using the following custom property under `dataframe.meta.custom`:
- **noHeader**: boolean - Hides that sub-table's header.
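A hedged end-to-end sketch of a frame that uses both of the above (names and values are illustrative, not taken from this commit):

```ts
import { FieldType, createDataFrame } from '@grafana/data';

const subTable = createDataFrame({
  fields: [
    { name: 'SpanID', type: FieldType.string, values: ['span-1'] },
    { name: 'duration', type: FieldType.number, values: [12] },
  ],
});
subTable.meta = { custom: { noHeader: true } }; // hide the sub-table header

const main = createDataFrame({
  fields: [
    { name: 'TraceID', type: FieldType.string, values: ['abc'] },
    // One DataFrame[] per row; rendered as sub-tables when the row is expanded.
    { name: 'nested', type: FieldType.nestedFrames, values: [[subTable]] },
  ],
});
main.meta = {
  preferredVisualisationType: 'table',
  // Field 0 (TraceID) is the stable row ID, so expanded rows survive data updates.
  uniqueRowIdFields: [0],
};
```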

@ -10,7 +10,7 @@ import {
} from 'react-table';
import { VariableSizeList } from 'react-window';
import { FieldType, ReducerID } from '@grafana/data';
import { FieldType, ReducerID, getRowUniqueId } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { TableCellHeight } from '@grafana/schema';
@ -129,8 +129,12 @@ export const Table = memo((props: Props) => {
},
});
const options: any = useMemo(
() => ({
const hasUniqueId = !!data.meta?.uniqueRowIdFields?.length;
const options: any = useMemo(() => {
// This is a bit hard to type with the react-table types here; the reducer does not actually match the
// TableOptions type.
const options: any = {
columns: memoizedColumns,
data: memoizedData,
disableResizing: !resizable,
@ -139,12 +143,24 @@ export const Table = memo((props: Props) => {
initialState: getInitialState(initialSortBy, memoizedColumns),
autoResetFilters: false,
sortTypes: {
number: sortNumber, // the builtin number type on react-table does not handle NaN values
'alphanumeric-insensitive': sortCaseInsensitive, // should be replace with the builtin string when react-table is upgraded, see https://github.com/tannerlinsley/react-table/pull/3235
// the builtin number type on react-table does not handle NaN values
number: sortNumber,
// should be replaced with the builtin string when react-table is upgraded,
// see https://github.com/tannerlinsley/react-table/pull/3235
'alphanumeric-insensitive': sortCaseInsensitive,
},
}),
[initialSortBy, memoizedColumns, memoizedData, resizable, stateReducer]
);
};
if (hasUniqueId) {
// `row` here is always 0 because we don't pass the real data to react-table, just a dummy array filled with 0s.
// See the memoizedData variable above.
options.getRowId = (row: Record<string, unknown>, relativeIndex: number) => getRowUniqueId(data, relativeIndex);
// If we have a unique field, we assume it is globally unique and we don't need to reset the expanded state when
// data changes.
options.autoResetExpanded = false;
}
return options;
}, [initialSortBy, memoizedColumns, memoizedData, resizable, stateReducer, hasUniqueId, data]);
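For context, a standalone sketch of what these react-table options do (the component, column, and data shapes are invented for illustration):

```tsx
import React from 'react';
import { useExpanded, useTable } from 'react-table';

function ExpandableTable({ columns, data }: { columns: any[]; data: Array<{ traceId: string }> }) {
  // Typed loosely for the same reason as in the diff: plugin options such as
  // autoResetExpanded are awkward to express with the stock react-table types.
  const options: any = {
    columns,
    data,
    // Key state.expanded by a stable ID instead of the positional index, so
    // already-expanded rows stay expanded when new data arrives.
    getRowId: (row: { traceId: string }) => row.traceId,
    // Don't collapse everything whenever `data` changes.
    autoResetExpanded: false,
  };
  const { rows, prepareRow } = useTable(options, useExpanded);
  return (
    <div>
      {rows.map((row) => {
        prepareRow(row);
        // row.id is now the traceId, not the row index.
        return <div key={row.id}>{row.id}</div>;
      })}
    </div>
  );
}
```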
const {
getTableProps,
@ -164,12 +180,6 @@ export const Table = memo((props: Props) => {
const extendedState = state as GrafanaTableState;
toggleAllRowsExpandedRef.current = toggleAllRowsExpanded;
const expandedRowsRepr = JSON.stringify(Object.keys(state.expanded));
useEffect(() => {
// Reset the list size cache when the expanded rows change
listRef.current?.resetAfterIndex(0);
}, [expandedRowsRepr]);
/*
Footer value calculation is being moved in the Table component and the footerValues prop will be deprecated.
The footerValues prop is still used in the Table component for backwards compatibility. Adding the
@ -226,7 +236,7 @@ export const Table = memo((props: Props) => {
setPageSize(pageSize);
}, [pageSize, setPageSize]);
useResetVariableListSizeCache(extendedState, listRef, data);
useResetVariableListSizeCache(extendedState, listRef, data, hasUniqueId);
useFixScrollbarContainer(variableSizeListScrollbarRef, tableDivRef);
const onNavigate = useCallback(

@ -23,7 +23,7 @@ export function useFixScrollbarContainer(
// Select Table custom scrollbars
const tableScrollbarView = tableDivRef.current.firstChild;
//If they exists, move the scrollbar element to the Table container scope
//If they exist, move the scrollbar element to the Table container scope
if (tableScrollbarView && listVerticalScrollbarHTML) {
listVerticalScrollbarHTML.remove();
if (tableScrollbarView instanceof HTMLElement) {
@ -36,41 +36,47 @@ export function useFixScrollbarContainer(
}
/**
react-table caches the height of cells so we need to reset them when expanding/collapsing rows
We need to take the minimum of the current expanded indexes and the previous expandedIndexes array to account
for collapsed rows, since they disappear from expandedIndexes but still keep their expanded height
react-table caches the height of cells, so we need to reset them when expanding/collapsing rows.
We use `lastExpandedOrCollapsedIndex` since collapsed rows disappear from `expandedIndexes` but still keep their expanded
height.
*/
export function useResetVariableListSizeCache(
extendedState: GrafanaTableState,
listRef: React.RefObject<VariableSizeList>,
data: DataFrame
data: DataFrame,
hasUniqueId: boolean
) {
// Make sure we trigger the reset when keys change in any way
const expandedRowsRepr = JSON.stringify(Object.keys(extendedState.expanded));
useEffect(() => {
if (extendedState.lastExpandedIndex !== undefined) {
// Gets the expanded row with the lowest index. Needed to reset all expanded row heights from that index on
let resetIndex = extendedState.lastExpandedIndex;
const expandedIndexes = Object.keys(extendedState.expanded);
if (expandedIndexes.length > 0) {
const lowestExpandedIndex = parseInt(expandedIndexes[0], 10);
if (!isNaN(lowestExpandedIndex)) {
resetIndex = Math.min(resetIndex, lowestExpandedIndex);
}
// By default, reset all rows
let resetIndex = 0;
// If we have a unique field, extendedState.expanded keys are not row indexes but IDs, so instead of trying to
// find the correct index we just reset the whole table.
if (!hasUniqueId) {
// If we don't have one, we reset from the last changed index.
if (Number.isFinite(extendedState.lastExpandedOrCollapsedIndex)) {
resetIndex = extendedState.lastExpandedOrCollapsedIndex!;
}
const index =
// Account for paging.
resetIndex =
extendedState.pageIndex === 0
? resetIndex - 1
: resetIndex - extendedState.pageIndex - extendedState.pageIndex * extendedState.pageSize;
listRef.current?.resetAfterIndex(Math.max(index, 0));
return;
}
listRef.current?.resetAfterIndex(Math.max(resetIndex, 0));
return;
}, [
extendedState.lastExpandedIndex,
extendedState.toggleRowExpandedCounter,
extendedState.pageIndex,
extendedState.lastExpandedOrCollapsedIndex,
extendedState.pageSize,
extendedState.pageIndex,
listRef,
data,
extendedState.expanded,
expandedRowsRepr,
hasUniqueId,
]);
}
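For background, a minimal sketch of the react-window behaviour this hook works around (the hook and parameter names below are illustrative):

```tsx
import React, { useEffect } from 'react';
import { VariableSizeList } from 'react-window';

// VariableSizeList caches the value returned by itemSize for each index. When a
// row's height changes (it was expanded or collapsed), that cache has to be
// cleared from the first affected index onward, or rows keep their stale heights.
function useResetRowHeights(listRef: React.RefObject<VariableSizeList>, expandedKeys: string[]) {
  const expandedRepr = JSON.stringify(expandedKeys);
  useEffect(() => {
    // Index 0 re-measures everything; a larger index keeps the cached heights
    // of the rows above the change.
    listRef.current?.resetAfterIndex(0);
  }, [expandedRepr, listRef]);
}
```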

@ -50,8 +50,7 @@ export function useTableStateReducer({ onColumnResize, onSortByChange, data }: P
if (action.id) {
return {
...newState,
lastExpandedIndex: parseInt(action.id, 10),
toggleRowExpandedCounter: newState.toggleRowExpandedCounter + 1,
lastExpandedOrCollapsedIndex: parseInt(action.id, 10),
};
}
}
@ -67,9 +66,7 @@ export function getInitialState(
initialSortBy: Props['initialSortBy'],
columns: GrafanaTableColumn[]
): Partial<GrafanaTableState> {
const state: Partial<GrafanaTableState> = {
toggleRowExpandedCounter: 0,
};
const state: Partial<GrafanaTableState> = {};
if (initialSortBy) {
state.sortBy = [];
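For context, a simplified sketch of how a custom react-table `stateReducer` like the one in this file hooks into the expand toggle (the surrounding reducer plumbing is omitted):

```ts
// Intercept react-table's `toggleRowExpanded` action and remember which row was
// toggled last, so the list height cache can later be reset from that index on.
function stateReducer(newState: any, action: { type: string; id?: string }) {
  if (action.type === 'toggleRowExpanded' && action.id !== undefined) {
    return { ...newState, lastExpandedOrCollapsedIndex: parseInt(action.id, 10) };
  }
  return newState;
}
```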

@ -67,8 +67,10 @@ export interface TableFooterCalc {
}
export interface GrafanaTableState extends TableState {
lastExpandedIndex?: number;
toggleRowExpandedCounter: number;
// We manually track this to know where to reset the row heights. This is needed because react-table removes the
// collapsed IDs/indexes from the state.expanded map, so when collapsing we would otherwise have to diff the current
// and previous state.expanded to know what changed.
lastExpandedOrCollapsedIndex?: number;
}
export interface GrafanaTableRow extends Row, UseExpandedRowProps<{}> {}

@ -21,6 +21,7 @@ const (
StreamingQueryTypeFetch StreamingQueryType = "fetch"
StreamingQueryTypeLogs StreamingQueryType = "logs"
StreamingQueryTypeSignal StreamingQueryType = "signal"
StreamingQueryTypeTraces StreamingQueryType = "traces"
)
// Defines values for ErrorType.

@ -16,6 +16,7 @@ const types = [
{ value: 'signal', label: 'Signal' },
{ value: 'logs', label: 'Logs' },
{ value: 'fetch', label: 'Fetch' },
{ value: 'traces', label: 'Traces' },
];
export const StreamingClientEditor = ({ onChange, query }: EditorProps) => {
@ -29,29 +30,42 @@ export const StreamingClientEditor = ({ onChange, query }: EditorProps) => {
onChange({ target: { name, value: Number(value) } });
};
const streamType = query?.stream?.type || 'signal';
const fields =
streamType === 'signal'
? streamingClientFields
: ['logs', 'traces'].includes(streamType)
? [streamingClientFields[0]] // speed
: [];
return (
<InlineFieldRow>
<InlineField label="Type" labelWidth={14}>
<Select width={32} onChange={onSelectChange} defaultValue={types[0]} options={types} />
<Select
width={32}
onChange={onSelectChange}
defaultValue={types[0]}
options={types}
value={query?.stream?.type}
/>
</InlineField>
{query?.stream?.type === 'signal' &&
streamingClientFields.map(({ label, id, min, step, placeholder }) => {
return (
<InlineField label={label} labelWidth={14} key={id}>
<Input
width={32}
type="number"
id={`stream.${id}-${query.refId}`}
name={id}
min={min}
step={step}
value={query.stream?.[id]}
placeholder={placeholder}
onChange={onInputChange}
/>
</InlineField>
);
})}
{fields.map(({ label, id, min, step, placeholder }) => {
return (
<InlineField label={label} labelWidth={14} key={id}>
<Input
width={32}
type="number"
id={`stream.${id}-${query.refId}`}
name={id}
min={min}
step={step}
value={query.stream?.[id]}
placeholder={placeholder}
onChange={onInputChange}
/>
</InlineField>
);
})}
{query?.stream?.type === 'fetch' && (
<InlineField label="URL" labelWidth={14} grow>

@ -55,7 +55,7 @@ composableKinds: DataQuery: {
#TestDataQueryType: "random_walk" | "slow_query" | "random_walk_with_error" | "random_walk_table" | "exponential_heatmap_bucket_data" | "linear_heatmap_bucket_data" | "no_data_points" | "datapoints_outside_range" | "csv_metric_values" | "predictable_pulse" | "predictable_csv_wave" | "streaming_client" | "simulation" | "usa" | "live" | "grafana_api" | "arrow" | "annotations" | "table_static" | "server_error_500" | "logs" | "node_graph" | "flame_graph" | "raw_frame" | "csv_file" | "csv_content" | "trace" | "manual_entry" | "variables-query" @cuetsy(kind="enum", memberNames="RandomWalk|SlowQuery|RandomWalkWithError|RandomWalkTable|ExponentialHeatmapBucketData|LinearHeatmapBucketData|NoDataPoints|DataPointsOutsideRange|CSVMetricValues|PredictablePulse|PredictableCSVWave|StreamingClient|Simulation|USA|Live|GrafanaAPI|Arrow|Annotations|TableStatic|ServerError500|Logs|NodeGraph|FlameGraph|RawFrame|CSVFile|CSVContent|Trace|ManualEntry|VariablesQuery")
#StreamingQuery: {
type: "signal" | "logs" | "fetch"
type: "signal" | "logs" | "fetch" | "traces"
speed: int32
spread: int32
noise: int32

@ -47,7 +47,7 @@ export interface StreamingQuery {
noise: number;
speed: number;
spread: number;
type: ('signal' | 'logs' | 'fetch');
type: ('signal' | 'logs' | 'fetch' | 'traces');
url?: string;
}

@ -15,7 +15,8 @@
"react": "18.2.0",
"react-use": "17.4.3",
"rxjs": "7.8.1",
"tslib": "2.6.2"
"tslib": "2.6.2",
"uuid": "9.0.0"
},
"devDependencies": {
"@grafana/e2e-selectors": "10.4.0-pre",
@ -27,6 +28,7 @@
"@types/node": "20.11.5",
"@types/react": "18.2.48",
"@types/testing-library__jest-dom": "5.14.9",
"@types/uuid": "9.0.2",
"ts-node": "10.9.2",
"webpack": "5.89.0"
},

@ -1,5 +1,6 @@
import { defaults } from 'lodash';
import { Observable } from 'rxjs';
import { v4 as uuidv4 } from 'uuid';
import {
DataQueryRequest,
@ -12,6 +13,10 @@ import {
DataFrameSchema,
DataFrameData,
StreamingDataFrame,
createDataFrame,
addRow,
getDisplayProcessor,
createTheme,
} from '@grafana/data';
import { getRandomLine } from './LogIpsum';
@ -27,14 +32,15 @@ export const defaultStreamQuery: StreamingQuery = {
export function runStream(target: TestData, req: DataQueryRequest<TestData>): Observable<DataQueryResponse> {
const query = defaults(target.stream, defaultStreamQuery);
if ('signal' === query.type) {
return runSignalStream(target, query, req);
}
if ('logs' === query.type) {
return runLogsStream(target, query, req);
}
if ('fetch' === query.type) {
return runFetchStream(target, query, req);
switch (query.type) {
case 'signal':
return runSignalStream(target, query, req);
case 'logs':
return runLogsStream(target, query, req);
case 'fetch':
return runFetchStream(target, query, req);
case 'traces':
return runTracesStream(target, query, req);
}
throw new Error(`Unknown Stream Type: ${query.type}`);
}
@ -45,7 +51,7 @@ export function runSignalStream(
req: DataQueryRequest<TestData>
): Observable<DataQueryResponse> {
return new Observable<DataQueryResponse>((subscriber) => {
const streamId = `signal-${req.panelId}-${target.refId}`;
const streamId = `signal-${req.panelId || 'explore'}-${target.refId}`;
const maxDataPoints = req.maxDataPoints || 1000;
const schema: DataFrameSchema = {
@ -127,7 +133,7 @@ export function runLogsStream(
req: DataQueryRequest<TestData>
): Observable<DataQueryResponse> {
return new Observable<DataQueryResponse>((subscriber) => {
const streamId = `logs-${req.panelId}-${target.refId}`;
const streamId = `logs-${req.panelId || 'explore'}-${target.refId}`;
const maxDataPoints = req.maxDataPoints || 1000;
const data = new CircularDataFrame({
@ -151,6 +157,7 @@ export function runLogsStream(
subscriber.next({
data: [data],
key: streamId,
state: LoadingState.Streaming,
});
timeoutId = setTimeout(pushNextEvent, speed);
@ -172,7 +179,7 @@ export function runFetchStream(
req: DataQueryRequest<TestData>
): Observable<DataQueryResponse> {
return new Observable<DataQueryResponse>((subscriber) => {
const streamId = `fetch-${req.panelId}-${target.refId}`;
const streamId = `fetch-${req.panelId || 'explore'}-${target.refId}`;
const maxDataPoints = req.maxDataPoints || 1000;
let data = new CircularDataFrame({
@ -243,3 +250,77 @@ export function runFetchStream(
};
});
}
export function runTracesStream(
target: TestData,
query: StreamingQuery,
req: DataQueryRequest<TestData>
): Observable<DataQueryResponse> {
return new Observable<DataQueryResponse>((subscriber) => {
const streamId = `traces-${req.panelId || 'explore'}-${target.refId}`;
const data = createMainTraceFrame(target, req.maxDataPoints);
let timeoutId: ReturnType<typeof setTimeout>;
const pushNextEvent = () => {
const subframe = createTraceSubFrame();
addRow(subframe, [uuidv4(), Date.now(), 'Grafana', 1500]);
addRow(data, [uuidv4(), Date.now(), 'Grafana', 'HTTP GET /explore', 1500, [subframe]]);
subscriber.next({
data: [data],
key: streamId,
state: LoadingState.Streaming,
});
timeoutId = setTimeout(pushNextEvent, query.speed);
};
// Send first event in 5ms
setTimeout(pushNextEvent, 5);
return () => {
console.log('unsubscribing from stream ' + streamId);
clearTimeout(timeoutId);
};
});
}
function createMainTraceFrame(target: TestData, maxDataPoints = 1000) {
const data = new CircularDataFrame({
append: 'head',
capacity: maxDataPoints,
});
data.refId = target.refId;
data.name = target.alias || 'Traces ' + target.refId;
data.addField({ name: 'TraceID', type: FieldType.string });
data.addField({ name: 'Start time', type: FieldType.time });
data.addField({ name: 'Service', type: FieldType.string });
data.addField({ name: 'Name', type: FieldType.string });
data.addField({ name: 'Duration', type: FieldType.number, config: { unit: 'ms' } });
data.addField({ name: 'nested', type: FieldType.nestedFrames });
data.meta = {
preferredVisualisationType: 'table',
uniqueRowIdFields: [0],
};
return data;
}
function createTraceSubFrame() {
const frame = createDataFrame({
fields: [
{ name: 'SpanID', type: FieldType.string },
{ name: 'Start time', type: FieldType.time },
{ name: 'service.name', type: FieldType.string },
{ name: 'duration', type: FieldType.number },
],
});
// TODO: this should be removed later, but right now there is an issue where applyFieldOverrides does not consider
// nested frames.
for (const f of frame.fields) {
f.display = getDisplayProcessor({ field: f, theme });
}
return frame;
}
const theme = createTheme();

@ -634,6 +634,11 @@ export function formatTraceQLResponse(
return createTableFrameFromTraceQlQuery(data, instanceSettings);
}
/**
* Create a data frame, adding the spans for each trace into a sub-table.
* @param data
* @param instanceSettings
*/
export function createTableFrameFromTraceQlQuery(
data: TraceSearchMetadata[],
instanceSettings: DataSourceInstanceSettings
@ -697,6 +702,7 @@ export function createTableFrameFromTraceQlQuery(
],
meta: {
preferredVisualisationType: 'table',
uniqueRowIdFields: [0],
},
});
@ -993,6 +999,7 @@ const traceSubFrame = (
},
});
// TODO: this should be done in `applyFieldOverrides` instead recursively for the nested `DataFrames`
const theme = createTheme();
for (const field of subFrame.fields) {
field.display = getDisplayProcessor({ field, theme });

@ -2993,6 +2993,7 @@ __metadata:
"@types/node": "npm:20.11.5"
"@types/react": "npm:18.2.48"
"@types/testing-library__jest-dom": "npm:5.14.9"
"@types/uuid": "npm:9.0.2"
lodash: "npm:4.17.21"
micro-memoize: "npm:^4.1.2"
react: "npm:18.2.0"
@ -3000,6 +3001,7 @@ __metadata:
rxjs: "npm:7.8.1"
ts-node: "npm:10.9.2"
tslib: "npm:2.6.2"
uuid: "npm:9.0.0"
webpack: "npm:5.89.0"
peerDependencies:
"@grafana/runtime": "*"
