DataFrame: convert from row based to a columnar value format (#18391)

pull/17673/head^2
Ryan McKinley 6 years ago committed by GitHub
parent 350b9a9494
commit e59bae55d9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 35
      packages/grafana-data/src/types/data.ts
  2. 110
      packages/grafana-data/src/types/dataFrame.ts
  3. 2
      packages/grafana-data/src/types/displayValue.ts
  4. 1
      packages/grafana-data/src/types/index.ts
  5. 149
      packages/grafana-data/src/utils/__snapshots__/csv.test.ts.snap
  6. 37
      packages/grafana-data/src/utils/csv.test.ts
  7. 217
      packages/grafana-data/src/utils/csv.ts
  8. 89
      packages/grafana-data/src/utils/dataFrameHelper.test.ts
  9. 232
      packages/grafana-data/src/utils/dataFrameHelper.ts
  10. 76
      packages/grafana-data/src/utils/dataFrameView.test.ts
  11. 67
      packages/grafana-data/src/utils/dataFrameView.ts
  12. 71
      packages/grafana-data/src/utils/fieldCache.test.ts
  13. 76
      packages/grafana-data/src/utils/fieldCache.ts
  14. 94
      packages/grafana-data/src/utils/fieldReducer.test.ts
  15. 116
      packages/grafana-data/src/utils/fieldReducer.ts
  16. 4
      packages/grafana-data/src/utils/index.ts
  17. 24
      packages/grafana-data/src/utils/logs.ts
  18. 83
      packages/grafana-data/src/utils/processDataFrame.test.ts
  19. 273
      packages/grafana-data/src/utils/processDataFrame.ts
  20. 43
      packages/grafana-data/src/utils/vector.test.ts
  21. 133
      packages/grafana-data/src/utils/vector.ts
  22. 4
      packages/grafana-ui/src/components/SingleStatShared/FieldDisplayEditor.tsx
  23. 6
      packages/grafana-ui/src/components/SingleStatShared/FieldPropertiesEditor.tsx
  24. 17
      packages/grafana-ui/src/components/Table/Table.story.tsx
  25. 19
      packages/grafana-ui/src/components/Table/Table.tsx
  26. 8
      packages/grafana-ui/src/components/Table/TableCellBuilder.tsx
  27. 4
      packages/grafana-ui/src/components/Table/TableInputCSV.tsx
  28. 8
      packages/grafana-ui/src/components/Table/examples.ts
  29. 13
      packages/grafana-ui/src/types/datasource.ts
  30. 4
      packages/grafana-ui/src/utils/displayValue.test.ts
  31. 23
      packages/grafana-ui/src/utils/displayValue.ts
  32. 19
      packages/grafana-ui/src/utils/fieldDisplay.test.ts
  33. 67
      packages/grafana-ui/src/utils/fieldDisplay.ts
  34. 20
      packages/grafana-ui/src/utils/flotPairs.test.ts
  35. 22
      packages/grafana-ui/src/utils/flotPairs.ts
  36. 104
      public/app/core/logs_model.ts
  37. 69
      public/app/core/specs/logs_model.test.ts
  38. 16
      public/app/features/dashboard/panel_editor/QueryEditorRow.test.ts
  39. 7
      public/app/features/dashboard/state/PanelQueryRunner.test.ts
  40. 15
      public/app/features/dashboard/state/PanelQueryState.test.ts
  41. 12
      public/app/features/explore/state/epics/processQueryResultsEpic.test.ts
  42. 13
      public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts
  43. 65
      public/app/plugins/datasource/elasticsearch/elastic_response.ts
  44. 36
      public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts
  45. 13
      public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts
  46. 12
      public/app/plugins/datasource/input/InputConfigEditor.tsx
  47. 11
      public/app/plugins/datasource/input/InputDatasource.test.ts
  48. 40
      public/app/plugins/datasource/input/InputDatasource.ts
  49. 20
      public/app/plugins/datasource/input/InputQueryEditor.tsx
  50. 6
      public/app/plugins/datasource/input/types.ts
  51. 8
      public/app/plugins/datasource/input/utils.ts
  52. 2
      public/app/plugins/datasource/loki/datasource.test.ts
  53. 11
      public/app/plugins/datasource/loki/datasource.ts
  54. 8
      public/app/plugins/datasource/loki/result_transformer.test.ts
  55. 25
      public/app/plugins/datasource/loki/result_transformer.ts
  56. 118
      public/app/plugins/datasource/testdata/StreamHandler.ts
  57. 4
      public/app/plugins/panel/bargauge/BarGaugePanelEditor.tsx
  58. 4
      public/app/plugins/panel/gauge/GaugePanelEditor.tsx
  59. 27
      public/app/plugins/panel/graph/data_processor.ts
  60. 10
      public/app/plugins/panel/graph/specs/data_processor.test.ts
  61. 21
      public/app/plugins/panel/graph2/getGraphSeriesModel.ts
  62. 4
      public/app/plugins/panel/piechart/PieChartPanelEditor.tsx
  63. 4
      public/app/plugins/panel/singlestat2/SingleStatEditor.tsx

@ -1,6 +1,3 @@
import { Threshold } from './threshold';
import { ValueMapping } from './valueMapping';
export enum LoadingState {
NotStarted = 'NotStarted',
Loading = 'Loading',
@ -9,14 +6,6 @@ export enum LoadingState {
Error = 'Error',
}
export enum FieldType {
time = 'time', // or date
number = 'number',
string = 'string',
boolean = 'boolean',
other = 'other', // Object, Array, etc
}
export interface QueryResultMeta {
[key: string]: any;
@ -42,34 +31,10 @@ export interface QueryResultBase {
meta?: QueryResultMeta;
}
export interface Field {
name: string; // The column name
title?: string; // The display value for this field. This supports template variables blank is auto
type?: FieldType;
filterable?: boolean;
unit?: string;
decimals?: number | null; // Significant digits (for display)
min?: number | null;
max?: number | null;
// Convert input values into a display value
mappings?: ValueMapping[];
// Must be sorted by 'value', first value is always -Infinity
thresholds?: Threshold[];
}
export interface Labels {
[key: string]: string;
}
export interface DataFrame extends QueryResultBase {
name?: string;
fields: Field[];
rows: any[][];
labels?: Labels;
}
export interface Column {
text: string; // For a Column, the 'text' is the field name
filterable?: boolean;

@ -0,0 +1,110 @@
import { Threshold } from './threshold';
import { ValueMapping } from './valueMapping';
import { QueryResultBase, Labels, NullValueMode } from './data';
import { FieldCalcs } from '../utils/index';
import { DisplayProcessor } from './displayValue';
/** The basic value types a Field can contain. */
export enum FieldType {
  time = 'time', // or date
  number = 'number',
  string = 'string',
  boolean = 'boolean',
  other = 'other', // Object, Array, etc
}
/**
 * Every property is optional
 *
 * Plugins may extend this with additional properties. Something like series overrides
 */
export interface FieldConfig {
  title?: string; // The display value for this field. Supports template variables; blank is auto
  filterable?: boolean;

  // Numeric Options
  unit?: string;
  decimals?: number | null; // Significant digits (for display)
  min?: number | null;
  max?: number | null;

  // Convert input values into a display string
  mappings?: ValueMapping[];

  // Must be sorted by 'value', first value is always -Infinity
  thresholds?: Threshold[];

  // Used when reducing field values
  nullValueMode?: NullValueMode;

  // Alternative to empty string
  noValue?: string;
}
/** Read-only, array-like access to a column of values. */
export interface Vector<T = any> {
  length: number;

  /**
   * Access the value by index (Like an array)
   */
  get(index: number): T;

  /**
   * Get the results as an array.
   */
  toArray(): T[];

  /**
   * Return the values as a simple array for json serialization
   */
  toJSON(): any; // same results as toArray()
}
/** A single named column of values with display configuration. */
export interface Field<T = any> {
  name: string; // The column name
  type: FieldType;
  config: FieldConfig;
  values: Vector<T>; // `buffer` when JSON

  /**
   * Cache of reduced values
   */
  calcs?: FieldCalcs;

  /**
   * Convert text to the field value
   */
  parse?: (value: any) => T;

  /**
   * Convert a value for display
   */
  display?: DisplayProcessor;
}
/** Columnar query result: a set of equal-length fields. */
export interface DataFrame extends QueryResultBase {
  name?: string;
  fields: Field[]; // All fields of equal length
  labels?: Labels;

  // The number of rows
  length: number;
}
/**
 * Like a field, but properties are optional and values may be a simple array
 */
export interface FieldDTO<T = any> {
  name: string; // The column name
  type?: FieldType;
  config?: FieldConfig;
  values?: Vector<T> | T[]; // toJSON will always be T[], input could be either
}
/**
 * Like a DataFrame, but fields may be a FieldDTO
 */
export interface DataFrameDTO extends QueryResultBase {
  name?: string;
  labels?: Labels;
  fields: Array<FieldDTO | Field>; // mixed DTO/full fields are accepted
}

@ -1,3 +1,5 @@
/** Converts a raw value into a DisplayValue for rendering. */
export type DisplayProcessor = (value: any) => DisplayValue;
export interface DisplayValue {
text: string; // Show in the UI
numeric: number; // Use isNaN to check if it is a real number

@ -1,4 +1,5 @@
export * from './data';
export * from './dataFrame';
export * from './dataLink';
export * from './logs';
export * from './navModel';

@ -4,42 +4,54 @@ exports[`read csv should get X and y 1`] = `
Object {
"fields": Array [
Object {
"name": "Column 1",
"type": "number",
"config": Object {},
"name": "Field 1",
"type": "string",
"values": Array [
"",
"2",
"5",
"",
],
},
Object {
"name": "Column 2",
"config": Object {},
"name": "Field 2",
"type": "number",
"values": Array [
1,
3,
6,
NaN,
],
},
Object {
"name": "Column 3",
"config": Object {},
"name": "Field 3",
"type": "number",
"values": Array [
null,
4,
NaN,
NaN,
],
},
Object {
"config": Object {},
"name": "Field 4",
"type": "number",
"values": Array [
null,
null,
null,
7,
],
},
],
"rows": Array [
Array [
2,
3,
4,
null,
],
Array [
5,
6,
null,
null,
],
Array [
null,
null,
null,
7,
],
],
"labels": undefined,
"meta": undefined,
"name": undefined,
"refId": undefined,
}
`;
@ -47,30 +59,37 @@ exports[`read csv should read csv from local file system 1`] = `
Object {
"fields": Array [
Object {
"config": Object {},
"name": "a",
"type": "number",
"values": Array [
10,
40,
],
},
Object {
"config": Object {},
"name": "b",
"type": "number",
"values": Array [
20,
50,
],
},
Object {
"config": Object {},
"name": "c",
"type": "number",
"values": Array [
30,
60,
],
},
],
"rows": Array [
Array [
10,
20,
30,
],
Array [
40,
50,
60,
],
],
"labels": undefined,
"meta": undefined,
"name": undefined,
"refId": undefined,
}
`;
@ -78,42 +97,48 @@ exports[`read csv should read csv with headers 1`] = `
Object {
"fields": Array [
Object {
"config": Object {
"unit": "ms",
},
"name": "a",
"type": "number",
"unit": "ms",
"values": Array [
10,
40,
40,
40,
],
},
Object {
"config": Object {
"unit": "lengthm",
},
"name": "b",
"type": "string",
"unit": "lengthm",
"type": "number",
"values": Array [
20,
50,
500,
50,
],
},
Object {
"config": Object {
"unit": "s",
},
"name": "c",
"type": "boolean",
"unit": "s",
"values": Array [
true,
false,
false,
true,
],
},
],
"rows": Array [
Array [
10,
"20",
true,
],
Array [
40,
"50",
false,
],
Array [
40,
"500",
false,
],
Array [
40,
"50",
true,
],
],
"labels": undefined,
"meta": undefined,
"name": undefined,
"refId": undefined,
}
`;

@ -1,7 +1,9 @@
import { readCSV, toCSV, CSVHeaderStyle } from './csv';
import { getDataFrameRow } from './processDataFrame';
// Test with local CSV files
const fs = require('fs');
import fs from 'fs';
import { toDataFrameDTO } from './processDataFrame';
describe('read csv', () => {
it('should get X and y', () => {
@ -11,14 +13,31 @@ describe('read csv', () => {
const series = data[0];
expect(series.fields.length).toBe(4);
expect(series.rows.length).toBe(3);
const rows = 4;
expect(series.length).toBe(rows);
// Make sure everythign it padded properly
for (const row of series.rows) {
expect(row.length).toBe(series.fields.length);
for (const field of series.fields) {
expect(field.values.length).toBe(rows);
}
expect(series).toMatchSnapshot();
const dto = toDataFrameDTO(series);
expect(dto).toMatchSnapshot();
});
it('should read single string OK', () => {
const text = 'a,b,c';
const data = readCSV(text);
expect(data.length).toBe(1);
const series = data[0];
expect(series.fields.length).toBe(3);
expect(series.length).toBe(0);
expect(series.fields[0].name).toEqual('a');
expect(series.fields[1].name).toEqual('b');
expect(series.fields[2].name).toEqual('c');
});
it('should read csv from local file system', () => {
@ -28,7 +47,7 @@ describe('read csv', () => {
const csv = fs.readFileSync(path, 'utf8');
const data = readCSV(csv);
expect(data.length).toBe(1);
expect(data[0]).toMatchSnapshot();
expect(toDataFrameDTO(data[0])).toMatchSnapshot();
});
it('should read csv with headers', () => {
@ -38,7 +57,7 @@ describe('read csv', () => {
const csv = fs.readFileSync(path, 'utf8');
const data = readCSV(csv);
expect(data.length).toBe(1);
expect(data[0]).toMatchSnapshot();
expect(toDataFrameDTO(data[0])).toMatchSnapshot();
});
});
@ -54,7 +73,7 @@ describe('write csv', () => {
const data = readCSV(csv);
const out = toCSV(data, { headerStyle: CSVHeaderStyle.full });
expect(data.length).toBe(1);
expect(data[0].rows[0]).toEqual(firstRow);
expect(getDataFrameRow(data[0], 0)).toEqual(firstRow);
expect(data[0].fields.length).toBe(3);
expect(norm(out)).toBe(norm(csv));
@ -65,7 +84,7 @@ describe('write csv', () => {
const f = readCSV(shorter);
const fields = f[0].fields;
expect(fields.length).toBe(3);
expect(f[0].rows[0]).toEqual(firstRow);
expect(getDataFrameRow(f[0], 0)).toEqual(firstRow);
expect(fields.map(f => f.name).join(',')).toEqual('a,b,c'); // the names
});
});

@ -4,8 +4,9 @@ import defaults from 'lodash/defaults';
import isNumber from 'lodash/isNumber';
// Types
import { DataFrame, Field, FieldType } from '../types';
import { DataFrame, Field, FieldType, FieldConfig } from '../types';
import { guessFieldTypeFromValue } from './processDataFrame';
import { DataFrameHelper } from './dataFrameHelper';
export enum CSVHeaderStyle {
full,
@ -28,9 +29,9 @@ export interface CSVParseCallbacks {
* This can return a modified table to force any
* Column configurations
*/
onHeader: (table: DataFrame) => void;
onHeader: (fields: Field[]) => void;
// Called after each row is read and
// Called after each row is read
onRow: (row: any[]) => void;
}
@ -49,16 +50,13 @@ enum ParseState {
ReadingRows,
}
type FieldParser = (value: string) => any;
export class CSVReader {
config: CSVConfig;
callback?: CSVParseCallbacks;
field: FieldParser[];
series: DataFrame;
state: ParseState;
data: DataFrame[];
data: DataFrameHelper[];
current: DataFrameHelper;
constructor(options?: CSVOptions) {
if (!options) {
@ -67,12 +65,8 @@ export class CSVReader {
this.config = options.config || {};
this.callback = options.callback;
this.field = [];
this.current = new DataFrameHelper({ fields: [] });
this.state = ParseState.Starting;
this.series = {
fields: [],
rows: [],
};
this.data = [];
}
@ -92,37 +86,42 @@ export class CSVReader {
const idx = first.indexOf('#', 2);
if (idx > 0) {
const k = first.substr(1, idx - 1);
const isName = 'name' === k;
// Simple object used to check if headers match
const headerKeys: Field = {
name: '#',
type: FieldType.number,
const headerKeys: FieldConfig = {
unit: '#',
};
// Check if it is a known/supported column
if (headerKeys.hasOwnProperty(k)) {
if (isName || headerKeys.hasOwnProperty(k)) {
// Starting a new table after reading rows
if (this.state === ParseState.ReadingRows) {
this.series = {
fields: [],
rows: [],
};
this.data.push(this.series);
this.current = new DataFrameHelper({ fields: [] });
this.data.push(this.current);
}
padColumnWidth(this.series.fields, line.length);
const fields: any[] = this.series.fields; // cast to any so we can lookup by key
const v = first.substr(idx + 1);
fields[0][k] = v;
for (let j = 1; j < fields.length; j++) {
fields[j][k] = line[j];
if (isName) {
this.current.addFieldFor(undefined, v);
for (let j = 1; j < line.length; j++) {
this.current.addFieldFor(undefined, line[j]);
}
} else {
const { fields } = this.current;
for (let j = 0; j < fields.length; j++) {
if (!fields[j].config) {
fields[j].config = {};
}
const disp = fields[j].config as any; // any lets name lookup
disp[k] = j === 0 ? v : line[j];
}
}
this.state = ParseState.InHeader;
continue;
}
} else if (this.state === ParseState.Starting) {
this.series.fields = makeFieldsFor(line);
this.state = ParseState.InHeader;
continue;
}
@ -133,67 +132,48 @@ export class CSVReader {
if (this.state === ParseState.Starting) {
const type = guessFieldTypeFromValue(first);
if (type === FieldType.string) {
this.series.fields = makeFieldsFor(line);
for (const s of line) {
this.current.addFieldFor(undefined, s);
}
this.state = ParseState.InHeader;
continue;
}
this.series.fields = makeFieldsFor(new Array(line.length));
this.series.fields[0].type = type;
this.state = ParseState.InHeader; // fall through to read rows
}
}
if (this.state === ParseState.InHeader) {
padColumnWidth(this.series.fields, line.length);
this.state = ParseState.ReadingRows;
// Add the current results to the data
if (this.state !== ParseState.ReadingRows) {
// anything???
}
if (this.state === ParseState.ReadingRows) {
// Make sure colum structure is valid
if (line.length > this.series.fields.length) {
padColumnWidth(this.series.fields, line.length);
if (this.callback) {
this.callback.onHeader(this.series);
} else {
// Expand all rows with nulls
for (let x = 0; x < this.series.rows.length; x++) {
const row = this.series.rows[x];
while (row.length < line.length) {
row.push(null);
}
}
}
}
this.state = ParseState.ReadingRows;
const row: any[] = [];
for (let j = 0; j < line.length; j++) {
const v = line[j];
if (v) {
if (!this.field[j]) {
this.field[j] = makeFieldParser(v, this.series.fields[j]);
}
row.push(this.field[j](v));
} else {
row.push(null);
}
// Make sure colum structure is valid
if (line.length > this.current.fields.length) {
const { fields } = this.current;
for (let f = fields.length; f < line.length; f++) {
this.current.addFieldFor(line[f]);
}
if (this.callback) {
// Send the header after we guess the type
if (this.series.rows.length === 0) {
this.callback.onHeader(this.series);
this.series.rows.push(row); // Only add the first row
}
this.callback.onRow(row);
} else {
this.series.rows.push(row);
this.callback.onHeader(this.current.fields);
}
}
this.current.appendRow(line);
if (this.callback) {
// // Send the header after we guess the type
// if (this.series.rows.length === 0) {
// this.callback.onHeader(this.series);
// }
this.callback.onRow(line);
}
}
};
readCSV(text: string): DataFrame[] {
this.data = [this.series];
readCSV(text: string): DataFrameHelper[] {
this.current = new DataFrameHelper({ fields: [] });
this.data = [this.current];
const papacfg = {
...this.config,
@ -204,58 +184,8 @@ export class CSVReader {
} as ParseConfig;
Papa.parse(text, papacfg);
return this.data;
}
}
function makeFieldParser(value: string, field: Field): FieldParser {
if (!field.type) {
if (field.name === 'time' || field.name === 'Time') {
field.type = FieldType.time;
} else {
field.type = guessFieldTypeFromValue(value);
}
}
if (field.type === FieldType.number) {
return (value: string) => {
return parseFloat(value);
};
}
// Will convert anything that starts with "T" to true
if (field.type === FieldType.boolean) {
return (value: string) => {
return !(value[0] === 'F' || value[0] === 'f' || value[0] === '0');
};
}
// Just pass the string back
return (value: string) => value;
}
/**
* Creates a field object for each string in the list
*/
function makeFieldsFor(line: string[]): Field[] {
const fields: Field[] = [];
for (let i = 0; i < line.length; i++) {
const v = line[i] ? line[i] : 'Column ' + (i + 1);
fields.push({ name: v });
}
return fields;
}
/**
* Makes sure the colum has valid entries up the the width
*/
function padColumnWidth(fields: Field[], width: number) {
if (fields.length < width) {
for (let i = fields.length; i < width; i++) {
fields.push({
name: 'Field ' + (i + 1),
});
}
return this.data;
}
}
@ -295,15 +225,26 @@ function makeFieldWriter(field: Field, config: CSVConfig): FieldWriter {
}
function getHeaderLine(key: string, fields: Field[], config: CSVConfig): string {
const isName = 'name' === key;
const isType = 'type' === key;
for (const f of fields) {
if (f.hasOwnProperty(key)) {
const display = f.config;
if (isName || isType || (display && display.hasOwnProperty(key))) {
let line = '#' + key + '#';
for (let i = 0; i < fields.length; i++) {
if (i > 0) {
line = line + config.delimiter;
}
const v = (fields[i] as any)[key];
let v: any = fields[i].name;
if (isType) {
v = fields[i].type;
} else if (isName) {
// already name
} else {
v = (fields[i].config as any)[key];
}
if (v) {
line = line + writeValue(v, config);
}
@ -329,7 +270,7 @@ export function toCSV(data: DataFrame[], config?: CSVConfig): string {
});
for (const series of data) {
const { rows, fields } = series;
const { fields } = series;
if (config.headerStyle === CSVHeaderStyle.full) {
csv =
csv +
@ -346,20 +287,22 @@ export function toCSV(data: DataFrame[], config?: CSVConfig): string {
}
csv += config.newline;
}
const writers = fields.map(field => makeFieldWriter(field, config!));
for (let i = 0; i < rows.length; i++) {
const row = rows[i];
for (let j = 0; j < row.length; j++) {
if (j > 0) {
csv = csv + config.delimiter;
}
const length = fields[0].values.length;
if (length > 0) {
const writers = fields.map(field => makeFieldWriter(field, config!));
for (let i = 0; i < length; i++) {
for (let j = 0; j < fields.length; j++) {
if (j > 0) {
csv = csv + config.delimiter;
}
const v = row[j];
if (v !== null) {
csv = csv + writers[j](v);
const v = fields[j].values.get(i);
if (v !== null) {
csv = csv + writers[j](v);
}
}
csv = csv + config.newline;
}
csv = csv + config.newline;
}
csv = csv + config.newline;
}

@ -0,0 +1,89 @@
import { FieldType, DataFrameDTO, FieldDTO } from '../types/index';
import { DataFrameHelper } from './dataFrameHelper';
describe('dataFrameHelper', () => {
  // Two fields share the name 'value' to exercise duplicate-name handling
  const frame: DataFrameDTO = {
    fields: [
      { name: 'time', type: FieldType.time, values: [100, 200, 300] },
      { name: 'name', type: FieldType.string, values: ['a', 'b', 'c'] },
      { name: 'value', type: FieldType.number, values: [1, 2, 3] },
      { name: 'value', type: FieldType.number, values: [4, 5, 6] },
    ],
  };
  const ext = new DataFrameHelper(frame);

  it('Should get a valid count for the fields', () => {
    // length is the row count, not the field count
    expect(ext.length).toEqual(3);
  });

  it('Should get the first field with a duplicate name', () => {
    const field = ext.getFieldByName('value');
    expect(field!.name).toEqual('value');
    // Only the first 'value' field is cached by name
    expect(field!.values.toJSON()).toEqual([1, 2, 3]);
  });
});
describe('FieldCache', () => {
  it('when creating a new FieldCache from fields should be able to query cache', () => {
    const fields: FieldDTO[] = [
      { name: 'time', type: FieldType.time },
      { name: 'string', type: FieldType.string },
      { name: 'number', type: FieldType.number },
      { name: 'boolean', type: FieldType.boolean },
      { name: 'other', type: FieldType.other },
      { name: 'undefined' }, // no type: defaults to FieldType.other
    ];
    const fieldCache = new DataFrameHelper({ fields });
    const allFields = fieldCache.getFields();
    expect(allFields).toHaveLength(6);

    const expectedFieldNames = ['time', 'string', 'number', 'boolean', 'other', 'undefined'];
    expect(allFields.map(f => f.name)).toEqual(expectedFieldNames);

    expect(fieldCache.hasFieldOfType(FieldType.time)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.string)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.number)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.boolean)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.other)).toBeTruthy();

    expect(fieldCache.getFields(FieldType.time).map(f => f.name)).toEqual([expectedFieldNames[0]]);
    expect(fieldCache.getFields(FieldType.string).map(f => f.name)).toEqual([expectedFieldNames[1]]);
    expect(fieldCache.getFields(FieldType.number).map(f => f.name)).toEqual([expectedFieldNames[2]]);
    expect(fieldCache.getFields(FieldType.boolean).map(f => f.name)).toEqual([expectedFieldNames[3]]);
    // The typeless field falls back to 'other', so it groups with the explicit one
    expect(fieldCache.getFields(FieldType.other).map(f => f.name)).toEqual([
      expectedFieldNames[4],
      expectedFieldNames[5],
    ]);

    expect(fieldCache.fields[0].name).toEqual(expectedFieldNames[0]);
    expect(fieldCache.fields[1].name).toEqual(expectedFieldNames[1]);
    expect(fieldCache.fields[2].name).toEqual(expectedFieldNames[2]);
    expect(fieldCache.fields[3].name).toEqual(expectedFieldNames[3]);
    expect(fieldCache.fields[4].name).toEqual(expectedFieldNames[4]);
    expect(fieldCache.fields[5].name).toEqual(expectedFieldNames[5]);
    expect(fieldCache.fields[6]).toBeUndefined();

    expect(fieldCache.getFirstFieldOfType(FieldType.time)!.name).toEqual(expectedFieldNames[0]);
    expect(fieldCache.getFirstFieldOfType(FieldType.string)!.name).toEqual(expectedFieldNames[1]);
    expect(fieldCache.getFirstFieldOfType(FieldType.number)!.name).toEqual(expectedFieldNames[2]);
    expect(fieldCache.getFirstFieldOfType(FieldType.boolean)!.name).toEqual(expectedFieldNames[3]);
    expect(fieldCache.getFirstFieldOfType(FieldType.other)!.name).toEqual(expectedFieldNames[4]);

    expect(fieldCache.hasFieldNamed('tim')).toBeFalsy();
    expect(fieldCache.hasFieldNamed('time')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('string')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('number')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('boolean')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('other')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('undefined')).toBeTruthy();

    expect(fieldCache.getFieldByName('time')!.name).toEqual(expectedFieldNames[0]);
    expect(fieldCache.getFieldByName('string')!.name).toEqual(expectedFieldNames[1]);
    expect(fieldCache.getFieldByName('number')!.name).toEqual(expectedFieldNames[2]);
    expect(fieldCache.getFieldByName('boolean')!.name).toEqual(expectedFieldNames[3]);
    expect(fieldCache.getFieldByName('other')!.name).toEqual(expectedFieldNames[4]);
    expect(fieldCache.getFieldByName('undefined')!.name).toEqual(expectedFieldNames[5]);
    expect(fieldCache.getFieldByName('null')).toBeUndefined();
  });
});

@ -0,0 +1,232 @@
import { Field, FieldType, DataFrame, Vector, FieldDTO, DataFrameDTO } from '../types/dataFrame';
import { Labels, QueryResultMeta } from '../types/data';
import { guessFieldTypeForField, guessFieldTypeFromValue } from './processDataFrame';
import { ArrayVector } from './vector';
import isArray from 'lodash/isArray';
/**
 * Mutable DataFrame implementation that maintains by-name and by-type
 * lookup caches for its fields and keeps every field padded to the same
 * length as rows/fields are appended.
 */
export class DataFrameHelper implements DataFrame {
  refId?: string;
  meta?: QueryResultMeta;
  name?: string;
  fields: Field[];
  labels?: Labels;
  length = 0; // updated so it is the length of all fields

  // Lookup caches, maintained incrementally by addField/appendRow
  private fieldByName: { [key: string]: Field } = {};
  private fieldByType: { [key: string]: Field[] } = {};

  constructor(data?: DataFrame | DataFrameDTO) {
    if (!data) {
      data = { fields: [] }; // start empty when nothing is passed
    }
    this.refId = data.refId;
    this.meta = data.meta;
    this.name = data.name;
    this.labels = data.labels;
    this.fields = [];
    // Route every field through addField so caches and length stay consistent
    for (let i = 0; i < data.fields.length; i++) {
      this.addField(data.fields[i]);
    }
  }

  /**
   * Add a new field whose type is guessed from the sample value.
   * A name is generated from the field count when none is given.
   */
  addFieldFor(value: any, name?: string): Field {
    if (!name) {
      name = `Field ${this.fields.length + 1}`;
    }
    return this.addField({
      name,
      type: guessFieldTypeFromValue(value),
    });
  }

  /**
   * Reverse the direction of all fields
   */
  reverse() {
    for (const f of this.fields) {
      // NOTE(review): this only reverses when `values` is a plain array; for a
      // Vector wrapper (e.g. ArrayVector) isArray is false and nothing happens
      // — confirm this is the intended behavior
      if (isArray(f.values)) {
        const arr = f.values as any[];
        arr.reverse();
      }
    }
  }

  // Resolve an 'other' type when possible, then index the field by its type
  private updateTypeIndex(field: Field) {
    // Make sure it has a type
    if (field.type === FieldType.other) {
      const t = guessFieldTypeForField(field);
      if (t) {
        field.type = t;
      }
    }
    if (!this.fieldByType[field.type]) {
      this.fieldByType[field.type] = [];
    }
    this.fieldByType[field.type].push(field);
  }

  /**
   * Add a field (or FieldDTO): normalizes values into a Vector, fills in a
   * default name/type/config, updates the caches, and pads values with null
   * so all fields keep the same length.
   */
  addField(f: Field | FieldDTO): Field {
    const type = f.type || FieldType.other;
    // Wrap plain arrays (or missing values) in an ArrayVector
    const values =
      !f.values || isArray(f.values)
        ? new ArrayVector(f.values as any[] | undefined) // array or empty
        : (f.values as Vector);

    // And a name
    let name = f.name;
    if (!name) {
      if (type === FieldType.time) {
        name = `Time ${this.fields.length + 1}`;
      } else {
        name = `Column ${this.fields.length + 1}`;
      }
    }
    const field: Field = {
      name,
      type,
      config: f.config || {},
      values,
    };
    this.updateTypeIndex(field);

    // Only the first field with a given name is kept in the name cache
    if (this.fieldByName[field.name]) {
      console.warn('Duplicate field names in DataFrame: ', field.name);
    } else {
      this.fieldByName[field.name] = field;
    }

    // Make sure the lengths all match
    if (field.values.length !== this.length) {
      if (field.values.length > this.length) {
        // Add `null` to all other values
        const newlen = field.values.length;
        for (const fx of this.fields) {
          const arr = fx.values as ArrayVector;
          while (fx.values.length !== newlen) {
            arr.buffer.push(null);
          }
        }
        this.length = field.values.length;
      } else {
        // Pad the new, shorter field up to the current length
        const arr = field.values as ArrayVector;
        while (field.values.length !== this.length) {
          arr.buffer.push(null);
        }
      }
    }

    this.fields.push(field);
    return field;
  }

  /**
   * This will add each value to the corresponding column
   */
  appendRow(row: any[]) {
    // Grow the field list if this row has more columns than we know about
    for (let i = this.fields.length; i < row.length; i++) {
      this.addFieldFor(row[i]);
    }

    // The first line may change the field types
    if (this.length < 1) {
      this.fieldByType = {}; // rebuild the type index from scratch
      for (let i = 0; i < this.fields.length; i++) {
        const f = this.fields[i];
        if (!f.type || f.type === FieldType.other) {
          f.type = guessFieldTypeFromValue(row[i]);
        }
        this.updateTypeIndex(f);
      }
    }

    for (let i = 0; i < this.fields.length; i++) {
      const f = this.fields[i];
      let v = row[i];
      // Lazily build one parser per field from the first value it sees
      if (!f.parse) {
        f.parse = makeFieldParser(v, f);
      }
      v = f.parse(v);

      const arr = f.values as ArrayVector;
      arr.buffer.push(v); // may be undefined
    }
    this.length++;
  }

  /**
   * Add any values that match the field names
   */
  appendRowFrom(obj: { [key: string]: any }) {
    for (const f of this.fields) {
      const v = obj[f.name]; // undefined when the key is missing
      if (!f.parse) {
        f.parse = makeFieldParser(v, f);
      }

      const arr = f.values as ArrayVector;
      arr.buffer.push(f.parse(v)); // may be undefined
    }
    this.length++;
  }

  /**
   * Fields of the given type, or all fields when no type is passed.
   * Always returns a copy so callers cannot mutate the internal cache.
   */
  getFields(type?: FieldType): Field[] {
    if (!type) {
      return [...this.fields]; // All fields
    }
    const fields = this.fieldByType[type];
    if (fields) {
      return [...fields];
    }
    return [];
  }

  hasFieldOfType(type: FieldType): boolean {
    const types = this.fieldByType[type];
    return types && types.length > 0;
  }

  getFirstFieldOfType(type: FieldType): Field | undefined {
    const arr = this.fieldByType[type];
    if (arr && arr.length > 0) {
      return arr[0];
    }
    return undefined;
  }

  hasFieldNamed(name: string): boolean {
    return !!this.fieldByName[name];
  }

  /**
   * Returns the first field with the given name.
   */
  getFieldByName(name: string): Field | undefined {
    return this.fieldByName[name];
  }
}
/**
 * Build a parser that converts raw string values into the field's value type.
 *
 * If the field has no type yet, one is inferred first — fields named
 * 'time'/'Time' become time fields, otherwise the type is guessed from the
 * sample value — and the inferred type is written back onto the field.
 */
function makeFieldParser(value: string, field: Field): (value: string) => any {
  if (!field.type) {
    const looksLikeTime = field.name === 'time' || field.name === 'Time';
    field.type = looksLikeTime ? FieldType.time : guessFieldTypeFromValue(value);
  }

  switch (field.type) {
    case FieldType.number:
      return (raw: string) => parseFloat(raw);

    case FieldType.boolean:
      // Anything not starting with 'F', 'f' or '0' parses as true
      return (raw: string) => !(raw[0] === 'F' || raw[0] === 'f' || raw[0] === '0');

    default:
      // Strings (and any other type) pass through unchanged
      return (raw: string) => raw;
  }
}

@ -0,0 +1,76 @@
import { FieldType, DataFrameDTO } from '../types/index';
import { DataFrameHelper } from './dataFrameHelper';
import { DataFrameView } from './dataFrameView';
import { DateTime } from './moment_wrapper';
// Typed row shape used to read the frame through a DataFrameView
interface MySpecialObject {
  time: DateTime;
  name: string;
  value: number;
  more: string; // MISSING — not present in the frame below, reads as undefined
}
describe('dataFrameView', () => {
  const frame: DataFrameDTO = {
    fields: [
      { name: 'time', type: FieldType.time, values: [100, 200, 300] },
      { name: 'name', type: FieldType.string, values: ['a', 'b', 'c'] },
      { name: 'value', type: FieldType.number, values: [1, 2, 3] },
    ],
  };
  const ext = new DataFrameHelper(frame);
  const vector = new DataFrameView<MySpecialObject>(ext);

  it('Should get a typed vector', () => {
    expect(vector.length).toEqual(3);

    const first = vector.get(0);
    expect(first.time).toEqual(100);
    expect(first.name).toEqual('a');
    expect(first.value).toEqual(1);
    expect(first.more).toBeUndefined();
  });

  it('Should support the spread operator', () => {
    expect(vector.length).toEqual(3);

    const first = vector.get(0);
    const copy = { ...first }; // snapshot the row before the index moves
    expect(copy.time).toEqual(100);
    expect(copy.name).toEqual('a');
    expect(copy.value).toEqual(1);
    expect(copy.more).toBeUndefined();
  });

  it('Should support array indexes', () => {
    expect(vector.length).toEqual(3);

    const first = vector.get(0) as any;
    expect(first[0]).toEqual(100);
    expect(first[1]).toEqual('a');
    expect(first[2]).toEqual(1);
    expect(first[3]).toBeUndefined();
  });

  it('Should advertise the property names for each field', () => {
    expect(vector.length).toEqual(3);
    const first = vector.get(0);
    const keys = Object.keys(first);
    expect(keys).toEqual(['time', 'name', 'value']);
  });

  it('has a weird side effect that the object values change after interation', () => {
    expect(vector.length).toEqual(3);

    // Get the first value
    const first = vector.get(0);
    expect(first.name).toEqual('a');

    // Then get the second one
    const second = vector.get(1);

    // the values for 'first' have changed (view.get() returns one shared object)
    expect(first.name).toEqual('b');
    expect(first.name).toEqual(second.name);
  });
});

@ -0,0 +1,67 @@
import { DataFrame, Vector } from '../types/index';
/**
 * Presents the contents of a DataFrame as a well typed javascript
 * object Vector.
 *
 * NOTE: the object handed back by `view.get(index)` is shared and
 * optimized for use inside a loop -- every call returns the *same*
 * object; only the row index it reads from has moved.
 *
 * For example, the three objects:
 *  const first = view.get(0);
 *  const second = view.get(1);
 *  const third = view.get(2);
 * will all point to the contents at index 2
 *
 * If you need three distinct objects, copy each one:
 *  const first = { ... view.get(0) };
 *  const second = { ... view.get(1) };
 *  const third = { ... view.get(2) };
 */
export class DataFrameView<T = any> implements Vector<T> {
  private index = 0;
  private obj: T;

  constructor(private data: DataFrame) {
    const row = ({} as unknown) as T;
    data.fields.forEach((field, i) => {
      // Reads this field's value at whatever row is currently selected
      const getter = () => field.values.get(this.index);

      // Named access -- the first field with a given name wins
      if (!(row as any).hasOwnProperty(field.name)) {
        Object.defineProperty(row, field.name, {
          enumerable: true, // visible to Object.keys / spread
          get: getter,
        });
      }

      // Positional access (row[0], row[1], ...) hidden from enumeration
      Object.defineProperty(row, i, {
        enumerable: false,
        get: getter,
      });
    });
    this.obj = row;
  }

  get length() {
    return this.data.length;
  }

  get(idx: number) {
    this.index = idx;
    return this.obj;
  }

  toArray(): T[] {
    const copies: T[] = [];
    for (let i = 0; i < this.data.length; i++) {
      // Spread detaches each row from the shared proxy object
      copies.push({ ...this.get(i) });
    }
    return copies;
  }

  toJSON(): T[] {
    return this.toArray();
  }
}

@ -1,71 +0,0 @@
import { FieldType } from '../types/index';
import { FieldCache } from './fieldCache';
describe('FieldCache', () => {
  it('when creating a new FieldCache from fields should be able to query cache', () => {
    // One field of every known type, plus one with no type at all
    const fields = [
      { name: 'time', type: FieldType.time },
      { name: 'string', type: FieldType.string },
      { name: 'number', type: FieldType.number },
      { name: 'boolean', type: FieldType.boolean },
      { name: 'other', type: FieldType.other },
      { name: 'undefined' },
    ];
    const fieldCache = new FieldCache(fields);
    const allFields = fieldCache.getFields();
    expect(allFields).toHaveLength(6);

    // getFields() decorates each field with its position; the untyped
    // field is expected to be normalized to FieldType.other
    const expectedFields = [
      { ...fields[0], index: 0 },
      { ...fields[1], index: 1 },
      { ...fields[2], index: 2 },
      { ...fields[3], index: 3 },
      { ...fields[4], index: 4 },
      { ...fields[5], type: FieldType.other, index: 5 },
    ];
    expect(allFields).toMatchObject(expectedFields);

    // Queries by type
    expect(fieldCache.hasFieldOfType(FieldType.time)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.string)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.number)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.boolean)).toBeTruthy();
    expect(fieldCache.hasFieldOfType(FieldType.other)).toBeTruthy();
    // 'other' matches both the explicit other field and the untyped one
    expect(fieldCache.getFields(FieldType.time)).toMatchObject([expectedFields[0]]);
    expect(fieldCache.getFields(FieldType.string)).toMatchObject([expectedFields[1]]);
    expect(fieldCache.getFields(FieldType.number)).toMatchObject([expectedFields[2]]);
    expect(fieldCache.getFields(FieldType.boolean)).toMatchObject([expectedFields[3]]);
    expect(fieldCache.getFields(FieldType.other)).toMatchObject([expectedFields[4], expectedFields[5]]);

    // Queries by index; out-of-range lookups return null
    expect(fieldCache.getFieldByIndex(0)).toMatchObject(expectedFields[0]);
    expect(fieldCache.getFieldByIndex(1)).toMatchObject(expectedFields[1]);
    expect(fieldCache.getFieldByIndex(2)).toMatchObject(expectedFields[2]);
    expect(fieldCache.getFieldByIndex(3)).toMatchObject(expectedFields[3]);
    expect(fieldCache.getFieldByIndex(4)).toMatchObject(expectedFields[4]);
    expect(fieldCache.getFieldByIndex(5)).toMatchObject(expectedFields[5]);
    expect(fieldCache.getFieldByIndex(6)).toBeNull();

    // First-of-type lookups
    expect(fieldCache.getFirstFieldOfType(FieldType.time)).toMatchObject(expectedFields[0]);
    expect(fieldCache.getFirstFieldOfType(FieldType.string)).toMatchObject(expectedFields[1]);
    expect(fieldCache.getFirstFieldOfType(FieldType.number)).toMatchObject(expectedFields[2]);
    expect(fieldCache.getFirstFieldOfType(FieldType.boolean)).toMatchObject(expectedFields[3]);
    expect(fieldCache.getFirstFieldOfType(FieldType.other)).toMatchObject(expectedFields[4]);

    // Queries by name; names must match exactly and missing names return null
    expect(fieldCache.hasFieldNamed('tim')).toBeFalsy();
    expect(fieldCache.hasFieldNamed('time')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('string')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('number')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('boolean')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('other')).toBeTruthy();
    expect(fieldCache.hasFieldNamed('undefined')).toBeTruthy();
    expect(fieldCache.getFieldByName('time')).toMatchObject(expectedFields[0]);
    expect(fieldCache.getFieldByName('string')).toMatchObject(expectedFields[1]);
    expect(fieldCache.getFieldByName('number')).toMatchObject(expectedFields[2]);
    expect(fieldCache.getFieldByName('boolean')).toMatchObject(expectedFields[3]);
    expect(fieldCache.getFieldByName('other')).toMatchObject(expectedFields[4]);
    expect(fieldCache.getFieldByName('undefined')).toMatchObject(expectedFields[5]);
    expect(fieldCache.getFieldByName('null')).toBeNull();
  });
});

@ -1,76 +0,0 @@
import { Field, FieldType } from '../types/index';
// A Field decorated with its position in the cache.
export interface IndexedField extends Field {
  index: number;
}

/**
 * Indexes a list of fields by name and by type for fast lookup.
 * Fields with a missing type are normalized to FieldType.other.
 */
export class FieldCache {
  private fields: Field[];
  private fieldIndexByName: { [key: string]: number };
  private fieldIndexByType: { [key: string]: number[] };

  constructor(fields?: Field[]) {
    this.fields = [];
    this.fieldIndexByName = {};
    this.fieldIndexByType = {};
    // Pre-seed the common buckets so type queries always see an array
    this.fieldIndexByType[FieldType.time] = [];
    this.fieldIndexByType[FieldType.string] = [];
    this.fieldIndexByType[FieldType.number] = [];
    this.fieldIndexByType[FieldType.boolean] = [];
    this.fieldIndexByType[FieldType.other] = [];
    if (fields) {
      for (const field of fields) {
        this.addField(field);
      }
    }
  }

  /**
   * Append a field to the cache and index it by name and type.
   */
  addField(field: Field) {
    // Normalize first so an explicit `type: undefined` cannot leak through
    // (the previous spread-based default was bypassed by an explicit key)
    const type = field.type || FieldType.other;
    this.fields.push({
      ...field,
      type,
    });
    const index = this.fields.length - 1;
    this.fieldIndexByName[field.name] = index;
    // Create the bucket lazily so a FieldType value that was not pre-seeded
    // in the constructor cannot crash the push
    if (!this.fieldIndexByType[type]) {
      this.fieldIndexByType[type] = [];
    }
    this.fieldIndexByType[type].push(index);
  }

  hasFieldOfType(type: FieldType): boolean {
    return this.fieldIndexByType[type] && this.fieldIndexByType[type].length > 0;
  }

  /**
   * All fields (or only those of the given type), each with its index.
   */
  getFields(type?: FieldType): IndexedField[] {
    const fields: IndexedField[] = [];
    for (let index = 0; index < this.fields.length; index++) {
      const field = this.fields[index];
      if (!type || field.type === type) {
        fields.push({ ...field, index });
      }
    }
    return fields;
  }

  // Returns null when the index is out of range
  getFieldByIndex(index: number): IndexedField | null {
    return this.fields[index] ? { ...this.fields[index], index } : null;
  }

  getFirstFieldOfType(type: FieldType): IndexedField | null {
    return this.hasFieldOfType(type)
      ? { ...this.fields[this.fieldIndexByType[type][0]], index: this.fieldIndexByType[type][0] }
      : null;
  }

  hasFieldNamed(name: string): boolean {
    return this.fieldIndexByName[name] !== undefined;
  }

  // Returns null when no field has that exact name
  getFieldByName(name: string): IndexedField | null {
    return this.hasFieldNamed(name)
      ? { ...this.fields[this.fieldIndexByName[name]], index: this.fieldIndexByName[name] }
      : null;
  }
}

@ -1,20 +1,32 @@
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
import _ from 'lodash';
import { DataFrame } from '../types/data';
import { Field, FieldType } from '../types/index';
import { DataFrameHelper } from './dataFrameHelper';
import { ArrayVector } from './vector';
import { guessFieldTypeFromValue } from './processDataFrame';
/**
* Run a reducer and get back the value
*/
function reduce(series: DataFrame, fieldIndex: number, id: string): any {
return reduceField({ series, fieldIndex, reducers: [id] })[id];
function reduce(field: Field, id: string): any {
return reduceField({ field, reducers: [id] })[id];
}
describe('Stats Calculators', () => {
const basicTable = {
fields: [{ name: 'a' }, { name: 'b' }, { name: 'c' }],
rows: [[10, 20, 30], [20, 30, 40]],
function createField<T>(name: string, values?: T[], type?: FieldType): Field<T> {
const arr = new ArrayVector(values);
return {
name,
config: {},
type: type ? type : guessFieldTypeFromValue(arr.get(0)),
values: arr,
};
}
describe('Stats Calculators', () => {
const basicTable = new DataFrameHelper({
fields: [{ name: 'a', values: [10, 20] }, { name: 'b', values: [20, 30] }, { name: 'c', values: [30, 40] }],
});
it('should load all standard stats', () => {
for (const id of Object.keys(ReducerID)) {
@ -38,8 +50,7 @@ describe('Stats Calculators', () => {
it('should calculate basic stats', () => {
const stats = reduceField({
series: basicTable,
fieldIndex: 0,
field: basicTable.fields[0],
reducers: ['first', 'last', 'mean'],
});
@ -54,9 +65,9 @@ describe('Stats Calculators', () => {
});
it('should support a single stat also', () => {
basicTable.fields[0].calcs = undefined; // clear the cache
const stats = reduceField({
series: basicTable,
fieldIndex: 0,
field: basicTable.fields[0],
reducers: ['first'],
});
@ -67,8 +78,7 @@ describe('Stats Calculators', () => {
it('should get non standard stats', () => {
const stats = reduceField({
series: basicTable,
fieldIndex: 0,
field: basicTable.fields[0],
reducers: [ReducerID.distinctCount, ReducerID.changeCount],
});
@ -78,8 +88,7 @@ describe('Stats Calculators', () => {
it('should calculate step', () => {
const stats = reduceField({
series: { fields: [{ name: 'A' }], rows: [[100], [200], [300], [400]] },
fieldIndex: 0,
field: createField('x', [100, 200, 300, 400]),
reducers: [ReducerID.step, ReducerID.delta],
});
@ -88,53 +97,38 @@ describe('Stats Calculators', () => {
});
it('consistenly check allIsNull/allIsZero', () => {
const empty = {
fields: [{ name: 'A' }],
rows: [],
};
const allNull = ({
fields: [{ name: 'A' }],
rows: [null, null, null, null],
} as unknown) as DataFrame;
const allNull2 = {
fields: [{ name: 'A' }],
rows: [[null], [null], [null], [null]],
};
const allZero = {
fields: [{ name: 'A' }],
rows: [[0], [0], [0], [0]],
};
expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
const empty = createField('x');
const allNull = createField('x', [null, null, null, null]);
const allUndefined = createField('x', [undefined, undefined, undefined, undefined]);
const allZero = createField('x', [0, 0, 0, 0]);
expect(reduce(empty, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allUndefined, ReducerID.allIsNull)).toEqual(true);
expect(reduce(empty, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allZero, ReducerID.allIsZero)).toEqual(true);
});
it('consistent results for first/last value with null', () => {
const info = [
{
rows: [[null], [200], [null]], // first/last value is null
data: [null, 200, null], // first/last value is null
result: 200,
},
{
rows: [[null], [null], [null]], // All null
data: [null, null, null], // All null
result: undefined,
},
{
rows: [], // Empty row
data: [undefined, undefined, undefined], // Empty row
result: undefined,
},
];
const fields = [{ name: 'A' }];
const stats = reduceField({
series: { rows: info[0].rows, fields },
fieldIndex: 0,
field: createField('x', info[0].data),
reducers: [ReducerID.first, ReducerID.last, ReducerID.firstNotNull, ReducerID.lastNotNull], // uses standard path
});
expect(stats[ReducerID.first]).toEqual(null);
@ -146,21 +140,19 @@ describe('Stats Calculators', () => {
for (const input of info) {
for (const reducer of reducers) {
const v1 = reduceField({
series: { rows: input.rows, fields },
fieldIndex: 0,
field: createField('x', input.data),
reducers: [reducer, ReducerID.mean], // uses standard path
})[reducer];
const v2 = reduceField({
series: { rows: input.rows, fields },
fieldIndex: 0,
field: createField('x', input.data),
reducers: [reducer], // uses optimized path
})[reducer];
if (v1 !== v2 || v1 !== input.result) {
const msg =
`Invalid ${reducer} result for: ` +
input.rows.join(', ') +
input.data.join(', ') +
` Expected: ${input.result}` + // configured
` Recieved: Multiple: ${v1}, Single: ${v2}`;
expect(msg).toEqual(null);

@ -1,7 +1,7 @@
// Libraries
import isNumber from 'lodash/isNumber';
import { DataFrame, NullValueMode } from '../types';
import { NullValueMode, Field } from '../types';
import { Registry, RegistryItem } from './registry';
export enum ReducerID {
@ -33,7 +33,7 @@ export interface FieldCalcs {
}
// Internal function
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
type FieldReducer = (field: Field, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
export interface FieldReducerInfo extends RegistryItem {
// Internal details
@ -43,52 +43,76 @@ export interface FieldReducerInfo extends RegistryItem {
}
interface ReduceFieldOptions {
series: DataFrame;
fieldIndex: number;
field: Field;
reducers: string[]; // The stats to calculate
nullValueMode?: NullValueMode;
}
/**
* @returns an object with a key for each selected stat
*/
export function reduceField(options: ReduceFieldOptions): FieldCalcs {
const { series, fieldIndex, reducers, nullValueMode } = options;
const { field, reducers } = options;
if (!reducers || reducers.length < 1) {
if (!field || !reducers || reducers.length < 1) {
return {};
}
if (field.calcs) {
// Find the values we need to calculate
const missing: string[] = [];
for (const s of reducers) {
if (!field.calcs.hasOwnProperty(s)) {
missing.push(s);
}
}
if (missing.length < 1) {
return {
...field.calcs,
};
}
}
const queue = fieldReducers.list(reducers);
// Return early for empty series
// This lets the concrete implementations assume at least one row
if (!series.rows || series.rows.length < 1) {
const calcs = {} as FieldCalcs;
const data = field.values;
if (data.length < 1) {
const calcs = { ...field.calcs } as FieldCalcs;
for (const reducer of queue) {
calcs[reducer.id] = reducer.emptyInputResult !== null ? reducer.emptyInputResult : null;
}
return calcs;
return (field.calcs = calcs);
}
const { nullValueMode } = field.config;
const ignoreNulls = nullValueMode === NullValueMode.Ignore;
const nullAsZero = nullValueMode === NullValueMode.AsZero;
// Avoid calculating all the standard stats if possible
if (queue.length === 1 && queue[0].reduce) {
return queue[0].reduce(series, fieldIndex, ignoreNulls, nullAsZero);
const values = queue[0].reduce(field, ignoreNulls, nullAsZero);
field.calcs = {
...field.calcs,
...values,
};
return values;
}
// For now everything can use the standard stats
let values = doStandardCalcs(series, fieldIndex, ignoreNulls, nullAsZero);
let values = doStandardCalcs(field, ignoreNulls, nullAsZero);
for (const reducer of queue) {
if (!values.hasOwnProperty(reducer.id) && reducer.reduce) {
values = {
...values,
...reducer.reduce(series, fieldIndex, ignoreNulls, nullAsZero),
...reducer.reduce(field, ignoreNulls, nullAsZero),
};
}
}
field.calcs = {
...field.calcs,
...values,
};
return values;
}
@ -200,7 +224,7 @@ export const fieldReducers = new Registry<FieldReducerInfo>(() => [
},
]);
function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
function doStandardCalcs(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const calcs = {
sum: 0,
max: -Number.MAX_VALUE,
@ -223,9 +247,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
// Just used for calculations -- not exposed as a stat
previousDeltaUp: true,
} as FieldCalcs;
const data = field.values;
for (let i = 0; i < data.rows.length; i++) {
let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
if (i === 0) {
calcs.first = currentValue;
}
@ -260,7 +285,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
if (calcs.lastNotNull! > currentValue) {
// counter reset
calcs.previousDeltaUp = false;
if (i === data.rows.length - 1) {
if (i === data.length - 1) {
// reset on last
calcs.delta += currentValue;
}
@ -326,18 +351,14 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
return calcs;
}
function calculateFirst(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { first: data.rows[0][fieldIndex] };
function calculateFirst(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { first: field.values.get(0) };
}
function calculateFirstNotNull(
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean
): FieldCalcs {
for (let idx = 0; idx < data.rows.length; idx++) {
const v = data.rows[idx][fieldIndex];
function calculateFirstNotNull(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
for (let idx = 0; idx < data.length; idx++) {
const v = data.get(idx);
if (v != null) {
return { firstNotNull: v };
}
@ -345,19 +366,16 @@ function calculateFirstNotNull(
return { firstNotNull: undefined };
}
function calculateLast(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { last: data.rows[data.rows.length - 1][fieldIndex] };
function calculateLast(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
return { last: data.get(data.length - 1) };
}
function calculateLastNotNull(
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean
): FieldCalcs {
let idx = data.rows.length - 1;
function calculateLastNotNull(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
let idx = data.length - 1;
while (idx >= 0) {
const v = data.rows[idx--][fieldIndex];
const v = data.get(idx--);
if (v != null) {
return { lastNotNull: v };
}
@ -365,17 +383,13 @@ function calculateLastNotNull(
return { lastNotNull: undefined };
}
function calculateChangeCount(
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean
): FieldCalcs {
function calculateChangeCount(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
let count = 0;
let first = true;
let last: any = null;
for (let i = 0; i < data.rows.length; i++) {
let currentValue = data.rows[i][fieldIndex];
for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
if (currentValue === null) {
if (ignoreNulls) {
continue;
@ -394,15 +408,11 @@ function calculateChangeCount(
return { changeCount: count };
}
function calculateDistinctCount(
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean
): FieldCalcs {
function calculateDistinctCount(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
const distinct = new Set<any>();
for (let i = 0; i < data.rows.length; i++) {
let currentValue = data.rows[i][fieldIndex];
for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
if (currentValue === null) {
if (ignoreNulls) {
continue;

@ -8,9 +8,11 @@ export * from './logs';
export * from './labels';
export * from './labels';
export * from './object';
export * from './fieldCache';
export * from './moment_wrapper';
export * from './thresholds';
export * from './dataFrameHelper';
export * from './dataFrameView';
export * from './vector';
export { getMappedValue } from './valueMappings';

@ -1,5 +1,6 @@
import { LogLevel } from '../types/logs';
import { DataFrame, FieldType } from '../types/data';
import { DataFrame, FieldType } from '../types/index';
import { ArrayVector } from './vector';
/**
* Returns the log level of a log line.
@ -33,12 +34,23 @@ export function getLogLevelFromKey(key: string): LogLevel {
}
/**
 * Returns a copy of the series with an extra 'LogLevel' string field,
 * computed by running getLogLevel over the line field at `lineIndex`.
 */
export function addLogLevelToSeries(series: DataFrame, lineIndex: number): DataFrame {
  const levels = new ArrayVector<LogLevel>();
  const lines = series.fields[lineIndex];
  for (let i = 0; i < lines.values.length; i++) {
    // BUG FIX: read the i-th line; the previous code read the line at
    // `lineIndex` on every iteration, giving every row the same level
    const line = lines.values.get(i);
    levels.buffer.push(getLogLevel(line));
  }
  return {
    ...series, // Keeps Tags, RefID etc
    fields: [
      ...series.fields,
      {
        name: 'LogLevel',
        type: FieldType.string,
        values: levels,
        config: {},
      },
    ],
  };
}

@ -5,9 +5,11 @@ import {
toDataFrame,
guessFieldTypes,
guessFieldTypeFromValue,
sortDataFrame,
} from './processDataFrame';
import { FieldType, TimeSeries, DataFrame, TableData } from '../types/data';
import { FieldType, TimeSeries, TableData, DataFrameDTO } from '../types/index';
import { dateTime } from './moment_wrapper';
import { DataFrameHelper } from './dataFrameHelper';
describe('toDataFrame', () => {
it('converts timeseries to series', () => {
@ -17,7 +19,15 @@ describe('toDataFrame', () => {
};
let series = toDataFrame(input1);
expect(series.fields[0].name).toBe(input1.target);
expect(series.rows).toBe(input1.datapoints);
const v0 = series.fields[0].values;
const v1 = series.fields[1].values;
expect(v0.length).toEqual(2);
expect(v1.length).toEqual(2);
expect(v0.get(0)).toEqual(100);
expect(v0.get(1)).toEqual(200);
expect(v1.get(0)).toEqual(1);
expect(v1.get(1)).toEqual(2);
// Should fill a default name if target is empty
const input2 = {
@ -39,12 +49,23 @@ describe('toDataFrame', () => {
});
it('keeps dataFrame unchanged', () => {
const input = {
fields: [{ text: 'A' }, { text: 'B' }, { text: 'C' }],
const input = toDataFrame({
datapoints: [[100, 1], [200, 2]],
});
expect(input.length).toEqual(2);
// If the object is already a DataFrame, it should not change
const again = toDataFrame(input);
expect(again).toBe(input);
});
it('migrate from 6.3 style rows', () => {
const oldDataFrame = {
fields: [{ name: 'A' }, { name: 'B' }, { name: 'C' }],
rows: [[100, 'A', 1], [200, 'B', 2], [300, 'C', 3]],
};
const series = toDataFrame(input);
expect(series).toBe(input);
const data = toDataFrame(oldDataFrame);
expect(data.length).toBe(oldDataFrame.rows.length);
});
it('Guess Colum Types from value', () => {
@ -68,14 +89,18 @@ describe('toDataFrame', () => {
});
it('Guess Colum Types from series', () => {
const series = {
fields: [{ name: 'A (number)' }, { name: 'B (strings)' }, { name: 'C (nulls)' }, { name: 'Time' }],
rows: [[123, null, null, '2000'], [null, 'Hello', null, 'XXX']],
};
const series = new DataFrameHelper({
fields: [
{ name: 'A (number)', values: [123, null] },
{ name: 'B (strings)', values: [null, 'Hello'] },
{ name: 'C (nulls)', values: [null, null] },
{ name: 'Time', values: ['2000', 1967] },
],
});
const norm = guessFieldTypes(series);
expect(norm.fields[0].type).toBe(FieldType.number);
expect(norm.fields[1].type).toBe(FieldType.string);
expect(norm.fields[2].type).toBeUndefined();
expect(norm.fields[2].type).toBe(FieldType.other);
expect(norm.fields[3].type).toBe(FieldType.time); // based on name
});
});
@ -103,6 +128,7 @@ describe('SerisData backwards compatibility', () => {
const series = toDataFrame(table);
expect(isTableData(table)).toBeTruthy();
expect(isDataFrame(series)).toBeTruthy();
expect(series.fields[0].config.unit).toEqual('ms');
const roundtrip = toLegacyResponseData(series) as TimeSeries;
expect(isTableData(roundtrip)).toBeTruthy();
@ -110,23 +136,46 @@ describe('SerisData backwards compatibility', () => {
});
it('converts DataFrame to TableData to series and back again', () => {
const series: DataFrame = {
const json: DataFrameDTO = {
refId: 'Z',
meta: {
somethign: 8,
},
fields: [
{ name: 'T', type: FieldType.time }, // first
{ name: 'N', type: FieldType.number, filterable: true },
{ name: 'S', type: FieldType.string, filterable: true },
{ name: 'T', type: FieldType.time, values: [1, 2, 3] },
{ name: 'N', type: FieldType.number, config: { filterable: true }, values: [100, 200, 300] },
{ name: 'S', type: FieldType.string, config: { filterable: true }, values: ['1', '2', '3'] },
],
rows: [[1, 100, '1'], [2, 200, '2'], [3, 300, '3']],
};
const series = toDataFrame(json);
const table = toLegacyResponseData(series) as TableData;
expect(table.meta).toBe(series.meta);
expect(table.refId).toBe(series.refId);
expect(table.meta).toEqual(series.meta);
const names = table.columns.map(c => c.text);
expect(names).toEqual(['T', 'N', 'S']);
});
});
describe('sorted DataFrame', () => {
const frame = toDataFrame({
fields: [
{ name: 'fist', type: FieldType.time, values: [1, 2, 3] },
{ name: 'second', type: FieldType.string, values: ['a', 'b', 'c'] },
{ name: 'third', type: FieldType.number, values: [2000, 3000, 1000] },
],
});
it('Should sort numbers', () => {
const sorted = sortDataFrame(frame, 0, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toJSON()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toJSON()).toEqual(['c', 'b', 'a']);
});
it('Should sort strings', () => {
const sorted = sortDataFrame(frame, 1, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toJSON()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toJSON()).toEqual(['c', 'b', 'a']);
});
});

@ -4,61 +4,122 @@ import isString from 'lodash/isString';
import isBoolean from 'lodash/isBoolean';
// Types
import { DataFrame, Field, TimeSeries, FieldType, TableData, Column, GraphSeriesXY } from '../types/index';
import {
DataFrame,
Field,
FieldConfig,
TimeSeries,
FieldType,
TableData,
Column,
GraphSeriesXY,
TimeSeriesValue,
FieldDTO,
DataFrameDTO,
} from '../types/index';
import { isDateTime } from './moment_wrapper';
import { ArrayVector, SortedVector } from './vector';
import { DataFrameHelper } from './dataFrameHelper';
function convertTableToDataFrame(table: TableData): DataFrame {
const fields = table.columns.map(c => {
const { text, ...disp } = c;
return {
name: text, // rename 'text' to the 'name' field
config: (disp || {}) as FieldConfig,
values: new ArrayVector(),
type: FieldType.other,
};
});
// Fill in the field values
for (const row of table.rows) {
for (let i = 0; i < fields.length; i++) {
fields[i].values.buffer.push(row[i]);
}
}
for (const f of fields) {
const t = guessFieldTypeForField(f);
if (t) {
f.type = t;
}
}
return {
// rename the 'text' to 'name' field
fields: table.columns.map(c => {
const { text, ...field } = c;
const f = field as Field;
f.name = text;
return f;
}),
rows: table.rows,
fields,
refId: table.refId,
meta: table.meta,
name: table.name,
length: fields[0].values.length,
};
}
function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
return {
name: timeSeries.target,
fields: [
{
name: timeSeries.target || 'Value',
type: FieldType.number,
const fields = [
{
name: timeSeries.target || 'Value',
type: FieldType.number,
config: {
unit: timeSeries.unit,
},
{
name: 'Time',
type: FieldType.time,
values: new ArrayVector<TimeSeriesValue>(),
},
{
name: 'Time',
type: FieldType.time,
config: {
unit: 'dateTimeAsIso',
},
],
rows: timeSeries.datapoints,
values: new ArrayVector<number>(),
},
];
for (const point of timeSeries.datapoints) {
fields[0].values.buffer.push(point[0]);
fields[1].values.buffer.push(point[1]);
}
return {
name: timeSeries.target,
labels: timeSeries.tags,
refId: timeSeries.refId,
meta: timeSeries.meta,
fields,
length: timeSeries.datapoints.length,
};
}
/**
* This is added temporarily while we convert the LogsModel
* to DataFrame. See: https://github.com/grafana/grafana/issues/18528
*/
function convertGraphSeriesToDataFrame(graphSeries: GraphSeriesXY): DataFrame {
const x = new ArrayVector();
const y = new ArrayVector();
for (let i = 0; i < graphSeries.data.length; i++) {
const row = graphSeries.data[i];
x.buffer.push(row[0]);
y.buffer.push(row[1]);
}
return {
name: graphSeries.label,
fields: [
{
name: graphSeries.label || 'Value',
type: FieldType.number,
config: {},
values: x,
},
{
name: 'Time',
type: FieldType.time,
unit: 'dateTimeAsIso',
config: {
unit: 'dateTimeAsIso',
},
values: y,
},
],
rows: graphSeries.data,
length: x.buffer.length,
};
}
@ -102,20 +163,18 @@ export function guessFieldTypeFromValue(v: any): FieldType {
/**
* Looks at the data to guess the column type. This ignores any existing setting
*/
export function guessFieldTypeFromSeries(series: DataFrame, index: number): FieldType | undefined {
const column = series.fields[index];
export function guessFieldTypeForField(field: Field): FieldType | undefined {
// 1. Use the column name to guess
if (column.name) {
const name = column.name.toLowerCase();
if (field.name) {
const name = field.name.toLowerCase();
if (name === 'date' || name === 'time') {
return FieldType.time;
}
}
// 2. Check the first non-null value
for (let i = 0; i < series.rows.length; i++) {
const v = series.rows[i][index];
for (let i = 0; i < field.values.length; i++) {
const v = field.values.get(i);
if (v !== null) {
return guessFieldTypeFromValue(v);
}
@ -135,14 +194,14 @@ export const guessFieldTypes = (series: DataFrame): DataFrame => {
// Something is missing a type; return a modified copy
return {
...series,
fields: series.fields.map((field, index) => {
if (field.type) {
fields: series.fields.map(field => {
if (field.type && field.type !== FieldType.other) {
return field;
}
// Replace it with a calculated version
// Calculate a reasonable schema value
return {
...field,
type: guessFieldTypeFromSeries(series, index),
type: guessFieldTypeForField(field) || FieldType.other,
};
}),
};
@ -158,7 +217,22 @@ export const isDataFrame = (data: any): data is DataFrame => data && data.hasOwn
export const toDataFrame = (data: any): DataFrame => {
if (data.hasOwnProperty('fields')) {
return data as DataFrame;
// @deprecated -- remove in 6.5
if (data.hasOwnProperty('rows')) {
const v = new DataFrameHelper(data as DataFrameDTO);
const rows = data.rows as any[][];
for (let i = 0; i < rows.length; i++) {
v.appendRow(rows[i]);
}
// TODO: deprecation warning
return v;
}
// DataFrameDTO does not have length
if (data.hasOwnProperty('length')) {
return data as DataFrame;
}
return new DataFrameHelper(data as DataFrameDTO);
}
if (data.hasOwnProperty('datapoints')) {
return convertTimeSeriesToDataFrame(data);
@ -174,52 +248,129 @@ export const toDataFrame = (data: any): DataFrame => {
throw new Error('Unsupported data format');
};
export const toLegacyResponseData = (series: DataFrame): TimeSeries | TableData => {
const { fields, rows } = series;
export const toLegacyResponseData = (frame: DataFrame): TimeSeries | TableData => {
const { fields } = frame;
const length = fields[0].values.length;
const rows: any[][] = [];
for (let i = 0; i < length; i++) {
const row: any[] = [];
for (let j = 0; j < fields.length; j++) {
row.push(fields[j].values.get(i));
}
rows.push(row);
}
if (fields.length === 2) {
const type = guessFieldTypeFromSeries(series, 1);
let type = fields[1].type;
if (!type) {
type = guessFieldTypeForField(fields[1]) || FieldType.other;
}
if (type === FieldType.time) {
return {
alias: fields[0].name || series.name,
target: fields[0].name || series.name,
alias: fields[0].name || frame.name,
target: fields[0].name || frame.name,
datapoints: rows,
unit: fields[0].unit,
refId: series.refId,
meta: series.meta,
unit: fields[0].config ? fields[0].config.unit : undefined,
refId: frame.refId,
meta: frame.meta,
} as TimeSeries;
}
}
return {
columns: fields.map(f => {
const { name, ...column } = f;
(column as Column).text = name;
return column as Column;
const { name, config } = f;
if (config) {
// keep unit etc
const { ...column } = config;
(column as Column).text = name;
return column as Column;
}
return { text: name };
}),
refId: series.refId,
meta: series.meta,
refId: frame.refId,
meta: frame.meta,
rows,
};
};
export function sortDataFrame(data: DataFrame, sortIndex?: number, reverse = false): DataFrame {
if (isNumber(sortIndex)) {
const copy = {
...data,
rows: [...data.rows].sort((a, b) => {
a = a[sortIndex];
b = b[sortIndex];
// Sort null or undefined separately from comparable values
return +(a == null) - +(b == null) || +(a > b) || -(a < b);
}),
const field = data.fields[sortIndex!];
if (!field) {
return data;
}
// Natural order
const index: number[] = [];
for (let i = 0; i < data.length; i++) {
index.push(i);
}
const values = field.values;
// Numeric Comparison
let compare = (a: number, b: number) => {
const vA = values.get(a);
const vB = values.get(b);
return vA - vB; // works for numbers!
};
// String Comparison
if (field.type === FieldType.string) {
compare = (a: number, b: number) => {
const vA: string = values.get(a);
const vB: string = values.get(b);
return vA.localeCompare(vB);
};
}
if (reverse) {
copy.rows.reverse();
}
// Run the sort function
index.sort(compare);
if (reverse) {
index.reverse();
}
// Return a copy that maps sorted values
return {
...data,
fields: data.fields.map(f => {
return {
...f,
values: new SortedVector(f.values, index),
};
}),
};
}
return copy;
/**
 * Wrapper to get an array from each field value
 *
 * Collects the value at the given row index from every field and returns a
 * legacy-style row array (one entry per field, in field order).
 */
export function getDataFrameRow(data: DataFrame, row: number): any[] {
  const values: any[] = [];
  for (const field of data.fields) {
    values.push(field.values.get(row));
  }
  return values;
}
/**
 * Returns a copy that does not include functions
 *
 * Each field's Vector is flattened through toJSON() so the result is a
 * plain DataFrameDTO suitable for serialization or transport.
 */
export function toDataFrameDTO(data: DataFrame): DataFrameDTO {
  const fields: FieldDTO[] = [];
  for (const f of data.fields) {
    fields.push({
      name: f.name,
      type: f.type,
      config: f.config,
      values: f.values.toJSON(),
    });
  }
  return {
    fields,
    refId: data.refId,
    meta: data.meta,
    name: data.name,
    labels: data.labels,
  };
}

@ -0,0 +1,43 @@
import { ConstantVector, ScaledVector, ArrayVector, CircularVector } from './vector';
// Tests for the simple read-through Vector implementations.
describe('Check Proxy Vector', () => {
  it('should support constant values', () => {
    const value = 3.5;
    const v = new ConstantVector(value, 7);
    expect(v.length).toEqual(7);

    expect(v.get(0)).toEqual(value);
    expect(v.get(1)).toEqual(value);

    // Now check all of them
    // NOTE: this intentionally reads past length (7); ConstantVector.get()
    // ignores the index, so out-of-range reads still return the value.
    for (let i = 0; i < 10; i++) {
      expect(v.get(i)).toEqual(value);
    }
  });

  it('should support multiply operations', () => {
    const source = new ArrayVector([1, 2, 3, 4]);
    const scale = 2.456;
    const v = new ScaledVector(source, scale);
    expect(v.length).toEqual(source.length);
    // expect(v.push(10)).toEqual(source.length); // not implemented
    // NOTE: also reads past the 4-element source; both sides of the
    // comparison compute from the same out-of-range source value.
    for (let i = 0; i < 10; i++) {
      expect(v.get(i)).toEqual(source.get(i) * scale);
    }
  });
});
// Tests for CircularVector: append() writes at the logical head, so the
// JSON view always lists the newest value first.
describe('Check Circular Vector', () => {
  it('should support constant values', () => {
    const buffer = [3, 2, 1, 0];
    const v = new CircularVector(buffer);
    expect(v.length).toEqual(4);
    expect(v.toJSON()).toEqual([3, 2, 1, 0]);

    // Each append overwrites the oldest slot and becomes index 0
    v.append(4);
    expect(v.toJSON()).toEqual([4, 3, 2, 1]);

    v.append(5);
    expect(v.toJSON()).toEqual([5, 4, 3, 2]);
  });
});

@ -0,0 +1,133 @@
import { Vector } from '../types/dataFrame';
/**
 * Copy every element of a Vector into a plain mutable array.
 */
export function vectorToArray<T>(v: Vector<T>): T[] {
  const size = v.length;
  const arr: T[] = new Array(size);
  for (let i = 0; i < size; i++) {
    arr[i] = v.get(i);
  }
  return arr;
}
/**
 * Vector backed directly by a plain array. The buffer is public so callers
 * may mutate it in place (e.g. push new values).
 */
export class ArrayVector<T = any> implements Vector<T> {
  buffer: T[];

  constructor(buffer?: T[]) {
    // Default to an empty buffer when none is supplied
    this.buffer = buffer ?? [];
  }

  get length() {
    return this.buffer.length;
  }

  get(index: number): T {
    return this.buffer[index];
  }

  toArray(): T[] {
    // Returns the live buffer, not a copy
    return this.buffer;
  }

  toJSON(): T[] {
    return this.buffer;
  }
}
/**
 * Vector that reports the same value for every index.
 * NOTE: get() ignores the index entirely, so reads past `length` also
 * return the value.
 */
export class ConstantVector<T = any> implements Vector<T> {
  constructor(private value: T, private len: number) {}

  get length() {
    return this.len;
  }

  get(index: number): T {
    return this.value;
  }

  toArray(): T[] {
    // Materialize the constant into a dense array
    return new Array<T>(this.len).fill(this.value);
  }

  toJSON(): T[] {
    return this.toArray();
  }
}
/**
 * Read-through Vector that multiplies each source value by a fixed scale
 * factor. Values are computed lazily on every get(); the source is never
 * copied or mutated.
 */
export class ScaledVector implements Vector<number> {
  constructor(private source: Vector<number>, private scale: number) {}

  get length(): number {
    return this.source.length;
  }

  get(index: number): number {
    return this.scale * this.source.get(index);
  }

  toArray(): number[] {
    return vectorToArray(this);
  }

  toJSON(): number[] {
    return vectorToArray(this);
  }
}
/**
 * Fixed-capacity Vector over a pre-allocated buffer. append() overwrites
 * the oldest slot and becomes the new logical head, so index 0 is always
 * the most recently appended value.
 */
export class CircularVector<T = any> implements Vector<T> {
  buffer: T[];
  index: number;
  length: number;

  constructor(buffer: T[]) {
    this.buffer = buffer;
    this.length = buffer.length;
    this.index = 0;
  }

  append(value: T) {
    // Step the head back one slot (wrapping around) and overwrite it
    const next = (this.index + this.length - 1) % this.length;
    this.buffer[next] = value;
    this.index = next;
  }

  get(index: number): T {
    // Translate the logical index through the current head position
    return this.buffer[(index + this.index) % this.length];
  }

  toArray(): T[] {
    return vectorToArray(this);
  }

  toJSON(): T[] {
    return vectorToArray(this);
  }
}
/**
 * Values are returned in the order defined by the input parameter
 *
 * Read-through Vector that remaps every index via a precomputed order
 * array; the underlying source is never copied or mutated.
 */
export class SortedVector<T = any> implements Vector<T> {
  constructor(private source: Vector<T>, private order: number[]) {}

  get length(): number {
    return this.source.length;
  }

  get(index: number): T {
    const mapped = this.order[index];
    return this.source.get(mapped);
  }

  toArray(): T[] {
    return vectorToArray(this);
  }

  toJSON(): T[] {
    return vectorToArray(this);
  }
}

@ -9,7 +9,7 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';
// Types
import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
import Select from '../Select/Select';
import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';
import { ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue, FieldConfig } from '@grafana/data';
const showOptions: Array<SelectableValue<boolean>> = [
{
@ -40,7 +40,7 @@ export class FieldDisplayEditor extends PureComponent<Props> {
this.props.onChange({ ...this.props.value, calcs });
};
onDefaultsChange = (value: Partial<Field>) => {
onDefaultsChange = (value: FieldConfig) => {
this.props.onChange({ ...this.props.value, defaults: value });
};

@ -7,7 +7,7 @@ import { FormLabel } from '../FormLabel/FormLabel';
import { UnitPicker } from '../UnitPicker/UnitPicker';
// Types
import { toIntegerOrUndefined, Field, SelectableValue, toFloatOrUndefined, toNumberString } from '@grafana/data';
import { toIntegerOrUndefined, SelectableValue, FieldConfig, toFloatOrUndefined, toNumberString } from '@grafana/data';
import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';
@ -15,8 +15,8 @@ const labelWidth = 6;
export interface Props {
showMinMax: boolean;
value: Partial<Field>;
onChange: (value: Partial<Field>, event?: React.SyntheticEvent<HTMLElement>) => void;
value: FieldConfig;
onChange: (value: FieldConfig, event?: React.SyntheticEvent<HTMLElement>) => void;
}
export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMinMax }) => {

@ -5,7 +5,7 @@ import { getTheme } from '../../themes';
import { migratedTestTable, migratedTestStyles, simpleTable } from './examples';
import { ScopedVars, GrafanaThemeType } from '../../types/index';
import { DataFrame } from '@grafana/data';
import { DataFrame, FieldType, ArrayVector } from '@grafana/data';
import { withFullSizeStory } from '../../utils/storybook/withFullSizeStory';
import { number, boolean } from '@storybook/addon-knobs';
@ -33,14 +33,19 @@ export function columnIndexToLeter(column: number) {
/**
 * Build a throw-away DataFrame for stories: columnCount string fields named
 * by columnIndexToLeter (A, B, C, ...), each holding rowCount values like
 * "A1", "A2", ...
 */
export function makeDummyTable(columnCount: number, rowCount: number): DataFrame {
  return {
    fields: Array.from(new Array(columnCount), (x, i) => {
      const colId = columnIndexToLeter(i);
      const values = new ArrayVector<string>();
      // Fill the column with "<letter><1-based row>" values
      for (let row = 0; row < rowCount; row++) {
        values.buffer.push(colId + (row + 1));
      }
      return {
        name: colId,
        type: FieldType.string,
        config: {},
        values,
      };
    }),
    length: rowCount,
  };
}

@ -12,7 +12,7 @@ import {
} from 'react-virtualized';
import { Themeable } from '../../types/theme';
import { stringToJsRegex, DataFrame, sortDataFrame } from '@grafana/data';
import { stringToJsRegex, DataFrame, sortDataFrame, getDataFrameRow, ArrayVector, FieldType } from '@grafana/data';
import {
TableCellBuilder,
@ -107,7 +107,7 @@ export class Table extends Component<Props, State> {
if (dataChanged || rotate !== prevProps.rotate) {
const { width, minColumnWidth } = this.props;
this.rotateWidth = Math.max(width / data.rows.length, minColumnWidth);
this.rotateWidth = Math.max(width / data.length, minColumnWidth);
}
// Update the data when data or sort changes
@ -146,7 +146,7 @@ export class Table extends Component<Props, State> {
return {
header: title,
width: columnWidth,
builder: getCellBuilder(col, style, this.props),
builder: getCellBuilder(col.config || {}, style, this.props),
};
});
}
@ -185,9 +185,9 @@ export class Table extends Component<Props, State> {
if (row < 0) {
this.doSort(column);
} else {
const values = this.state.data.rows[row];
const value = values[column];
console.log('CLICK', value, row);
const field = this.state.data.fields[columnIndex];
const value = field.values.get(rowIndex);
console.log('CLICK', value, field.name);
}
};
@ -201,6 +201,9 @@ export class Table extends Component<Props, State> {
if (!col) {
col = {
name: '??' + columnIndex + '???',
config: {},
values: new ArrayVector(),
type: FieldType.other,
};
}
@ -226,7 +229,7 @@ export class Table extends Component<Props, State> {
const { data } = this.state;
const isHeader = row < 0;
const rowData = isHeader ? data.fields : data.rows[row];
const rowData = isHeader ? data.fields : getDataFrameRow(data, row); // TODO! improve
const value = rowData ? rowData[column] : '';
const builder = isHeader ? this.headerBuilder : this.getTableCellBuilder(column);
@ -258,7 +261,7 @@ export class Table extends Component<Props, State> {
}
let columnCount = data.fields.length;
let rowCount = data.rows.length + (showHeader ? 1 : 0);
let rowCount = data.length + (showHeader ? 1 : 0);
let fixedColumnCount = Math.min(fixedColumns, columnCount);
let fixedRowCount = showHeader && fixedHeader ? 1 : 0;

@ -6,7 +6,7 @@ import { Table, Props } from './Table';
import { ValueFormatter, getValueFormat, getColorFromHexRgbOrName } from '../../utils/index';
import { GrafanaTheme } from '../../types/theme';
import { InterpolateFunction } from '../../types/panel';
import { Field, dateTime } from '@grafana/data';
import { Field, dateTime, FieldConfig } from '@grafana/data';
export interface TableCellBuilderOptions {
value: any;
@ -73,7 +73,7 @@ export interface ColumnStyle {
// private replaceVariables: InterpolateFunction,
// private fmt?:ValueFormatter) {
export function getCellBuilder(schema: Field, style: ColumnStyle | null, props: Props): TableCellBuilder {
export function getCellBuilder(schema: FieldConfig, style: ColumnStyle | null, props: Props): TableCellBuilder {
if (!style) {
return simpleCellBuilder;
}
@ -153,7 +153,7 @@ class CellBuilderWithStyle {
private mapper: ValueMapper,
private style: ColumnStyle,
private theme: GrafanaTheme,
private column: Field,
private schema: FieldConfig,
private replaceVariables: InterpolateFunction,
private fmt?: ValueFormatter
) {}
@ -244,7 +244,7 @@ class CellBuilderWithStyle {
}
// ??? I don't think this will still work!
if (this.column.filterable) {
if (this.schema.filterable) {
cellClasses.push('table-panel-cell-filterable');
value = (
<>

@ -71,10 +71,10 @@ export class TableInputCSV extends React.PureComponent<Props, State> {
/>
{data && (
<footer>
{data.map((series, index) => {
{data.map((frame, index) => {
return (
<span key={index}>
Rows:{series.rows.length}, Columns:{series.fields.length} &nbsp;
Rows:{frame.length}, Columns:{frame.fields.length} &nbsp;
<i className="fa fa-check-circle" />
</span>
);

@ -1,12 +1,12 @@
import { DataFrame } from '@grafana/data';
import { toDataFrame } from '@grafana/data';
import { ColumnStyle } from './TableCellBuilder';
import { getColorDefinitionByName } from '../../utils/namedColorsPalette';
const SemiDarkOrange = getColorDefinitionByName('semi-dark-orange');
export const migratedTestTable = {
export const migratedTestTable = toDataFrame({
type: 'table',
fields: [
columns: [
{ name: 'Time' },
{ name: 'Value' },
{ name: 'Colored' },
@ -22,7 +22,7 @@ export const migratedTestTable = {
{ name: 'RangeMappingColored' },
],
rows: [[1388556366666, 1230, 40, undefined, '', '', 'my.host.com', 'host1', ['value1', 'value2'], 1, 2, 1, 2]],
} as DataFrame;
});
export const migratedTestStyles: ColumnStyle[] = [
{

@ -1,5 +1,14 @@
import { ComponentType, ComponentClass } from 'react';
import { TimeRange, RawTimeRange, TableData, TimeSeries, DataFrame, LogRowModel, LoadingState } from '@grafana/data';
import {
TimeRange,
RawTimeRange,
TableData,
TimeSeries,
DataFrame,
LogRowModel,
LoadingState,
DataFrameDTO,
} from '@grafana/data';
import { PluginMeta, GrafanaPlugin } from './plugin';
import { PanelData } from './panel';
@ -286,7 +295,7 @@ export interface ExploreStartPageProps {
*/
export type LegacyResponseData = TimeSeries | TableData | any;
export type DataQueryResponseData = DataFrame | LegacyResponseData;
export type DataQueryResponseData = DataFrameDTO | LegacyResponseData;
export type DataStreamObserver = (event: DataStreamState) => void;

@ -1,6 +1,6 @@
import { MappingType, ValueMapping, DisplayValue } from '@grafana/data';
import { MappingType, ValueMapping, DisplayProcessor, DisplayValue } from '@grafana/data';
import { getDisplayProcessor, getColorFromThreshold, DisplayProcessor, getDecimalsForValue } from './displayValue';
import { getDisplayProcessor, getColorFromThreshold, getDecimalsForValue } from './displayValue';
function assertSame(input: any, processors: DisplayProcessor[], match: DisplayValue) {
processors.forEach(processor => {

@ -1,6 +1,14 @@
// Libraries
import _ from 'lodash';
import { Threshold, getMappedValue, Field, DecimalInfo, DisplayValue, DecimalCount } from '@grafana/data';
import {
Threshold,
getMappedValue,
FieldConfig,
DisplayProcessor,
DecimalInfo,
DisplayValue,
DecimalCount,
} from '@grafana/data';
// Utils
import { getValueFormat } from './valueFormats/valueFormats';
@ -9,13 +17,8 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';
// Types
import { GrafanaTheme, GrafanaThemeType } from '../types';
export type DisplayProcessor = (value: any) => DisplayValue;
export interface DisplayValueOptions {
field?: Partial<Field>;
// Alternative to empty string
noValue?: string;
field?: FieldConfig;
// Context
isUtc?: boolean;
@ -62,7 +65,11 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
}
if (!text) {
text = options.noValue ? options.noValue : '';
if (field && field.noValue) {
text = field.noValue;
} else {
text = ''; // No data?
}
}
return { text, numeric, color };
};

@ -1,5 +1,5 @@
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
import { FieldType, ReducerID, Threshold } from '@grafana/data';
import { ReducerID, Threshold, DataFrameHelper } from '@grafana/data';
import { GrafanaThemeType } from '../types/theme';
import { getTheme } from '../themes/index';
@ -34,19 +34,14 @@ describe('FieldDisplay', () => {
// Simple test dataset
const options: GetFieldDisplayValuesOptions = {
data: [
{
new DataFrameHelper({
name: 'Series Name',
fields: [
{ name: 'Field 1', type: FieldType.string },
{ name: 'Field 2', type: FieldType.number },
{ name: 'Field 3', type: FieldType.number },
{ name: 'Field 1', values: ['a', 'b', 'c'] },
{ name: 'Field 2', values: [1, 3, 5] },
{ name: 'Field 3', values: [2, 4, 6] },
],
rows: [
['a', 1, 2], // 0
['b', 3, 4], // 1
['c', 5, 6], // 2
],
},
}),
],
replaceVariables: (value: string) => {
return value; // Return it unchanged
@ -140,7 +135,7 @@ describe('FieldDisplay', () => {
{
name: 'No data',
fields: [],
rows: [],
length: 0,
},
],
replaceVariables: (value: string) => {

@ -2,9 +2,8 @@ import {
ReducerID,
reduceField,
FieldType,
NullValueMode,
DataFrame,
Field,
FieldConfig,
DisplayValue,
GraphSeriesValue,
} from '@grafana/data';
@ -21,8 +20,8 @@ export interface FieldDisplayOptions {
limit?: number; // if showing all values limit
calcs: string[]; // when !values, pick one value for the whole field
defaults: Partial<Field>; // Use these values unless otherwise stated
override: Partial<Field>; // Set these values regardless of the source
defaults: FieldConfig; // Use these values unless otherwise stated
override: FieldConfig; // Set these values regardless of the source
}
export const VAR_SERIES_NAME = '__series_name';
@ -60,7 +59,8 @@ function getTitleTemplate(title: string | undefined, stats: string[], data?: Dat
}
export interface FieldDisplay {
field: Field;
name: string; // NOT title!
field: FieldConfig;
display: DisplayValue;
sparkline?: GraphSeriesValue[][];
}
@ -109,45 +109,50 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
}
for (let i = 0; i < series.fields.length && !hitLimit; i++) {
const field = getFieldProperties(defaults, series.fields[i], override);
const field = series.fields[i];
// Show all number fields
if (field.type !== FieldType.number) {
continue;
}
const config = getFieldProperties(defaults, field.config || {}, override);
if (!field.name) {
field.name = `Field[${s}]`; // it is a copy, so safe to edit
let name = field.name;
if (!name) {
name = `Field[${s}]`;
}
scopedVars[VAR_FIELD_NAME] = { text: 'Field', value: field.name };
scopedVars[VAR_FIELD_NAME] = { text: 'Field', value: name };
const display = getDisplayProcessor({
field,
field: config,
theme: options.theme,
});
const title = field.title ? field.title : defaultTitle;
const title = config.title ? config.title : defaultTitle;
// Show all number fields
if (fieldOptions.values) {
const usesCellValues = title.indexOf(VAR_CELL_PREFIX) >= 0;
for (const row of series.rows) {
for (let j = 0; j < field.values.length; j++) {
// Add all the row variables
if (usesCellValues) {
for (let j = 0; j < series.fields.length; j++) {
scopedVars[VAR_CELL_PREFIX + j] = {
value: row[j],
text: toString(row[j]),
for (let k = 0; k < series.fields.length; k++) {
const f = series.fields[k];
const v = f.values.get(j);
scopedVars[VAR_CELL_PREFIX + k] = {
value: v,
text: toString(v),
};
}
}
const displayValue = display(row[i]);
const displayValue = display(field.values.get(j));
displayValue.title = replaceVariables(title, scopedVars);
values.push({
field,
name,
field: config,
display: displayValue,
});
@ -158,10 +163,8 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
}
} else {
const results = reduceField({
series,
fieldIndex: i,
field,
reducers: calcs, // The stats to calculate
nullValueMode: NullValueMode.Null,
});
// Single sparkline for a field
@ -169,10 +172,8 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
timeColumn < 0
? undefined
: getFlotPairs({
rows: series.rows,
xIndex: timeColumn,
yIndex: i,
nullValueMode: NullValueMode.Null,
xField: series.fields[timeColumn],
yField: series.fields[i],
});
for (const calc of calcs) {
@ -180,7 +181,8 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
const displayValue = display(results[calc]);
displayValue.title = replaceVariables(title, scopedVars);
values.push({
field,
name,
field: config,
display: displayValue,
sparkline: points,
});
@ -192,9 +194,9 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
if (values.length === 0) {
values.push({
name: 'No data',
field: {
...defaults,
name: 'No Data',
},
display: {
numeric: 0,
@ -222,7 +224,7 @@ const numericFieldProps: any = {
* For numeric values, only valid numbers will be applied
* for units, 'none' will be skipped
*/
export function applyFieldProperties(field: Field, props?: Partial<Field>): Field {
export function applyFieldProperties(field: FieldConfig, props?: FieldConfig): FieldConfig {
if (!props) {
return field;
}
@ -250,14 +252,11 @@ export function applyFieldProperties(field: Field, props?: Partial<Field>): Fiel
copy[key] = val;
}
}
return copy as Field;
return copy as FieldConfig;
}
type PartialField = Partial<Field>;
export function getFieldProperties(...props: PartialField[]): Field {
let field = props[0] as Field;
export function getFieldProperties(...props: FieldConfig[]): FieldConfig {
let field = props[0] as FieldConfig;
for (let i = 1; i < props.length; i++) {
field = applyFieldProperties(field, props[i]);
}

@ -1,10 +1,19 @@
import { getFlotPairs } from './flotPairs';
import { DataFrameHelper } from '@grafana/data';
describe('getFlotPairs', () => {
const rows = [[1, 100, 'a'], [2, 200, 'b'], [3, 300, 'c']];
const series = new DataFrameHelper({
fields: [
{ name: 'a', values: [1, 2, 3] },
{ name: 'b', values: [100, 200, 300] },
{ name: 'c', values: ['a', 'b', 'c'] },
],
});
it('should get X and y', () => {
const pairs = getFlotPairs({ rows, xIndex: 0, yIndex: 1 });
const pairs = getFlotPairs({
xField: series.fields[0],
yField: series.fields[1],
});
expect(pairs.length).toEqual(3);
expect(pairs[0].length).toEqual(2);
@ -13,7 +22,10 @@ describe('getFlotPairs', () => {
});
it('should work with strings', () => {
const pairs = getFlotPairs({ rows, xIndex: 0, yIndex: 2 });
const pairs = getFlotPairs({
xField: series.fields[0],
yField: series.fields[2],
});
expect(pairs.length).toEqual(3);
expect(pairs[0].length).toEqual(2);

@ -1,22 +1,28 @@
// Types
import { NullValueMode, GraphSeriesValue } from '@grafana/data';
import { NullValueMode, GraphSeriesValue, Field } from '@grafana/data';
export interface FlotPairsOptions {
rows: any[][];
xIndex: number;
yIndex: number;
xField: Field;
yField: Field;
nullValueMode?: NullValueMode;
}
export function getFlotPairs({ rows, xIndex, yIndex, nullValueMode }: FlotPairsOptions): GraphSeriesValue[][] {
export function getFlotPairs({ xField, yField, nullValueMode }: FlotPairsOptions): GraphSeriesValue[][] {
const vX = xField.values;
const vY = yField.values;
const length = vX.length;
if (vY.length !== length) {
throw new Error('Unexpected field length');
}
const ignoreNulls = nullValueMode === NullValueMode.Ignore;
const nullAsZero = nullValueMode === NullValueMode.AsZero;
const pairs: any[][] = [];
for (let i = 0; i < rows.length; i++) {
const x = rows[i][xIndex];
let y = rows[i][yIndex];
for (let i = 0; i < length; i++) {
const x = vX.get(i);
let y = vY.get(i);
if (y === null) {
if (ignoreNulls) {

@ -10,7 +10,6 @@ import {
findCommonLabels,
findUniqueLabels,
getLogLevel,
FieldCache,
FieldType,
getLogLevelFromKey,
LogRowModel,
@ -20,11 +19,13 @@ import {
LogsParser,
LogLabelStatsModel,
LogsDedupStrategy,
DataFrameHelper,
GraphSeriesXY,
LoadingState,
dateTime,
toUtc,
NullValueMode,
toDataFrame,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { hasAnsiCodes } from 'app/core/utils/text';
@ -245,10 +246,11 @@ export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): Grap
return a[1] - b[1];
});
// EEEP: converts GraphSeriesXY to DataFrame and back again!
const data = toDataFrame(series);
const points = getFlotPairs({
rows: series.datapoints,
xIndex: 1,
yIndex: 0,
xField: data.fields[1],
yField: data.fields[0],
nullValueMode: NullValueMode.Null,
});
@ -336,14 +338,56 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
for (let i = 0; i < logSeries.length; i++) {
const series = logSeries[i];
const fieldCache = new FieldCache(series.fields);
const data = new DataFrameHelper(series);
const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
if (Object.keys(uniqueLabels).length > 0) {
hasUniqueLabels = true;
}
for (let j = 0; j < series.rows.length; j++) {
rows.push(processLogSeriesRow(series, fieldCache, j, uniqueLabels));
const timeFieldIndex = data.getFirstFieldOfType(FieldType.time);
const stringField = data.getFirstFieldOfType(FieldType.string);
const logLevelField = data.getFieldByName('level');
let seriesLogLevel: LogLevel | undefined = undefined;
if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
seriesLogLevel = getLogLevelFromKey(series.labels['level']);
}
for (let j = 0; j < data.length; j++) {
const ts = timeFieldIndex.values.get(j);
const time = dateTime(ts);
const timeEpochMs = time.valueOf();
const timeFromNow = time.fromNow();
const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');
const message = stringField.values.get(j);
let logLevel = LogLevel.unknown;
if (logLevelField) {
logLevel = getLogLevelFromKey(logLevelField.values.get(j));
} else if (seriesLogLevel) {
logLevel = seriesLogLevel;
} else {
logLevel = getLogLevel(message);
}
const hasAnsi = hasAnsiCodes(message);
const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
rows.push({
logLevel,
timeFromNow,
timeEpochMs,
timeLocal,
timeUtc,
uniqueLabels,
hasAnsi,
searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message,
labels: series.labels,
timestamp: ts,
});
}
}
@ -373,49 +417,3 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
rows,
};
}
export function processLogSeriesRow(
series: DataFrame,
fieldCache: FieldCache,
rowIndex: number,
uniqueLabels: Labels
): LogRowModel {
const row = series.rows[rowIndex];
const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time).index;
const ts = row[timeFieldIndex];
const stringFieldIndex = fieldCache.getFirstFieldOfType(FieldType.string).index;
const message = row[stringFieldIndex];
const time = dateTime(ts);
const timeEpochMs = time.valueOf();
const timeFromNow = time.fromNow();
const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');
let logLevel = LogLevel.unknown;
const logLevelField = fieldCache.getFieldByName('level');
if (logLevelField) {
logLevel = getLogLevelFromKey(row[logLevelField.index]);
} else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
logLevel = getLogLevelFromKey(series.labels['level']);
} else {
logLevel = getLogLevel(message);
}
const hasAnsi = hasAnsiCodes(message);
const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
return {
logLevel,
timeFromNow,
timeEpochMs,
timeLocal,
timeUtc,
uniqueLabels,
hasAnsi,
searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message,
labels: series.labels,
timestamp: ts,
};
}

@ -1,4 +1,13 @@
import { DataFrame, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/data';
import {
DataFrame,
FieldType,
LogsModel,
LogsMetaKind,
LogsDedupStrategy,
LogLevel,
DataFrameHelper,
toDataFrame,
} from '@grafana/data';
import {
dedupLogRows,
calculateFieldStats,
@ -344,47 +353,46 @@ describe('dataFrameToLogsModel', () => {
it('given series without correct series name should return empty logs model', () => {
const series: DataFrame[] = [
{
toDataFrame({
fields: [],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a time field should return empty logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'message',
type: FieldType.string,
values: [],
},
],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a string field should return empty logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'time',
type: FieldType.time,
values: [],
},
],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given one series should return expected logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
labels: {
filename: '/var/log/grafana/grafana.log',
job: 'grafana',
@ -393,26 +401,21 @@ describe('dataFrameToLogsModel', () => {
{
name: 'time',
type: FieldType.time,
values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
},
{
name: 'message',
type: FieldType.string,
values: [
't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
],
},
],
rows: [
[
'2019-04-26T09:28:11.352440161Z',
't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
],
[
'2019-04-26T14:42:50.991981292Z',
't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
],
],
meta: {
limit: 1000,
},
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeFalsy();
@ -450,23 +453,25 @@ describe('dataFrameToLogsModel', () => {
it('given one series without labels should return expected logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'time',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'message',
type: FieldType.string,
values: ['WARN boooo'],
},
{
name: 'level',
type: FieldType.string,
values: ['dbug'],
},
],
rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']],
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.rows).toHaveLength(1);
@ -482,7 +487,7 @@ describe('dataFrameToLogsModel', () => {
it('given multiple series should return expected logs model', () => {
const series: DataFrame[] = [
{
toDataFrame({
labels: {
foo: 'bar',
baz: '1',
@ -492,15 +497,16 @@ describe('dataFrameToLogsModel', () => {
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo'],
},
],
rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
},
{
}),
toDataFrame({
name: 'logs',
labels: {
foo: 'bar',
@ -511,14 +517,15 @@ describe('dataFrameToLogsModel', () => {
{
name: 'time',
type: FieldType.time,
values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:02Z'],
},
{
name: 'message',
type: FieldType.string,
values: ['INFO 1', 'INFO 2'],
},
],
rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeTruthy();

@ -1,4 +1,4 @@
import { LoadingState } from '@grafana/data';
import { LoadingState, toDataFrame } from '@grafana/data';
import { PanelData, DataQueryRequest } from '@grafana/ui';
import { filterPanelDataToQuery } from './QueryEditorRow';
@ -10,14 +10,14 @@ function makePretendRequest(requestId: string, subRequests?: DataQueryRequest[])
}
describe('filterPanelDataToQuery', () => {
const data = {
const data: PanelData = {
state: LoadingState.Done,
series: [
{ refId: 'A', fields: [{ name: 'AAA' }], rows: [], meta: {} },
{ refId: 'B', fields: [{ name: 'B111' }], rows: [], meta: {} },
{ refId: 'B', fields: [{ name: 'B222' }], rows: [], meta: {} },
{ refId: 'B', fields: [{ name: 'B333' }], rows: [], meta: {} },
{ refId: 'C', fields: [{ name: 'CCCC' }], rows: [], meta: { requestId: 'sub3' } },
toDataFrame({ refId: 'A', fields: [{ name: 'AAA' }], meta: {} }),
toDataFrame({ refId: 'B', fields: [{ name: 'B111' }], meta: {} }),
toDataFrame({ refId: 'B', fields: [{ name: 'B222' }], meta: {} }),
toDataFrame({ refId: 'B', fields: [{ name: 'B333' }], meta: {} }),
toDataFrame({ refId: 'C', fields: [{ name: 'CCCC' }], meta: { requestId: 'sub3' } }),
],
error: {
refId: 'B',
@ -28,7 +28,7 @@ describe('filterPanelDataToQuery', () => {
makePretendRequest('sub2'),
makePretendRequest('sub3'),
]),
} as PanelData;
};
it('should not have an error unless the refId matches', () => {
const panelData = filterPanelDataToQuery(data, 'A');

@ -1,7 +1,7 @@
import { PanelQueryRunner } from './PanelQueryRunner';
import { PanelData, DataQueryRequest, DataStreamObserver, DataStreamState, ScopedVars } from '@grafana/ui';
import { LoadingState } from '@grafana/data';
import { LoadingState, DataFrameHelper } from '@grafana/data';
import { dateTime } from '@grafana/data';
jest.mock('app/core/services/backend_srv');
@ -169,11 +169,10 @@ describe('PanelQueryRunner', () => {
state: LoadingState.Streaming,
key: 'test-stream-1',
data: [
{
rows: [],
new DataFrameHelper({
fields: [],
name: 'I am a magic stream',
},
}),
],
request: {
requestId: ctx.queryCalledWith.requestId,

@ -1,6 +1,6 @@
import { toDataQueryError, PanelQueryState, getProcessedDataFrames } from './PanelQueryState';
import { MockDataSourceApi } from 'test/mocks/datasource_srv';
import { LoadingState } from '@grafana/data';
import { LoadingState, getDataFrameRow } from '@grafana/data';
import { DataQueryResponse } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
@ -68,16 +68,18 @@ describe('getProcessedDataFrame', () => {
const data = getProcessedDataFrames([null, input1, input2, null, null]);
expect(data.length).toBe(2);
expect(data[0].fields[0].name).toBe(input1.target);
expect(data[0].rows).toBe(input1.datapoints);
const cmp = [getDataFrameRow(data[0], 0), getDataFrameRow(data[0], 1)];
expect(cmp).toEqual(input1.datapoints);
// Default name
expect(data[1].fields[0].name).toEqual('Value');
// Every column should have a name and a type
for (const table of data) {
for (const column of table.fields) {
expect(column.name).toBeDefined();
expect(column.type).toBeDefined();
for (const field of table.fields) {
expect(field.name).toBeDefined();
expect(field.type).toBeDefined();
}
}
});
@ -92,8 +94,7 @@ describe('getProcessedDataFrame', () => {
function makeSeriesStub(refId: string) {
return {
fields: [{ name: 'a' }],
rows: [],
fields: [{ name: undefined }],
refId,
} as any;
}

@ -8,21 +8,19 @@ import {
updateTimeRangeAction,
runQueriesAction,
} from '../actionTypes';
import { DataFrame, LoadingState } from '@grafana/data';
import { DataFrame, LoadingState, toDataFrame } from '@grafana/data';
import { processQueryResultsEpic } from './processQueryResultsEpic';
import TableModel from 'app/core/table_model';
const testContext = () => {
const serieA: DataFrame = {
const serieA: DataFrame = toDataFrame({
fields: [],
refId: 'A',
rows: [],
};
const serieB: DataFrame = {
});
const serieB: DataFrame = toDataFrame({
fields: [],
refId: 'B',
rows: [],
};
});
const series = [serieA, serieB];
const latency = 0;
const loadingState = LoadingState.Done;

@ -14,25 +14,28 @@ import {
clearQueriesAction,
stateSaveAction,
} from '../actionTypes';
import { LoadingState, DataFrame, FieldType } from '@grafana/data';
import { LoadingState, DataFrame, FieldType, DataFrameHelper } from '@grafana/data';
import { DataQueryRequest } from '@grafana/ui';
const testContext = () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'Value',
values: [],
},
{
name: 'Time',
type: FieldType.time,
unit: 'dateTimeAsIso',
config: {
unit: 'dateTimeAsIso',
},
values: [],
},
],
rows: [],
refId: 'A',
},
}),
];
const response = { data: series };

@ -2,7 +2,7 @@ import _ from 'lodash';
import flatten from 'app/core/utils/flatten';
import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
import { DataFrame, toDataFrame, FieldType } from '@grafana/data';
import { DataFrame, toDataFrame, FieldType, DataFrameHelper } from '@grafana/data';
import { DataQueryResponse } from '@grafana/ui';
import { ElasticsearchAggregation } from './types';
@ -464,33 +464,38 @@ export class ElasticResponse {
if (docs.length > 0) {
propNames = propNames.sort();
const series: DataFrame = {
fields: [
{
name: this.targets[0].timeField,
type: FieldType.time,
},
],
rows: [],
const series = new DataFrameHelper({ fields: [] });
series.addField({
name: this.targets[0].timeField,
type: FieldType.time,
}).parse = (v: any) => {
return v[0] || '';
};
if (logMessageField) {
series.fields.push({
series.addField({
name: logMessageField,
type: FieldType.string,
});
}).parse = (v: any) => {
return v || '';
};
} else {
series.fields.push({
series.addField({
name: '_source',
type: FieldType.string,
});
}).parse = (v: any) => {
return JSON.stringify(v, null, 2);
};
}
if (logLevelField) {
series.fields.push({
series.addField({
name: 'level',
type: FieldType.string,
});
}).parse = (v: any) => {
return v || '';
};
}
for (const propName of propNames) {
@ -498,35 +503,17 @@ export class ElasticResponse {
continue;
}
series.fields.push({
series.addField({
name: propName,
type: FieldType.string,
});
}).parse = (v: any) => {
return v || '';
};
}
// Add a row for each document
for (const doc of docs) {
const row: any[] = [];
row.push(doc[this.targets[0].timeField][0]);
if (logMessageField) {
row.push(doc[logMessageField] || '');
} else {
row.push(JSON.stringify(doc._source, null, 2));
}
if (logLevelField) {
row.push(doc[logLevelField] || '');
}
for (const propName of propNames) {
if (doc.hasOwnProperty(propName)) {
row.push(doc[propName]);
} else {
row.push(null);
}
}
series.rows.push(row);
series.appendRowFrom(doc);
}
dataFrame.push(series);

@ -1,4 +1,6 @@
import { ElasticResponse } from '../elastic_response';
import { DataFrameHelper, DataFrameView } from '@grafana/data';
import { KeyValue } from '@grafana/ui';
describe('ElasticResponse', () => {
let targets;
@ -858,19 +860,39 @@ describe('ElasticResponse', () => {
it('should return histogram aggregation and documents', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].fields).toContainEqual({ name: '@timestamp', type: 'time' });
expect(result.data[0].fields).toContainEqual({ name: 'host', type: 'string' });
expect(result.data[0].fields).toContainEqual({ name: 'message', type: 'string' });
result.data[0].rows.forEach((row: any, i: number) => {
const logResults = result.data[0] as DataFrameHelper;
const fields = logResults.fields.map(f => {
return {
name: f.name,
type: f.type,
};
});
expect(fields).toContainEqual({ name: '@timestamp', type: 'time' });
expect(fields).toContainEqual({ name: 'host', type: 'string' });
expect(fields).toContainEqual({ name: 'message', type: 'string' });
let rows = new DataFrameView(logResults);
for (let i = 0; i < rows.length; i++) {
const r = rows.get(i);
const row = [r._id, r._type, r._index, r._source];
expect(row).toContain(response.responses[0].hits.hits[i]._id);
expect(row).toContain(response.responses[0].hits.hits[i]._type);
expect(row).toContain(response.responses[0].hits.hits[i]._index);
expect(row).toContain(JSON.stringify(response.responses[0].hits.hits[i]._source, undefined, 2));
});
}
// Make a map from the histogram results
const hist: KeyValue<number> = {};
const histogramResults = new DataFrameHelper(result.data[1]);
rows = new DataFrameView(histogramResults);
for (let i = 0; i < rows.length; i++) {
const row = rows.get(i);
hist[row.Time] = row.Count;
}
expect(result.data[1]).toHaveProperty('name', 'Count');
response.responses[0].aggregations['2'].buckets.forEach((bucket: any) => {
expect(result.data[1].rows).toContainEqual([bucket.doc_count, bucket.key]);
expect(hist[bucket.key]).toEqual(bucket.doc_count);
});
});
});

@ -2,7 +2,7 @@ import AzureMonitorDatasource from '../datasource';
// @ts-ignore
import Q from 'q';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { toUtc } from '@grafana/data';
import { toUtc, DataFrame } from '@grafana/data';
describe('AzureMonitorDatasource', () => {
const ctx: any = {
@ -132,11 +132,12 @@ describe('AzureMonitorDatasource', () => {
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data.length).toBe(1);
expect(results.data[0].name).toEqual('Percentage CPU');
expect(results.data[0].rows[0][1]).toEqual(1558278660000);
expect(results.data[0].rows[0][0]).toEqual(2.2075);
expect(results.data[0].rows[1][1]).toEqual(1558278720000);
expect(results.data[0].rows[1][0]).toEqual(2.29);
const data = results.data[0] as DataFrame;
expect(data.name).toEqual('Percentage CPU');
expect(data.fields[1].values.get(0)).toEqual(1558278660000);
expect(data.fields[0].values.get(0)).toEqual(2.2075);
expect(data.fields[1].values.get(1)).toEqual(1558278720000);
expect(data.fields[0].values.get(1)).toEqual(2.29);
});
});
});

@ -5,7 +5,8 @@ import React, { PureComponent } from 'react';
import { InputOptions } from './types';
import { DataSourcePluginOptionsEditorProps, DataSourceSettings, TableInputCSV } from '@grafana/ui';
import { DataFrame, toCSV } from '@grafana/data';
import { DataFrame, DataFrameHelper } from '@grafana/data';
import { dataFrameToCSV } from './utils';
type InputSettings = DataSourceSettings<InputOptions>;
@ -23,7 +24,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
componentDidMount() {
const { options } = this.props;
if (options.jsonData.data) {
const text = toCSV(options.jsonData.data);
const text = dataFrameToCSV(options.jsonData.data);
this.setState({ text });
}
}
@ -31,12 +32,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
onSeriesParsed = (data: DataFrame[], text: string) => {
const { options, onOptionsChange } = this.props;
if (!data) {
data = [
{
fields: [],
rows: [],
},
];
data = [new DataFrameHelper()];
}
// data is a property on 'jsonData'
const jsonData = {

@ -1,6 +1,6 @@
import InputDatasource, { describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { readCSV } from '@grafana/data';
import { readCSV, DataFrame, DataFrameHelper } from '@grafana/data';
import { DataSourceInstanceSettings, PluginMeta } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
@ -26,9 +26,9 @@ describe('InputDatasource', () => {
return ds.query(options).then(rsp => {
expect(rsp.data.length).toBe(1);
const series = rsp.data[0];
const series: DataFrame = rsp.data[0];
expect(series.refId).toBe('Z');
expect(series.rows).toEqual(data[0].rows);
expect(series.fields[0].values).toEqual(data[0].fields[0].values);
});
});
});
@ -38,11 +38,10 @@ describe('InputDatasource', () => {
expect(describeDataFrame(null)).toEqual('');
expect(
describeDataFrame([
{
new DataFrameHelper({
name: 'x',
fields: [{ name: 'a' }],
rows: [],
},
}),
])
).toEqual('1 Fields, 0 Rows');
});

@ -6,17 +6,19 @@ import {
DataSourceInstanceSettings,
MetricFindValue,
} from '@grafana/ui';
import { DataFrame } from '@grafana/data';
import { DataFrame, DataFrameDTO, toDataFrame } from '@grafana/data';
import { InputQuery, InputOptions } from './types';
export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
data: DataFrame[];
data: DataFrame[] = [];
constructor(instanceSettings: DataSourceInstanceSettings<InputOptions>) {
super(instanceSettings);
this.data = instanceSettings.jsonData.data ? instanceSettings.jsonData.data : [];
if (instanceSettings.jsonData.data) {
this.data = instanceSettings.jsonData.data.map(v => toDataFrame(v));
}
}
/**
@ -47,14 +49,14 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
query(options: DataQueryRequest<InputQuery>): Promise<DataQueryResponse> {
const results: DataFrame[] = [];
for (const query of options.targets) {
if (query.hide) {
continue;
let data = this.data;
if (query.data) {
data = query.data.map(v => toDataFrame(v));
}
const data = query.data ? query.data : this.data;
for (const series of data) {
for (let i = 0; i < data.length; i++) {
results.push({
...data[i],
refId: query.refId,
...series,
});
}
}
@ -66,8 +68,9 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
let rowCount = 0;
let info = `${this.data.length} Series:`;
for (const series of this.data) {
info += ` [${series.fields.length} Fields, ${series.rows.length} Rows]`;
rowCount += series.rows.length;
const length = series.length;
info += ` [${series.fields.length} Fields, ${length} Rows]`;
rowCount += length;
}
if (rowCount > 0) {
@ -84,13 +87,23 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
}
}
export function describeDataFrame(data: DataFrame[]): string {
// Row count for either a serialized DTO or a live DataFrame.
// Live frames carry an own `length` property; DTOs do not, so for them we
// fall back to the value count of the first field.
function getLength(data?: DataFrameDTO | DataFrame) {
  const missingFields = !data || !data.fields || data.fields.length === 0;
  if (missingFields) {
    return 0;
  }
  const frame = data as DataFrame;
  if (frame.hasOwnProperty('length')) {
    return frame.length;
  }
  return frame.fields[0].values.length;
}
export function describeDataFrame(data: Array<DataFrameDTO | DataFrame>): string {
if (!data || !data.length) {
return '';
}
if (data.length > 1) {
const count = data.reduce((acc, series) => {
return acc + series.rows.length;
return acc + getLength(series);
}, 0);
return `${data.length} Series, ${count} Rows`;
}
@ -98,7 +111,8 @@ export function describeDataFrame(data: DataFrame[]): string {
if (!series.fields) {
return 'Missing Fields';
}
return `${series.fields.length} Fields, ${series.rows.length} Rows`;
const length = getLength(series);
return `${series.fields.length} Fields, ${length} Rows`;
}
export default InputDatasource;

@ -6,7 +6,9 @@ import { InputDatasource, describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { FormLabel, Select, QueryEditorProps, TableInputCSV } from '@grafana/ui';
import { DataFrame, toCSV, SelectableValue } from '@grafana/data';
import { DataFrame, toCSV, SelectableValue, DataFrameHelper } from '@grafana/data';
import { dataFrameToCSV } from './utils';
type Props = QueryEditorProps<InputDatasource, InputQuery, InputOptions>;
@ -26,7 +28,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
onComponentDidMount() {
const { query } = this.props;
const text = query.data ? toCSV(query.data) : '';
const text = dataFrameToCSV(query.data);
this.setState({ text });
}
@ -39,12 +41,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
}
data = [...datasource.data];
if (!data) {
data = [
{
fields: [],
rows: [],
},
];
data = [new DataFrameHelper()];
}
this.setState({ text: toCSV(data) });
}
@ -56,12 +53,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
const { query, onChange, onRunQuery } = this.props;
this.setState({ text });
if (!data) {
data = [
{
fields: [],
rows: [],
},
];
data = [new DataFrameHelper()];
}
onChange({ ...query, data });
onRunQuery();

@ -1,12 +1,12 @@
import { DataQuery, DataSourceJsonData } from '@grafana/ui';
import { DataFrame } from '@grafana/data';
import { DataFrameDTO } from '@grafana/data';
export interface InputQuery extends DataQuery {
// Data saved in the panel
data?: DataFrame[];
data?: DataFrameDTO[];
}
export interface InputOptions extends DataSourceJsonData {
// Saved in the datasource and download with bootData
data?: DataFrame[];
data?: DataFrameDTO[];
}

@ -0,0 +1,8 @@
import { toDataFrame, DataFrameDTO, toCSV } from '@grafana/data';
/**
 * Serialize saved DataFrame DTOs to a single CSV string.
 *
 * @param dto - the frames saved in datasource/query JSON; may be undefined or empty.
 * @returns CSV text for all frames, or '' when there is nothing to convert.
 */
export function dataFrameToCSV(dto?: DataFrameDTO[]) {
  if (!dto || !dto.length) {
    return '';
  }
  // Bug fix: the callback previously ignored `v` and passed the whole `dto`
  // array to toDataFrame, yielding one identical (wrong) frame per entry.
  // Each DTO must be converted individually.
  return toCSV(dto.map(v => toDataFrame(v)));
}

@ -68,7 +68,7 @@ describe('LokiDatasource', () => {
const res = await ds.query(options);
const dataFrame = res.data[0] as DataFrame;
expect(dataFrame.rows[0][1]).toBe('hello');
expect(dataFrame.fields[1].values.get(0)).toBe('hello');
expect(dataFrame.meta.limit).toBe(20);
expect(dataFrame.meta.searchWords).toEqual(['(?i)foo']);
done();

@ -154,7 +154,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
}
if (!data.streams) {
return [{ ...logStreamToDataFrame(data), refId: target.refId }];
return [logStreamToDataFrame(data, target.refId)];
}
for (const stream of data.streams || []) {
@ -330,18 +330,17 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
const series: DataFrame[] = [];
try {
const reverse = options && options.direction === 'FORWARD';
const result = await this._request('/api/prom/query', target);
if (result.data) {
for (const stream of result.data.streams || []) {
const dataFrame = logStreamToDataFrame(stream);
if (reverse) {
dataFrame.reverse();
}
series.push(dataFrame);
}
}
if (options && options.direction === 'FORWARD') {
if (series[0] && series[0].rows) {
series[0].rows.reverse();
}
}
return {
data: series,

@ -26,9 +26,9 @@ describe('convert loki response to DataFrame', () => {
expect(data.length).toBe(2);
expect(data[0].labels['foo']).toEqual('bar');
expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);
expect(data[0].rows[0][1]).toEqual(streams[0].entries[0].line);
expect(data[1].rows[0][0]).toEqual(streams[1].entries[0].ts);
expect(data[1].rows[0][1]).toEqual(streams[1].entries[0].line);
expect(data[0].fields[0].values.get(0)).toEqual(streams[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(streams[0].entries[0].line);
expect(data[1].fields[0].values.get(0)).toEqual(streams[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(streams[1].entries[0].line);
});
});

@ -1,16 +1,25 @@
import { LokiLogsStream } from './types';
import { DataFrame, parseLabels, FieldType, Labels } from '@grafana/data';
import { parseLabels, FieldType, Labels, DataFrameHelper } from '@grafana/data';
export function logStreamToDataFrame(stream: LokiLogsStream): DataFrame {
export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): DataFrameHelper {
let labels: Labels = stream.parsedLabels;
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);
}
return {
const time: string[] = [];
const lines: string[] = [];
for (const entry of stream.entries) {
time.push(entry.ts || entry.timestamp);
lines.push(entry.line);
}
return new DataFrameHelper({
refId,
labels,
fields: [{ name: 'ts', type: FieldType.time }, { name: 'line', type: FieldType.string }],
rows: stream.entries.map(entry => {
return [entry.ts || entry.timestamp, entry.line];
}),
};
fields: [
{ name: 'ts', type: FieldType.time, values: time }, // Time
{ name: 'line', type: FieldType.string, values: lines }, // Line
],
});
}

@ -1,7 +1,16 @@
import defaults from 'lodash/defaults';
import { DataQueryRequest, DataQueryResponse, DataQueryError, DataStreamObserver, DataStreamState } from '@grafana/ui';
import { FieldType, DataFrame, LoadingState, LogLevel, CSVReader } from '@grafana/data';
import {
FieldType,
Field,
LoadingState,
LogLevel,
CSVReader,
DataFrameHelper,
CircularVector,
DataFrame,
} from '@grafana/data';
import { TestDataQuery, StreamingQuery } from './types';
export const defaultQuery: StreamingQuery = {
@ -74,6 +83,10 @@ export class StreamWorker {
last = -1;
timeoutId = 0;
// The values within
values: CircularVector[] = [];
data: DataFrame = { fields: [], length: 0 };
constructor(key: string, query: TestDataQuery, request: DataQueryRequest, observer: DataStreamObserver) {
this.stream = {
key,
@ -103,26 +116,25 @@ export class StreamWorker {
}
this.query = query.stream;
this.stream.request = request; // OK?
console.log('Reuse Test Stream: ', this);
return true;
}
appendRows(append: any[][]) {
// Trim the maximum row count
const { query, stream } = this;
const maxRows = query.buffer ? query.buffer : stream.request.maxDataPoints;
// Edit the first series
const series = stream.data[0];
let rows = series.rows.concat(append);
const extra = maxRows - rows.length;
if (extra < 0) {
rows = rows.slice(extra * -1);
}
series.rows = rows;
const { stream, values, data } = this;
// Tell the event about only the rows that changed (it may want to process them)
stream.delta = [{ ...series, rows: append }];
// Append all rows
for (let i = 0; i < append.length; i++) {
const row = append[i];
for (let j = 0; j < values.length; j++) {
values[j].append(row[j]); // Circular buffer will kick out old entries
}
}
// Clear any cached values
for (let j = 0; j < data.fields.length; j++) {
data.fields[j].calcs = undefined;
}
stream.data = [data];
// Broadcast the changes
if (this.observer) {
@ -143,7 +155,7 @@ export class SignalWorker extends StreamWorker {
constructor(key: string, query: TestDataQuery, request: DataQueryRequest, observer: DataStreamObserver) {
super(key, query, request, observer);
setTimeout(() => {
this.stream.data = [this.initBuffer(query.refId)];
this.initBuffer(query.refId);
this.looper();
}, 10);
@ -162,33 +174,46 @@ export class SignalWorker extends StreamWorker {
return row;
};
initBuffer(refId: string): DataFrame {
initBuffer(refId: string) {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
rows: [],
const request = this.stream.request;
const maxRows = buffer ? buffer : request.maxDataPoints;
const times = new CircularVector(new Array<number>(maxRows));
const vals = new CircularVector(new Array<number>(maxRows));
this.values = [times, vals];
const data = new DataFrameHelper({
fields: [
{ name: 'Time', type: FieldType.time, values: times }, // The time field
{ name: 'Value', type: FieldType.number, values: vals },
],
refId,
name: 'Signal ' + refId,
} as DataFrame;
});
for (let i = 0; i < this.bands; i++) {
const suffix = this.bands > 1 ? ` ${i + 1}` : '';
data.fields.push({ name: 'Min' + suffix, type: FieldType.number });
data.fields.push({ name: 'Max' + suffix, type: FieldType.number });
const min = new CircularVector(new Array<number>(maxRows));
const max = new CircularVector(new Array<number>(maxRows));
this.values.push(min);
this.values.push(max);
data.addField({ name: 'Min' + suffix, type: FieldType.number, values: min });
data.addField({ name: 'Max' + suffix, type: FieldType.number, values: max });
}
console.log('START', data);
const request = this.stream.request;
this.value = Math.random() * 100;
const maxRows = buffer ? buffer : request.maxDataPoints;
let time = Date.now() - maxRows * speed;
for (let i = 0; i < maxRows; i++) {
data.rows.push(this.nextRow(time));
const row = this.nextRow(time);
for (let j = 0; j < this.values.length; j++) {
this.values[j].append(row[j]);
}
time += speed;
}
return data;
this.data = data;
}
looper = () => {
@ -251,9 +276,10 @@ export class FetchWorker extends StreamWorker {
return this.reader.read().then(this.processChunk);
};
onHeader = (series: DataFrame) => {
series.refId = this.refId;
this.stream.data = [series];
onHeader = (fields: Field[]) => {
console.warn('TODO!!!', fields);
// series.refId = this.refId;
// this.stream.data = [series];
};
onRow = (row: any[]) => {
@ -269,7 +295,7 @@ export class LogsWorker extends StreamWorker {
super(key, query, request, observer);
window.setTimeout(() => {
this.stream.data = [this.initBuffer(query.refId)];
this.initBuffer(query.refId);
this.looper();
}, 10);
}
@ -314,24 +340,34 @@ export class LogsWorker extends StreamWorker {
return [time, '[' + this.getRandomLogLevel() + '] ' + this.getRandomLine()];
};
initBuffer(refId: string): DataFrame {
initBuffer(refId: string) {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Line', type: FieldType.string }],
rows: [],
refId,
name: 'Logs ' + refId,
} as DataFrame;
const request = this.stream.request;
const maxRows = buffer ? buffer : request.maxDataPoints;
const times = new CircularVector(new Array(maxRows));
const lines = new CircularVector(new Array(maxRows));
this.values = [times, lines];
this.data = new DataFrameHelper({
fields: [
{ name: 'Time', type: FieldType.time, values: times },
{ name: 'Line', type: FieldType.string, values: lines },
],
refId,
name: 'Logs ' + refId,
});
// Fill up the buffer
let time = Date.now() - maxRows * speed;
for (let i = 0; i < maxRows; i++) {
data.rows.push(this.nextRow(time));
const row = this.nextRow(time);
times.append(row[0]);
lines.append(row[1]);
time += speed;
}
return data;
}
looper = () => {

@ -13,7 +13,7 @@ import {
PanelEditorProps,
Select,
} from '@grafana/ui';
import { Field } from '@grafana/data';
import { FieldConfig } from '@grafana/data';
import { Threshold, ValueMapping } from '@grafana/data';
import { BarGaugeOptions, orientationOptions, displayModes } from './types';
@ -41,7 +41,7 @@ export class BarGaugePanelEditor extends PureComponent<PanelEditorProps<BarGauge
fieldOptions,
});
onDefaultsChange = (field: Partial<Field>) => {
onDefaultsChange = (field: FieldConfig) => {
this.onDisplayOptionsChanged({
...this.props.options.fieldOptions,
defaults: field,

@ -11,7 +11,7 @@ import {
Switch,
PanelOptionsGroup,
} from '@grafana/ui';
import { Field, Threshold, ValueMapping } from '@grafana/data';
import { Threshold, ValueMapping, FieldConfig } from '@grafana/data';
import { GaugeOptions } from './types';
@ -49,7 +49,7 @@ export class GaugePanelEditor extends PureComponent<PanelEditorProps<GaugeOption
fieldOptions,
});
onDefaultsChange = (field: Partial<Field>) => {
onDefaultsChange = (field: FieldConfig) => {
this.onDisplayOptionsChanged({
...this.props.options.fieldOptions,
defaults: field,

@ -1,6 +1,6 @@
import _ from 'lodash';
import { colors, getColorFromHexRgbOrName } from '@grafana/ui';
import { TimeRange, FieldCache, FieldType, Field, DataFrame } from '@grafana/data';
import { TimeRange, FieldType, Field, DataFrame, DataFrameHelper } from '@grafana/data';
import TimeSeries from 'app/core/time_series2';
import config from 'app/core/config';
@ -21,35 +21,24 @@ export class DataProcessor {
}
for (const series of dataList) {
const { fields } = series;
const cache = new FieldCache(fields);
const time = cache.getFirstFieldOfType(FieldType.time);
const data = new DataFrameHelper(series);
const time = data.getFirstFieldOfType(FieldType.time);
if (!time) {
continue;
}
const seriesName = series.name ? series.name : series.refId;
for (let i = 0; i < fields.length; i++) {
if (fields[i].type !== FieldType.number) {
continue;
}
const field = fields[i];
let name = field.title;
if (!field.title) {
name = field.name;
}
for (const field of data.getFields(FieldType.number)) {
let name = field.config && field.config.title ? field.config.title : field.name;
if (seriesName && dataList.length > 0 && name !== seriesName) {
name = seriesName + ' ' + name;
}
const datapoints = [];
for (const row of series.rows) {
datapoints.push([row[i], row[time.index]]);
for (let r = 0; r < data.length; r++) {
datapoints.push([field.values.get(r), time.values.get(r)]);
}
list.push(this.toTimeSeries(field, name, datapoints, list.length, range));
@ -76,7 +65,7 @@ export class DataProcessor {
datapoints: datapoints || [],
alias: alias,
color: getColorFromHexRgbOrName(color, config.theme.type),
unit: field.unit,
unit: field.config ? field.config.unit : undefined,
});
if (datapoints && datapoints.length > 0 && range) {

@ -34,12 +34,11 @@ describe('Graph DataProcessor', () => {
{
name: 'series',
fields: [
{ name: 'v1' }, // first
{ name: 'v2' }, // second
{ name: 'string' }, // skip
{ name: 'time' }, // Time is last column
{ name: 'v1', values: [0.1, 0.2, 0.3] }, // first
{ name: 'v2', values: [1.1, 2.2, 3.3] }, // second
{ name: 'string', values: ['a', 'b', 'c'] }, // skip
{ name: 'time', values: [1001, 1002, 1003] }, // Time is last column
],
rows: [[0.1, 1.1, 'a', 1001], [0.2, 2.2, 'b', 1002], [0.3, 3.3, 'c', 1003]],
},
]);
@ -47,6 +46,7 @@ describe('Graph DataProcessor', () => {
panel.xaxis.mode = 'series';
const series = processor.getSeriesList({ dataList });
expect(series.length).toEqual(5);
expect(series).toMatchSnapshot();
});

@ -1,5 +1,5 @@
import { colors, getFlotPairs, getColorFromHexRgbOrName, getDisplayProcessor, PanelData } from '@grafana/ui';
import { NullValueMode, reduceField, FieldCache, FieldType, DisplayValue, GraphSeriesXY } from '@grafana/data';
import { NullValueMode, reduceField, DataFrameHelper, FieldType, DisplayValue, GraphSeriesXY } from '@grafana/data';
import { SeriesOptions, GraphOptions } from './types';
import { GraphLegendEditorLegendOptions } from './GraphLegendEditor';
@ -19,29 +19,22 @@ export const getGraphSeriesModel = (
});
for (const series of data.series) {
const fieldCache = new FieldCache(series.fields);
const timeColumn = fieldCache.getFirstFieldOfType(FieldType.time);
const data = new DataFrameHelper(series);
const timeColumn = data.getFirstFieldOfType(FieldType.time);
if (!timeColumn) {
continue;
}
const numberFields = fieldCache.getFields(FieldType.number);
for (let i = 0; i < numberFields.length; i++) {
const field = numberFields[i];
for (const field of data.getFields(FieldType.number)) {
// Use external calculator just to make sure it works :)
const points = getFlotPairs({
rows: series.rows,
xIndex: timeColumn.index,
yIndex: field.index,
xField: timeColumn,
yField: field,
nullValueMode: NullValueMode.Null,
});
if (points.length > 0) {
const seriesStats = reduceField({
series,
reducers: legendOptions.stats,
fieldIndex: field.index,
});
const seriesStats = reduceField({ field, reducers: legendOptions.stats });
let statsDisplayValues: DisplayValue[];
if (legendOptions.stats) {

@ -8,7 +8,7 @@ import {
FieldPropertiesEditor,
PanelOptionsGroup,
} from '@grafana/ui';
import { ValueMapping, Field } from '@grafana/data';
import { ValueMapping, FieldConfig } from '@grafana/data';
import { PieChartOptionsBox } from './PieChartOptionsBox';
import { PieChartOptions } from './types';
@ -28,7 +28,7 @@ export class PieChartPanelEditor extends PureComponent<PanelEditorProps<PieChart
fieldOptions,
});
onDefaultsChange = (field: Partial<Field>) => {
onDefaultsChange = (field: FieldConfig) => {
this.onDisplayOptionsChanged({
...this.props.options.fieldOptions,
defaults: field,

@ -10,7 +10,7 @@ import {
FieldPropertiesEditor,
PanelOptionsGroup,
} from '@grafana/ui';
import { Threshold, ValueMapping, Field } from '@grafana/data';
import { Threshold, ValueMapping, FieldConfig } from '@grafana/data';
import { SingleStatOptions, SparklineOptions } from './types';
import { ColoringEditor } from './ColoringEditor';
@ -46,7 +46,7 @@ export class SingleStatEditor extends PureComponent<PanelEditorProps<SingleStatO
sparkline,
});
onDefaultsChange = (field: Partial<Field>) => {
onDefaultsChange = (field: FieldConfig) => {
this.onDisplayOptionsChanged({
...this.props.options.fieldOptions,
override: field,

Loading…
Cancel
Save