mirror of https://github.com/grafana/grafana

Loki: support loki with streaming in dashboards (#18709)

Move some of the buffering with live streaming inside of the datasource, sending full frames instead of deltas, and allow Loki in dashboards.

branch: pull/18921/head
parent: e80e3608ad
commit: 991f77cee1
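Before the diff itself, a minimal sketch (plain RxJS, TypeScript) of the "full frames instead of deltas" idea described in the commit message. This is not the code from this commit; the plain string buffer and the capacity value are illustrative stand-ins for the CircularDataFrame the real change uses below.

import { Subject } from 'rxjs';
import { scan } from 'rxjs/operators';

// Stand-in for webSocket(url); each message is a delta holding only the new log lines.
const socket = new Subject<string[]>();
const capacity = 1000; // illustrative rolling-buffer size

// Accumulate deltas into a rolling buffer and always emit the *full* buffer,
// so consumers re-render from a complete snapshot instead of patching deltas.
const fullFrames = socket.pipe(
  scan<string[], string[]>((buffer, delta) => [...buffer, ...delta].slice(-capacity), [])
);

fullFrames.subscribe(lines => console.log('rows in frame:', lines.length));
socket.next(['line 1', 'line 2']); // rows in frame: 2
socket.next(['line 3']);           // rows in frame: 3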
@@ -1,90 +1,66 @@
// Libraries
import React, { PureComponent } from 'react';
import React, { memo } from 'react';

// Types
import { QueryEditorProps } from '@grafana/ui';
import { AbsoluteTimeRange } from '@grafana/data';
import { QueryEditorProps, Switch, DataSourceStatus } from '@grafana/ui';
import { LokiDatasource } from '../datasource';
import { LokiQuery } from '../types';
// import { LokiQueryField } from './LokiQueryField';
import { LokiQueryField } from './LokiQueryField';
import { useLokiSyntax } from './useLokiSyntax';

type Props = QueryEditorProps<LokiDatasource, LokiQuery>;

// interface State {
//   query: LokiQuery;
// }
export const LokiQueryEditor = memo(function LokiQueryEditor(props: Props) {
  const { query, panelData, datasource, onChange, onRunQuery } = props;

export class LokiQueryEditor extends PureComponent<Props> {
  // state: State = {
  //   query: this.props.query,
  // };
  //
  // onRunQuery = () => {
  //   const { query } = this.state;
  //
  //   this.props.onChange(query);
  //   this.props.onRunQuery();
  // };
  //
  // onFieldChange = (query: LokiQuery, override?) => {
  //   this.setState({
  //     query: {
  //       ...this.state.query,
  //       expr: query.expr,
  //     },
  //   });
  // };
  //
  // onFormatChanged = (option: SelectableValue) => {
  //   this.props.onChange({
  //     ...this.state.query,
  //     resultFormat: option.value,
  //   });
  // };
  let absolute: AbsoluteTimeRange;
  if (panelData && panelData.request) {
    const { range } = panelData.request;
    absolute = {
      from: range.from.valueOf(),
      to: range.to.valueOf(),
    };
  } else {
    absolute = {
      from: Date.now() - 10000,
      to: Date.now(),
    };
  }

  render() {
    // const { query } = this.state;
    // const { datasource } = this.props;
    // const formatOptions: SelectableValue[] = [
    //   { label: 'Time Series', value: 'time_series' },
    //   { label: 'Table', value: 'table' },
    // ];
    //
    // query.resultFormat = query.resultFormat || 'time_series';
    // const currentFormat = formatOptions.find(item => item.value === query.resultFormat);
  const { isSyntaxReady, setActiveOption, refreshLabels, ...syntaxProps } = useLokiSyntax(
    datasource.languageProvider,
    // TODO maybe use real status
    DataSourceStatus.Connected,
    absolute
  );

    return (
      <div>
  return (
    <div>
      <LokiQueryField
        datasource={datasource}
        datasourceStatus={DataSourceStatus.Connected}
        query={query}
        onChange={onChange}
        onRunQuery={onRunQuery}
        history={[]}
        panelData={panelData}
        onLoadOptions={setActiveOption}
        onLabelsRefresh={refreshLabels}
        syntaxLoaded={isSyntaxReady}
        absoluteRange={absolute}
        {...syntaxProps}
      />
      <div className="gf-form-inline">
        <div className="gf-form">
          <div className="gf-form-label">
            Loki is currently not supported as dashboard data source. We are working on it!
          </div>
          <Switch label="Live" checked={!!query.live} onChange={() => onChange({ ...query, live: !query.live })} />
        </div>
        {/*
        <LokiQueryField
          datasource={datasource}
          query={query}
          onQueryChange={this.onFieldChange}
          onExecuteQuery={this.onRunQuery}
          history={[]}
        />
        <div className="gf-form-inline">
          <div className="gf-form">
            <div className="gf-form-label">Format as</div>
            <Select
              isSearchable={false}
              options={formatOptions}
              onChange={this.onFormatChanged}
              value={currentFormat}
            />
          </div>
          <div className="gf-form gf-form--grow">
            <div className="gf-form-label gf-form-label--grow" />
          </div>
          <div className="gf-form gf-form--grow">
            <div className="gf-form-label gf-form-label--grow" />
          </div>
        */}
      </div>
    );
  }
}
    </div>
  );
});

export default LokiQueryEditor;

@@ -0,0 +1,207 @@
import { Subject, Observable } from 'rxjs';
import * as rxJsWebSocket from 'rxjs/webSocket';
import { LiveStreams } from './live_streams';
import { DataFrameView, Labels, formatLabels, DataFrame } from '@grafana/data';
import { noop } from 'lodash';

let fakeSocket: Subject<any>;
jest.mock('rxjs/webSocket', () => {
  return {
    __esModule: true,
    webSocket: () => fakeSocket,
  };
});

describe('Live Stream Tests', () => {
  afterAll(() => {
    jest.restoreAllMocks();
  });

  const msg0: any = {
    streams: [
      {
        labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
        entries: [
          {
            ts: '2019-08-28T20:50:40.118944705Z',
            line: 'Kittens',
          },
        ],
      },
    ],
    dropped_entries: null,
  };

  it('reads the values into the buffer', done => {
    fakeSocket = new Subject<any>();
    const labels: Labels = { job: 'varlogs' };
    const target = makeTarget('fake', labels);
    const stream = new LiveStreams().getStream(target);
    expect.assertions(5);

    const tests = [
      (val: DataFrame[]) => expect(val).toEqual([]),
      (val: DataFrame[]) => {
        expect(val[0].length).toEqual(7);
        expect(val[0].labels).toEqual(labels);
      },
      (val: DataFrame[]) => {
        expect(val[0].length).toEqual(8);
        const view = new DataFrameView(val[0]);
        const last = { ...view.get(view.length - 1) };
        expect(last).toEqual({
          ts: '2019-08-28T20:50:40.118944705Z',
          line: 'Kittens',
          labels: { filename: '/var/log/sntpc.log' },
        });
      },
    ];
    stream.subscribe({
      next: val => {
        const test = tests.shift();
        test(val);
      },
      complete: () => done(),
    });

    // Send it the initial list of things
    fakeSocket.next(initialRawResponse);
    // Send it a single update
    fakeSocket.next(msg0);
    fakeSocket.complete();
  });

  it('returns the same subscription if the url matches existing one', () => {
    fakeSocket = new Subject<any>();
    const liveStreams = new LiveStreams();
    const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
    const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
    expect(stream1).toBe(stream2);
  });

  it('returns new subscription when the previous unsubscribed', () => {
    fakeSocket = new Subject<any>();
    const liveStreams = new LiveStreams();
    const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
    const subscription = stream1.subscribe({
      next: noop,
    });
    subscription.unsubscribe();

    const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
    expect(stream1).not.toBe(stream2);
  });

  it('returns new subscription when the previous is unsubscribed and correctly unsubscribes from source', () => {
    let unsubscribed = false;
    fakeSocket = new Observable(() => {
      return () => (unsubscribed = true);
    }) as any;
    const spy = spyOn(rxJsWebSocket, 'webSocket');
    spy.and.returnValue(fakeSocket);

    const liveStreams = new LiveStreams();
    const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
    const subscription = stream1.subscribe({
      next: noop,
    });
    subscription.unsubscribe();
    expect(unsubscribed).toBe(true);
  });
});

/**
 * Create target (query to run). Url is what is used as cache key.
 */
function makeTarget(url: string, labels?: Labels) {
  labels = labels || { job: 'varlogs' };
  return {
    url,
    size: 10,
    query: formatLabels(labels),
    refId: 'A',
    regexp: '',
  };
}

//----------------------------------------------------------------
// Added this at the end so the top is more readable
//----------------------------------------------------------------

const initialRawResponse: any = {
  streams: [
    {
      labels: '{filename="/var/log/docker.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:43:38.215447855Z',
          line:
            '2019-08-28T20:43:38Z docker time="2019-08-28T20:43:38.147149490Z" ' +
            'level=debug msg="[resolver] received AAAA record \\"::1\\" for \\"localhost.\\" from udp:192.168.65.1"',
        },
      ],
    },
    {
      labels: '{filename="/var/log/docker.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:43:38.215450388Z',
          line:
            '2019-08-28T20:43:38Z docker time="2019-08-28T20:43:38.147224630Z" ' +
            'level=debug msg="[resolver] received AAAA record \\"fe80::1\\" for \\"localhost.\\" from udp:192.168.65.1"',
        },
      ],
    },
    {
      labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:43:40.452525099Z',
          line: '2019-08-28T20:43:40Z sntpc sntpc[1]: offset=-0.022171, delay=0.000463',
        },
      ],
    },
    {
      labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:44:10.297164454Z',
          line: '2019-08-28T20:44:10Z sntpc sntpc[1]: offset=-0.022327, delay=0.000527',
        },
      ],
    },
    {
      labels: '{filename="/var/log/lifecycle-server.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:44:38.152248647Z',
          line:
            '2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095444834Z" ' +
            'level=debug msg="Name To resolve: localhost."',
        },
      ],
    },
    {
      labels: '{filename="/var/log/lifecycle-server.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:44:38.15225554Z',
          line:
            '2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095896074Z" ' +
            'level=debug msg="[resolver] query localhost. (A) from 172.22.0.4:53748, forwarding to udp:192.168.65.1"',
        },
      ],
    },
    {
      labels: '{filename="/var/log/docker.log", job="varlogs"}',
      entries: [
        {
          ts: '2019-08-28T20:44:38.152271475Z',
          line:
            '2019-08-28T20:44:38Z docker time="2019-08-28T20:44:38.095444834Z" level=debug msg="Name To resolve: localhost."',
        },
      ],
    },
  ],
  dropped_entries: null,
};
@@ -0,0 +1,51 @@
import { DataFrame, FieldType, parseLabels, KeyValue, CircularDataFrame } from '@grafana/data';
import { Observable, BehaviorSubject } from 'rxjs';
import { webSocket } from 'rxjs/webSocket';
import { LokiResponse } from './types';
import { finalize, map, multicast, refCount } from 'rxjs/operators';
import { appendResponseToBufferedData } from './result_transformer';

/**
 * Maps directly to a query in the UI (refId is key)
 */
export interface LiveTarget {
  query: string;
  regexp: string;
  url: string;
  refId: string;
  size: number;
}

/**
 * Cache of websocket streams that can be returned as observable. In case there already is a stream for a particular
 * target, it is returned, and on subscription it returns the latest dataFrame.
 */
export class LiveStreams {
  private streams: KeyValue<Observable<DataFrame[]>> = {};

  getStream(target: LiveTarget): Observable<DataFrame[]> {
    let stream = this.streams[target.url];
    if (!stream) {
      const data = new CircularDataFrame({ capacity: target.size });
      data.labels = parseLabels(target.query);
      data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
      data.addField({ name: 'line', type: FieldType.string });
      data.addField({ name: 'labels', type: FieldType.other });

      const subject = new BehaviorSubject<DataFrame[]>([]);
      stream = webSocket(target.url).pipe(
        finalize(() => {
          delete this.streams[target.url];
        }),
        map((response: LokiResponse) => {
          appendResponseToBufferedData(response, data);
          return [data];
        }),
        multicast(subject),
        refCount()
      );
      this.streams[target.url] = stream;
    }
    return stream;
  }
}
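
A hypothetical usage sketch for the LiveStreams cache added above. The class, interface, and method names come from the diff; the target values (url, query, refId, size) are made up.

import { LiveStreams, LiveTarget } from './live_streams';

const streams = new LiveStreams();
const target: LiveTarget = {
  query: '{job="varlogs"}',
  regexp: '',
  url: 'ws://localhost/some/tail/endpoint', // placeholder; the datasource builds the real tail url
  refId: 'A',
  size: 1000,
};

// Both subscriptions share a single websocket: the url is the cache key.
const a = streams.getStream(target).subscribe(frames => {
  console.log('rows:', frames.length ? frames[0].length : 0);
});
const b = streams.getStream(target).subscribe(() => {});

// refCount() tears down the websocket and finalize() evicts the cache entry
// once the last subscriber is gone; a later getStream(target) opens a new socket.
a.unsubscribe();
b.unsubscribe();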
@@ -1,25 +1,67 @@
import { LokiLogsStream } from './types';
import { parseLabels, FieldType, Labels, MutableDataFrame } from '@grafana/data';
import { LokiLogsStream, LokiResponse } from './types';
import {
  parseLabels,
  FieldType,
  Labels,
  DataFrame,
  ArrayVector,
  MutableDataFrame,
  findUniqueLabels,
} from '@grafana/data';

export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): MutableDataFrame {
/**
 * Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries.
 */
export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean, refId?: string): DataFrame {
  let labels: Labels = stream.parsedLabels;
  if (!labels && stream.labels) {
    labels = parseLabels(stream.labels);
  }
  const time: string[] = [];
  const lines: string[] = [];
  const times = new ArrayVector<string>([]);
  const lines = new ArrayVector<string>([]);

  for (const entry of stream.entries) {
    time.push(entry.ts || entry.timestamp);
    lines.push(entry.line);
    times.add(entry.ts || entry.timestamp);
    lines.add(entry.line);
  }

  return new MutableDataFrame({
  if (reverse) {
    times.buffer = times.buffer.reverse();
    lines.buffer = lines.buffer.reverse();
  }

  return {
    refId,
    labels,
    fields: [
      { name: 'ts', type: FieldType.time, values: time }, // Time
      { name: 'line', type: FieldType.string, values: lines }, // Line
      { name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
      { name: 'line', type: FieldType.string, config: {}, values: lines }, // Line
    ],
  });
    length: times.length,
  };
}

/**
 * Transform LokiResponse data and appends it to MutableDataFrame. Used for streaming where the dataFrame can be
 * a CircularDataFrame creating a fixed size rolling buffer.
 * TODO: Probably could be unified with the logStreamToDataFrame function.
 */
export function appendResponseToBufferedData(response: LokiResponse, data: MutableDataFrame) {
  // Should we do anything with: response.dropped_entries?

  const streams: LokiLogsStream[] = response.streams;
  if (streams && streams.length) {
    for (const stream of streams) {
      // Find unique labels
      const labels = parseLabels(stream.labels);
      const unique = findUniqueLabels(labels, data.labels);

      // Add each line
      for (const entry of stream.entries) {
        data.values.ts.add(entry.ts || entry.timestamp);
        data.values.line.add(entry.line);
        data.values.labels.add(unique);
      }
    }
  }
}
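
And a hypothetical wiring of appendResponseToBufferedData with a rolling buffer, mirroring what LiveStreams.getStream sets up above; the capacity and the response literal are made up for illustration.

import { CircularDataFrame, FieldType, parseLabels } from '@grafana/data';
import { appendResponseToBufferedData } from './result_transformer';

// CircularDataFrame acts as a MutableDataFrame with a fixed capacity, so the
// oldest rows fall out as new streaming entries are appended.
const data = new CircularDataFrame({ capacity: 3 });
data.labels = parseLabels('{job="varlogs"}');
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'line', type: FieldType.string });
data.addField({ name: 'labels', type: FieldType.other });

// Made-up response in the shape the tail websocket sends.
const response: any = {
  streams: [
    {
      labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
      entries: [{ ts: '2019-08-28T20:50:40Z', line: 'Kittens' }],
    },
  ],
  dropped_entries: null,
};

appendResponseToBufferedData(response, data);
console.log(data.length); // 1 — only the newest `capacity` rows are ever kept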