mirror of https://github.com/grafana/grafana
Merge pull request #12675 from grafana/davkal/logging-datasource
Datasource for Grafana logging platform
commit 4d722b2177
@@ -0,0 +1,9 @@
import React from 'react';

export default function({ value }) {
  return (
    <div>
      <pre>{JSON.stringify(value, undefined, 2)}</pre>
    </div>
  );
}
@@ -0,0 +1,66 @@
import React, { Fragment, PureComponent } from 'react';

import { LogsModel, LogRow } from 'app/core/logs_model';

interface LogsProps {
  className?: string;
  data: LogsModel;
}

const EXAMPLE_QUERY = '{job="default/prometheus"}';

const Entry: React.SFC<LogRow> = props => {
  const { entry, searchMatches } = props;
  if (searchMatches && searchMatches.length > 0) {
    let lastMatchEnd = 0;
    const spans = searchMatches.reduce((acc, match, i) => {
      // Insert non-match
      if (match.start !== lastMatchEnd) {
        acc.push(<>{entry.slice(lastMatchEnd, match.start)}</>);
      }
      // Match
      acc.push(
        <span className="logs-row-match-highlight" title={`Matching expression: ${match.text}`}>
          {entry.substr(match.start, match.length)}
        </span>
      );
      lastMatchEnd = match.start + match.length;
      // Non-matching end
      if (i === searchMatches.length - 1) {
        acc.push(<>{entry.slice(lastMatchEnd)}</>);
      }
      return acc;
    }, []);
    return <>{spans}</>;
  }
  return <>{props.entry}</>;
};

export default class Logs extends PureComponent<LogsProps, any> {
  render() {
    const { className = '', data } = this.props;
    const hasData = data && data.rows && data.rows.length > 0;
    return (
      <div className={`${className} logs`}>
        {hasData ? (
          <div className="logs-entries panel-container">
            {data.rows.map(row => (
              <Fragment key={row.key}>
                <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''} />
                <div title={`${row.timestamp} (${row.timeFromNow})`}>{row.timeLocal}</div>
                <div>
                  <Entry {...row} />
                </div>
              </Fragment>
            ))}
          </div>
        ) : null}
        {!hasData ? (
          <div className="panel-container">
            Enter a query like <code>{EXAMPLE_QUERY}</code>
          </div>
        ) : null}
      </div>
    );
  }
}
@@ -0,0 +1,29 @@
export enum LogLevel {
  crit = 'crit',
  warn = 'warn',
  err = 'error',
  error = 'error',
  info = 'info',
  debug = 'debug',
  trace = 'trace',
}

export interface LogSearchMatch {
  start: number;
  length: number;
  text?: string;
}

export interface LogRow {
  key: string;
  entry: string;
  logLevel: LogLevel;
  timestamp: string;
  timeFromNow: string;
  timeLocal: string;
  searchMatches?: LogSearchMatch[];
}

export interface LogsModel {
  rows: LogRow[];
}
@@ -0,0 +1,3 @@
# Grafana Logging Datasource - Native Plugin

This is a **built-in** datasource that allows you to connect to Grafana's logging service.
@@ -0,0 +1,38 @@
import { parseQuery } from './datasource';

describe('parseQuery', () => {
  it('returns empty for empty string', () => {
    expect(parseQuery('')).toEqual({
      query: '',
      regexp: '',
    });
  });

  it('returns regexp for strings without query', () => {
    expect(parseQuery('test')).toEqual({
      query: '',
      regexp: 'test',
    });
  });

  it('returns query for strings without regexp', () => {
    expect(parseQuery('{foo="bar"}')).toEqual({
      query: '{foo="bar"}',
      regexp: '',
    });
  });

  it('returns query for strings with query and search string', () => {
    expect(parseQuery('x {foo="bar"}')).toEqual({
      query: '{foo="bar"}',
      regexp: 'x',
    });
  });

  it('returns query for strings with query and regexp', () => {
    expect(parseQuery('{foo="bar"} x|y')).toEqual({
      query: '{foo="bar"}',
      regexp: 'x|y',
    });
  });
});
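
// Illustrative extra case (not part of the original commit): exercises the
// parseQuery branch that merges two stream selectors into a single selector.
it('merges two selectors and keeps the search term', () => {
  expect(parseQuery('{foo="bar"} x {baz="qux"}')).toEqual({
    query: '{foo="bar",baz="qux"}',
    regexp: 'x',
  });
});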
@@ -0,0 +1,134 @@
import _ from 'lodash';

import * as dateMath from 'app/core/utils/datemath';

import { processStreams } from './result_transformer';

const DEFAULT_LIMIT = 100;

const DEFAULT_QUERY_PARAMS = {
  direction: 'BACKWARD',
  limit: DEFAULT_LIMIT,
  regexp: '',
  query: '',
};

const QUERY_REGEXP = /({\w+="[^"]+"})?\s*(\w[^{]+)?\s*({\w+="[^"]+"})?/;

export function parseQuery(input: string) {
  const match = input.match(QUERY_REGEXP);
  let query = '';
  let regexp = '';

  if (match) {
    if (match[1]) {
      query = match[1];
    }
    if (match[2]) {
      regexp = match[2].trim();
    }
    if (match[3]) {
      if (match[1]) {
        query = `${match[1].slice(0, -1)},${match[3].slice(1)}`;
      } else {
        query = match[3];
      }
    }
  }

  return { query, regexp };
}
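
// Illustrative behaviour (mirrors the cases in datasource.test.ts):
//   parseQuery('{foo="bar"} x|y') -> { query: '{foo="bar"}', regexp: 'x|y' }
//   parseQuery('x {foo="bar"}')   -> { query: '{foo="bar"}', regexp: 'x' }
//   parseQuery('test')            -> { query: '', regexp: 'test' }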

function serializeParams(data: any) {
  return Object.keys(data)
    .map(k => {
      const v = data[k];
      return encodeURIComponent(k) + '=' + encodeURIComponent(v);
    })
    .join('&');
}
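
// Illustrative: serializeParams({ query: '{foo="bar"}', limit: 100 })
//   -> 'query=%7Bfoo%3D%22bar%22%7D&limit=100'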

export default class LoggingDatasource {
  /** @ngInject */
  constructor(private instanceSettings, private backendSrv, private templateSrv) {}

  _request(apiUrl: string, data?, options?: any) {
    const baseUrl = this.instanceSettings.url;
    const params = data ? serializeParams(data) : '';
    const url = `${baseUrl}${apiUrl}?${params}`;
    const req = {
      ...options,
      url,
    };
    return this.backendSrv.datasourceRequest(req);
  }

  prepareQueryTarget(target, options) {
    const interpolated = this.templateSrv.replace(target.expr);
    const start = this.getTime(options.range.from, false);
    const end = this.getTime(options.range.to, true);
    return {
      ...DEFAULT_QUERY_PARAMS,
      ...parseQuery(interpolated),
      start,
      end,
    };
  }

  query(options) {
    const queryTargets = options.targets
      .filter(target => target.expr)
      .map(target => this.prepareQueryTarget(target, options));
    if (queryTargets.length === 0) {
      return Promise.resolve({ data: [] });
    }

    const queries = queryTargets.map(target => this._request('/api/prom/query', target));

    return Promise.all(queries).then((results: any[]) => {
      // Flatten streams from multiple queries
      const allStreams = results.reduce((acc, response, i) => {
        const streams = response.data.streams || [];
        // Inject search for match highlighting
        const search = queryTargets[i].regexp;
        streams.forEach(s => {
          s.search = search;
        });
        return [...acc, ...streams];
      }, []);
      const model = processStreams(allStreams, DEFAULT_LIMIT);
      return { data: model };
    });
  }

  metadataRequest(url) {
    // HACK to get label values for {job=|}, will be replaced when implementing LoggingQueryField
    const apiUrl = url.replace('v1', 'prom');
    return this._request(apiUrl, { silent: true }).then(res => {
      const data = { data: { data: res.data.values || [] } };
      return data;
    });
  }

  getTime(date, roundUp) {
    if (_.isString(date)) {
      date = dateMath.parse(date, roundUp);
    }
    // Epoch milliseconds * 1e6, i.e. a nanosecond timestamp, rounded up
    return Math.ceil(date.valueOf() * 1e6);
  }

  testDatasource() {
    return this._request('/api/prom/label')
      .then(res => {
        if (res && res.data && res.data.values && res.data.values.length > 0) {
          return { status: 'success', message: 'Data source connected and labels found.' };
        }
        return {
          status: 'error',
          message: 'Data source connected, but no labels received. Verify that logging is configured properly.',
        };
      })
      .catch(err => {
        return { status: 'error', message: err.message };
      });
  }
}
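
// Minimal usage sketch (not part of the original commit). The options object
// is hypothetical but shaped like the fields query() actually reads:
// targets[].expr and range.from / range.to.
//
// const ds = new LoggingDatasource(instanceSettings, backendSrv, templateSrv);
// ds.query({
//   range: { from: 'now-1h', to: 'now' },
//   targets: [{ expr: '{job="default/prometheus"} error' }],
// }).then(result => {
//   // result.data is a LogsModel with rows sorted newest-first
// });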
[new image file added: 5.6 KiB]
@@ -0,0 +1,7 @@
import Datasource from './datasource';

export class LoggingConfigCtrl {
  static templateUrl = 'partials/config.html';
}

export { Datasource, LoggingConfigCtrl as ConfigCtrl };
@@ -0,0 +1,2 @@
<datasource-http-settings current="ctrl.current" no-direct-access="true">
</datasource-http-settings>
@@ -0,0 +1,28 @@
{
  "type": "datasource",
  "name": "Grafana Logging",
  "id": "logging",
  "metrics": false,
  "alerting": false,
  "annotations": false,
  "logs": true,
  "explore": true,
  "info": {
    "description": "Grafana Logging Data Source for Grafana",
    "author": {
      "name": "Grafana Project",
      "url": "https://grafana.com"
    },
    "logos": {
      "small": "img/grafana_icon.svg",
      "large": "img/grafana_icon.svg"
    },
    "links": [
      {
        "name": "Grafana Logging",
        "url": "https://grafana.com/"
      }
    ],
    "version": "5.3.0"
  }
}
@@ -0,0 +1,45 @@
import { LogLevel } from 'app/core/logs_model';

import { getLogLevel, getSearchMatches } from './result_transformer';

describe('getSearchMatches()', () => {
  it('gets no matches when search or line is empty', () => {
    expect(getSearchMatches('', '')).toEqual([]);
    expect(getSearchMatches('foo', '')).toEqual([]);
    expect(getSearchMatches('', 'foo')).toEqual([]);
  });

  it('gets no matches for unmatched search string', () => {
    expect(getSearchMatches('foo', 'bar')).toEqual([]);
  });

  it('gets matches for matched search string', () => {
    expect(getSearchMatches('foo', 'foo')).toEqual([{ length: 3, start: 0, text: 'foo' }]);
    expect(getSearchMatches(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo' }]);
  });

  it('gets multiple matches for a regexp search', () => {
    expect(getSearchMatches(' foo foo bar ', 'foo|bar')).toEqual([
      { length: 3, start: 1, text: 'foo' },
      { length: 3, start: 5, text: 'foo' },
      { length: 3, start: 9, text: 'bar' },
    ]);
  });
});

describe('getLogLevel()', () => {
  it('returns no log level on empty line', () => {
    expect(getLogLevel('')).toBe(undefined);
  });

  it('returns no log level when the level is part of a word', () => {
    expect(getLogLevel('this is a warning')).toBe(undefined);
  });

  it('returns the log level when the line contains one', () => {
    expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn);
    expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn);
  });

  it('returns the first log level found', () => {
    expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn);
  });
});
@@ -0,0 +1,71 @@
import _ from 'lodash';
import moment from 'moment';

import { LogLevel, LogsModel, LogRow } from 'app/core/logs_model';

export function getLogLevel(line: string): LogLevel {
  if (!line) {
    return undefined;
  }
  let level: LogLevel;
  Object.keys(LogLevel).forEach(key => {
    if (!level) {
      const regexp = new RegExp(`\\b${key}\\b`, 'i');
      if (regexp.test(line)) {
        level = LogLevel[key];
      }
    }
  });
  return level;
}
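
// Illustrative behaviour (taken from result_transformer.test.ts):
//   getLogLevel('warn: it is looking bad')                       -> LogLevel.warn
//   getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad') -> LogLevel.warn
//   getLogLevel('this is a warning')                             -> undefined (word-boundary check)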

export function getSearchMatches(line: string, search: string) {
  // Empty search can send re.exec() into infinite loop, exit early
  if (!line || !search) {
    return [];
  }
  const regexp = new RegExp(`(?:${search})`, 'g');
  const matches = [];
  let match;
  while ((match = regexp.exec(line))) {
    matches.push({
      text: match[0],
      start: match.index,
      length: match[0].length,
    });
  }
  return matches;
}
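
// Illustrative behaviour (see result_transformer.test.ts):
//   getSearchMatches(' foo foo bar ', 'foo|bar')
//   -> [{ start: 1, length: 3, text: 'foo' },
//       { start: 5, length: 3, text: 'foo' },
//       { start: 9, length: 3, text: 'bar' }]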

export function processEntry(entry: { line: string; timestamp: string }, stream): LogRow {
  const { line, timestamp } = entry;
  const { labels } = stream;
  const key = `EK${timestamp}${labels}`;
  const time = moment(timestamp);
  const timeFromNow = time.fromNow();
  const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
  const searchMatches = getSearchMatches(line, stream.search);
  const logLevel = getLogLevel(line);

  return {
    key,
    logLevel,
    searchMatches,
    timeFromNow,
    timeLocal,
    entry: line,
    timestamp: timestamp,
  };
}

export function processStreams(streams, limit?: number): LogsModel {
  const combinedEntries = streams.reduce((acc, stream) => {
    return [...acc, ...stream.entries.map(entry => processEntry(entry, stream))];
  }, []);
  const sortedEntries = _.chain(combinedEntries)
    .sortBy('timestamp')
    .reverse()
    .slice(0, limit || combinedEntries.length)
    .value();
  return { rows: sortedEntries };
}
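
// Minimal sketch of the call the datasource makes (stream shape assumed from
// query() above: labels, an optionally injected search, and an entries array):
//
// const model = processStreams(
//   [
//     {
//       labels: '{job="default/prometheus"}',
//       search: 'error',
//       entries: [{ line: 'level=error msg="oops"', timestamp: '2018-07-18T12:00:00Z' }],
//     },
//   ],
//   100
// );
// // model.rows is sorted newest-first and capped at the limit (here 100).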