mirror of https://github.com/grafana/grafana
Tempo: Decouple Tempo from Grafana core (#79888)
parent 767029a43d
commit 522519f671
@@ -0,0 +1,41 @@
package main

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
	tempo "github.com/grafana/grafana/pkg/tsdb/tempo"
)

type Datasource struct {
	Service *tempo.Service
}

var (
	_ backend.QueryDataHandler = (*Datasource)(nil)
	_ backend.StreamHandler    = (*Datasource)(nil)
)

func NewDatasource(c context.Context, b backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
	return &Datasource{
		Service: tempo.ProvideService(httpclient.NewProvider()),
	}, nil
}

func (d *Datasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	return d.Service.QueryData(ctx, req)
}

func (d *Datasource) SubscribeStream(ctx context.Context, req *backend.SubscribeStreamRequest) (*backend.SubscribeStreamResponse, error) {
	return d.Service.SubscribeStream(ctx, req)
}

func (d *Datasource) PublishStream(ctx context.Context, req *backend.PublishStreamRequest) (*backend.PublishStreamResponse, error) {
	return d.Service.PublishStream(ctx, req)
}

func (d *Datasource) RunStream(ctx context.Context, req *backend.RunStreamRequest, sender *backend.StreamSender) error {
	return d.Service.RunStream(ctx, req, sender)
}
@@ -0,0 +1,16 @@
package main

import (
	"os"

	"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)

func main() {
	// Created as described at https://grafana.com/developers/plugin-tools/introduction/backend-plugins
	if err := datasource.Manage("tempo", NewDatasource, datasource.ManageOpts{}); err != nil {
		log.DefaultLogger.Error(err.Error())
		os.Exit(1)
	}
}
@@ -0,0 +1,2 @@
# TS generated from cue by cuetsy
**/*.gen.ts
@@ -0,0 +1 @@
# Changelog
@@ -0,0 +1,3 @@
This directory contains dependencies that were duplicated from Grafana core while working on the decoupling of Tempo from core.
The long-term goal is to move these files out of this directory by replacing them with packages.
They are therefore temporary and meant to be used only internally by this package; please avoid using them as dependencies (imports) in other data source plugins.
@@ -0,0 +1,128 @@
import { createSelector, createSlice, PayloadAction } from '@reduxjs/toolkit';

import { AppNotification, AppNotificationSeverity, AppNotificationsState } from './types/appNotifications';

const MAX_STORED_NOTIFICATIONS = 25;
export const STORAGE_KEY = 'notifications';
export const NEW_NOTIFS_KEY = `${STORAGE_KEY}/lastRead`;
type StoredNotification = Omit<AppNotification, 'component'>;

export const initialState: AppNotificationsState = {
  byId: deserializeNotifications(),
  lastRead: Number.parseInt(window.localStorage.getItem(NEW_NOTIFS_KEY) ?? `${Date.now()}`, 10),
};

/**
 * Reducer and actions to show toast notifications of various types (success, warnings, errors etc). Use to show
 * transient info to the user, like errors that cannot be otherwise handled or success after an action.
 *
 * Use the factory functions in core/copy/appNotifications to create the payload.
 */
const appNotificationsSlice = createSlice({
  name: 'appNotifications',
  initialState,
  reducers: {
    notifyApp: (state, { payload: newAlert }: PayloadAction<AppNotification>) => {
      if (Object.values(state.byId).some((alert) => isSimilar(newAlert, alert) && alert.showing)) {
        return;
      }

      state.byId[newAlert.id] = newAlert;
      serializeNotifications(state.byId);
    },
    hideAppNotification: (state, { payload: alertId }: PayloadAction<string>) => {
      if (!(alertId in state.byId)) {
        return;
      }

      state.byId[alertId].showing = false;
      serializeNotifications(state.byId);
    },
    clearNotification: (state, { payload: alertId }: PayloadAction<string>) => {
      delete state.byId[alertId];
      serializeNotifications(state.byId);
    },
    clearAllNotifications: (state) => {
      state.byId = {};
      serializeNotifications(state.byId);
    },
    readAllNotifications: (state, { payload: timestamp }: PayloadAction<number>) => {
      state.lastRead = timestamp;
    },
  },
});

export const { notifyApp, hideAppNotification, clearNotification, clearAllNotifications, readAllNotifications } =
  appNotificationsSlice.actions;

export const appNotificationsReducer = appNotificationsSlice.reducer;

// Selectors

export const selectLastReadTimestamp = (state: AppNotificationsState) => state.lastRead;
export const selectById = (state: AppNotificationsState) => state.byId;
export const selectAll = createSelector(selectById, (byId) =>
  Object.values(byId).sort((a, b) => b.timestamp - a.timestamp)
);
export const selectWarningsAndErrors = createSelector(selectAll, (all) => all.filter(isAtLeastWarning));
export const selectVisible = createSelector(selectById, (byId) => Object.values(byId).filter((n) => n.showing));

// Helper functions

function isSimilar(a: AppNotification, b: AppNotification): boolean {
  return a.icon === b.icon && a.severity === b.severity && a.text === b.text && a.title === b.title;
}

function isAtLeastWarning(notif: AppNotification) {
  return notif.severity === AppNotificationSeverity.Warning || notif.severity === AppNotificationSeverity.Error;
}

function isStoredNotification(obj: unknown): obj is StoredNotification {
  return typeof obj === 'object' && obj !== null && 'id' in obj && 'icon' in obj && 'title' in obj && 'text' in obj;
}

// (De)serialization

export function deserializeNotifications(): Record<string, StoredNotification> {
  const storedNotifsRaw = window.localStorage.getItem(STORAGE_KEY);
  if (!storedNotifsRaw) {
    return {};
  }

  const parsed = JSON.parse(storedNotifsRaw);
  if (!Object.values(parsed).every((v) => isStoredNotification(v))) {
    return {};
  }

  return parsed;
}

function serializeNotifications(notifs: Record<string, StoredNotification>) {
  const reducedNotifs = Object.values(notifs)
    .filter(isAtLeastWarning)
    .sort((a, b) => b.timestamp - a.timestamp)
    .slice(0, MAX_STORED_NOTIFICATIONS)
    .reduce<Record<string, StoredNotification>>((prev, cur) => {
      prev[cur.id] = {
        id: cur.id,
        severity: cur.severity,
        icon: cur.icon,
        title: cur.title,
        text: cur.text,
        traceId: cur.traceId,
        timestamp: cur.timestamp,
        // we don't care about still showing toasts after refreshing
        // https://github.com/grafana/grafana/issues/71932
        showing: false,
      };

      return prev;
    }, {});

  try {
    window.localStorage.setItem(STORAGE_KEY, JSON.stringify(reducedNotifs));
  } catch (err) {
    console.error('Unable to persist notifications to local storage');
    console.error(err);
  }
}
@@ -0,0 +1,5 @@
import { appNotificationsReducer as appNotifications } from './appNotification';

export default {
  appNotifications,
};
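A short usage sketch of the copied slice, in case the wiring is not obvious from the reducer alone; the store setup below is illustrative and not part of this commit, and the notification payload simply follows the AppNotification interface defined further down.

import { configureStore } from '@reduxjs/toolkit';

import { notifyApp, appNotificationsReducer } from './appNotification';
import { AppNotificationSeverity } from './types/appNotifications';

// Minimal store wiring only the copied appNotifications slice (illustrative).
const store = configureStore({ reducer: { appNotifications: appNotificationsReducer } });

// Dispatch a toast; warnings and errors are also persisted to localStorage by the slice.
store.dispatch(
  notifyApp({
    id: 'example-id',
    severity: AppNotificationSeverity.Error,
    icon: 'exclamation-triangle',
    title: 'Query failed',
    text: 'Tempo returned status 500',
    showing: true,
    timestamp: Date.now(),
  })
);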
@@ -0,0 +1,36 @@
export interface AppNotification {
  id: string;
  severity: AppNotificationSeverity;
  icon: string;
  title: string;
  text: string;
  traceId?: string;
  component?: React.ReactElement;
  showing: boolean;
  timestamp: number;
}

export enum AppNotificationSeverity {
  Success = 'success',
  Warning = 'warning',
  Error = 'error',
  Info = 'info',
}

export enum AppNotificationTimeout {
  Success = 3000,
  Warning = 5000,
  Error = 7000,
}

export const timeoutMap = {
  [AppNotificationSeverity.Success]: AppNotificationTimeout.Success,
  [AppNotificationSeverity.Warning]: AppNotificationTimeout.Warning,
  [AppNotificationSeverity.Error]: AppNotificationTimeout.Error,
  [AppNotificationSeverity.Info]: AppNotificationTimeout.Success,
};

export interface AppNotificationsState {
  byId: Record<string, AppNotification>;
  lastRead: number;
}
@@ -0,0 +1 @@
export * from './appNotifications';
@@ -0,0 +1,104 @@
import React, { PureComponent, ReactNode } from 'react';

import { AdHocVariableFilter, DataSourceRef, SelectableValue } from '@grafana/data';
import { Segment } from '@grafana/ui';

import { AdHocFilterBuilder } from './AdHocFilterBuilder';
import { REMOVE_FILTER_KEY } from './AdHocFilterKey';
import { AdHocFilterRenderer } from './AdHocFilterRenderer';
import { ConditionSegment } from './ConditionSegment';

interface Props {
  datasource: DataSourceRef | null;
  filters: AdHocVariableFilter[];
  baseFilters?: AdHocVariableFilter[];
  addFilter: (filter: AdHocVariableFilter) => void;
  removeFilter: (index: number) => void;
  changeFilter: (index: number, newFilter: AdHocVariableFilter) => void;
  disabled?: boolean;
}

/**
 * Simple filtering component that automatically uses datasource APIs to get the available labels and their values,
 * for dynamic visual filtering without much setup. Instead of having a single onChange prop, it reports all the
 * change events with separate props so it is usable with AdHocPicker.
 *
 * Note: there is no datasource API to suggest the operators here, so the component is hardcoded to use
 * Prometheus-style operators. Filters are also assumed to be joined with the `AND` operator, which is hardcoded as well.
 */
export class AdHocFilter extends PureComponent<Props> {
  onChange = (index: number, prop: string) => (key: SelectableValue<string | null>) => {
    const { filters } = this.props;
    const { value } = key;

    if (key.value === REMOVE_FILTER_KEY) {
      return this.props.removeFilter(index);
    }

    return this.props.changeFilter(index, {
      ...filters[index],
      [prop]: value,
    });
  };

  appendFilterToVariable = (filter: AdHocVariableFilter) => {
    this.props.addFilter(filter);
  };

  render() {
    const { filters, disabled } = this.props;

    return (
      <div className="gf-form-inline">
        {this.renderFilters(filters, disabled)}

        {!disabled && (
          <AdHocFilterBuilder
            datasource={this.props.datasource!}
            appendBefore={filters.length > 0 ? <ConditionSegment label="AND" /> : null}
            onCompleted={this.appendFilterToVariable}
            allFilters={this.getAllFilters()}
          />
        )}
      </div>
    );
  }

  getAllFilters() {
    if (this.props.baseFilters) {
      return this.props.baseFilters.concat(this.props.filters);
    }

    return this.props.filters;
  }

  renderFilters(filters: AdHocVariableFilter[], disabled?: boolean) {
    if (filters.length === 0 && disabled) {
      return <Segment disabled={disabled} value="No filters" options={[]} onChange={() => {}} />;
    }

    return filters.reduce((segments: ReactNode[], filter, index) => {
      if (segments.length > 0) {
        segments.push(<ConditionSegment label="AND" key={`condition-${index}`} />);
      }
      segments.push(this.renderFilterSegments(filter, index, disabled));
      return segments;
    }, []);
  }

  renderFilterSegments(filter: AdHocVariableFilter, index: number, disabled?: boolean) {
    return (
      <React.Fragment key={`filter-${index}`}>
        <AdHocFilterRenderer
          disabled={disabled}
          datasource={this.props.datasource!}
          filter={filter}
          onKeyChange={this.onChange(index, 'key')}
          onOperatorChange={this.onChange(index, 'operator')}
          onValueChange={this.onChange(index, 'value')}
          allFilters={this.getAllFilters()}
        />
      </React.Fragment>
    );
  }
}
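For reference, a minimal usage sketch of the copied AdHocFilter component; the wrapping component, state handling, and datasource ref below are illustrative, not part of this commit.

import React, { useState } from 'react';

import { AdHocVariableFilter } from '@grafana/data';

import { AdHocFilter } from './AdHocFilter';

// Hypothetical wrapper showing how the callback props fit together.
export function TempoAdHocFilterExample() {
  const [filters, setFilters] = useState<AdHocVariableFilter[]>([]);

  return (
    <AdHocFilter
      datasource={{ uid: 'my-tempo-uid' }} // assumed datasource ref
      filters={filters}
      addFilter={(f) => setFilters([...filters, f])}
      removeFilter={(i) => setFilters(filters.filter((_, idx) => idx !== i))}
      changeFilter={(i, f) => setFilters(filters.map((old, idx) => (idx === i ? f : old)))}
    />
  );
}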
@@ -0,0 +1,75 @@
import i18n from 'i18next';
import React, { useCallback, useState } from 'react';

import { AdHocVariableFilter, DataSourceRef, SelectableValue } from '@grafana/data';

import { AdHocFilterKey, REMOVE_FILTER_KEY } from './AdHocFilterKey';
import { AdHocFilterRenderer } from './AdHocFilterRenderer';

interface Props {
  datasource: DataSourceRef;
  onCompleted: (filter: AdHocVariableFilter) => void;
  appendBefore?: React.ReactNode;
  allFilters: AdHocVariableFilter[];
}

// Reassign t() so i18next-parser doesn't warn on dynamic key, and we can have 'failOnWarnings' enabled
const tFunc = i18n.t;

// import { t } from 'app/core/internationalization';
export const t = (id: string, defaultMessage: string, values?: Record<string, unknown>) => {
  return tFunc(id, defaultMessage, values);
};

export const AdHocFilterBuilder = ({ datasource, appendBefore, onCompleted, allFilters }: Props) => {
  const [key, setKey] = useState<string | null>(null);
  const [operator, setOperator] = useState<string>('=');

  const onKeyChanged = useCallback(
    (item: SelectableValue<string | null>) => {
      if (item.value !== REMOVE_FILTER_KEY) {
        setKey(item.value ?? '');
        return;
      }
      setKey(null);
    },
    [setKey]
  );

  const onOperatorChanged = useCallback(
    (item: SelectableValue<string>) => setOperator(item.value ?? ''),
    [setOperator]
  );

  const onValueChanged = useCallback(
    (item: SelectableValue<string>) => {
      onCompleted({
        value: item.value ?? '',
        operator: operator,
        key: key!,
      });
      setKey(null);
      setOperator('=');
    },
    [onCompleted, operator, key]
  );

  if (key === null) {
    return <AdHocFilterKey datasource={datasource} filterKey={key} onChange={onKeyChanged} allFilters={allFilters} />;
  }

  return (
    <React.Fragment key="filter-builder">
      {appendBefore}
      <AdHocFilterRenderer
        datasource={datasource}
        filter={{ key, value: '', operator }}
        placeHolder={t('variable.adhoc.placeholder', 'Select value')}
        onKeyChange={onKeyChanged}
        onOperatorChange={onOperatorChanged}
        onValueChange={onValueChanged}
        allFilters={allFilters}
      />
    </React.Fragment>
  );
};
@@ -0,0 +1,82 @@
import React, { ReactElement } from 'react';

import { AdHocVariableFilter, DataSourceRef, SelectableValue } from '@grafana/data';
import { getDataSourceSrv } from '@grafana/runtime';
import { Icon, SegmentAsync } from '@grafana/ui';

interface Props {
  datasource: DataSourceRef;
  filterKey: string | null;
  onChange: (item: SelectableValue<string | null>) => void;
  allFilters: AdHocVariableFilter[];
  disabled?: boolean;
}

const MIN_WIDTH = 90;
export const AdHocFilterKey = ({ datasource, onChange, disabled, filterKey, allFilters }: Props) => {
  const loadKeys = () => fetchFilterKeys(datasource, filterKey, allFilters);
  const loadKeysWithRemove = () => fetchFilterKeysWithRemove(datasource, filterKey, allFilters);

  if (filterKey === null) {
    return (
      <div className="gf-form" data-testid="AdHocFilterKey-add-key-wrapper">
        <SegmentAsync
          disabled={disabled}
          className="query-segment-key"
          Component={plusSegment}
          value={filterKey}
          onChange={onChange}
          loadOptions={loadKeys}
          inputMinWidth={MIN_WIDTH}
        />
      </div>
    );
  }

  return (
    <div className="gf-form" data-testid="AdHocFilterKey-key-wrapper">
      <SegmentAsync
        disabled={disabled}
        className="query-segment-key"
        value={filterKey}
        onChange={onChange}
        loadOptions={loadKeysWithRemove}
        inputMinWidth={MIN_WIDTH}
      />
    </div>
  );
};

export const REMOVE_FILTER_KEY = '-- remove filter --';
const REMOVE_VALUE = { label: REMOVE_FILTER_KEY, value: REMOVE_FILTER_KEY };

const plusSegment: ReactElement = (
  <span className="gf-form-label query-part" aria-label="Add Filter">
    <Icon name="plus" />
  </span>
);

const fetchFilterKeys = async (
  datasource: DataSourceRef,
  currentKey: string | null,
  allFilters: AdHocVariableFilter[]
): Promise<Array<SelectableValue<string>>> => {
  const ds = await getDataSourceSrv().get(datasource);

  if (!ds || !ds.getTagKeys) {
    return [];
  }

  const otherFilters = allFilters.filter((f) => f.key !== currentKey);
  const metrics = await ds.getTagKeys({ filters: otherFilters });
  return metrics.map((m) => ({ label: m.text, value: m.text }));
};

const fetchFilterKeysWithRemove = async (
  datasource: DataSourceRef,
  currentKey: string | null,
  allFilters: AdHocVariableFilter[]
): Promise<Array<SelectableValue<string>>> => {
  const keys = await fetchFilterKeys(datasource, currentKey, allFilters);
  return [REMOVE_VALUE, ...keys];
};
@@ -0,0 +1,54 @@
import React from 'react';

import { AdHocVariableFilter, DataSourceRef, SelectableValue } from '@grafana/data';

import { AdHocFilterKey } from './AdHocFilterKey';
import { AdHocFilterValue } from './AdHocFilterValue';
import { OperatorSegment } from './OperatorSegment';

interface Props {
  datasource: DataSourceRef;
  filter: AdHocVariableFilter;
  allFilters: AdHocVariableFilter[];
  onKeyChange: (item: SelectableValue<string | null>) => void;
  onOperatorChange: (item: SelectableValue<string>) => void;
  onValueChange: (item: SelectableValue<string>) => void;
  placeHolder?: string;
  getTagKeysOptions?: any;
  disabled?: boolean;
}

export const AdHocFilterRenderer = ({
  datasource,
  filter: { key, operator, value },
  onKeyChange,
  onOperatorChange,
  onValueChange,
  placeHolder,
  allFilters,
  disabled,
}: Props) => {
  return (
    <>
      <AdHocFilterKey
        disabled={disabled}
        datasource={datasource}
        filterKey={key}
        onChange={onKeyChange}
        allFilters={allFilters}
      />
      <div className="gf-form">
        <OperatorSegment disabled={disabled} value={operator} onChange={onOperatorChange} />
      </div>
      <AdHocFilterValue
        disabled={disabled}
        datasource={datasource}
        filterKey={key}
        filterValue={value}
        allFilters={allFilters}
        onChange={onValueChange}
        placeHolder={placeHolder}
      />
    </>
  );
};
@@ -0,0 +1,68 @@
import React from 'react';

import {
  AdHocVariableFilter,
  DataSourceRef,
  MetricFindValue,
  SelectableValue,
  getDefaultTimeRange,
} from '@grafana/data';
// import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { getDataSourceSrv } from '@grafana/runtime';
import { SegmentAsync } from '@grafana/ui';

interface Props {
  datasource: DataSourceRef;
  filterKey: string;
  filterValue?: string;
  onChange: (item: SelectableValue<string>) => void;
  placeHolder?: string;
  disabled?: boolean;
  allFilters: AdHocVariableFilter[];
}

export const AdHocFilterValue = ({
  datasource,
  disabled,
  onChange,
  filterKey,
  filterValue,
  placeHolder,
  allFilters,
}: Props) => {
  const loadValues = () => fetchFilterValues(datasource, filterKey, allFilters);

  return (
    <div className="gf-form" data-testid="AdHocFilterValue-value-wrapper">
      <SegmentAsync
        className="query-segment-value"
        disabled={disabled}
        placeholder={placeHolder}
        value={filterValue}
        onChange={onChange}
        loadOptions={loadValues}
      />
    </div>
  );
};

const fetchFilterValues = async (
  datasource: DataSourceRef,
  key: string,
  allFilters: AdHocVariableFilter[]
): Promise<Array<SelectableValue<string>>> => {
  const ds = await getDataSourceSrv().get(datasource);

  if (!ds || !ds.getTagValues) {
    return [];
  }

  // const timeRange = getTimeSrv().timeRange();
  // See https://github.com/grafana/grafana/pull/76118/files#diff-260d46415915a2e3e7d294e313bd128666e9f0868aa94d8aee4d4a24a060b542L27-R26
  const timeRange = getDefaultTimeRange();

  // Filter out the current filter key from the list of all filters
  const otherFilters = allFilters.filter((f) => f.key !== key);
  const metrics = await ds.getTagValues({ key, filters: otherFilters, timeRange });
  return metrics.map((m: MetricFindValue) => ({ label: m.text, value: m.text }));
};
@@ -0,0 +1,13 @@
import React from 'react';

interface Props {
  label: string;
}

export const ConditionSegment = ({ label }: Props) => {
  return (
    <div className="gf-form">
      <span className="gf-form-label query-keyword">{label}</span>
    </div>
  );
};
@@ -0,0 +1,27 @@
import React from 'react';

import { SelectableValue } from '@grafana/data';
import { Segment } from '@grafana/ui';

interface Props {
  value: string;
  onChange: (item: SelectableValue<string>) => void;
  disabled?: boolean;
}

const options = ['=', '!=', '<', '>', '=~', '!~'].map<SelectableValue<string>>((value) => ({
  label: value,
  value,
}));

export const OperatorSegment = ({ value, disabled, onChange }: Props) => {
  return (
    <Segment
      className="query-segment-operator"
      value={value}
      disabled={disabled}
      options={options}
      onChange={onChange}
    />
  );
};
@@ -0,0 +1,47 @@
import { css } from '@emotion/css';
import React from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

type Props = {
  description: string;
  suffix: string;
  feature: string;
};

export function ConfigDescriptionLink(props: Props) {
  const { description, suffix, feature } = props;
  const text = `Learn more about ${feature}`;
  const styles = useStyles2(getStyles);

  return (
    <span className={styles.container}>
      {description}
      <a
        aria-label={text}
        href={`https://grafana.com/docs/grafana/next/datasources/${suffix}`}
        rel="noreferrer"
        target="_blank"
      >
        {text}
      </a>
    </span>
  );
}

const getStyles = (theme: GrafanaTheme2) => {
  return {
    container: css({
      color: theme.colors.text.secondary,
      a: css({
        color: theme.colors.text.link,
        textDecoration: 'underline',
        marginLeft: '5px',
        '&:hover': {
          textDecoration: 'none',
        },
      }),
    }),
  };
};
@@ -0,0 +1,25 @@
import { css } from '@emotion/css';
import React from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

export const Divider = ({ hideLine = false }) => {
  const styles = useStyles2(getStyles);

  if (hideLine) {
    return <hr className={styles.dividerHideLine} />;
  }

  return <hr className={styles.divider} />;
};

const getStyles = (theme: GrafanaTheme2) => ({
  divider: css`
    margin: ${theme.spacing(4, 0)};
  `,
  dividerHideLine: css`
    border: none;
    margin: ${theme.spacing(3, 0)};
  `,
});
@@ -0,0 +1,86 @@
import { css } from '@emotion/css';
import React from 'react';

import {
  DataSourceJsonData,
  DataSourcePluginOptionsEditorProps,
  GrafanaTheme2,
  updateDatasourcePluginJsonDataOption,
} from '@grafana/data';
import { ConfigSubSection } from '@grafana/experimental';
import { InlineField, InlineFieldRow, InlineSwitch, useStyles2 } from '@grafana/ui';

import { ConfigDescriptionLink } from './ConfigDescriptionLink';

export interface NodeGraphOptions {
  enabled?: boolean;
}

export interface NodeGraphData extends DataSourceJsonData {
  nodeGraph?: NodeGraphOptions;
}

interface Props extends DataSourcePluginOptionsEditorProps<NodeGraphData> {}

export function NodeGraphSettings({ options, onOptionsChange }: Props) {
  const styles = useStyles2(getStyles);

  return (
    <div className={styles.container}>
      <InlineFieldRow className={styles.row}>
        <InlineField
          tooltip="Displays the node graph above the trace view. Default: disabled"
          label="Enable node graph"
          labelWidth={26}
        >
          <InlineSwitch
            id="enableNodeGraph"
            value={options.jsonData.nodeGraph?.enabled}
            onChange={(event: React.SyntheticEvent<HTMLInputElement>) =>
              updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'nodeGraph', {
                ...options.jsonData.nodeGraph,
                enabled: event.currentTarget.checked,
              })
            }
          />
        </InlineField>
      </InlineFieldRow>
    </div>
  );
}

export const NodeGraphSection = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => {
  let suffix = options.type;
  suffix += options.type === 'tempo' ? '/configure-tempo-data-source/#node-graph' : '/#node-graph';

  return (
    <ConfigSubSection
      title="Node graph"
      description={
        <ConfigDescriptionLink
          description="Show or hide the node graph visualization."
          suffix={suffix}
          feature="the node graph"
        />
      }
    >
      <NodeGraphSettings options={options} onOptionsChange={onOptionsChange} />
    </ConfigSubSection>
  );
};

const getStyles = (theme: GrafanaTheme2) => ({
  infoText: css`
    label: infoText;
    padding-bottom: ${theme.spacing(2)};
    color: ${theme.colors.text.secondary};
  `,
  container: css`
    label: container;
    width: 100%;
  `,
  row: css`
    label: row;
    align-items: baseline;
  `,
});
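A hedged sketch of how the NodeGraphSection exported above could be composed into a datasource config editor; the wrapping ExampleConfigEditor component is an assumption for illustration only.

import React from 'react';

import { DataSourcePluginOptionsEditorProps } from '@grafana/data';

import { NodeGraphSection } from './NodeGraphSettings';

// Hypothetical ConfigEditor composition; toggling the switch writes { nodeGraph: { enabled: true } } into jsonData.
export function ExampleConfigEditor(props: DataSourcePluginOptionsEditorProps) {
  return (
    <>
      {/* other config sections would go here */}
      <NodeGraphSection options={props.options} onOptionsChange={props.onOptionsChange} />
    </>
  );
}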
@@ -0,0 +1,112 @@
import { css } from '@emotion/css';
import React from 'react';

import {
  DataSourceJsonData,
  DataSourcePluginOptionsEditorProps,
  GrafanaTheme2,
  toOption,
  updateDatasourcePluginJsonDataOption,
} from '@grafana/data';
import { ConfigSubSection } from '@grafana/experimental';
import { InlineField, InlineFieldRow, Input, Select, useStyles2 } from '@grafana/ui';

import { ConfigDescriptionLink } from '../ConfigDescriptionLink';

export interface SpanBarOptions {
  type?: string;
  tag?: string;
}

export interface SpanBarOptionsData extends DataSourceJsonData {
  spanBar?: SpanBarOptions;
}

export const NONE = 'None';
export const DURATION = 'Duration';
export const TAG = 'Tag';

interface Props extends DataSourcePluginOptionsEditorProps<SpanBarOptionsData> {}

export default function SpanBarSettings({ options, onOptionsChange }: Props) {
  const styles = useStyles2(getStyles);
  const selectOptions = [NONE, DURATION, TAG].map(toOption);

  return (
    <div className={css({ width: '100%' })}>
      <InlineFieldRow className={styles.row}>
        <InlineField label="Label" labelWidth={26} tooltip="Default: duration" grow>
          <Select
            inputId="label"
            options={selectOptions}
            value={options.jsonData.spanBar?.type || ''}
            onChange={(v) => {
              updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'spanBar', {
                ...options.jsonData.spanBar,
                type: v?.value ?? '',
              });
            }}
            placeholder="Duration"
            isClearable
            aria-label={'select-label-name'}
            width={40}
          />
        </InlineField>
      </InlineFieldRow>
      {options.jsonData.spanBar?.type === TAG && (
        <InlineFieldRow className={styles.row}>
          <InlineField
            label="Tag key"
            labelWidth={26}
            tooltip="Tag key which will be used to get the tag value. A span's attributes and resources will be searched for the tag key"
          >
            <Input
              type="text"
              placeholder="Enter tag key"
              onChange={(v) =>
                updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'spanBar', {
                  ...options.jsonData.spanBar,
                  tag: v.currentTarget.value,
                })
              }
              value={options.jsonData.spanBar?.tag || ''}
              width={40}
            />
          </InlineField>
        </InlineFieldRow>
      )}
    </div>
  );
}

export const SpanBarSection = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => {
  let suffix = options.type;
  suffix += options.type === 'tempo' ? '/configure-tempo-data-source/#span-bar' : '/#span-bar';

  return (
    <ConfigSubSection
      title="Span bar"
      description={
        <ConfigDescriptionLink
          description="Add additional info next to the service and operation on a span bar row in the trace view."
          suffix={suffix}
          feature="the span bar"
        />
      }
    >
      <SpanBarSettings options={options} onOptionsChange={onOptionsChange} />
    </ConfigSubSection>
  );
};

const getStyles = (theme: GrafanaTheme2) => ({
  infoText: css`
    label: infoText;
    padding-bottom: ${theme.spacing(2)};
    color: ${theme.colors.text.secondary};
  `,
  row: css`
    label: row;
    align-items: baseline;
  `,
});
@@ -0,0 +1,46 @@
import { v4 as uuidv4 } from 'uuid';

import { AppNotification, AppNotificationSeverity } from '../actions/types';

import { getMessageFromError } from './errors';

const defaultSuccessNotification = {
  title: '',
  text: '',
  severity: AppNotificationSeverity.Success,
  icon: 'check',
};

const defaultErrorNotification = {
  title: '',
  text: '',
  severity: AppNotificationSeverity.Error,
  icon: 'exclamation-triangle',
};

export const createSuccessNotification = (title: string, text = '', traceId?: string): AppNotification => ({
  ...defaultSuccessNotification,
  title,
  text,
  id: uuidv4(),
  timestamp: Date.now(),
  showing: true,
});

export const createErrorNotification = (
  title: string,
  text: string | Error = '',
  traceId?: string,
  component?: React.ReactElement
): AppNotification => {
  return {
    ...defaultErrorNotification,
    text: getMessageFromError(text),
    title,
    id: uuidv4(),
    traceId,
    component,
    timestamp: Date.now(),
    showing: true,
  };
};
@@ -0,0 +1,21 @@
import { isFetchError } from '@grafana/runtime';

export function getMessageFromError(err: unknown): string {
  if (typeof err === 'string') {
    return err;
  }

  if (err) {
    if (err instanceof Error) {
      return err.message;
    } else if (isFetchError(err)) {
      if (err.data && err.data.message) {
        return err.data.message;
      } else if (err.statusText) {
        return err.statusText;
      }
    }
  }

  return JSON.stringify(err);
}
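A quick illustration of the branches getMessageFromError takes for the error shapes it handles; the inputs are made up and the expected outputs in the comments follow directly from the function above.

import { getMessageFromError } from './errors';

// Plain strings pass through unchanged.
console.log(getMessageFromError('plain message')); // 'plain message'

// Error instances resolve to their message.
console.log(getMessageFromError(new Error('boom'))); // 'boom'

// Anything that is neither a string, an Error, nor a fetch error is stringified as a last resort.
console.log(getMessageFromError({ code: 500 })); // '{"code":500}'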
@@ -0,0 +1,265 @@
import { LRUCache } from 'lru-cache';
import Prism from 'prismjs';

import { LanguageProvider, AbstractQuery, KeyValue } from '@grafana/data';

import { extractLabelMatchers, processLabels, toPromLikeExpr } from '../prometheus/language_utils';

import {
  extractLabelKeysFromDataFrame,
  extractLogParserFromDataFrame,
  extractUnwrapLabelKeysFromDataFrame,
} from './responseUtils';
import syntax from './syntax';
import { ParserAndLabelKeysResult, LokiDatasource, LokiQuery, LokiQueryType } from './types';

const DEFAULT_MAX_LINES_SAMPLE = 10;
const NS_IN_MS = 1000000;

export default class LokiLanguageProvider extends LanguageProvider {
  labelKeys: string[];
  started = false;
  datasource: LokiDatasource;

  /**
   * Cache for labels of series. This is a bit simplistic in the sense that it just counts each response as 1 and does
   * not account for the different sizes of responses. If that is needed, a `length` function can be added in the options.
   * 10 as a max size is totally arbitrary right now.
   */
  private seriesCache = new LRUCache<string, Record<string, string[]>>({ max: 10 });
  private labelsCache = new LRUCache<string, string[]>({ max: 10 });

  constructor(datasource: LokiDatasource, initialValues?: any) {
    super();

    this.datasource = datasource;
    this.labelKeys = [];

    Object.assign(this, initialValues);
  }

  request = async (url: string, params?: any) => {
    try {
      return await this.datasource.metadataRequest(url, params);
    } catch (error) {
      console.error(error);
    }

    return undefined;
  };

  /**
   * Initialize the language provider by fetching the set of labels.
   */
  start = () => {
    if (!this.startTask) {
      this.startTask = this.fetchLabels().then(() => {
        this.started = true;
        return [];
      });
    }

    return this.startTask;
  };

  /**
   * Returns the label keys that have been fetched.
   * If labels have not been fetched yet, it will return an empty array.
   * For updated labels (which should not happen often), use fetchLabels.
   * It is quite complicated to know when to use fetchLabels and when to use getLabelKeys.
   * We should consider simplifying this and use caching in the same way as with seriesCache and labelsCache
   * and just always use fetchLabels.
   * Caching should be thought out properly, so we are not fetching this often, as labelKeys should not be changing often.
   *
   * @returns {string[]} An array of label keys or an empty array if labels have not been fetched.
   */
  getLabelKeys(): string[] {
    return this.labelKeys;
  }

  importFromAbstractQuery(labelBasedQuery: AbstractQuery): LokiQuery {
    return {
      refId: labelBasedQuery.refId,
      expr: toPromLikeExpr(labelBasedQuery),
      // queryType: LokiQueryType.Range,
      queryType: LokiQueryType.Range,
    };
  }

  exportToAbstractQuery(query: LokiQuery): AbstractQuery {
    const lokiQuery = query.expr;
    if (!lokiQuery || lokiQuery.length === 0) {
      return { refId: query.refId, labelMatchers: [] };
    }
    const tokens = Prism.tokenize(lokiQuery, syntax);
    return {
      refId: query.refId,
      labelMatchers: extractLabelMatchers(tokens),
    };
  }

  /**
   * Fetch all label keys
   * This asynchronous function returns all available label keys from the data source.
   * It returns a promise that resolves to an array of strings containing the label keys.
   *
   * @returns A promise containing an array of label keys.
   * @throws An error if the fetch operation fails.
   */
  async fetchLabels(): Promise<string[]> {
    const url = 'labels';
    const timeRange = this.datasource.getTimeRangeParams();

    const res = await this.request(url, timeRange);
    if (Array.isArray(res)) {
      const labels = res
        .slice()
        .sort()
        .filter((label) => label !== '__name__');
      this.labelKeys = labels;
      return this.labelKeys;
    }

    return [];
  }

  /**
   * Fetch series labels for a selector
   *
   * This method fetches labels for a given stream selector, such as `{job="grafana"}`.
   * It returns a promise that resolves to a record mapping label names to their corresponding values.
   *
   * @param streamSelector - The stream selector for which you want to retrieve labels.
   * @returns A promise containing a record of label names and their values.
   * @throws An error if the fetch operation fails.
   */
  fetchSeriesLabels = async (streamSelector: string): Promise<Record<string, string[]>> => {
    const interpolatedMatch = this.datasource.interpolateString(streamSelector);
    const url = 'series';
    const { start, end } = this.datasource.getTimeRangeParams();

    const cacheKey = this.generateCacheKey(url, start, end, interpolatedMatch);
    let value = this.seriesCache.get(cacheKey);
    if (!value) {
      const params = { 'match[]': interpolatedMatch, start, end };
      const data = await this.request(url, params);
      const { values } = processLabels(data);
      value = values;
      this.seriesCache.set(cacheKey, value);
    }
    return value;
  };

  /**
   * Fetch series for a selector. Use this for raw results. Use fetchSeriesLabels() to get labels.
   * @param match
   */
  fetchSeries = async (match: string): Promise<Array<Record<string, string>>> => {
    const url = 'series';
    const { start, end } = this.datasource.getTimeRangeParams();
    const params = { 'match[]': match, start, end };
    return await this.request(url, params);
  };

  // The cache key is a bit different here. We round the intervals up to a minute.
  // The rounding may seem strange but it makes relative intervals like now-1h less prone to needing a separate request
  // every millisecond while still actually getting all the keys for the correct interval. This can still create problems
  // when the user does not get the newest values for a minute if they are already cached.
  private generateCacheKey(url: string, start: number, end: number, param: string): string {
    return [url, this.roundTime(start), this.roundTime(end), param].join();
  }

  // Round nanoseconds epoch to nearest 5 minute interval
  private roundTime(nanoseconds: number): number {
    return nanoseconds ? Math.floor(nanoseconds / NS_IN_MS / 1000 / 60 / 5) : 0;
  }

  /**
   * Fetch label values
   *
   * This asynchronous function fetches values associated with a specified label name.
   * It returns a promise that resolves to an array of strings containing the label values.
   *
   * @param labelName - The name of the label for which you want to retrieve values.
   * @param options - (Optional) An object containing additional options - currently only the stream selector.
   * @param options.streamSelector - (Optional) The stream selector to filter label values. If not provided, all label values are fetched.
   * @returns A promise containing an array of label values.
   * @throws An error if the fetch operation fails.
   */
  async fetchLabelValues(labelName: string, options?: { streamSelector?: string }): Promise<string[]> {
    const label = encodeURIComponent(this.datasource.interpolateString(labelName));
    const streamParam = options?.streamSelector
      ? encodeURIComponent(this.datasource.interpolateString(options.streamSelector))
      : undefined;

    const url = `label/${label}/values`;
    const rangeParams = this.datasource.getTimeRangeParams();
    const { start, end } = rangeParams;
    const params: KeyValue<string | number> = { start, end };
    let paramCacheKey = label;

    if (streamParam) {
      params.query = streamParam;
      paramCacheKey += streamParam;
    }

    const cacheKey = this.generateCacheKey(url, start, end, paramCacheKey);

    let labelValues = this.labelsCache.get(cacheKey);
    if (!labelValues) {
      // Clear the value when requesting a new one. The empty array being truthy also makes sure we don't request twice.
      this.labelsCache.set(cacheKey, []);
      const res = await this.request(url, params);
      if (Array.isArray(res)) {
        labelValues = res.slice().sort();
        this.labelsCache.set(cacheKey, labelValues);
      }
    }

    return labelValues ?? [];
  }

  /**
   * Get parser and label keys for a selector
   *
   * This asynchronous function is used to fetch parsers and label keys for a selected log stream based on sampled lines.
   * It returns a promise that resolves to an object with the following properties:
   *
   * - `extractedLabelKeys`: An array of available label keys associated with the log stream.
   * - `hasJSON`: A boolean indicating whether JSON parsing is available for the stream.
   * - `hasLogfmt`: A boolean indicating whether Logfmt parsing is available for the stream.
   * - `hasPack`: A boolean indicating whether Pack parsing is available for the stream.
   * - `unwrapLabelKeys`: An array of label keys that can be used for unwrapping log data.
   *
   * @param streamSelector - The selector for the log stream you want to analyze.
   * @param {Object} [options] - Optional parameters.
   * @param {number} [options.maxLines] - The number of log lines requested when determining parsers and label keys.
   *     A smaller maxLines is recommended for improved query performance. The default count is 10.
   * @returns A promise containing an object with parser and label key information.
   * @throws An error if the fetch operation fails.
   */
  async getParserAndLabelKeys(
    streamSelector: string,
    options?: { maxLines?: number }
  ): Promise<ParserAndLabelKeysResult> {
    const series = await this.datasource.getDataSamples({
      expr: streamSelector,
      refId: 'data-samples',
      maxLines: options?.maxLines || DEFAULT_MAX_LINES_SAMPLE,
    });

    if (!series.length) {
      return { extractedLabelKeys: [], unwrapLabelKeys: [], hasJSON: false, hasLogfmt: false, hasPack: false };
    }

    const { hasLogfmt, hasJSON, hasPack } = extractLogParserFromDataFrame(series[0]);

    return {
      extractedLabelKeys: extractLabelKeysFromDataFrame(series[0]),
      unwrapLabelKeys: extractUnwrapLabelKeysFromDataFrame(series[0]),
      hasJSON,
      hasPack,
      hasLogfmt,
    };
  }
}
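A hedged sketch of how the language provider above is typically driven from an editor; the module path './LanguageProvider' and the helper function are assumptions, and the datasource instance is assumed to be an already configured LokiDatasource.

import LokiLanguageProvider from './LanguageProvider'; // path is an assumption
import { LokiDatasource } from './types';

// Illustrative helper: warm up the provider once, then ask it for completion data.
async function loadCompletionData(datasource: LokiDatasource) {
  const provider = new LokiLanguageProvider(datasource);

  // start() fetches the label keys once and memoizes the in-flight promise.
  await provider.start();

  const labelKeys = provider.getLabelKeys();
  const jobValues = await provider.fetchLabelValues('job');
  const seriesLabels = await provider.fetchSeriesLabels('{job="grafana"}');

  return { labelKeys, jobValues, seriesLabels };
}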
@@ -0,0 +1,90 @@
import React, { ReactNode } from 'react';

import { QueryEditorProps } from '@grafana/data';

import { shouldRefreshLabels } from './languageUtils';
import { MonacoQueryFieldWrapper } from './monaco-query-field/MonacoQueryFieldWrapper';
import { LokiQuery, LokiOptions, LokiDatasource } from './types';

export interface LokiQueryFieldProps extends QueryEditorProps<LokiDatasource, LokiQuery, LokiOptions> {
  ExtraFieldElement?: ReactNode;
  placeholder?: string;
  'data-testid'?: string;
}

interface LokiQueryFieldState {
  labelsLoaded: boolean;
}

export class LokiQueryField extends React.PureComponent<LokiQueryFieldProps, LokiQueryFieldState> {
  _isMounted = false;

  constructor(props: LokiQueryFieldProps) {
    super(props);

    this.state = { labelsLoaded: false };
  }

  async componentDidMount() {
    this._isMounted = true;
    await this.props.datasource.languageProvider.start();
    if (this._isMounted) {
      this.setState({ labelsLoaded: true });
    }
  }

  componentWillUnmount() {
    this._isMounted = false;
  }

  componentDidUpdate(prevProps: LokiQueryFieldProps) {
    const {
      range,
      datasource: { languageProvider },
    } = this.props;
    const refreshLabels = shouldRefreshLabels(range, prevProps.range);
    // We want to refresh labels when range changes (we round up intervals to a minute)
    if (refreshLabels) {
      languageProvider.fetchLabels();
    }
  }

  onChangeQuery = (value: string, override?: boolean) => {
    // Send text change to parent
    const { query, onChange, onRunQuery } = this.props;
    if (onChange) {
      const nextQuery = { ...query, expr: value };
      onChange(nextQuery);

      if (override && onRunQuery) {
        onRunQuery();
      }
    }
  };

  render() {
    const { ExtraFieldElement, query, datasource, history, onRunQuery } = this.props;
    const placeholder = this.props.placeholder ?? 'Enter a Loki query (run with Shift+Enter)';

    return (
      <>
        <div
          className="gf-form-inline gf-form-inline--xs-view-flex-column flex-grow-1"
          data-testid={this.props['data-testid']}
        >
          <div className="gf-form--grow flex-shrink-1 min-width-15">
            <MonacoQueryFieldWrapper
              datasource={datasource}
              history={history ?? []}
              onChange={this.onChangeQuery}
              onRunQuery={onRunQuery}
              initialValue={query.expr ?? ''}
              placeholder={placeholder}
            />
          </div>
        </div>
        {ExtraFieldElement}
      </>
    );
  }
}
@@ -0,0 +1,65 @@
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
//
// Generated by:
//     public/app/plugins/gen.go
// Using jennies:
//     TSTypesJenny
//     PluginTSTypesJenny
//
// Run 'make gen-cue' from repository root to regenerate.

import * as common from '@grafana/schema';

export enum QueryEditorMode {
  Builder = 'builder',
  Code = 'code',
}

export enum LokiQueryType {
  Instant = 'instant',
  Range = 'range',
  Stream = 'stream',
}

export enum SupportingQueryType {
  DataSample = 'dataSample',
  LogsSample = 'logsSample',
  LogsVolume = 'logsVolume',
}

export enum LokiQueryDirection {
  Backward = 'backward',
  Forward = 'forward',
}

export interface Loki extends common.DataQuery {
  editorMode?: QueryEditorMode;
  /**
   * The LogQL query.
   */
  expr: string;
  /**
   * @deprecated, now use queryType.
   */
  instant?: boolean;
  /**
   * Used to override the name of the series.
   */
  legendFormat?: string;
  /**
   * Used to limit the number of log rows returned.
   */
  maxLines?: number;
  /**
   * @deprecated, now use queryType.
   */
  range?: boolean;
  /**
   * @deprecated, now use step.
   */
  resolution?: number;
  /**
   * Used to set step value for range queries.
   */
  step?: string;
}
@@ -0,0 +1,90 @@
import { TimeRange } from '@grafana/data';

function roundMsToMin(milliseconds: number): number {
  return roundSecToMin(milliseconds / 1000);
}

function roundSecToMin(seconds: number): number {
  return Math.floor(seconds / 60);
}

export function shouldRefreshLabels(range?: TimeRange, prevRange?: TimeRange): boolean {
  if (range && prevRange) {
    const sameMinuteFrom = roundMsToMin(range.from.valueOf()) === roundMsToMin(prevRange.from.valueOf());
    const sameMinuteTo = roundMsToMin(range.to.valueOf()) === roundMsToMin(prevRange.to.valueOf());
    // If both are the same, there is no need to refresh
    return !(sameMinuteFrom && sameMinuteTo);
  }
  return false;
}

// Loki regular expressions use the RE2 syntax (https://github.com/google/re2/wiki/Syntax),
// so every character that matches something in that list has to be escaped.
// The list of meta characters is: *+?()|\.[]{}^$
// We make a JavaScript regular expression that matches those characters:
const RE2_METACHARACTERS = /[*+?()|\\.\[\]{}^$]/g;
function escapeLokiRegexp(value: string): string {
  return value.replace(RE2_METACHARACTERS, '\\$&');
}

// Based on the OpenMetrics documentation, the 3 symbols we have to handle are:
// - \n ... the newline character
// - \  ... the backslash character
// - "  ... the double-quote character
export function escapeLabelValueInExactSelector(labelValue: string): string {
  return labelValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/"/g, '\\"');
}

export function unescapeLabelValue(labelValue: string): string {
  return labelValue.replace(/\\n/g, '\n').replace(/\\"/g, '"').replace(/\\\\/g, '\\');
}

export function escapeLabelValueInRegexSelector(labelValue: string): string {
  return escapeLabelValueInExactSelector(escapeLokiRegexp(labelValue));
}

export function escapeLabelValueInSelector(labelValue: string, selector?: string): string {
  return isRegexSelector(selector)
    ? escapeLabelValueInRegexSelector(labelValue)
    : escapeLabelValueInExactSelector(labelValue);
}

export function isRegexSelector(selector?: string) {
  if (selector && (selector.includes('=~') || selector.includes('!~'))) {
    return true;
  }
  return false;
}

export function isBytesString(string: string) {
  const BYTES_KEYWORDS = [
    'b',
    'kib',
    'Kib',
    'kb',
    'KB',
    'mib',
    'Mib',
    'mb',
    'MB',
    'gib',
    'Gib',
    'gb',
    'GB',
    'tib',
    'Tib',
    'tb',
    'TB',
    'pib',
    'Pib',
    'pb',
    'PB',
    'eib',
    'Eib',
    'eb',
    'EB',
  ];
  const regex = new RegExp(`^(?:-?\\d+(?:\\.\\d+)?)(?:${BYTES_KEYWORDS.join('|')})$`);
  const match = string.match(regex);
  return !!match;
}
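For intuition, a few calls through the escaping helpers above; the inputs are made up, and the results in the comments follow from the replace rules and the keyword list in this file.

import { escapeLabelValueInSelector, isBytesString, isRegexSelector } from './languageUtils';

// '=~' marks a regex selector, so RE2 metacharacters would be escaped too in that branch.
console.log(isRegexSelector('{job=~"grafana"}')); // true

// In an exact selector only backslash, newline and double quote are escaped.
console.log(escapeLabelValueInSelector('say "hi"')); // say \"hi\" — each quote gains a backslash

// Byte-size strings are matched case-sensitively against the keyword list above, with no whitespace allowed.
console.log(isBytesString('1.5GB')); // true
console.log(isBytesString('15 minutes')); // false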
@@ -0,0 +1,28 @@
export function isLogLineJSON(line: string): boolean {
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch (error) {}
  // The JSON parser should only be used for log lines that are valid serialized JSON objects.
  return typeof parsed === 'object';
}

// This matches:
// first a label from the start of the string or the first whitespace, then any word chars until "=",
// then either empty quotes, or anything that starts with a quote and ends with an unescaped quote,
// or any non-whitespace chars that do not start with a quote
const LOGFMT_REGEXP = /(?:^|\s)([\w\(\)\[\]\{\}]+)=(""|(?:".*?[^\\]"|[^"\s]\S*))/;

export function isLogLineLogfmt(line: string): boolean {
  return LOGFMT_REGEXP.test(line);
}

export function isLogLinePacked(line: string): boolean {
  let parsed;
  try {
    parsed = JSON.parse(line);
    return parsed.hasOwnProperty('_entry');
  } catch (error) {
    return false;
  }
}
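A few sample lines run through these detectors, as a sanity check on the regex and JSON.parse behavior above; the module path in the import is an assumption since the diff does not show file names.

import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser'; // path is an assumption

console.log(isLogLineJSON('{"level":"info","msg":"boot"}')); // true
console.log(isLogLineJSON('level=info msg=boot')); // false (JSON.parse throws)

console.log(isLogLineLogfmt('level=info msg="hello world"')); // true
console.log(isLogLineLogfmt('plain text line')); // false (no key=value pair)

console.log(isLogLinePacked('{"_entry":"original line","app":"tempo"}')); // true
console.log(isLogLinePacked('{"msg":"no entry key"}')); // false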
@ -0,0 +1,273 @@ |
||||
import { css } from '@emotion/css'; |
||||
import { debounce } from 'lodash'; |
||||
import React, { useRef, useEffect } from 'react'; |
||||
import { useLatest } from 'react-use'; |
||||
import { v4 as uuidv4 } from 'uuid'; |
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data'; |
||||
import { selectors } from '@grafana/e2e-selectors'; |
||||
import { parser } from '@grafana/lezer-logql'; |
||||
import { languageConfiguration, monarchlanguage } from '@grafana/monaco-logql'; |
||||
import { useTheme2, ReactMonacoEditor, Monaco, monacoTypes, MonacoEditor } from '@grafana/ui'; |
||||
|
||||
import { Props } from './MonacoQueryFieldProps'; |
||||
import { getOverrideServices } from './getOverrideServices'; |
||||
import { getCompletionProvider, getSuggestOptions } from './monaco-completion-provider'; |
||||
import { CompletionDataProvider } from './monaco-completion-provider/CompletionDataProvider'; |
||||
import { placeHolderScopedVars, validateQuery } from './monaco-completion-provider/validation'; |
||||
|
||||
const options: monacoTypes.editor.IStandaloneEditorConstructionOptions = { |
||||
codeLens: false, |
||||
contextmenu: false, |
||||
// we need `fixedOverflowWidgets` because otherwise in grafana-dashboards
|
||||
// the popup is clipped by the panel-visualizations.
|
||||
fixedOverflowWidgets: true, |
||||
folding: false, |
||||
fontSize: 14, |
||||
lineDecorationsWidth: 8, // used as "padding-left"
|
||||
lineNumbers: 'off', |
||||
minimap: { enabled: false }, |
||||
overviewRulerBorder: false, |
||||
overviewRulerLanes: 0, |
||||
padding: { |
||||
// these numbers were picked so that visually this matches the previous version
|
||||
// of the query-editor the best
|
||||
top: 4, |
||||
bottom: 5, |
||||
}, |
||||
renderLineHighlight: 'none', |
||||
scrollbar: { |
||||
vertical: 'hidden', |
||||
verticalScrollbarSize: 8, // used as "padding-right"
|
||||
horizontal: 'hidden', |
||||
horizontalScrollbarSize: 0, |
||||
alwaysConsumeMouseWheel: false, |
||||
}, |
||||
scrollBeyondLastLine: false, |
||||
suggest: getSuggestOptions(), |
||||
suggestFontSize: 12, |
||||
wordWrap: 'on', |
||||
}; |
||||
|
||||
// this number was chosen by testing various values. it might be necessary
|
||||
// because of the width of the border, not sure.
|
||||
//it needs to do 2 things:
|
||||
// 1. when the editor is single-line, it should make the editor height be visually correct
|
||||
// 2. when the editor is multi-line, the editor should not be "scrollable" (meaning,
|
||||
// you do a scroll-movement in the editor, and it will scroll the content by a couple pixels
|
||||
// up & down. this we want to avoid)
|
||||
const EDITOR_HEIGHT_OFFSET = 2; |
||||
|
||||
const LANG_ID = 'logql'; |
||||
|
||||
// we must only run the lang-setup code once
|
||||
let LANGUAGE_SETUP_STARTED = false; |
||||
|
||||
export const defaultWordPattern = /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g; |
||||
|
||||
function ensureLogQL(monaco: Monaco) { |
||||
if (LANGUAGE_SETUP_STARTED === false) { |
||||
LANGUAGE_SETUP_STARTED = true; |
||||
monaco.languages.register({ id: LANG_ID }); |
||||
|
||||
monaco.languages.setMonarchTokensProvider(LANG_ID, monarchlanguage); |
||||
monaco.languages.setLanguageConfiguration(LANG_ID, { |
||||
...languageConfiguration, |
||||
wordPattern: /(-?\d*\.\d\w*)|([^`~!#%^&*()+\[{\]}\\|;:',.<>\/?\s]+)/g, |
||||
// Default: /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g
|
||||
// Removed `"`, `=`, and `-`, from the exclusion list, so now the completion provider can decide to overwrite any matching words, or just insert text at the cursor
|
||||
}); |
||||
} |
||||
} |
||||
|
||||
const getStyles = (theme: GrafanaTheme2, placeholder: string) => { |
||||
return { |
||||
container: css` |
||||
border-radius: ${theme.shape.radius.default}; |
||||
border: 1px solid ${theme.components.input.borderColor}; |
||||
width: 100%; |
||||
.monaco-editor .suggest-widget { |
||||
min-width: 50%; |
||||
} |
||||
`,
|
||||
placeholder: css` |
||||
::after { |
||||
content: '${placeholder}'; |
||||
font-family: ${theme.typography.fontFamilyMonospace}; |
||||
opacity: 0.3; |
||||
} |
||||
`,
|
||||
}; |
||||
}; |
||||
|
||||
const MonacoQueryField = ({ history, onBlur, onRunQuery, initialValue, datasource, placeholder, onChange }: Props) => { |
||||
const id = uuidv4(); |
||||
// we need only one instance of `overrideServices` during the lifetime of the react component
|
||||
const overrideServicesRef = useRef(getOverrideServices()); |
||||
const containerRef = useRef<HTMLDivElement>(null); |
||||
|
||||
const langProviderRef = useLatest(datasource.languageProvider); |
||||
const historyRef = useLatest(history); |
||||
const onRunQueryRef = useLatest(onRunQuery); |
||||
const onBlurRef = useLatest(onBlur); |
||||
|
||||
const autocompleteCleanupCallback = useRef<(() => void) | null>(null); |
||||
|
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme, placeholder); |
||||
|
||||
useEffect(() => { |
||||
// when we unmount, we unregister the autocomplete-function, if it was registered
|
||||
return () => { |
||||
autocompleteCleanupCallback.current?.(); |
||||
}; |
||||
}, []); |
||||
|
||||
const setPlaceholder = (monaco: Monaco, editor: MonacoEditor) => { |
||||
const placeholderDecorators = [ |
||||
{ |
||||
range: new monaco.Range(1, 1, 1, 1), |
||||
options: { |
||||
className: styles.placeholder, |
||||
isWholeLine: true, |
||||
}, |
||||
}, |
||||
]; |
||||
|
||||
let decorators: string[] = []; |
||||
|
||||
const checkDecorators: () => void = () => { |
||||
const model = editor.getModel(); |
||||
|
||||
if (!model) { |
||||
return; |
||||
} |
||||
|
||||
const newDecorators = model.getValueLength() === 0 ? placeholderDecorators : []; |
||||
decorators = model.deltaDecorations(decorators, newDecorators); |
||||
}; |
||||
|
||||
checkDecorators(); |
||||
editor.onDidChangeModelContent(checkDecorators); |
||||
}; |
||||
|
||||
const onTypeDebounced = debounce(async (query: string) => { |
||||
onChange(query); |
||||
}, 1000); |
||||
|
||||
return ( |
||||
<div |
||||
aria-label={selectors.components.QueryField.container} |
||||
className={styles.container} |
||||
// NOTE: we will be setting inline-style-width/height on this element
|
||||
ref={containerRef} |
||||
> |
||||
<ReactMonacoEditor |
||||
overrideServices={overrideServicesRef.current} |
||||
options={options} |
||||
language={LANG_ID} |
||||
value={initialValue} |
||||
beforeMount={(monaco) => { |
||||
ensureLogQL(monaco); |
||||
}} |
||||
onMount={(editor, monaco) => { |
||||
// Monaco has a bug where it runs actions on all instances (https://github.com/microsoft/monaco-editor/issues/2947), so we ensure actions are executed on instance-level with this ContextKey.
|
||||
const isEditorFocused = editor.createContextKey<boolean>('isEditorFocused' + id, false); |
||||
// we set up on-blur
|
||||
editor.onDidBlurEditorWidget(() => { |
||||
isEditorFocused.set(false); |
||||
onBlurRef.current(editor.getValue()); |
||||
}); |
||||
editor.onDidChangeModelContent((e) => { |
||||
const model = editor.getModel(); |
||||
if (!model) { |
||||
return; |
||||
} |
||||
const query = model.getValue(); |
||||
const errors = |
||||
validateQuery( |
||||
query, |
||||
datasource.interpolateString(query, placeHolderScopedVars), |
||||
model.getLinesContent(), |
||||
parser |
||||
) || []; |
||||
|
||||
const markers = errors.map(({ error, ...boundary }: any) => ({ |
||||
message: `${ |
||||
error ? `Error parsing "${error}"` : 'Parse error' |
||||
}. The query appears to be incorrect and could fail to be executed.`,
|
||||
severity: monaco.MarkerSeverity.Error, |
||||
...boundary, |
||||
})); |
||||
|
||||
onTypeDebounced(query); |
||||
monaco.editor.setModelMarkers(model, 'owner', markers); |
||||
}); |
||||
const dataProvider = new CompletionDataProvider(langProviderRef.current, historyRef); |
||||
const completionProvider = getCompletionProvider(monaco, dataProvider); |
||||
|
||||
// completion-providers in monaco are not registered directly to editor-instances,
|
||||
// they are registered to languages. this makes it hard for us to have
|
||||
// separate completion-providers for every query-field-instance
|
||||
// (but we need that, because they might connect to different datasources).
|
||||
// the trick we do is, we wrap the callback in a "proxy",
|
||||
// and in the proxy, the first thing is, we check if we are called from
|
||||
// "our editor instance", and if not, we just return nothing. if yes,
|
||||
// we call the completion-provider.
|
||||
const filteringCompletionProvider: monacoTypes.languages.CompletionItemProvider = { |
||||
...completionProvider, |
||||
provideCompletionItems: (model, position, context, token) => { |
||||
// if the model-id does not match, then this call is from a different editor-instance,
|
||||
// not "our instance", so return nothing
|
||||
if (editor.getModel()?.id !== model.id) { |
||||
return { suggestions: [] }; |
||||
} |
||||
return completionProvider.provideCompletionItems(model, position, context, token); |
||||
}, |
||||
}; |
||||
|
||||
const { dispose } = monaco.languages.registerCompletionItemProvider(LANG_ID, filteringCompletionProvider); |
||||
|
||||
autocompleteCleanupCallback.current = dispose; |
||||
// this code makes the editor resize itself so that the content fits
|
||||
// (it will grow taller when necessary)
|
||||
// FIXME: maybe move this functionality into CodeEditor, like:
|
||||
// <CodeEditor resizingMode="single-line"/>
|
||||
const handleResize = () => { |
||||
const containerDiv = containerRef.current; |
||||
if (containerDiv !== null) { |
||||
const pixelHeight = editor.getContentHeight(); |
||||
containerDiv.style.height = `${pixelHeight + EDITOR_HEIGHT_OFFSET}px`; |
||||
const pixelWidth = containerDiv.clientWidth; |
||||
editor.layout({ width: pixelWidth, height: pixelHeight }); |
||||
} |
||||
}; |
||||
|
||||
editor.onDidContentSizeChange(handleResize); |
||||
handleResize(); |
||||
// handle: shift + enter
|
||||
// FIXME: maybe move this functionality into CodeEditor?
|
||||
editor.addCommand( |
||||
monaco.KeyMod.Shift | monaco.KeyCode.Enter, |
||||
() => { |
||||
onRunQueryRef.current(editor.getValue()); |
||||
}, |
||||
'isEditorFocused' + id |
||||
); |
||||
|
||||
editor.onDidFocusEditorText(() => { |
||||
isEditorFocused.set(true); |
||||
if (editor.getValue().trim() === '') { |
||||
editor.trigger('', 'editor.action.triggerSuggest', {}); |
||||
} |
||||
}); |
||||
|
||||
setPlaceholder(monaco, editor); |
||||
}} |
||||
/> |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
// Default export for lazy load.
|
||||
export default MonacoQueryField; |
@ -0,0 +1,13 @@ |
||||
import React, { Suspense } from 'react'; |
||||
|
||||
import { Props } from './MonacoQueryFieldProps'; |
||||
|
||||
const Field = React.lazy(() => import(/* webpackChunkName: "loki-query-field" */ './MonacoQueryField')); |
||||
|
||||
export const MonacoQueryFieldLazy = (props: Props) => { |
||||
return ( |
||||
<Suspense fallback={null}> |
||||
<Field {...props} /> |
||||
</Suspense> |
||||
); |
||||
}; |
@ -0,0 +1,17 @@ |
||||
import { HistoryItem } from '@grafana/data'; |
||||
|
||||
import { LokiDatasource, LokiQuery } from '../types'; |
||||
|
||||
// we need to store this in a separate file,
|
||||
// because we have an async-wrapper around,
|
||||
// the react-component, and it needs the same
|
||||
// props as the sync-component.
|
||||
export type Props = { |
||||
initialValue: string; |
||||
history: Array<HistoryItem<LokiQuery>>; |
||||
onRunQuery: (value: string) => void; |
||||
onBlur: (value: string) => void; |
||||
placeholder: string; |
||||
datasource: LokiDatasource; |
||||
onChange: (query: string) => void; |
||||
}; |
@ -0,0 +1,27 @@ |
||||
import React, { useRef } from 'react'; |
||||
|
||||
import { MonacoQueryFieldLazy } from './MonacoQueryFieldLazy'; |
||||
import { Props as MonacoProps } from './MonacoQueryFieldProps'; |
||||
|
||||
export type Props = Omit<MonacoProps, 'onRunQuery' | 'onBlur'> & { |
||||
onChange: (query: string) => void; |
||||
onRunQuery: () => void; |
||||
onQueryType?: (query: string) => void; |
||||
}; |
||||
|
||||
export const MonacoQueryFieldWrapper = (props: Props) => { |
||||
const lastRunValueRef = useRef<string | null>(null); |
||||
const { onRunQuery, onChange, ...rest } = props; |
||||
|
||||
const handleRunQuery = (value: string) => { |
||||
lastRunValueRef.current = value; |
||||
onChange(value); |
||||
onRunQuery(); |
||||
}; |
||||
|
||||
const handleBlur = (value: string) => { |
||||
onChange(value); |
||||
}; |
||||
|
||||
return <MonacoQueryFieldLazy onRunQuery={handleRunQuery} onBlur={handleBlur} onChange={onChange} {...rest} />; |
||||
}; |
@ -0,0 +1,112 @@ |
||||
import { monacoTypes } from '@grafana/ui'; |
||||
|
||||
// this thing here is a workaround in a way.
|
||||
// what we want to achieve, is that when the autocomplete-window
|
||||
// opens, the "second, extra popup" with the extra help,
|
||||
// also opens automatically.
|
||||
// but there is no API to achieve it.
|
||||
// the way to do it is to implement the `storageService`
|
||||
// interface, and provide our custom implementation,
|
||||
// which will default to `true` for the correct string-key.
|
||||
// unfortunately, while the typescript-interface exists,
|
||||
// it is not exported from monaco-editor,
|
||||
// so we cannot rely on typescript to make sure
|
||||
// we do it right. all we can do is to manually
|
||||
// look up the interface, and make sure we write our code right.
|
||||
// our code is a "best effort" approach,
|
||||
// i am not 100% sure how the `scope` and `target` things work,
|
||||
// but so far it seems to work ok.
|
||||
// i would use another approach, if there was one available.
|
||||
|
||||
function makeStorageService() { |
||||
// we need to return an object that fulfills this interface:
|
||||
// https://github.com/microsoft/vscode/blob/ff1e16eebb93af79fd6d7af1356c4003a120c563/src/vs/platform/storage/common/storage.ts#L37
|
||||
// unfortunately it is not exported from monaco-editor
|
||||
|
||||
const strings = new Map<string, string>(); |
||||
|
||||
// we want this to be true by default
|
||||
strings.set('expandSuggestionDocs', true.toString()); |
||||
|
||||
return { |
||||
// we do not implement the on* handlers
|
||||
onDidChangeValue: (data: unknown): void => undefined, |
||||
onDidChangeTarget: (data: unknown): void => undefined, |
||||
onWillSaveState: (data: unknown): void => undefined, |
||||
|
||||
get: (key: string, scope: unknown, fallbackValue?: string): string | undefined => { |
||||
return strings.get(key) ?? fallbackValue; |
||||
}, |
||||
|
||||
getBoolean: (key: string, scope: unknown, fallbackValue?: boolean): boolean | undefined => { |
||||
const val = strings.get(key); |
||||
if (val !== undefined) { |
||||
// the interface docs say the value will be converted
|
||||
// to a boolean but do not specify how, so we improvise
|
||||
return val === 'true'; |
||||
} else { |
||||
return fallbackValue; |
||||
} |
||||
}, |
||||
|
||||
getNumber: (key: string, scope: unknown, fallbackValue?: number): number | undefined => { |
||||
const val = strings.get(key); |
||||
if (val !== undefined) { |
||||
return parseInt(val, 10); |
||||
} else { |
||||
return fallbackValue; |
||||
} |
||||
}, |
||||
|
||||
store: ( |
||||
key: string, |
||||
value: string | boolean | number | undefined | null, |
||||
scope: unknown, |
||||
target: unknown |
||||
): void => { |
||||
// the interface docs say if the value is nullish, it should act as delete
|
||||
if (value === null || value === undefined) { |
||||
strings.delete(key); |
||||
} else { |
||||
strings.set(key, value.toString()); |
||||
} |
||||
}, |
||||
|
||||
remove: (key: string, scope: unknown): void => { |
||||
strings.delete(key); |
||||
}, |
||||
|
||||
keys: (scope: unknown, target: unknown): string[] => { |
||||
return Array.from(strings.keys()); |
||||
}, |
||||
|
||||
logStorage: (): void => { |
||||
console.log('logStorage: not implemented'); |
||||
}, |
||||
|
||||
migrate: (): Promise<void> => { |
||||
// we do not implement this
|
||||
return Promise.resolve(undefined); |
||||
}, |
||||
|
||||
isNew: (scope: unknown): boolean => { |
||||
// we create a new storage for every session, we do not persist it,
|
||||
// so we return `true`.
|
||||
return true; |
||||
}, |
||||
|
||||
flush: (reason?: unknown): Promise<void> => { |
||||
// we do not implement this
|
||||
return Promise.resolve(undefined); |
||||
}, |
||||
}; |
||||
} |
||||
|
||||
let overrideServices: monacoTypes.editor.IEditorOverrideServices = { |
||||
storageService: makeStorageService(), |
||||
}; |
||||
|
||||
export function getOverrideServices(): monacoTypes.editor.IEditorOverrideServices { |
||||
// A single shared instance is reused for every query editor
|
||||
return overrideServices; |
||||
} |
@ -0,0 +1,97 @@ |
||||
import { chain } from 'lodash'; |
||||
|
||||
import { HistoryItem } from '@grafana/data'; |
||||
|
||||
import { LokiQuery, ParserAndLabelKeysResult, LanguageProvider } from '../../types'; |
||||
import { Label } from './situation'; |
||||
|
||||
export function escapeLabelValueInExactSelector(labelValue: string): string { |
||||
return labelValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/"/g, '\\"'); |
||||
} |
||||
|
||||
interface HistoryRef { |
||||
current: Array<HistoryItem<LokiQuery>>; |
||||
} |
||||
|
||||
export class CompletionDataProvider { |
||||
constructor( |
||||
private languageProvider: LanguageProvider, |
||||
private historyRef: HistoryRef = { current: [] } |
||||
) { |
||||
this.queryToLabelKeysCache = new Map(); |
||||
} |
||||
private queryToLabelKeysCache: Map<string, ParserAndLabelKeysResult>; |
||||
|
||||
private buildSelector(labels: Label[]): string { |
||||
const allLabelTexts = labels.map( |
||||
(label) => `${label.name}${label.op}"${escapeLabelValueInExactSelector(label.value)}"` |
||||
); |
||||
|
||||
return `{${allLabelTexts.join(',')}}`; |
||||
} |
||||
|
||||
getHistory() { |
||||
return chain(this.historyRef.current) |
||||
.map((history: HistoryItem<LokiQuery>) => history.query.expr) |
||||
.filter() |
||||
.uniq() |
||||
.value(); |
||||
} |
||||
|
||||
async getLabelNames(otherLabels: Label[] = []) { |
||||
if (otherLabels.length === 0) { |
||||
// if there is no filtering, we have to use a special endpoint
|
||||
return this.languageProvider.getLabelKeys(); |
||||
} |
||||
const data = await this.getSeriesLabels(otherLabels); |
||||
const possibleLabelNames = Object.keys(data); // all names from datasource
|
||||
const usedLabelNames = new Set(otherLabels.map((l) => l.name)); // names used in the query
|
||||
return possibleLabelNames.filter((label) => !usedLabelNames.has(label)); |
||||
} |
||||
|
||||
async getLabelValues(labelName: string, otherLabels: Label[]) { |
||||
if (otherLabels.length === 0) { |
||||
// if there is no filtering, we have to use a special endpoint
|
||||
return await this.languageProvider.fetchLabelValues(labelName); |
||||
} |
||||
|
||||
const data = await this.getSeriesLabels(otherLabels); |
||||
return data[labelName] ?? []; |
||||
} |
||||
|
||||
/** |
||||
* Runs a Loki query to extract label keys from the result. |
||||
* The result is cached for the query string. |
||||
* |
||||
* Since various "situations" in the monaco code editor trigger this function, it is prone to being called multiple times for the same query. |
||||
* Here is a lightweight and simple cache to avoid calling the backend multiple times for the same query. |
||||
* |
||||
* @param logQuery |
||||
*/ |
||||
async getParserAndLabelKeys(logQuery: string): Promise<ParserAndLabelKeysResult> { |
||||
const EXTRACTED_LABEL_KEYS_MAX_CACHE_SIZE = 2; |
||||
const cachedLabelKeys = this.queryToLabelKeysCache.has(logQuery) ? this.queryToLabelKeysCache.get(logQuery) : null; |
||||
if (cachedLabelKeys) { |
||||
// cache hit! Serve stale result from cache
|
||||
return cachedLabelKeys; |
||||
} else { |
||||
// If cache is larger than max size, delete the first (oldest) index
|
||||
if (this.queryToLabelKeysCache.size >= EXTRACTED_LABEL_KEYS_MAX_CACHE_SIZE) { |
||||
// Make room in the cache for the fresh result by deleting the "first" index
|
||||
const keys = this.queryToLabelKeysCache.keys(); |
||||
const firstKey = keys.next().value; |
||||
this.queryToLabelKeysCache.delete(firstKey); |
||||
} |
||||
// Fetch a fresh result from the backend
|
||||
const labelKeys = await this.languageProvider.getParserAndLabelKeys(logQuery); |
||||
// Add the result to the cache
|
||||
this.queryToLabelKeysCache.set(logQuery, labelKeys); |
||||
return labelKeys; |
||||
} |
||||
} |
||||
|
||||
async getSeriesLabels(labels: Label[]) { |
||||
return await this.languageProvider.fetchSeriesLabels(this.buildSelector(labels)).then((data: any) => data ?? {}); |
||||
} |
||||
} |
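
A minimal usage sketch, not part of this commit: assuming a Loki language provider instance is available, the class above can be asked for label names and values that complete a partially written selector. The `suggestForSelector` helper, the `otherLabels` value, and the `namespace` label are illustrative only; the import paths mirror this package's layout.

  import { HistoryItem } from '@grafana/data';

  import { CompletionDataProvider } from './CompletionDataProvider';
  import { Label } from './situation';
  import { LokiQuery, LanguageProvider } from '../../types';

  // Illustrative helper: gather completion data for a selector like {job="grafana"}.
  async function suggestForSelector(languageProvider: LanguageProvider, history: Array<HistoryItem<LokiQuery>>) {
    const provider = new CompletionDataProvider(languageProvider, { current: history });
    const otherLabels: Label[] = [{ name: 'job', op: '=', value: 'grafana' }];

    // Label names not already used in the selector, narrowed by the existing matchers.
    const labelNames = await provider.getLabelNames(otherLabels);
    // Values for an additional label, still narrowed by {job="grafana"}.
    const values = await provider.getLabelValues('namespace', otherLabels);
    return { labelNames, values };
  }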
@ -0,0 +1,22 @@ |
||||
// This helper class is used to make typescript warn you when you miss a case-block in a switch statement.
|
||||
// For example:
|
||||
//
|
||||
// const x:'A'|'B'|'C' = 'A';
|
||||
//
|
||||
// switch(x) {
|
||||
// case 'A':
|
||||
// // something
|
||||
// case 'B':
|
||||
// // something
|
||||
// default:
|
||||
// throw new NeverCaseError(x);
|
||||
// }
|
||||
//
|
||||
//
|
||||
// TypeScript detect the missing case and display an error.
|
||||
|
||||
export class NeverCaseError extends Error { |
||||
constructor(value: never) { |
||||
super(`Unexpected case in switch statement: ${JSON.stringify(value)}`); |
||||
} |
||||
} |
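
A hedged sketch of the exhaustiveness check described above; the `Kind` union and `describe` function are made up for illustration. When every member of the union is handled, the value narrows to `never` in the `default` branch and the constructor call type-checks; removing a `case` turns the call into a compile-time error.

  import { NeverCaseError } from './NeverCaseError';

  type Kind = 'A' | 'B';

  function describe(kind: Kind): string {
    switch (kind) {
      case 'A':
        return 'first';
      case 'B':
        return 'second';
      default:
        // `kind` has type `never` here, so this only compiles while the switch is exhaustive.
        throw new NeverCaseError(kind);
    }
  }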
@ -0,0 +1,201 @@ |
||||
import type { Monaco, monacoTypes } from '@grafana/ui'; |
||||
|
||||
import { CompletionDataProvider } from './CompletionDataProvider'; |
||||
import { NeverCaseError } from './NeverCaseError'; |
||||
import { Situation, getSituation } from './situation'; |
||||
|
||||
type CompletionType = |
||||
| 'HISTORY' |
||||
| 'FUNCTION' |
||||
| 'DURATION' |
||||
| 'LABEL_NAME' |
||||
| 'LABEL_VALUE' |
||||
| 'PATTERN' |
||||
| 'PARSER' |
||||
| 'LINE_FILTER' |
||||
| 'PIPE_OPERATION'; |
||||
|
||||
type Completion = { |
||||
type: CompletionType; |
||||
label: string; |
||||
insertText: string; |
||||
detail?: string; |
||||
documentation?: string; |
||||
triggerOnInsert?: boolean; |
||||
isSnippet?: boolean; |
||||
}; |
||||
|
||||
const DURATION_COMPLETIONS: Completion[] = ['$__auto', '1m', '5m', '10m', '30m', '1h', '1d'].map((text) => ({ |
||||
type: 'DURATION', |
||||
label: text, |
||||
insertText: text, |
||||
})); |
||||
const getCompletions = async (situation: Situation, dataProvider: CompletionDataProvider) => { |
||||
return DURATION_COMPLETIONS; |
||||
}; |
||||
|
||||
// from: monacoTypes.languages.CompletionItemInsertTextRule.InsertAsSnippet
|
||||
const INSERT_AS_SNIPPET_ENUM_VALUE = 4; |
||||
|
||||
export function getSuggestOptions(): monacoTypes.editor.ISuggestOptions { |
||||
return { |
||||
// monaco-editor sometimes provides suggestions automatically, i am not
|
||||
// sure based on what, seems to be by analyzing the words already
|
||||
// written.
|
||||
// to try it out:
|
||||
// - enter `go_goroutines{job~`
|
||||
// - have the cursor at the end of the string
|
||||
// - press ctrl-enter
|
||||
// - you will get two suggestions
|
||||
// those were not provided by grafana, they are offered automatically.
|
||||
// i want to remove those. the only way i found is:
|
||||
// - every suggestion-item has a `kind` attribute,
|
||||
// that controls the icon to the left of the suggestion.
|
||||
// - items auto-generated by monaco have `kind` set to `text`.
|
||||
// - we make sure grafana-provided suggestions do not have `kind` set to `text`.
|
||||
// - and then we tell monaco not to show suggestions of kind `text`
|
||||
showWords: false, |
||||
}; |
||||
} |
||||
|
||||
function getMonacoCompletionItemKind(type: CompletionType, monaco: Monaco): monacoTypes.languages.CompletionItemKind { |
||||
switch (type) { |
||||
case 'DURATION': |
||||
return monaco.languages.CompletionItemKind.Unit; |
||||
case 'FUNCTION': |
||||
return monaco.languages.CompletionItemKind.Variable; |
||||
case 'HISTORY': |
||||
return monaco.languages.CompletionItemKind.Snippet; |
||||
case 'LABEL_NAME': |
||||
return monaco.languages.CompletionItemKind.Enum; |
||||
case 'LABEL_VALUE': |
||||
return monaco.languages.CompletionItemKind.EnumMember; |
||||
case 'PATTERN': |
||||
return monaco.languages.CompletionItemKind.Constructor; |
||||
case 'PARSER': |
||||
return monaco.languages.CompletionItemKind.Class; |
||||
case 'LINE_FILTER': |
||||
return monaco.languages.CompletionItemKind.TypeParameter; |
||||
case 'PIPE_OPERATION': |
||||
return monaco.languages.CompletionItemKind.Interface; |
||||
default: |
||||
throw new NeverCaseError(type as never); |
||||
} |
||||
} |
||||
|
||||
export function getCompletionProvider( |
||||
monaco: Monaco, |
||||
dataProvider: CompletionDataProvider |
||||
): monacoTypes.languages.CompletionItemProvider { |
||||
const provideCompletionItems = ( |
||||
model: monacoTypes.editor.ITextModel, |
||||
position: monacoTypes.Position |
||||
): monacoTypes.languages.ProviderResult<monacoTypes.languages.CompletionList> => { |
||||
const word = model.getWordAtPosition(position); |
||||
const wordUntil = model.getWordUntilPosition(position); |
||||
|
||||
// documentation says `position` will be "adjusted" in `getOffsetAt`
|
||||
// i don't know what that means, to be sure i clone it
|
||||
const positionClone = { |
||||
column: position.column, |
||||
lineNumber: position.lineNumber, |
||||
}; |
||||
const offset = model.getOffsetAt(positionClone); |
||||
const situation = getSituation(model.getValue(), offset); |
||||
const range = calculateRange(situation, word, wordUntil, monaco, position); |
||||
const completionsPromise = situation != null ? getCompletions(situation, dataProvider) : Promise.resolve([]); |
||||
return completionsPromise.then((items) => { |
||||
// monaco by default alphabetically orders the items.
|
||||
// to stop it, we use a number-as-string sortkey,
|
||||
// so that monaco keeps the order we use
|
||||
const maxIndexDigits = items.length.toString().length; |
||||
const suggestions: monacoTypes.languages.CompletionItem[] = items.map((item, index) => ({ |
||||
kind: getMonacoCompletionItemKind(item.type, monaco), |
||||
label: item.label, |
||||
insertText: item.insertText, |
||||
insertTextRules: item.isSnippet ? INSERT_AS_SNIPPET_ENUM_VALUE : undefined, |
||||
detail: item.detail, |
||||
documentation: item.documentation, |
||||
sortText: index.toString().padStart(maxIndexDigits, '0'), // to force the order we have
|
||||
range: range, |
||||
command: item.triggerOnInsert |
||||
? { |
||||
id: 'editor.action.triggerSuggest', |
||||
title: '', |
||||
} |
||||
: undefined, |
||||
})); |
||||
return { suggestions }; |
||||
}); |
||||
}; |
||||
|
||||
return { |
||||
triggerCharacters: ['{', ',', '[', '(', '=', '~', ' ', '"', '|'], |
||||
provideCompletionItems, |
||||
}; |
||||
} |
||||
|
||||
export const calculateRange = ( |
||||
situation: Situation | null, |
||||
word: monacoTypes.editor.IWordAtPosition | null, |
||||
wordUntil: monacoTypes.editor.IWordAtPosition, |
||||
monaco: Monaco, |
||||
position: monacoTypes.Position |
||||
): monacoTypes.Range => { |
||||
if ( |
||||
situation && |
||||
situation?.type === 'IN_LABEL_SELECTOR_WITH_LABEL_NAME' && |
||||
'betweenQuotes' in situation && |
||||
situation.betweenQuotes |
||||
) { |
||||
// wordUntil won't have the second quote if the cursor is between quotes
|
||||
const indexOfFirstQuote = wordUntil?.word?.indexOf('"') ?? 0; |
||||
|
||||
const indexOfLastQuote = word?.word?.lastIndexOf('"') ?? 0; |
||||
|
||||
const indexOfEquals = word?.word.indexOf('='); |
||||
const indexOfLastEquals = word?.word.lastIndexOf('='); |
||||
|
||||
// With just one equals "=", the cursor is somewhere within a label value
|
||||
// e.g. value="labe^l-value" or value="^label-value" etc
|
||||
// We want the word to include everything within the quotes, so the result from autocomplete overwrites the existing label value
|
||||
if ( |
||||
indexOfLastEquals === indexOfEquals && |
||||
indexOfFirstQuote !== -1 && |
||||
indexOfLastQuote !== -1 && |
||||
indexOfLastEquals !== -1 |
||||
) { |
||||
return word != null |
||||
? monaco.Range.lift({ |
||||
startLineNumber: position.lineNumber, |
||||
endLineNumber: position.lineNumber, |
||||
startColumn: wordUntil.startColumn + indexOfFirstQuote + 1, |
||||
endColumn: wordUntil.startColumn + indexOfLastQuote, |
||||
}) |
||||
: monaco.Range.fromPositions(position); |
||||
} |
||||
} |
||||
|
||||
if (situation && situation.type === 'IN_LABEL_SELECTOR_WITH_LABEL_NAME') { |
||||
// Otherwise we want the range to be calculated as the cursor position, as we want to insert the autocomplete, instead of overwriting existing text
|
||||
// The cursor position is the length of the wordUntil
|
||||
return word != null |
||||
? monaco.Range.lift({ |
||||
startLineNumber: position.lineNumber, |
||||
endLineNumber: position.lineNumber, |
||||
startColumn: wordUntil.endColumn, |
||||
endColumn: wordUntil.endColumn, |
||||
}) |
||||
: monaco.Range.fromPositions(position); |
||||
} |
||||
|
||||
// And for all other non-label cases, we want to use the word start and end column
|
||||
return word != null |
||||
? monaco.Range.lift({ |
||||
startLineNumber: position.lineNumber, |
||||
endLineNumber: position.lineNumber, |
||||
startColumn: word.startColumn, |
||||
endColumn: word.endColumn, |
||||
}) |
||||
: monaco.Range.fromPositions(position); |
||||
}; |
@ -0,0 +1,67 @@ |
||||
export type LabelOperator = '=' | '!=' | '=~' | '!~'; |
||||
|
||||
export type Label = { |
||||
name: string; |
||||
value: string; |
||||
op: LabelOperator; |
||||
}; |
||||
|
||||
export type Situation = |
||||
| { |
||||
type: 'EMPTY'; |
||||
} |
||||
| { |
||||
type: 'AT_ROOT'; |
||||
} |
||||
| { |
||||
type: 'IN_LOGFMT'; |
||||
otherLabels: string[]; |
||||
flags: boolean; |
||||
trailingSpace: boolean; |
||||
trailingComma: boolean; |
||||
logQuery: string; |
||||
} |
||||
| { |
||||
type: 'IN_RANGE'; |
||||
} |
||||
| { |
||||
type: 'IN_AGGREGATION'; |
||||
} |
||||
| { |
||||
type: 'IN_GROUPING'; |
||||
logQuery: string; |
||||
} |
||||
| { |
||||
type: 'IN_LABEL_SELECTOR_NO_LABEL_NAME'; |
||||
otherLabels: Label[]; |
||||
} |
||||
| { |
||||
type: 'IN_LABEL_SELECTOR_WITH_LABEL_NAME'; |
||||
labelName: string; |
||||
betweenQuotes: boolean; |
||||
otherLabels: Label[]; |
||||
} |
||||
| { |
||||
type: 'AFTER_SELECTOR'; |
||||
afterPipe: boolean; |
||||
hasSpace: boolean; |
||||
logQuery: string; |
||||
} |
||||
| { |
||||
type: 'AFTER_UNWRAP'; |
||||
logQuery: string; |
||||
} |
||||
| { |
||||
type: 'AFTER_KEEP_AND_DROP'; |
||||
logQuery: string; |
||||
}; |
||||
|
||||
/** |
||||
* THIS METHOD IS KNOWN TO BE INCOMPLETE due to the decoupling of the Tempo datasource from Grafana core: |
||||
* Incomplete support for LogQL autocomplete from 'public/app/plugins/datasource/loki/components/monaco-query-field/monaco-completion-provider/situation.ts'; |
||||
*/ |
||||
export const getSituation = (text: string, pos: number): Situation | null => { |
||||
return { |
||||
type: 'EMPTY', |
||||
}; |
||||
}; |
@ -0,0 +1,126 @@ |
||||
import { SyntaxNode } from '@lezer/common'; |
||||
import { LRParser } from '@lezer/lr'; |
||||
|
||||
// import { ErrorId } from 'app/plugins/datasource/prometheus/querybuilder/shared/parsingUtils';
|
||||
const ErrorId = 0; |
||||
|
||||
interface ParserErrorBoundary { |
||||
startLineNumber: number; |
||||
startColumn: number; |
||||
endLineNumber: number; |
||||
endColumn: number; |
||||
error: string; |
||||
} |
||||
|
||||
interface ParseError { |
||||
text: string; |
||||
node: SyntaxNode; |
||||
} |
||||
|
||||
/** |
||||
* Conceived to work in combination with the MonacoQueryField component. |
||||
* Given an original query and its interpolated version, it will return an array of ParserErrorBoundary |
||||
* objects containing nodes which are actual errors. The interpolated version (even with placeholder variables) |
||||
* is required because variables look like errors for Lezer. |
||||
* @internal |
||||
*/ |
||||
export function validateQuery( |
||||
query: string, |
||||
interpolatedQuery: string, |
||||
queryLines: string[], |
||||
parser: LRParser |
||||
): ParserErrorBoundary[] | false { |
||||
if (!query) { |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* To support variable interpolation in query validation, we run the parser on the interpolated |
||||
* query. If there are errors there, we trace them back to the original unparsed query, so we can more |
||||
* accurately highlight the error in the query, since it's likely that the variable name and variable value |
||||
* have different lengths. With this, we also exclude irrelevant parser errors that are produced by |
||||
* lezer not understanding $variables and $__variables, which usually generate 2 or 3 error SyntaxNodes. |
||||
*/ |
||||
const interpolatedErrors: ParseError[] = parseQuery(interpolatedQuery, parser); |
||||
if (!interpolatedErrors.length) { |
||||
return false; |
||||
} |
||||
|
||||
let parseErrors: ParseError[] = interpolatedErrors; |
||||
if (query !== interpolatedQuery) { |
||||
const queryErrors: ParseError[] = parseQuery(query, parser); |
||||
parseErrors = interpolatedErrors.flatMap( |
||||
(interpolatedError) => |
||||
queryErrors.filter((queryError) => interpolatedError.text === queryError.text) || interpolatedError |
||||
); |
||||
} |
||||
|
||||
return parseErrors.map((parseError) => findErrorBoundary(query, queryLines, parseError)).filter(isErrorBoundary); |
||||
} |
||||
|
||||
function parseQuery(query: string, parser: LRParser) { |
||||
const parseErrors: ParseError[] = []; |
||||
const tree = parser.parse(query); |
||||
tree.iterate({ |
||||
enter: (nodeRef): false | void => { |
||||
if (nodeRef.type.id === ErrorId) { |
||||
const node = nodeRef.node; |
||||
parseErrors.push({ |
||||
node: node, |
||||
text: query.substring(node.from, node.to), |
||||
}); |
||||
} |
||||
}, |
||||
}); |
||||
return parseErrors; |
||||
} |
||||
|
||||
function findErrorBoundary(query: string, queryLines: string[], parseError: ParseError): ParserErrorBoundary | null { |
||||
if (queryLines.length === 1) { |
||||
const isEmptyString = parseError.node.from === parseError.node.to; |
||||
const errorNode = isEmptyString && parseError.node.parent ? parseError.node.parent : parseError.node; |
||||
const error = isEmptyString ? query.substring(errorNode.from, errorNode.to) : parseError.text; |
||||
return { |
||||
startLineNumber: 1, |
||||
startColumn: errorNode.from + 1, |
||||
endLineNumber: 1, |
||||
endColumn: errorNode.to + 1, |
||||
error, |
||||
}; |
||||
} |
||||
|
||||
let startPos = 0, |
||||
endPos = 0; |
||||
for (let line = 0; line < queryLines.length; line++) { |
||||
endPos = startPos + queryLines[line].length; |
||||
|
||||
if (parseError.node.from > endPos) { |
||||
startPos += queryLines[line].length + 1; |
||||
continue; |
||||
} |
||||
|
||||
return { |
||||
startLineNumber: line + 1, |
||||
startColumn: parseError.node.from - startPos + 1, |
||||
endLineNumber: line + 1, |
||||
endColumn: parseError.node.to - startPos + 1, |
||||
error: parseError.text, |
||||
}; |
||||
} |
||||
|
||||
return null; |
||||
} |
||||
|
||||
function isErrorBoundary(boundary: ParserErrorBoundary | null): boundary is ParserErrorBoundary { |
||||
return boundary !== null; |
||||
} |
||||
|
||||
export const placeHolderScopedVars = { |
||||
__interval: { text: '1s', value: '1s' }, |
||||
__rate_interval: { text: '1s', value: '1s' }, |
||||
__auto: { text: '1s', value: '1s' }, |
||||
__interval_ms: { text: '1000', value: 1000 }, |
||||
__range_ms: { text: '1000', value: 1000 }, |
||||
__range_s: { text: '1', value: 1 }, |
||||
__range: { text: '1s', value: '1s' }, |
||||
}; |
@ -0,0 +1,260 @@ |
||||
import { |
||||
DataFrame, |
||||
DataFrameType, |
||||
DataQueryResponse, |
||||
DataQueryResponseData, |
||||
Field, |
||||
FieldType, |
||||
isValidGoDuration, |
||||
Labels, |
||||
QueryResultMetaStat, |
||||
shallowCompare, |
||||
} from '@grafana/data'; |
||||
|
||||
import { isBytesString } from './languageUtils'; |
||||
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser'; |
||||
|
||||
export function dataFrameHasLokiError(frame: DataFrame): boolean { |
||||
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? []; |
||||
return labelSets.some((labels) => labels.__error__ !== undefined); |
||||
} |
||||
|
||||
export function dataFrameHasLevelLabel(frame: DataFrame): boolean { |
||||
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? []; |
||||
return labelSets.some((labels) => labels.level !== undefined); |
||||
} |
||||
|
||||
export function extractLogParserFromDataFrame(frame: DataFrame): { |
||||
hasLogfmt: boolean; |
||||
hasJSON: boolean; |
||||
hasPack: boolean; |
||||
} { |
||||
const lineField = frame.fields.find((field) => field.type === FieldType.string); |
||||
if (lineField == null) { |
||||
return { hasJSON: false, hasLogfmt: false, hasPack: false }; |
||||
} |
||||
|
||||
const logLines: string[] = lineField.values; |
||||
|
||||
let hasJSON = false; |
||||
let hasLogfmt = false; |
||||
let hasPack = false; |
||||
|
||||
logLines.forEach((line) => { |
||||
if (isLogLineJSON(line)) { |
||||
hasJSON = true; |
||||
|
||||
hasPack = isLogLinePacked(line); |
||||
} |
||||
if (isLogLineLogfmt(line)) { |
||||
hasLogfmt = true; |
||||
} |
||||
}); |
||||
|
||||
return { hasLogfmt, hasJSON, hasPack }; |
||||
} |
||||
|
||||
export function extractLabelKeysFromDataFrame(frame: DataFrame): string[] { |
||||
const labelsArray: Array<{ [key: string]: string }> | undefined = |
||||
frame?.fields?.find((field) => field.name === 'labels')?.values ?? []; |
||||
|
||||
if (!labelsArray?.length) { |
||||
return []; |
||||
} |
||||
|
||||
return Object.keys(labelsArray[0]); |
||||
} |
||||
|
||||
export function extractUnwrapLabelKeysFromDataFrame(frame: DataFrame): string[] { |
||||
const labelsArray: Array<{ [key: string]: string }> | undefined = |
||||
frame?.fields?.find((field) => field.name === 'labels')?.values ?? []; |
||||
|
||||
if (!labelsArray?.length) { |
||||
return []; |
||||
} |
||||
|
||||
// We do this only for the first label object, because we want to consider only labels that are present in all log lines
|
||||
// possibleUnwrapLabels are labels with 1. number value OR 2. value that is valid go duration OR 3. bytes string value
|
||||
const possibleUnwrapLabels = Object.keys(labelsArray[0]).filter((key) => { |
||||
const value = labelsArray[0][key]; |
||||
if (!value) { |
||||
return false; |
||||
} |
||||
return !isNaN(Number(value)) || isValidGoDuration(value) || isBytesString(value); |
||||
}); |
||||
|
||||
// Add only labels that are present in every line to unwrapLabels
|
||||
return possibleUnwrapLabels.filter((label) => labelsArray.every((obj) => obj[label])); |
||||
} |
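
As a rough illustration of the two filters above (the frame literal is hypothetical and trimmed to just the `labels` field this helper reads): `duration` and `status` survive because every line carries them with Go-duration or numeric values, while `msg` is dropped.

  import { DataFrame } from '@grafana/data';

  const exampleFrame = {
    fields: [
      {
        name: 'labels',
        values: [
          { duration: '15s', status: '200', msg: 'ok' },
          { duration: '2s', status: '500' },
        ],
      },
    ],
  } as unknown as DataFrame;

  extractUnwrapLabelKeysFromDataFrame(exampleFrame); // ['duration', 'status'] — 'msg' is neither numeric, a Go duration, nor a bytes string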
||||
|
||||
export function extractHasErrorLabelFromDataFrame(frame: DataFrame): boolean { |
||||
const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other); |
||||
if (labelField == null) { |
||||
return false; |
||||
} |
||||
|
||||
const labels: Array<{ [key: string]: string }> = labelField.values; |
||||
return labels.some((label) => label['__error__']); |
||||
} |
||||
|
||||
export function extractLevelLikeLabelFromDataFrame(frame: DataFrame): string | null { |
||||
const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other); |
||||
if (labelField == null) { |
||||
return null; |
||||
} |
||||
|
||||
// Depending on the number of labels, this can be a pretty heavy operation.
|
||||
// Let's just look at the first 2 lines. If needed, we can introduce more later.
|
||||
const labelsArray: Array<{ [key: string]: string }> = labelField.values.slice(0, 2); |
||||
let levelLikeLabel: string | null = null; |
||||
|
||||
// Find first level-like label
|
||||
for (let labels of labelsArray) { |
||||
const label = Object.keys(labels).find((label) => label === 'lvl' || label.includes('level')); |
||||
if (label) { |
||||
levelLikeLabel = label; |
||||
break; |
||||
} |
||||
} |
||||
return levelLikeLabel; |
||||
} |
||||
|
||||
function shouldCombine(frame1: DataFrame, frame2: DataFrame): boolean { |
||||
if (frame1.refId !== frame2.refId) { |
||||
return false; |
||||
} |
||||
|
||||
const frameType1 = frame1.meta?.type; |
||||
const frameType2 = frame2.meta?.type; |
||||
|
||||
if (frameType1 !== frameType2) { |
||||
// we do not join things that have a different type
|
||||
return false; |
||||
} |
||||
|
||||
// metric range query data
|
||||
if (frameType1 === DataFrameType.TimeSeriesMulti) { |
||||
const field1 = frame1.fields.find((f) => f.type === FieldType.number); |
||||
const field2 = frame2.fields.find((f) => f.type === FieldType.number); |
||||
if (field1 === undefined || field2 === undefined) { |
||||
// should never happen
|
||||
return false; |
||||
} |
||||
|
||||
return shallowCompare(field1.labels ?? {}, field2.labels ?? {}); |
||||
} |
||||
|
||||
// logs query data
|
||||
// logs use a special attribute in the dataframe's "custom" section
|
||||
// because we do not have a good "frametype" value for them yet.
|
||||
const customType1 = frame1.meta?.custom?.frameType; |
||||
const customType2 = frame2.meta?.custom?.frameType; |
||||
|
||||
if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') { |
||||
return true; |
||||
} |
||||
|
||||
// should never reach here
|
||||
return false; |
||||
} |
||||
|
||||
export function combineResponses(currentResult: DataQueryResponse | null, newResult: DataQueryResponse) { |
||||
if (!currentResult) { |
||||
return cloneQueryResponse(newResult); |
||||
} |
||||
|
||||
newResult.data.forEach((newFrame) => { |
||||
const currentFrame = currentResult.data.find((frame) => shouldCombine(frame, newFrame)); |
||||
if (!currentFrame) { |
||||
currentResult.data.push(cloneDataFrame(newFrame)); |
||||
return; |
||||
} |
||||
combineFrames(currentFrame, newFrame); |
||||
}); |
||||
|
||||
const mergedErrors = [...(currentResult.errors ?? []), ...(newResult.errors ?? [])]; |
||||
|
||||
// we make sure to have `.errors` as undefined, instead of empty-array
|
||||
// when no errors.
|
||||
|
||||
if (mergedErrors.length > 0) { |
||||
currentResult.errors = mergedErrors; |
||||
} |
||||
|
||||
// the `.error` attribute is obsolete now,
|
||||
// but we have to maintain it, otherwise
|
||||
// some grafana parts do not behave well.
|
||||
// we just choose the old error, if it exists,
|
||||
// otherwise the new error, if it exists.
|
||||
const mergedError = currentResult.error ?? newResult.error; |
||||
if (mergedError != null) { |
||||
currentResult.error = mergedError; |
||||
} |
||||
|
||||
const mergedTraceIds = [...(currentResult.traceIds ?? []), ...(newResult.traceIds ?? [])]; |
||||
if (mergedTraceIds.length > 0) { |
||||
currentResult.traceIds = mergedTraceIds; |
||||
} |
||||
|
||||
return currentResult; |
||||
} |
||||
|
||||
function combineFrames(dest: DataFrame, source: DataFrame) { |
||||
const totalFields = dest.fields.length; |
||||
for (let i = 0; i < totalFields; i++) { |
||||
dest.fields[i].values = [].concat.apply(source.fields[i].values, dest.fields[i].values); |
||||
if (source.fields[i].nanos) { |
||||
const nanos: number[] = dest.fields[i].nanos?.slice() || []; |
||||
dest.fields[i].nanos = source.fields[i].nanos?.concat(nanos); |
||||
} |
||||
} |
||||
dest.length += source.length; |
||||
dest.meta = { |
||||
...dest.meta, |
||||
stats: getCombinedMetadataStats(dest.meta?.stats ?? [], source.meta?.stats ?? []), |
||||
}; |
||||
} |
||||
|
||||
const TOTAL_BYTES_STAT = 'Summary: total bytes processed'; |
||||
|
||||
function getCombinedMetadataStats( |
||||
destStats: QueryResultMetaStat[], |
||||
sourceStats: QueryResultMetaStat[] |
||||
): QueryResultMetaStat[] { |
||||
// in the current approach, we only handle a single stat
|
||||
const destStat = destStats.find((s) => s.displayName === TOTAL_BYTES_STAT); |
||||
const sourceStat = sourceStats.find((s) => s.displayName === TOTAL_BYTES_STAT); |
||||
|
||||
if (sourceStat != null && destStat != null) { |
||||
return [{ value: sourceStat.value + destStat.value, displayName: TOTAL_BYTES_STAT, unit: destStat.unit }]; |
||||
} |
||||
|
||||
// maybe one of them exist
|
||||
const eitherStat = sourceStat ?? destStat; |
||||
if (eitherStat != null) { |
||||
return [eitherStat]; |
||||
} |
||||
|
||||
return []; |
||||
} |
||||
|
||||
/** |
||||
* Deep clones a DataQueryResponse |
||||
*/ |
||||
export function cloneQueryResponse(response: DataQueryResponse): DataQueryResponse { |
||||
const newResponse = { |
||||
...response, |
||||
data: response.data.map(cloneDataFrame), |
||||
}; |
||||
return newResponse; |
||||
} |
||||
|
||||
function cloneDataFrame(frame: DataQueryResponseData): DataQueryResponseData { |
||||
return { |
||||
...frame, |
||||
fields: frame.fields.map((field: Field) => ({ |
||||
...field, |
||||
values: field.values, |
||||
})), |
||||
}; |
||||
} |
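
A minimal sketch of how the merge helpers above could be driven; the `foldStreamedChunks` name and the `partials` argument are illustrative, and `combineResponses` is assumed to be imported from this module. Each streamed chunk is folded into the accumulated response, with matching frames concatenated and the byte-count stat summed.

  import { DataQueryResponse } from '@grafana/data';

  function foldStreamedChunks(partials: DataQueryResponse[]): DataQueryResponse | null {
    let combined: DataQueryResponse | null = null;
    for (const partial of partials) {
      // The first chunk is cloned; later chunks are merged frame-by-frame.
      combined = combineResponses(combined, partial);
    }
    return combined;
  }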
@ -0,0 +1,281 @@ |
||||
import { Grammar } from 'prismjs'; |
||||
|
||||
import { CompletionItem } from '@grafana/ui'; |
||||
|
||||
export const AGGREGATION_OPERATORS: CompletionItem[] = [ |
||||
{ |
||||
label: 'avg', |
||||
insertText: 'avg', |
||||
documentation: 'Calculate the average over dimensions', |
||||
}, |
||||
{ |
||||
label: 'bottomk', |
||||
insertText: 'bottomk', |
||||
documentation: 'Smallest k elements by sample value', |
||||
}, |
||||
{ |
||||
label: 'count', |
||||
insertText: 'count', |
||||
documentation: 'Count number of elements in the vector', |
||||
}, |
||||
{ |
||||
label: 'max', |
||||
insertText: 'max', |
||||
documentation: 'Select maximum over dimensions', |
||||
}, |
||||
{ |
||||
label: 'min', |
||||
insertText: 'min', |
||||
documentation: 'Select minimum over dimensions', |
||||
}, |
||||
{ |
||||
label: 'stddev', |
||||
insertText: 'stddev', |
||||
documentation: 'Calculate population standard deviation over dimensions', |
||||
}, |
||||
{ |
||||
label: 'stdvar', |
||||
insertText: 'stdvar', |
||||
documentation: 'Calculate population standard variance over dimensions', |
||||
}, |
||||
{ |
||||
label: 'sum', |
||||
insertText: 'sum', |
||||
documentation: 'Calculate sum over dimensions', |
||||
}, |
||||
{ |
||||
label: 'topk', |
||||
insertText: 'topk', |
||||
documentation: 'Largest k elements by sample value', |
||||
}, |
||||
]; |
||||
|
||||
export const PIPE_PARSERS: CompletionItem[] = [ |
||||
{ |
||||
label: 'json', |
||||
insertText: 'json', |
||||
documentation: 'Extracting labels from the log line using json parser.', |
||||
}, |
||||
{ |
||||
label: 'regexp', |
||||
insertText: 'regexp ""', |
||||
documentation: 'Extracting labels from the log line using regexp parser.', |
||||
move: -1, |
||||
}, |
||||
{ |
||||
label: 'logfmt', |
||||
insertText: 'logfmt', |
||||
documentation: 'Extracting labels from the log line using logfmt parser.', |
||||
}, |
||||
{ |
||||
label: 'pattern', |
||||
insertText: 'pattern', |
||||
documentation: 'Extracting labels from the log line using pattern parser. Only available in Loki 2.3+.', |
||||
}, |
||||
{ |
||||
label: 'unpack', |
||||
insertText: 'unpack', |
||||
detail: 'unpack identifier', |
||||
documentation: |
||||
'Parses a JSON log line, unpacking all embedded labels in the pack stage. A special property "_entry" will also be used to replace the original log line. Only available in Loki 2.2+.', |
||||
}, |
||||
]; |
||||
|
||||
export const PIPE_OPERATORS: CompletionItem[] = [ |
||||
{ |
||||
label: 'unwrap', |
||||
insertText: 'unwrap', |
||||
detail: 'unwrap identifier', |
||||
documentation: 'Take labels and use the values as sample data for metric aggregations.', |
||||
}, |
||||
{ |
||||
label: 'label_format', |
||||
insertText: 'label_format', |
||||
documentation: 'Use to rename, modify or add labels. For example, | label_format foo=bar .', |
||||
}, |
||||
{ |
||||
label: 'line_format', |
||||
insertText: 'line_format', |
||||
documentation: 'Rewrites log line content. For example, | line_format "{{.query}} {{.duration}}" .', |
||||
}, |
||||
]; |
||||
|
||||
export const RANGE_VEC_FUNCTIONS = [ |
||||
{ |
||||
insertText: 'avg_over_time', |
||||
label: 'avg_over_time', |
||||
detail: 'avg_over_time(range-vector)', |
||||
documentation: 'The average of all values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'bytes_over_time', |
||||
label: 'bytes_over_time', |
||||
detail: 'bytes_over_time(range-vector)', |
||||
documentation: 'Counts the amount of bytes used by each log stream for a given range', |
||||
}, |
||||
{ |
||||
insertText: 'bytes_rate', |
||||
label: 'bytes_rate', |
||||
detail: 'bytes_rate(range-vector)', |
||||
documentation: 'Calculates the number of bytes per second for each stream.', |
||||
}, |
||||
{ |
||||
insertText: 'first_over_time', |
||||
label: 'first_over_time', |
||||
detail: 'first_over_time(range-vector)', |
||||
documentation: 'The first of all values in the specified interval. Only available in Loki 2.3+.', |
||||
}, |
||||
{ |
||||
insertText: 'last_over_time', |
||||
label: 'last_over_time', |
||||
detail: 'last_over_time(range-vector)', |
||||
documentation: 'The last of all values in the specified interval. Only available in Loki 2.3+.', |
||||
}, |
||||
{ |
||||
insertText: 'sum_over_time', |
||||
label: 'sum_over_time', |
||||
detail: 'sum_over_time(range-vector)', |
||||
documentation: 'The sum of all values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'count_over_time', |
||||
label: 'count_over_time', |
||||
detail: 'count_over_time(range-vector)', |
||||
documentation: 'The count of all values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'max_over_time', |
||||
label: 'max_over_time', |
||||
detail: 'max_over_time(range-vector)', |
||||
documentation: 'The maximum of all values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'min_over_time', |
||||
label: 'min_over_time', |
||||
detail: 'min_over_time(range-vector)', |
||||
documentation: 'The minimum of all values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'quantile_over_time', |
||||
label: 'quantile_over_time', |
||||
detail: 'quantile_over_time(scalar, range-vector)', |
||||
documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'rate', |
||||
label: 'rate', |
||||
detail: 'rate(v range-vector)', |
||||
documentation: 'Calculates the number of entries per second.', |
||||
}, |
||||
{ |
||||
insertText: 'stddev_over_time', |
||||
label: 'stddev_over_time', |
||||
detail: 'stddev_over_time(range-vector)', |
||||
documentation: 'The population standard deviation of the values in the specified interval.', |
||||
}, |
||||
{ |
||||
insertText: 'stdvar_over_time', |
||||
label: 'stdvar_over_time', |
||||
detail: 'stdvar_over_time(range-vector)', |
||||
documentation: 'The population standard variance of the values in the specified interval.', |
||||
}, |
||||
]; |
||||
|
||||
export const BUILT_IN_FUNCTIONS = [ |
||||
{ |
||||
insertText: 'vector', |
||||
label: 'vector', |
||||
detail: 'vector(scalar)', |
||||
documentation: 'Returns the scalar as a vector with no labels.', |
||||
}, |
||||
]; |
||||
|
||||
export const FUNCTIONS = [...AGGREGATION_OPERATORS, ...RANGE_VEC_FUNCTIONS, ...BUILT_IN_FUNCTIONS]; |
||||
|
||||
// Loki grammar is used for query highlight in query previews outside of code editor
|
||||
export const lokiGrammar: Grammar = { |
||||
comment: { |
||||
pattern: /#.*/, |
||||
}, |
||||
'context-aggregation': { |
||||
pattern: /((without|by)\s*)\([^)]*\)/, // by ()
|
||||
lookbehind: true, |
||||
inside: { |
||||
'label-key': { |
||||
pattern: /[^(),\s][^,)]*[^),\s]*/, |
||||
alias: 'attr-name', |
||||
}, |
||||
punctuation: /[()]/, |
||||
}, |
||||
}, |
||||
'context-labels': { |
||||
pattern: /\{[^}]*(?=}?)/, |
||||
greedy: true, |
||||
inside: { |
||||
comment: { |
||||
pattern: /#.*/, |
||||
}, |
||||
'label-key': { |
||||
pattern: /[a-zA-Z_]\w*(?=\s*(=|!=|=~|!~))/, |
||||
alias: 'attr-name', |
||||
greedy: true, |
||||
}, |
||||
'label-value': { |
||||
pattern: /"(?:\\.|[^\\"])*"/, |
||||
greedy: true, |
||||
alias: 'attr-value', |
||||
}, |
||||
punctuation: /[{]/, |
||||
}, |
||||
}, |
||||
'context-pipe': { |
||||
pattern: /\s\|[^=~]\s?\w*/i, |
||||
inside: { |
||||
'pipe-operator': { |
||||
pattern: /\|/i, |
||||
alias: 'operator', |
||||
}, |
||||
'pipe-operations': { |
||||
pattern: new RegExp(`${[...PIPE_PARSERS, ...PIPE_OPERATORS].map((f) => f.label).join('|')}`, 'i'), |
||||
alias: 'keyword', |
||||
}, |
||||
}, |
||||
}, |
||||
function: new RegExp(`\\b(?:${FUNCTIONS.map((f) => f.label).join('|')})(?=\\s*\\()`, 'i'), |
||||
'context-range': [ |
||||
{ |
||||
pattern: /\[[^\]]*(?=\])/, // [1m]
|
||||
inside: { |
||||
'range-duration': { |
||||
pattern: /\b\d+[smhdwy]\b/i, |
||||
alias: 'number', |
||||
}, |
||||
}, |
||||
}, |
||||
{ |
||||
pattern: /(offset\s+)\w+/, // offset 1m
|
||||
lookbehind: true, |
||||
inside: { |
||||
'range-duration': { |
||||
pattern: /\b\d+[smhdwy]\b/i, |
||||
alias: 'number', |
||||
}, |
||||
}, |
||||
}, |
||||
], |
||||
quote: { |
||||
pattern: /"(?:\\.|[^\\"])*"/, |
||||
alias: 'string', |
||||
greedy: true, |
||||
}, |
||||
backticks: { |
||||
pattern: /`(?:\\.|[^\\`])*`/, |
||||
alias: 'string', |
||||
greedy: true, |
||||
}, |
||||
number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/, |
||||
operator: /\s?(\|[=~]?|!=?|<(?:=>?|<|>)?|>[>=]?)\s?/i, |
||||
punctuation: /[{}(),.]/, |
||||
}; |
||||
|
||||
export default lokiGrammar; |
@ -0,0 +1,97 @@ |
||||
import { Observable } from 'rxjs'; |
||||
|
||||
import { |
||||
DataFrame, |
||||
DataQueryRequest, |
||||
DataQueryResponse, |
||||
DataSourceJsonData, |
||||
DataSourcePluginMeta, |
||||
DataSourceRef, |
||||
ScopedVars, |
||||
TestDataSourceResponse, |
||||
} from '@grafana/data'; |
||||
import { BackendSrvRequest } from '@grafana/runtime'; |
||||
|
||||
import LokiLanguageProvider from './LanguageProvider'; |
||||
import { Loki as LokiQueryFromSchema, LokiQueryType, SupportingQueryType, LokiQueryDirection } from './dataquery.gen'; |
||||
|
||||
export { LokiQueryType }; |
||||
|
||||
export enum LokiResultType { |
||||
Stream = 'streams', |
||||
Vector = 'vector', |
||||
Matrix = 'matrix', |
||||
} |
||||
|
||||
export interface LokiQuery extends LokiQueryFromSchema { |
||||
direction?: LokiQueryDirection; |
||||
/** Used only to identify supporting queries, e.g. logs volume, logs sample and data sample */ |
||||
supportingQueryType?: SupportingQueryType; |
||||
// CUE autogenerates `queryType` as `?string`, as that's how it is defined
|
||||
// in the parent-interface (in DataQuery).
|
||||
// the temporary fix (until this gets improved in the codegen), is to
|
||||
// override it here
|
||||
queryType?: LokiQueryType; |
||||
|
||||
/** |
||||
* This is a property for the experimental query splitting feature. |
||||
* @experimental |
||||
*/ |
||||
splitDuration?: string; |
||||
} |
||||
|
||||
export interface LokiOptions extends DataSourceJsonData { |
||||
maxLines?: string; |
||||
derivedFields?: DerivedFieldConfig[]; |
||||
alertmanager?: string; |
||||
keepCookies?: string[]; |
||||
predefinedOperations?: string; |
||||
} |
||||
|
||||
export type DerivedFieldConfig = { |
||||
matcherRegex: string; |
||||
name: string; |
||||
url?: string; |
||||
urlDisplayLabel?: string; |
||||
datasourceUid?: string; |
||||
matcherType?: 'label' | 'regex'; |
||||
}; |
||||
|
||||
export interface QueryStats { |
||||
streams: number; |
||||
chunks: number; |
||||
bytes: number; |
||||
entries: number; |
||||
// The error message displayed in the UI when we cant estimate the size of the query.
|
||||
message?: string; |
||||
} |
||||
|
||||
export type LokiDatasource = { |
||||
name: string; |
||||
id: number; |
||||
type: string; |
||||
uid: string; |
||||
query: (request: DataQueryRequest<any>) => Observable<DataQueryResponse> | Promise<DataQueryResponse>; |
||||
testDatasource: () => Promise<TestDataSourceResponse>; |
||||
meta: DataSourcePluginMeta<{}>; |
||||
getRef: () => DataSourceRef; |
||||
metadataRequest: ( |
||||
url: string, |
||||
params?: Record<string, string | number>, |
||||
options?: Partial<BackendSrvRequest> |
||||
) => Promise<any>; |
||||
getTimeRangeParams: () => any; |
||||
interpolateString: (string: string, scopedVars?: ScopedVars) => string; |
||||
getDataSamples: (query: LokiQuery) => Promise<DataFrame[]>; |
||||
languageProvider: any; |
||||
}; |
||||
|
||||
export interface ParserAndLabelKeysResult { |
||||
extractedLabelKeys: string[]; |
||||
hasJSON: boolean; |
||||
hasLogfmt: boolean; |
||||
hasPack: boolean; |
||||
unwrapLabelKeys: string[]; |
||||
} |
||||
|
||||
export type LanguageProvider = LokiLanguageProvider; |
@ -0,0 +1,115 @@ |
||||
import { css } from '@emotion/css'; |
||||
import React from 'react'; |
||||
import { useToggle } from 'react-use'; |
||||
|
||||
import { getValueFormat, GrafanaTheme2 } from '@grafana/data'; |
||||
import { config } from '@grafana/runtime'; |
||||
import { Collapse, Icon, Tooltip, useStyles2, Stack } from '@grafana/ui'; |
||||
|
||||
import { QueryStats } from '../loki/types'; |
||||
|
||||
export interface Props { |
||||
title: string; |
||||
collapsedInfo: string[]; |
||||
queryStats?: QueryStats | null; |
||||
children: React.ReactNode; |
||||
} |
||||
|
||||
export function QueryOptionGroup({ title, children, collapsedInfo, queryStats }: Props) { |
||||
const [isOpen, toggleOpen] = useToggle(false); |
||||
const styles = useStyles2(getStyles); |
||||
|
||||
return ( |
||||
<div className={styles.wrapper}> |
||||
<Collapse |
||||
className={styles.collapse} |
||||
collapsible |
||||
isOpen={isOpen} |
||||
onToggle={toggleOpen} |
||||
label={ |
||||
<Stack gap={0}> |
||||
<h6 className={styles.title}>{title}</h6> |
||||
{!isOpen && ( |
||||
<div className={styles.description}> |
||||
{collapsedInfo.map((x, i) => ( |
||||
<span key={i}>{x}</span> |
||||
))} |
||||
</div> |
||||
)} |
||||
</Stack> |
||||
} |
||||
> |
||||
<div className={styles.body}>{children}</div> |
||||
</Collapse> |
||||
|
||||
{queryStats && config.featureToggles.lokiQuerySplitting && ( |
||||
<Tooltip content="Note: the query will be split into multiple parts and executed in sequence. Query limits will only apply each individual part."> |
||||
<Icon tabIndex={0} name="info-circle" className={styles.tooltip} size="sm" /> |
||||
</Tooltip> |
||||
)} |
||||
|
||||
{queryStats && <p className={styles.stats}>{generateQueryStats(queryStats)}</p>} |
||||
</div> |
||||
); |
||||
} |
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => { |
||||
return { |
||||
collapse: css({ |
||||
backgroundColor: 'unset', |
||||
border: 'unset', |
||||
marginBottom: 0, |
||||
|
||||
['> button']: { |
||||
padding: theme.spacing(0, 1), |
||||
}, |
||||
}), |
||||
wrapper: css({ |
||||
width: '100%', |
||||
display: 'flex', |
||||
justifyContent: 'space-between', |
||||
alignItems: 'baseline', |
||||
}), |
||||
title: css({ |
||||
flexGrow: 1, |
||||
overflow: 'hidden', |
||||
fontSize: theme.typography.bodySmall.fontSize, |
||||
fontWeight: theme.typography.fontWeightMedium, |
||||
margin: 0, |
||||
}), |
||||
description: css({ |
||||
color: theme.colors.text.secondary, |
||||
fontSize: theme.typography.bodySmall.fontSize, |
||||
fontWeight: theme.typography.bodySmall.fontWeight, |
||||
paddingLeft: theme.spacing(2), |
||||
gap: theme.spacing(2), |
||||
display: 'flex', |
||||
}), |
||||
body: css({ |
||||
display: 'flex', |
||||
gap: theme.spacing(2), |
||||
flexWrap: 'wrap', |
||||
}), |
||||
stats: css({ |
||||
margin: '0px', |
||||
color: theme.colors.text.secondary, |
||||
fontSize: theme.typography.bodySmall.fontSize, |
||||
}), |
||||
tooltip: css({ |
||||
marginRight: theme.spacing(0.25), |
||||
}), |
||||
}; |
||||
}; |
||||
|
||||
const generateQueryStats = (queryStats: QueryStats) => { |
||||
if (queryStats.message) { |
||||
return queryStats.message; |
||||
} |
||||
|
||||
return `This query will process approximately ${convertUnits(queryStats)}.`; |
||||
}; |
||||
|
||||
const convertUnits = (queryStats: QueryStats): string => { |
||||
const { text, suffix } = getValueFormat('bytes')(queryStats.bytes, 1); |
||||
return text + suffix; |
||||
}; |
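As a rough usage sketch (not part of the commit), the stats line is produced by formatting the byte estimate with Grafana's `bytes` value formatter; the object literal below assumes a `QueryStats` shape with a `bytes` field, since the real type lives in `../loki/types` and is not shown in this diff:

// Hedged sketch: assumes QueryStats carries a `bytes` field and no `message`.
const stats = { bytes: 1572864 } as QueryStats;
const formatted = convertUnits(stats); // roughly "1.5 MiB" (exact text depends on getValueFormat)
const line = generateQueryStats(stats); // "This query will process approximately 1.5 MiB."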
@ -0,0 +1,37 @@ |
||||
import { css, cx } from '@emotion/css'; |
||||
import Prism, { Grammar } from 'prismjs'; |
||||
import React from 'react'; |
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data/src'; |
||||
import { useTheme2 } from '@grafana/ui/src'; |
||||
|
||||
export interface Props { |
||||
query: string; |
||||
lang: { |
||||
grammar: Grammar; |
||||
name: string; |
||||
}; |
||||
className?: string; |
||||
} |
||||
export function RawQuery({ query, lang, className }: Props) { |
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme); |
||||
const highlighted = Prism.highlight(query, lang.grammar, lang.name); |
||||
|
||||
return ( |
||||
<div |
||||
className={cx(styles.editorField, 'prism-syntax-highlight', className)} |
||||
aria-label="selector" |
||||
dangerouslySetInnerHTML={{ __html: highlighted }} |
||||
/> |
||||
); |
||||
} |
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => { |
||||
return { |
||||
editorField: css({ |
||||
fontFamily: theme.typography.fontFamilyMonospace, |
||||
fontSize: theme.typography.bodySmall.fontSize, |
||||
}), |
||||
}; |
||||
}; |
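A hedged usage sketch for the component above; the `promql` grammar shown is only an assumption (it must have been registered with Prism beforehand), and the RawQuery import path is omitted because the file location is not part of this diff:

import Prism from 'prismjs';
import React from 'react';
// import { RawQuery } from './RawQuery'; // path assumed

// Renders the query with Prism syntax highlighting; any registered Grammar/name pair works.
export const Example = () => (
  <RawQuery query={'rate(http_requests_total[5m])'} lang={{ grammar: Prism.languages.promql, name: 'promql' }} />
);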
@ -0,0 +1,54 @@ |
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
//
|
||||
// Generated by:
|
||||
// public/app/plugins/gen.go
|
||||
// Using jennies:
|
||||
// TSTypesJenny
|
||||
// PluginTSTypesJenny
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
import * as common from '@grafana/schema'; |
||||
|
||||
export enum QueryEditorMode { |
||||
Builder = 'builder', |
||||
Code = 'code', |
||||
} |
||||
|
||||
export type PromQueryFormat = ('time_series' | 'table' | 'heatmap'); |
||||
|
||||
export interface Prometheus extends common.DataQuery { |
||||
/** |
||||
* Specifies which editor is being used to prepare the query. It can be "code" or "builder" |
||||
*/ |
||||
editorMode?: QueryEditorMode; |
||||
/** |
||||
* Execute an additional query to identify interesting raw samples relevant for the given expr |
||||
*/ |
||||
exemplar?: boolean; |
||||
/** |
||||
* The actual expression/query that will be evaluated by Prometheus |
||||
*/ |
||||
expr: string; |
||||
/** |
||||
* Query format to determine how to display data points in panel. It can be "time_series", "table", "heatmap" |
||||
*/ |
||||
format?: PromQueryFormat; |
||||
/** |
||||
* Returns only the latest value that Prometheus has scraped for the requested time series |
||||
*/ |
||||
instant?: boolean; |
||||
/** |
||||
* @deprecated Used to specify how many times to divide max data points by. We use max data points under query options |
||||
* See https://github.com/grafana/grafana/issues/48081
|
||||
*/ |
||||
intervalFactor?: number; |
||||
/** |
||||
* Series name override or template. Ex. {{hostname}} will be replaced with label value for hostname |
||||
*/ |
||||
legendFormat?: string; |
||||
/** |
||||
* Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series |
||||
*/ |
||||
range?: boolean; |
||||
} |
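For reference, a minimal query object that satisfies the generated `Prometheus` interface above; `refId` comes from `common.DataQuery` and the expression is purely illustrative:

const examplePromQuery: Prometheus = {
  refId: 'A',
  expr: 'sum(rate(http_requests_total[5m])) by (job)', // illustrative PromQL
  editorMode: QueryEditorMode.Code,
  format: 'time_series',
  range: true,
  legendFormat: '{{job}}',
};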
@ -0,0 +1,122 @@ |
||||
import { invert } from 'lodash'; |
||||
import { Token } from 'prismjs'; |
||||
|
||||
import { AbstractLabelOperator, AbstractLabelMatcher, AbstractQuery } from '@grafana/data'; |
||||
|
||||
export const SUGGESTIONS_LIMIT = 10000; |
||||
|
||||
const FromPromLikeMap: Record<string, AbstractLabelOperator> = { |
||||
'=': AbstractLabelOperator.Equal, |
||||
'!=': AbstractLabelOperator.NotEqual, |
||||
'=~': AbstractLabelOperator.EqualRegEx, |
||||
'!~': AbstractLabelOperator.NotEqualRegEx, |
||||
}; |
||||
|
||||
const ToPromLikeMap: Record<AbstractLabelOperator, string> = invert(FromPromLikeMap) as Record< |
||||
AbstractLabelOperator, |
||||
string |
||||
>; |
||||
|
||||
export function limitSuggestions(items: string[]) { |
||||
return items.slice(0, SUGGESTIONS_LIMIT); |
||||
} |
||||
|
||||
export function processLabels(labels: Array<{ [key: string]: string }>, withName = false) { |
||||
// For processing we are going to use sets as they have significantly better performance than arrays
|
||||
// After we process labels, we will convert the sets to arrays and return an object with label values in arrays
|
||||
const valueSet: { [key: string]: Set<string> } = {}; |
||||
labels.forEach((label) => { |
||||
const { __name__, ...rest } = label; |
||||
if (withName) { |
||||
valueSet['__name__'] = valueSet['__name__'] || new Set(); |
||||
if (!valueSet['__name__'].has(__name__)) { |
||||
valueSet['__name__'].add(__name__); |
||||
} |
||||
} |
||||
|
||||
Object.keys(rest).forEach((key) => { |
||||
if (!valueSet[key]) { |
||||
valueSet[key] = new Set(); |
||||
} |
||||
if (!valueSet[key].has(rest[key])) { |
||||
valueSet[key].add(rest[key]); |
||||
} |
||||
}); |
||||
}); |
||||
|
||||
// valueArray that we are going to return in the object
|
||||
const valueArray: { [key: string]: string[] } = {}; |
||||
limitSuggestions(Object.keys(valueSet)).forEach((key) => { |
||||
valueArray[key] = limitSuggestions(Array.from(valueSet[key])); |
||||
}); |
||||
|
||||
return { values: valueArray, keys: Object.keys(valueArray) }; |
||||
} |
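A small illustration of the shape `processLabels` returns (values deduplicated per label key, `__name__` only kept when `withName` is true):

const { values, keys } = processLabels(
  [
    { __name__: 'up', job: 'grafana' },
    { __name__: 'up', job: 'loki' },
  ],
  true
);
// values => { __name__: ['up'], job: ['grafana', 'loki'] }
// keys   => ['__name__', 'job']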
||||
|
||||
export function toPromLikeExpr(labelBasedQuery: AbstractQuery): string { |
||||
const expr = labelBasedQuery.labelMatchers |
||||
.map((selector: AbstractLabelMatcher) => { |
||||
const operator = ToPromLikeMap[selector.operator]; |
||||
if (operator) { |
||||
return `${selector.name}${operator}"${selector.value}"`; |
||||
} else { |
||||
return ''; |
||||
} |
||||
}) |
||||
.filter((e: string) => e !== '') |
||||
.join(', '); |
||||
|
||||
return expr ? `{${expr}}` : ''; |
||||
} |
||||
|
||||
function getMaybeTokenStringContent(token: Token): string { |
||||
if (typeof token.content === 'string') { |
||||
return token.content; |
||||
} |
||||
|
||||
return ''; |
||||
} |
||||
|
||||
export function extractLabelMatchers(tokens: Array<string | Token>): AbstractLabelMatcher[] { |
||||
const labelMatchers: AbstractLabelMatcher[] = []; |
||||
|
||||
for (const token of tokens) { |
||||
if (!(token instanceof Token)) { |
||||
continue; |
||||
} |
||||
|
||||
if (token.type === 'context-labels') { |
||||
let labelKey = ''; |
||||
let labelValue = ''; |
||||
let labelOperator = ''; |
||||
|
||||
const contentTokens = Array.isArray(token.content) ? token.content : [token.content]; |
||||
|
||||
for (let currentToken of contentTokens) { |
||||
if (typeof currentToken === 'string') { |
||||
let currentStr: string; |
||||
currentStr = currentToken; |
||||
if (currentStr === '=' || currentStr === '!=' || currentStr === '=~' || currentStr === '!~') { |
||||
labelOperator = currentStr; |
||||
} |
||||
} else if (currentToken instanceof Token) { |
||||
switch (currentToken.type) { |
||||
case 'label-key': |
||||
labelKey = getMaybeTokenStringContent(currentToken); |
||||
break; |
||||
case 'label-value': |
||||
labelValue = getMaybeTokenStringContent(currentToken); |
||||
labelValue = labelValue.substring(1, labelValue.length - 1); |
||||
const labelComparator = FromPromLikeMap[labelOperator]; |
||||
if (labelComparator) { |
||||
labelMatchers.push({ name: labelKey, operator: labelComparator, value: labelValue }); |
||||
} |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
return labelMatchers; |
||||
} |
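A worked example of `toPromLikeExpr` (the inverse direction of `extractLabelMatchers`, which walks Prism `context-labels` tokens): matchers with unknown operators are dropped and the rest are joined into a single selector:

const selector = toPromLikeExpr({
  refId: 'A',
  labelMatchers: [
    { name: 'job', operator: AbstractLabelOperator.Equal, value: 'grafana' },
    { name: 'env', operator: AbstractLabelOperator.EqualRegEx, value: 'prod.*' },
  ],
});
// selector => '{job="grafana", env=~"prod.*"}'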
@ -0,0 +1,165 @@ |
||||
import { Observable } from 'rxjs'; |
||||
|
||||
import { |
||||
DataQueryRequest, |
||||
DataQueryResponse, |
||||
DataSourceGetTagKeysOptions, |
||||
DataSourceJsonData, |
||||
MetricFindValue, |
||||
} from '@grafana/data'; |
||||
import { DataQuery } from '@grafana/schema'; |
||||
|
||||
import { Prometheus as GenPromQuery } from './dataquery.gen'; |
||||
|
||||
// import { QueryBuilderLabelFilter, QueryEditorMode } from './querybuilder/shared/types';
|
||||
export interface QueryBuilderLabelFilter { |
||||
label: string; |
||||
op: string; |
||||
value: string; |
||||
} |
||||
|
||||
export enum QueryEditorMode { |
||||
Code = 'code', |
||||
Builder = 'builder', |
||||
} |
||||
|
||||
export interface PromQuery extends GenPromQuery, DataQuery { |
||||
/** |
||||
* Timezone offset to align start & end time on backend |
||||
*/ |
||||
utcOffsetSec?: number; |
||||
valueWithRefId?: boolean; |
||||
showingGraph?: boolean; |
||||
showingTable?: boolean; |
||||
hinting?: boolean; |
||||
interval?: string; |
||||
// store the metrics explorer additional settings
|
||||
useBackend?: boolean; |
||||
disableTextWrap?: boolean; |
||||
fullMetaSearch?: boolean; |
||||
includeNullMetadata?: boolean; |
||||
} |
||||
|
||||
export enum PrometheusCacheLevel { |
||||
Low = 'Low', |
||||
Medium = 'Medium', |
||||
High = 'High', |
||||
None = 'None', |
||||
} |
||||
|
||||
export enum PromApplication { |
||||
Cortex = 'Cortex', |
||||
Mimir = 'Mimir', |
||||
Prometheus = 'Prometheus', |
||||
Thanos = 'Thanos', |
||||
} |
||||
|
||||
export interface PromOptions extends DataSourceJsonData { |
||||
timeInterval?: string; |
||||
queryTimeout?: string; |
||||
httpMethod?: string; |
||||
customQueryParameters?: string; |
||||
disableMetricsLookup?: boolean; |
||||
exemplarTraceIdDestinations?: ExemplarTraceIdDestination[]; |
||||
prometheusType?: PromApplication; |
||||
prometheusVersion?: string; |
||||
cacheLevel?: PrometheusCacheLevel; |
||||
defaultEditor?: QueryEditorMode; |
||||
incrementalQuerying?: boolean; |
||||
incrementalQueryOverlapWindow?: string; |
||||
disableRecordingRules?: boolean; |
||||
sigV4Auth?: boolean; |
||||
oauthPassThru?: boolean; |
||||
} |
||||
|
||||
export type ExemplarTraceIdDestination = { |
||||
name: string; |
||||
url?: string; |
||||
urlDisplayLabel?: string; |
||||
datasourceUid?: string; |
||||
}; |
||||
|
||||
export interface PromQueryRequest extends PromQuery { |
||||
step?: number; |
||||
requestId?: string; |
||||
start: number; |
||||
end: number; |
||||
headers?: any; |
||||
} |
||||
|
||||
export interface PromMetricsMetadataItem { |
||||
type: string; |
||||
help: string; |
||||
unit?: string; |
||||
} |
||||
|
||||
export interface PromMetricsMetadata { |
||||
[metric: string]: PromMetricsMetadataItem; |
||||
} |
||||
|
||||
export type PromValue = [number, any]; |
||||
|
||||
export interface PromMetric { |
||||
__name__?: string; |
||||
|
||||
[index: string]: any; |
||||
} |
||||
|
||||
export interface PromBuildInfoResponse { |
||||
data: { |
||||
application?: string; |
||||
version: string; |
||||
revision: string; |
||||
features?: { |
||||
ruler_config_api?: 'true' | 'false'; |
||||
alertmanager_config_api?: 'true' | 'false'; |
||||
query_sharding?: 'true' | 'false'; |
||||
federated_rules?: 'true' | 'false'; |
||||
}; |
||||
[key: string]: unknown; |
||||
}; |
||||
status: 'success'; |
||||
} |
||||
|
||||
/** |
||||
* Auto = query.legendFormat == '__auto' |
||||
* Verbose = query.legendFormat == null/undefined/'' |
||||
* Custom = query.legendFormat.length > 0 && query.legendFormat !== '__auto' |
||||
*/ |
||||
export enum LegendFormatMode { |
||||
Auto = '__auto', |
||||
Verbose = '__verbose', |
||||
Custom = '__custom', |
||||
} |
||||
|
||||
export enum PromVariableQueryType { |
||||
LabelNames, |
||||
LabelValues, |
||||
MetricNames, |
||||
VarQueryResult, |
||||
SeriesQuery, |
||||
ClassicQuery, |
||||
} |
||||
|
||||
export interface PromVariableQuery extends DataQuery { |
||||
query?: string; |
||||
expr?: string; |
||||
qryType?: PromVariableQueryType; |
||||
label?: string; |
||||
metric?: string; |
||||
varQuery?: string; |
||||
seriesQuery?: string; |
||||
labelFilters?: QueryBuilderLabelFilter[]; |
||||
match?: string; |
||||
classicQuery?: string; |
||||
} |
||||
|
||||
export type StandardPromVariableQuery = { |
||||
query: string; |
||||
refId: string; |
||||
}; |
||||
|
||||
export type PrometheusDatasource = { |
||||
getTagKeys(options: DataSourceGetTagKeysOptions): Promise<MetricFindValue[]>; |
||||
query(request: DataQueryRequest<PromQuery>): Observable<DataQueryResponse>; |
||||
}; |
@ -0,0 +1,47 @@ |
||||
import { css } from '@emotion/css'; |
||||
import React from 'react'; |
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data'; |
||||
import { useStyles2 } from '@grafana/ui'; |
||||
|
||||
type Props = { |
||||
description: string; |
||||
suffix: string; |
||||
feature: string; |
||||
}; |
||||
|
||||
export function ConfigDescriptionLink(props: Props) { |
||||
const { description, suffix, feature } = props; |
||||
const text = `Learn more about ${feature}`; |
||||
const styles = useStyles2(getStyles); |
||||
|
||||
return ( |
||||
<span className={styles.container}> |
||||
{description} |
||||
<a |
||||
aria-label={text} |
||||
href={`https://grafana.com/docs/grafana/next/datasources/${suffix}`} |
||||
rel="noreferrer" |
||||
target="_blank" |
||||
> |
||||
{text} |
||||
</a> |
||||
</span> |
||||
); |
||||
} |
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => { |
||||
return { |
||||
container: css({ |
||||
color: theme.colors.text.secondary, |
||||
a: css({ |
||||
color: theme.colors.text.link, |
||||
textDecoration: 'underline', |
||||
marginLeft: '5px', |
||||
'&:hover': { |
||||
textDecoration: 'none', |
||||
}, |
||||
}), |
||||
}), |
||||
}; |
||||
}; |
@ -0,0 +1,76 @@ |
||||
import { render, screen, waitFor } from '@testing-library/react'; |
||||
import userEvent from '@testing-library/user-event'; |
||||
import React, { useState } from 'react'; |
||||
|
||||
import { invalidTimeShiftError } from '../TraceToLogs/TraceToLogsSettings'; |
||||
|
||||
import { IntervalInput } from './IntervalInput'; |
||||
|
||||
describe('IntervalInput', () => { |
||||
const IntervalInputWithProps = ({ val }: { val: string }) => { |
||||
const [value, setValue] = useState(val); |
||||
|
||||
return ( |
||||
<IntervalInput |
||||
label="" |
||||
tooltip="" |
||||
value={value} |
||||
disabled={false} |
||||
onChange={(v) => { |
||||
setValue(v); |
||||
}} |
||||
isInvalidError={invalidTimeShiftError} |
||||
/> |
||||
); |
||||
}; |
||||
|
||||
describe('validates time shift correctly', () => { |
||||
it('for previously saved invalid value', async () => { |
||||
render(<IntervalInputtWithProps val="77" />); |
||||
expect(screen.getByDisplayValue('77')).toBeInTheDocument(); |
||||
expect(screen.getByText(invalidTimeShiftError)).toBeInTheDocument(); |
||||
}); |
||||
|
||||
it('for previously saved empty value', async () => { |
||||
render(<IntervalInputtWithProps val="" />); |
||||
expect(screen.getByPlaceholderText('0')).toBeInTheDocument(); |
||||
expect(screen.queryByText(invalidTimeShiftError)).not.toBeInTheDocument(); |
||||
}); |
||||
|
||||
it('for empty (valid) value', async () => { |
||||
render(<IntervalInputtWithProps val="1ms" />); |
||||
await userEvent.clear(screen.getByDisplayValue('1ms')); |
||||
await waitFor(() => { |
||||
expect(screen.queryByText(invalidTimeShiftError)).not.toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('for valid value', async () => { |
||||
render(<IntervalInputtWithProps val="10ms" />); |
||||
expect(screen.queryByText(invalidTimeShiftError)).not.toBeInTheDocument(); |
||||
|
||||
const input = screen.getByDisplayValue('10ms'); |
||||
await userEvent.clear(input); |
||||
await userEvent.type(input, '100s'); |
||||
await waitFor(() => { |
||||
expect(screen.queryByText(invalidTimeShiftError)).not.toBeInTheDocument(); |
||||
}); |
||||
|
||||
await userEvent.clear(input); |
||||
await userEvent.type(input, '-77ms'); |
||||
await waitFor(() => { |
||||
expect(screen.queryByText(invalidTimeShiftError)).not.toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('for invalid value', async () => { |
||||
render(<IntervalInputtWithProps val="10ms" />); |
||||
const input = screen.getByDisplayValue('10ms'); |
||||
await userEvent.clear(input); |
||||
await userEvent.type(input, 'abc'); |
||||
await waitFor(() => { |
||||
expect(screen.queryByText(invalidTimeShiftError)).toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
}); |
||||
}); |
@ -0,0 +1,69 @@ |
||||
import React, { useState } from 'react'; |
||||
import { useDebounce } from 'react-use'; |
||||
|
||||
import { InlineField, Input } from '@grafana/ui'; |
||||
|
||||
import { validateInterval, validateIntervalRegex } from './validation'; |
||||
|
||||
interface Props { |
||||
value: string; |
||||
onChange: (val: string) => void; |
||||
isInvalidError: string; |
||||
placeholder?: string; |
||||
width?: number; |
||||
ariaLabel?: string; |
||||
label?: string; |
||||
tooltip?: string; |
||||
disabled?: boolean; |
||||
validationRegex?: RegExp; |
||||
} |
||||
|
||||
interface FieldProps { |
||||
labelWidth: number; |
||||
disabled: boolean; |
||||
invalid: boolean; |
||||
error: string; |
||||
label?: string; |
||||
tooltip?: string; |
||||
} |
||||
|
||||
export const IntervalInput = (props: Props) => { |
||||
const validationRegex = props.validationRegex || validateIntervalRegex; |
||||
const [intervalIsInvalid, setIntervalIsInvalid] = useState(() => { |
||||
return props.value ? validateInterval(props.value, validationRegex) : false; |
||||
}); |
||||
|
||||
useDebounce( |
||||
() => { |
||||
setIntervalIsInvalid(validateInterval(props.value, validationRegex)); |
||||
}, |
||||
500, |
||||
[props.value] |
||||
); |
||||
|
||||
const fieldProps: FieldProps = { |
||||
labelWidth: 26, |
||||
disabled: props.disabled ?? false, |
||||
invalid: intervalIsInvalid, |
||||
error: props.isInvalidError, |
||||
}; |
||||
if (props.label) { |
||||
fieldProps.label = props.label; |
||||
fieldProps.tooltip = props.tooltip || ''; |
||||
} |
||||
|
||||
return ( |
||||
<InlineField {...fieldProps}> |
||||
<Input |
||||
type="text" |
||||
placeholder={props.placeholder || '0'} |
||||
width={props.width || 40} |
||||
onChange={(e) => { |
||||
props.onChange(e.currentTarget.value); |
||||
}} |
||||
value={props.value} |
||||
aria-label={props.ariaLabel || 'interval input'} |
||||
/> |
||||
</InlineField> |
||||
); |
||||
}; |
@ -0,0 +1,28 @@ |
||||
import { validateInterval, validateIntervalRegex } from './validation'; |
||||
|
||||
describe('Validation', () => { |
||||
it('should validate incorrect values correctly', () => { |
||||
expect(validateInterval('-', validateIntervalRegex)).toBeTruthy(); |
||||
expect(validateInterval('1', validateIntervalRegex)).toBeTruthy(); |
||||
expect(validateInterval('test', validateIntervalRegex)).toBeTruthy(); |
||||
expect(validateInterval('1ds', validateIntervalRegex)).toBeTruthy(); |
||||
expect(validateInterval('10Ms', validateIntervalRegex)).toBeTruthy(); |
||||
expect(validateInterval('-9999999', validateIntervalRegex)).toBeTruthy(); |
||||
}); |
||||
|
||||
it('should validate correct values correctly', () => { |
||||
expect(validateInterval('1y', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('1M', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('1w', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('1d', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('2h', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('4m', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('8s', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('80ms', validateIntervalRegex)).toBeFalsy(); |
||||
expect(validateInterval('-80ms', validateIntervalRegex)).toBeFalsy(); |
||||
}); |
||||
|
||||
it('should not return error if no value provided', () => { |
||||
expect(validateInterval('', validateIntervalRegex)).toBeFalsy(); |
||||
}); |
||||
}); |
@ -0,0 +1,6 @@ |
||||
export const validateIntervalRegex = /^(-?\d+(?:\.\d+)?)(ms|[Mwdhmsy])$/; |
||||
|
||||
export const validateInterval = (val: string, regex: RegExp) => { |
||||
const matches = val.match(regex); |
||||
return matches || !val ? false : true; |
||||
}; |
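Note that `validateInterval` returns `true` when the value is invalid (that is what the editors feed into the `invalid` prop); a quick sketch of the semantics:

validateInterval('5m', validateIntervalRegex);  // false – matches the regex, so no error is shown
validateInterval('abc', validateIntervalRegex); // true  – does not match, the caller displays isInvalidError
validateInterval('', validateIntervalRegex);    // false – empty values are intentionally treated as valid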
@ -0,0 +1,112 @@ |
||||
import { css, cx } from '@emotion/css'; |
||||
import React from 'react'; |
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data'; |
||||
import { InlineLabel, SegmentInput, ToolbarButton, useStyles2 } from '@grafana/ui'; |
||||
import { ToolbarButtonVariant } from '@grafana/ui/src/components/ToolbarButton'; |
||||
|
||||
import { TraceToLogsTag } from './TraceToLogsSettings'; |
||||
|
||||
interface Props { |
||||
values: TraceToLogsTag[]; |
||||
onChange: (values: TraceToLogsTag[]) => void; |
||||
id?: string; |
||||
} |
||||
|
||||
const VARIANT = 'none' as ToolbarButtonVariant; |
||||
|
||||
export const TagMappingInput = ({ values, onChange, id }: Props) => { |
||||
const styles = useStyles2(getStyles); |
||||
|
||||
return ( |
||||
<div className={styles.wrapper}> |
||||
{values.length ? ( |
||||
values.map((value, idx) => ( |
||||
<div className={styles.pair} key={idx}> |
||||
<SegmentInput |
||||
id={`${id}-key-${idx}`} |
||||
placeholder={'Tag name'} |
||||
value={value.key} |
||||
onChange={(e) => { |
||||
onChange( |
||||
values.map((v, i) => { |
||||
if (i === idx) { |
||||
return { ...v, key: String(e) }; |
||||
} |
||||
return v; |
||||
}) |
||||
); |
||||
}} |
||||
/> |
||||
<InlineLabel aria-label="equals" className={styles.operator}> |
||||
as |
||||
</InlineLabel> |
||||
<SegmentInput |
||||
id={`${id}-value-${idx}`} |
||||
placeholder={'New name (optional)'} |
||||
value={value.value || ''} |
||||
onChange={(e) => { |
||||
onChange( |
||||
values.map((v, i) => { |
||||
if (i === idx) { |
||||
return { ...v, value: String(e) }; |
||||
} |
||||
return v; |
||||
}) |
||||
); |
||||
}} |
||||
/> |
||||
<ToolbarButton |
||||
onClick={() => onChange([...values.slice(0, idx), ...values.slice(idx + 1)])} |
||||
className={cx(styles.removeTag, 'query-part')} |
||||
aria-label="Remove tag" |
||||
variant={VARIANT} |
||||
type="button" |
||||
icon="times" |
||||
/> |
||||
|
||||
{idx === values.length - 1 ? ( |
||||
<ToolbarButton |
||||
onClick={() => onChange([...values, { key: '', value: '' }])} |
||||
className="query-part" |
||||
aria-label="Add tag" |
||||
type="button" |
||||
variant={VARIANT} |
||||
icon="plus" |
||||
/> |
||||
) : null} |
||||
</div> |
||||
)) |
||||
) : ( |
||||
<ToolbarButton |
||||
icon="plus" |
||||
onClick={() => onChange([...values, { key: '', value: '' }])} |
||||
className="query-part" |
||||
aria-label="Add tag" |
||||
type="button" |
||||
variant={VARIANT} |
||||
/> |
||||
)} |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({ |
||||
wrapper: css({ |
||||
display: 'flex', |
||||
flexDirection: 'column', |
||||
gap: `${theme.spacing(0.5)} 0`, |
||||
}), |
||||
pair: css({ |
||||
display: 'flex', |
||||
justifyContent: 'start', |
||||
alignItems: 'center', |
||||
}), |
||||
operator: css({ |
||||
color: theme.v1.palette.orange, |
||||
width: 'auto', |
||||
}), |
||||
removeTag: css({ |
||||
marginRight: theme.spacing(0.5), |
||||
}), |
||||
}); |
@ -0,0 +1,123 @@ |
||||
import { render, screen } from '@testing-library/react'; |
||||
import userEvent from '@testing-library/user-event'; |
||||
import React from 'react'; |
||||
|
||||
import { DataSourceInstanceSettings, DataSourceSettings } from '@grafana/data'; |
||||
import { DataSourceSrv, setDataSourceSrv } from '@grafana/runtime'; |
||||
|
||||
import { TraceToLogsData, TraceToLogsSettings } from './TraceToLogsSettings'; |
||||
|
||||
const defaultOptionsOldFormat: DataSourceSettings<TraceToLogsData> = { |
||||
jsonData: { |
||||
tracesToLogs: { |
||||
datasourceUid: 'loki1_uid', |
||||
tags: ['someTag'], |
||||
mapTagNamesEnabled: false, |
||||
spanStartTimeShift: '1m', |
||||
spanEndTimeShift: '1m', |
||||
filterByTraceID: true, |
||||
filterBySpanID: true, |
||||
}, |
||||
}, |
||||
} as unknown as DataSourceSettings<TraceToLogsData>; |
||||
|
||||
const defaultOptionsNewFormat: DataSourceSettings<TraceToLogsData> = { |
||||
jsonData: { |
||||
tracesToLogsV2: { |
||||
datasourceUid: 'loki1_uid', |
||||
tags: [{ key: 'someTag', value: 'newName' }], |
||||
spanStartTimeShift: '1m', |
||||
spanEndTimeShift: '1m', |
||||
filterByTraceID: true, |
||||
filterBySpanID: true, |
||||
customQuery: true, |
||||
query: '{${__tags}}', |
||||
}, |
||||
}, |
||||
} as unknown as DataSourceSettings<TraceToLogsData>; |
||||
|
||||
const lokiSettings = { |
||||
uid: 'loki1_uid', |
||||
name: 'loki1', |
||||
type: 'loki', |
||||
meta: { info: { logos: { small: '' } } }, |
||||
} as unknown as DataSourceInstanceSettings; |
||||
|
||||
describe('TraceToLogsSettings', () => { |
||||
beforeAll(() => { |
||||
setDataSourceSrv({ |
||||
getList() { |
||||
return [lokiSettings]; |
||||
}, |
||||
getInstanceSettings() { |
||||
return lokiSettings; |
||||
}, |
||||
} as unknown as DataSourceSrv); |
||||
}); |
||||
|
||||
it('should render old format without error', () => { |
||||
expect(() => |
||||
render(<TraceToLogsSettings options={defaultOptionsOldFormat} onOptionsChange={() => {}} />) |
||||
).not.toThrow(); |
||||
}); |
||||
|
||||
it('should render new format without error', () => { |
||||
expect(() => |
||||
render(<TraceToLogsSettings options={defaultOptionsNewFormat} onOptionsChange={() => {}} />) |
||||
).not.toThrow(); |
||||
}); |
||||
|
||||
it('should render and transform data from old format correctly', () => { |
||||
render(<TraceToLogsSettings options={defaultOptionsOldFormat} onOptionsChange={() => {}} />); |
||||
expect(screen.getByText('someTag')).toBeInTheDocument(); |
||||
expect((screen.getByLabelText('Use custom query') as HTMLInputElement).checked).toBeFalsy(); |
||||
expect((screen.getByLabelText('Filter by trace ID') as HTMLInputElement).checked).toBeTruthy(); |
||||
expect((screen.getByLabelText('Filter by span ID') as HTMLInputElement).checked).toBeTruthy(); |
||||
}); |
||||
|
||||
it('renders old mapped tags correctly', () => { |
||||
const options = { |
||||
...defaultOptionsOldFormat, |
||||
jsonData: { |
||||
...defaultOptionsOldFormat.jsonData, |
||||
tracesToLogs: { |
||||
...defaultOptionsOldFormat.jsonData.tracesToLogs, |
||||
tags: undefined, |
||||
mappedTags: [{ key: 'someTag', value: 'withNewName' }], |
||||
mapTagNamesEnabled: true, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
render(<TraceToLogsSettings options={options} onOptionsChange={() => {}} />); |
||||
expect(screen.getByText('someTag')).toBeInTheDocument(); |
||||
expect(screen.getByText('withNewName')).toBeInTheDocument(); |
||||
}); |
||||
|
||||
it('transforms old format to new on change', async () => { |
||||
const changeMock = jest.fn(); |
||||
render(<TraceToLogsSettings options={defaultOptionsOldFormat} onOptionsChange={changeMock} />); |
||||
const checkBox = screen.getByLabelText('Filter by trace ID'); |
||||
await userEvent.click(checkBox); |
||||
expect(changeMock.mock.calls[0]).toEqual([ |
||||
{ |
||||
jsonData: { |
||||
tracesToLogs: undefined, |
||||
tracesToLogsV2: { |
||||
customQuery: false, |
||||
datasourceUid: 'loki1_uid', |
||||
filterBySpanID: true, |
||||
filterByTraceID: false, |
||||
spanEndTimeShift: '1m', |
||||
spanStartTimeShift: '1m', |
||||
tags: [ |
||||
{ |
||||
key: 'someTag', |
||||
}, |
||||
], |
||||
}, |
||||
}, |
||||
}, |
||||
]); |
||||
}); |
||||
}); |
@ -0,0 +1,275 @@ |
||||
import { css } from '@emotion/css'; |
||||
import React, { useCallback, useMemo } from 'react'; |
||||
|
||||
import { DataSourceJsonData, DataSourceInstanceSettings, DataSourcePluginOptionsEditorProps } from '@grafana/data'; |
||||
import { ConfigSection } from '@grafana/experimental'; |
||||
import { DataSourcePicker } from '@grafana/runtime'; |
||||
import { InlineField, InlineFieldRow, Input, InlineSwitch } from '@grafana/ui'; |
||||
|
||||
import { ConfigDescriptionLink } from '../ConfigDescriptionLink'; |
||||
import { IntervalInput } from '../IntervalInput/IntervalInput'; |
||||
|
||||
import { TagMappingInput } from './TagMappingInput'; |
||||
|
||||
export interface TraceToLogsTag { |
||||
key: string; |
||||
value?: string; |
||||
} |
||||
|
||||
// @deprecated use getTraceToLogsOptions to get the v2 version of this config from jsonData
|
||||
export interface TraceToLogsOptions { |
||||
datasourceUid?: string; |
||||
tags?: string[]; |
||||
mappedTags?: TraceToLogsTag[]; |
||||
mapTagNamesEnabled?: boolean; |
||||
spanStartTimeShift?: string; |
||||
spanEndTimeShift?: string; |
||||
filterByTraceID?: boolean; |
||||
filterBySpanID?: boolean; |
||||
lokiSearch?: boolean; // legacy
|
||||
} |
||||
|
||||
export interface TraceToLogsOptionsV2 { |
||||
datasourceUid?: string; |
||||
tags?: TraceToLogsTag[]; |
||||
spanStartTimeShift?: string; |
||||
spanEndTimeShift?: string; |
||||
filterByTraceID?: boolean; |
||||
filterBySpanID?: boolean; |
||||
query?: string; |
||||
customQuery: boolean; |
||||
} |
||||
|
||||
export interface TraceToLogsData extends DataSourceJsonData { |
||||
tracesToLogs?: TraceToLogsOptions; |
||||
tracesToLogsV2?: TraceToLogsOptionsV2; |
||||
} |
||||
|
||||
/** |
||||
* Gets the new version of the traceToLogs config from the json data, either returning it directly or transforming |
||||
* the old version to the new format and returning that. |
||||
*/ |
||||
export function getTraceToLogsOptions(data?: TraceToLogsData): TraceToLogsOptionsV2 | undefined { |
||||
if (data?.tracesToLogsV2) { |
||||
return data.tracesToLogsV2; |
||||
} |
||||
if (!data?.tracesToLogs) { |
||||
return undefined; |
||||
} |
||||
const traceToLogs: TraceToLogsOptionsV2 = { |
||||
customQuery: false, |
||||
}; |
||||
traceToLogs.datasourceUid = data.tracesToLogs.datasourceUid; |
||||
traceToLogs.tags = data.tracesToLogs.mapTagNamesEnabled |
||||
? data.tracesToLogs.mappedTags |
||||
: data.tracesToLogs.tags?.map((tag) => ({ key: tag })); |
||||
traceToLogs.filterByTraceID = data.tracesToLogs.filterByTraceID; |
||||
traceToLogs.filterBySpanID = data.tracesToLogs.filterBySpanID; |
||||
traceToLogs.spanStartTimeShift = data.tracesToLogs.spanStartTimeShift; |
||||
traceToLogs.spanEndTimeShift = data.tracesToLogs.spanEndTimeShift; |
||||
return traceToLogs; |
||||
} |
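A worked example of the transformation, using the same old-format settings as the test file above (with `mapTagNamesEnabled` false, plain tag names become `{ key }` objects and `customQuery` defaults to false):

const v2 = getTraceToLogsOptions({
  tracesToLogs: {
    datasourceUid: 'loki1_uid',
    tags: ['someTag'],
    mapTagNamesEnabled: false,
    spanStartTimeShift: '1m',
    spanEndTimeShift: '1m',
    filterByTraceID: true,
    filterBySpanID: true,
  },
});
// v2 => {
//   customQuery: false,
//   datasourceUid: 'loki1_uid',
//   tags: [{ key: 'someTag' }],
//   filterByTraceID: true,
//   filterBySpanID: true,
//   spanStartTimeShift: '1m',
//   spanEndTimeShift: '1m',
// }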
||||
|
||||
interface Props extends DataSourcePluginOptionsEditorProps<TraceToLogsData> {} |
||||
|
||||
export function TraceToLogsSettings({ options, onOptionsChange }: Props) { |
||||
const supportedDataSourceTypes = [ |
||||
'loki', |
||||
'elasticsearch', |
||||
'grafana-splunk-datasource', // external
|
||||
'grafana-opensearch-datasource', // external
|
||||
'grafana-falconlogscale-datasource', // external
|
||||
'googlecloud-logging-datasource', // external
|
||||
]; |
||||
|
||||
const traceToLogs = useMemo( |
||||
(): TraceToLogsOptionsV2 => getTraceToLogsOptions(options.jsonData) || { customQuery: false }, |
||||
[options.jsonData] |
||||
); |
||||
const { query = '', tags, customQuery } = traceToLogs; |
||||
|
||||
const updateTracesToLogs = useCallback( |
||||
(value: Partial<TraceToLogsOptionsV2>) => { |
||||
// Cannot use updateDatasourcePluginJsonDataOption here as we need to update 2 keys, and they would overwrite each
|
||||
// other as updateDatasourcePluginJsonDataOption isn't synchronized
|
||||
onOptionsChange({ |
||||
...options, |
||||
jsonData: { |
||||
...options.jsonData, |
||||
tracesToLogsV2: { |
||||
...traceToLogs, |
||||
...value, |
||||
}, |
||||
tracesToLogs: undefined, |
||||
}, |
||||
}); |
||||
}, |
||||
[onOptionsChange, options, traceToLogs] |
||||
); |
||||
|
||||
return ( |
||||
<div className={css({ width: '100%' })}> |
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="The logs data source the trace is going to navigate to" |
||||
label="Data source" |
||||
labelWidth={26} |
||||
> |
||||
<DataSourcePicker |
||||
inputId="trace-to-logs-data-source-picker" |
||||
filter={(ds) => supportedDataSourceTypes.includes(ds.type)} |
||||
current={traceToLogs.datasourceUid} |
||||
noDefault={true} |
||||
width={40} |
||||
onChange={(ds: DataSourceInstanceSettings) => |
||||
updateTracesToLogs({ |
||||
datasourceUid: ds.uid, |
||||
}) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<IntervalInput |
||||
label={getTimeShiftLabel('start')} |
||||
tooltip={getTimeShiftTooltip('start', '0')} |
||||
value={traceToLogs.spanStartTimeShift || ''} |
||||
onChange={(val) => { |
||||
updateTracesToLogs({ spanStartTimeShift: val }); |
||||
}} |
||||
isInvalidError={invalidTimeShiftError} |
||||
/> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<IntervalInput |
||||
label={getTimeShiftLabel('end')} |
||||
tooltip={getTimeShiftTooltip('end', '0')} |
||||
value={traceToLogs.spanEndTimeShift || ''} |
||||
onChange={(val) => { |
||||
updateTracesToLogs({ spanEndTimeShift: val }); |
||||
}} |
||||
isInvalidError={invalidTimeShiftError} |
||||
/> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="Tags that will be used in the query. Default tags: 'cluster', 'hostname', 'namespace', 'pod', 'service.name', 'service.namespace'" |
||||
label="Tags" |
||||
labelWidth={26} |
||||
> |
||||
<TagMappingInput values={tags ?? []} onChange={(v) => updateTracesToLogs({ tags: v })} /> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
<IdFilter |
||||
disabled={customQuery} |
||||
type={'trace'} |
||||
id={'filterByTraceID'} |
||||
value={Boolean(traceToLogs.filterByTraceID)} |
||||
onChange={(val) => updateTracesToLogs({ filterByTraceID: val })} |
||||
/> |
||||
<IdFilter |
||||
disabled={customQuery} |
||||
type={'span'} |
||||
id={'filterBySpanID'} |
||||
value={Boolean(traceToLogs.filterBySpanID)} |
||||
onChange={(val) => updateTracesToLogs({ filterBySpanID: val })} |
||||
/> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="Use a custom query with the possibility to interpolate variables from the trace or span" |
||||
label="Use custom query" |
||||
labelWidth={26} |
||||
> |
||||
<InlineSwitch |
||||
id={'customQuerySwitch'} |
||||
value={customQuery} |
||||
onChange={(event: React.SyntheticEvent<HTMLInputElement>) => |
||||
updateTracesToLogs({ customQuery: event.currentTarget.checked }) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
{customQuery && ( |
||||
<InlineField |
||||
label="Query" |
||||
labelWidth={26} |
||||
tooltip="The query that will run when navigating from a trace to logs data source. Interpolate tags using the `$__tags` keyword" |
||||
grow |
||||
> |
||||
<Input |
||||
label="Query" |
||||
type="text" |
||||
allowFullScreen |
||||
value={query} |
||||
onChange={(e) => updateTracesToLogs({ query: e.currentTarget.value })} |
||||
/> |
||||
</InlineField> |
||||
)} |
||||
</div> |
||||
); |
||||
} |
||||
|
||||
interface IdFilterProps { |
||||
type: 'trace' | 'span'; |
||||
id: string; |
||||
value: boolean; |
||||
onChange: (val: boolean) => void; |
||||
disabled: boolean; |
||||
} |
||||
function IdFilter(props: IdFilterProps) { |
||||
return ( |
||||
<InlineFieldRow> |
||||
<InlineField |
||||
disabled={props.disabled} |
||||
label={`Filter by ${props.type} ID`} |
||||
labelWidth={26} |
||||
grow |
||||
tooltip={`Filters logs by ${props.type} ID`} |
||||
> |
||||
<InlineSwitch |
||||
id={props.id} |
||||
value={props.value} |
||||
onChange={(event: React.SyntheticEvent<HTMLInputElement>) => props.onChange(event.currentTarget.checked)} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
); |
||||
} |
||||
|
||||
export const getTimeShiftLabel = (type: 'start' | 'end') => { |
||||
return `Span ${type} time shift`; |
||||
}; |
||||
|
||||
export const getTimeShiftTooltip = (type: 'start' | 'end', defaultVal: string) => { |
||||
return `Shifts the ${type} time of the span. Default: ${defaultVal} (Time units can be used here, for example: 5s, -1m, 3h)`; |
||||
}; |
||||
|
||||
export const invalidTimeShiftError = 'Invalid time shift. See tooltip for examples.'; |
||||
|
||||
export const TraceToLogsSection = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => { |
||||
let suffix = options.type; |
||||
suffix += options.type === 'tempo' ? '/configure-tempo-data-source/#trace-to-logs' : '/#trace-to-logs'; |
||||
|
||||
return ( |
||||
<ConfigSection |
||||
title="Trace to logs" |
||||
description={ |
||||
<ConfigDescriptionLink |
||||
description="Navigate from a trace span to the selected data source's logs." |
||||
suffix={suffix} |
||||
feature="trace to logs" |
||||
/> |
||||
} |
||||
isCollapsible={true} |
||||
isInitiallyOpen={true} |
||||
> |
||||
<TraceToLogsSettings options={options} onOptionsChange={onOptionsChange} /> |
||||
</ConfigSection> |
||||
); |
||||
}; |
@ -0,0 +1,240 @@ |
||||
import { css } from '@emotion/css'; |
||||
import React from 'react'; |
||||
|
||||
import { |
||||
DataSourceInstanceSettings, |
||||
DataSourceJsonData, |
||||
DataSourcePluginOptionsEditorProps, |
||||
GrafanaTheme2, |
||||
updateDatasourcePluginJsonDataOption, |
||||
} from '@grafana/data'; |
||||
import { ConfigSection } from '@grafana/experimental'; |
||||
import { DataSourcePicker } from '@grafana/runtime'; |
||||
import { Button, InlineField, InlineFieldRow, Input, useStyles2 } from '@grafana/ui'; |
||||
|
||||
import { ConfigDescriptionLink } from '../ConfigDescriptionLink'; |
||||
import { IntervalInput } from '../IntervalInput/IntervalInput'; |
||||
import { TagMappingInput } from '../TraceToLogs/TagMappingInput'; |
||||
import { getTimeShiftLabel, getTimeShiftTooltip, invalidTimeShiftError } from '../TraceToLogs/TraceToLogsSettings'; |
||||
|
||||
export interface TraceToMetricsOptions { |
||||
datasourceUid?: string; |
||||
tags?: Array<{ key: string; value: string }>; |
||||
queries: TraceToMetricQuery[]; |
||||
spanStartTimeShift?: string; |
||||
spanEndTimeShift?: string; |
||||
} |
||||
|
||||
export interface TraceToMetricQuery { |
||||
name?: string; |
||||
query?: string; |
||||
} |
||||
|
||||
export interface TraceToMetricsData extends DataSourceJsonData { |
||||
tracesToMetrics?: TraceToMetricsOptions; |
||||
} |
||||
|
||||
interface Props extends DataSourcePluginOptionsEditorProps<TraceToMetricsData> {} |
||||
|
||||
export function TraceToMetricsSettings({ options, onOptionsChange }: Props) { |
||||
const styles = useStyles2(getStyles); |
||||
|
||||
return ( |
||||
<div className={css({ width: '100%' })}> |
||||
<InlineFieldRow className={styles.row}> |
||||
<InlineField |
||||
tooltip="The Prometheus data source the trace is going to navigate to" |
||||
label="Data source" |
||||
labelWidth={26} |
||||
> |
||||
<DataSourcePicker |
||||
inputId="trace-to-metrics-data-source-picker" |
||||
pluginId="prometheus" |
||||
current={options.jsonData.tracesToMetrics?.datasourceUid} |
||||
noDefault={true} |
||||
width={40} |
||||
onChange={(ds: DataSourceInstanceSettings) => |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
datasourceUid: ds.uid, |
||||
}) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
{options.jsonData.tracesToMetrics?.datasourceUid ? ( |
||||
<Button |
||||
type="button" |
||||
variant="secondary" |
||||
size="sm" |
||||
fill="text" |
||||
onClick={() => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
datasourceUid: undefined, |
||||
}); |
||||
}} |
||||
> |
||||
Clear |
||||
</Button> |
||||
) : null} |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<IntervalInput |
||||
label={getTimeShiftLabel('start')} |
||||
tooltip={getTimeShiftTooltip('start', '-2m')} |
||||
value={options.jsonData.tracesToMetrics?.spanStartTimeShift || ''} |
||||
onChange={(val) => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
spanStartTimeShift: val, |
||||
}); |
||||
}} |
||||
placeholder={'-2m'} |
||||
isInvalidError={invalidTimeShiftError} |
||||
/> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<IntervalInput |
||||
label={getTimeShiftLabel('end')} |
||||
tooltip={getTimeShiftTooltip('end', '2m')} |
||||
value={options.jsonData.tracesToMetrics?.spanEndTimeShift || ''} |
||||
onChange={(val) => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
spanEndTimeShift: val, |
||||
}); |
||||
}} |
||||
placeholder={'2m'} |
||||
isInvalidError={invalidTimeShiftError} |
||||
/> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField tooltip="Tags that will be used in the metrics query" label="Tags" labelWidth={26}> |
||||
<TagMappingInput |
||||
values={options.jsonData.tracesToMetrics?.tags ?? []} |
||||
onChange={(v) => |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
tags: v, |
||||
}) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
{options.jsonData.tracesToMetrics?.queries?.map((query, i) => ( |
||||
<div key={i} className={styles.queryRow}> |
||||
<InlineField label="Link Label" labelWidth={26} tooltip="Descriptive label for the linked query"> |
||||
<Input |
||||
label="Link Label" |
||||
type="text" |
||||
allowFullScreen |
||||
value={query.name} |
||||
width={40} |
||||
onChange={(e) => { |
||||
let newQueries = options.jsonData.tracesToMetrics?.queries.slice() ?? []; |
||||
newQueries[i].name = e.currentTarget.value; |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
queries: newQueries, |
||||
}); |
||||
}} |
||||
/> |
||||
</InlineField> |
||||
<InlineField |
||||
label="Query" |
||||
labelWidth={10} |
||||
tooltip="The Prometheus query that will run when navigating from a trace to metrics. Interpolate tags using the `$__tags` keyword" |
||||
grow |
||||
> |
||||
<Input |
||||
label="Query" |
||||
type="text" |
||||
allowFullScreen |
||||
value={query.query} |
||||
onChange={(e) => { |
||||
let newQueries = options.jsonData.tracesToMetrics?.queries.slice() ?? []; |
||||
newQueries[i].query = e.currentTarget.value; |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
queries: newQueries, |
||||
}); |
||||
}} |
||||
/> |
||||
</InlineField> |
||||
|
||||
<Button |
||||
variant="destructive" |
||||
title="Remove query" |
||||
icon="times" |
||||
type="button" |
||||
onClick={() => { |
||||
let newQueries = options.jsonData.tracesToMetrics?.queries.slice(); |
||||
newQueries?.splice(i, 1); |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
queries: newQueries, |
||||
}); |
||||
}} |
||||
/> |
||||
</div> |
||||
))} |
||||
|
||||
<Button |
||||
variant="secondary" |
||||
title="Add query" |
||||
icon="plus" |
||||
type="button" |
||||
onClick={() => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToMetrics', { |
||||
...options.jsonData.tracesToMetrics, |
||||
queries: [...(options.jsonData.tracesToMetrics?.queries ?? []), { query: '' }], |
||||
}); |
||||
}} |
||||
> |
||||
Add query |
||||
</Button> |
||||
</div> |
||||
); |
||||
} |
||||
|
||||
export const TraceToMetricsSection = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => { |
||||
let suffix = options.type; |
||||
suffix += options.type === 'tempo' ? '/configure-tempo-data-source/#trace-to-metrics' : '/#trace-to-metrics'; |
||||
|
||||
return ( |
||||
<ConfigSection |
||||
title="Trace to metrics" |
||||
description={ |
||||
<ConfigDescriptionLink |
||||
description="Navigate from a trace span to the selected data source's metrics." |
||||
suffix={suffix} |
||||
feature="trace to metrics" |
||||
/> |
||||
} |
||||
isCollapsible={true} |
||||
isInitiallyOpen={true} |
||||
> |
||||
<TraceToMetricsSettings options={options} onOptionsChange={onOptionsChange} /> |
||||
</ConfigSection> |
||||
); |
||||
}; |
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({ |
||||
infoText: css` |
||||
padding-bottom: ${theme.spacing(2)}; |
||||
color: ${theme.colors.text.secondary}; |
||||
`,
|
||||
row: css` |
||||
label: row; |
||||
align-items: baseline; |
||||
`,
|
||||
queryRow: css` |
||||
label: queryRow; |
||||
display: flex; |
||||
flex-flow: wrap; |
||||
`,
|
||||
}); |
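For orientation, a hedged sketch of the `jsonData` this editor writes; the UID, tag mapping, and metric name are illustrative only, and `$__tags` is the keyword interpolated from the mapped tags at link time:

const exampleJsonData: TraceToMetricsData = {
  tracesToMetrics: {
    datasourceUid: 'my-prometheus-uid', // assumed UID of a Prometheus data source
    tags: [{ key: 'service.name', value: 'service' }],
    spanStartTimeShift: '-2m',
    spanEndTimeShift: '2m',
    queries: [
      // The metric name is only an example of a span-metrics series.
      { name: 'Request rate', query: 'sum(rate(traces_spanmetrics_calls_total{$__tags}[5m]))' },
    ],
  },
};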
@ -0,0 +1,53 @@ |
||||
import { render, screen, waitFor } from '@testing-library/react'; |
||||
import React from 'react'; |
||||
|
||||
import { DataSourceInstanceSettings, DataSourceSettings } from '@grafana/data'; |
||||
import { DataSourceSrv, setDataSourceSrv } from '@grafana/runtime'; |
||||
|
||||
import { TraceToProfilesData, TraceToProfilesSettings } from './TraceToProfilesSettings'; |
||||
|
||||
const defaultOption: DataSourceSettings<TraceToProfilesData> = { |
||||
jsonData: { |
||||
tracesToProfiles: { |
||||
datasourceUid: 'profiling1_uid', |
||||
tags: [{ key: 'someTag', value: 'newName' }], |
||||
customQuery: true, |
||||
query: '{${__tags}}', |
||||
}, |
||||
}, |
||||
} as unknown as DataSourceSettings<TraceToProfilesData>; |
||||
|
||||
const pyroSettings = { |
||||
uid: 'profiling1_uid', |
||||
name: 'profiling1', |
||||
type: 'grafana-pyroscope-datasource', |
||||
meta: { info: { logos: { small: '' } } }, |
||||
} as unknown as DataSourceInstanceSettings; |
||||
|
||||
describe('TraceToProfilesSettings', () => { |
||||
beforeAll(() => { |
||||
setDataSourceSrv({ |
||||
getList() { |
||||
return [pyroSettings]; |
||||
}, |
||||
getInstanceSettings() { |
||||
return pyroSettings; |
||||
}, |
||||
} as unknown as DataSourceSrv); |
||||
}); |
||||
|
||||
it('should render without error', () => { |
||||
waitFor(() => { |
||||
expect(() => |
||||
render(<TraceToProfilesSettings options={defaultOption} onOptionsChange={() => {}} />) |
||||
).not.toThrow(); |
||||
}); |
||||
}); |
||||
|
||||
it('should render all options', () => { |
||||
render(<TraceToProfilesSettings options={defaultOption} onOptionsChange={() => {}} />); |
||||
expect(screen.getByText('Tags')).toBeInTheDocument(); |
||||
expect(screen.getByText('Profile type')).toBeInTheDocument(); |
||||
expect(screen.getByText('Use custom query')).toBeInTheDocument(); |
||||
}); |
||||
}); |
@ -0,0 +1,186 @@ |
||||
import { css } from '@emotion/css'; |
||||
import React, { useEffect, useMemo, useState } from 'react'; |
||||
import { useAsync } from 'react-use'; |
||||
|
||||
import { |
||||
DataSourceJsonData, |
||||
DataSourceInstanceSettings, |
||||
DataSourcePluginOptionsEditorProps, |
||||
updateDatasourcePluginJsonDataOption, |
||||
} from '@grafana/data'; |
||||
import { ConfigSection } from '@grafana/experimental'; |
||||
import { DataSourcePicker, getDataSourceSrv } from '@grafana/runtime'; |
||||
import { InlineField, InlineFieldRow, Input, InlineSwitch } from '@grafana/ui'; |
||||
|
||||
import { ConfigDescriptionLink } from '../ConfigDescriptionLink'; |
||||
import { TagMappingInput } from '../TraceToLogs/TagMappingInput'; |
||||
import { ProfileTypesCascader } from '../pyroscope/ProfileTypesCascader'; |
||||
import { PyroscopeDataSource } from '../pyroscope/datasource'; |
||||
import { ProfileTypeMessage } from '../pyroscope/types'; |
||||
|
||||
export interface TraceToProfilesOptions { |
||||
datasourceUid?: string; |
||||
tags?: Array<{ key: string; value?: string }>; |
||||
query?: string; |
||||
profileTypeId?: string; |
||||
customQuery: boolean; |
||||
} |
||||
|
||||
export interface TraceToProfilesData extends DataSourceJsonData { |
||||
tracesToProfiles?: TraceToProfilesOptions; |
||||
} |
||||
|
||||
interface Props extends DataSourcePluginOptionsEditorProps<TraceToProfilesData> {} |
||||
|
||||
export function TraceToProfilesSettings({ options, onOptionsChange }: Props) { |
||||
const supportedDataSourceTypes = useMemo(() => ['grafana-pyroscope-datasource'], []); |
||||
|
||||
const [profileTypes, setProfileTypes] = useState<ProfileTypeMessage[]>([]); |
||||
const profileTypesPlaceholder = useMemo(() => { |
||||
let placeholder = profileTypes.length === 0 ? 'No profile types found' : 'Select profile type'; |
||||
if (!options.jsonData.tracesToProfiles?.datasourceUid) { |
||||
placeholder = 'Please select a profiling data source'; |
||||
} |
||||
return placeholder; |
||||
}, [options.jsonData.tracesToProfiles?.datasourceUid, profileTypes]); |
||||
|
||||
const { value: dataSource } = useAsync(async () => { |
||||
return await getDataSourceSrv().get(options.jsonData.tracesToProfiles?.datasourceUid); |
||||
}, [options.jsonData.tracesToProfiles?.datasourceUid]); |
||||
|
||||
useEffect(() => { |
||||
if ( |
||||
dataSource && |
||||
dataSource instanceof PyroscopeDataSource && |
||||
supportedDataSourceTypes.includes(dataSource.type) && |
||||
dataSource.uid === options.jsonData.tracesToProfiles?.datasourceUid |
||||
) { |
||||
dataSource.getProfileTypes().then((profileTypes) => { |
||||
setProfileTypes(profileTypes); |
||||
}); |
||||
} else { |
||||
setProfileTypes([]); |
||||
} |
||||
}, [dataSource, onOptionsChange, options, supportedDataSourceTypes]); |
||||
|
||||
return ( |
||||
<div className={css({ width: '100%' })}> |
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="The profiles data source the trace is going to navigate to" |
||||
label="Data source" |
||||
labelWidth={26} |
||||
> |
||||
<DataSourcePicker |
||||
inputId="trace-to-profiles-data-source-picker" |
||||
filter={(ds) => supportedDataSourceTypes.includes(ds.type)} |
||||
current={options.jsonData.tracesToProfiles?.datasourceUid} |
||||
noDefault={true} |
||||
width={40} |
||||
onChange={(ds: DataSourceInstanceSettings) => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToProfiles', { |
||||
...options.jsonData.tracesToProfiles, |
||||
datasourceUid: ds.uid, |
||||
}); |
||||
}} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="Tags that will be used in the query. Default tags: 'service.name', 'service.namespace'" |
||||
label="Tags" |
||||
labelWidth={26} |
||||
> |
||||
<TagMappingInput |
||||
values={options.jsonData.tracesToProfiles?.tags ?? []} |
||||
onChange={(v) => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToProfiles', { |
||||
...options.jsonData.tracesToProfiles, |
||||
tags: v, |
||||
}); |
||||
}} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField tooltip="Profile type that will be used in the query" label="Profile type" labelWidth={26}> |
||||
<ProfileTypesCascader |
||||
profileTypes={profileTypes} |
||||
placeholder={profileTypesPlaceholder} |
||||
initialProfileTypeId={options.jsonData.tracesToProfiles?.profileTypeId} |
||||
onChange={(val) => { |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToProfiles', { |
||||
...options.jsonData.tracesToProfiles, |
||||
profileTypeId: val, |
||||
}); |
||||
}} |
||||
width={40} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
<InlineFieldRow> |
||||
<InlineField |
||||
tooltip="Use a custom query with the possibility to interpolate variables from the trace or span" |
||||
label="Use custom query" |
||||
labelWidth={26} |
||||
> |
||||
<InlineSwitch |
||||
id={'profilesCustomQuerySwitch'} |
||||
value={options.jsonData.tracesToProfiles?.customQuery} |
||||
onChange={(event: React.SyntheticEvent<HTMLInputElement>) => |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToProfiles', { |
||||
...options.jsonData.tracesToProfiles, |
||||
customQuery: event.currentTarget.checked, |
||||
}) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
</InlineFieldRow> |
||||
|
||||
{options.jsonData.tracesToProfiles?.customQuery && ( |
||||
<InlineField |
||||
label="Query" |
||||
labelWidth={26} |
||||
tooltip="The query that will run when navigating from a trace to profiles data source. Interpolate tags using the `$__tags` keyword" |
||||
grow |
||||
> |
||||
<Input |
||||
label="Query" |
||||
type="text" |
||||
allowFullScreen |
||||
value={options.jsonData.tracesToProfiles?.query || ''} |
||||
onChange={(e) => |
||||
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'tracesToProfiles', { |
||||
...options.jsonData.tracesToProfiles, |
||||
query: e.currentTarget.value, |
||||
}) |
||||
} |
||||
/> |
||||
</InlineField> |
||||
)} |
||||
</div> |
||||
); |
||||
} |
||||
|
||||
export const TraceToProfilesSection = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => { |
||||
return ( |
||||
<ConfigSection |
||||
title="Trace to profiles" |
||||
description={ |
||||
<ConfigDescriptionLink |
||||
description="Navigate from a trace span to the selected data source's profiles." |
||||
suffix={`${options.type}/#trace-to-profiles`} |
||||
feature="trace to profiles" |
||||
/> |
||||
} |
||||
isCollapsible={true} |
||||
isInitiallyOpen={true} |
||||
> |
||||
<TraceToProfilesSettings options={options} onOptionsChange={onOptionsChange} /> |
||||
</ConfigSection> |
||||
); |
||||
}; |
@ -0,0 +1,8 @@ |
||||
/** |
||||
* A library containing logic to manage traces. |
||||
* |
||||
* @packageDocumentation |
||||
*/ |
||||
type Props = {}; |
||||
|
||||
export { Props }; |
@ -0,0 +1,12 @@ |
||||
/** |
||||
* A library containing logic to manage traces. |
||||
* |
||||
* @packageDocumentation |
||||
*/ |
||||
|
||||
export * from './IntervalInput/IntervalInput'; |
||||
export * from './TraceToLogs/TagMappingInput'; |
||||
export * from './TraceToLogs/TraceToLogsSettings'; |
||||
export * from './TraceToMetrics/TraceToMetricsSettings'; |
||||
export * from './TraceToProfiles/TraceToProfilesSettings'; |
||||
export * from './utils'; |
@ -0,0 +1,85 @@ |
||||
import React, { useEffect, useMemo, useState } from 'react'; |
||||
|
||||
import { Cascader, CascaderOption } from '@grafana/ui'; |
||||
|
||||
import { PyroscopeDataSource } from './datasource'; |
||||
import { ProfileTypeMessage } from './types'; |
||||
|
||||
type Props = { |
||||
initialProfileTypeId?: string; |
||||
profileTypes?: ProfileTypeMessage[]; |
||||
onChange: (value: string) => void; |
||||
placeholder?: string; |
||||
width?: number; |
||||
}; |
||||
|
||||
export function ProfileTypesCascader(props: Props) { |
||||
const cascaderOptions = useCascaderOptions(props.profileTypes); |
||||
|
||||
return ( |
||||
<Cascader |
||||
placeholder={props.placeholder} |
||||
separator={'-'} |
||||
displayAllSelectedLevels={true} |
||||
initialValue={props.initialProfileTypeId} |
||||
allowCustomValue={true} |
||||
onSelect={props.onChange} |
||||
options={cascaderOptions} |
||||
changeOnSelect={false} |
||||
width={props.width ?? 26} |
||||
/> |
||||
); |
||||
} |
||||
|
||||
// Turn profileTypes into cascader options
|
||||
function useCascaderOptions(profileTypes?: ProfileTypeMessage[]): CascaderOption[] { |
||||
return useMemo(() => { |
||||
if (!profileTypes) { |
||||
return []; |
||||
} |
||||
let mainTypes = new Map<string, CascaderOption>(); |
||||
// Classify profile types by name then sample type.
|
||||
// The profileTypes are something like cpu:sample:nanoseconds:sample:count or app.something.something
|
||||
for (let profileType of profileTypes) { |
||||
let parts: string[] = []; |
||||
if (profileType.id.indexOf(':') > -1) { |
||||
parts = profileType.id.split(':'); |
||||
} |
||||
|
||||
const [name, type] = parts; |
||||
|
||||
if (!mainTypes.has(name)) { |
||||
mainTypes.set(name, { |
||||
label: name, |
||||
value: name, |
||||
items: [], |
||||
}); |
||||
} |
||||
mainTypes.get(name)?.items!.push({ |
||||
label: type, |
||||
value: profileType.id, |
||||
}); |
||||
} |
||||
return Array.from(mainTypes.values()); |
||||
}, [profileTypes]); |
||||
} |
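An illustration of the grouping performed above; `useCascaderOptions` is internal to this file, so the result is shown as data, and the profile type ID is a typical Pyroscope-style example:

const input: ProfileTypeMessage[] = [
  { id: 'process_cpu:cpu:nanoseconds:cpu:nanoseconds', label: 'process_cpu - cpu' },
];
// useCascaderOptions(input) =>
// [
//   {
//     label: 'process_cpu',
//     value: 'process_cpu',
//     items: [{ label: 'cpu', value: 'process_cpu:cpu:nanoseconds:cpu:nanoseconds' }],
//   },
// ]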
||||
|
||||
/** |
||||
* Loads the profile types. |
||||
* |
||||
* This is exported and not used directly in the ProfileTypesCascader component because in some cases we need to know |
||||
* the profileTypes before rendering the cascader. |
||||
* @param datasource |
||||
*/ |
||||
export function useProfileTypes(datasource: PyroscopeDataSource) { |
||||
const [profileTypes, setProfileTypes] = useState<ProfileTypeMessage[]>(); |
||||
|
||||
useEffect(() => { |
||||
(async () => { |
||||
const profileTypes = await datasource.getProfileTypes(); |
||||
setProfileTypes(profileTypes); |
||||
})(); |
||||
}, [datasource]); |
||||
|
||||
return profileTypes; |
||||
} |
@ -0,0 +1,44 @@ |
// Code generated - EDITING IS FUTILE. DO NOT EDIT. |
// |
// Generated by: |
// public/app/plugins/gen.go |
// Using jennies: |
// TSTypesJenny |
// PluginTSTypesJenny |
// |
// Run 'make gen-cue' from repository root to regenerate. |
 |
import * as common from '@grafana/schema'; |
 |
export type PyroscopeQueryType = ('metrics' | 'profile' | 'both'); |
 |
export const defaultPyroscopeQueryType: PyroscopeQueryType = 'both'; |
 |
export interface GrafanaPyroscope extends common.DataQuery { |
  /** |
   * Allows to group the results. |
   */ |
  groupBy: Array<string>; |
  /** |
   * Specifies the query label selectors. |
   */ |
  labelSelector: string; |
  /** |
   * Sets the maximum number of nodes in the flamegraph. |
   */ |
  maxNodes?: number; |
  /** |
   * Specifies the type of profile to query. |
   */ |
  profileTypeId: string; |
  /** |
   * Specifies the query span selectors. |
   */ |
  spanSelector?: Array<string>; |
} |
 |
export const defaultGrafanaPyroscope: Partial<GrafanaPyroscope> = { |
  groupBy: [], |
  labelSelector: '{}', |
  spanSelector: [], |
}; |
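Together with defaultGrafanaPyroscope, the generated interface is enough to construct a complete query object. A minimal sketch, assuming refId and queryType are inherited from common.DataQuery and using illustrative selector and profile type values:

import { defaultGrafanaPyroscope, defaultPyroscopeQueryType, GrafanaPyroscope } from './dataquery.gen';

// Spread the generated defaults first, then fill in the required fields.
const exampleQuery: GrafanaPyroscope = {
  ...defaultGrafanaPyroscope,
  refId: 'A', // inherited from common.DataQuery
  queryType: defaultPyroscopeQueryType, // 'both' is the generated default
  labelSelector: '{service_name="checkout"}', // illustrative selector
  profileTypeId: 'process_cpu:cpu:nanoseconds:cpu:nanoseconds', // illustrative profile type
  groupBy: ['pod'], // illustrative grouping label
  maxNodes: 16384,
};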
@ -0,0 +1,28 @@ |
import { Observable } from 'rxjs'; |
 |
import { AbstractQuery, CoreApp, DataQueryRequest, DataQueryResponse, ScopedVars } from '@grafana/data'; |
import { DataSourceWithBackend } from '@grafana/runtime'; |
 |
import { PyroscopeDataSourceOptions, Query, ProfileTypeMessage } from './types'; |
 |
export abstract class PyroscopeDataSource extends DataSourceWithBackend<Query, PyroscopeDataSourceOptions> { |
  abstract query(request: DataQueryRequest<Query>): Observable<DataQueryResponse>; |
 |
  abstract getProfileTypes(): Promise<ProfileTypeMessage[]>; |
 |
  abstract getLabelNames(query: string, start: number, end: number): Promise<string[]>; |
 |
  abstract getLabelValues(query: string, label: string, start: number, end: number): Promise<string[]>; |
 |
  abstract applyTemplateVariables(query: Query, scopedVars: ScopedVars): Query; |
 |
  abstract importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<Query[]>; |
 |
  abstract importFromAbstractQuery(labelBasedQuery: AbstractQuery): Query; |
 |
  abstract exportToAbstractQueries(queries: Query[]): Promise<AbstractQuery[]>; |
 |
  abstract exportToAbstractQuery(query: Query): AbstractQuery; |
 |
  abstract getDefaultQuery(app: CoreApp): Partial<Query>; |
} |
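Keeping the data source abstract here means these duplicated dependencies never import the concrete Pyroscope implementation; code in this package only relies on the surface above. A minimal sketch of such a consumer (a hypothetical helper, not part of this diff):

import { CoreApp } from '@grafana/data';

import { PyroscopeDataSource } from './datasource';
import { Query } from './types';

// Builds a starting query using only the abstract API, so it works with any
// concrete PyroscopeDataSource implementation injected at runtime.
async function buildInitialQuery(ds: PyroscopeDataSource, app: CoreApp): Promise<Query> {
  const profileTypes = await ds.getProfileTypes();
  const defaults = ds.getDefaultQuery(app);

  return {
    ...defaults,
    refId: 'A',
    queryType: 'profile',
    groupBy: defaults.groupBy ?? [],
    labelSelector: defaults.labelSelector ?? '{}',
    profileTypeId: defaults.profileTypeId ?? profileTypes[0]?.id ?? '',
  };
}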
@ -0,0 +1,16 @@ |
import { DataSourceJsonData } from '@grafana/data'; |
 |
import { GrafanaPyroscope, PyroscopeQueryType } from './dataquery.gen'; |
 |
export interface ProfileTypeMessage { |
  id: string; |
  label: string; |
} |
 |
export interface PyroscopeDataSourceOptions extends DataSourceJsonData { |
  minStep?: string; |
} |
 |
export interface Query extends GrafanaPyroscope { |
  queryType: PyroscopeQueryType; |
} |
@ -0,0 +1,118 @@ |
/** |
 * Get the non-overlapping duration of the ranges, as they can overlap or have gaps. |
 */ |
import { FieldType, MutableDataFrame, NodeGraphDataFrameFieldNames as Fields } from '@grafana/data'; |
 |
export function getNonOverlappingDuration(ranges: Array<[number, number]>): number { |
  ranges.sort((a, b) => a[0] - b[0]); |
  const mergedRanges = ranges.reduce<Array<[number, number]>>((acc, range) => { |
    if (!acc.length) { |
      return [range]; |
    } |
    const tail = acc.slice(-1)[0]; |
    const [prevStart, prevEnd] = tail; |
    const [start, end] = range; |
    if (end < prevEnd) { |
      // In this case the range is completely inside the previous range, so we can just ignore it. |
      return acc; |
    } |
 |
    if (start > prevEnd) { |
      // There is no overlap, so we can just add it to the stack. |
      return [...acc, range]; |
    } |
 |
    // We know there is overlap and the current range ends later than the previous one, so we can extend the range. |
    return [...acc.slice(0, -1), [prevStart, end]]; |
  }, []); |
 |
  return mergedRanges.reduce((acc, range) => { |
    return acc + (range[1] - range[0]); |
  }, 0); |
} |
 |
/** |
 * Returns a map of the spans with a children array for easier processing. It will also contain empty spans in case a |
 * span is missing but other spans are its children. This is more generic because it needs to allow iterating over |
 * both arrays and dataframe views. |
 */ |
export function makeSpanMap<T>(getSpan: (index: number) => { span: T; id: string; parentIds: string[] } | undefined): { |
  [id: string]: { span: T; children: string[] }; |
} { |
  const spanMap: { [id: string]: { span?: T; children: string[] } } = {}; |
 |
  let span; |
  for (let index = 0; (span = getSpan(index)), !!span; index++) { |
    if (!spanMap[span.id]) { |
      spanMap[span.id] = { |
        span: span.span, |
        children: [], |
      }; |
    } else { |
      spanMap[span.id].span = span.span; |
    } |
 |
    for (const parentId of span.parentIds) { |
      if (parentId) { |
        if (!spanMap[parentId]) { |
          spanMap[parentId] = { |
            span: undefined, |
            children: [span.id], |
          }; |
        } else { |
          spanMap[parentId].children.push(span.id); |
        } |
      } |
    } |
  } |
  return spanMap as { [id: string]: { span: T; children: string[] } }; |
} |
 |
export function getStats(duration: number, traceDuration: number, selfDuration: number) { |
  return { |
    main: `${toFixedNoTrailingZeros(duration)}ms (${toFixedNoTrailingZeros((duration / traceDuration) * 100)}%)`, |
    secondary: `${toFixedNoTrailingZeros(selfDuration)}ms (${toFixedNoTrailingZeros( |
      (selfDuration / duration) * 100 |
    )}%)`, |
  }; |
} |
 |
function toFixedNoTrailingZeros(n: number) { |
  return parseFloat(n.toFixed(2)); |
} |
 |
/** |
 * Create default frames used when returning data for node graph. |
 */ |
export function makeFrames() { |
  const nodesFrame = new MutableDataFrame({ |
    fields: [ |
      { name: Fields.id, type: FieldType.string }, |
      { name: Fields.title, type: FieldType.string }, |
      { name: Fields.subTitle, type: FieldType.string }, |
      { name: Fields.mainStat, type: FieldType.string, config: { displayName: 'Total time (% of trace)' } }, |
      { name: Fields.secondaryStat, type: FieldType.string, config: { displayName: 'Self time (% of total)' } }, |
      { |
        name: Fields.color, |
        type: FieldType.number, |
        config: { color: { mode: 'continuous-GrYlRd' }, displayName: 'Self time / Trace duration' }, |
      }, |
    ], |
    meta: { |
      preferredVisualisationType: 'nodeGraph', |
    }, |
  }); |
 |
  const edgesFrame = new MutableDataFrame({ |
    fields: [ |
      { name: Fields.id, type: FieldType.string }, |
      { name: Fields.target, type: FieldType.string }, |
      { name: Fields.source, type: FieldType.string }, |
    ], |
    meta: { |
      preferredVisualisationType: 'nodeGraph', |
    }, |
  }); |
 |
  return [nodesFrame, edgesFrame]; |
} |
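The three helpers above are easiest to read with concrete values. A minimal sketch, with the import path './tracing' assumed for illustration:

import { getNonOverlappingDuration, getStats, makeSpanMap } from './tracing'; // assumed path for the file above

// [1, 5] and [3, 8] overlap and are merged into [1, 8] (7ms); [10, 12] adds another 2ms.
const covered = getNonOverlappingDuration([
  [1, 5],
  [3, 8],
  [10, 12],
]); // 9

// makeSpanMap is driven by an index-based getter, so the same code works for plain
// arrays and for DataFrame views.
type Span = { id: string; parentId?: string };
const spans: Span[] = [
  { id: 'root' },
  { id: 'child', parentId: 'root' },
  { id: 'orphan', parentId: 'missing' },
];

const spanMap = makeSpanMap<Span>((index) => {
  const span = spans[index];
  return span ? { span, id: span.id, parentIds: span.parentId ? [span.parentId] : [] } : undefined;
});
// spanMap['root'].children is ['child'], and spanMap['missing'] is created as a
// placeholder whose children are ['orphan'].

// getStats formats node-graph stats: total time as % of the trace, self time as % of the total.
const stats = getStats(4, 10, 2); // { main: '4ms (40%)', secondary: '2ms (50%)' }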
@ -0,0 +1,27 @@ |
import { Store } from 'redux'; |
 |
export let store: Store<StoreState>; |
export const initialKeyedVariablesState: any = { keys: {} }; |
 |
type StoreState = ReturnType<ReturnType<any>>; |
 |
export function setStore(newStore: Store<StoreState>) { |
  store = newStore; |
} |
 |
export function getState(): StoreState { |
  if (!store || !store.getState) { |
    return { templating: { ...initialKeyedVariablesState, lastKey: 'key' } } as StoreState; // used by tests |
  } |
 |
  return store.getState(); |
} |
 |
// This was `any` before |
export function dispatch(action: any) { |
  if (!store || !store.getState) { |
    return; |
  } |
 |
  return store.dispatch(action); |
} |
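Until setStore is called, getState falls back to the static templating state and dispatch is a no-op, which is what the comment about tests refers to. A minimal sketch of wiring in a real store, with a stand-in reducer:

import { configureStore } from '@reduxjs/toolkit';

import { dispatch, getState, setStore } from './store'; // assumed path for the file above

// Before setStore: getState() returns the static fallback and dispatch() is a no-op.
getState(); // { templating: { keys: {}, lastKey: 'key' } }

// After setStore: both functions proxy to the provided store.
const store = configureStore({
  reducer: {
    templating: (state = { keys: {}, lastKey: 'key' }) => state, // stand-in reducer for illustration
  },
});
setStore(store);
dispatch({ type: 'noop' });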
@ -0,0 +1,15 @@ |
import { FetchResponse } from '@grafana/runtime'; |
 |
export function createFetchResponse<T>(data: T): FetchResponse<T> { |
  return { |
    data, |
    status: 200, |
    url: 'http://localhost:3000/api/ds/query', |
    config: { url: 'http://localhost:3000/api/ds/query' }, |
    type: 'basic', |
    statusText: 'Ok', |
    redirected: false, |
    headers: {} as unknown as Headers, |
    ok: true, |
  }; |
} |
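In tests the helper is typically wrapped in an observable so the mocked fetch matches the shape returned by the backend service. A minimal sketch with an illustrative payload:

import { of } from 'rxjs';

import { createFetchResponse } from './createFetchResponse'; // assumed path for the helper above

// An illustrative payload wrapped in a 200 FetchResponse and emitted as an observable,
// the way a mocked fetch would return it.
const response$ = of(createFetchResponse({ results: { A: { frames: [] } } }));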
@ -0,0 +1,8 @@ |
import { waitFor } from '@testing-library/react'; |
import { select } from 'react-select-event'; |
 |
// Used to select an option or options from a Select in unit tests |
export const selectOptionInTest = async ( |
  input: HTMLElement, |
  optionOrOptions: string | RegExp | Array<string | RegExp> |
) => await waitFor(() => select(input, optionOrOptions, { container: document.body })); |
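A minimal sketch of how the helper is used inside a test, assuming a component with the referenced Selects has already been rendered in the test setup:

import { screen } from '@testing-library/react';

import { selectOptionInTest } from './selectOptionInTest'; // assumed path for the helper above

it('selects options through the shared helper', async () => {
  // render(<QueryEditor ... />) is assumed to have happened in the test setup.
  await selectOptionInTest(screen.getByLabelText('Profile type'), 'cpu');
  // Multiple options or regular expressions are also accepted.
  await selectOptionInTest(screen.getByLabelText('Tags'), [/http/, 'service.name']);
});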
@ -0,0 +1,8 @@ |
// import { AdHocVariableFilter } from '../../../features/variables/types'; |
export interface AdHocVariableFilter { |
  key: string; |
  operator: string; |
  value: string; |
  /** @deprecated */ |
  condition?: string; |
} |
@ -0,0 +1,155 @@ |
import { omitBy } from 'lodash'; |
 |
import { deprecationWarning, urlUtil } from '@grafana/data'; |
import { BackendSrvRequest } from '@grafana/runtime'; |
 |
export const parseInitFromOptions = (options: BackendSrvRequest): RequestInit => { |
  const method = options.method; |
  const headers = parseHeaders(options); |
  const isAppJson = isContentTypeApplicationJson(headers); |
  const body = parseBody(options, isAppJson); |
  const credentials = parseCredentials(options); |
 |
  return { |
    method, |
    headers, |
    body, |
    credentials, |
  }; |
}; |
 |
interface HeaderParser { |
  canParse: (options: BackendSrvRequest) => boolean; |
  parse: (headers: Headers) => Headers; |
} |
 |
const defaultHeaderParser: HeaderParser = { |
  canParse: () => true, |
  parse: (headers) => { |
    const accept = headers.get('accept'); |
    if (accept) { |
      return headers; |
    } |
 |
    headers.set('accept', 'application/json, text/plain, */*'); |
    return headers; |
  }, |
}; |
 |
const parseHeaderByMethodFactory = (methodPredicate: string): HeaderParser => ({ |
  canParse: (options) => { |
    const method = options?.method ? options?.method.toLowerCase() : ''; |
    return method === methodPredicate; |
  }, |
  parse: (headers) => { |
    const contentType = headers.get('content-type'); |
    if (contentType) { |
      return headers; |
    } |
 |
    headers.set('content-type', 'application/json'); |
    return headers; |
  }, |
}); |
 |
const postHeaderParser: HeaderParser = parseHeaderByMethodFactory('post'); |
const putHeaderParser: HeaderParser = parseHeaderByMethodFactory('put'); |
const patchHeaderParser: HeaderParser = parseHeaderByMethodFactory('patch'); |
 |
const headerParsers = [postHeaderParser, putHeaderParser, patchHeaderParser, defaultHeaderParser]; |
 |
export const parseHeaders = (options: BackendSrvRequest) => { |
  const headers = options?.headers ? new Headers(options.headers) : new Headers(); |
  const parsers = headerParsers.filter((parser) => parser.canParse(options)); |
  const combinedHeaders = parsers.reduce((prev, parser) => { |
    return parser.parse(prev); |
  }, headers); |
 |
  return combinedHeaders; |
}; |
 |
export const isContentTypeApplicationJson = (headers: Headers) => { |
  if (!headers) { |
    return false; |
  } |
 |
  const contentType = headers.get('content-type'); |
  if (contentType && contentType.toLowerCase() === 'application/json') { |
    return true; |
  } |
 |
  return false; |
}; |
 |
export const parseBody = (options: BackendSrvRequest, isAppJson: boolean) => { |
  if (!options) { |
    return options; |
  } |
 |
  if (!options.data || typeof options.data === 'string') { |
    return options.data; |
  } |
  if (options.data instanceof Blob) { |
    return options.data; |
  } |
 |
  return isAppJson ? JSON.stringify(options.data) : new URLSearchParams(options.data); |
}; |
 |
export async function parseResponseBody<T>( |
  response: Response, |
  responseType?: 'json' | 'text' | 'arraybuffer' | 'blob' |
): Promise<T> { |
  if (responseType) { |
    switch (responseType) { |
      case 'arraybuffer': |
        // This was `any` before; same for the other `any`s |
        return response.arrayBuffer() as any; |
 |
      case 'blob': |
        return response.blob() as any; |
 |
      case 'json': |
        // An empty string is not valid JSON. |
        // Sometimes (unfortunately) our APIs declare their Content-Type as JSON but return an empty body. |
        if (response.headers.get('Content-Length') === '0') { |
          console.warn(`${response.url} returned an invalid JSON`); |
          return {} as unknown as T; |
        } |
 |
        return await response.json(); |
 |
      case 'text': |
        return response.text() as any; |
    } |
  } |
 |
  const textData = await response.text(); // this could be just a string, e.g. for Prometheus requests |
  try { |
    return JSON.parse(textData); // for the majority of requests this will be something that can be parsed |
  } catch {} |
  return textData as any; |
} |
 |
export const parseUrlFromOptions = (options: BackendSrvRequest): string => { |
  const cleanParams = omitBy(options.params, (v) => v === undefined || (v && v.length === 0)); |
  const serializedParams = urlUtil.serializeParams(cleanParams); |
  return options.params && serializedParams.length ? `${options.url}?${serializedParams}` : options.url; |
}; |
 |
export const parseCredentials = (options: BackendSrvRequest): RequestCredentials => { |
  if (!options) { |
    return options; |
  } |
 |
  if (options.credentials) { |
    return options.credentials; |
  } |
 |
  if (options.withCredentials) { |
    deprecationWarning('BackendSrvRequest', 'withCredentials', 'credentials'); |
    return 'include'; |
  } |
 |
  return 'same-origin'; |
}; |
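These helpers convert a BackendSrvRequest into the url and RequestInit that the native fetch API expects. A minimal sketch, assuming the file above is importable as './fetch' and using an illustrative request:

import { BackendSrvRequest } from '@grafana/runtime';

import { parseInitFromOptions, parseResponseBody, parseUrlFromOptions } from './fetch'; // assumed path

// With no explicit headers, the POST parser adds 'content-type: application/json' and the
// default parser adds 'accept', so the object body below is JSON.stringify-ed.
const options: BackendSrvRequest = {
  url: '/api/ds/query',
  method: 'POST',
  data: { queries: [{ refId: 'A' }] },
  params: { requestId: 'tempo-1', skipped: undefined },
};

const url = parseUrlFromOptions(options); // '/api/ds/query?requestId=tempo-1' (undefined params are dropped)
const init = parseInitFromOptions(options); // { method: 'POST', headers, body, credentials: 'same-origin' }

async function run() {
  const response = await fetch(url, init);
  // 'json' short-circuits to response.json(); without a responseType the body is read as
  // text and JSON.parse-d on a best-effort basis.
  return parseResponseBody<unknown>(response, 'json');
}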
@ -0,0 +1,76 @@ |
{ |
  "name": "@grafana-plugins/tempo", |
  "description": "Grafana plugin for the Tempo data source.", |
  "private": true, |
  "version": "10.4.0-pre", |
  "dependencies": { |
    "@emotion/css": "11.11.2", |
    "@grafana/data": "workspace:*", |
    "@grafana/e2e-selectors": "workspace:*", |
    "@grafana/experimental": "1.7.4", |
    "@grafana/lezer-logql": "0.2.1", |
    "@grafana/lezer-traceql": "0.0.12", |
    "@grafana/monaco-logql": "^0.0.7", |
    "@grafana/runtime": "workspace:*", |
    "@grafana/schema": "workspace:*", |
    "@grafana/ui": "workspace:*", |
    "@lezer/common": "1.2.0", |
    "@lezer/lr": "1.3.3", |
    "@opentelemetry/api": "1.6.0", |
    "@opentelemetry/exporter-collector": "0.25.0", |
    "@opentelemetry/semantic-conventions": "1.17.1", |
    "@reduxjs/toolkit": "1.9.5", |
    "buffer": "6.0.3", |
    "events": "3.3.0", |
    "i18next": "^22.0.0", |
    "lodash": "4.17.21", |
    "lru-cache": "10.0.0", |
    "monaco-editor": "0.34.0", |
    "prismjs": "1.29.0", |
    "react": "18.2.0", |
    "react-dom": "18.2.0", |
    "react-router": "6.2.1", |
    "react-use": "17.4.0", |
    "redux": "4.2.1", |
    "rxjs": "7.8.1", |
    "semver": "7.5.4", |
    "stream-browserify": "3.0.0", |
    "string_decoder": "1.2.0", |
    "tslib": "2.6.0", |
    "uuid": "9.0.0" |
  }, |
  "devDependencies": { |
    "@babel/core": "7.23.2", |
    "@grafana/plugin-configs": "10.4.0-pre", |
    "@grafana/tsconfig": "^1.3.0-rc1", |
    "@swc/core": "1.3.38", |
    "@testing-library/jest-dom": "6.1.4", |
    "@testing-library/react": "14.0.0", |
    "@testing-library/user-event": "14.5.2", |
    "@types/jest": "29.5.4", |
    "@types/lodash": "4.14.195", |
    "@types/node": "20.8.10", |
    "@types/prismjs": "1.26.0", |
    "@types/react": "18.2.15", |
    "@types/react-dom": "18.2.7", |
    "@types/semver": "7.5.0", |
    "@types/uuid": "9.0.2", |
    "copy-webpack-plugin": "11.0.0", |
    "eslint-webpack-plugin": "4.0.1", |
    "glob": "10.3.3", |
    "react-select-event": "5.5.1", |
    "replace-in-file-webpack-plugin": "1.0.6", |
    "ts-node": "10.9.1", |
    "typescript": "5.2.2", |
    "webpack": "5.89.0" |
  }, |
  "peerDependencies": { |
    "@grafana/runtime": "*" |
  }, |
  "scripts": { |
    "build": "webpack -c ./webpack.config.ts --env production", |
    "build:commit": "webpack -c ./webpack.config.ts --env production --env commit=$(git rev-parse --short HEAD)", |
    "dev": "webpack -w -c ./webpack.config.ts --env development" |
  }, |
  "packageManager": "yarn@3.6.0" |
} |