Merge branch 'master' into postgres-query-builder

Branch: pull/11081/head
Author: Sven Klemm
Commit: d8606ddfb0

22 changed files (lines changed):

1. .dockerignore (3)
2. CHANGELOG.md (1)
3. Dockerfile (82)
4. Makefile (3)
5. NOTICE.md (2)
6. README.md (32)
7. docs/sources/installation/ldap.md (1)
8. packaging/docker/Dockerfile (3)
9. public/app/containers/Explore/Explore.tsx (111)
10. public/app/containers/Explore/PromQueryField.jest.tsx (42)
11. public/app/containers/Explore/PromQueryField.tsx (66)
12. public/app/containers/Explore/QueryRows.tsx (25)
13. public/app/containers/Teams/TeamMembers.tsx (18)
14. public/app/features/plugins/plugin_edit_ctrl.ts (4)
15. public/app/plugins/datasource/prometheus/datasource.ts (176)
16. public/app/plugins/datasource/prometheus/result_transformer.ts (22)
17. public/app/plugins/datasource/prometheus/specs/datasource.jest.ts (82)
18. public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts (12)
19. public/app/plugins/panel/graph/legend.ts (4)
20. public/sass/pages/_explore.scss (8)
21. public/vendor/css/rc-cascader.scss (6)
22. scripts/docker/run.sh (67)

@ -3,9 +3,12 @@
.git
.gitignore
.github
.vscode
bin
data*
dist
docker
Dockerfile
docs
dump.rdb
node_modules

@ -39,6 +39,7 @@
* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
* **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
### Breaking changes

@ -0,0 +1,82 @@
# Golang build container
FROM golang:1.10
WORKDIR $GOPATH/src/github.com/grafana/grafana
COPY Gopkg.toml Gopkg.lock ./
COPY vendor vendor
ARG DEP_ENSURE=""
RUN if [ ! -z "${DEP_ENSURE}" ]; then \
go get -u github.com/golang/dep/cmd/dep && \
dep ensure --vendor-only; \
fi
COPY pkg pkg
COPY build.go build.go
COPY package.json package.json
RUN go run build.go build
# Node build container
FROM node:8
WORKDIR /usr/src/app/
COPY package.json yarn.lock ./
RUN yarn install --pure-lockfile --no-progress
COPY Gruntfile.js tsconfig.json tslint.json ./
COPY public public
COPY scripts scripts
COPY emails emails
ENV NODE_ENV production
RUN ./node_modules/.bin/grunt build
# Final container
FROM debian:stretch-slim
ARG GF_UID="472"
ARG GF_GID="472"
ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \
GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
GF_PATHS_DATA="/var/lib/grafana" \
GF_PATHS_HOME="/usr/share/grafana" \
GF_PATHS_LOGS="/var/log/grafana" \
GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
WORKDIR $GF_PATHS_HOME
RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/*
COPY conf ./conf
RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
groupadd -r -g $GF_GID grafana && \
useradd -r -u $GF_UID -g grafana grafana && \
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
"$GF_PATHS_PROVISIONING/dashboards" \
"$GF_PATHS_LOGS" \
"$GF_PATHS_PLUGINS" \
"$GF_PATHS_DATA" && \
cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
chown -R grafana:grafana "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" && \
chmod 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS"
COPY --from=0 /go/src/github.com/grafana/grafana/bin/linux-amd64/grafana-server /go/src/github.com/grafana/grafana/bin/linux-amd64/grafana-cli ./bin/
COPY --from=1 /usr/src/app/public ./public
COPY --from=1 /usr/src/app/tools ./tools
COPY tools/phantomjs/render.js ./tools/phantomjs/render.js
EXPOSE 3000
COPY ./packaging/docker/run.sh /run.sh
USER grafana
ENTRYPOINT [ "/run.sh" ]

@ -30,6 +30,9 @@ build-docker-dev:
cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
cd packaging/docker && docker build --tag grafana/grafana:dev .
build-docker-full:
docker build --tag grafana/grafana:dev .
test-go:
go test -v ./pkg/...

@ -1,5 +1,5 @@
Copyright 2014-2017 Grafana Labs
Copyright 2014-2018 Grafana Labs
This software is based on Kibana:
Copyright 2012-2013 Elasticsearch BV

@ -43,7 +43,7 @@ To build the assets, rebuild on file change, and serve them by Grafana's webserv
```bash
npm install -g yarn
yarn install --pure-lockfile
npm run watch
yarn run watch
```
Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them by webpack-dev-server (http://localhost:3333):
@ -54,14 +54,14 @@ env GRAFANA_THEME=light yarn start
```
Note: HMR for Angular is not supported. If you edit files in the Angular part of the app, the whole page will reload.
Run tests
```bash
npm run jest
yarn run jest
```
Run karma tests
```bash
npm run karma
yarn run karma
```
### Recompile backend on source change
@ -98,30 +98,42 @@ In your custom.ini uncomment (remove the leading `;`) sign. And set `app_mode =
#### Frontend
Execute all frontend tests
```bash
npm run test
yarn run test
```
Writing & watching frontend tests (we have two test runners)
- jest for all new tests that do not require browser context (React+more)
- Start watcher: `npm run jest`
- Start watcher: `yarn run jest`
- Jest will run all test files that end with the name ".jest.ts"
- karma + mocha is used for testing angularjs components. We do want to migrate these tests to jest over time (if possible).
- Start watcher: `npm run karma`
- Start watcher: `yarn run karma`
- Karma+Mocha runs all files that end with the name "_specs.ts".
#### Backend
```bash
# Run Golang tests using sqlite3 as database (default)
go test ./pkg/...
# Run Golang tests using mysql as database - convenient to use /docker/blocks/mysql_tests
GRAFANA_TEST_DB=mysql go test ./pkg/...
# Run Golang tests using postgres as database - convenient to use /docker/blocks/postgres_tests
GRAFANA_TEST_DB=postgres go test ./pkg/...
```
## Building custom docker image
You can build a custom image using Docker, which doesn't require installing any dependencies besides docker itself.
```bash
git clone https://github.com/grafana/grafana
cd grafana
docker build -t grafana:dev .
docker run -d --name=grafana -p 3000:3000 grafana:dev
```
Open Grafana in your browser (default: `http://localhost:3000`) and log in with the admin user (default: `user/pass = admin/admin`).
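The root `Dockerfile` added in this merge also declares an `ARG DEP_ENSURE=""` in its Go build stage; as a minimal sketch (build context assumed to be the repository root), passing any non-empty value re-runs dep before compiling:
```bash
# Sketch: DEP_ENSURE is the build ARG declared in the new root Dockerfile;
# any non-empty value makes the Go build stage run `dep ensure --vendor-only`.
docker build --build-arg DEP_ENSURE=true -t grafana:dev .
```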
## Contribute
If you have any idea for an improvement or found a bug, do not hesitate to open an issue.

@ -48,6 +48,7 @@ bind_dn = "cn=admin,dc=grafana,dc=org"
bind_password = 'grafana'
# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)"
# Allow login from email or username, example "(|(sAMAccountName=%s)(userPrincipalName=%s))"
search_filter = "(cn=%s)"
# An array of base dns to search through

@ -23,6 +23,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
WORKDIR $GF_PATHS_HOME
RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/*
@ -47,5 +49,4 @@ EXPOSE 3000
COPY ./run.sh /run.sh
USER grafana
WORKDIR /
ENTRYPOINT [ "/run.sh" ]

@ -19,6 +19,16 @@ import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';
const MAX_HISTORY_ITEMS = 100;
function makeHints(hints) {
const hintsByIndex = [];
hints.forEach(hint => {
if (hint) {
hintsByIndex[hint.index] = hint;
}
});
return hintsByIndex;
}
function makeTimeSeriesList(dataList, options) {
return dataList.map((seriesData, index) => {
const datapoints = seriesData.datapoints || [];
@ -37,7 +47,7 @@ function makeTimeSeriesList(dataList, options) {
});
}
function parseInitialState(initial: string | undefined) {
function parseUrlState(initial: string | undefined) {
if (initial) {
try {
const parsed = JSON.parse(decodePathComponent(initial));
@ -64,8 +74,9 @@ interface IExploreState {
latency: number;
loading: any;
logsResult: any;
queries: any;
queryError: any;
queries: any[];
queryErrors: any[];
queryHints: any[];
range: any;
requestOptions: any;
showingGraph: boolean;
@ -82,7 +93,8 @@ export class Explore extends React.Component<any, IExploreState> {
constructor(props) {
super(props);
const { datasource, queries, range } = parseInitialState(props.routeParams.state);
const initialState: IExploreState = props.initialState;
const { datasource, queries, range } = parseUrlState(props.routeParams.state);
this.state = {
datasource: null,
datasourceError: null,
@ -95,7 +107,8 @@ export class Explore extends React.Component<any, IExploreState> {
loading: false,
logsResult: null,
queries: ensureQueries(queries),
queryError: null,
queryErrors: [],
queryHints: [],
range: range || { ...DEFAULT_RANGE },
requestOptions: null,
showingGraph: true,
@ -105,7 +118,7 @@ export class Explore extends React.Component<any, IExploreState> {
supportsLogs: null,
supportsTable: null,
tableResult: null,
...props.initialState,
...initialState,
};
}
@ -156,6 +169,10 @@ export class Explore extends React.Component<any, IExploreState> {
const historyKey = `grafana.explore.history.${datasourceId}`;
const history = store.getObject(historyKey, []);
if (datasource.init) {
datasource.init();
}
this.setState(
{
datasource,
@ -191,6 +208,8 @@ export class Explore extends React.Component<any, IExploreState> {
datasourceLoading: true,
graphResult: null,
logsResult: null,
queryErrors: [],
queryHints: [],
tableResult: null,
});
const datasource = await this.props.datasourceSrv.get(option.value);
@ -199,6 +218,7 @@ export class Explore extends React.Component<any, IExploreState> {
onChangeQuery = (value: string, index: number, override?: boolean) => {
const { queries } = this.state;
let { queryErrors, queryHints } = this.state;
const prevQuery = queries[index];
const edited = override ? false : prevQuery.query !== value;
const nextQuery = {
@ -208,7 +228,18 @@ export class Explore extends React.Component<any, IExploreState> {
};
const nextQueries = [...queries];
nextQueries[index] = nextQuery;
this.setState({ queries: nextQueries }, override ? () => this.onSubmit() : undefined);
if (override) {
queryErrors = [];
queryHints = [];
}
this.setState(
{
queryErrors,
queryHints,
queries: nextQueries,
},
override ? () => this.onSubmit() : undefined
);
};
onChangeTime = nextRange => {
@ -255,13 +286,32 @@ export class Explore extends React.Component<any, IExploreState> {
};
onClickTableCell = (columnKey: string, rowValue: string) => {
this.onModifyQueries({ type: 'ADD_FILTER', key: columnKey, value: rowValue });
};
onModifyQueries = (action: object, index?: number) => {
const { datasource, queries } = this.state;
if (datasource && datasource.modifyQuery) {
const nextQueries = queries.map(q => ({
...q,
edited: false,
query: datasource.modifyQuery(q.query, { addFilter: { key: columnKey, value: rowValue } }),
}));
let nextQueries;
if (index === undefined) {
// Modify all queries
nextQueries = queries.map(q => ({
...q,
edited: false,
query: datasource.modifyQuery(q.query, action),
}));
} else {
// Modify query only at index
nextQueries = [
...queries.slice(0, index),
{
...queries[index],
edited: false,
query: datasource.modifyQuery(queries[index].query, action),
},
...queries.slice(index + 1),
];
}
this.setState({ queries: nextQueries }, () => this.onSubmit());
}
};
@ -309,7 +359,7 @@ export class Explore extends React.Component<any, IExploreState> {
this.setState({ history });
}
buildQueryOptions(targetOptions: { format: string; instant?: boolean }) {
buildQueryOptions(targetOptions: { format: string; hinting?: boolean; instant?: boolean }) {
const { datasource, queries, range } = this.state;
const resolution = this.el.offsetWidth;
const absoluteRange = {
@ -333,19 +383,20 @@ export class Explore extends React.Component<any, IExploreState> {
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, graphResult: null, queryError: null });
this.setState({ latency: 0, loading: true, graphResult: null, queryErrors: [], queryHints: [] });
const now = Date.now();
const options = this.buildQueryOptions({ format: 'time_series', instant: false });
const options = this.buildQueryOptions({ format: 'time_series', instant: false, hinting: true });
try {
const res = await datasource.query(options);
const result = makeTimeSeriesList(res.data, options);
const queryHints = res.hints ? makeHints(res.hints) : [];
const latency = Date.now() - now;
this.setState({ latency, loading: false, graphResult: result, requestOptions: options });
this.setState({ latency, loading: false, graphResult: result, queryHints, requestOptions: options });
this.onQuerySuccess(datasource.meta.id, queries);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryError });
this.setState({ loading: false, queryErrors: [queryError] });
}
}
@ -354,7 +405,7 @@ export class Explore extends React.Component<any, IExploreState> {
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, queryError: null, tableResult: null });
this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], tableResult: null });
const now = Date.now();
const options = this.buildQueryOptions({
format: 'table',
@ -369,7 +420,7 @@ export class Explore extends React.Component<any, IExploreState> {
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryError });
this.setState({ loading: false, queryErrors: [queryError] });
}
}
@ -378,7 +429,7 @@ export class Explore extends React.Component<any, IExploreState> {
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, queryError: null, logsResult: null });
this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], logsResult: null });
const now = Date.now();
const options = this.buildQueryOptions({
format: 'logs',
@ -393,7 +444,7 @@ export class Explore extends React.Component<any, IExploreState> {
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryError });
this.setState({ loading: false, queryErrors: [queryError] });
}
}
@ -415,7 +466,8 @@ export class Explore extends React.Component<any, IExploreState> {
loading,
logsResult,
queries,
queryError,
queryErrors,
queryHints,
range,
requestOptions,
showingGraph,
@ -449,12 +501,12 @@ export class Explore extends React.Component<any, IExploreState> {
</a>
</div>
) : (
<div className="navbar-buttons explore-first-button">
<button className="btn navbar-button" onClick={this.onClickCloseSplit}>
Close Split
</button>
</div>
)}
{!datasourceMissing ? (
<div className="navbar-buttons">
<Select
@ -504,14 +556,15 @@ export class Explore extends React.Component<any, IExploreState> {
<QueryRows
history={history}
queries={queries}
queryErrors={queryErrors}
queryHints={queryHints}
request={this.request}
onAddQueryRow={this.onAddQueryRow}
onChangeQuery={this.onChangeQuery}
onClickHintFix={this.onModifyQueries}
onExecuteQuery={this.onSubmit}
onRemoveQueryRow={this.onRemoveQueryRow}
/>
{queryError && !loading ? <div className="text-warning m-a-2">{queryError}</div> : null}
<div className="result-options">
{supportsGraph ? (
<button className={`btn navbar-button ${graphButtonActive}`} onClick={this.onClickGraphButton}>

@ -3,7 +3,7 @@ import Enzyme, { shallow } from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import Plain from 'slate-plain-serializer';
import PromQueryField from './PromQueryField';
import PromQueryField, { groupMetricsByPrefix, RECORDING_RULES_GROUP } from './PromQueryField';
Enzyme.configure({ adapter: new Adapter() });
@ -177,3 +177,43 @@ describe('PromQueryField typeahead handling', () => {
});
});
});
describe('groupMetricsByPrefix()', () => {
it('returns an empty group for no metrics', () => {
expect(groupMetricsByPrefix([])).toEqual([]);
});
it('returns options grouped by prefix', () => {
expect(groupMetricsByPrefix(['foo_metric'])).toMatchObject([
{
value: 'foo',
children: [
{
value: 'foo_metric',
},
],
},
]);
});
it('returns options without prefix as toplevel option', () => {
expect(groupMetricsByPrefix(['metric'])).toMatchObject([
{
value: 'metric',
},
]);
});
it('returns recording rules grouped separately', () => {
expect(groupMetricsByPrefix([':foo_metric:'])).toMatchObject([
{
value: RECORDING_RULES_GROUP,
children: [
{
value: ':foo_metric:',
},
],
},
]);
});
});

@ -28,6 +28,7 @@ const HISTORY_ITEM_COUNT = 5;
const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
const METRIC_MARK = 'metric';
const PRISM_LANGUAGE = 'promql';
export const RECORDING_RULES_GROUP = '__recording_rules__';
export const wrapLabel = (label: string) => ({ label });
export const setFunctionMove = (suggestion: Suggestion): Suggestion => {
@ -52,7 +53,22 @@ export function addHistoryMetadata(item: Suggestion, history: any[]): Suggestion
}
export function groupMetricsByPrefix(metrics: string[], delimiter = '_'): CascaderOption[] {
return _.chain(metrics)
// Filter out recording rules and insert as first option
const ruleRegex = /:\w+:/;
const ruleNames = metrics.filter(metric => ruleRegex.test(metric));
const rulesOption = {
label: 'Recording rules',
value: RECORDING_RULES_GROUP,
children: ruleNames
.slice()
.sort()
.map(name => ({ label: name, value: name })),
};
const options = ruleNames.length > 0 ? [rulesOption] : [];
const metricsOptions = _.chain(metrics)
.filter(metric => !ruleRegex.test(metric))
.groupBy(metric => metric.split(delimiter)[0])
.map((metricsForPrefix: string[], prefix: string): CascaderOption => {
const prefixIsMetric = metricsForPrefix.length === 1 && metricsForPrefix[0] === prefix;
@ -65,6 +81,8 @@ export function groupMetricsByPrefix(metrics: string[], delimiter = '_'): Cascad
})
.sortBy('label')
.value();
return [...options, ...metricsOptions];
}
export function willApplySuggestion(
@ -105,13 +123,16 @@ interface CascaderOption {
}
interface PromQueryFieldProps {
history?: any[];
error?: string;
hint?: any;
histogramMetrics?: string[];
history?: any[];
initialQuery?: string | null;
labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...]
labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
metrics?: string[];
metricsByPrefix?: CascaderOption[];
onClickHintFix?: (action: any) => void;
onPressEnter?: () => void;
onQueryChange?: (value: string, override?: boolean) => void;
portalPrefix?: string;
@ -189,6 +210,13 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
}
};
onClickHintFix = () => {
const { hint, onClickHintFix } = this.props;
if (onClickHintFix && hint && hint.fix) {
onClickHintFix(hint.fix.action);
}
};
onReceiveMetrics = () => {
if (!this.state.metrics) {
return;
@ -435,6 +463,7 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
}
render() {
const { error, hint } = this.props;
const { histogramMetrics, metricsByPrefix } = this.state;
const histogramOptions = histogramMetrics.map(hm => ({ label: hm, value: hm }));
const metricsOptions = [
@ -449,16 +478,29 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
<button className="btn navbar-button navbar-button--tight">Metrics</button>
</Cascader>
</div>
<div className="slate-query-field-wrapper">
<TypeaheadField
additionalPlugins={this.plugins}
cleanText={cleanText}
initialValue={this.props.initialQuery}
onTypeahead={this.onTypeahead}
onWillApplySuggestion={willApplySuggestion}
onValueChanged={this.onChangeQuery}
placeholder="Enter a PromQL query"
/>
<div className="prom-query-field-wrapper">
<div className="slate-query-field-wrapper">
<TypeaheadField
additionalPlugins={this.plugins}
cleanText={cleanText}
initialValue={this.props.initialQuery}
onTypeahead={this.onTypeahead}
onWillApplySuggestion={willApplySuggestion}
onValueChanged={this.onChangeQuery}
placeholder="Enter a PromQL query"
/>
</div>
{error ? <div className="prom-query-field-info text-error">{error}</div> : null}
{hint ? (
<div className="prom-query-field-info text-warning">
{hint.label}{' '}
{hint.fix ? (
<a className="text-link muted" onClick={this.onClickHintFix}>
{hint.fix.label}
</a>
) : null}
</div>
) : null}
</div>
</div>
);

@ -1,5 +1,6 @@
import React, { PureComponent } from 'react';
// TODO make this datasource-plugin-dependent
import QueryField from './PromQueryField';
class QueryRow extends PureComponent<any, {}> {
@ -21,6 +22,13 @@ class QueryRow extends PureComponent<any, {}> {
this.onChangeQuery('', true);
};
onClickHintFix = action => {
const { index, onClickHintFix } = this.props;
if (onClickHintFix) {
onClickHintFix(action, index);
}
};
onClickRemoveButton = () => {
const { index, onRemoveQueryRow } = this.props;
if (onRemoveQueryRow) {
@ -36,14 +44,17 @@ class QueryRow extends PureComponent<any, {}> {
};
render() {
const { edited, history, query, request } = this.props;
const { edited, history, query, queryError, queryHint, request } = this.props;
return (
<div className="query-row">
<div className="query-row-field">
<QueryField
error={queryError}
hint={queryHint}
initialQuery={edited ? null : query}
history={history}
portalPrefix="explore"
onClickHintFix={this.onClickHintFix}
onPressEnter={this.onPressEnter}
onQueryChange={this.onChangeQuery}
request={request}
@ -67,11 +78,19 @@ class QueryRow extends PureComponent<any, {}> {
export default class QueryRows extends PureComponent<any, {}> {
render() {
const { className = '', queries, ...handlers } = this.props;
const { className = '', queries, queryErrors = [], queryHints = [], ...handlers } = this.props;
return (
<div className={className}>
{queries.map((q, index) => (
<QueryRow key={q.key} index={index} query={q.query} edited={q.edited} {...handlers} />
<QueryRow
key={q.key}
index={index}
query={q.query}
queryError={queryErrors[index]}
queryHint={queryHints[index]}
edited={q.edited}
{...handlers}
/>
))}
</div>
);

@ -2,9 +2,9 @@ import React from 'react';
import { hot } from 'react-hot-loader';
import { observer } from 'mobx-react';
import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore';
import appEvents from 'app/core/app_events';
import SlideDown from 'app/core/components/Animations/SlideDown';
import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
interface Props {
team: ITeam;
@ -31,15 +31,7 @@ export class TeamMembers extends React.Component<Props, State> {
};
removeMember(member: ITeamMember) {
appEvents.emit('confirm-modal', {
title: 'Remove Member',
text: 'Are you sure you want to remove ' + member.login + ' from this group?',
yesText: 'Remove',
icon: 'fa-warning',
onConfirm: () => {
this.removeMemberConfirmed(member);
},
});
this.props.team.removeMember(member);
}
removeMemberConfirmed(member: ITeamMember) {
@ -54,10 +46,8 @@ export class TeamMembers extends React.Component<Props, State> {
</td>
<td>{member.login}</td>
<td>{member.email}</td>
<td style={{ width: '1%' }}>
<a onClick={() => this.removeMember(member)} className="btn btn-danger btn-mini">
<i className="fa fa-remove" />
</a>
<td className="text-right">
<DeleteButton onConfirmDelete={() => this.removeMember(member)} />
</td>
</tr>
);

@ -97,7 +97,9 @@ export class PluginEditCtrl {
initReadme() {
return this.backendSrv.get(`/api/plugins/${this.pluginId}/markdown/readme`).then(res => {
var md = new Remarkable();
var md = new Remarkable({
linkify: true
});
this.readmeHtml = this.$sce.trustAsHtml(md.render(res));
});
}

@ -82,6 +82,112 @@ export function addLabelToQuery(query: string, key: string, value: string): stri
return parts.join('');
}
export function determineQueryHints(series: any[], datasource?: any): any[] {
const hints = series.map((s, i) => {
const query: string = s.query;
const index: number = s.responseIndex;
if (query === undefined || index === undefined) {
return null;
}
// ..._bucket metric needs a histogram_quantile()
const histogramMetric = query.trim().match(/^\w+_bucket$/);
if (histogramMetric) {
const label = 'Time series has buckets, you probably wanted a histogram.';
return {
index,
label,
fix: {
label: 'Fix by adding histogram_quantile().',
action: {
type: 'ADD_HISTOGRAM_QUANTILE',
query,
index,
},
},
};
}
// Check for monotony
const datapoints: [number, number][] = s.datapoints;
const simpleMetric = query.trim().match(/^\w+$/);
if (simpleMetric && datapoints.length > 1) {
let increasing = false;
const monotonic = datapoints.every((dp, index) => {
if (index === 0) {
return true;
}
increasing = increasing || dp[0] > datapoints[index - 1][0];
// monotonic?
return dp[0] >= datapoints[index - 1][0];
});
if (increasing && monotonic) {
const label = 'Time series is monotonously increasing.';
return {
label,
index,
fix: {
label: 'Fix by adding rate().',
action: {
type: 'ADD_RATE',
query,
index,
},
},
};
}
}
// Check for recording rules expansion
if (datasource && datasource.ruleMappings) {
const mapping = datasource.ruleMappings;
const mappingForQuery = Object.keys(mapping).reduce((acc, ruleName) => {
if (query.search(ruleName) > -1) {
return {
...acc,
[ruleName]: mapping[ruleName],
};
}
return acc;
}, {});
if (_.size(mappingForQuery) > 0) {
const label = 'Query contains recording rules.';
return {
label,
index,
fix: {
label: 'Expand rules',
action: {
type: 'EXPAND_RULES',
query,
index,
mapping: mappingForQuery,
},
},
};
}
}
// No hint found
return null;
});
return hints;
}
export function extractRuleMappingFromGroups(groups: any[]) {
return groups.reduce(
(mapping, group) =>
group.rules.filter(rule => rule.type === 'recording').reduce(
(acc, rule) => ({
...acc,
[rule.name]: rule.query,
}),
mapping
),
{}
);
}
export function prometheusRegularEscape(value) {
if (typeof value === 'string') {
return value.replace(/'/g, "\\\\'");
@ -100,6 +206,7 @@ export class PrometheusDatasource {
type: string;
editorSrc: string;
name: string;
ruleMappings: { [index: string]: string };
supportsExplore: boolean;
supportMetrics: boolean;
url: string;
@ -127,6 +234,11 @@ export class PrometheusDatasource {
this.queryTimeout = instanceSettings.jsonData.queryTimeout;
this.httpMethod = instanceSettings.jsonData.httpMethod || 'GET';
this.resultTransformer = new ResultTransformer(templateSrv);
this.ruleMappings = {};
}
init() {
this.loadRules();
}
_request(url, data?, options?: any) {
@ -223,10 +335,15 @@ export class PrometheusDatasource {
return this.$q.all(allQueryPromise).then(responseList => {
let result = [];
let hints = [];
_.each(responseList, (response, index) => {
if (response.status === 'error') {
throw response.error;
const error = {
index,
...response.error,
};
throw error;
}
// Keeping original start/end for transformers
@ -241,16 +358,24 @@ export class PrometheusDatasource {
responseIndex: index,
refId: activeTargets[index].refId,
};
this.resultTransformer.transform(result, response, transformerOptions);
const series = this.resultTransformer.transform(response, transformerOptions);
result = [...result, ...series];
if (queries[index].hinting) {
const queryHints = determineQueryHints(series, this);
hints = [...hints, ...queryHints];
}
});
return { data: result };
return { data: result, hints };
});
}
createQuery(target, options, start, end) {
var query: any = {};
query.instant = target.instant;
const query: any = {
hinting: target.hinting,
instant: target.instant,
};
var range = Math.ceil(end - start);
var interval = kbn.interval_to_seconds(options.interval);
@ -450,12 +575,43 @@ export class PrometheusDatasource {
return state;
}
modifyQuery(query: string, options: any): string {
const { addFilter } = options;
if (addFilter) {
return addLabelToQuery(query, addFilter.key, addFilter.value);
loadRules() {
this.metadataRequest('/api/v1/rules')
.then(res => res.data || res.json())
.then(body => {
const groups = _.get(body, ['data', 'groups']);
if (groups) {
this.ruleMappings = extractRuleMappingFromGroups(groups);
}
})
.catch(e => {
console.log('Rules API is experimental. Ignore next error.');
console.error(e);
});
}
modifyQuery(query: string, action: any): string {
switch (action.type) {
case 'ADD_FILTER': {
return addLabelToQuery(query, action.key, action.value);
}
case 'ADD_HISTOGRAM_QUANTILE': {
return `histogram_quantile(0.95, sum(rate(${query}[5m])) by (le))`;
}
case 'ADD_RATE': {
return `rate(${query}[5m])`;
}
case 'EXPAND_RULES': {
const mapping = action.mapping;
if (mapping) {
const ruleNames = Object.keys(mapping);
const rulesRegex = new RegExp(`(\\s|^)(${ruleNames.join('|')})(\\s|$|\\()`, 'ig');
return query.replace(rulesRegex, (match, pre, name, post) => mapping[name]);
}
}
default:
return query;
}
return query;
}
getPrometheusTime(date, roundUp) {

@ -4,11 +4,11 @@ import TableModel from 'app/core/table_model';
export class ResultTransformer {
constructor(private templateSrv) {}
transform(result: any, response: any, options: any) {
transform(response: any, options: any): any[] {
let prometheusResult = response.data.data.result;
if (options.format === 'table') {
result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId));
return [this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId)];
} else if (options.format === 'heatmap') {
let seriesList = [];
prometheusResult.sort(sortSeriesByLabel);
@ -16,16 +16,19 @@ export class ResultTransformer {
seriesList.push(this.transformMetricData(metricData, options, options.start, options.end));
}
seriesList = this.transformToHistogramOverTime(seriesList);
result.push(...seriesList);
return seriesList;
} else {
let seriesList = [];
for (let metricData of prometheusResult) {
if (response.data.data.resultType === 'matrix') {
result.push(this.transformMetricData(metricData, options, options.start, options.end));
seriesList.push(this.transformMetricData(metricData, options, options.start, options.end));
} else if (response.data.data.resultType === 'vector') {
result.push(this.transformInstantMetricData(metricData, options));
seriesList.push(this.transformInstantMetricData(metricData, options));
}
}
return seriesList;
}
return [];
}
transformMetricData(metricData, options, start, end) {
@ -60,7 +63,12 @@ export class ResultTransformer {
dps.push([null, t]);
}
return { target: metricLabel, datapoints: dps };
return {
datapoints: dps,
query: options.query,
responseIndex: options.responseIndex,
target: metricLabel,
};
}
transformMetricDataToTable(md, resultCount: number, refId: string) {
@ -124,7 +132,7 @@ export class ResultTransformer {
metricLabel = null;
metricLabel = this.createMetricLabel(md.metric, options);
dps.push([parseFloat(md.value[1]), md.value[0] * 1000]);
return { target: metricLabel, datapoints: dps };
return { target: metricLabel, datapoints: dps, labels: md.metric };
}
createMetricLabel(labelData, options) {

@ -3,6 +3,8 @@ import moment from 'moment';
import q from 'q';
import {
alignRange,
determineQueryHints,
extractRuleMappingFromGroups,
PrometheusDatasource,
prometheusSpecialRegexEscape,
prometheusRegularEscape,
@ -122,7 +124,7 @@ describe('PrometheusDatasource', () => {
ctx.ds.performTimeSeriesQuery = jest.fn().mockReturnValue(responseMock);
return ctx.ds.query(ctx.query).then(result => {
let results = result.data;
return expect(results).toEqual(expected);
return expect(results).toMatchObject(expected);
});
});
@ -180,6 +182,84 @@ describe('PrometheusDatasource', () => {
});
});
describe('determineQueryHints()', () => {
it('returns no hints for no series', () => {
expect(determineQueryHints([])).toEqual([]);
});
it('returns no hints for empty series', () => {
expect(determineQueryHints([{ datapoints: [], query: '' }])).toEqual([null]);
});
it('returns no hint for a monotonously decreasing series', () => {
const series = [{ datapoints: [[23, 1000], [22, 1001]], query: 'metric', responseIndex: 0 }];
const hints = determineQueryHints(series);
expect(hints).toEqual([null]);
});
it('returns a rate hint for a monotonously increasing series', () => {
const series = [{ datapoints: [[23, 1000], [24, 1001]], query: 'metric', responseIndex: 0 }];
const hints = determineQueryHints(series);
expect(hints.length).toBe(1);
expect(hints[0]).toMatchObject({
label: 'Time series is monotonously increasing.',
index: 0,
fix: {
action: {
type: 'ADD_RATE',
query: 'metric',
},
},
});
});
it('returns a histogram hint for a bucket series', () => {
const series = [{ datapoints: [[23, 1000]], query: 'metric_bucket', responseIndex: 0 }];
const hints = determineQueryHints(series);
expect(hints.length).toBe(1);
expect(hints[0]).toMatchObject({
label: 'Time series has buckets, you probably wanted a histogram.',
index: 0,
fix: {
action: {
type: 'ADD_HISTOGRAM_QUANTILE',
query: 'metric_bucket',
},
},
});
});
});
describe('extractRuleMappingFromGroups()', () => {
it('returns empty mapping for no rule groups', () => {
expect(extractRuleMappingFromGroups([])).toEqual({});
});
it('returns a mapping for recording rules only', () => {
const groups = [
{
rules: [
{
name: 'HighRequestLatency',
query: 'job:request_latency_seconds:mean5m{job="myjob"} > 0.5',
type: 'alerting',
},
{
name: 'job:http_inprogress_requests:sum',
query: 'sum(http_inprogress_requests) by (job)',
type: 'recording',
},
],
file: '/rules.yaml',
interval: 60,
name: 'example',
},
];
const mapping = extractRuleMappingFromGroups(groups);
expect(mapping).toEqual({ 'job:http_inprogress_requests:sum': 'sum(http_inprogress_requests) by (job)' });
});
});
describe('Prometheus regular escaping', () => {
it('should not escape non-string', () => {
expect(prometheusRegularEscape(12)).toEqual(12);

@ -111,7 +111,6 @@ describe('Prometheus Result Transformer', () => {
};
it('should convert cumulative histogram to regular', () => {
let result = [];
let options = {
format: 'heatmap',
start: 1445000010,
@ -119,7 +118,7 @@ describe('Prometheus Result Transformer', () => {
legendFormat: '{{le}}',
};
ctx.resultTransformer.transform(result, { data: response }, options);
const result = ctx.resultTransformer.transform({ data: response }, options);
expect(result).toEqual([
{ target: '1', datapoints: [[10, 1445000010000], [10, 1445000020000], [0, 1445000030000]] },
{ target: '2', datapoints: [[10, 1445000010000], [0, 1445000020000], [30, 1445000030000]] },
@ -172,14 +171,13 @@ describe('Prometheus Result Transformer', () => {
],
},
};
let result = [];
let options = {
format: 'timeseries',
start: 0,
end: 2,
};
ctx.resultTransformer.transform(result, { data: response }, options);
const result = ctx.resultTransformer.transform({ data: response }, options);
expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[10, 0], [10, 1000], [0, 2000]] }]);
});
@ -196,7 +194,6 @@ describe('Prometheus Result Transformer', () => {
],
},
};
let result = [];
let options = {
format: 'timeseries',
step: 1,
@ -204,7 +201,7 @@ describe('Prometheus Result Transformer', () => {
end: 2,
};
ctx.resultTransformer.transform(result, { data: response }, options);
const result = ctx.resultTransformer.transform({ data: response }, options);
expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[null, 0], [10, 1000], [0, 2000]] }]);
});
@ -221,7 +218,6 @@ describe('Prometheus Result Transformer', () => {
],
},
};
let result = [];
let options = {
format: 'timeseries',
step: 2,
@ -229,7 +225,7 @@ describe('Prometheus Result Transformer', () => {
end: 8,
};
ctx.resultTransformer.transform(result, { data: response }, options);
const result = ctx.resultTransformer.transform({ data: response }, options);
expect(result).toEqual([
{ target: 'test{job="testjob"}', datapoints: [[null, 0], [null, 2000], [10, 4000], [null, 6000], [10, 8000]] },
]);

@ -70,9 +70,9 @@ module.directive('graphLegend', function(popoverSrv, $timeout) {
var el = $(e.currentTarget);
var index = getSeriesIndexForElement(el);
var seriesInfo = seriesList[index];
var scrollPosition = $(elem.children('tbody')).scrollTop();
const scrollPosition = legendScrollbar.scroller.scrollTop;
ctrl.toggleSeries(seriesInfo, e);
$(elem.children('tbody')).scrollTop(scrollPosition);
legendScrollbar.scroller.scrollTop = scrollPosition;
}
function sortLegend(e) {

@ -158,4 +158,12 @@
.prom-query-field {
display: flex;
}
.prom-query-field-wrapper {
width: 100%;
}
.prom-query-field-info {
margin: 0.25em 0.5em 0.5em;
}
}

@ -16,7 +16,7 @@
}
.rc-cascader-menus.slide-up-enter,
.rc-cascader-menus.slide-up-appear {
animation-duration: .3s;
animation-duration: 0.3s;
animation-fill-mode: both;
transform-origin: 0 0;
opacity: 0;
@ -24,7 +24,7 @@
animation-play-state: paused;
}
.rc-cascader-menus.slide-up-leave {
animation-duration: .3s;
animation-duration: 0.3s;
animation-fill-mode: both;
transform-origin: 0 0;
opacity: 1;
@ -66,7 +66,7 @@
.rc-cascader-menu-item {
height: 32px;
line-height: 32px;
padding: 0 16px;
padding: 0 2.5em 0 16px;
cursor: pointer;
white-space: nowrap;
overflow: hidden;

@ -0,0 +1,67 @@
#!/bin/bash -e
PERMISSIONS_OK=0
if [ ! -r "$GF_PATHS_CONFIG" ]; then
echo "GF_PATHS_CONFIG='$GF_PATHS_CONFIG' is not readable."
PERMISSIONS_OK=1
fi
if [ ! -w "$GF_PATHS_DATA" ]; then
echo "GF_PATHS_DATA='$GF_PATHS_DATA' is not writable."
PERMISSIONS_OK=1
fi
if [ ! -r "$GF_PATHS_HOME" ]; then
echo "GF_PATHS_HOME='$GF_PATHS_HOME' is not readable."
PERMISSIONS_OK=1
fi
if [ $PERMISSIONS_OK -eq 1 ]; then
echo "You may have issues with file permissions, more information here: http://docs.grafana.org/installation/docker/#migration-from-a-previous-version-of-the-docker-container-to-5-1-or-later"
fi
if [ ! -d "$GF_PATHS_PLUGINS" ]; then
mkdir "$GF_PATHS_PLUGINS"
fi
if [ ! -z ${GF_AWS_PROFILES+x} ]; then
> "$GF_PATHS_HOME/.aws/credentials"
for profile in ${GF_AWS_PROFILES}; do
access_key_varname="GF_AWS_${profile}_ACCESS_KEY_ID"
secret_key_varname="GF_AWS_${profile}_SECRET_ACCESS_KEY"
region_varname="GF_AWS_${profile}_REGION"
if [ ! -z "${!access_key_varname}" -a ! -z "${!secret_key_varname}" ]; then
echo "[${profile}]" >> "$GF_PATHS_HOME/.aws/credentials"
echo "aws_access_key_id = ${!access_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
echo "aws_secret_access_key = ${!secret_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
if [ ! -z "${!region_varname}" ]; then
echo "region = ${!region_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
fi
fi
done
chmod 600 "$GF_PATHS_HOME/.aws/credentials"
fi
if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then
OLDIFS=$IFS
IFS=','
for plugin in ${GF_INSTALL_PLUGINS}; do
IFS=$OLDIFS
grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
done
fi
exec grafana-server \
--homepath="$GF_PATHS_HOME" \
--config="$GF_PATHS_CONFIG" \
"$@" \
cfg:default.log.mode="console" \
cfg:default.paths.data="$GF_PATHS_DATA" \
cfg:default.paths.logs="$GF_PATHS_LOGS" \
cfg:default.paths.plugins="$GF_PATHS_PLUGINS" \
cfg:default.paths.provisioning="$GF_PATHS_PROVISIONING"
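A rough usage sketch for this entrypoint, assuming the image built above: the script picks up `GF_INSTALL_PLUGINS` and the per-profile `GF_AWS_*` variables at container start. The image tag, plugin id, and AWS values below are illustrative placeholders, not part of the commit.
```bash
# Hypothetical invocation; GF_INSTALL_PLUGINS and the GF_AWS_* names are the
# variables run.sh reads above, while the image tag and values are placeholders.
docker run -d -p 3000:3000 \
  -e GF_INSTALL_PLUGINS="grafana-clock-panel" \
  -e GF_AWS_PROFILES="default" \
  -e GF_AWS_default_ACCESS_KEY_ID="..." \
  -e GF_AWS_default_SECRET_ACCESS_KEY="..." \
  -e GF_AWS_default_REGION="us-east-1" \
  grafana/grafana:dev
```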