diff --git a/.bra.toml b/.bra.toml index 15961e1e3fd..aa7a1680adc 100644 --- a/.bra.toml +++ b/.bra.toml @@ -4,6 +4,7 @@ init_cmds = [ ["./bin/grafana-server", "cfg:app_mode=development"] ] watch_all = true +follow_symlinks = true watch_dirs = [ "$WORKDIR/pkg", "$WORKDIR/public/views", diff --git a/.circleci/config.yml b/.circleci/config.yml index 1e046aec34d..a5497e6c7e8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ version: 2 jobs: mysql-integration-test: docker: - - image: circleci/golang:1.10 + - image: circleci/golang:1.11 - image: circleci/mysql:5.6-ram environment: MYSQL_ROOT_PASSWORD: rootpass @@ -32,14 +32,14 @@ jobs: - run: sudo apt update - run: sudo apt install -y mysql-client - run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s - - run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass + - run: cat devenv/docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass - run: name: mysql integration tests command: 'GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/... ./pkg/tsdb/mysql/... ' postgres-integration-test: docker: - - image: circleci/golang:1.10 + - image: circleci/golang:1.11 - image: circleci/postgres:9.3-ram environment: POSTGRES_USER: grafanatest @@ -51,7 +51,7 @@ jobs: - run: sudo apt update - run: sudo apt install -y postgresql-client - run: dockerize -wait tcp://127.0.0.1:5432 -timeout 120s - - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql' + - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f devenv/docker/blocks/postgres_tests/setup.sql' - run: name: postgres integration tests command: 'GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/... ./pkg/tsdb/postgres/...' @@ -74,22 +74,23 @@ jobs: gometalinter: docker: - - image: circleci/golang:1.10 + - image: circleci/golang:1.11 environment: # we need CGO because of go-sqlite3 CGO_ENABLED: 1 working_directory: /go/src/github.com/grafana/grafana steps: - checkout - - run: 'go get -u gopkg.in/alecthomas/gometalinter.v2' + - run: 'go get -u github.com/alecthomas/gometalinter' - run: 'go get -u github.com/tsenart/deadcode' + - run: 'go get -u github.com/jgautheron/goconst/cmd/goconst' - run: 'go get -u github.com/gordonklaus/ineffassign' - run: 'go get -u github.com/opennota/check/cmd/structcheck' - run: 'go get -u github.com/mdempsky/unconvert' - run: 'go get -u github.com/opennota/check/cmd/varcheck' - run: name: run linters - command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=goconst --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' - run: name: run go vet command: 'go vet ./pkg/...' 
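The lint step above now enables `goconst`, which flags string literals repeated often enough to deserve a named constant; the `build.go` hunk later in this diff, which extracts `windows` and `linux` constants, is exactly the kind of fix it asks for. A minimal sketch of the pattern, with hypothetical names rather than code from this repository:

```go
package main

import "fmt"

// Repeating a literal like "windows" across several comparisons is what
// goconst reports; extracting it to a named constant fixes the warning.
const windows = "windows"

// binaryName appends ".exe" on Windows, in the style of build.go.
func binaryName(goos, name string) string {
	if goos == windows {
		return name + ".exe"
	}
	return name
}

func main() {
	fmt.Println(binaryName("windows", "grafana-server")) // grafana-server.exe
	fmt.Println(binaryName("linux", "grafana-server"))   // grafana-server
}
```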
@@ -115,7 +116,7 @@ jobs: test-backend: docker: - - image: circleci/golang:1.10 + - image: circleci/golang:1.11 working_directory: /go/src/github.com/grafana/grafana steps: - checkout @@ -125,7 +126,7 @@ jobs: build-all: docker: - - image: grafana/build-container:1.0.0 + - image: grafana/build-container:1.2.0 working_directory: /go/src/github.com/grafana/grafana steps: - checkout @@ -157,18 +158,23 @@ jobs: name: sha-sum packages command: 'go run build.go sha-dist' - run: - name: Build Grafana.com publisher + name: Build Grafana.com master publisher command: 'go build -o scripts/publish scripts/build/publish.go' + - run: + name: Build Grafana.com release publisher + command: 'cd scripts/build/release_publisher && go build -o release_publisher .' - persist_to_workspace: root: . paths: - dist/grafana* - scripts/*.sh - scripts/publish + - scripts/build/release_publisher/release_publisher + - scripts/build/publish.sh build: docker: - - image: grafana/build-container:1.0.0 + - image: grafana/build-container:1.2.0 working_directory: /go/src/github.com/grafana/grafana steps: - checkout @@ -227,7 +233,7 @@ jobs: build-enterprise: docker: - - image: grafana/build-container:v0.1 + - image: grafana/build-container:1.2.0 working_directory: /go/src/github.com/grafana/grafana steps: - checkout @@ -298,8 +304,8 @@ jobs: name: deploy to s3 command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release' - run: - name: Trigger Windows build - command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release' + name: Deploy to Grafana.com + command: './scripts/build/publish.sh' workflows: version: 2 diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md deleted file mode 100644 index 769ba2a519b..00000000000 --- a/.github/CONTRIBUTING.md +++ /dev/null @@ -1,22 +0,0 @@ -Follow the setup guide in README.md - -### Rebuild frontend assets on source change -``` -yarn watch -``` - -### Rerun tests on source change -``` -yarn jest -``` - -### Run tests for backend assets before commit -``` -test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)" -``` - -### Run tests for frontend assets before commit -``` -yarn test -go test -v ./pkg/... -``` diff --git a/.gitignore b/.gitignore index bf97948d178..21083741e14 100644 --- a/.gitignore +++ b/.gitignore @@ -40,8 +40,8 @@ public/css/*.min.css conf/custom.ini fig.yml -docker-compose.yml -docker-compose.yaml +devenv/docker-compose.yml +devenv/docker-compose.yaml /conf/provisioning/**/custom.yaml /conf/provisioning/**/dev.yaml /conf/ldap_dev.toml @@ -54,6 +54,7 @@ profile.cov /pkg/cmd/grafana-server/grafana-server /pkg/cmd/grafana-server/debug /pkg/extensions +/public/app/extensions debug.test /examples/*/dist /packaging/**/*.rpm @@ -68,7 +69,9 @@ debug.test /vendor/**/*.yml /vendor/**/*_test.go /vendor/**/.editorconfig -/vendor/**/appengine* *.orig /devenv/bulk-dashboards/*.json +/devenv/bulk_alerting_dashboards/*.json + +/scripts/build/release_publisher/release_publisher diff --git a/CHANGELOG.md b/CHANGELOG.md index aed25afb02e..c80f2852f2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,77 +1,168 @@ -# 5.3.0 (unreleased) +# 5.4.0 (unreleased) +### New Features + +* **Alerting**: Option to disable OK alert notifications [#12330](https://github.com/grafana/grafana/issues/12330) & [#6696](https://github.com/grafana/grafana/issues/6696), thx [@davewat](https://github.com/davewat) +* **Postgres/MySQL/MSSQL**: Adds support for configuration of max open/idle connections and connection max lifetime. 
Also, panels with multiple SQL queries will now be executed concurrently [#11711](https://github.com/grafana/grafana/issues/11711), thx [@connection-reset](https://github.com/connection-reset) +* **MSSQL**: Add encrypt setting to allow configuration of how data sent between client and server is encrypted [#13629](https://github.com/grafana/grafana/issues/13629), thx [@ramiro](https://github.com/ramiro) +* **MySQL**: Support connecting through Unix socket for MySQL datasource [#12342](https://github.com/grafana/grafana/issues/12342), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino) + +### Minor + +* **Cloudwatch**: Show all available CloudWatch regions [#12308](https://github.com/grafana/grafana/issues/12308), thx [@mtanda](https://github.com/mtanda) +* **Units**: New clock time format for formatting ms or second values as, for example, `01h:59m` [#13635](https://github.com/grafana/grafana/issues/13635), thx [@franciscocpg](https://github.com/franciscocpg) +* **Datasource Proxy**: Keep trailing slash for datasource proxy requests [#13326](https://github.com/grafana/grafana/pull/13326), thx [@ryantxu](https://github.com/ryantxu) + +### Breaking changes + +* Postgres/MySQL/MSSQL datasources now by default use `max open connections` = `unlimited` (earlier 10), `max idle connections` = `2` (earlier 10) and `connection max lifetime` = `4` hours (earlier unlimited) + +# 5.3.2 (unreleased) + +* **Postgres**: Fix template variables error [#13692](https://github.com/grafana/grafana/issues/13692), thx [@svenklemm](https://github.com/svenklemm) +* **Cloudwatch**: Fix service panic because of race conditions [#13674](https://github.com/grafana/grafana/issues/13674), thx [@mtanda](https://github.com/mtanda) +* **LDAP**: Fix super admins can also be admins of orgs [#13710](https://github.com/grafana/grafana/issues/13710), thx [@adrien-f](https://github.com/adrien-f) + +# 5.3.1 (2018-10-16) + +* **Render**: Fix PhantomJS render of graph panel when legend displayed as table to the right [#13616](https://github.com/grafana/grafana/issues/13616) +* **Stackdriver**: Filter option disappears after removing initial filter [#13607](https://github.com/grafana/grafana/issues/13607) +* **Elasticsearch**: Fix no limit size in terms aggregation for alerting queries [#13172](https://github.com/grafana/grafana/issues/13172), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino) +* **InfluxDB**: Fix for annotation issue that caused text to be shown twice [#13553](https://github.com/grafana/grafana/issues/13553) +* **Variables**: Fix nesting variables leads to exception and missing refresh [#13628](https://github.com/grafana/grafana/issues/13628) +* **Variables**: Prometheus: Single letter labels are not supported [#13641](https://github.com/grafana/grafana/issues/13641), thx [@olshansky](https://github.com/olshansky) +* **Graph**: Fix graph time formatting for Last 24h ranges [#13650](https://github.com/grafana/grafana/issues/13650) +* **Playlist**: Fix cannot add dashboards with long names to playlist [#13464](https://github.com/grafana/grafana/issues/13464), thx [@neufeldtech](https://github.com/neufeldtech) +* **HTTP API**: Fix /api/org/users so that query and limit querystrings work + +# 5.3.0 (2018-10-10) + +* **Stackdriver**: Filter wildcards and regex matching are not yet supported [#13495](https://github.com/grafana/grafana/issues/13495) +* **Stackdriver**: Support the distribution metric type for heatmaps [#13559](https://github.com/grafana/grafana/issues/13559) +* **Cloudwatch**:
Automatically set graph yaxis unit [#13575](https://github.com/grafana/grafana/issues/13575), thx [@mtanda](https://github.com/mtanda) + +# 5.3.0-beta3 (2018-10-03) + +* **Stackdriver**: Fix for missing ngInject [#13511](https://github.com/grafana/grafana/pull/13511) +* **Permissions**: Fix for broken permissions selector [#13507](https://github.com/grafana/grafana/issues/13507) +* **Alerting**: Alert reminders deduping not working as expected when running multiple Grafana instances [#13492](https://github.com/grafana/grafana/issues/13492) + +# 5.3.0-beta2 (2018-10-01) + +### New Features + +* **Annotations**: Enable template variables in tagged annotations queries [#9735](https://github.com/grafana/grafana/issues/9735) +* **Stackdriver**: Support for Google Stackdriver Datasource [#13289](https://github.com/grafana/grafana/pull/13289) + +### Minor + +* **Provisioning**: Dashboard Provisioning now supports symlinks that change target [#12534](https://github.com/grafana/grafana/issues/12534), thx [@auhlig](https://github.com/auhlig) +* **OAuth**: Allow oauth email attribute name to be configurable [#12986](https://github.com/grafana/grafana/issues/12986), thx [@bobmshannon](https://github.com/bobmshannon) +* **Tags**: Default sort order for GetDashboardTags [#11681](https://github.com/grafana/grafana/pull/11681), thx [@Jonnymcc](https://github.com/Jonnymcc) +* **Prometheus**: Label completion queries respect dashboard time range [#12251](https://github.com/grafana/grafana/pull/12251), thx [@mtanda](https://github.com/mtanda) +* **Prometheus**: Allow displaying annotations based on Prometheus series value [#10159](https://github.com/grafana/grafana/issues/10159), thx [@mtanda](https://github.com/mtanda) +* **Prometheus**: Adhoc-filtering for Prometheus dashboards [#13212](https://github.com/grafana/grafana/issues/13212) +* **Singlestat**: Fix gauge display accuracy for percents [#13270](https://github.com/grafana/grafana/issues/13270), thx [@tianon](https://github.com/tianon) +* **Dashboard**: Prevent auto refresh from starting when loading dashboard with absolute time range [#12030](https://github.com/grafana/grafana/issues/12030) +* **Templating**: New templating variable type `Text box` that allows free text input [#3173](https://github.com/grafana/grafana/issues/3173) +* **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira) +* **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402) +* **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401) +* **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $__interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm) + +# 5.3.0-beta1 (2018-09-06) + +### New Major Features + +* **Alerting**: Notification reminders [#7330](https://github.com/grafana/grafana/issues/7330), thx [@jbaublitz](https://github.com/jbaublitz) +* **Dashboard**: TV & Kiosk mode changes, new cycle view mode button in dashboard toolbar [#13025](https://github.com/grafana/grafana/pull/13025) * **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht) -* **Dataproxy**: Pass configured/auth headers to
a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano) -* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon) +* **Postgres**: Graphical query builder [#10095](https://github.com/grafana/grafana/issues/10095), thx [svenklemm](https://github.com/svenklemm) + +### New Features + * **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622) -* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda) -* **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos) -* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476) * **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi) +* **Profile**: List teams that the user is a member of in the current/active organization [#12476](https://github.com/grafana/grafana/issues/12476) +* **Configuration**: Allow auto-assigning users to specific organization (other than Main Org.) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos) +* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano) +* **CloudWatch**: GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda) * **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm) +* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon) ### Minor -* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) -* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) -* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705) -* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) +* **Alerting**: It's now possible to configure the default value for how to handle errors and no data in alerting.
[#10424](https://github.com/grafana/grafana/issues/10424) +* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) +* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151) +* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier) +* **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079) +* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229) +* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) +* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) * **Prometheus**: Add $__interval, $__interval_ms, $__range, $__range_s & $__range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie) -* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) -* **Variables**: Limit amount of queries executed when updating variable that other variable(s) are dependent on [#11890](https://github.com/grafana/grafana/issues/11890) -* **Variables**: Support query variable refresh when another variable referenced in `Regex` field change its value [#12952](https://github.com/grafana/grafana/issues/12952), thx [@franciscocpg](https://github.com/franciscocpg) -* **Variables**: Support variables in query variable `Custom all value` field [#12965](https://github.com/grafana/grafana/issues/12965), thx [@franciscocpg](https://github.com/franciscocpg) +* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) +* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927) +* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen) * **Postgres/MySQL/MSSQL**: New $__unixEpochGroup and $__unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm) * **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm) 
* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm) * **Postgres/MySQL/MSSQL**: New $__timeGroupAlias macro. Postgres $__timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm) * **Postgres/MySQL/MSSQL**: Escape single quotes in variables [#12785](https://github.com/grafana/grafana/issues/12785), thx [@eMerzh](https://github.com/eMerzh) +* **Postgres/MySQL/MSSQL**: Min time interval support [#13157](https://github.com/grafana/grafana/issues/13157), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan) -* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) -* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) -* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg) * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88) * **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow) * **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z) +* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) +* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705) +* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda) +* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) * **Table**: Fix link color when using light theme and thresholds in use 
[#12766](https://github.com/grafana/grafana/issues/12766) * **Table**: Fix for useless horizontal scrollbar for table panel [#9964](https://github.com/grafana/grafana/issues/9964) * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) -* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) -* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj) +* **Heatmap**: Fix broken tooltip and crosshair on Firefox [#12486](https://github.com/grafana/grafana/issues/12486) +* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270) +* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) +* **Variables**: Limit amount of queries executed when updating variable that other variable(s) are dependent on [#11890](https://github.com/grafana/grafana/issues/11890) +* **Variables**: Support query variable refresh when another variable referenced in `Regex` field changes its value [#12952](https://github.com/grafana/grafana/issues/12952), thx [@franciscocpg](https://github.com/franciscocpg) +* **Variables**: Support variables in query variable `Custom all value` field [#12965](https://github.com/grafana/grafana/issues/12965), thx [@franciscocpg](https://github.com/franciscocpg) * **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) -* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda) +* **Units**: Adds bitcoin axes unit.
[#13125](https://github.com/grafana/grafana/pull/13125) +* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Logging**: Reopen log files after receiving a SIGHUP signal [#13112](https://github.com/grafana/grafana/pull/13112), thx [@filewalkwithme](https://github.com/filewalkwithme) +* **Login**: Show loading animation while waiting for authentication response on login [#12865](https://github.com/grafana/grafana/issues/12865) * **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig) -* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270) * **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud) -* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier) -* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927) -* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen) -* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229) -* **Heatmap**: Fix broken tooltip and crosshair on Firefox [#12486](https://github.com/grafana/grafana/issues/12486) -* **Login**: Show loading animation while waiting for authentication response on login [#12865](https://github.com/grafana/grafana/issues/12865) ### Breaking changes * Postgres datasource no longer automatically adds time column alias when using the $__timeGroup alias. However, there's code in place which should make this change backward compatible and shouldn't create any issues. +* Kiosk mode now also hides submenu (variables) +* ?inactive url parameter no longer supported, replaced with kiosk=tv url parameter ### New experimental features -These are new features that's still being worked on and are in an experimental phase. We incourage users to try these out and provide any feedback in related issue. +These are new features that are still being worked on and are in an experimental phase. We encourage users to try these out and provide any feedback in the related issues. * **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768) ### Tech * **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224) +* **Backend**: Upgrade to golang 1.11 [#13030](https://github.com/grafana/grafana/issues/13030) + +# 5.2.4 (2018-09-07) + +* **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error.
Fixes [#13079](https://github.com/grafana/grafana/issues/13079) # 5.2.3 (2018-08-29) @@ -277,7 +368,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4- * **Dashboard**: Sizing and positioning of settings menu icons [#11572](https://github.com/grafana/grafana/pull/11572) * **Dashboard**: Add search filter/tabs to new panel control [#10427](https://github.com/grafana/grafana/issues/10427) * **Folders**: User with org viewer role should not be able to save/move dashboards in/to general folder [#11553](https://github.com/grafana/grafana/issues/11553) -* **Influxdb**: Dont assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo) +* **Influxdb**: Don't assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo) ### Tech * Backend code simplification [#11613](https://github.com/grafana/grafana/pull/11613), thx [@knweiss](https://github.com/knweiss) @@ -464,7 +555,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4- # 4.6.2 (2017-11-16) ## Important -* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if your using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777) +* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if you're using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777) ## Fixes * **Color picker**: Bug after using textbox input field to change/paste color string [#9769](https://github.com/grafana/grafana/issues/9769) @@ -1423,7 +1514,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated **New features** - [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site -- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site +- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site - [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views - [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..8b2ba090fe1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,56 @@ + +# Contributing + +Grafana uses GitHub to manage contributions. 
+Contributions take the form of pull requests that will be reviewed by the core team. + +* If you are a new contributor, see [Steps to Contribute](#steps-to-contribute) + +* If you have a trivial fix or improvement, go ahead and create a pull request. + +* If you plan to do something more involved, discuss your idea on the respective [issue](https://github.com/grafana/grafana/issues) or create a [new issue](https://github.com/grafana/grafana/issues/new) if it does not exist. This will avoid unnecessary work and surely give you and us a good deal of inspiration. + + +## Steps to Contribute + +Should you wish to work on a GitHub issue, first check that it is not already assigned to someone. If it is free, claim it by commenting on the issue that you want to work on it. This is to prevent duplicated efforts from contributors on the same issue. + +Please check the [`beginner friendly`](https://github.com/grafana/grafana/issues?q=is%3Aopen+is%3Aissue+label%3A%22beginner+friendly%22) label to find issues that are good for getting started. If you have questions about one of the issues, with or without the tag, please comment on it and a member of the core team or the original poster will clarify it. + + + +## Setup + +Follow the setup guide in README.md + +### Rebuild frontend assets on source change +``` +yarn watch +``` + +### Rerun tests on source change +``` +yarn jest +``` + +### Run tests for backend assets before commit +``` +test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)" +``` + +### Run tests for frontend assets before commit +``` +yarn test +go test -v ./pkg/... +``` + + +## Pull Request Checklist + +* Branch from the master branch and, if needed, rebase to the current master branch before submitting your pull request. If it doesn't merge cleanly with master you may be asked to rebase your changes. + +* Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests). + +* If your patch is not getting reviewed or you need a specific person to review it, you can @-reply a reviewer asking for a review in the pull request or a comment. + +* Add tests relevant to the fixed bug or new feature.
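The checklist ends with adding tests; as a rough illustration, a minimal table-driven test in the style commonly used in the Go backend (run with `go test ./pkg/...`) could look like the sketch below. The function and cases are hypothetical, not taken from the code base:

```go
package example

import "testing"

// add stands in for whatever function a pull request touches.
func add(a, b int) int { return a + b }

// TestAdd uses the table-driven style: one slice of cases, one t.Run per case.
func TestAdd(t *testing.T) {
	cases := []struct {
		name string
		a, b int
		want int
	}{
		{"positive", 1, 2, 3},
		{"negative", -1, -2, -3},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			if got := add(tc.a, tc.b); got != tc.want {
				t.Errorf("add(%d, %d) = %d, want %d", tc.a, tc.b, got, tc.want)
			}
		})
	}
}
```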
diff --git a/Dockerfile b/Dockerfile index f7e45893c38..28dd71952af 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Golang build container -FROM golang:1.10 +FROM golang:1.11 WORKDIR $GOPATH/src/github.com/grafana/grafana diff --git a/Gopkg.lock b/Gopkg.lock index 6f08e208ecd..4286add847d 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -19,6 +19,12 @@ packages = ["."] revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520" +[[projects]] + branch = "master" + name = "github.com/VividCortex/mysqlerr" + packages = ["."] + revision = "6c6b55f8796f578c870b7e19bafb16103bc40095" + [[projects]] name = "github.com/aws/aws-sdk-go" packages = [ @@ -258,7 +264,7 @@ branch = "master" name = "github.com/hashicorp/yamux" packages = ["."] - revision = "2658be15c5f05e76244154714161f17e3e77de2e" + revision = "7221087c3d281fda5f794e28c2ea4c6e4d5c4558" [[projects]] name = "github.com/inconshreveable/log15" @@ -427,12 +433,6 @@ revision = "1744e2970ca51c86172c8190fadad617561ed6e7" version = "v1.0.0" -[[projects]] - branch = "master" - name = "github.com/shurcooL/sanitized_anchor_name" - packages = ["."] - revision = "86672fcb3f950f35f2e675df2240550f2a50762f" - [[projects]] name = "github.com/smartystreets/assertions" packages = [ @@ -507,6 +507,8 @@ branch = "master" name = "golang.org/x/crypto" packages = [ + "ed25519", + "ed25519/internal/edwards25519", "md4", "pbkdf2" ] @@ -670,6 +672,16 @@ revision = "e6179049628164864e6e84e973cfb56335748dea" version = "v2.3.2" +[[projects]] + name = "gopkg.in/square/go-jose.v2" + packages = [ + ".", + "cipher", + "json" + ] + revision = "ef984e69dd356202fd4e4910d4d9c24468bdf0b8" + version = "v2.1.9" + [[projects]] name = "gopkg.in/yaml.v2" packages = ["."] @@ -679,6 +691,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" + inputs-digest = "6f7f271afd27f78b7d8ebe27436fee72c9925fb82a978bdc57fde44e01f3ca51" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 6c91ec37221..e3cbdeabb5d 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -203,3 +203,11 @@ ignored = [ [[constraint]] name = "github.com/denisenkom/go-mssqldb" revision = "270bc3860bb94dd3a3ffd047377d746c5e276726" + +[[constraint]] + name = "github.com/VividCortex/mysqlerr" + branch = "master" + +[[constraint]] + name = "gopkg.in/square/go-jose.v2" + version = "2.1.9" diff --git a/Gruntfile.js b/Gruntfile.js index 8a71fb44148..2d5990b5f58 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -25,7 +25,6 @@ module.exports = function (grunt) { } } - config.coverage = grunt.option('coverage'); config.phjs = grunt.option('phjsToRelease'); config.pkg.version = grunt.option('pkgVer') || config.pkg.version; diff --git a/Makefile b/Makefile index c6915409ed7..c9e51d897f3 100644 --- a/Makefile +++ b/Makefile @@ -43,6 +43,3 @@ test: test-go test-js run: ./bin/grafana-server - -protoc: - protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/. diff --git a/PLUGIN_DEV.md b/PLUGIN_DEV.md index 4e2e080ebe6..168b21dbd88 100644 --- a/PLUGIN_DEV.md +++ b/PLUGIN_DEV.md @@ -6,8 +6,8 @@ upgrading Grafana please check here before creating an issue. 
## Links -- [Datasource plugin written in typescript](https://github.com/grafana/typescript-template-datasource) -- [Simple json dataource plugin](https://github.com/grafana/simple-json-datasource) +- [Datasource plugin written in TypeScript](https://github.com/grafana/typescript-template-datasource) +- [Simple JSON datasource plugin](https://github.com/grafana/simple-json-datasource) - [Plugin development guide](http://docs.grafana.org/plugins/developing/development/) - [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack) diff --git a/README.md b/README.md index 74fb10c8066..5882ea8a6a3 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ the latest master builds [here](https://grafana.com/grafana/download) ### Dependencies -- Go 1.10 +- Go (Latest Stable) - NodeJS LTS ### Building the backend @@ -69,15 +69,27 @@ bra run Open grafana in your browser (default: `http://localhost:3000`) and login with admin user (default: `user/pass = admin/admin`). -### Building a docker image (on linux/amd64) +### Building a Docker image -This builds a docker image from your local sources: +There are two different ways to build a Grafana Docker image. If your machine is set up for Grafana development and you run linux/amd64, you can build just the image. Otherwise, there is the option to build Grafana completely within Docker. + +Run the image you have built using: `docker run --rm -p 3000:3000 grafana/grafana:dev` + +#### Building on linux/amd64 (fast) 1. Build the frontend `go run build.go build-frontend` 2. Build the docker image `make build-docker-dev` The resulting image will be tagged as `grafana/grafana:dev` +#### Building anywhere (slower) + +Choose this option to build on platforms other than linux/amd64 and/or to avoid having to set up the Grafana development environment. + +1. `make build-docker-full` or `docker build -t grafana/grafana:dev .` + +The resulting image will be tagged as `grafana/grafana:dev` + ### Dev config Create a custom.ini in the conf directory to override default configuration options. @@ -113,18 +125,6 @@ GRAFANA_TEST_DB=mysql go test ./pkg/... GRAFANA_TEST_DB=postgres go test ./pkg/... ``` -## Building custom docker image - -You can build a custom image using Docker, which doesn't require installing any dependencies besides docker itself. -```bash -git clone https://github.com/grafana/grafana -cd grafana -docker build -t grafana:dev . -docker run -d --name=grafana -p 3000:3000 grafana:dev -``` - -Open grafana in your browser (default: `http://localhost:3000`) and login with admin user (default: `user/pass = admin/admin`). - ## Contribute If you have any idea for an improvement or found a bug, do not hesitate to open an issue. @@ -138,5 +138,5 @@ plugin development. ## License -Grafana is distributed under Apache 2.0 License. +Grafana is distributed under [Apache 2.0 License](https://github.com/grafana/grafana/blob/master/LICENSE.md). diff --git a/UPGRADING_DEPENDENCIES.md b/UPGRADING_DEPENDENCIES.md new file mode 100644 index 00000000000..f3d2adbd71a --- /dev/null +++ b/UPGRADING_DEPENDENCIES.md @@ -0,0 +1,89 @@ +# Guide to Upgrading Dependencies + +Upgrading Go or Node.js requires making changes in many different files. See below for a list and explanation for each. + +## Go + +- CircleCI +- `grafana/build-container` +- Appveyor +- Dockerfile + +## Node.js + +- CircleCI +- `grafana/build-container` +- Appveyor +- Dockerfile + +## Go Dependencies + +Updated using `dep`.
+ +- `Gopkg.toml` +- `Gopkg.lock` + +## Node.js Dependencies + +Updated using `yarn`. + +- `package.json` + +## Where to make changes + +### CircleCI + +Our builds run on CircleCI through our build script. + +#### Files + +- `.circleci/config.yml`. + +#### Dependencies + +- nodejs +- golang +- grafana/build-container (our custom docker build container) + +### grafana/build-container + +The main build step (in CircleCI) is built using a custom build container that comes pre-baked with some of the necessary dependencies. + +Link: [grafana-build-container](https://github.com/grafana/grafana-build-container) + +#### Dependencies + +- fpm +- nodejs +- golang +- crosscompiling (several compilers) + +### Appveyor + +Master and release builds trigger test runs on Appveyor's build environment so that tests will run on Windows. + +#### Files: + +- `appveyor.yml` + +#### Dependencies + +- nodejs +- golang + +### Dockerfile + +There is a Docker build for Grafana in the root of the project that allows anyone to build Grafana just using Docker. + +#### Files + +- `Dockerfile` + +#### Dependencies + +- nodejs +- golang + +### Local developer environments + +Please send out a notice in the grafana-dev slack channel when updating Go or Node.js to make it easier for everyone to update their local developer environments. \ No newline at end of file diff --git a/appveyor.yml b/appveyor.yml index 5cdec1b8bf5..4bbd3668e19 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -5,9 +5,9 @@ os: Windows Server 2012 R2 clone_folder: c:\gopath\src\github.com\grafana\grafana environment: - nodejs_version: "6" + nodejs_version: "8" GOPATH: C:\gopath - GOVERSION: 1.10 + GOVERSION: 1.11 install: - rmdir c:\go /s /q diff --git a/build.go b/build.go index 561dd70df0e..69fbf3bada8 100644 --- a/build.go +++ b/build.go @@ -22,6 +22,11 @@ import ( "time" ) +const ( + windows = "windows" + linux = "linux" +) + var ( //versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) goarch string @@ -110,17 +115,16 @@ func main() { case "package": grunt(gruntBuildArg("build")...) grunt(gruntBuildArg("package")...) - if goos == "linux" { + if goos == linux { createLinuxPackages() } case "package-only": grunt(gruntBuildArg("package")...) - if goos == "linux" { + if goos == linux { createLinuxPackages() } - case "pkg-rpm": grunt(gruntBuildArg("release")...) createRpmPackages() @@ -379,7 +383,7 @@ func ensureGoPath() { } func grunt(params ...string) { - if runtime.GOOS == "windows" { + if runtime.GOOS == windows { runPrint(`.\node_modules\.bin\grunt`, params...) } else { runPrint("./node_modules/.bin/grunt", params...)
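The `grunt` hunk above shows the pattern this change applies throughout `build.go`: compare `runtime.GOOS` against the new named constants and pick a platform-specific binary path. A self-contained sketch of that pattern, assuming the usual `node_modules` layout (an illustration, not the actual build code):

```go
package main

import (
	"log"
	"os"
	"os/exec"
	"runtime"
)

const windows = "windows"

// grunt runs the locally installed grunt binary, choosing the path
// style appropriate for the current platform.
func grunt(params ...string) error {
	bin := "./node_modules/.bin/grunt"
	if runtime.GOOS == windows {
		bin = `.\node_modules\.bin\grunt`
	}
	cmd := exec.Command(bin, params...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	return cmd.Run()
}

func main() {
	if err := grunt("release"); err != nil {
		log.Fatal(err)
	}
}
```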
@@ -417,11 +421,11 @@ func test(pkg string) { func build(binaryName, pkg string, tags []string) { binary := fmt.Sprintf("./bin/%s-%s/%s", goos, goarch, binaryName) if isDev { - //dont include os and arch in output path in dev environment + //don't include os and arch in output path in dev environment binary = fmt.Sprintf("./bin/%s", binaryName) } - if goos == "windows" { + if goos == windows { binary += ".exe" } @@ -485,11 +489,11 @@ func clean() { func setBuildEnv() { os.Setenv("GOOS", goos) - if goos == "windows" { + if goos == windows { // require windows >=7 os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601") } - if goarch != "amd64" || goos != "linux" { + if goarch != "amd64" || goos != linux { // needed for all other archs cgo = true } diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index b2a839365ac..00000000000 --- a/codecov.yml +++ /dev/null @@ -1,11 +0,0 @@ -coverage: - precision: 2 - round: down - range: "50...100" - - status: - project: yes - patch: yes - changes: no - -comment: off diff --git a/conf/defaults.ini b/conf/defaults.ini index 90fc144c6e0..eb8debc0094 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -321,6 +321,7 @@ allow_sign_up = true client_id = some_id client_secret = some_secret scopes = user:email +email_attribute_name = email:primary auth_url = token_url = api_url = @@ -467,6 +468,16 @@ enabled = true # Makes it possible to turn off alert rule execution but alerting UI is visible execute_alerts = true +# Default setting for new alert rules. Defaults to categorizing errors and timeouts as alerting. (alerting, keep_state) +error_or_timeout = alerting + +# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok) +nodata_or_nullvalues = no_data + +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section @@ -538,3 +549,8 @@ container_name = [external_image_storage.local] # does not require any configuration + +[rendering] +# Options to configure external image rendering server like https://github.com/grafana/grafana-image-renderer +server_url = +callback_url = diff --git a/conf/ldap.toml b/conf/ldap.toml index 9a7088ed823..b684f2556d5 100644 --- a/conf/ldap.toml +++ b/conf/ldap.toml @@ -31,37 +31,11 @@ search_filter = "(cn=%s)" # An array of base dns to search through search_base_dns = ["dc=grafana,dc=org"] -# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups. -# This is done by enabling group_search_filter below. You must also set member_of= "cn" -# in [servers.attributes] below. - -# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN -# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of -# below in such a way that the user's recursive group membership is considered. -# -# Nested Groups + Active Directory (AD) Example: -# -# AD groups store the Distinguished Names (DNs) of members, so your filter must -recursively search your groups for the authenticating user's DN.
For example: -# -# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)" -# group_search_filter_user_attribute = "distinguishedName" -# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] -# -# [servers.attributes] -# ... -# member_of = "distinguishedName" - -## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) +## For POSIX or LDAP setups that do not support the member_of attribute you can define the settings below +## Please check the Grafana LDAP docs for examples # group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" -## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter. -## Defaults to the value of username in [server.attributes] -## Valid options are any of your values in [servers.attributes] -## If you are using nested groups you probably want to set this and member_of in -## [servers.attributes] to "distinguishedName" -# group_search_filter_user_attribute = "distinguishedName" -## An array of the base DNs to search through for groups. Typically uses ou=groups # group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] +# group_search_filter_user_attribute = "uid" # Specify names of the ldap attributes your ldap uses [servers.attributes] diff --git a/conf/sample.ini b/conf/sample.ini index 4291071e026..e6a03718d19 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -387,6 +387,16 @@ log_queries = # Makes it possible to turn off alert rule execution but alerting UI is visible ;execute_alerts = true +# Default setting for new alert rules. Defaults to categorizing errors and timeouts as alerting. (alerting, keep_state) +;error_or_timeout = alerting + +# Default setting for how Grafana handles nodata or null values in alerting.
(alerting, no_data, keep_state, ok) +;nodata_or_nullvalues = no_data + +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +;concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section @@ -425,7 +435,7 @@ log_queries = ;sampler_param = 1 #################################### Grafana.com integration ########################## -# Url used to to import dashboards directly from Grafana.com +# Url used to import dashboards directly from Grafana.com [grafana_com] ;url = https://grafana.com @@ -460,3 +470,8 @@ log_queries = [external_image_storage.local] # does not require any configuration + +[rendering] +# Options to configure external image rendering server like https://github.com/grafana/grafana-image-renderer +;server_url = +;callback_url = diff --git a/scripts/benchmarks/ab/ab_test.sh b/devenv/benchmarks/ab/ab_test.sh similarity index 100% rename from scripts/benchmarks/ab/ab_test.sh rename to devenv/benchmarks/ab/ab_test.sh diff --git a/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml new file mode 100644 index 00000000000..1ede5dcd30a --- /dev/null +++ b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'Bulk alerting dashboards' + folder: 'Bulk alerting dashboards' + type: file + options: + path: devenv/bulk_alerting_dashboards + diff --git a/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet new file mode 100644 index 00000000000..a7acd57745d --- /dev/null +++ b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet @@ -0,0 +1,168 @@ +{ + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 65 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "10s", + "handler": 1, + "name": "bulk alerting", + "noDataState": "no_data", + "notifications": [] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-prometheus", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "$$hashKey": "object:117", + "expr": "go_goroutines", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 50 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + 
"values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New dashboard", + "uid": null, + "version": 0 +} \ No newline at end of file diff --git a/docker/create_docker_compose.sh b/devenv/create_docker_compose.sh similarity index 94% rename from docker/create_docker_compose.sh rename to devenv/create_docker_compose.sh index 9d28ede8e7e..5da9e8f5c8f 100755 --- a/docker/create_docker_compose.sh +++ b/devenv/create_docker_compose.sh @@ -1,13 +1,13 @@ #!/bin/bash -blocks_dir=blocks +blocks_dir=docker/blocks docker_dir=docker template_dir=templates grafana_config_file=conf.tmp grafana_config=config -compose_header_file=compose_header.yml +compose_header_file=docker/compose_header.yml fig_file=docker-compose.yaml fig_config=docker-compose.yaml diff --git a/devenv/dev-dashboards/panel_tests_polystat.json b/devenv/dev-dashboards/panel_tests_polystat.json new file mode 100644 index 00000000000..51d3085c438 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_polystat.json @@ -0,0 +1,3343 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "animationModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "d3DivId": "d3_svg_4", + "datasource": "gdev-testdata", + "decimals": 2, + "displayModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "fontSizes": [ + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 22, + 24, + 26, + 28, + 30, + 32, + 34, + 36, + 38, + 40, + 42, + 44, + 46, + 48, + 50, + 52, + 54, + 56, + 58, + 60, + 62, + 64, + 66, + 68, + 70 + ], + "fontTypes": [ + "Open Sans", + "Arial", + "Avant Garde", + "Bookman", + "Consolas", + "Courier", + "Courier New", + "Futura", + "Garamond", + "Helvetica", + "Palatino", + "Times", + "Times New Roman", + "Verdana" + ], + "format": "none", + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 4, + "links": [], + "notcolors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "operatorName": "avg", + "operatorOptions": [ + { + "text": "Average", + "value": "avg" + }, + { + "text": "Count", + "value": "count" + }, + { + "text": "Current", + "value": "current" + }, + { + "text": "Delta", + "value": "delta" + }, + { + "text": "Difference", + "value": "diff" + }, + { + "text": "First", + "value": "first" + }, + { + "text": "Log Min", + "value": "logmin" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Min", + "value": "min" + }, + { + "text": "Name", + "value": 
"name" + }, + { + "text": "Time of Last Point", + "value": "last_time" + }, + { + "text": "Time Step", + "value": "time_step" + }, + { + "text": "Total", + "value": "total" + } + ], + "polystat": { + "animationSpeed": 2500, + "columnAutoSize": true, + "columns": "", + "defaultClickThrough": "", + "defaultClickThroughSanitize": true, + "displayLimit": 100, + "fontAutoScale": true, + "fontSize": 12, + "globalDisplayMode": "all", + "globalOperatorName": "avg", + "gradientEnabled": true, + "hexagonSortByDirection": "asc", + "hexagonSortByField": "name", + "maxMetrics": 0, + "polygonBorderColor": "black", + "polygonBorderSize": 2, + "radius": "", + "radiusAutoSize": true, + "rowAutoSize": true, + "rows": "", + "shape": "hexagon_pointed_top", + "tooltipDisplayMode": "all", + "tooltipDisplayTextTriggeredEmpty": "OK", + "tooltipFontSize": 12, + "tooltipFontType": "Open Sans", + "tooltipPrimarySortDirection": "desc", + "tooltipPrimarySortField": "thresholdLevel", + "tooltipSecondarySortDirection": "desc", + "tooltipSecondarySortField": "value", + "tooltipTimestampEnabled": true + }, + "savedComposites": [], + "savedOverrides": [], + "shapes": [ + { + "text": "Hexagon Pointed Top", + "value": "hexagon_pointed_top" + }, + { + "text": "Hexagon Flat Top", + "value": "hexagon_flat_top" + }, + { + "text": "Circle", + "value": "circle" + }, + { + "text": "Cross", + "value": "cross" + }, + { + "text": "Diamond", + "value": "diamond" + }, + { + "text": "Square", + "value": "square" + }, + { + "text": "Star", + "value": "star" + }, + { + "text": "Triangle", + "value": "triangle" + }, + { + "text": "Wye", + "value": "wye" + } + ], + "sortDirections": [ + { + "text": "Ascending", + "value": "asc" + }, + { + "text": "Descending", + "value": "desc" + } + ], + "sortFields": [ + { + "text": "Name", + "value": "name" + }, + { + "text": "Threshold Level", + "value": "thresholdLevel" + }, + { + "text": "Value", + "value": "value" + } + ], + "svgContainer": {}, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "B", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "C", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "D", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "E", + "scenarioId": "random_walk" + } + ], + "thresholdStates": [ + { + "text": "ok", + "value": 0 + }, + { + "text": "warning", + "value": 1 + }, + { + "text": "critical", + "value": 2 + }, + { + "text": "custom", + "value": 3 + } + ], + "title": "Poor use of space", + "type": "grafana-polystat-panel", + "unitFormats": [ + { + "submenu": [ + { + "text": "none", + "value": "none" + }, + { + "text": "short", + "value": "short" + }, + { + "text": "percent (0-100)", + "value": "percent" + }, + { + "text": "percent (0.0-1.0)", + "value": "percentunit" + }, + { + "text": "Humidity (%H)", + "value": "humidity" + }, + { + "text": "decibel", + "value": "dB" + }, + { + "text": "hexadecimal (0x)", + "value": "hex0x" + }, + { + "text": "hexadecimal", + "value": "hex" + }, + { + "text": "scientific notation", + "value": "sci" + }, + { + "text": "locale format", + "value": "locale" + } + ], + "text": "none" + }, + { + "submenu": [ + { + "text": "Dollars ($)", + "value": "currencyUSD" + }, + { + "text": "Pounds 
(£)", + "value": "currencyGBP" + }, + { + "text": "Euro (€)", + "value": "currencyEUR" + }, + { + "text": "Yen (¥)", + "value": "currencyJPY" + }, + { + "text": "Rubles (₽)", + "value": "currencyRUB" + }, + { + "text": "Hryvnias (₴)", + "value": "currencyUAH" + }, + { + "text": "Real (R$)", + "value": "currencyBRL" + }, + { + "text": "Danish Krone (kr)", + "value": "currencyDKK" + }, + { + "text": "Icelandic Króna (kr)", + "value": "currencyISK" + }, + { + "text": "Norwegian Krone (kr)", + "value": "currencyNOK" + }, + { + "text": "Swedish Krona (kr)", + "value": "currencySEK" + }, + { + "text": "Czech koruna (czk)", + "value": "currencyCZK" + }, + { + "text": "Swiss franc (CHF)", + "value": "currencyCHF" + }, + { + "text": "Polish Złoty (PLN)", + "value": "currencyPLN" + }, + { + "text": "Bitcoin (฿)", + "value": "currencyBTC" + } + ], + "text": "currency" + }, + { + "submenu": [ + { + "text": "Hertz (1/s)", + "value": "hertz" + }, + { + "text": "nanoseconds (ns)", + "value": "ns" + }, + { + "text": "microseconds (µs)", + "value": "µs" + }, + { + "text": "milliseconds (ms)", + "value": "ms" + }, + { + "text": "seconds (s)", + "value": "s" + }, + { + "text": "minutes (m)", + "value": "m" + }, + { + "text": "hours (h)", + "value": "h" + }, + { + "text": "days (d)", + "value": "d" + }, + { + "text": "duration (ms)", + "value": "dtdurationms" + }, + { + "text": "duration (s)", + "value": "dtdurations" + }, + { + "text": "duration (hh:mm:ss)", + "value": "dthms" + }, + { + "text": "Timeticks (s/100)", + "value": "timeticks" + } + ], + "text": "time" + }, + { + "submenu": [ + { + "text": "YYYY-MM-DD HH:mm:ss", + "value": "dateTimeAsIso" + }, + { + "text": "DD/MM/YYYY h:mm:ss a", + "value": "dateTimeAsUS" + }, + { + "text": "From Now", + "value": "dateTimeFromNow" + } + ], + "text": "date & time" + }, + { + "submenu": [ + { + "text": "bits", + "value": "bits" + }, + { + "text": "bytes", + "value": "bytes" + }, + { + "text": "kibibytes", + "value": "kbytes" + }, + { + "text": "mebibytes", + "value": "mbytes" + }, + { + "text": "gibibytes", + "value": "gbytes" + } + ], + "text": "data (IEC)" + }, + { + "submenu": [ + { + "text": "bits", + "value": "decbits" + }, + { + "text": "bytes", + "value": "decbytes" + }, + { + "text": "kilobytes", + "value": "deckbytes" + }, + { + "text": "megabytes", + "value": "decmbytes" + }, + { + "text": "gigabytes", + "value": "decgbytes" + } + ], + "text": "data (Metric)" + }, + { + "submenu": [ + { + "text": "packets/sec", + "value": "pps" + }, + { + "text": "bits/sec", + "value": "bps" + }, + { + "text": "bytes/sec", + "value": "Bps" + }, + { + "text": "kilobits/sec", + "value": "Kbits" + }, + { + "text": "kilobytes/sec", + "value": "KBs" + }, + { + "text": "megabits/sec", + "value": "Mbits" + }, + { + "text": "megabytes/sec", + "value": "MBs" + }, + { + "text": "gigabytes/sec", + "value": "GBs" + }, + { + "text": "gigabits/sec", + "value": "Gbits" + } + ], + "text": "data rate" + }, + { + "submenu": [ + { + "text": "hashes/sec", + "value": "Hs" + }, + { + "text": "kilohashes/sec", + "value": "KHs" + }, + { + "text": "megahashes/sec", + "value": "MHs" + }, + { + "text": "gigahashes/sec", + "value": "GHs" + }, + { + "text": "terahashes/sec", + "value": "THs" + }, + { + "text": "petahashes/sec", + "value": "PHs" + }, + { + "text": "exahashes/sec", + "value": "EHs" + } + ], + "text": "hash rate" + }, + { + "submenu": [ + { + "text": "ops/sec (ops)", + "value": "ops" + }, + { + "text": "requests/sec (rps)", + "value": "reqps" + }, + { + "text": "reads/sec (rps)", + 
"value": "rps" + }, + { + "text": "writes/sec (wps)", + "value": "wps" + }, + { + "text": "I/O ops/sec (iops)", + "value": "iops" + }, + { + "text": "ops/min (opm)", + "value": "opm" + }, + { + "text": "reads/min (rpm)", + "value": "rpm" + }, + { + "text": "writes/min (wpm)", + "value": "wpm" + } + ], + "text": "throughput" + }, + { + "submenu": [ + { + "text": "millimetre (mm)", + "value": "lengthmm" + }, + { + "text": "meter (m)", + "value": "lengthm" + }, + { + "text": "feet (ft)", + "value": "lengthft" + }, + { + "text": "kilometer (km)", + "value": "lengthkm" + }, + { + "text": "mile (mi)", + "value": "lengthmi" + } + ], + "text": "length" + }, + { + "submenu": [ + { + "text": "Square Meters (m²)", + "value": "areaM2" + }, + { + "text": "Square Feet (ft²)", + "value": "areaF2" + }, + { + "text": "Square Miles (mi²)", + "value": "areaMI2" + } + ], + "text": "area" + }, + { + "submenu": [ + { + "text": "milligram (mg)", + "value": "massmg" + }, + { + "text": "gram (g)", + "value": "massg" + }, + { + "text": "kilogram (kg)", + "value": "masskg" + }, + { + "text": "metric ton (t)", + "value": "masst" + } + ], + "text": "mass" + }, + { + "submenu": [ + { + "text": "metres/second (m/s)", + "value": "velocityms" + }, + { + "text": "kilometers/hour (km/h)", + "value": "velocitykmh" + }, + { + "text": "miles/hour (mph)", + "value": "velocitymph" + }, + { + "text": "knot (kn)", + "value": "velocityknot" + } + ], + "text": "velocity" + }, + { + "submenu": [ + { + "text": "millilitre (mL)", + "value": "mlitre" + }, + { + "text": "litre (L)", + "value": "litre" + }, + { + "text": "cubic metre", + "value": "m3" + }, + { + "text": "Normal cubic metre", + "value": "Nm3" + }, + { + "text": "cubic decimetre", + "value": "dm3" + }, + { + "text": "gallons", + "value": "gallons" + } + ], + "text": "volume" + }, + { + "submenu": [ + { + "text": "Watt (W)", + "value": "watt" + }, + { + "text": "Kilowatt (kW)", + "value": "kwatt" + }, + { + "text": "Milliwatt (mW)", + "value": "mwatt" + }, + { + "text": "Watt per square metre (W/m²)", + "value": "Wm2" + }, + { + "text": "Volt-ampere (VA)", + "value": "voltamp" + }, + { + "text": "Kilovolt-ampere (kVA)", + "value": "kvoltamp" + }, + { + "text": "Volt-ampere reactive (var)", + "value": "voltampreact" + }, + { + "text": "Kilovolt-ampere reactive (kvar)", + "value": "kvoltampreact" + }, + { + "text": "Watt-hour (Wh)", + "value": "watth" + }, + { + "text": "Kilowatt-hour (kWh)", + "value": "kwatth" + }, + { + "text": "Kilowatt-min (kWm)", + "value": "kwattm" + }, + { + "text": "Joule (J)", + "value": "joule" + }, + { + "text": "Electron volt (eV)", + "value": "ev" + }, + { + "text": "Ampere (A)", + "value": "amp" + }, + { + "text": "Kiloampere (kA)", + "value": "kamp" + }, + { + "text": "Milliampere (mA)", + "value": "mamp" + }, + { + "text": "Volt (V)", + "value": "volt" + }, + { + "text": "Kilovolt (kV)", + "value": "kvolt" + }, + { + "text": "Millivolt (mV)", + "value": "mvolt" + }, + { + "text": "Decibel-milliwatt (dBm)", + "value": "dBm" + }, + { + "text": "Ohm (Ω)", + "value": "ohm" + }, + { + "text": "Lumens (Lm)", + "value": "lumens" + } + ], + "text": "energy" + }, + { + "submenu": [ + { + "text": "Celsius (°C)", + "value": "celsius" + }, + { + "text": "Farenheit (°F)", + "value": "farenheit" + }, + { + "text": "Kelvin (K)", + "value": "kelvin" + } + ], + "text": "temperature" + }, + { + "submenu": [ + { + "text": "Millibars", + "value": "pressurembar" + }, + { + "text": "Bars", + "value": "pressurebar" + }, + { + "text": "Kilobars", + "value": 
"pressurekbar" + }, + { + "text": "Hectopascals", + "value": "pressurehpa" + }, + { + "text": "Kilopascals", + "value": "pressurekpa" + }, + { + "text": "Inches of mercury", + "value": "pressurehg" + }, + { + "text": "PSI", + "value": "pressurepsi" + } + ], + "text": "pressure" + }, + { + "submenu": [ + { + "text": "Newton-meters (Nm)", + "value": "forceNm" + }, + { + "text": "Kilonewton-meters (kNm)", + "value": "forcekNm" + }, + { + "text": "Newtons (N)", + "value": "forceN" + }, + { + "text": "Kilonewtons (kN)", + "value": "forcekN" + } + ], + "text": "force" + }, + { + "submenu": [ + { + "text": "Gallons/min (gpm)", + "value": "flowgpm" + }, + { + "text": "Cubic meters/sec (cms)", + "value": "flowcms" + }, + { + "text": "Cubic feet/sec (cfs)", + "value": "flowcfs" + }, + { + "text": "Cubic feet/min (cfm)", + "value": "flowcfm" + }, + { + "text": "Litre/hour", + "value": "litreh" + }, + { + "text": "Litre/min (l/min)", + "value": "flowlpm" + }, + { + "text": "milliLitre/min (mL/min)", + "value": "flowmlpm" + } + ], + "text": "flow" + }, + { + "submenu": [ + { + "text": "Degrees (°)", + "value": "degree" + }, + { + "text": "Radians", + "value": "radian" + }, + { + "text": "Gradian", + "value": "grad" + } + ], + "text": "angle" + }, + { + "submenu": [ + { + "text": "Meters/sec²", + "value": "accMS2" + }, + { + "text": "Feet/sec²", + "value": "accFS2" + }, + { + "text": "G unit", + "value": "accG" + } + ], + "text": "acceleration" + }, + { + "submenu": [ + { + "text": "Becquerel (Bq)", + "value": "radbq" + }, + { + "text": "curie (Ci)", + "value": "radci" + }, + { + "text": "Gray (Gy)", + "value": "radgy" + }, + { + "text": "rad", + "value": "radrad" + }, + { + "text": "Sievert (Sv)", + "value": "radsv" + }, + { + "text": "rem", + "value": "radrem" + }, + { + "text": "Exposure (C/kg)", + "value": "radexpckg" + }, + { + "text": "roentgen (R)", + "value": "radr" + }, + { + "text": "Sievert/hour (Sv/h)", + "value": "radsvh" + } + ], + "text": "radiation" + }, + { + "submenu": [ + { + "text": "parts-per-million (ppm)", + "value": "ppm" + }, + { + "text": "parts-per-billion (ppb)", + "value": "conppb" + }, + { + "text": "nanogram per cubic metre (ng/m³)", + "value": "conngm3" + }, + { + "text": "nanogram per normal cubic metre (ng/Nm³)", + "value": "conngNm3" + }, + { + "text": "microgram per cubic metre (μg/m³)", + "value": "conμgm3" + }, + { + "text": "microgram per normal cubic metre (μg/Nm³)", + "value": "conμgNm3" + }, + { + "text": "milligram per cubic metre (mg/m³)", + "value": "conmgm3" + }, + { + "text": "milligram per normal cubic metre (mg/Nm³)", + "value": "conmgNm3" + }, + { + "text": "gram per cubic metre (g/m³)", + "value": "congm3" + }, + { + "text": "gram per normal cubic metre (g/Nm³)", + "value": "congNm3" + } + ], + "text": "concentration" + } + ] + }, + { + "animationModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "d3DivId": "d3_svg_5", + "datasource": "gdev-testdata", + "decimals": 2, + "displayModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "fontSizes": [ + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 22, + 24, + 26, + 28, + 30, + 32, + 34, + 36, + 38, + 40, + 42, + 44, + 46, + 48, + 50, + 52, + 54, + 56, + 58, + 60, + 62, + 64, + 66, + 68, + 70 + ], + "fontTypes": [ + "Open Sans", + "Arial", + 
"Avant Garde", + "Bookman", + "Consolas", + "Courier", + "Courier New", + "Futura", + "Garamond", + "Helvetica", + "Palatino", + "Times", + "Times New Roman", + "Verdana" + ], + "format": "none", + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 5, + "links": [], + "notcolors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "operatorName": "avg", + "operatorOptions": [ + { + "text": "Average", + "value": "avg" + }, + { + "text": "Count", + "value": "count" + }, + { + "text": "Current", + "value": "current" + }, + { + "text": "Delta", + "value": "delta" + }, + { + "text": "Difference", + "value": "diff" + }, + { + "text": "First", + "value": "first" + }, + { + "text": "Log Min", + "value": "logmin" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Min", + "value": "min" + }, + { + "text": "Name", + "value": "name" + }, + { + "text": "Time of Last Point", + "value": "last_time" + }, + { + "text": "Time Step", + "value": "time_step" + }, + { + "text": "Total", + "value": "total" + } + ], + "polystat": { + "animationSpeed": 2500, + "columnAutoSize": true, + "columns": "", + "defaultClickThrough": "", + "defaultClickThroughSanitize": true, + "displayLimit": 100, + "fontAutoScale": true, + "fontSize": 12, + "globalDisplayMode": "all", + "globalOperatorName": "avg", + "gradientEnabled": true, + "hexagonSortByDirection": "asc", + "hexagonSortByField": "name", + "maxMetrics": 0, + "polygonBorderColor": "black", + "polygonBorderSize": 2, + "radius": "", + "radiusAutoSize": true, + "rowAutoSize": true, + "rows": "", + "shape": "hexagon_pointed_top", + "tooltipDisplayMode": "all", + "tooltipDisplayTextTriggeredEmpty": "OK", + "tooltipFontSize": 12, + "tooltipFontType": "Open Sans", + "tooltipPrimarySortDirection": "desc", + "tooltipPrimarySortField": "thresholdLevel", + "tooltipSecondarySortDirection": "desc", + "tooltipSecondarySortField": "value", + "tooltipTimestampEnabled": true + }, + "savedComposites": [ + { + "compositeName": "comp", + "members": [ + { + "seriesName": "A-series" + }, + { + "seriesName": "B-series" + } + ], + "enabled": true, + "clickThrough": "", + "hideMembers": true, + "showName": true, + "showValue": true, + "animateMode": "all", + "thresholdLevel": 0, + "sanitizeURLEnabled": true, + "sanitizedURL": "" + } + ], + "savedOverrides": [], + "shapes": [ + { + "text": "Hexagon Pointed Top", + "value": "hexagon_pointed_top" + }, + { + "text": "Hexagon Flat Top", + "value": "hexagon_flat_top" + }, + { + "text": "Circle", + "value": "circle" + }, + { + "text": "Cross", + "value": "cross" + }, + { + "text": "Diamond", + "value": "diamond" + }, + { + "text": "Square", + "value": "square" + }, + { + "text": "Star", + "value": "star" + }, + { + "text": "Triangle", + "value": "triangle" + }, + { + "text": "Wye", + "value": "wye" + } + ], + "sortDirections": [ + { + "text": "Ascending", + "value": "asc" + }, + { + "text": "Descending", + "value": "desc" + } + ], + "sortFields": [ + { + "text": "Name", + "value": "name" + }, + { + "text": "Threshold Level", + "value": "thresholdLevel" + }, + { + "text": "Value", + "value": "value" + } + ], + "svgContainer": {}, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "B", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "C", + "scenarioId": 
"random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "D", + "scenarioId": "random_walk" + }, + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "E", + "scenarioId": "random_walk" + } + ], + "thresholdStates": [ + { + "text": "ok", + "value": 0 + }, + { + "text": "warning", + "value": 1 + }, + { + "text": "critical", + "value": 2 + }, + { + "text": "custom", + "value": 3 + } + ], + "title": "Composite crash", + "type": "grafana-polystat-panel", + "unitFormats": [ + { + "submenu": [ + { + "text": "none", + "value": "none" + }, + { + "text": "short", + "value": "short" + }, + { + "text": "percent (0-100)", + "value": "percent" + }, + { + "text": "percent (0.0-1.0)", + "value": "percentunit" + }, + { + "text": "Humidity (%H)", + "value": "humidity" + }, + { + "text": "decibel", + "value": "dB" + }, + { + "text": "hexadecimal (0x)", + "value": "hex0x" + }, + { + "text": "hexadecimal", + "value": "hex" + }, + { + "text": "scientific notation", + "value": "sci" + }, + { + "text": "locale format", + "value": "locale" + } + ], + "text": "none" + }, + { + "submenu": [ + { + "text": "Dollars ($)", + "value": "currencyUSD" + }, + { + "text": "Pounds (£)", + "value": "currencyGBP" + }, + { + "text": "Euro (€)", + "value": "currencyEUR" + }, + { + "text": "Yen (¥)", + "value": "currencyJPY" + }, + { + "text": "Rubles (₽)", + "value": "currencyRUB" + }, + { + "text": "Hryvnias (₴)", + "value": "currencyUAH" + }, + { + "text": "Real (R$)", + "value": "currencyBRL" + }, + { + "text": "Danish Krone (kr)", + "value": "currencyDKK" + }, + { + "text": "Icelandic Króna (kr)", + "value": "currencyISK" + }, + { + "text": "Norwegian Krone (kr)", + "value": "currencyNOK" + }, + { + "text": "Swedish Krona (kr)", + "value": "currencySEK" + }, + { + "text": "Czech koruna (czk)", + "value": "currencyCZK" + }, + { + "text": "Swiss franc (CHF)", + "value": "currencyCHF" + }, + { + "text": "Polish Złoty (PLN)", + "value": "currencyPLN" + }, + { + "text": "Bitcoin (฿)", + "value": "currencyBTC" + } + ], + "text": "currency" + }, + { + "submenu": [ + { + "text": "Hertz (1/s)", + "value": "hertz" + }, + { + "text": "nanoseconds (ns)", + "value": "ns" + }, + { + "text": "microseconds (µs)", + "value": "µs" + }, + { + "text": "milliseconds (ms)", + "value": "ms" + }, + { + "text": "seconds (s)", + "value": "s" + }, + { + "text": "minutes (m)", + "value": "m" + }, + { + "text": "hours (h)", + "value": "h" + }, + { + "text": "days (d)", + "value": "d" + }, + { + "text": "duration (ms)", + "value": "dtdurationms" + }, + { + "text": "duration (s)", + "value": "dtdurations" + }, + { + "text": "duration (hh:mm:ss)", + "value": "dthms" + }, + { + "text": "Timeticks (s/100)", + "value": "timeticks" + } + ], + "text": "time" + }, + { + "submenu": [ + { + "text": "YYYY-MM-DD HH:mm:ss", + "value": "dateTimeAsIso" + }, + { + "text": "DD/MM/YYYY h:mm:ss a", + "value": "dateTimeAsUS" + }, + { + "text": "From Now", + "value": "dateTimeFromNow" + } + ], + "text": "date & time" + }, + { + "submenu": [ + { + "text": "bits", + "value": "bits" + }, + { + "text": "bytes", + "value": "bytes" + }, + { + "text": "kibibytes", + "value": "kbytes" + }, + { + "text": "mebibytes", + "value": "mbytes" + }, + { + "text": "gibibytes", + "value": "gbytes" + } + ], + "text": "data (IEC)" + }, + { + "submenu": [ + { + "text": "bits", + "value": "decbits" + }, + { + "text": "bytes", + "value": "decbytes" + }, + { + "text": "kilobytes", + "value": "deckbytes" + }, + { + "text": "megabytes", 
+ "value": "decmbytes" + }, + { + "text": "gigabytes", + "value": "decgbytes" + } + ], + "text": "data (Metric)" + }, + { + "submenu": [ + { + "text": "packets/sec", + "value": "pps" + }, + { + "text": "bits/sec", + "value": "bps" + }, + { + "text": "bytes/sec", + "value": "Bps" + }, + { + "text": "kilobits/sec", + "value": "Kbits" + }, + { + "text": "kilobytes/sec", + "value": "KBs" + }, + { + "text": "megabits/sec", + "value": "Mbits" + }, + { + "text": "megabytes/sec", + "value": "MBs" + }, + { + "text": "gigabytes/sec", + "value": "GBs" + }, + { + "text": "gigabits/sec", + "value": "Gbits" + } + ], + "text": "data rate" + }, + { + "submenu": [ + { + "text": "hashes/sec", + "value": "Hs" + }, + { + "text": "kilohashes/sec", + "value": "KHs" + }, + { + "text": "megahashes/sec", + "value": "MHs" + }, + { + "text": "gigahashes/sec", + "value": "GHs" + }, + { + "text": "terahashes/sec", + "value": "THs" + }, + { + "text": "petahashes/sec", + "value": "PHs" + }, + { + "text": "exahashes/sec", + "value": "EHs" + } + ], + "text": "hash rate" + }, + { + "submenu": [ + { + "text": "ops/sec (ops)", + "value": "ops" + }, + { + "text": "requests/sec (rps)", + "value": "reqps" + }, + { + "text": "reads/sec (rps)", + "value": "rps" + }, + { + "text": "writes/sec (wps)", + "value": "wps" + }, + { + "text": "I/O ops/sec (iops)", + "value": "iops" + }, + { + "text": "ops/min (opm)", + "value": "opm" + }, + { + "text": "reads/min (rpm)", + "value": "rpm" + }, + { + "text": "writes/min (wpm)", + "value": "wpm" + } + ], + "text": "throughput" + }, + { + "submenu": [ + { + "text": "millimetre (mm)", + "value": "lengthmm" + }, + { + "text": "meter (m)", + "value": "lengthm" + }, + { + "text": "feet (ft)", + "value": "lengthft" + }, + { + "text": "kilometer (km)", + "value": "lengthkm" + }, + { + "text": "mile (mi)", + "value": "lengthmi" + } + ], + "text": "length" + }, + { + "submenu": [ + { + "text": "Square Meters (m²)", + "value": "areaM2" + }, + { + "text": "Square Feet (ft²)", + "value": "areaF2" + }, + { + "text": "Square Miles (mi²)", + "value": "areaMI2" + } + ], + "text": "area" + }, + { + "submenu": [ + { + "text": "milligram (mg)", + "value": "massmg" + }, + { + "text": "gram (g)", + "value": "massg" + }, + { + "text": "kilogram (kg)", + "value": "masskg" + }, + { + "text": "metric ton (t)", + "value": "masst" + } + ], + "text": "mass" + }, + { + "submenu": [ + { + "text": "metres/second (m/s)", + "value": "velocityms" + }, + { + "text": "kilometers/hour (km/h)", + "value": "velocitykmh" + }, + { + "text": "miles/hour (mph)", + "value": "velocitymph" + }, + { + "text": "knot (kn)", + "value": "velocityknot" + } + ], + "text": "velocity" + }, + { + "submenu": [ + { + "text": "millilitre (mL)", + "value": "mlitre" + }, + { + "text": "litre (L)", + "value": "litre" + }, + { + "text": "cubic metre", + "value": "m3" + }, + { + "text": "Normal cubic metre", + "value": "Nm3" + }, + { + "text": "cubic decimetre", + "value": "dm3" + }, + { + "text": "gallons", + "value": "gallons" + } + ], + "text": "volume" + }, + { + "submenu": [ + { + "text": "Watt (W)", + "value": "watt" + }, + { + "text": "Kilowatt (kW)", + "value": "kwatt" + }, + { + "text": "Milliwatt (mW)", + "value": "mwatt" + }, + { + "text": "Watt per square metre (W/m²)", + "value": "Wm2" + }, + { + "text": "Volt-ampere (VA)", + "value": "voltamp" + }, + { + "text": "Kilovolt-ampere (kVA)", + "value": "kvoltamp" + }, + { + "text": "Volt-ampere reactive (var)", + "value": "voltampreact" + }, + { + "text": "Kilovolt-ampere reactive (kvar)", + 
"value": "kvoltampreact" + }, + { + "text": "Watt-hour (Wh)", + "value": "watth" + }, + { + "text": "Kilowatt-hour (kWh)", + "value": "kwatth" + }, + { + "text": "Kilowatt-min (kWm)", + "value": "kwattm" + }, + { + "text": "Joule (J)", + "value": "joule" + }, + { + "text": "Electron volt (eV)", + "value": "ev" + }, + { + "text": "Ampere (A)", + "value": "amp" + }, + { + "text": "Kiloampere (kA)", + "value": "kamp" + }, + { + "text": "Milliampere (mA)", + "value": "mamp" + }, + { + "text": "Volt (V)", + "value": "volt" + }, + { + "text": "Kilovolt (kV)", + "value": "kvolt" + }, + { + "text": "Millivolt (mV)", + "value": "mvolt" + }, + { + "text": "Decibel-milliwatt (dBm)", + "value": "dBm" + }, + { + "text": "Ohm (Ω)", + "value": "ohm" + }, + { + "text": "Lumens (Lm)", + "value": "lumens" + } + ], + "text": "energy" + }, + { + "submenu": [ + { + "text": "Celsius (°C)", + "value": "celsius" + }, + { + "text": "Farenheit (°F)", + "value": "farenheit" + }, + { + "text": "Kelvin (K)", + "value": "kelvin" + } + ], + "text": "temperature" + }, + { + "submenu": [ + { + "text": "Millibars", + "value": "pressurembar" + }, + { + "text": "Bars", + "value": "pressurebar" + }, + { + "text": "Kilobars", + "value": "pressurekbar" + }, + { + "text": "Hectopascals", + "value": "pressurehpa" + }, + { + "text": "Kilopascals", + "value": "pressurekpa" + }, + { + "text": "Inches of mercury", + "value": "pressurehg" + }, + { + "text": "PSI", + "value": "pressurepsi" + } + ], + "text": "pressure" + }, + { + "submenu": [ + { + "text": "Newton-meters (Nm)", + "value": "forceNm" + }, + { + "text": "Kilonewton-meters (kNm)", + "value": "forcekNm" + }, + { + "text": "Newtons (N)", + "value": "forceN" + }, + { + "text": "Kilonewtons (kN)", + "value": "forcekN" + } + ], + "text": "force" + }, + { + "submenu": [ + { + "text": "Gallons/min (gpm)", + "value": "flowgpm" + }, + { + "text": "Cubic meters/sec (cms)", + "value": "flowcms" + }, + { + "text": "Cubic feet/sec (cfs)", + "value": "flowcfs" + }, + { + "text": "Cubic feet/min (cfm)", + "value": "flowcfm" + }, + { + "text": "Litre/hour", + "value": "litreh" + }, + { + "text": "Litre/min (l/min)", + "value": "flowlpm" + }, + { + "text": "milliLitre/min (mL/min)", + "value": "flowmlpm" + } + ], + "text": "flow" + }, + { + "submenu": [ + { + "text": "Degrees (°)", + "value": "degree" + }, + { + "text": "Radians", + "value": "radian" + }, + { + "text": "Gradian", + "value": "grad" + } + ], + "text": "angle" + }, + { + "submenu": [ + { + "text": "Meters/sec²", + "value": "accMS2" + }, + { + "text": "Feet/sec²", + "value": "accFS2" + }, + { + "text": "G unit", + "value": "accG" + } + ], + "text": "acceleration" + }, + { + "submenu": [ + { + "text": "Becquerel (Bq)", + "value": "radbq" + }, + { + "text": "curie (Ci)", + "value": "radci" + }, + { + "text": "Gray (Gy)", + "value": "radgy" + }, + { + "text": "rad", + "value": "radrad" + }, + { + "text": "Sievert (Sv)", + "value": "radsv" + }, + { + "text": "rem", + "value": "radrem" + }, + { + "text": "Exposure (C/kg)", + "value": "radexpckg" + }, + { + "text": "roentgen (R)", + "value": "radr" + }, + { + "text": "Sievert/hour (Sv/h)", + "value": "radsvh" + } + ], + "text": "radiation" + }, + { + "submenu": [ + { + "text": "parts-per-million (ppm)", + "value": "ppm" + }, + { + "text": "parts-per-billion (ppb)", + "value": "conppb" + }, + { + "text": "nanogram per cubic metre (ng/m³)", + "value": "conngm3" + }, + { + "text": "nanogram per normal cubic metre (ng/Nm³)", + "value": "conngNm3" + }, + { + "text": "microgram per cubic 
metre (μg/m³)", + "value": "conμgm3" + }, + { + "text": "microgram per normal cubic metre (μg/Nm³)", + "value": "conμgNm3" + }, + { + "text": "milligram per cubic metre (mg/m³)", + "value": "conmgm3" + }, + { + "text": "milligram per normal cubic metre (mg/Nm³)", + "value": "conmgNm3" + }, + { + "text": "gram per cubic metre (g/m³)", + "value": "congm3" + }, + { + "text": "gram per normal cubic metre (g/Nm³)", + "value": "congNm3" + } + ], + "text": "concentration" + } + ] + }, + { + "animationModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "d3DivId": "d3_svg_2", + "datasource": "gdev-testdata", + "decimals": 2, + "displayModes": [ + { + "text": "Show All", + "value": "all" + }, + { + "text": "Show Triggered", + "value": "triggered" + } + ], + "fontSizes": [ + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 22, + 24, + 26, + 28, + 30, + 32, + 34, + 36, + 38, + 40, + 42, + 44, + 46, + 48, + 50, + 52, + 54, + 56, + 58, + 60, + 62, + 64, + 66, + 68, + 70 + ], + "fontTypes": [ + "Open Sans", + "Arial", + "Avant Garde", + "Bookman", + "Consolas", + "Courier", + "Courier New", + "Futura", + "Garamond", + "Helvetica", + "Palatino", + "Times", + "Times New Roman", + "Verdana" + ], + "format": "none", + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 2, + "links": [], + "notcolors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "operatorName": "avg", + "operatorOptions": [ + { + "text": "Average", + "value": "avg" + }, + { + "text": "Count", + "value": "count" + }, + { + "text": "Current", + "value": "current" + }, + { + "text": "Delta", + "value": "delta" + }, + { + "text": "Difference", + "value": "diff" + }, + { + "text": "First", + "value": "first" + }, + { + "text": "Log Min", + "value": "logmin" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Min", + "value": "min" + }, + { + "text": "Name", + "value": "name" + }, + { + "text": "Time of Last Point", + "value": "last_time" + }, + { + "text": "Time Step", + "value": "time_step" + }, + { + "text": "Total", + "value": "total" + } + ], + "polystat": { + "animationSpeed": 2500, + "columnAutoSize": true, + "columns": 1, + "defaultClickThrough": "", + "defaultClickThroughSanitize": true, + "displayLimit": 100, + "fontAutoScale": true, + "fontSize": 12, + "globalDisplayMode": "all", + "globalOperatorName": "avg", + "gradientEnabled": true, + "hexagonSortByDirection": "asc", + "hexagonSortByField": "name", + "maxMetrics": 0, + "polygonBorderColor": "black", + "polygonBorderSize": 2, + "radius": "", + "radiusAutoSize": true, + "rowAutoSize": true, + "rows": 1, + "shape": "hexagon_pointed_top", + "tooltipDisplayMode": "all", + "tooltipDisplayTextTriggeredEmpty": "OK", + "tooltipFontSize": 12, + "tooltipFontType": "Open Sans", + "tooltipPrimarySortDirection": "desc", + "tooltipPrimarySortField": "thresholdLevel", + "tooltipSecondarySortDirection": "desc", + "tooltipSecondarySortField": "value", + "tooltipTimestampEnabled": true + }, + "savedComposites": [], + "savedOverrides": [], + "shapes": [ + { + "text": "Hexagon Pointed Top", + "value": "hexagon_pointed_top" + }, + { + "text": "Hexagon Flat Top", + "value": "hexagon_flat_top" + }, + { + "text": "Circle", + "value": "circle" + }, + { + "text": "Cross", + "value": "cross" + }, + { + "text": "Diamond", + "value": "diamond" + }, 
+ { + "text": "Square", + "value": "square" + }, + { + "text": "Star", + "value": "star" + }, + { + "text": "Triangle", + "value": "triangle" + }, + { + "text": "Wye", + "value": "wye" + } + ], + "sortDirections": [ + { + "text": "Ascending", + "value": "asc" + }, + { + "text": "Descending", + "value": "desc" + } + ], + "sortFields": [ + { + "text": "Name", + "value": "name" + }, + { + "text": "Threshold Level", + "value": "thresholdLevel" + }, + { + "text": "Value", + "value": "value" + } + ], + "svgContainer": {}, + "targets": [ + { + "alias": "Sensor-A", + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + }, + { + "alias": "Sensor-B", + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "3433,23432,55" + }, + { + "alias": "Sensor-C", + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5,6" + }, + { + "alias": "Sensor-E", + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "thresholdStates": [ + { + "text": "ok", + "value": 0 + }, + { + "text": "warning", + "value": 1 + }, + { + "text": "critical", + "value": 2 + }, + { + "text": "custom", + "value": 3 + } + ], + "title": "No Value in Sensor-C Bug", + "type": "grafana-polystat-panel", + "unitFormats": [ + { + "submenu": [ + { + "text": "none", + "value": "none" + }, + { + "text": "short", + "value": "short" + }, + { + "text": "percent (0-100)", + "value": "percent" + }, + { + "text": "percent (0.0-1.0)", + "value": "percentunit" + }, + { + "text": "Humidity (%H)", + "value": "humidity" + }, + { + "text": "decibel", + "value": "dB" + }, + { + "text": "hexadecimal (0x)", + "value": "hex0x" + }, + { + "text": "hexadecimal", + "value": "hex" + }, + { + "text": "scientific notation", + "value": "sci" + }, + { + "text": "locale format", + "value": "locale" + } + ], + "text": "none" + }, + { + "submenu": [ + { + "text": "Dollars ($)", + "value": "currencyUSD" + }, + { + "text": "Pounds (£)", + "value": "currencyGBP" + }, + { + "text": "Euro (€)", + "value": "currencyEUR" + }, + { + "text": "Yen (¥)", + "value": "currencyJPY" + }, + { + "text": "Rubles (₽)", + "value": "currencyRUB" + }, + { + "text": "Hryvnias (₴)", + "value": "currencyUAH" + }, + { + "text": "Real (R$)", + "value": "currencyBRL" + }, + { + "text": "Danish Krone (kr)", + "value": "currencyDKK" + }, + { + "text": "Icelandic Króna (kr)", + "value": "currencyISK" + }, + { + "text": "Norwegian Krone (kr)", + "value": "currencyNOK" + }, + { + "text": "Swedish Krona (kr)", + "value": "currencySEK" + }, + { + "text": "Czech koruna (czk)", + "value": "currencyCZK" + }, + { + "text": "Swiss franc (CHF)", + "value": "currencyCHF" + }, + { + "text": "Polish Złoty (PLN)", + "value": "currencyPLN" + }, + { + "text": "Bitcoin (฿)", + "value": "currencyBTC" + } + ], + "text": "currency" + }, + { + "submenu": [ + { + "text": "Hertz (1/s)", + "value": "hertz" + }, + { + "text": "nanoseconds (ns)", + "value": "ns" + }, + { + "text": "microseconds (µs)", + "value": "µs" + }, + { + "text": "milliseconds (ms)", + "value": "ms" + }, + { + "text": "seconds (s)", + "value": "s" + }, + { + "text": "minutes (m)", + "value": "m" + }, + { + "text": "hours (h)", + "value": "h" + }, + { + "text": "days (d)", + "value": "d" + }, + { 
+ "text": "duration (ms)", + "value": "dtdurationms" + }, + { + "text": "duration (s)", + "value": "dtdurations" + }, + { + "text": "duration (hh:mm:ss)", + "value": "dthms" + }, + { + "text": "Timeticks (s/100)", + "value": "timeticks" + } + ], + "text": "time" + }, + { + "submenu": [ + { + "text": "YYYY-MM-DD HH:mm:ss", + "value": "dateTimeAsIso" + }, + { + "text": "DD/MM/YYYY h:mm:ss a", + "value": "dateTimeAsUS" + }, + { + "text": "From Now", + "value": "dateTimeFromNow" + } + ], + "text": "date & time" + }, + { + "submenu": [ + { + "text": "bits", + "value": "bits" + }, + { + "text": "bytes", + "value": "bytes" + }, + { + "text": "kibibytes", + "value": "kbytes" + }, + { + "text": "mebibytes", + "value": "mbytes" + }, + { + "text": "gibibytes", + "value": "gbytes" + } + ], + "text": "data (IEC)" + }, + { + "submenu": [ + { + "text": "bits", + "value": "decbits" + }, + { + "text": "bytes", + "value": "decbytes" + }, + { + "text": "kilobytes", + "value": "deckbytes" + }, + { + "text": "megabytes", + "value": "decmbytes" + }, + { + "text": "gigabytes", + "value": "decgbytes" + } + ], + "text": "data (Metric)" + }, + { + "submenu": [ + { + "text": "packets/sec", + "value": "pps" + }, + { + "text": "bits/sec", + "value": "bps" + }, + { + "text": "bytes/sec", + "value": "Bps" + }, + { + "text": "kilobits/sec", + "value": "Kbits" + }, + { + "text": "kilobytes/sec", + "value": "KBs" + }, + { + "text": "megabits/sec", + "value": "Mbits" + }, + { + "text": "megabytes/sec", + "value": "MBs" + }, + { + "text": "gigabytes/sec", + "value": "GBs" + }, + { + "text": "gigabits/sec", + "value": "Gbits" + } + ], + "text": "data rate" + }, + { + "submenu": [ + { + "text": "hashes/sec", + "value": "Hs" + }, + { + "text": "kilohashes/sec", + "value": "KHs" + }, + { + "text": "megahashes/sec", + "value": "MHs" + }, + { + "text": "gigahashes/sec", + "value": "GHs" + }, + { + "text": "terahashes/sec", + "value": "THs" + }, + { + "text": "petahashes/sec", + "value": "PHs" + }, + { + "text": "exahashes/sec", + "value": "EHs" + } + ], + "text": "hash rate" + }, + { + "submenu": [ + { + "text": "ops/sec (ops)", + "value": "ops" + }, + { + "text": "requests/sec (rps)", + "value": "reqps" + }, + { + "text": "reads/sec (rps)", + "value": "rps" + }, + { + "text": "writes/sec (wps)", + "value": "wps" + }, + { + "text": "I/O ops/sec (iops)", + "value": "iops" + }, + { + "text": "ops/min (opm)", + "value": "opm" + }, + { + "text": "reads/min (rpm)", + "value": "rpm" + }, + { + "text": "writes/min (wpm)", + "value": "wpm" + } + ], + "text": "throughput" + }, + { + "submenu": [ + { + "text": "millimetre (mm)", + "value": "lengthmm" + }, + { + "text": "meter (m)", + "value": "lengthm" + }, + { + "text": "feet (ft)", + "value": "lengthft" + }, + { + "text": "kilometer (km)", + "value": "lengthkm" + }, + { + "text": "mile (mi)", + "value": "lengthmi" + } + ], + "text": "length" + }, + { + "submenu": [ + { + "text": "Square Meters (m²)", + "value": "areaM2" + }, + { + "text": "Square Feet (ft²)", + "value": "areaF2" + }, + { + "text": "Square Miles (mi²)", + "value": "areaMI2" + } + ], + "text": "area" + }, + { + "submenu": [ + { + "text": "milligram (mg)", + "value": "massmg" + }, + { + "text": "gram (g)", + "value": "massg" + }, + { + "text": "kilogram (kg)", + "value": "masskg" + }, + { + "text": "metric ton (t)", + "value": "masst" + } + ], + "text": "mass" + }, + { + "submenu": [ + { + "text": "metres/second (m/s)", + "value": "velocityms" + }, + { + "text": "kilometers/hour (km/h)", + "value": "velocitykmh" + }, + { + 
"text": "miles/hour (mph)", + "value": "velocitymph" + }, + { + "text": "knot (kn)", + "value": "velocityknot" + } + ], + "text": "velocity" + }, + { + "submenu": [ + { + "text": "millilitre (mL)", + "value": "mlitre" + }, + { + "text": "litre (L)", + "value": "litre" + }, + { + "text": "cubic metre", + "value": "m3" + }, + { + "text": "Normal cubic metre", + "value": "Nm3" + }, + { + "text": "cubic decimetre", + "value": "dm3" + }, + { + "text": "gallons", + "value": "gallons" + } + ], + "text": "volume" + }, + { + "submenu": [ + { + "text": "Watt (W)", + "value": "watt" + }, + { + "text": "Kilowatt (kW)", + "value": "kwatt" + }, + { + "text": "Milliwatt (mW)", + "value": "mwatt" + }, + { + "text": "Watt per square metre (W/m²)", + "value": "Wm2" + }, + { + "text": "Volt-ampere (VA)", + "value": "voltamp" + }, + { + "text": "Kilovolt-ampere (kVA)", + "value": "kvoltamp" + }, + { + "text": "Volt-ampere reactive (var)", + "value": "voltampreact" + }, + { + "text": "Kilovolt-ampere reactive (kvar)", + "value": "kvoltampreact" + }, + { + "text": "Watt-hour (Wh)", + "value": "watth" + }, + { + "text": "Kilowatt-hour (kWh)", + "value": "kwatth" + }, + { + "text": "Kilowatt-min (kWm)", + "value": "kwattm" + }, + { + "text": "Joule (J)", + "value": "joule" + }, + { + "text": "Electron volt (eV)", + "value": "ev" + }, + { + "text": "Ampere (A)", + "value": "amp" + }, + { + "text": "Kiloampere (kA)", + "value": "kamp" + }, + { + "text": "Milliampere (mA)", + "value": "mamp" + }, + { + "text": "Volt (V)", + "value": "volt" + }, + { + "text": "Kilovolt (kV)", + "value": "kvolt" + }, + { + "text": "Millivolt (mV)", + "value": "mvolt" + }, + { + "text": "Decibel-milliwatt (dBm)", + "value": "dBm" + }, + { + "text": "Ohm (Ω)", + "value": "ohm" + }, + { + "text": "Lumens (Lm)", + "value": "lumens" + } + ], + "text": "energy" + }, + { + "submenu": [ + { + "text": "Celsius (°C)", + "value": "celsius" + }, + { + "text": "Farenheit (°F)", + "value": "farenheit" + }, + { + "text": "Kelvin (K)", + "value": "kelvin" + } + ], + "text": "temperature" + }, + { + "submenu": [ + { + "text": "Millibars", + "value": "pressurembar" + }, + { + "text": "Bars", + "value": "pressurebar" + }, + { + "text": "Kilobars", + "value": "pressurekbar" + }, + { + "text": "Hectopascals", + "value": "pressurehpa" + }, + { + "text": "Kilopascals", + "value": "pressurekpa" + }, + { + "text": "Inches of mercury", + "value": "pressurehg" + }, + { + "text": "PSI", + "value": "pressurepsi" + } + ], + "text": "pressure" + }, + { + "submenu": [ + { + "text": "Newton-meters (Nm)", + "value": "forceNm" + }, + { + "text": "Kilonewton-meters (kNm)", + "value": "forcekNm" + }, + { + "text": "Newtons (N)", + "value": "forceN" + }, + { + "text": "Kilonewtons (kN)", + "value": "forcekN" + } + ], + "text": "force" + }, + { + "submenu": [ + { + "text": "Gallons/min (gpm)", + "value": "flowgpm" + }, + { + "text": "Cubic meters/sec (cms)", + "value": "flowcms" + }, + { + "text": "Cubic feet/sec (cfs)", + "value": "flowcfs" + }, + { + "text": "Cubic feet/min (cfm)", + "value": "flowcfm" + }, + { + "text": "Litre/hour", + "value": "litreh" + }, + { + "text": "Litre/min (l/min)", + "value": "flowlpm" + }, + { + "text": "milliLitre/min (mL/min)", + "value": "flowmlpm" + } + ], + "text": "flow" + }, + { + "submenu": [ + { + "text": "Degrees (°)", + "value": "degree" + }, + { + "text": "Radians", + "value": "radian" + }, + { + "text": "Gradian", + "value": "grad" + } + ], + "text": "angle" + }, + { + "submenu": [ + { + "text": "Meters/sec²", + "value": 
"accMS2" + }, + { + "text": "Feet/sec²", + "value": "accFS2" + }, + { + "text": "G unit", + "value": "accG" + } + ], + "text": "acceleration" + }, + { + "submenu": [ + { + "text": "Becquerel (Bq)", + "value": "radbq" + }, + { + "text": "curie (Ci)", + "value": "radci" + }, + { + "text": "Gray (Gy)", + "value": "radgy" + }, + { + "text": "rad", + "value": "radrad" + }, + { + "text": "Sievert (Sv)", + "value": "radsv" + }, + { + "text": "rem", + "value": "radrem" + }, + { + "text": "Exposure (C/kg)", + "value": "radexpckg" + }, + { + "text": "roentgen (R)", + "value": "radr" + }, + { + "text": "Sievert/hour (Sv/h)", + "value": "radsvh" + } + ], + "text": "radiation" + }, + { + "submenu": [ + { + "text": "parts-per-million (ppm)", + "value": "ppm" + }, + { + "text": "parts-per-billion (ppb)", + "value": "conppb" + }, + { + "text": "nanogram per cubic metre (ng/m³)", + "value": "conngm3" + }, + { + "text": "nanogram per normal cubic metre (ng/Nm³)", + "value": "conngNm3" + }, + { + "text": "microgram per cubic metre (μg/m³)", + "value": "conμgm3" + }, + { + "text": "microgram per normal cubic metre (μg/Nm³)", + "value": "conμgNm3" + }, + { + "text": "milligram per cubic metre (mg/m³)", + "value": "conmgm3" + }, + { + "text": "milligram per normal cubic metre (mg/Nm³)", + "value": "conmgNm3" + }, + { + "text": "gram per cubic metre (g/m³)", + "value": "congm3" + }, + { + "text": "gram per normal cubic metre (g/Nm³)", + "value": "congNm3" + } + ], + "text": "concentration" + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [ + "panel-test", + "gdev" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Panel Tests - Polystat", + "uid": "Kp9Z0hTik", + "version": 5 +} diff --git a/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json new file mode 100644 index 00000000000..08bf6dce9d0 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json @@ -0,0 +1,1166 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "-- Grafana --", + "enable": true, + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "matchAny": false, + "name": "annotations", + "showIn": 0, + "tags": [ + "asd" + ], + "type": "tags" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 13, + "x": 0, + "y": 0 + }, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + 
"scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 11, + "x": 13, + "y": 0 + }, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + 
"show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 17, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 14 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": 
null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 14 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 14 + }, + "id": 11, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 19 + }, + "id": 14, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + 
"tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 19 + }, + "id": 15, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 19 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 16, + "x": 0, + "y": 24 + }, + "id": 13, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + 
"pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 24 + }, + "id": 16, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Panel tests - Slow Queries & Annotations", + "uid": "xtY_uCAiz", + "version": 11 +} diff --git a/docker/blocks/apache_proxy/Dockerfile b/devenv/docker/blocks/apache_proxy/Dockerfile similarity index 100% rename from docker/blocks/apache_proxy/Dockerfile rename to devenv/docker/blocks/apache_proxy/Dockerfile diff --git a/docker/blocks/apache_proxy/docker-compose.yaml b/devenv/docker/blocks/apache_proxy/docker-compose.yaml similarity index 88% rename from docker/blocks/apache_proxy/docker-compose.yaml rename to devenv/docker/blocks/apache_proxy/docker-compose.yaml index 86d4befadd6..3791213f05a 100644 --- a/docker/blocks/apache_proxy/docker-compose.yaml +++ b/devenv/docker/blocks/apache_proxy/docker-compose.yaml @@ -5,5 +5,5 @@ # root_url = %(protocol)s://%(domain)s:10081/grafana/ apacheproxy: - build: blocks/apache_proxy + build: docker/blocks/apache_proxy network_mode: host diff --git a/docker/blocks/apache_proxy/ports.conf 
b/devenv/docker/blocks/apache_proxy/ports.conf similarity index 100% rename from docker/blocks/apache_proxy/ports.conf rename to devenv/docker/blocks/apache_proxy/ports.conf diff --git a/docker/blocks/apache_proxy/proxy.conf b/devenv/docker/blocks/apache_proxy/proxy.conf similarity index 100% rename from docker/blocks/apache_proxy/proxy.conf rename to devenv/docker/blocks/apache_proxy/proxy.conf diff --git a/docker/blocks/collectd/Dockerfile b/devenv/docker/blocks/collectd/Dockerfile similarity index 100% rename from docker/blocks/collectd/Dockerfile rename to devenv/docker/blocks/collectd/Dockerfile diff --git a/docker/blocks/collectd/README.md b/devenv/docker/blocks/collectd/README.md similarity index 100% rename from docker/blocks/collectd/README.md rename to devenv/docker/blocks/collectd/README.md diff --git a/docker/blocks/collectd/collectd.conf.tpl b/devenv/docker/blocks/collectd/collectd.conf.tpl similarity index 100% rename from docker/blocks/collectd/collectd.conf.tpl rename to devenv/docker/blocks/collectd/collectd.conf.tpl diff --git a/docker/blocks/collectd/docker-compose.yaml b/devenv/docker/blocks/collectd/docker-compose.yaml similarity index 87% rename from docker/blocks/collectd/docker-compose.yaml rename to devenv/docker/blocks/collectd/docker-compose.yaml index c95827f7928..c5e189b58d8 100644 --- a/docker/blocks/collectd/docker-compose.yaml +++ b/devenv/docker/blocks/collectd/docker-compose.yaml @@ -1,5 +1,5 @@ collectd: - build: blocks/collectd + build: docker/blocks/collectd environment: HOST_NAME: myserver GRAPHITE_HOST: graphite diff --git a/docker/blocks/collectd/etc_mtab b/devenv/docker/blocks/collectd/etc_mtab similarity index 100% rename from docker/blocks/collectd/etc_mtab rename to devenv/docker/blocks/collectd/etc_mtab diff --git a/docker/blocks/collectd/start_container b/devenv/docker/blocks/collectd/start_container similarity index 100% rename from docker/blocks/collectd/start_container rename to devenv/docker/blocks/collectd/start_container diff --git a/docker/blocks/elastic/docker-compose.yaml b/devenv/docker/blocks/elastic/docker-compose.yaml similarity index 100% rename from docker/blocks/elastic/docker-compose.yaml rename to devenv/docker/blocks/elastic/docker-compose.yaml diff --git a/docker/blocks/elastic/elasticsearch.yml b/devenv/docker/blocks/elastic/elasticsearch.yml similarity index 100% rename from docker/blocks/elastic/elasticsearch.yml rename to devenv/docker/blocks/elastic/elasticsearch.yml diff --git a/docker/blocks/elastic1/docker-compose.yaml b/devenv/docker/blocks/elastic1/docker-compose.yaml similarity index 100% rename from docker/blocks/elastic1/docker-compose.yaml rename to devenv/docker/blocks/elastic1/docker-compose.yaml diff --git a/docker/blocks/elastic1/elasticsearch.yml b/devenv/docker/blocks/elastic1/elasticsearch.yml similarity index 100% rename from docker/blocks/elastic1/elasticsearch.yml rename to devenv/docker/blocks/elastic1/elasticsearch.yml diff --git a/docker/blocks/elastic5/docker-compose.yaml b/devenv/docker/blocks/elastic5/docker-compose.yaml similarity index 100% rename from docker/blocks/elastic5/docker-compose.yaml rename to devenv/docker/blocks/elastic5/docker-compose.yaml diff --git a/docker/blocks/elastic5/elasticsearch.yml b/devenv/docker/blocks/elastic5/elasticsearch.yml similarity index 100% rename from docker/blocks/elastic5/elasticsearch.yml rename to devenv/docker/blocks/elastic5/elasticsearch.yml diff --git a/docker/blocks/elastic6/docker-compose.yaml b/devenv/docker/blocks/elastic6/docker-compose.yaml 
similarity index 100% rename from docker/blocks/elastic6/docker-compose.yaml rename to devenv/docker/blocks/elastic6/docker-compose.yaml diff --git a/docker/blocks/elastic6/elasticsearch.yml b/devenv/docker/blocks/elastic6/elasticsearch.yml similarity index 100% rename from docker/blocks/elastic6/elasticsearch.yml rename to devenv/docker/blocks/elastic6/elasticsearch.yml diff --git a/docker/blocks/graphite/Dockerfile b/devenv/docker/blocks/graphite/Dockerfile similarity index 100% rename from docker/blocks/graphite/Dockerfile rename to devenv/docker/blocks/graphite/Dockerfile diff --git a/docker/blocks/graphite/docker-compose.yaml b/devenv/docker/blocks/graphite/docker-compose.yaml similarity index 89% rename from docker/blocks/graphite/docker-compose.yaml rename to devenv/docker/blocks/graphite/docker-compose.yaml index 606e28638f7..acebd2bd9c0 100644 --- a/docker/blocks/graphite/docker-compose.yaml +++ b/devenv/docker/blocks/graphite/docker-compose.yaml @@ -1,5 +1,5 @@ graphite09: - build: blocks/graphite + build: docker/blocks/graphite ports: - "8080:80" - "2003:2003" diff --git a/docker/blocks/graphite/files/carbon.conf b/devenv/docker/blocks/graphite/files/carbon.conf similarity index 100% rename from docker/blocks/graphite/files/carbon.conf rename to devenv/docker/blocks/graphite/files/carbon.conf diff --git a/docker/blocks/graphite/files/events_views.py b/devenv/docker/blocks/graphite/files/events_views.py similarity index 100% rename from docker/blocks/graphite/files/events_views.py rename to devenv/docker/blocks/graphite/files/events_views.py diff --git a/docker/blocks/graphite/files/initial_data.json b/devenv/docker/blocks/graphite/files/initial_data.json similarity index 100% rename from docker/blocks/graphite/files/initial_data.json rename to devenv/docker/blocks/graphite/files/initial_data.json diff --git a/docker/blocks/graphite/files/local_settings.py b/devenv/docker/blocks/graphite/files/local_settings.py similarity index 100% rename from docker/blocks/graphite/files/local_settings.py rename to devenv/docker/blocks/graphite/files/local_settings.py diff --git a/docker/blocks/graphite/files/my_htpasswd b/devenv/docker/blocks/graphite/files/my_htpasswd similarity index 100% rename from docker/blocks/graphite/files/my_htpasswd rename to devenv/docker/blocks/graphite/files/my_htpasswd diff --git a/docker/blocks/graphite/files/nginx.conf b/devenv/docker/blocks/graphite/files/nginx.conf similarity index 100% rename from docker/blocks/graphite/files/nginx.conf rename to devenv/docker/blocks/graphite/files/nginx.conf diff --git a/docker/blocks/graphite/files/statsd_config.js b/devenv/docker/blocks/graphite/files/statsd_config.js similarity index 100% rename from docker/blocks/graphite/files/statsd_config.js rename to devenv/docker/blocks/graphite/files/statsd_config.js diff --git a/docker/blocks/graphite/files/storage-aggregation.conf b/devenv/docker/blocks/graphite/files/storage-aggregation.conf similarity index 100% rename from docker/blocks/graphite/files/storage-aggregation.conf rename to devenv/docker/blocks/graphite/files/storage-aggregation.conf diff --git a/docker/blocks/graphite/files/storage-schemas.conf b/devenv/docker/blocks/graphite/files/storage-schemas.conf similarity index 100% rename from docker/blocks/graphite/files/storage-schemas.conf rename to devenv/docker/blocks/graphite/files/storage-schemas.conf diff --git a/docker/blocks/graphite/files/supervisord.conf b/devenv/docker/blocks/graphite/files/supervisord.conf similarity index 100% rename from 
docker/blocks/graphite/files/supervisord.conf rename to devenv/docker/blocks/graphite/files/supervisord.conf diff --git a/docker/blocks/graphite1/Dockerfile b/devenv/docker/blocks/graphite1/Dockerfile similarity index 100% rename from docker/blocks/graphite1/Dockerfile rename to devenv/docker/blocks/graphite1/Dockerfile diff --git a/docker/blocks/graphite1/big-dashboard.json b/devenv/docker/blocks/graphite1/big-dashboard.json similarity index 100% rename from docker/blocks/graphite1/big-dashboard.json rename to devenv/docker/blocks/graphite1/big-dashboard.json diff --git a/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd b/devenv/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd similarity index 100% rename from docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd rename to devenv/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd diff --git a/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh b/devenv/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh similarity index 100% rename from docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh rename to devenv/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh diff --git a/docker/blocks/graphite1/conf/etc/nginx/nginx.conf b/devenv/docker/blocks/graphite1/conf/etc/nginx/nginx.conf similarity index 100% rename from docker/blocks/graphite1/conf/etc/nginx/nginx.conf rename to devenv/docker/blocks/graphite1/conf/etc/nginx/nginx.conf diff --git a/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf b/devenv/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf similarity index 100% rename from docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf rename to devenv/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf diff --git a/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run b/devenv/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run similarity index 100% rename from docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run rename to devenv/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run diff --git a/docker/blocks/graphite1/conf/etc/service/carbon/run b/devenv/docker/blocks/graphite1/conf/etc/service/carbon/run similarity index 100% rename from docker/blocks/graphite1/conf/etc/service/carbon/run rename to devenv/docker/blocks/graphite1/conf/etc/service/carbon/run diff --git a/docker/blocks/graphite1/conf/etc/service/graphite/run b/devenv/docker/blocks/graphite1/conf/etc/service/graphite/run similarity index 100% rename from docker/blocks/graphite1/conf/etc/service/graphite/run rename to devenv/docker/blocks/graphite1/conf/etc/service/graphite/run diff --git a/docker/blocks/graphite1/conf/etc/service/nginx/run b/devenv/docker/blocks/graphite1/conf/etc/service/nginx/run similarity index 100% rename from docker/blocks/graphite1/conf/etc/service/nginx/run rename to devenv/docker/blocks/graphite1/conf/etc/service/nginx/run diff --git a/docker/blocks/graphite1/conf/etc/service/statsd/run b/devenv/docker/blocks/graphite1/conf/etc/service/statsd/run similarity index 100% rename from docker/blocks/graphite1/conf/etc/service/statsd/run rename to devenv/docker/blocks/graphite1/conf/etc/service/statsd/run diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf similarity index 96% rename from docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf 
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf index c9520124a2a..792bbfd6857 100644 --- a/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf +++ b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf @@ -8,7 +8,7 @@ # 'avg'. The name of the aggregate metric will be derived from # 'output_template' filling in any captured fields from 'input_pattern'. # -# For example, if you're metric naming scheme is: +# For example, if your metric naming scheme is: # # .applications... # diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf 
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf diff --git a/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py b/devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py rename to devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py diff --git a/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py b/devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py similarity index 100% rename from docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py rename to devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py diff --git a/docker/blocks/graphite1/conf/opt/statsd/config.js b/devenv/docker/blocks/graphite1/conf/opt/statsd/config.js similarity index 100% rename from docker/blocks/graphite1/conf/opt/statsd/config.js rename to devenv/docker/blocks/graphite1/conf/opt/statsd/config.js diff --git a/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp b/devenv/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp similarity index 100% rename from docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp rename to devenv/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp diff --git a/docker/blocks/graphite1/conf/usr/local/bin/manage.sh b/devenv/docker/blocks/graphite1/conf/usr/local/bin/manage.sh similarity index 100% rename from docker/blocks/graphite1/conf/usr/local/bin/manage.sh rename to devenv/docker/blocks/graphite1/conf/usr/local/bin/manage.sh diff --git a/docker/blocks/graphite1/docker-compose.yaml b/devenv/docker/blocks/graphite1/docker-compose.yaml similarity index 90% rename from docker/blocks/graphite1/docker-compose.yaml rename to devenv/docker/blocks/graphite1/docker-compose.yaml index cd10593f423..1fa3e738ba8 100644 --- a/docker/blocks/graphite1/docker-compose.yaml +++ b/devenv/docker/blocks/graphite1/docker-compose.yaml @@ -1,6 +1,6 @@ graphite: build: - context: blocks/graphite1 + context: docker/blocks/graphite1 args: version: master ports: diff --git a/docker/blocks/graphite11/big-dashboard.json b/devenv/docker/blocks/graphite11/big-dashboard.json similarity index 100% rename from docker/blocks/graphite11/big-dashboard.json rename to devenv/docker/blocks/graphite11/big-dashboard.json diff --git a/docker/blocks/graphite11/docker-compose.yaml b/devenv/docker/blocks/graphite11/docker-compose.yaml similarity index 100% rename from docker/blocks/graphite11/docker-compose.yaml rename to devenv/docker/blocks/graphite11/docker-compose.yaml diff --git a/docker/blocks/influxdb/docker-compose.yaml b/devenv/docker/blocks/influxdb/docker-compose.yaml similarity index 80% rename from docker/blocks/influxdb/docker-compose.yaml rename to devenv/docker/blocks/influxdb/docker-compose.yaml index 3434f5d09b9..e1727807d41 100644 --- a/docker/blocks/influxdb/docker-compose.yaml +++ b/devenv/docker/blocks/influxdb/docker-compose.yaml @@ -6,7 +6,7 @@ - "8083:8083" - "8086:8086" volumes: - - ./blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf + - ./docker/blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf 
fake-influxdb-data: image: grafana/fake-data-gen diff --git a/docker/blocks/influxdb/influxdb.conf b/devenv/docker/blocks/influxdb/influxdb.conf similarity index 100% rename from docker/blocks/influxdb/influxdb.conf rename to devenv/docker/blocks/influxdb/influxdb.conf diff --git a/docker/blocks/jaeger/docker-compose.yaml b/devenv/docker/blocks/jaeger/docker-compose.yaml similarity index 100% rename from docker/blocks/jaeger/docker-compose.yaml rename to devenv/docker/blocks/jaeger/docker-compose.yaml diff --git a/docker/blocks/memcached/docker-compose.yaml b/devenv/docker/blocks/memcached/docker-compose.yaml similarity index 100% rename from docker/blocks/memcached/docker-compose.yaml rename to devenv/docker/blocks/memcached/docker-compose.yaml diff --git a/docker/blocks/mssql/build/Dockerfile b/devenv/docker/blocks/mssql/build/Dockerfile similarity index 100% rename from docker/blocks/mssql/build/Dockerfile rename to devenv/docker/blocks/mssql/build/Dockerfile diff --git a/docker/blocks/mssql/build/entrypoint.sh b/devenv/docker/blocks/mssql/build/entrypoint.sh similarity index 100% rename from docker/blocks/mssql/build/entrypoint.sh rename to devenv/docker/blocks/mssql/build/entrypoint.sh diff --git a/docker/blocks/mssql/build/setup.sh b/devenv/docker/blocks/mssql/build/setup.sh similarity index 100% rename from docker/blocks/mssql/build/setup.sh rename to devenv/docker/blocks/mssql/build/setup.sh diff --git a/docker/blocks/mssql/build/setup.sql.template b/devenv/docker/blocks/mssql/build/setup.sql.template similarity index 100% rename from docker/blocks/mssql/build/setup.sql.template rename to devenv/docker/blocks/mssql/build/setup.sql.template diff --git a/docker/blocks/mssql/docker-compose.yaml b/devenv/docker/blocks/mssql/docker-compose.yaml similarity index 85% rename from docker/blocks/mssql/docker-compose.yaml rename to devenv/docker/blocks/mssql/docker-compose.yaml index a346fb791f7..05a93629e73 100644 --- a/docker/blocks/mssql/docker-compose.yaml +++ b/devenv/docker/blocks/mssql/docker-compose.yaml @@ -1,6 +1,6 @@ mssql: build: - context: blocks/mssql/build + context: docker/blocks/mssql/build environment: ACCEPT_EULA: Y MSSQL_SA_PASSWORD: Password! diff --git a/docker/blocks/mssql_tests/docker-compose.yaml b/devenv/docker/blocks/mssql_tests/docker-compose.yaml similarity index 79% rename from docker/blocks/mssql_tests/docker-compose.yaml rename to devenv/docker/blocks/mssql_tests/docker-compose.yaml index 5da6aad82af..eea4d1e3561 100644 --- a/docker/blocks/mssql_tests/docker-compose.yaml +++ b/devenv/docker/blocks/mssql_tests/docker-compose.yaml @@ -1,6 +1,6 @@ mssqltests: build: - context: blocks/mssql/build + context: docker/blocks/mssql/build environment: ACCEPT_EULA: Y MSSQL_SA_PASSWORD: Password! 
diff --git a/docker/blocks/mysql/config b/devenv/docker/blocks/mysql/config similarity index 100% rename from docker/blocks/mysql/config rename to devenv/docker/blocks/mysql/config diff --git a/docker/blocks/mysql/docker-compose.yaml b/devenv/docker/blocks/mysql/docker-compose.yaml similarity index 100% rename from docker/blocks/mysql/docker-compose.yaml rename to devenv/docker/blocks/mysql/docker-compose.yaml diff --git a/docker/blocks/mysql_opendata/Dockerfile b/devenv/docker/blocks/mysql_opendata/Dockerfile similarity index 100% rename from docker/blocks/mysql_opendata/Dockerfile rename to devenv/docker/blocks/mysql_opendata/Dockerfile diff --git a/docker/blocks/mysql_opendata/docker-compose.yaml b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml similarity index 82% rename from docker/blocks/mysql_opendata/docker-compose.yaml rename to devenv/docker/blocks/mysql_opendata/docker-compose.yaml index 594eeed284a..4d478ee0860 100644 --- a/docker/blocks/mysql_opendata/docker-compose.yaml +++ b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml @@ -1,5 +1,5 @@ mysql_opendata: - build: blocks/mysql_opendata + build: docker/blocks/mysql_opendata environment: MYSQL_ROOT_PASSWORD: rootpass MYSQL_DATABASE: testdata diff --git a/docker/blocks/mysql_opendata/import_csv.sql b/devenv/docker/blocks/mysql_opendata/import_csv.sql similarity index 100% rename from docker/blocks/mysql_opendata/import_csv.sql rename to devenv/docker/blocks/mysql_opendata/import_csv.sql diff --git a/docker/blocks/mysql_tests/Dockerfile b/devenv/docker/blocks/mysql_tests/Dockerfile similarity index 100% rename from docker/blocks/mysql_tests/Dockerfile rename to devenv/docker/blocks/mysql_tests/Dockerfile diff --git a/docker/blocks/mysql_tests/docker-compose.yaml b/devenv/docker/blocks/mysql_tests/docker-compose.yaml similarity index 84% rename from docker/blocks/mysql_tests/docker-compose.yaml rename to devenv/docker/blocks/mysql_tests/docker-compose.yaml index 035a6167017..a7509d47880 100644 --- a/docker/blocks/mysql_tests/docker-compose.yaml +++ b/devenv/docker/blocks/mysql_tests/docker-compose.yaml @@ -1,6 +1,6 @@ mysqltests: build: - context: blocks/mysql_tests + context: docker/blocks/mysql_tests environment: MYSQL_ROOT_PASSWORD: rootpass MYSQL_DATABASE: grafana_tests diff --git a/docker/blocks/mysql_tests/setup.sql b/devenv/docker/blocks/mysql_tests/setup.sql similarity index 100% rename from docker/blocks/mysql_tests/setup.sql rename to devenv/docker/blocks/mysql_tests/setup.sql diff --git a/docker/blocks/nginx_proxy/Dockerfile b/devenv/docker/blocks/nginx_proxy/Dockerfile similarity index 100% rename from docker/blocks/nginx_proxy/Dockerfile rename to devenv/docker/blocks/nginx_proxy/Dockerfile diff --git a/docker/blocks/nginx_proxy/docker-compose.yaml b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml similarity index 88% rename from docker/blocks/nginx_proxy/docker-compose.yaml rename to devenv/docker/blocks/nginx_proxy/docker-compose.yaml index a0ceceb83ac..aefd7226f36 100644 --- a/docker/blocks/nginx_proxy/docker-compose.yaml +++ b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml @@ -5,5 +5,5 @@ # root_url = %(protocol)s://%(domain)s:10080/grafana/ nginxproxy: - build: blocks/nginx_proxy + build: docker/blocks/nginx_proxy network_mode: host diff --git a/docker/blocks/nginx_proxy/htpasswd b/devenv/docker/blocks/nginx_proxy/htpasswd similarity index 100% rename from docker/blocks/nginx_proxy/htpasswd rename to devenv/docker/blocks/nginx_proxy/htpasswd diff --git 
a/docker/blocks/nginx_proxy/nginx.conf b/devenv/docker/blocks/nginx_proxy/nginx.conf similarity index 100% rename from docker/blocks/nginx_proxy/nginx.conf rename to devenv/docker/blocks/nginx_proxy/nginx.conf diff --git a/docker/blocks/openldap/Dockerfile b/devenv/docker/blocks/openldap/Dockerfile similarity index 100% rename from docker/blocks/openldap/Dockerfile rename to devenv/docker/blocks/openldap/Dockerfile diff --git a/docker/blocks/openldap/docker-compose.yaml b/devenv/docker/blocks/openldap/docker-compose.yaml similarity index 82% rename from docker/blocks/openldap/docker-compose.yaml rename to devenv/docker/blocks/openldap/docker-compose.yaml index be06524a57d..d11858ccfb9 100644 --- a/docker/blocks/openldap/docker-compose.yaml +++ b/devenv/docker/blocks/openldap/docker-compose.yaml @@ -1,5 +1,5 @@ openldap: - build: blocks/openldap + build: docker/blocks/openldap environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org diff --git a/docker/blocks/openldap/entrypoint.sh b/devenv/docker/blocks/openldap/entrypoint.sh similarity index 100% rename from docker/blocks/openldap/entrypoint.sh rename to devenv/docker/blocks/openldap/entrypoint.sh diff --git a/docker/blocks/openldap/ldap_dev.toml b/devenv/docker/blocks/openldap/ldap_dev.toml similarity index 99% rename from docker/blocks/openldap/ldap_dev.toml rename to devenv/docker/blocks/openldap/ldap_dev.toml index e79771b57de..8767ff3c64a 100644 --- a/docker/blocks/openldap/ldap_dev.toml +++ b/devenv/docker/blocks/openldap/ldap_dev.toml @@ -72,6 +72,7 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,ou=groups,dc=grafana,dc=org" org_role = "Admin" +grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used # org_id = 1 diff --git a/docker/blocks/openldap/modules/memberof.ldif b/devenv/docker/blocks/openldap/modules/memberof.ldif similarity index 100% rename from docker/blocks/openldap/modules/memberof.ldif rename to devenv/docker/blocks/openldap/modules/memberof.ldif diff --git a/docker/blocks/openldap/notes.md b/devenv/docker/blocks/openldap/notes.md similarity index 100% rename from docker/blocks/openldap/notes.md rename to devenv/docker/blocks/openldap/notes.md diff --git a/docker/blocks/openldap/prepopulate.sh b/devenv/docker/blocks/openldap/prepopulate.sh similarity index 100% rename from docker/blocks/openldap/prepopulate.sh rename to devenv/docker/blocks/openldap/prepopulate.sh diff --git a/docker/blocks/openldap/prepopulate/1_units.ldif b/devenv/docker/blocks/openldap/prepopulate/1_units.ldif similarity index 100% rename from docker/blocks/openldap/prepopulate/1_units.ldif rename to devenv/docker/blocks/openldap/prepopulate/1_units.ldif diff --git a/docker/blocks/openldap/prepopulate/2_users.ldif b/devenv/docker/blocks/openldap/prepopulate/2_users.ldif similarity index 100% rename from docker/blocks/openldap/prepopulate/2_users.ldif rename to devenv/docker/blocks/openldap/prepopulate/2_users.ldif diff --git a/docker/blocks/openldap/prepopulate/3_groups.ldif b/devenv/docker/blocks/openldap/prepopulate/3_groups.ldif similarity index 100% rename from docker/blocks/openldap/prepopulate/3_groups.ldif rename to devenv/docker/blocks/openldap/prepopulate/3_groups.ldif diff --git a/docker/blocks/opentsdb/docker-compose.yaml b/devenv/docker/blocks/opentsdb/docker-compose.yaml similarity index 100% rename from docker/blocks/opentsdb/docker-compose.yaml rename to devenv/docker/blocks/opentsdb/docker-compose.yaml diff --git 
a/docker/blocks/postgres/docker-compose.yaml b/devenv/docker/blocks/postgres/docker-compose.yaml similarity index 100% rename from docker/blocks/postgres/docker-compose.yaml rename to devenv/docker/blocks/postgres/docker-compose.yaml diff --git a/docker/blocks/postgres_tests/Dockerfile b/devenv/docker/blocks/postgres_tests/Dockerfile similarity index 100% rename from docker/blocks/postgres_tests/Dockerfile rename to devenv/docker/blocks/postgres_tests/Dockerfile diff --git a/docker/blocks/postgres_tests/docker-compose.yaml b/devenv/docker/blocks/postgres_tests/docker-compose.yaml similarity index 63% rename from docker/blocks/postgres_tests/docker-compose.yaml rename to devenv/docker/blocks/postgres_tests/docker-compose.yaml index f5ce0a5a3d3..7e6da7d8517 100644 --- a/docker/blocks/postgres_tests/docker-compose.yaml +++ b/devenv/docker/blocks/postgres_tests/docker-compose.yaml @@ -1,6 +1,6 @@ postgrestest: build: - context: blocks/postgres_tests + context: docker/blocks/postgres_tests environment: POSTGRES_USER: grafanatest POSTGRES_PASSWORD: grafanatest diff --git a/docker/blocks/postgres_tests/setup.sql b/devenv/docker/blocks/postgres_tests/setup.sql similarity index 100% rename from docker/blocks/postgres_tests/setup.sql rename to devenv/docker/blocks/postgres_tests/setup.sql diff --git a/docker/blocks/prometheus/Dockerfile b/devenv/docker/blocks/prometheus/Dockerfile similarity index 100% rename from docker/blocks/prometheus/Dockerfile rename to devenv/docker/blocks/prometheus/Dockerfile diff --git a/docker/blocks/prometheus/alert.rules b/devenv/docker/blocks/prometheus/alert.rules similarity index 100% rename from docker/blocks/prometheus/alert.rules rename to devenv/docker/blocks/prometheus/alert.rules diff --git a/docker/blocks/prometheus2/docker-compose.yaml b/devenv/docker/blocks/prometheus/docker-compose.yaml similarity index 86% rename from docker/blocks/prometheus2/docker-compose.yaml rename to devenv/docker/blocks/prometheus/docker-compose.yaml index 589df868084..db778060dde 100644 --- a/docker/blocks/prometheus2/docker-compose.yaml +++ b/devenv/docker/blocks/prometheus/docker-compose.yaml @@ -1,5 +1,5 @@ prometheus: - build: blocks/prometheus2 + build: docker/blocks/prometheus network_mode: host ports: - "9090:9090" @@ -25,7 +25,7 @@ - "9093:9093" prometheus-random-data: - build: blocks/prometheus_random_data + build: docker/blocks/prometheus_random_data network_mode: host ports: - "8081:8080" diff --git a/docker/blocks/prometheus/prometheus.yml b/devenv/docker/blocks/prometheus/prometheus.yml similarity index 100% rename from docker/blocks/prometheus/prometheus.yml rename to devenv/docker/blocks/prometheus/prometheus.yml diff --git a/docker/blocks/prometheus2/Dockerfile b/devenv/docker/blocks/prometheus2/Dockerfile similarity index 100% rename from docker/blocks/prometheus2/Dockerfile rename to devenv/docker/blocks/prometheus2/Dockerfile diff --git a/docker/blocks/prometheus2/alert.rules b/devenv/docker/blocks/prometheus2/alert.rules similarity index 100% rename from docker/blocks/prometheus2/alert.rules rename to devenv/docker/blocks/prometheus2/alert.rules diff --git a/docker/blocks/prometheus/docker-compose.yaml b/devenv/docker/blocks/prometheus2/docker-compose.yaml similarity index 85% rename from docker/blocks/prometheus/docker-compose.yaml rename to devenv/docker/blocks/prometheus2/docker-compose.yaml index 3c304cc74ad..d586b4b5742 100644 --- a/docker/blocks/prometheus/docker-compose.yaml +++ b/devenv/docker/blocks/prometheus2/docker-compose.yaml @@ -1,5 +1,5 @@ 
prometheus: - build: blocks/prometheus + build: docker/blocks/prometheus2 network_mode: host ports: - "9090:9090" @@ -25,7 +25,7 @@ - "9093:9093" prometheus-random-data: - build: blocks/prometheus_random_data + build: docker/blocks/prometheus_random_data network_mode: host ports: - "8081:8080" diff --git a/docker/blocks/prometheus2/prometheus.yml b/devenv/docker/blocks/prometheus2/prometheus.yml similarity index 100% rename from docker/blocks/prometheus2/prometheus.yml rename to devenv/docker/blocks/prometheus2/prometheus.yml diff --git a/docker/blocks/prometheus_mac/Dockerfile b/devenv/docker/blocks/prometheus_mac/Dockerfile similarity index 100% rename from docker/blocks/prometheus_mac/Dockerfile rename to devenv/docker/blocks/prometheus_mac/Dockerfile diff --git a/docker/blocks/prometheus_mac/alert.rules b/devenv/docker/blocks/prometheus_mac/alert.rules similarity index 100% rename from docker/blocks/prometheus_mac/alert.rules rename to devenv/docker/blocks/prometheus_mac/alert.rules diff --git a/docker/blocks/prometheus_mac/docker-compose.yaml b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml similarity index 82% rename from docker/blocks/prometheus_mac/docker-compose.yaml rename to devenv/docker/blocks/prometheus_mac/docker-compose.yaml index ef53b07418a..b73d278fae2 100644 --- a/docker/blocks/prometheus_mac/docker-compose.yaml +++ b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml @@ -1,5 +1,5 @@ prometheus: - build: blocks/prometheus_mac + build: docker/blocks/prometheus_mac ports: - "9090:9090" @@ -21,6 +21,6 @@ - "9093:9093" prometheus-random-data: - build: blocks/prometheus_random_data + build: docker/blocks/prometheus_random_data ports: - "8081:8080" diff --git a/docker/blocks/prometheus_mac/prometheus.yml b/devenv/docker/blocks/prometheus_mac/prometheus.yml similarity index 100% rename from docker/blocks/prometheus_mac/prometheus.yml rename to devenv/docker/blocks/prometheus_mac/prometheus.yml diff --git a/docker/blocks/prometheus_random_data/Dockerfile b/devenv/docker/blocks/prometheus_random_data/Dockerfile similarity index 100% rename from docker/blocks/prometheus_random_data/Dockerfile rename to devenv/docker/blocks/prometheus_random_data/Dockerfile diff --git a/docker/blocks/smtp/Dockerfile b/devenv/docker/blocks/smtp/Dockerfile similarity index 100% rename from docker/blocks/smtp/Dockerfile rename to devenv/docker/blocks/smtp/Dockerfile diff --git a/docker/blocks/smtp/bootstrap.sh b/devenv/docker/blocks/smtp/bootstrap.sh similarity index 100% rename from docker/blocks/smtp/bootstrap.sh rename to devenv/docker/blocks/smtp/bootstrap.sh diff --git a/docker/blocks/smtp/docker-compose.yaml b/devenv/docker/blocks/smtp/docker-compose.yaml similarity index 100% rename from docker/blocks/smtp/docker-compose.yaml rename to devenv/docker/blocks/smtp/docker-compose.yaml diff --git a/docker/buildcontainer/Dockerfile b/devenv/docker/buildcontainer/Dockerfile similarity index 100% rename from docker/buildcontainer/Dockerfile rename to devenv/docker/buildcontainer/Dockerfile diff --git a/docker/buildcontainer/build.sh b/devenv/docker/buildcontainer/build.sh similarity index 100% rename from docker/buildcontainer/build.sh rename to devenv/docker/buildcontainer/build.sh diff --git a/docker/buildcontainer/build_circle.sh b/devenv/docker/buildcontainer/build_circle.sh similarity index 100% rename from docker/buildcontainer/build_circle.sh rename to devenv/docker/buildcontainer/build_circle.sh diff --git a/docker/buildcontainer/run_circle.sh 
b/devenv/docker/buildcontainer/run_circle.sh similarity index 100% rename from docker/buildcontainer/run_circle.sh rename to devenv/docker/buildcontainer/run_circle.sh diff --git a/docker/compose_header.yml b/devenv/docker/compose_header.yml similarity index 100% rename from docker/compose_header.yml rename to devenv/docker/compose_header.yml diff --git a/docker/debtest/Dockerfile b/devenv/docker/debtest/Dockerfile similarity index 100% rename from docker/debtest/Dockerfile rename to devenv/docker/debtest/Dockerfile diff --git a/docker/debtest/build.sh b/devenv/docker/debtest/build.sh similarity index 100% rename from docker/debtest/build.sh rename to devenv/docker/debtest/build.sh diff --git a/devenv/docker/ha_test/.gitignore b/devenv/docker/ha_test/.gitignore new file mode 100644 index 00000000000..0f4e139e204 --- /dev/null +++ b/devenv/docker/ha_test/.gitignore @@ -0,0 +1 @@ +grafana/provisioning/dashboards/alerts/alert-* \ No newline at end of file diff --git a/devenv/docker/ha_test/README.md b/devenv/docker/ha_test/README.md new file mode 100644 index 00000000000..bc93727ceae --- /dev/null +++ b/devenv/docker/ha_test/README.md @@ -0,0 +1,137 @@ +# Grafana High Availability (HA) test setup + +A set of docker compose services which together create a Grafana HA test setup with the capability of easily +scaling the number of Grafana instances up/down. + +Included services + +* Grafana +* MySQL - Grafana configuration database and session storage +* Prometheus - Monitoring of Grafana and used as the datasource for provisioned alert rules +* Nginx - Reverse proxy for Grafana and Prometheus. Enables browsing Grafana/Prometheus UI using a hostname + +## Prerequisites + +### Build Grafana docker container + +Build a Grafana docker container from the current branch and commit, and tag it as grafana/grafana:dev. + +```bash +$ cd +$ make build-docker-full +``` + +### Virtual host names + +#### Alternative 1 - Use dnsmasq + +```bash +$ sudo apt-get install dnsmasq +$ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null +$ sudo /etc/init.d/dnsmasq restart +$ ping whatever.loc +PING whatever.loc (127.0.0.1) 56(84) bytes of data. +64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms +--- whatever.loc ping statistics --- +1 packet transmitted, 1 received, 0% packet loss, time 1998ms +``` + +#### Alternative 2 - Manually update /etc/hosts + +Update your `/etc/hosts` to be able to access Grafana and/or Prometheus UI using a hostname. + +```bash +$ cat /etc/hosts +127.0.0.1 grafana.loc +127.0.0.1 prometheus.loc +``` + +## Start services + +```bash +$ docker-compose up -d +``` + +Browse +* http://grafana.loc/ +* http://prometheus.loc/ + +Check for any errors + +```bash +$ docker-compose logs | grep error +``` + +### Scale Grafana instances up/down + +Scale the number of Grafana instances to `<instances>` + +```bash +$ docker-compose up --scale grafana=<instances> -d +# for example 3 instances +$ docker-compose up --scale grafana=3 -d +``` + +## Test alerting + +### Create notification channels + +Creates the default notification channels, if they do not already exist + +```bash +$ ./alerts.sh setup +``` + +### Slack notifications + +Disable + +```bash +$ ./alerts.sh slack -d +``` + +Enable and configure the url + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/... +``` + +Enable, configure the url and enable reminders + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/...
-r -e 10m +``` + +### Provision alert dashboards with alert rules + +Provision 1 dashboard/alert rule (default) + +```bash +$ ./alerts.sh provision +``` + +Provision 10 dashboards/alert rules + +```bash +$ ./alerts.sh provision -a 10 +``` + +Provision 10 dashboards/alert rules and change the condition to `gt 100` + +```bash +$ ./alerts.sh provision -a 10 -c 100 +``` + +### Pause/unpause all alert rules + +Pause + +```bash +$ ./alerts.sh pause +``` + +Unpause + +```bash +$ ./alerts.sh unpause +``` diff --git a/devenv/docker/ha_test/alerts.sh b/devenv/docker/ha_test/alerts.sh new file mode 100755 index 00000000000..a05a4581739 --- /dev/null +++ b/devenv/docker/ha_test/alerts.sh @@ -0,0 +1,156 @@ +#!/bin/bash + +requiresJsonnet() { + if ! type "jsonnet" > /dev/null; then + echo "you need to install jsonnet to run this script" + echo "follow the instructions on https://github.com/google/jsonnet" + exit 1 + fi +} + +setup() { + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/1) + if [ $STATUS -eq 200 ]; then + echo "Email already exists, skipping..." + else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Email", + "type": "email", + "isDefault": false, + "sendReminder": false, + "uploadImage": true, + "settings": { + "addresses": "user@test.com" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi + + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/2) + if [ $STATUS -eq 200 ]; then + echo "Slack already exists, skipping..." + else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Slack", + "type": "slack", + "isDefault": false, + "sendReminder": false, + "uploadImage": true + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi +} + +slack() { + enabled=true + url='' + remind=false + remindEvery='10m' + + while getopts ":e:u:dr" o; do + case "${o}" in + e) + remindEvery=${OPTARG} + ;; + u) + url=${OPTARG} + ;; + d) + enabled=false + ;; + r) + remind=true + ;; + esac + done + shift $((OPTIND-1)) + + curl -X PUT \ + -H "Content-Type: application/json" \ + -d '{ + "id": 2, + "name": "Slack", + "type": "slack", + "isDefault": '$enabled', + "sendReminder": '$remind', + "frequency": "'$remindEvery'", + "uploadImage": true, + "settings": { + "url": "'$url'" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications/2 +} + +provision() { + alerts=1 + condition=65 + while getopts ":a:c:" o; do + case "${o}" in + a) + alerts=${OPTARG} + ;; + c) + condition=${OPTARG} + ;; + esac + done + shift $((OPTIND-1)) + + requiresJsonnet + + rm -rf grafana/provisioning/dashboards/alerts/alert-*.json + jsonnet -m grafana/provisioning/dashboards/alerts grafana/provisioning/alerts.jsonnet --ext-code alerts=$alerts --ext-code condition=$condition +} + +pause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":true}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +unpause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":false}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +usage() { + echo -e "Usage: ./alerts.sh COMMAND [OPTIONS]\n" + echo -e "Commands" + echo -e " setup\t\t creates default alert notification channels" + echo -e " slack\t\t configure slack notification channel" + echo -e " [-d]\t\t\t disable notifier, default enabled" + echo -e " [-u]\t\t\t url" + echo -e " [-r]\t\t\t send reminders" + echo -e " [-e ]\t\t default 10m\n" + echo -e " provision\t provision
alerts" + echo -e " [-a ]\t default 1" + echo -e " [-c ]\t default 65\n" + echo -e " pause\t\t pause all alerts" + echo -e " unpause\t unpause all alerts" +} + +main() { + local cmd=$1 + + if [[ $cmd == "setup" ]]; then + setup + elif [[ $cmd == "slack" ]]; then + slack "${@:2}" + elif [[ $cmd == "provision" ]]; then + provision "${@:2}" + elif [[ $cmd == "pause" ]]; then + pause + elif [[ $cmd == "unpause" ]]; then + unpause + fi + + if [[ -z "$cmd" ]]; then + usage + fi +} + +main "$@" diff --git a/devenv/docker/ha_test/docker-compose.yaml b/devenv/docker/ha_test/docker-compose.yaml new file mode 100644 index 00000000000..ce8630d88a4 --- /dev/null +++ b/devenv/docker/ha_test/docker-compose.yaml @@ -0,0 +1,78 @@ +version: "2.1" + +services: + nginx-proxy: + image: jwilder/nginx-proxy + ports: + - "80:80" + volumes: + - /var/run/docker.sock:/tmp/docker.sock:ro + + db: + image: mysql + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - 3306 + healthcheck: + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] + timeout: 10s + retries: 10 + + # db: + # image: postgres:9.3 + # environment: + # POSTGRES_DATABASE: grafana + # POSTGRES_USER: grafana + # POSTGRES_PASSWORD: password + # ports: + # - 5432 + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -d grafana -U grafana"] + # timeout: 10s + # retries: 10 + + grafana: + image: grafana/grafana:dev + volumes: + - ./grafana/provisioning/:/etc/grafana/provisioning/ + environment: + - VIRTUAL_HOST=grafana.loc + - GF_SERVER_ROOT_URL=http://grafana.loc + - GF_DATABASE_NAME=grafana + - GF_DATABASE_USER=grafana + - GF_DATABASE_PASSWORD=password + - GF_DATABASE_TYPE=mysql + - GF_DATABASE_HOST=db:3306 + - GF_SESSION_PROVIDER=mysql + - GF_SESSION_PROVIDER_CONFIG=grafana:password@tcp(db:3306)/grafana?allowNativePasswords=true + # - GF_DATABASE_TYPE=postgres + # - GF_DATABASE_HOST=db:5432 + # - GF_DATABASE_SSL_MODE=disable + # - GF_SESSION_PROVIDER=postgres + # - GF_SESSION_PROVIDER_CONFIG=user=grafana password=password host=db port=5432 dbname=grafana sslmode=disable + - GF_LOG_FILTERS=alerting.notifier:debug,alerting.notifier.slack:debug + ports: + - 3000 + depends_on: + db: + condition: service_healthy + + prometheus: + image: prom/prometheus:v2.4.2 + volumes: + - ./prometheus/:/etc/prometheus/ + environment: + - VIRTUAL_HOST=prometheus.loc + ports: + - 9090 + + # mysqld-exporter: + # image: prom/mysqld-exporter + # environment: + # - DATA_SOURCE_NAME=grafana:password@(mysql:3306)/ + # ports: + # - 9104 diff --git a/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet new file mode 100644 index 00000000000..86ded7e79d6 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet @@ -0,0 +1,202 @@ +local numAlerts = std.extVar('alerts'); +local condition = std.extVar('condition'); +local arr = std.range(1, numAlerts); + +local alertDashboardTemplate = { + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 65 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "10s", + "handler": 1, + "name": "bulk alerting", + "noDataState": "no_data", + "notifications": [ + { + "id": 2 + } 
+ ] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "$$hashKey": "object:117", + "expr": "go_goroutines", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 50 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New dashboard", + "uid": null, + "version": 0 +}; + + +{ + ['alert-' + std.toString(x) + '.json']: + alertDashboardTemplate + { + panels: [ + alertDashboardTemplate.panels[0] + + { + alert+: { + name: 'Alert rule ' + x, + conditions: [ + alertDashboardTemplate.panels[0].alert.conditions[0] + + { + evaluator+: { + params: [condition] + } + }, + ], + }, + }, + ], + uid: 'alert-' + x, + title: 'Alert ' + x + }, + for x in arr +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml new file mode 100644 index 00000000000..60b6cd4bb04 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml @@ -0,0 +1,8 @@ +apiVersion: 1 + +providers: + - name: 'Alerts' + folder: 'Alerts' + type: file + options: + path: /etc/grafana/provisioning/dashboards/alerts diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json new file mode 100644 index 00000000000..53e33c37b1f --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json @@ -0,0 +1,172 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": { + "Active alerts": "#bf1b00" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": "", + 
"legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "Active grafana instances", + "dashes": true, + "fill": 0 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(increase(grafana_alerting_notification_sent_total[1m])) by(job)", + "format": "time_series", + "instant": false, + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "Notifications sent", + "refId": "A" + }, + { + "expr": "min(grafana_alerting_active_alerts) without(instance)", + "format": "time_series", + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "Active alerts", + "refId": "B" + }, + { + "expr": "count(up{job=\"grafana\"})", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Active grafana instances", + "refId": "C" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Notifications sent vs active alerts", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": 3 + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Overview", + "uid": "xHy7-hAik", + "version": 6 +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml new file mode 100644 index 00000000000..8d59793be16 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml @@ -0,0 +1,11 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + jsonData: + timeInterval: 10s + queryTimeout: 30s + httpMethod: POST \ No newline at end of file diff --git a/devenv/docker/ha_test/prometheus/prometheus.yml b/devenv/docker/ha_test/prometheus/prometheus.yml new file mode 100644 index 00000000000..ea97ba8ba05 --- /dev/null +++ b/devenv/docker/ha_test/prometheus/prometheus.yml @@ -0,0 +1,39 @@ +# my global config +global: + scrape_interval: 10s # By default, scrape targets every 15 seconds. + evaluation_interval: 10s # By default, scrape targets every 15 seconds. + # scrape_timeout is set to the global default (10s). + +# Load and evaluate rules in this file every 'evaluation_interval' seconds. 
+#rule_files: +# - "alert.rules" +# - "first.rules" +# - "second.rules" + +# alerting: +# alertmanagers: +# - scheme: http +# static_configs: +# - targets: +# - "127.0.0.1:9093" + +scrape_configs: + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + - job_name: 'grafana' + dns_sd_configs: + - names: + - 'grafana' + type: 'A' + port: 3000 + refresh_interval: 10s + + # - job_name: 'mysql' + # dns_sd_configs: + # - names: + # - 'mysqld-exporter' + # type: 'A' + # port: 9104 + # refresh_interval: 10s \ No newline at end of file diff --git a/docker/rpmtest/build.sh b/devenv/docker/rpmtest/build.sh similarity index 100% rename from docker/rpmtest/build.sh rename to devenv/docker/rpmtest/build.sh diff --git a/tests/api/clearState.test.ts b/devenv/e2e-api-tests/clearState.test.ts similarity index 100% rename from tests/api/clearState.test.ts rename to devenv/e2e-api-tests/clearState.test.ts diff --git a/tests/api/client.ts b/devenv/e2e-api-tests/client.ts similarity index 100% rename from tests/api/client.ts rename to devenv/e2e-api-tests/client.ts diff --git a/tests/api/dashboard.test.ts b/devenv/e2e-api-tests/dashboard.test.ts similarity index 100% rename from tests/api/dashboard.test.ts rename to devenv/e2e-api-tests/dashboard.test.ts diff --git a/tests/api/folder.test.ts b/devenv/e2e-api-tests/folder.test.ts similarity index 100% rename from tests/api/folder.test.ts rename to devenv/e2e-api-tests/folder.test.ts diff --git a/tests/api/jest.js b/devenv/e2e-api-tests/jest.js similarity index 100% rename from tests/api/jest.js rename to devenv/e2e-api-tests/jest.js diff --git a/tests/api/search.test.ts b/devenv/e2e-api-tests/search.test.ts similarity index 100% rename from tests/api/search.test.ts rename to devenv/e2e-api-tests/search.test.ts diff --git a/tests/api/setup.ts b/devenv/e2e-api-tests/setup.ts similarity index 100% rename from tests/api/setup.ts rename to devenv/e2e-api-tests/setup.ts diff --git a/tests/api/tsconfig.json b/devenv/e2e-api-tests/tsconfig.json similarity index 100% rename from tests/api/tsconfig.json rename to devenv/e2e-api-tests/tsconfig.json diff --git a/tests/api/user.test.ts b/devenv/e2e-api-tests/user.test.ts similarity index 100% rename from tests/api/user.test.ts rename to devenv/e2e-api-tests/user.test.ts diff --git a/devenv/setup.sh b/devenv/setup.sh index cc71ecc71bf..c9cc0d47a6f 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -11,7 +11,21 @@ bulkDashboard() { let COUNTER=COUNTER+1 done - ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml + ln -s -f ../../../devenv/bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml +} + +bulkAlertingDashboard() { + + requiresJsonnet + + COUNTER=0 + MAX=100 + while [ $COUNTER -lt $MAX ]; do + jsonnet -o "bulk_alerting_dashboards/alerting_dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk_alerting_dashboards/bulkdash_alerting.jsonnet'; bulkDash + { uid: 'bd-${COUNTER}', title: 'alerting-title-${COUNTER}' }" + let COUNTER=COUNTER+1 + done + + ln -s -f ../../../devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml } requiresJsonnet() { @@ -36,8 +50,9 @@ devDatasources() { usage() { echo -e "\n" echo "Usage:" - echo " bulk-dashboards - create and provisioning 400 dashboards" - echo " no args - provisiong core datasources and dev dashboards" + echo " bulk-dashboards - create and provision 400 dashboards" + echo " bulk-alerting-dashboards - create and provision
400 dashboards with alerts" + echo " no args - provisioning core datasources and dev dashboards" } main() { @@ -48,7 +63,9 @@ main() { local cmd=$1 - if [[ $cmd == "bulk-dashboards" ]]; then + if [[ $cmd == "bulk-alerting-dashboards" ]]; then + bulkAlertingDashboard + elif [[ $cmd == "bulk-dashboards" ]]; then bulkDashboard else devDashboards diff --git a/docs/README.md b/docs/README.md index ff5ef6a4131..7310f184a60 100644 --- a/docs/README.md +++ b/docs/README.md @@ -65,7 +65,7 @@ make docs-build This will rebuild the docs docker container. -To be able to use the image your have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image. +To be able to use the image you have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image. ### Editing content diff --git a/docs/sources/administration/permissions.md b/docs/sources/administration/permissions.md index e7b84a417c0..0d374f03647 100644 --- a/docs/sources/administration/permissions.md +++ b/docs/sources/administration/permissions.md @@ -52,12 +52,10 @@ This admin flag makes a user a `Super Admin`. This means they can access the `Se ### Dashboard & Folder Permissions -> Introduced in Grafana v5.0 - {{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}} For dashboards and dashboard folders there is a **Permissions** page that makes it possible to -remove the default role based permssions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**. +remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**. You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**. @@ -104,7 +102,7 @@ Permissions for a dashboard: Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins. -- **View**: Can only view existing dashboars/folders. +- **View**: Can only view existing dashboards/folders. - You cannot override permissions for users with **Org Admin Role** - A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
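These precedence rules can also be exercised directly against the dashboard permissions HTTP API. A minimal sketch, assuming a local instance with `admin:admin` credentials and a dashboard with id 1 (both placeholders, as is the user id); permission levels map to 1 = View, 2 = Edit and 4 = Admin:

```bash
# Keep the role-based defaults and grant a specific user (hypothetical id 11)
# only View. Per the rules above, a broader "Editor Role Can Edit" entry left
# in the ACL would still win over this more specific View entry for that user.
curl -X POST -H "Content-Type: application/json" \
  -d '{
    "items": [
      { "role": "Viewer", "permission": 1 },
      { "role": "Editor", "permission": 2 },
      { "userId": 11, "permission": 1 }
    ]
  }' \
  http://admin:admin@localhost:3000/api/dashboards/id/1/permissions
```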
diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index c57fb1683f0..8916b2bf6e3 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -71,6 +71,7 @@ Puppet | [https://forge.puppet.com/puppet/grafana](https://forge.puppet.com/pupp Ansible | [https://github.com/cloudalchemy/ansible-grafana](https://github.com/cloudalchemy/ansible-grafana) Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana) Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana) +Jsonnet | [https://github.com/grafana/grafonnet-lib/](https://github.com/grafana/grafonnet-lib/) ## Datasources @@ -122,7 +123,7 @@ datasources: withCredentials: # mark as default datasource. Max one per org isDefault: - # fields that will be converted to json and stored in json_data + # fields that will be converted to json and stored in jsonData jsonData: graphiteVersion: "1.1" tlsAuth: true @@ -146,7 +147,7 @@ Please refer to each datasource documentation for specific provisioning examples #### Json Data -Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use. +Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `jsonData` field. Here are the most common settings that the core datasources use. | Name | Type | Datasource | Description | | ---- | ---- | ---- | ---- | @@ -154,10 +155,10 @@ Since not all datasources have the same configuration settings we only have the | tlsAuthWithCACert | boolean | *All* | Enable TLS authentication using CA cert | | tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. | | graphiteVersion | string | Graphite | Graphite version | -| timeInterval | string | Elastic, InfluxDB & Prometheus | Lowest interval/step value that should be used for this data source | -| esVersion | number | Elastic | Elasticsearch version as an number (2/5/56) | -| timeField | string | Elastic | Which field that should be used as timestamp | -| interval | string | Elastic | Index date time format | +| timeInterval | string | Prometheus, Elasticsearch, InfluxDB, MySQL, PostgreSQL & MSSQL | Lowest interval/step value that should be used for this data source | +| esVersion | number | Elasticsearch | Elasticsearch version as a number (2/5/56) | +| timeField | string | Elasticsearch | Which field that should be used as timestamp | +| interval | string | Elasticsearch | Index date time format | | authType | string | Cloudwatch | Auth provider. keys/credentials/arn | | assumeRoleArn | string | Cloudwatch | ARN of Assume Role | | defaultRegion | string | Cloudwatch | AWS region | @@ -165,6 +166,12 @@ Since not all datasources have the same configuration settings we only have the | tsdbVersion | string | OpenTSDB | Version | | tsdbResolution | string | OpenTSDB | Resolution | | sslmode | string | PostgreSQL | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' | +| encrypt | string | MSSQL | Connection SSL encryption handling. 
'disable', 'false' or 'true' | +| postgresVersion | number | PostgreSQL | Postgres version as a number (903/904/905/906/1000) meaning v9.3, v9.4, ..., v10 | +| timescaledb | boolean | PostgreSQL | Enable usage of TimescaleDB extension | +| maxOpenConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of open connections to the database (Grafana v5.4+) | +| maxIdleConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of connections in the idle connection pool (Grafana v5.4+) | +| connMaxLifetime | number | MySQL, PostgreSQL & MSSQL | Maximum amount of time in seconds a connection may be reused (Grafana v5.4+) | #### Secure Json Data @@ -197,7 +204,7 @@ providers: folder: '' type: file disableDeletion: false - updateIntervalSeconds: 3 #how often Grafana will scan for changed dashboards + updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards options: path: /var/lib/grafana/dashboards ``` @@ -214,7 +221,7 @@ Note: The JSON shown in input field and when using `Copy JSON to Clipboard` and/ {{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="500px" class="docs-image--no-shadow" >}} -### Reuseable Dashboard Urls +### Reusable Dashboard Urls If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier. When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated. diff --git a/docs/sources/alerting/notifications.md b/docs/sources/alerting/notifications.md index 58046cafae4..307af1ee15e 100644 --- a/docs/sources/alerting/notifications.md +++ b/docs/sources/alerting/notifications.md @@ -16,12 +16,11 @@ weight = 2 When an alert changes state, it sends out notifications. Each alert rule can have multiple notifications. In order to add a notification to an alert rule you first need -to add and configure a `notification` channel (can be email, PagerDuty or other integration). This is done from the Notification Channels page. +to add and configure a `notification` channel (can be email, PagerDuty or other integration). +This is done from the Notification Channels page. ## Notification Channel Setup -{{< imgbox max-width="30%" img="/img/docs/v50/alerts_notifications_menu.png" caption="Alerting Notification Channels" >}} - On the Notification Channels page hit the `New Channel` button to go the page where you can configure and setup a new Notification Channel. @@ -30,7 +29,31 @@ sure it's setup correctly. ### Send on all alerts -When checked, this option will nofity for all alert rules - existing and new. +When checked, this option will notify for all alert rules - existing and new. + +### Send reminders + +> Only available in Grafana v5.3 and above. + +{{< docs-imagebox max-width="600px" img="/img/docs/v53/alerting_notification_reminders.png" class="docs-image--right" caption="Alerting notification reminders setup" >}} + +When this option is checked additional notifications (reminders) will be sent for triggered alerts. You can specify how often reminders +should be sent using number of seconds (s), minutes (m) or hours (h), for example `30s`, `3m`, `5m` or `1h` etc. + +**Important:** Alert reminders are sent after rules are evaluated. 
Therefore a reminder can never be sent more frequently than a configured [alert rule evaluation interval](/alerting/rules/#name-evaluation-interval). + +These examples show how often and when reminders are sent for a triggered alert. + +Alert rule evaluation interval | Send reminders every | Reminder sent every (after last alert notification) +---------- | ----------- | ----------- +`30s` | `15s` | ~30 seconds +`1m` | `5m` | ~5 minutes +`5m` | `15m` | ~15 minutes +`6m` | `20m` | ~24 minutes +`1h` | `15m` | ~1 hour +`1h` | `2h` | ~2 hours + +
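Reminders can also be enabled when a notification channel is created through the HTTP API instead of the UI. A minimal sketch, assuming the `sendReminder` and `frequency` JSON fields mirror the channel options described above (the channel name, e-mail address and `$API_KEY` are placeholder values):

```bash
# Create an email channel that re-notifies every 15 minutes while an alert keeps firing.
curl -X POST http://localhost:3000/api/alert-notifications \
  -H "Authorization: Bearer $API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "name": "ops-email",
    "type": "email",
    "sendReminder": true,
    "frequency": "15m",
    "settings": { "addresses": "ops@example.com" }
  }'
```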
## Supported Notification Types @@ -105,7 +128,7 @@ Example json body: In DingTalk PC Client: -1. Click "more" icon on left bottom of the panel. +1. Click "more" icon on upper right of the panel. 2. Click "Robot Manage" item in the pop menu, there will be a new panel call "Robot Manage". @@ -132,23 +155,23 @@ Once these two properties are set, you can send the alerts to Kafka for further ### All supported notifiers -Name | Type |Support images ------|------------ | ------ -Slack | `slack` | yes -Pagerduty | `pagerduty` | yes -Email | `email` | yes -Webhook | `webhook` | link -Kafka | `kafka` | no -Hipchat | `hipchat` | yes -VictorOps | `victorops` | yes -Sensu | `sensu` | yes -OpsGenie | `opsgenie` | yes -Threema | `threema` | yes -Pushover | `pushover` | no -Telegram | `telegram` | no -Line | `line` | no -Prometheus Alertmanager | `prometheus-alertmanager` | no -Microsoft Teams | `teams` | yes +Name | Type |Support images | Support reminders +-----|------------ | ------ | ------ | +Slack | `slack` | yes | yes +Pagerduty | `pagerduty` | yes | yes +Email | `email` | yes | yes +Webhook | `webhook` | link | yes +Kafka | `kafka` | no | yes +Hipchat | `hipchat` | yes | yes +VictorOps | `victorops` | yes | yes +Sensu | `sensu` | yes | yes +OpsGenie | `opsgenie` | yes | yes +Threema | `threema` | yes | yes +Pushover | `pushover` | no | yes +Telegram | `telegram` | no | yes +Line | `line` | no | yes +Microsoft Teams | `teams` | yes | yes +Prometheus Alertmanager | `prometheus-alertmanager` | no | no diff --git a/docs/sources/alerting/rules.md b/docs/sources/alerting/rules.md index fa7332e7145..488619055e2 100644 --- a/docs/sources/alerting/rules.md +++ b/docs/sources/alerting/rules.md @@ -88,6 +88,11 @@ So as you can see from the above scenario Grafana will not send out notification to fire if the rule already is in state `Alerting`. To improve support for queries that return multiple series we plan to track state **per series** in a future release. +> Starting with Grafana v5.3 you can configure reminders to be sent for triggered alerts. This will send additional notifications +> when an alert continues to fire. If other series (like server2 in the example above) also cause the alert rule to fire they will +> be included in the reminder notification. Depending on what notification channel you're using you may be able to take advantage +> of this feature for identifying new/existing series causing alert to fire. [Read more about notification reminders here](/alerting/notifications/#send-reminders). + ### No Data / Null values Below your conditions you can configure how the rule evaluation engine should handle queries that return no data or only null values. 
diff --git a/docs/sources/tutorials/authproxy.md b/docs/sources/auth/auth-proxy.md similarity index 67% rename from docs/sources/tutorials/authproxy.md rename to docs/sources/auth/auth-proxy.md index 6f13de85c18..e066eed9190 100644 --- a/docs/sources/tutorials/authproxy.md +++ b/docs/sources/auth/auth-proxy.md @@ -1,42 +1,43 @@ +++ -title = "Grafana Authproxy" +title = "Auth Proxy" +description = "Grafana Auth Proxy Guide " +keywords = ["grafana", "configuration", "documentation", "proxy"] type = "docs" -keywords = ["grafana", "tutorials", "authproxy"] +aliases = ["/tutorials/authproxy/"] [menu.docs] -parent = "tutorials" -weight = 10 +name = "Auth Proxy" +identifier = "auth-proxy" +parent = "authentication" +weight = 2 +++ -# Grafana Authproxy +# Auth Proxy Authentication -AuthProxy allows you to offload the authentication of users to a web server (there are many reasons why you’d want to run a web server in front of a production version of Grafana, especially if it’s exposed to the Internet). +You can configure Grafana to let an HTTP reverse proxy handle authentication. Popular web servers have a very +extensive list of pluggable authentication modules, and any of them can be used with the AuthProxy feature. +Below we detail the configuration options for auth proxy. -Popular web servers have a very extensive list of pluggable authentication modules, and any of them can be used with the AuthProxy feature. - -The Grafana AuthProxy feature is very simple in design, but it is this simplicity that makes it so powerful. - -## Interacting with Grafana’s AuthProxy via curl - -The AuthProxy feature can be configured through the Grafana configuration file with the following options: - -```js [auth.proxy] +```bash [auth.proxy] +# Defaults to false, but set to true to enable this feature enabled = true +# HTTP Header name that will contain the username or email header_name = X-WEBAUTH-USER +# HTTP Header property, defaults to `username` but can also be `email` header_property = username +# Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`. auto_sign_up = true +# If combined with Grafana LDAP integration define sync interval ldap_sync_ttl = 60 +# Limit where auth proxy requests come from by configuring a list of IP addresses. +# This can be used to prevent users spoofing the X-WEBAUTH-USER header. whitelist = +# Optionally define more headers to sync other user attributes +# Example `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL` +headers = ``` -* **enabled**: this is to toggle the feature on or off -* **header_name**: this is the HTTP header name that passes the username or email address of the authenticated user to Grafana. Grafana will trust what ever username is contained in this header and automatically log the user in. -* **header_property**: this tells Grafana whether the value in the header_name is a username or an email address. (In Grafana you can log in using your account username or account email) -* **auto_sign_up**: If set to true, Grafana will automatically create user accounts in the Grafana DB if one does not exist. If set to false, users who do not exist in the GrafanaDB won’t be able to log in, even though their username and password are valid. -* **ldap_sync_ttl**: When both auth.proxy and auth.ldap are enabled, user's organisation and role are synchronised from ldap after the http proxy authentication. You can force ldap re-synchronisation after `ldap_sync_ttl` minutes. 
-* **whitelist**: Comma separated list of trusted authentication proxies IP. - -With a fresh install of Grafana, using the above configuration for the authProxy feature, we can send a simple API call to list all users. The only user that will be present is the default “Admin” user that is added the first time Grafana starts up. As you can see all we need to do to authenticate the request is to provide the “X-WEBAUTH-USER” header. +## Interacting with Grafana’s AuthProxy via curl ```bash curl -H "X-WEBAUTH-USER: admin" http://localhost:3000/api/users @@ -71,7 +72,8 @@ I’ll demonstrate how to use Apache for authenticating users. In this example w ### Apache BasicAuth -In this example we use Apache as a reverseProxy in front of Grafana. Apache handles the Authentication of users before forwarding requests to the Grafana backend service. +In this example we use Apache as a reverse proxy in front of Grafana. Apache handles the Authentication of users before forwarding requests to the Grafana backend service. + #### Apache configuration @@ -116,38 +118,7 @@ In this example we use Apache as a reverseProxy in front of Grafana. Apache hand * The last 3 lines are then just standard reverse proxy configuration to direct all authenticated requests to our Grafana server running on port 3000. -#### Grafana configuration - -```bash -############# Users ################ -[users] - # disable user signup / registration -allow_sign_up = false - -# Set to true to automatically assign new users to the default organization (id 1) -auto_assign_org = true - -# Default role new users will be automatically assigned (if auto_assign_org above is set to true) - auto_assign_org_role = Editor - - -############ Auth Proxy ######## -[auth.proxy] -enabled = true - -# the Header name that contains the authenticated user. -header_name = X-WEBAUTH-USER - -# does the user authenticate against the proxy using a 'username' or an 'email' -header_property = username - -# automatically add the user to the system if they don't already exist. -auto_sign_up = true -``` - -#### Full walk through using Docker. - -##### Grafana Container +## Full walkthrough using Docker For this example, we use the official Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/) @@ -166,7 +137,8 @@ header_property = username auto_sign_up = true ``` -* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose any ports for this container as it will only be connected to by our Apache container. +Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose +any ports for this container as it will only be connected to by our Apache container. ```bash docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana grafana/grafana diff --git a/docs/sources/auth/generic-oauth.md b/docs/sources/auth/generic-oauth.md new file mode 100644 index 00000000000..6fa6531fc98 --- /dev/null +++ b/docs/sources/auth/generic-oauth.md @@ -0,0 +1,214 @@ ++++ +title = "OAuth authentication" +description = "Grafana OAuth Authentication Guide" +keywords = ["grafana", "configuration", "documentation", "oauth"] +type = "docs" +[menu.docs] +name = "Generic OAuth" +identifier = "generic_oauth" +parent = "authentication" +weight = 3 ++++ + +# Generic OAuth Authentication + +You can configure many different OAuth2 authentication services with Grafana using the generic OAuth2 feature. 
Below you +can find examples using Okta, BitBucket, OneLogin and Azure. + +This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/generic_oauth`. + +You may have to set the `root_url` option of `[server]` for the callback URL to be +correct. For example in case you are serving Grafana behind a proxy. + +Example config: + +```bash +[auth.generic_oauth] +enabled = true +client_id = YOUR_APP_CLIENT_ID +client_secret = YOUR_APP_CLIENT_SECRET +scopes = +auth_url = +token_url = +api_url = +allowed_domains = mycompany.com mycompany.org +allow_sign_up = true +``` + +Set `api_url` to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information. + +Grafana will attempt to determine the user's e-mail address by querying the OAuth provider as described below in the following order until an e-mail address is found: + +1. Check for the presence of an e-mail address via the `email` field encoded in the OAuth `id_token` parameter. +2. Check for the presence of an e-mail address in the `attributes` map encoded in the OAuth `id_token` parameter. By default Grafana will perform a lookup into the attributes map using the `email:primary` key, however, this is configurable and can be adjusted by using the `email_attribute_name` configuration option. +3. Query the `/emails` endpoint of the OAuth provider's API (configured with `api_url`) and check for the presence of an e-mail address marked as a primary address. +4. If no e-mail address is found in steps (1-3), then the e-mail address of the user is set to the empty string. + +## Set up OAuth2 with Okta + +First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https://<grafana domain>/` and set the Login redirect URIs to `https://<grafana domain>/login/generic_oauth`. + +Finally set up the generic oauth module like this: +```bash +[auth.generic_oauth] +name = Okta +enabled = true +scopes = openid profile email +client_id = <client id> +client_secret = <client secret> +auth_url = https://<okta domain>/oauth2/v1/authorize +token_url = https://<okta domain>/oauth2/v1/token +api_url = https://<okta domain>/oauth2/v1/userinfo +``` + +## Set up OAuth2 with Bitbucket + +```bash +[auth.generic_oauth] +name = BitBucket +enabled = true +allow_sign_up = true +client_id = <client id> +client_secret = <client secret> +scopes = account email +auth_url = https://bitbucket.org/site/oauth2/authorize +token_url = https://bitbucket.org/site/oauth2/access_token +api_url = https://api.bitbucket.org/2.0/user +team_ids = +allowed_organizations = +``` + +## Set up OAuth2 with OneLogin + +1. Create a new Custom Connector with the following settings: + - Name: Grafana + - Sign On Method: OpenID Connect + - Redirect URI: `https://<grafana domain>/login/generic_oauth` + - Signing Algorithm: RS256 + - Login URL: `https://<grafana domain>/login/generic_oauth` + + then: +2. Add an App to the Grafana Connector: + - Display Name: Grafana + + then: +3. Under the SSO tab on the Grafana App details page you'll find the Client ID and Client Secret. + + Your OneLogin Domain will match the url you use to access OneLogin. + + Configure Grafana as follows: + + ```bash + [auth.generic_oauth] + name = OneLogin + enabled = true + allow_sign_up = true + client_id = <client id> + client_secret = <client secret> + scopes = openid email name + auth_url = https://<onelogin domain>.onelogin.com/oidc/auth + token_url = https://<onelogin domain>.onelogin.com/oidc/token + api_url = https://<onelogin domain>.onelogin.com/oidc/me + team_ids = + allowed_organizations = + ``` + +### Set up OAuth2 with Auth0 + +1. 
Create a new Client in Auth0 + - Name: Grafana + - Type: Regular Web Application + +2. Go to the Settings tab and set: + - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth` + +3. Click Save Changes, then use the values at the top of the page to configure Grafana: + + ```bash + [auth.generic_oauth] + enabled = true + allow_sign_up = true + team_ids = + allowed_organizations = + name = Auth0 + client_id = <client id> + client_secret = <client secret> + scopes = openid profile email + auth_url = https://<auth0 domain>/authorize + token_url = https://<auth0 domain>/oauth/token + api_url = https://<auth0 domain>/userinfo + ``` + +### Set up OAuth2 with Azure Active Directory + +1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item. + +2. Copy the "Directory ID", this is needed for setting URLs later + +3. Click "App Registrations" and add a new application registration: + - Name: Grafana + - Application type: Web app / API + - Sign-on URL: `https://<grafana domain>/login/generic_oauth` + +4. Click the name of the new application to open the application details page. + +5. Note down the "Application ID", this will be the OAuth client id. + +6. Click "Settings", then click "Keys" and add a new entry under Passwords + - Key Description: Grafana OAuth + - Duration: Never Expires + +7. Click Save then copy the key value, this will be the OAuth client secret. + +8. Configure Grafana as follows: + + ```bash + [auth.generic_oauth] + name = Azure AD + enabled = true + allow_sign_up = true + client_id = <application id> + client_secret = <key value> + scopes = openid email name + auth_url = https://login.microsoftonline.com/<directory id>/oauth2/authorize + token_url = https://login.microsoftonline.com/<directory id>/oauth2/token + api_url = + team_ids = + allowed_organizations = + ``` + +> Note: It's important to ensure that the [root_url](/installation/configuration/#root-url) in Grafana is set in your Azure Application Reply URLs (App -> Settings -> Reply URLs) + +## Set up OAuth2 with Centrify + +1. Create a new Custom OpenID Connect application configuration in the Centrify dashboard. + +2. Create a memorable unique Application ID, e.g. "grafana", "grafana_aws", etc. + +3. Put in other basic configuration (name, description, logo, category) + +4. On the Trust tab, generate a long password and put it into the OpenID Connect Client Secret field. + +5. Put the URL to the front page of your Grafana instance into the "Resource Application URL" field. + +6. Add an authorized Redirect URI like https://your-grafana-server/login/generic_oauth + +7. Set up permissions, policies, etc. just like any other Centrify app + +8. Configure Grafana as follows: + + ```bash + [auth.generic_oauth] + name = Centrify + enabled = true + allow_sign_up = true + client_id = <application id> + client_secret = <client secret> + auth_url = https://<tenant id>.my.centrify.com/OAuth2/Authorize/<application id> + token_url = https://<tenant id>.my.centrify.com/OAuth2/Token/<application id> + ``` + +
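As noted in the e-mail lookup order above, the key used for the `attributes` map lookup in step 2 is configurable. A minimal sketch, assuming a provider that encodes the address under a non-default key (the key name here is an example):

```bash
[auth.generic_oauth]
enabled = true
# Default lookup key is "email:primary"; override it if your provider
# stores the address under a different key in the id_token attributes map.
email_attribute_name = email:secondary
```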
+ + diff --git a/docs/sources/auth/github.md b/docs/sources/auth/github.md new file mode 100644 index 00000000000..b4ffc0fc2d4 --- /dev/null +++ b/docs/sources/auth/github.md @@ -0,0 +1,101 @@ ++++ +title = "GitHub OAuth2 Authentication" +description = "Grafana OAuth Authentication Guide" +keywords = ["grafana", "configuration", "documentation", "oauth"] +type = "docs" +[menu.docs] +name = "GitHub" +identifier = "github_oauth2" +parent = "authentication" +weight = 4 ++++ + +# GitHub OAuth2 Authentication + +To enable the GitHub OAuth2 you must register your application with GitHub. GitHub will generate a client ID and secret key for you to use. + +## Configure GitHub OAuth application + +You need to create a GitHub OAuth application (you find this under the GitHub +settings page). When you create the application you will need to specify +a callback URL. Specify this as callback: + +```bash +http://<my_grafana_server_name_or_ip>:<grafana_server_port>/login/github +``` + +This callback URL must match the full HTTP address that you use in your +browser to access Grafana, but with the prefix path of `/login/github`. +When the GitHub OAuth application is created you will get a Client ID and a +Client Secret. Specify these in the Grafana configuration file. For +example: + +## Enable GitHub in Grafana + +```bash +[auth.github] +enabled = true +allow_sign_up = true +client_id = YOUR_GITHUB_APP_CLIENT_ID +client_secret = YOUR_GITHUB_APP_CLIENT_SECRET +scopes = user:email,read:org +auth_url = https://github.com/login/oauth/authorize +token_url = https://github.com/login/oauth/access_token +api_url = https://api.github.com/user +team_ids = +allowed_organizations = +``` + +You may have to set the `root_url` option of `[server]` for the callback URL to be +correct. For example in case you are serving Grafana behind a proxy. + +Restart the Grafana back-end. You should now see a GitHub login button +on the login page. You can now login or sign up with your GitHub +accounts. + +You may allow users to sign-up via GitHub authentication by setting the +`allow_sign_up` option to `true`. When this option is set to `true`, any +user successfully authenticating via GitHub authentication will be +automatically signed up. + +### team_ids + +Require an active team membership for at least one of the given teams on +GitHub. If the authenticated user isn't a member of at least one of the +teams they will not be able to register or authenticate with your +Grafana instance. For example: + +```bash +[auth.github] +enabled = true +client_id = YOUR_GITHUB_APP_CLIENT_ID +client_secret = YOUR_GITHUB_APP_CLIENT_SECRET +scopes = user:email,read:org +team_ids = 150,300 +auth_url = https://github.com/login/oauth/authorize +token_url = https://github.com/login/oauth/access_token +api_url = https://api.github.com/user +allow_sign_up = true +``` + +### allowed_organizations + +Require an active organization membership for at least one of the given +organizations on GitHub. If the authenticated user isn't a member of at least +one of the organizations they will not be able to register or authenticate with +your Grafana instance. 
For example: + +```bash +[auth.github] +enabled = true +client_id = YOUR_GITHUB_APP_CLIENT_ID +client_secret = YOUR_GITHUB_APP_CLIENT_SECRET +scopes = user:email,read:org +auth_url = https://github.com/login/oauth/authorize +token_url = https://github.com/login/oauth/access_token +api_url = https://api.github.com/user +allow_sign_up = true +# space-delimited organization names +allowed_organizations = github google +``` + diff --git a/docs/sources/auth/gitlab.md b/docs/sources/auth/gitlab.md new file mode 100644 index 00000000000..e3a450f9fc7 --- /dev/null +++ b/docs/sources/auth/gitlab.md @@ -0,0 +1,118 @@ ++++ +title = "GitLab OAuth2 Authentication" +description = "Grafana OAuth Authentication Guide" +keywords = ["grafana", "configuration", "documentation", "oauth"] +type = "docs" +[menu.docs] +name = "GitLab" +identifier = "gitlab_oauth" +parent = "authentication" +weight = 5 ++++ + +# GitLab OAuth2 Authentication + +To enable the GitLab OAuth2 you must register an application in GitLab. GitLab will generate a client ID and secret key for you to use. + +## Create GitLab OAuth keys + +You need to [create a GitLab OAuth application](https://docs.gitlab.com/ce/integration/oauth_provider.html). +Choose a descriptive *Name*, and use the following *Redirect URI*: + +``` +https://grafana.example.com/login/gitlab +``` + +where `https://grafana.example.com` is the URL you use to connect to Grafana. +Adjust it as needed if you don't use HTTPS or if you use a different port; for +instance, if you access Grafana at `http://203.0.113.31:3000`, you should use + +``` +http://203.0.113.31:3000/login/gitlab +``` + +Finally, select *api* as the *Scope* and submit the form. Note that if you're +not going to use GitLab groups for authorization (i.e. not setting +`allowed_groups`, see below), you can select *read_user* instead of *api* as +the *Scope*, thus giving a more restricted access to your GitLab API. + +You'll get an *Application Id* and a *Secret* in return; we'll call them +`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this +section. + +## Enable GitLab in Grafana + +Add the following to your Grafana configuration file to enable GitLab +authentication: + +```bash +[auth.gitlab] +enabled = false +allow_sign_up = false +client_id = GITLAB_APPLICATION_ID +client_secret = GITLAB_SECRET +scopes = api +auth_url = https://gitlab.com/oauth/authorize +token_url = https://gitlab.com/oauth/token +api_url = https://gitlab.com/api/v4 +allowed_groups = +``` + +You may have to set the `root_url` option of `[server]` for the callback URL to be +correct. For example in case you are serving Grafana behind a proxy. + +Restart the Grafana backend for your changes to take effect. + +If you use your own instance of GitLab instead of `gitlab.com`, adjust +`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com` +hostname with your own. + +With `allow_sign_up` set to `false`, only existing users will be able to login +using their GitLab account, but with `allow_sign_up` set to `true`, *any* user +who can authenticate on GitLab will be able to login on your Grafana instance; +if you use the public `gitlab.com`, it means anyone in the world would be able +to login on your Grafana instance. + +You can, however, limit access to only members of a given group or list of +groups by setting the `allowed_groups` option. 
+ +### allowed_groups + +To limit access to authenticated users that are members of one or more [GitLab +groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups` +to a comma- or space-separated list of groups. For instance, if you want to +only give access to members of the `example` group, set + + +```ini +allowed_groups = example +``` + +If you want to also give access to members of the subgroup `bar`, which is in +the group `foo`, set + +```ini +allowed_groups = example, foo/bar +``` + +Note that in GitLab, the group or subgroup name doesn't always match its +display name, especially if the display name contains spaces or special +characters. Make sure you always use the group or subgroup name as it appears +in the URL of the group or subgroup. + +Here's a complete example with `allow_sign_up` enabled, and access limited to +the `example` and `foo/bar` groups: + +```ini +[auth.gitlab] +enabled = false +allow_sign_up = true +client_id = GITLAB_APPLICATION_ID +client_secret = GITLAB_SECRET +scopes = api +auth_url = https://gitlab.com/oauth/authorize +token_url = https://gitlab.com/oauth/token +api_url = https://gitlab.com/api/v4 +allowed_groups = example, foo/bar +``` + diff --git a/docs/sources/auth/google.md b/docs/sources/auth/google.md new file mode 100644 index 00000000000..f7faf1a1097 --- /dev/null +++ b/docs/sources/auth/google.md @@ -0,0 +1,58 @@ ++++ +title = "Google OAuth2 Authentication" +description = "Grafana OAuth Authentication Guide" +keywords = ["grafana", "configuration", "documentation", "oauth"] +type = "docs" +[menu.docs] +name = "Google" +identifier = "google_oauth2" +parent = "authentication" +weight = 3 ++++ + +# Google OAuth2 Authentication + +To enable the Google OAuth2 you must register your application with Google. Google will generate a client ID and secret key for you to use. + +## Create Google OAuth keys + +First, you need to create a Google OAuth Client: + +1. Go to https://console.developers.google.com/apis/credentials +2. Click the 'Create Credentials' button, then click 'OAuth Client ID' in the menu that drops down +3. Enter the following: + - Application Type: Web Application + - Name: Grafana + - Authorized Javascript Origins: https://grafana.mycompany.com + - Authorized Redirect URLs: https://grafana.mycompany.com/login/google + - Replace https://grafana.mycompany.com with the URL of your Grafana instance. +4. Click Create +5. Copy the Client ID and Client Secret from the 'OAuth Client' modal + +## Enable Google OAuth in Grafana + +Specify the Client ID and Secret in the [Grafana configuration file]({{< relref "installation/configuration.md#config-file-locations" >}}). For example: + +```bash +[auth.google] +enabled = true +client_id = CLIENT_ID +client_secret = CLIENT_SECRET +scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email +auth_url = https://accounts.google.com/o/oauth2/auth +token_url = https://accounts.google.com/o/oauth2/token +allowed_domains = mycompany.com mycompany.org +allow_sign_up = true +``` + +You may have to set the `root_url` option of `[server]` for the callback URL to be +correct. For example in case you are serving Grafana behind a proxy. + +Restart the Grafana back-end. You should now see a Google login button +on the login page. You can now login or sign up with your Google +accounts. The `allowed_domains` option is optional, and multiple domains are separated by spaces. 
+ +You may allow users to sign-up via Google authentication by setting the +`allow_sign_up` option to `true`. When this option is set to `true`, any +user successfully authenticating via Google authentication will be +automatically signed up. diff --git a/docs/sources/auth/index.md b/docs/sources/auth/index.md new file mode 100644 index 00000000000..7fdcc082319 --- /dev/null +++ b/docs/sources/auth/index.md @@ -0,0 +1,12 @@ ++++ +title = "Authentication" +description = "Authentication" +type = "docs" +[menu.docs] +name = "Authentication" +identifier = "authentication" +parent = "admin" +weight = 3 ++++ + + diff --git a/docs/sources/auth/ldap.md b/docs/sources/auth/ldap.md new file mode 100644 index 00000000000..4a884a60d15 --- /dev/null +++ b/docs/sources/auth/ldap.md @@ -0,0 +1,261 @@ ++++ +title = "LDAP Authentication" +description = "Grafana LDAP Authentication Guide " +keywords = ["grafana", "configuration", "documentation", "ldap", "active directory"] +type = "docs" +[menu.docs] +name = "LDAP" +identifier = "ldap" +parent = "authentication" +weight = 2 ++++ + +# LDAP Authentication + +The LDAP integration in Grafana allows your Grafana users to login with their LDAP credentials. You can also specify mappings between LDAP +group memberships and Grafana Organization user roles. + +## Supported LDAP Servers + +Grafana uses a [third-party LDAP library](https://github.com/go-ldap/ldap) under the hood that supports basic LDAP v3 functionality. +This means that you should be able to configure LDAP integration using any compliant LDAPv3 server, for example [OpenLDAP](#openldap) or +[Active Directory](#active-directory) among [others](https://en.wikipedia.org/wiki/Directory_service#LDAP_implementations). + +## Enable LDAP + +In order to use LDAP integration you'll first need to enable LDAP in the [main config file]({{< relref "installation/configuration.md" >}}) as well as specify the path to the LDAP +specific configuration file (default: `/etc/grafana/ldap.toml`). + +```bash +[auth.ldap] +# Set to `true` to enable LDAP integration (default: `false`) +enabled = true + +# Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`) +config_file = /etc/grafana/ldap.toml + +# Allow sign up should almost always be true (default) to allow new Grafana users to be created (if ldap authentication is ok). If set to +# false only pre-existing Grafana users will be able to login (if ldap authentication is ok). +allow_sign_up = true +``` + +## Grafana LDAP Configuration + +Depending on which LDAP server you're using and how that's configured your Grafana LDAP configuration may vary. +See [configuration examples](#configuration-examples) for more information. 
+ +**LDAP specific configuration file (ldap.toml) example:** +```bash +[[servers]] +# Ldap server host (specify multiple hosts space separated) +host = "127.0.0.1" +# Default port is 389 or 636 if use_ssl = true +port = 389 +# Set to true if ldap server supports TLS +use_ssl = false +# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS) +start_tls = false +# set to true if you want to skip ssl cert validation +ssl_skip_verify = false +# set to the path to your root CA certificate or leave unset to use system defaults +# root_ca_cert = "/path/to/certificate.crt" +# Authentication against LDAP servers requiring client certificates +# client_cert = "/path/to/client.crt" +# client_key = "/path/to/client.key" + +# Search user bind dn +bind_dn = "cn=admin,dc=grafana,dc=org" +# Search user bind password +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" +bind_password = 'grafana' + +# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)" +# Allow login from email or username, example "(|(sAMAccountName=%s)(userPrincipalName=%s))" +search_filter = "(cn=%s)" + +# An array of base dns to search through +search_base_dns = ["dc=grafana,dc=org"] + +# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" +# group_search_filter_user_attribute = "distinguishedName" +# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] + +# Specify names of the ldap attributes your ldap uses +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" +``` + +### Bind + +#### Bind & Bind Password + +By default the configuration expects you to specify a bind DN and bind password. This should be a read only user that can perform LDAP searches. +When the user DN is found a second bind is performed with the user provided username & password (in the normal Grafana login form). + +```bash +bind_dn = "cn=admin,dc=grafana,dc=org" +bind_password = "grafana" +``` + +#### Single Bind Example + +If you can provide a single bind expression that matches all possible users, you can skip the second bind and bind against the user DN directly. +This allows you to not specify a bind_password in the configuration file. + +```bash +bind_dn = "cn=%s,o=users,dc=grafana,dc=org" +``` + +In this case you skip providing a `bind_password` and instead provide a `bind_dn` value with a `%s` somewhere. This will be replaced with the username entered in on the Grafana login page. +The search filter and search bases settings are still needed to perform the LDAP search to retrieve the other LDAP information (like LDAP groups and email). + +### POSIX schema +If your ldap server does not support the memberOf attribute add these options: + +```bash +## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) +group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" +## An array of the base DNs to search through for groups. Typically uses ou=groups +group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] +## the %s in the search filter will be replaced with the attribute defined below +group_search_filter_user_attribute = "uid" +``` + +Also set `member_of = "dn"` in the `[servers.attributes]` section. + +### Group Mappings + +In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. 
These will be synced every time the user logs in, with LDAP being +the authoritative source. So, if you change a user's role in the Grafana Org. Users page, this change will be reset the next time the user logs in. If you +change the LDAP groups of a user, the change will take effect the next time the user logs in. + +The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the +TOML config will be used. + +**LDAP specific configuration file (ldap.toml) example:** +```bash +[[servers]] +# other settings omitted for clarity + +[[servers.group_mappings]] +group_dn = "cn=superadmins,dc=grafana,dc=org" +org_role = "Admin" +grafana_admin = true # Available in Grafana v5.3 and above + +[[servers.group_mappings]] +group_dn = "cn=admins,dc=grafana,dc=org" +org_role = "Admin" + +[[servers.group_mappings]] +group_dn = "cn=users,dc=grafana,dc=org" +org_role = "Editor" + +[[servers.group_mappings]] +group_dn = "*" +org_role = "Viewer" +``` + +Setting | Required | Description | Default +------------ | ------------ | ------------- | ------------- +`group_dn` | Yes | LDAP distinguished name (DN) of LDAP group. If you want to match all (or no LDAP groups) then you can use wildcard (`"*"`) | +`org_role` | Yes | Assign users of `group_dn` the organisation role `"Admin"`, `"Editor"` or `"Viewer"` | +`org_id` | No | The Grafana organization database id. Setting this allows for multiple group_dn's to be assigned to the same `org_role` provided the `org_id` differs | `1` (default org id) +`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organisations and users. Available in Grafana v5.3 and above | `false` + +### Nested/recursive group membership + +Users with nested/recursive group membership must have an LDAP server that supports `LDAP_MATCHING_RULE_IN_CHAIN` +and configure `group_search_filter` in a way that it returns the groups the submitted username is a member of. + +**Active Directory example:** + +Active Directory groups store the Distinguished Names (DNs) of members, so your filter will need to know the DN for the user based only on the submitted username. +Multiple DN templates can be searched by combining filters with the LDAP OR-operator. Examples: + +```bash +group_search_filter = "(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])" +group_search_filter = "(|(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])(member:1.2.840.113556.1.4.1941:=CN=%s,[another user container/OU]))" +group_search_filter_user_attribute = "cn" +``` +For more information on AD searches see [Microsoft's Search Filter Syntax](https://docs.microsoft.com/en-us/windows/desktop/adsi/search-filter-syntax) documentation. + +For troubleshooting, by changing `member_of` in `[servers.attributes]` to "dn" it will show you more accurate group memberships when [debug is enabled](#troubleshooting). + +## Configuration examples + +### OpenLDAP + +[OpenLDAP](http://www.openldap.org/) is an open source directory service. 
+ +**LDAP specific configuration file (ldap.toml):** +```bash +[[servers]] +host = "127.0.0.1" +port = 389 +use_ssl = false +start_tls = false +ssl_skip_verify = false +bind_dn = "cn=admin,dc=grafana,dc=org" +bind_password = 'grafana' +search_filter = "(cn=%s)" +search_base_dns = ["dc=grafana,dc=org"] + +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" + +# [[servers.group_mappings]] omitted for clarity +``` + +### Active Directory + +[Active Directory](https://technet.microsoft.com/en-us/library/hh831484(v=ws.11).aspx) is a directory service which is commonly used in Windows environments. + +Assuming the following Active Directory server setup: + +* IP address: `10.0.0.1` +* Domain: `CORP` +* DNS name: `corp.local` + +**LDAP specific configuration file (ldap.toml):** +```bash +[[servers]] +host = "10.0.0.1" +port = 3269 +use_ssl = true +start_tls = false +ssl_skip_verify = true +bind_dn = "CORP\\%s" +search_filter = "(sAMAccountName=%s)" +search_base_dns = ["dc=corp,dc=local"] + +[servers.attributes] +name = "givenName" +surname = "sn" +username = "sAMAccountName" +member_of = "memberOf" +email = "mail" + +# [[servers.group_mappings]] omitted for clarity +``` + +#### Port requirements + +In the above example SSL is enabled and an encrypted port has been configured. If your Active Directory doesn't support SSL, change `use_ssl = false` and `port = 389`. +Please inspect your Active Directory configuration and documentation to find the correct settings. For more information about Active Directory and port requirements, see [this link](https://technet.microsoft.com/en-us/library/dd772723(v=ws.10)). + +## Troubleshooting + +To troubleshoot and get more log info enable ldap debug logging in the [main config file]({{< relref "installation/configuration.md" >}}). + +```bash +[log] +filters = ldap:debug +``` diff --git a/docs/sources/auth/overview.md b/docs/sources/auth/overview.md new file mode 100644 index 00000000000..a372600ac46 --- /dev/null +++ b/docs/sources/auth/overview.md @@ -0,0 +1,86 @@ ++++ +title = "Overview" +description = "Overview for auth" +type = "docs" +[menu.docs] +name = "Overview" +identifier = "overview-auth" +parent = "authentication" +weight = 1 ++++ + +# User Authentication Overview + +Grafana provides many ways to authenticate users. Some authentication integrations also enable syncing user +permissions and org memberships. + +## OAuth Integrations + +- [Google OAuth]({{< relref "auth/google.md" >}}) +- [GitHub OAuth]({{< relref "auth/github.md" >}}) +- [GitLab OAuth]({{< relref "auth/gitlab.md" >}}) +- [Generic OAuth]({{< relref "auth/generic-oauth.md" >}}) (Okta, BitBucket, Azure, OneLogin, Auth0) +
+## LDAP Integrations + +- [LDAP Authentication]({{< relref "auth/ldap.md" >}}) (OpenLDAP, Active Directory, etc.) + +## Auth proxy + +- [Auth Proxy]({{< relref "auth/auth-proxy.md" >}}) If you want to handle authentication outside Grafana using a reverse + proxy. + +## Grafana Auth + +Grafana of course has a built-in user authentication system with password authentication enabled by default. You can +disable authentication by enabling anonymous access. You can also hide the login form and only allow login through an auth +provider (listed above). There are also options for allowing self sign-up. + +### Anonymous authentication + +You can make Grafana accessible without any login required by enabling anonymous access in the configuration file. 
+ +Example: + +```bash +[auth.anonymous] +enabled = true + +# Organization name that should be used for unauthenticated users +org_name = Main Org. + +# Role for unauthenticated users, other valid values are `Editor` and `Admin` +org_role = Viewer +``` + +If you change your organization name in the Grafana UI this setting needs to be updated to match the new name. + +### Basic authentication + +Basic auth is enabled by default and works with the built-in Grafana user password authentication system and LDAP +authentication integration. + +To disable basic auth: + +```bash +[auth.basic] +enabled = false +``` + +### Disable login form + +You can hide the Grafana login form using the configuration setting below. + +```bash +[auth] +disable_login_form = true +``` + +### Hide sign-out menu + +Set the option detailed below to `true` to hide the sign-out menu link. Useful if you use an auth proxy. + +```bash +[auth] +disable_signout_menu = true +``` diff --git a/docs/sources/contribute/cla.md b/docs/sources/contribute/cla.md index ffb2aaef1b9..a073a9a4eae 100644 --- a/docs/sources/contribute/cla.md +++ b/docs/sources/contribute/cla.md @@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]


-This CLA agreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) +This CLA agreement is based on the [Harmony Contributor Agreement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) diff --git a/docs/sources/features/datasources/cloudwatch.md b/docs/sources/features/datasources/cloudwatch.md index 7adc6ebe4fb..be36d108475 100644 --- a/docs/sources/features/datasources/cloudwatch.md +++ b/docs/sources/features/datasources/cloudwatch.md @@ -46,7 +46,7 @@ Checkout AWS docs on [IAM Roles](http://docs.aws.amazon.com/AWSEC2/latest/UserGu ## IAM Policies Grafana needs permissions granted via IAM to be able to read CloudWatch metrics -and EC2 tags/instances. You can attach these permissions to IAM roles and +and EC2 tags/instances/regions. You can attach these permissions to IAM roles and utilize Grafana's built-in support for assuming roles. Here is a minimal policy example: @@ -65,11 +65,12 @@ Here is a minimal policy example: "Resource": "*" }, { - "Sid": "AllowReadingTagsFromEC2", + "Sid": "AllowReadingTagsInstancesRegionsFromEC2", "Effect": "Allow", "Action": [ "ec2:DescribeTags", - "ec2:DescribeInstances" + "ec2:DescribeInstances", + "ec2:DescribeRegions" ], "Resource": "*" } diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md index da0c9581e99..cd191f14273 100644 --- a/docs/sources/features/datasources/mssql.md +++ b/docs/sources/features/datasources/mssql.md @@ -6,7 +6,7 @@ type = "docs" [menu.docs] name = "Microsoft SQL Server" parent = "datasources" -weight = 7 +weight = 8 +++ # Using Microsoft SQL Server in Grafana @@ -32,6 +32,28 @@ Name | Description *Database* | Name of your MSSQL database. *User* | Database user's login/username *Password* | Database user's password +*Encrypt* | This option determines whether or to which extent a secure SSL TCP/IP connection will be negotiated with the server, default `false` (Grafana v5.4+). +*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+). +*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+). +*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+). + +### Min time interval + +A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables. +Recommended to be set to write frequency, for example `1m` if your data is written every minute. +This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a +number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported: + +Identifier | Description +------------ | ------------- +`y` | year +`M` | month +`w` | week +`d` | day +`h` | hour +`m` | minute +`s` | second +`ms` | millisecond ### Database User Permissions (Important!) @@ -51,8 +73,8 @@ Make sure the user does not get any unwanted privileges from the public role. ### Known Issues -MSSQL 2008 and 2008 R2 engine cannot handle login records when SSL encryption is not disabled. 
Due to this you may receive an `Login error: EOF` error when trying to create your datasource. -To fix MSSQL 2008 R2 issue, install MSSQL 2008 R2 Service Pack 2. To fix MSSQL 2008 issue, install Microsoft MSSQL 2008 Service Pack 3 and Cumulative update package 3 for MSSQL 2008 SP3. +If you're using an older version of Microsoft SQL Server like 2008 and 2008R2 you may need to disable encryption to be able to connect. +If possible, we recommend you to use the latest service pack available for optimal compatibility. ## Query Editor @@ -156,6 +178,8 @@ The resulting table panel: If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+). +Resultsets of time series queries need to be sorted by time. + **Example database table:** ```sql @@ -205,7 +229,7 @@ When above query are used in a graph panel the result will be two series named ` {{< docs-imagebox img="/img/docs/v51/mssql_time_series_two.png" class="docs-image--no-shadow docs-image--right" >}} -**Example with multiple `value` culumns:** +**Example with multiple `value` columns:** ```sql SELECT @@ -565,6 +589,10 @@ datasources: url: localhost:1433 database: grafana user: grafana + jsonData: + maxOpenConns: 0 # Grafana v5.4+ + maxIdleConns: 2 # Grafana v5.4+ + connMaxLifetime: 14400 # Grafana v5.4+ secureJsonData: password: "Password!" diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md index afac746b050..988f632bff3 100644 --- a/docs/sources/features/datasources/mysql.md +++ b/docs/sources/features/datasources/mysql.md @@ -35,13 +35,34 @@ Name | Description *Database* | Name of your MySQL database. *User* | Database user's login/username *Password* | Database user's password +*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+). +*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+). +*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours. This should always be lower than configured [wait_timeout](https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_wait_timeout) in MySQL (Grafana v5.4+). + +### Min time interval + +A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables. +Recommended to be set to write frequency, for example `1m` if your data is written every minute. +This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a +number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). 
The following time identifiers are supported: + +Identifier | Description +------------ | ------------- +`y` | year +`M` | month +`w` | week +`d` | day +`h` | hour +`m` | minute +`s` | second +`ms` | millisecond ### Database User Permissions (Important!) The database user you specify when you add the data source should only be granted SELECT permissions on the specified database & tables you want to query. Grafana does not validate that the query is safe. The query could include any SQL statement. For example, statements like `USE otherdb;` and `DROP TABLE user;` would be -executed. To protect against this we **Highly** recommmend you create a specific mysql user with restricted permissions. +executed. To protect against this we **Highly** recommend you create a specific mysql user with restricted permissions. Example: @@ -111,6 +132,8 @@ Any column except `time` and `metric` is treated as a value column. You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+). +Resultsets of time series queries need to be sorted by time. + **Example with `metric` column:** ```sql @@ -296,4 +319,8 @@ datasources: database: grafana user: grafana password: password + jsonData: + maxOpenConns: 0 # Grafana v5.4+ + maxIdleConns: 2 # Grafana v5.4+ + connMaxLifetime: 14400 # Grafana v5.4+ ``` diff --git a/docs/sources/features/datasources/opentsdb.md b/docs/sources/features/datasources/opentsdb.md index 1f6f022a18c..d2cd0b1dc0e 100644 --- a/docs/sources/features/datasources/opentsdb.md +++ b/docs/sources/features/datasources/opentsdb.md @@ -84,7 +84,7 @@ Some examples are mentioned below to make nested template queries work successfu Query | Description ------------ | ------------- *tag_values(cpu, hostname, env=$env)* | Return tag values for cpu metric, selected env tag value and tag key hostname -*tag_values(cpu, hostanme, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname +*tag_values(cpu, hostname, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname For details on OpenTSDB metric queries checkout the official [OpenTSDB documentation](http://opentsdb.net/docs/build/html/index.html) diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md index 58970d240c8..52f8804f27f 100644 --- a/docs/sources/features/datasources/postgres.md +++ b/docs/sources/features/datasources/postgres.md @@ -16,7 +16,7 @@ Grafana ships with a built-in PostgreSQL data source plugin that allows you to q ## Adding the data source 1. Open the side menu by clicking the Grafana icon in the top header. -2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`. +2. In the side menu under the `Configuration` icon you should find a link named `Data Sources`. 3. Click the `+ Add data source` button in the top header. 4. Select *PostgreSQL* from the *Type* dropdown. @@ -31,16 +31,36 @@ Name | Description *User* | Database user's login/username *Password* | Database user's password *SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server. +*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+). 
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+). +*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+). *Version* | This option determines which functions are available in the query builder (only available in Grafana 5.3+). -*TimescaleDB* | TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use ```time_bucket``` in the ```$__timeGroup``` macro and display TimescaleDB specific aggregate functions in the query builder (only available in Grafana 5.3+). +*TimescaleDB* | TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use `time_bucket` in the `$__timeGroup` macro and display TimescaleDB specific aggregate functions in the query builder (only available in Grafana 5.3+). +### Min time interval + +A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables. +Recommended to be set to write frequency, for example `1m` if your data is written every minute. +This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a +number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported: + +Identifier | Description +------------ | ------------- +`y` | year +`M` | month +`w` | week +`d` | day +`h` | hour +`m` | minute +`s` | second +`ms` | millisecond ### Database User Permissions (Important!) The database user you specify when you add the data source should only be granted SELECT permissions on the specified database & tables you want to query. Grafana does not validate that the query is safe. The query could include any SQL statement. For example, statements like `DELETE FROM user;` and `DROP TABLE user;` would be -executed. To protect against this we **Highly** recommmend you create a specific postgresql user with restricted permissions. +executed. To protect against this we **highly** recommend you create a specific PostgreSQL user with restricted permissions. Example: @@ -52,9 +72,72 @@ Example: Make sure the user does not get any unwanted privileges from the public role. +## Query Editor + +> Only available in Grafana v5.3+. + +{{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}} + +You find the PostgreSQL query editor in the metrics tab in Graph or Singlestat panel's edit mode. You enter edit mode by clicking the +panel title, then edit. + +The query editor has a link named `Generated SQL` that shows up after a query has been executed, while in panel edit mode. Click on it and it will expand and show the raw interpolated SQL string that was executed. + +### Select table, time column and metric column (FROM) + +When you enter edit mode for the first time or add a new query Grafana will try to prefill the query builder with the first table that has a timestamp column and a numeric column. + +In the FROM field, Grafana will suggest tables that are in the `search_path` of the database user. To select a table or view not in your `search_path` +you can manually enter a fully qualified name (schema.table) like `public.metrics`. + +The Time column field refers to the name of the column holding your time values. 
Selecting a value for the Metric column field is optional. If a value is selected, the Metric column field will be used as the series name. + +The metric column suggestions will only contain columns with a text datatype (char,varchar,text). +If you want to use a column with a different datatype as metric column you may enter the column name with a cast: `ip::text`. +You may also enter arbitrary SQL expressions in the metric column field that evaluate to a text datatype like +`hostname || ' ' || container_name`. + +### Columns, Window and Aggregation functions (SELECT) + +In the `SELECT` row you can specify what columns and functions you want to use. +In the column field you may write arbitrary expressions instead of a column name like `column1 * column2 / column3`. + +The available functions in the query editor depend on the PostgreSQL version you selected when configuring the datasource. +If you use aggregate functions you need to group your resultset. The editor will automatically add a `GROUP BY time` if you add an aggregate function. + +The editor tries to simplify and unify this part of the query. For example:
+![](/img/docs/v53/postgres_select_editor.png)
+ +The above will generate the following PostgreSQL `SELECT` clause: + +```sql +avg(tx_bytes) OVER (ORDER BY "time" ROWS 5 PRECEDING) AS "tx_bytes" +``` + +You may add further value columns by clicking the plus button and selecting `Column` from the menu. Multiple value columns will be plotted as separate series in the graph panel. + +### Filter data (WHERE) +To add a filter click the plus icon to the right of the `WHERE` condition. You can remove filters by clicking on +the filter and selecting `Remove`. A filter for the current selected timerange is automatically added to new queries. + +### Group By +To group by time or any other columns click the plus icon at the end of the GROUP BY row. The suggestion dropdown will only show text columns of your currently selected table but you may manually enter any column. +You can remove the group by clicking on the item and then selecting `Remove`. + +If you add any grouping, all selected columns need to have an aggregate function applied. The query builder will automatically add aggregate functions to all columns without aggregate functions when you add groupings. + +#### Gap Filling + +Grafana can fill in missing values when you group by time. The time function accepts two arguments. The first argument is the time window that you would like to group by, and the second argument is the value you want Grafana to fill missing items with. + +### Text Editor Mode (RAW) +You can switch to the raw query editor mode by clicking the hamburger icon and selecting `Switch editor mode` or by clicking `Edit SQL` below the query. + +> If you use the raw query editor, be sure your query at minimum has `ORDER BY time` and a filter on the returned time range. + ## Macros -To simplify syntax and to allow for dynamic parts, like date range filters, the query can contain macros. +Macros can be used within a query to simplify syntax and allow for dynamic parts. Macro example | Description ------------ | ------------- @@ -63,21 +146,19 @@ Macro example | Description *$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'* *$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'* *$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'* -*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300* -*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value. +*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in a GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300* +*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by Grafana and 0 will be used as the value. *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points. -*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value if no value has been seen yet NULL will be used (only available in Grafana 5.3+). 
-*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identical to $__timeGroup but with an added column alias (only available in Grafana 5.3+).
-*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
+*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value. If no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
+*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced with an expression identical to $__timeGroup, but with an added column alias (only available in Grafana 5.3+).
+*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamps. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
-*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
-*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
+*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup, but for times stored as unix timestamp (only available in Grafana 5.3+).
+*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above, but also adds a column alias (only available in Grafana 5.3+).

We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.

-The query editor has a link named `Generated SQL` that shows up after a query has been executed, while in panel edit mode. Click on it and it will expand and show the raw interpolated SQL string that was executed.
-
## Table queries

If the `Format as` query option is set to `Table` then you can basically do any type of SQL query. The table panel will automatically show the results of whatever columns & rows your query returns.

@@ -107,11 +188,13 @@ The resulting table panel:

## Time series queries

-If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
-Any column except `time` and `metric` is treated as a value column.
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a SQL datetime or any numeric datatype representing unix epoch.
+Any column except `time` and `metric` is treated as a value column.
You may return a column named `metric` that is used as metric name for the value column.
If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).

+Resultsets of time series queries need to be sorted by time.
+
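+For instance, a minimal single-series query using the macros above, grouped by time, gap-filled with `0` and sorted by time, could look like this sketch (the `metrics` table and the `created_at`/`value` columns are hypothetical names):
+
+```sql
+SELECT
+  $__timeGroup("created_at", '5m', 0) AS time,
+  avg(value) AS value
+FROM metrics
+WHERE $__timeFilter("created_at")
+GROUP BY 1
+ORDER BY 1
+```
+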
**Example with `metric` column:**

```sql
@@ -187,7 +270,7 @@ Another option is a query that can create a key/value variable.
The query should return two columns that are named `__text` and `__value`.

```sql
SELECT hostname AS __text, id AS __value FROM host
```
-You can also create nested variables. For example if you had another variable named `region`. Then you could have
+You can also create nested variables. Using a variable named `region`, you could have
the hosts variable only show hosts from the currently selected region with a query like this (if `region` is a multi-value variable then use the `IN` comparison operator rather than `=` to match against multiple values):

```sql
@@ -196,7 +279,7 @@
SELECT hostname FROM host WHERE region IN($region)
```

### Using Variables in Queries

-From Grafana 4.3.0 to 4.6.0, template variables are always quoted automatically so if it is a string value do not wrap them in quotes in where clauses.
+From Grafana 4.3.0 to 4.6.0, template variables are always quoted automatically. If your template variables are strings, do not wrap them in quotes in where clauses.

From Grafana 4.7.0, template variable values are only quoted when the template variable is a `multi-value`.

@@ -228,7 +311,7 @@ ORDER BY atimestamp ASC

#### Disabling Quoting for Multi-value Variables

-Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. Do disable quoting, use the csv formatting option for variables:
+Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. To disable quoting, use the csv formatting option for variables:

`${servers:csv}`

@@ -272,7 +355,7 @@ tags | Optional field name to use for event tags as a comma separated string.

## Alerting

-Time series queries should work in alerting conditions. Table formatted queries is not yet supported in alert rule
+Time series queries should work in alerting conditions. Table formatted queries are not yet supported in alert rule
conditions.

## Configure the Datasource with Provisioning

@@ -294,5 +377,9 @@ datasources:
      password: "Password!"
    jsonData:
      sslmode: "disable" # disable/require/verify-ca/verify-full
+      maxOpenConns: 0 # Grafana v5.4+
+      maxIdleConns: 2 # Grafana v5.4+
+      connMaxLifetime: 14400 # Grafana v5.4+
+      postgresVersion: 903 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
      timescaledb: false
```
diff --git a/docs/sources/features/datasources/stackdriver.md b/docs/sources/features/datasources/stackdriver.md
new file mode 100644
index 00000000000..cea86e96faf
--- /dev/null
+++ b/docs/sources/features/datasources/stackdriver.md
@@ -0,0 +1,216 @@
++++
+title = "Using Stackdriver in Grafana"
+description = "Guide for using Stackdriver in Grafana"
+keywords = ["grafana", "stackdriver", "google", "guide"]
+type = "docs"
+aliases = ["/datasources/stackdriver"]
+[menu.docs]
+name = "Stackdriver"
+parent = "datasources"
+weight = 11
++++
+
+# Using Google Stackdriver in Grafana
+
+> Only available in Grafana v5.3+.
+> The datasource is currently a beta feature and is subject to change.
+
+Grafana ships with built-in support for Google Stackdriver. Just add it as a datasource and you are ready to build dashboards for your Stackdriver metrics.
+
+## Adding the data source to Grafana
+
+1. Open the side menu by clicking the Grafana icon in the top header.
+2. In the side menu under the `Configuration` icon you should find a link named `Data Sources`.
+3. Click the `+ Add data source` button in the top header.
+4. 
Select `Stackdriver` from the _Type_ dropdown. +5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file. + +> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization. + +| Name | Description | +| --------------------- | ----------------------------------------------------------------------------------- | +| _Name_ | The datasource name. This is how you refer to the datasource in panels & queries. | +| _Default_ | Default datasource means that it will be pre-selected for new panels. | +| _Service Account Key_ | Service Account Key File for a GCP Project. Instructions below on how to create it. | + +## Authentication + +### Service Account Credentials - Private Key File + +To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project. + +#### Enable APIs + +The following APIs need to be enabled first: + +* [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com) +* [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com) + +Click on the links above and click the `Enable` button: + +{{< docs-imagebox img="/img/docs/v53/stackdriver_enable_api.png" class="docs-image--no-shadow" caption="Enable GCP APIs" >}} + +#### Create a GCP Service Account for a Project + +1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials). +2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option. + + {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_button.png" class="docs-image--no-shadow" caption="Create service account button" >}} + +3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option: + + {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_key.png" class="docs-image--no-shadow" caption="Create service account key" >}} + +4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown: + + {{< docs-imagebox img="/img/docs/v53/stackdriver_service_account_choose_role.png" class="docs-image--no-shadow" caption="Choose role" >}} + +5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data. +6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file. + + {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_upload_key.png" class="docs-image--no-shadow" caption="Upload service key file to Grafana" >}} + +7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file! 
+
+   {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_key_uploaded.png" class="docs-image--no-shadow" caption="Service key file is uploaded to Grafana" >}}
+
+## Metric Query Editor
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver_query_editor.png" max-width= "400px" class="docs-image--right" >}}
+
+The Stackdriver query editor allows you to select metrics, group/aggregate by labels and by time, and use filters to specify which time series you want in the results.
+
+Begin by choosing a `Service` and then a metric from the `Metric` dropdown. Use the plus and minus icons in the filter and group by sections to add/remove filters or group by clauses.
+
+Stackdriver metrics can be of different kinds (GAUGE, DELTA, CUMULATIVE) and these kinds have support for different aggregation options (reducers and aligners). The Grafana query editor shows the list of available aggregation methods for a selected metric and sets a default reducer and aligner when you select the metric. Units for the Y-axis are also automatically selected by the query editor.
+
+### Filter
+
+To add a filter, click the plus icon, choose a field to filter by and enter a filter value, e.g. `instance_name = grafana-1`. You can remove the filter by clicking on the filter name and selecting `--remove filter--`.
+
+#### Simple wildcards
+
+When the operator is set to `=` or `!=` it is possible to add wildcards to the filter value field. E.g. `us-*` will capture all values that start with "us-" and `*central-a` will capture all values that end with "central-a". `*-central-*` captures all values that have the substring "-central-". Simple wildcards are less expensive than regular expressions.
+
+#### Regular expressions
+
+When the operator is set to `=~` or `!=~` it is possible to add regular expressions to the filter value field. E.g. `us-central[1-3]-[af]` would match all values that start with "us-central", are followed by a number in the range of 1 to 3, a dash and then either an "a" or an "f". Leading and trailing slashes are not needed when creating regular expressions.
+
+### Aggregation
+
+The aggregation field lets you combine time series based on common statistics. Read more about this option [here](https://cloud.google.com/monitoring/charts/metrics-selector#aggregation-options).
+
+The `Aligner` field allows you to align multiple time series after the same group by time interval. Read more about how it works [here](https://cloud.google.com/monitoring/charts/metrics-selector#alignment).
+
+#### Alignment Period/Group by Time
+
+The `Alignment Period` groups a metric by time if an aggregation is chosen. The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI).
+The option is called `Stackdriver auto` and the defaults are:
+
+* 1m for time ranges < 23 hours
+* 5m for time ranges >= 23 hours and < 6 days
+* 1h for time ranges >= 6 days
+
+The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable).
+
+It is also possible to choose fixed time intervals to group by, like `1h` or `1d`.
+
+### Group By
+
+Group by resource or metric labels to reduce the number of time series and to aggregate the results by a group by. E.g. group by `instance_name` to see an aggregated metric for a Compute instance.
+
+### Alias Patterns
+
+The Alias By field allows you to control the format of the legend keys. The default is to show the metric name and labels. This can be long and hard to read. Using the following patterns in the alias field, you can format the legend key the way you want it.
+
+#### Metric Type Patterns
+
+| Alias Pattern | Description | Example Result |
+| -------------------- | ---------------------------- | ------------------------------------------------- |
+| `{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}` | returns the metric name part | `instance/cpu/utilization` |
+| `{{metric.service}}` | returns the service part | `compute` |
+
+#### Label Patterns
+
+In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.
+
+| Alias Pattern Format | Description | Alias Pattern Example | Example Result |
+| ------------------------ | -------------------------------- | -------------------------------- | ---------------- |
+| `{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` |
+
+Example Alias By: `{{metric.type}} - {{metric.label.instance_name}}`
+
+Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod`
+
+## Templating
+
+Instead of hard-coding things like server, application and sensor names in your metric queries you can use variables in their place.
+Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data
+being displayed in your dashboard.
+
+Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different
+types of template variables.
+
+### Query Variable
+
+Writing variable queries is not supported yet.
+
+### Using variables in queries
+
+There are two syntaxes:
+
+* `$varname` Example: `metric.label.$metric_label`
+* `[[varname]]` Example: `metric.label.[[metric_label]]`
+
+Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the _Multi-value_ or _Include all value_ options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.
+
+## Annotations
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver_annotations_query_editor.png" max-width= "400px" class="docs-image--right" >}}
+
+[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation
+queries via the Dashboard menu / Annotations view. Annotation rendering is expensive so it is important to limit the number of rows returned. There is no support for showing Stackdriver annotations and events yet but it works well with [custom metrics](https://cloud.google.com/monitoring/custom-metrics/) in Stackdriver.
+
+With the query editor for annotations, you can select a metric and filters. The `Title` and `Text` fields support templating and can use data returned from the query.
For example, the Title field could have the following text:
+
+`{{metric.type}} has value: {{metric.value}}`
+
+Example Result: `monitoring.googleapis.com/uptime_check/http_status has value: 502`
+
+### Patterns for the Annotation Query Editor
+
+| Alias Pattern Format | Description | Alias Pattern Example | Example Result |
+| ------------------------ | -------------------------------- | -------------------------------- | ------------------------------------------------- |
+| `{{metric.value}}` | value of the metric/point | `{{metric.value}}` | `555` |
+| `{{metric.type}}` | returns the full Metric Type | `{{metric.type}}` | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}` | returns the metric name part | `{{metric.name}}` | `instance/cpu/utilization` |
+| `{{metric.service}}` | returns the service part | `{{metric.service}}` | `compute` |
+| `{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` |
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here is a provisioning example for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: Stackdriver
+    type: stackdriver
+    access: proxy
+    jsonData:
+      tokenUri: https://oauth2.googleapis.com/token
+      clientEmail: stackdriver@myproject.iam.gserviceaccount.com
+    secureJsonData:
+      privateKey: |
+        -----BEGIN PRIVATE KEY-----
+        POSEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCb1u1Srw8ICYHS
+        ...
+        yA+23427282348234=
+        -----END PRIVATE KEY-----
+```
diff --git a/docs/sources/features/panels/alertlist.md b/docs/sources/features/panels/alertlist.md
index 58aa2c0966a..a1ea8f0f600 100644
--- a/docs/sources/features/panels/alertlist.md
+++ b/docs/sources/features/panels/alertlist.md
@@ -22,6 +22,6 @@ The alert list panel allows you to display your dashboard's alerts. The list can

1. **Show**: Lets you choose between current state or recent state changes.
2. **Max Items**: Max items sets the maximum number of items in the list.
-3. **Sort Order**: Lets you sort your list alphabeticaly(asc/desc) or by importance.
+3. **Sort Order**: Lets you sort your list alphabetically (asc/desc) or by importance.
4. **Alerts From This Dashboard**: Shows alerts only from the dashboard the alert list is in.
5. **State Filter**: Here you can filter your list by one or more parameters.
diff --git a/docs/sources/features/panels/heatmap.md b/docs/sources/features/panels/heatmap.md
index 56ffe29f20f..aa87fbef1df 100644
--- a/docs/sources/features/panels/heatmap.md
+++ b/docs/sources/features/panels/heatmap.md
@@ -80,7 +80,7 @@ the upper or lower bound of the interval.

There are a number of datasources supporting histogram over time like Elasticsearch (by using a Histogram bucket aggregation) or Prometheus (with [histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) metric type and *Format as* option set to Heatmap). But generally, any datasource could be used if it meets the requirements:
-returns series with names representing bucket bound or returns sereis sorted by the bound in ascending order.
+returns series with names representing bucket bound or returns series sorted by the bound in ascending order.
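+
+As a sketch of the first requirement, a SQL data source (using its `$__timeGroup` macro) could return one series per bucket by naming each series after its bucket bound; the `response_times` table and `bucket` column here are hypothetical:
+
+```sql
+SELECT
+  $__timeGroup(created_at, '1m') AS time,
+  bucket::text AS metric,  -- series name = bucket bound
+  count(*) AS value
+FROM response_times
+GROUP BY 1, 2
+ORDER BY 1
+```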
With Elasticsearch you control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis). diff --git a/docs/sources/guides/getting_started.md b/docs/sources/guides/getting_started.md index a27c6ca4c99..27957990265 100644 --- a/docs/sources/guides/getting_started.md +++ b/docs/sources/guides/getting_started.md @@ -69,7 +69,7 @@ The image above shows you the top header for a Dashboard. ## Dashboards, Panels, the building blocks of Grafana... -Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a grid. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, Prometheus and Cloudwatch). The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail. +Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a grid. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently Graphite, Prometheus, Elasticsearch, InfluxDB, OpenTSDB, MySQL, PostgreSQL, Microsoft SQL Server and AWS Cloudwatch). The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail. diff --git a/docs/sources/guides/whats-new-in-v2-5.md b/docs/sources/guides/whats-new-in-v2-5.md index 90270ea1121..08d51ba5bd7 100644 --- a/docs/sources/guides/whats-new-in-v2-5.md +++ b/docs/sources/guides/whats-new-in-v2-5.md @@ -25,7 +25,7 @@ correctly in UTC mode.
This release brings a fully featured query editor for Elasticsearch. You will now be able to visualize
-logs or any kind of data stored in Elasticserarch. The query editor allows you to build both simple
+logs or any kind of data stored in Elasticsearch. The query editor allows you to build both simple
and complex queries for logs or metrics.

- Compute metrics from your documents, supported Elasticsearch aggregations:
diff --git a/docs/sources/guides/whats-new-in-v2.md b/docs/sources/guides/whats-new-in-v2.md
index 499849c8d83..28d068b1cd6 100644
--- a/docs/sources/guides/whats-new-in-v2.md
+++ b/docs/sources/guides/whats-new-in-v2.md
@@ -34,7 +34,7 @@ Organizations via a role. That role can be:

There are currently no permissions on individual dashboards.

-Read more about Grafanas new user model on the [Admin section](../reference/admin/)
+Read more about Grafana's new user model on the [Admin section](../reference/admin/)

## Dashboard Snapshot sharing

diff --git a/docs/sources/guides/whats-new-in-v3-1.md b/docs/sources/guides/whats-new-in-v3-1.md
index 1e8ef87297b..ab6c5281275 100644
--- a/docs/sources/guides/whats-new-in-v3-1.md
+++ b/docs/sources/guides/whats-new-in-v3-1.md
@@ -21,7 +21,7 @@ The export feature is now accessed from the share menu.

Dashboards exported from Grafana 3.1 are now more portable and easier for others to import than before.
The export process extracts information about data source types used by panels and adds these to a new `inputs`
section in the dashboard json. So when you or another person tries to import the dashboard they will be asked to
-select data source and optional metrix prefix options.
+select data source and optional metric prefix options.

@@ -53,7 +53,7 @@ Grafana url to share with a colleague without having to use the Share modal.

## Internal metrics

-Do you want metrics about viewing metrics? Ofc you do! In this release we added support for sending metrics about Grafana to graphite.
+Do you want metrics about viewing metrics? Of course you do! In this release we added support for sending metrics about Grafana to graphite.
You can configure interval and server in the config file.

## Logging
diff --git a/docs/sources/guides/whats-new-in-v3.md b/docs/sources/guides/whats-new-in-v3.md
index d82a833ec90..dbd9b685a2b 100644
--- a/docs/sources/guides/whats-new-in-v3.md
+++ b/docs/sources/guides/whats-new-in-v3.md
@@ -197,7 +197,7 @@ you can install it manually from [Grafana.com](https://grafana.com)

## Plugin showcase

Discovering and installing plugins is very quick and easy with Grafana 3.0 and [Grafana.com](https://grafana.com). Here
-are a couple that I incurage you try!
+are a couple that I encourage you to try!

#### [Clock Panel](https://grafana.com/plugins/grafana-clock-panel)
Supports both current time and count down mode.
diff --git a/docs/sources/guides/whats-new-in-v4-2.md b/docs/sources/guides/whats-new-in-v4-2.md
index e976ed24700..7a00023172a 100644
--- a/docs/sources/guides/whats-new-in-v4-2.md
+++ b/docs/sources/guides/whats-new-in-v4-2.md
@@ -45,7 +45,7 @@ We might add more global built in variables in the future and if we do we will p

### Dedupe alert notifications when running multiple servers

-In this release we will dedupe alert notificiations when you are running multiple servers.
+In this release we will dedupe alert notifications when you are running multiple servers.
This makes it possible to run alerting on multiple servers and only get one notification.
We currently solve this with SQL transactions, which puts some limitations on how many servers you can use to execute the same rules.

@@ -67,7 +67,7 @@ Making it possible to have users in multiple groups and have detailed access control.

## Upgrade & Breaking changes

-If your using https in grafana we now force you to use tls 1.2 and the most secure ciphers.
+If you're using HTTPS in Grafana we now force you to use TLS 1.2 and the most secure ciphers.
We think it's better to be secure by default rather than making it configurable.
If you want to run HTTPS with lower versions of TLS we suggest you put a reverse proxy in front of Grafana.
diff --git a/docs/sources/guides/whats-new-in-v4-5.md b/docs/sources/guides/whats-new-in-v4-5.md
index a5cd3ca982d..c6cfcf64720 100644
--- a/docs/sources/guides/whats-new-in-v4-5.md
+++ b/docs/sources/guides/whats-new-in-v4-5.md
@@ -45,7 +45,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins

### Enhancements

* **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
-* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
+* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
* **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
- **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).

@@ -53,7 +53,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins

* **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit.

-This option is now rennamed (and moved to Options sub section above your queries):
+This option is now renamed (and moved to Options sub section above your queries):
![image|519x120](upload://ySjHOVpavV6yk9LHQxL9nq2HIsT.png)

Data source selection & options & help are now above your metric queries.
diff --git a/docs/sources/guides/whats-new-in-v4-6.md b/docs/sources/guides/whats-new-in-v4-6.md index ee0c4ea7a04..91fa74084a8 100644 --- a/docs/sources/guides/whats-new-in-v4-6.md +++ b/docs/sources/guides/whats-new-in-v4-6.md @@ -61,7 +61,7 @@ This makes exploring and filtering Prometheus data much easier. ### Minor Changes * **SMTP**: Make it possible to set specific EHLO for smtp client. [#9319](https://github.com/grafana/grafana/issues/9319) -* **Dataproxy**: Allow grafan to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250) +* **Dataproxy**: Allow Grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250) * **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367) * **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739) * **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8) diff --git a/docs/sources/guides/whats-new-in-v5-3.md b/docs/sources/guides/whats-new-in-v5-3.md new file mode 100644 index 00000000000..5dcadc0813d --- /dev/null +++ b/docs/sources/guides/whats-new-in-v5-3.md @@ -0,0 +1,92 @@ ++++ +title = "What's New in Grafana v5.3" +description = "Feature & improvement highlights for Grafana v5.3" +keywords = ["grafana", "new", "documentation", "5.3"] +type = "docs" +[menu.docs] +name = "Version 5.3" +identifier = "v5.3" +parent = "whatsnew" +weight = -9 ++++ + +# What's New in Grafana v5.3 + +Grafana v5.3 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. + +- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) as a core datasource! +- [TV mode]({{< relref "#tv-and-kiosk-mode" >}}) is improved and more accessible +- [Alerting]({{< relref "#notification-reminders" >}}) with notification reminders +- [Postgres]({{< relref "#postgres-query-builder" >}}) gets a new query builder! +- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for Gitlab is improved +- [Annotations]({{< relref "#annotations" >}}) with template variable filtering +- [Variables]({{< relref "#variables" >}}) with free text support + +## Google Stackdriver + +{{< docs-imagebox img="/img/docs/v53/stackdriver-with-heatmap.png" max-width= "600px" class="docs-image--no-shadow docs-image--right" >}} + +Grafana v5.3 ships with built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) and enables you to visualize your Stackdriver metrics in Grafana. + +Getting started with the plugin is easy. Simply create a GCE Service account that has access to the Stackdriver API scope, download the Service Account key file from Google and upload it on the Stackdriver datasource config page in Grafana and you should have a secure server-to-server authentication setup. Like other core plugins, Stackdriver has built-in support for alerting. It also comes with support for heatmaps and basic variables. + +If you're already accustomed to the Stackdriver Metrics Explorer UI, you'll notice that there are a lot of similarities to the query editor in Grafana. It is possible to add filters using wildcards and regular expressions. You can do Group By, Primary Aggregation and Alignment. 
+
+Alias By allows you to format the legend the way you want, and it's a feature that is not yet present in the Metrics Explorer. Two other features that are only supported in the Grafana plugin are the abilities to manually set the Alignment Period in the query editor and to add Annotations queries.
+
+The Grafana Stackdriver plugin comes with support for automatic unit detection. Grafana will try to map the Stackdriver unit type to a corresponding unit type in Grafana, and if successful the panel Y-axes will be updated accordingly to display the correct unit of measure. This is the first core plugin to provide support for unit detection, and it is our intention to provide support for this in other core plugins in the near future.
+
+The datasource is still in the `beta` phase, meaning it's currently in active development and is still missing one important feature: templating queries.
+Please try it out, but be aware that it might be subject to changes and possible bugs. We would love to hear your feedback.
+
+Please read [Using Google Stackdriver in Grafana](/features/datasources/stackdriver/) for more detailed information on how to get started and use it.
+
+## TV and Kiosk Mode
+
+{{< docs-imagebox img="/img/docs/v53/tv_mode_still.png" max-width="600px" class="docs-image--no-shadow docs-image--right" animated-gif="/img/docs/v53/tv_mode.gif" >}}
+
+We've improved the TV & kiosk mode to make it easier to use. There's now an icon in the top bar that will let you cycle through the different view modes.
+
+1. In the first view mode, the sidebar and most of the buttons in the top bar will be hidden.
+2. In the second view mode, the top bar is completely hidden so that only the dashboard itself is shown.
+3. Hit the escape key to go back to the default view mode.
+
+When switching view modes, the URL will be updated to reflect the view mode selected. This allows a dashboard to be opened with a
+certain view mode enabled. Additionally, this also enables [playlists](/reference/playlist) to be started with a certain view mode enabled.
+
+
+## Notification Reminders
+
+Do you use Grafana alerting and have some notifications that are more important than others? Then it's possible to set reminders so that you continue to be alerted until the problem is fixed. This is done on the notification channel itself and will affect all alerts that use that channel.
+For additional examples of why reminders might be useful for you, see [multiple series](/alerting/rules/#multiple-series).
+
+Learn how to enable and configure reminders [here](/alerting/notifications/#send-reminders).
+
+## Postgres Query Builder
+
+Grafana 5.3 comes with a new graphical query builder for Postgres. This brings Postgres integration more in line with some of the other datasources and makes it easier for both advanced users and beginners to work with timeseries in Postgres. Learn more about it in the [documentation](/features/datasources/postgres/#query-editor).
+
+{{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}}
+
+## Improved OAuth Support for Gitlab
+
+Grafana 5.3 comes with a new OAuth integration for Gitlab that enables configuration to only allow users that are members of certain Gitlab groups to authenticate. This makes it possible to use Gitlab OAuth with Grafana in a shared environment without giving everyone access to Grafana.
+Learn how to enable and configure it in the [documentation](/auth/gitlab/).
+
+## Annotations
+
+Grafana 5.3 brings improved support for [native annotations](/reference/annotations/#native-annotations) and makes it possible to use template variables when filtering by tags.
+Learn more about it in the [documentation](/reference/annotations/#query-by-tag).
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}
+
+## Variables
+
+Grafana 5.3 ships with a brand new variable type named `Text box` which makes it easier and more convenient to provide free text input to a variable.
+This new variable type will display as a free text input field with an optional prefilled default value.
+
+## Changelog
+
+Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
+of new features, changes, and bug fixes.
diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md
index 80b6e283be3..103de190793 100644
--- a/docs/sources/http_api/alerting.md
+++ b/docs/sources/http_api/alerting.md
@@ -50,6 +50,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

```http
HTTP/1.1 200
Content-Type: application/json
+
[
  {
    "id": 1,
@@ -86,6 +87,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "id": 1,
  "dashboardId": 1,
@@ -146,6 +148,7 @@ JSON Body Schema:

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "alertId": 1,
  "state": "Paused",
@@ -177,6 +180,7 @@ JSON Body Schema:

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "state": "Paused",
  "message": "alert paused",
@@ -204,19 +208,26 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

HTTP/1.1 200
Content-Type: application/json

-{
-  "id": 1,
-  "name": "Team A",
-  "type": "email",
-  "isDefault": true,
-  "created": "2017-01-01 12:45",
-  "updated": "2017-01-01 12:45"
-}
+[
+  {
+    "id": 1,
+    "name": "Team A",
+    "type": "email",
+    "isDefault": false,
+    "sendReminder": false,
+    "settings": {
+      "addresses": "carl@grafana.com;dev@grafana.com"
+    },
+    "created": "2018-04-23T14:44:09+02:00",
+    "updated": "2018-08-20T15:47:49+02:00"
+  }
+]
+
```

## Create alert notification

-You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.
+You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.

`POST /api/alert-notifications`

@@ -232,6 +243,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
  "name": "new alert notification", //Required
  "type": "email", //Required
  "isDefault": false,
+ "sendReminder": false,
  "settings": {
    "addresses": "carl@grafana.com;dev@grafana.com"
  }
@@ -243,14 +255,18 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "id": 1,
  "name": "new alert notification",
  "type": "email",
  "isDefault": false,
-  "settings": { addresses: "carl@grafana.com;dev@grafana.com"} }
-  "created": "2017-01-01 12:34",
-  "updated": "2017-01-01 12:34"
+  "sendReminder": false,
+  "settings": {
+    "addresses": "carl@grafana.com;dev@grafana.com"
+  },
+  "created": "2018-04-23T14:44:09+02:00",
+  "updated": "2018-08-20T15:47:49+02:00"
}
```

@@ -271,6 +287,8 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
  "name": "new alert notification", //Required
  "type": "email", //Required
  "isDefault": false,
+ "sendReminder": true,
+ "frequency": "15m",
  "settings": {
    "addresses": "carl@grafana.com;dev@grafana.com"
  }
@@ -282,12 +300,17 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "id": 1,
  "name": "new alert notification",
  "type": "email",
  "isDefault": false,
-  "settings": { addresses: "carl@grafana.com;dev@grafana.com"} }
+  "sendReminder": true,
+  "frequency": "15m",
+  "settings": {
+    "addresses": "carl@grafana.com;dev@grafana.com"
+  },
  "created": "2017-01-01 12:34",
  "updated": "2017-01-01 12:34"
}
```

@@ -311,6 +334,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

```http
HTTP/1.1 200
Content-Type: application/json
+
{
  "message": "Notification deleted"
}
diff --git a/docs/sources/http_api/auth.md b/docs/sources/http_api/auth.md
index
8ff40b5ef04..e87d3571322 100644 --- a/docs/sources/http_api/auth.md +++ b/docs/sources/http_api/auth.md @@ -5,7 +5,7 @@ keywords = ["grafana", "http", "documentation", "api", "authentication"] aliases = ["/http_api/authentication/"] type = "docs" [menu.docs] -name = "Authentication" +name = "Authentication HTTP API" parent = "http_api" +++ diff --git a/docs/sources/http_api/dashboard_versions.md b/docs/sources/http_api/dashboard_versions.md index 3d0ec27a3a3..0be22674997 100644 --- a/docs/sources/http_api/dashboard_versions.md +++ b/docs/sources/http_api/dashboard_versions.md @@ -291,7 +291,7 @@ Content-Type: text/html; charset=UTF-8

``` -The response is a textual respresentation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab. +The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab. Status Codes: diff --git a/docs/sources/index.md b/docs/sources/index.md index da977b73e0c..e9a900d93f1 100644 --- a/docs/sources/index.md +++ b/docs/sources/index.md @@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]

[nav-card markup from docs/sources/index.md; the link and image attributes were lost in extraction, so only the visible card text and diff markers are kept]
  Provisioning
  A guide to help you automate your Grafana setup & configuration.
- What's new in v5.2
- Article on all the new cool features and enhancements in v5.2
+ What's new in v5.3
+ Article on all the new cool features and enhancements in v5.3
  Screencasts
@@ -88,9 +88,13 @@ aliases = ["v1.1", "guides/reference/admin"]
  Prometheus
- OpenTSDB
+ Google Stackdriver
+ Cloudwatch
@@ -100,8 +104,12 @@ aliases = ["v1.1", "guides/reference/admin"]
  Postgres
- Cloudwatch
+ Microsoft SQL Server
+ OpenTSDB
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 3394dfe16bc..8d156e739bf 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -127,10 +127,13 @@ Another way is to put a webserver like Nginx or Apache in front of Grafana and have

### protocol

-`http` or `https`
+`http`, `https` or `socket`

> **Note** Grafana versions earlier than 3.0 are vulnerable to [POODLE](https://en.wikipedia.org/wiki/POODLE). So we strongly recommend upgrading to 3.x or using a reverse proxy for ssl termination.

+### socket
+Path where the socket should be created when `protocol=socket`. Please make sure that Grafana has appropriate permissions.
+
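+A minimal sketch of the corresponding settings in your Grafana configuration file (the socket path here is just an example):
+
+```ini
+[server]
+protocol = socket
+; example path; Grafana must be able to create and write to it
+socket = /tmp/grafana.sock
+```
+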
### domain

This setting is only used as a part of the `root_url` setting (see below). Important if you
@@ -322,470 +325,17 @@ Defaults to `false`.

## [auth]

-### disable_login_form
-
-Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false.
-
-### disable_signout_menu
-
-Set to true to disable the signout link in the side menu. useful if you use auth.proxy, defaults to false.
-
-
-## [auth.anonymous]
-
-### enabled
-
-Set to `true` to enable anonymous access. Defaults to `false`
-
-### org_name
-
-Set the organization name that should be used for anonymous users. If
-you change your organization name in the Grafana UI this setting needs
-to be updated to match the new name.
-
-### org_role
-
-Specify role for anonymous users. Defaults to `Viewer`, other valid
-options are `Editor` and `Admin`.
-
-## [auth.github]
-
-You need to create a GitHub OAuth application (you find this under the GitHub
-settings page). When you create the application you will need to specify
-a callback URL. Specify this as callback:
-
-```bash
-http://:/login/github
-```
-
-This callback URL must match the full HTTP address that you use in your
-browser to access Grafana, but with the prefix path of `/login/github`.
-When the GitHub OAuth application is created you will get a Client ID and a
-Client Secret. Specify these in the Grafana configuration file. For
-example:
-
-```bash
-[auth.github]
-enabled = true
-allow_sign_up = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-team_ids =
-allowed_organizations =
-```
-
-Restart the Grafana back-end. You should now see a GitHub login button
-on the login page. You can now login or sign up with your GitHub
-accounts.
-
-You may allow users to sign-up via GitHub authentication by setting the
-`allow_sign_up` option to `true`. When this option is set to `true`, any
-user successfully authenticating via GitHub authentication will be
-automatically signed up.
-
-### team_ids
-
-Require an active team membership for at least one of the given teams on
-GitHub. If the authenticated user isn't a member of at least one of the
-teams they will not be able to register or authenticate with your
-Grafana instance. For example:
-
-```bash
-[auth.github]
-enabled = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-team_ids = 150,300
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-allow_sign_up = true
-```
-
-### allowed_organizations
-
-Require an active organization membership for at least one of the given
-organizations on GitHub. If the authenticated user isn't a member of at least
-one of the organizations they will not be able to register or authenticate with
-your Grafana instance. For example
-
-```bash
-[auth.github]
-enabled = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-allow_sign_up = true
-# space-delimited organization names
-allowed_organizations = github google
-```
-
- -## [auth.gitlab] - -> Only available in Grafana v5.3+. - -You need to [create a GitLab OAuth -application](https://docs.gitlab.com/ce/integration/oauth_provider.html). -Choose a descriptive *Name*, and use the following *Redirect URI*: - -``` -https://grafana.example.com/login/gitlab -``` - -where `https://grafana.example.com` is the URL you use to connect to Grafana. -Adjust it as needed if you don't use HTTPS or if you use a different port; for -instance, if you access Grafana at `http://203.0.113.31:3000`, you should use - -``` -http://203.0.113.31:3000/login/gitlab -``` - -Finally, select *api* as the *Scope* and submit the form. Note that if you're -not going to use GitLab groups for authorization (i.e. not setting -`allowed_groups`, see below), you can select *read_user* instead of *api* as -the *Scope*, thus giving a more restricted access to your GitLab API. - -You'll get an *Application Id* and a *Secret* in return; we'll call them -`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this -section. - -Add the following to your Grafana configuration file to enable GitLab -authentication: - -```ini -[auth.gitlab] -enabled = false -allow_sign_up = false -client_id = GITLAB_APPLICATION_ID -client_secret = GITLAB_SECRET -scopes = api -auth_url = https://gitlab.com/oauth/authorize -token_url = https://gitlab.com/oauth/token -api_url = https://gitlab.com/api/v4 -allowed_groups = -``` - -Restart the Grafana backend for your changes to take effect. - -If you use your own instance of GitLab instead of `gitlab.com`, adjust -`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com` -hostname with your own. - -With `allow_sign_up` set to `false`, only existing users will be able to login -using their GitLab account, but with `allow_sign_up` set to `true`, *any* user -who can authenticate on GitLab will be able to login on your Grafana instance; -if you use the public `gitlab.com`, it means anyone in the world would be able -to login on your Grafana instance. - -You can can however limit access to only members of a given group or list of -groups by setting the `allowed_groups` option. - -### allowed_groups - -To limit access to authenticated users that are members of one or more [GitLab -groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups` -to a comma- or space-separated list of groups. For instance, if you want to -only give access to members of the `example` group, set - - -```ini -allowed_groups = example -``` - -If you want to also give access to members of the subgroup `bar`, which is in -the group `foo`, set - -```ini -allowed_groups = example, foo/bar -``` - -Note that in GitLab, the group or subgroup name doesn't always match its -display name, especially if the display name contains spaces or special -characters. Make sure you always use the group or subgroup name as it appears -in the URL of the group or subgroup. - -Here's a complete example with `alloed_sign_up` enabled, and access limited to -the `example` and `foo/bar` groups: - -```ini -[auth.gitlab] -enabled = false -allow_sign_up = true -client_id = GITLAB_APPLICATION_ID -client_secret = GITLAB_SECRET -scopes = api -auth_url = https://gitlab.com/oauth/authorize -token_url = https://gitlab.com/oauth/token -api_url = https://gitlab.com/api/v4 -allowed_groups = example, foo/bar -``` - -
-
-## [auth.google]
-
-First, you need to create a Google OAuth Client:
-
-1. Go to https://console.developers.google.com/apis/credentials
+Grafana provides many ways to authenticate users. The docs for authentication have been split into many different pages
below.
-
-2. Click the 'Create Credentials' button, then click 'OAuth Client ID' in the
menu that drops down
-
-3. Enter the following:
-
- - Application Type: Web Application
- - Name: Grafana
- - Authorized Javascript Origins: https://grafana.mycompany.com
- - Authorized Redirect URLs: https://grafana.mycompany.com/login/google
- - Replace https://grafana.mycompany.com with the URL of your Grafana instance.
-
-4. Click Create
-
-5. Copy the Client ID and Client Secret from the 'OAuth Client' modal
-
-Specify the Client ID and Secret in the Grafana configuration file. For example:
-
-```bash
-[auth.google]
-enabled = true
-client_id = CLIENT_ID
-client_secret = CLIENT_SECRET
-scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
-auth_url = https://accounts.google.com/o/oauth2/auth
-token_url = https://accounts.google.com/o/oauth2/token
-allowed_domains = mycompany.com mycompany.org
-allow_sign_up = true
-```
-
-Restart the Grafana back-end. You should now see a Google login button
-on the login page. You can now login or sign up with your Google
-accounts. The `allowed_domains` option is optional, and domains were separated by space.
-
-You may allow users to sign-up via Google authentication by setting the
-`allow_sign_up` option to `true`. When this option is set to `true`, any
-user successfully authenticating via Google authentication will be
-automatically signed up.
-
-## [auth.generic_oauth]
-
-This option could be used if have your own oauth service.
-
-This callback URL must match the full HTTP address that you use in your
-browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
-
-```bash
-[auth.generic_oauth]
-enabled = true
-client_id = YOUR_APP_CLIENT_ID
-client_secret = YOUR_APP_CLIENT_SECRET
-scopes =
-auth_url =
-token_url =
-api_url =
-allowed_domains = mycompany.com mycompany.org
-allow_sign_up = true
-```
-
-Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information.
-
-### Set up oauth2 with Okta
-
-First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https://<grafana domain>/` and set the Login redirect URIs to `https://<grafana domain>/login/generic_oauth`.
-
-Finally set up the generic oauth module like this:
-```bash
-[auth.generic_oauth]
-name = Okta
-enabled = true
-scopes = openid profile email
-client_id =
-client_secret =
-auth_url = https://<okta domain>/oauth2/v1/authorize
-token_url = https://<okta domain>/oauth2/v1/token
-api_url = https://<okta domain>/oauth2/v1/userinfo
-```
-
-### Set up oauth2 with Bitbucket
-
-```bash
-[auth.generic_oauth]
-name = BitBucket
-enabled = true
-allow_sign_up = true
-client_id =
-client_secret =
-scopes = account email
-auth_url = https://bitbucket.org/site/oauth2/authorize
-token_url = https://bitbucket.org/site/oauth2/access_token
-api_url = https://api.bitbucket.org/2.0/user
-team_ids =
-allowed_organizations =
-```
-
-### Set up oauth2 with OneLogin
-
-1. Create a new Custom Connector with the following settings:
- - Name: Grafana
- - Sign On Method: OpenID Connect
- - Redirect URI: `https://<grafana domain>/login/generic_oauth`
- - Signing Algorithm: RS256
- - Login URL: `https://<grafana domain>/login/generic_oauth`
- - then:
-2.
Add an App to the Grafana Connector:
- - Display Name: Grafana
- - then:
-3. Under the SSO tab on the Grafana App details page you'll find the Client ID and Client Secret.
-
- - Your OneLogin Domain will match the url you use to access OneLogin.
-
- - Configure Grafana as follows:
-
- ```bash
- [auth.generic_oauth]
- name = OneLogin
- enabled = true
- allow_sign_up = true
- client_id =
- client_secret =
- scopes = openid email name
- auth_url = https://<onelogin domain>.onelogin.com/oidc/auth
- token_url = https://<onelogin domain>.onelogin.com/oidc/token
- api_url = https://<onelogin domain>.onelogin.com/oidc/me
- team_ids =
- allowed_organizations =
- ```
-
-### Set up oauth2 with Auth0
-
-1. Create a new Client in Auth0
- - Name: Grafana
- - Type: Regular Web Application
-
-2. Go to the Settings tab and set:
- - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth`
-
-3. Click Save Changes, then use the values at the top of the page to configure Grafana:
-
- ```bash
- [auth.generic_oauth]
- enabled = true
- allow_sign_up = true
- team_ids =
- allowed_organizations =
- name = Auth0
- client_id =
- client_secret =
- scopes = openid profile email
- auth_url = https://<domain>/authorize
- token_url = https://<domain>/oauth/token
- api_url = https://<domain>/userinfo
- ```
-
-### Set up oauth2 with Azure Active Directory
-
-1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item.
-
-2. Copy the "Directory ID", this is needed for setting URLs later
-
-3. Click "App Registrations" and add a new application registration:
- - Name: Grafana
- - Application type: Web app / API
- - Sign-on URL: `https://<grafana domain>/login/generic_oauth`
-
-4. Click the name of the new application to open the application details page.
-
-5. Note down the "Application ID", this will be the OAuth client id.
-
-6. Click "Settings", then click "Keys" and add a new entry under Passwords
- - Key Description: Grafana OAuth
- - Duration: Never Expires
-
-7. Click Save then copy the key value, this will be the OAuth client secret.
-
-8. Configure Grafana as follows:
-
- ```bash
- [auth.generic_oauth]
- name = Azure AD
- enabled = true
- allow_sign_up = true
- client_id =
- client_secret =
- scopes = openid email name
- auth_url = https://login.microsoftonline.com/<directory id>/oauth2/authorize
- token_url = https://login.microsoftonline.com/<directory id>/oauth2/token
- api_url =
- team_ids =
- allowed_organizations =
- ```
-
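Any of these `[auth.generic_oauth]` settings can also be supplied as environment variables using the `GF_<SectionName>_<KeyName>` syntax covered in the Docker notes later in this diff. A sketch of the Azure AD setup for a containerized Grafana; `$TENANT_ID`, `$CLIENT_ID` and `$CLIENT_SECRET` are placeholders for the values collected in the steps above:

```bash
# The Azure AD settings above expressed as environment variables for Docker.
# $TENANT_ID, $CLIENT_ID and $CLIENT_SECRET are placeholders for the
# directory ID, application ID and key collected in the steps above.
docker run -d -p 3000:3000 --name=grafana \
  -e "GF_AUTH_GENERIC_OAUTH_NAME=Azure AD" \
  -e "GF_AUTH_GENERIC_OAUTH_ENABLED=true" \
  -e "GF_AUTH_GENERIC_OAUTH_ALLOW_SIGN_UP=true" \
  -e "GF_AUTH_GENERIC_OAUTH_CLIENT_ID=$CLIENT_ID" \
  -e "GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET=$CLIENT_SECRET" \
  -e "GF_AUTH_GENERIC_OAUTH_SCOPES=openid email name" \
  -e "GF_AUTH_GENERIC_OAUTH_AUTH_URL=https://login.microsoftonline.com/$TENANT_ID/oauth2/authorize" \
  -e "GF_AUTH_GENERIC_OAUTH_TOKEN_URL=https://login.microsoftonline.com/$TENANT_ID/oauth2/token" \
  grafana/grafana
```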
- -## [auth.basic] -### enabled -When enabled is `true` (default) the http api will accept basic authentication. - -
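With basic authentication enabled, the HTTP API accepts `username:password` credentials directly. A minimal check against a local instance, assuming the default `admin`/`admin` account:

```bash
# With [auth.basic] enabled, API requests can authenticate with
# username:password; this returns the current organization. The local URL
# and the default admin/admin account are assumptions.
curl -s -u admin:admin http://localhost:3000/api/org
```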
- -## [auth.ldap] -### enabled -Set to `true` to enable LDAP integration (default: `false`) - -### config_file -Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`) - -### allow_sign_up - -Allow sign up should almost always be true (default) to allow new Grafana users to be created (if ldap authentication is ok). If set to -false only pre-existing Grafana users will be able to login (if ldap authentication is ok). - -> For details on LDAP Configuration, go to the [LDAP Integration]({{< relref "ldap.md" >}}) page. - -
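These settings can also be supplied as environment variables, which is handy for Docker installs. A sketch assuming the default config file path:

```bash
# The [auth.ldap] settings above as environment variables; the config file
# path shown is the package default and may differ on your install.
export GF_AUTH_LDAP_ENABLED=true
export GF_AUTH_LDAP_CONFIG_FILE=/etc/grafana/ldap.toml
export GF_AUTH_LDAP_ALLOW_SIGN_UP=true
```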
- -## [auth.proxy] - -This feature allows you to handle authentication in a http reverse proxy. - -### enabled - -Defaults to `false` - -### header_name - -Defaults to X-WEBAUTH-USER - -#### header_property - -Defaults to username but can also be set to email - -### auto_sign_up - -Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`. - -### whitelist - -Limit where auth proxy requests come from by configuring a list of IP addresses. This can be used to prevent users spoofing the X-WEBAUTH-USER header. - -### headers - -Used to define additional headers for `Name`, `Email` and/or `Login`, for example if the user's name is sent in the X-WEBAUTH-NAME header and their email address in the X-WEBAUTH-EMAIL header, set `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL`. - -
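To check a proxy setup you can mimic the request the reverse proxy would send. A minimal sketch against a local instance, assuming the default header name and that `127.0.0.1` is in the whitelist:

```bash
# Simulate the reverse proxy: with auth.proxy enabled Grafana trusts the
# configured header, so this should return the org for the given user.
# The local URL, the admin user and the default header name are assumptions.
curl -s -H "X-WEBAUTH-USER: admin" http://localhost:3000/api/org
```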
+- [Authentication Overview]({{< relref "auth/overview.md" >}}) (anonymous access options, hide login and more)
+- [Google OAuth]({{< relref "auth/google.md" >}}) (auth.google)
+- [GitHub OAuth]({{< relref "auth/github.md" >}}) (auth.github)
+- [GitLab OAuth]({{< relref "auth/gitlab.md" >}}) (auth.gitlab)
+- [Generic OAuth]({{< relref "auth/generic-oauth.md" >}}) (auth.generic_oauth, okta, auth0, bitbucket, azure)
+- [Basic Authentication]({{< relref "auth/overview.md" >}}) (auth.basic)
+- [LDAP Authentication]({{< relref "auth/ldap.md" >}}) (auth.ldap)
+- [Auth Proxy]({{< relref "auth/auth-proxy.md" >}}) (auth.proxy)

## [session]

@@ -1009,3 +559,21 @@ Defaults to true. Set to false to disable alerting engine and hide Alerting from

### execute_alerts

Makes it possible to turn off alert rule execution.
+
+### error_or_timeout
+> Available in 5.3 and above
+
+Default setting for new alert rules. Defaults to categorizing errors and timeouts as alerting. Valid options: `alerting`, `keep_state`.
+
+### nodata_or_nullvalues
+> Available in 5.3 and above
+
+Default setting for how Grafana handles no-data or null values in alerting. Valid options: `alerting`, `no_data`, `keep_state`, `ok`.
+
+### concurrent_render_limit
+
+> Available in 5.3 and above
+
+Alert notifications can include images, but rendering many images at the same time can overload the server.
+This limit protects the server from rendering overload and helps make sure notifications are sent out quickly. The default
+value is `5`.
diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md
index 13fa3440170..2ae2e9dc40d 100644
--- a/docs/sources/installation/debian.md
+++ b/docs/sources/installation/debian.md
@@ -100,6 +100,8 @@
This will start the `grafana-server` process as the `grafana` user,
which was created during the package installation. The default HTTP port
is `3000` and default user and group is `admin`.

+The default login and password are `admin`/`admin`.
+
To configure the Grafana server to start at boot time:

```bash
diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md
index c71dc105ad4..52353ede8c2 100644
--- a/docs/sources/installation/docker.md
+++ b/docs/sources/installation/docker.md
@@ -20,7 +20,7 @@ $ docker run -d -p 3000:3000 grafana/grafana

## Configuration

-All options defined in conf/grafana.ini can be overridden using environment
+All options defined in `conf/grafana.ini` can be overridden using environment
variables by using the syntax `GF_<SectionName>_<KeyName>`.

For example:

@@ -40,6 +40,19 @@ those options.

> For any changes to `conf/grafana.ini` (or corresponding environment variables) to take effect you need to restart Grafana by restarting the Docker container.

+### Default Paths
+
+The following settings are hard-coded when launching the Grafana Docker container and can only be overridden using environment variables, not in `conf/grafana.ini`.
+
+Setting | Default value
+----------------------|---------------------------
+GF_PATHS_CONFIG | /etc/grafana/grafana.ini
+GF_PATHS_DATA | /var/lib/grafana
+GF_PATHS_HOME | /usr/share/grafana
+GF_PATHS_LOGS | /var/log/grafana
+GF_PATHS_PLUGINS | /var/lib/grafana/plugins
+GF_PATHS_PROVISIONING | /etc/grafana/provisioning
+
## Running a Specific Version of Grafana

```bash
@@ -74,7 +87,7 @@ docker run \

## Building a custom Grafana image with pre-installed plugins

-In the [grafana-docker](https://github.com/grafana/grafana-docker/) there is a folder called `custom/` which includes a `Dockerfile` that can be used to build a custom Grafana image.
It accepts `GRAFANA_VERSION` and `GF_INSTALL_PLUGINS` as build arguments.
+In the [grafana-docker](https://github.com/grafana/grafana/tree/master/packaging/docker) there is a folder called `custom/` which includes a `Dockerfile` that can be used to build a custom Grafana image. It accepts `GRAFANA_VERSION` and `GF_INSTALL_PLUGINS` as build arguments.

Example of how to build and run:
```bash
@@ -90,6 +103,21 @@ docker run \
grafana:latest-with-plugins
```
+
+## Installing Plugins from other sources
+
+> Only available in Grafana v5.3.1+
+
+It's possible to install plugins from custom URLs by specifying the URL like this: `GF_INSTALL_PLUGINS=<url to plugin zip>;<plugin name>`
+
+```bash
+docker run \
+  -d \
+  -p 3000:3000 \
+  --name=grafana \
+  -e "GF_INSTALL_PLUGINS=http://plugin-domain.com/my-custom-plugin.zip;custom-plugin" \
+  grafana/grafana
+```
+
## Configuring AWS Credentials for CloudWatch Support

```bash
diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md
deleted file mode 100644
index 88cf40632db..00000000000
--- a/docs/sources/installation/ldap.md
+++ /dev/null
@@ -1,150 +0,0 @@
-+++
-title = "LDAP Authentication"
-description = "Grafana LDAP Authentication Guide "
-keywords = ["grafana", "configuration", "documentation", "ldap"]
-type = "docs"
-[menu.docs]
-name = "LDAP Authentication"
-identifier = "ldap"
-parent = "admin"
-weight = 2
-+++
-
-# LDAP Authentication
-
-Grafana (2.1 and newer) ships with a strong LDAP integration feature. The LDAP integration in Grafana allows your
-Grafana users to login with their LDAP credentials. You can also specify mappings between LDAP
-group memberships and Grafana Organization user roles.
-
-## Configuration
-You turn on LDAP in the [main config file]({{< relref "configuration.md#auth-ldap" >}}) as well as specify the path to the LDAP
-specific configuration file (default: `/etc/grafana/ldap.toml`).
-
-### Example config
-
-```toml
-# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
-# [log]
-# filters = ldap:debug
-
-[[servers]]
-# Ldap server host (specify multiple hosts space separated)
-host = "127.0.0.1"
-# Default port is 389 or 636 if use_ssl = true
-port = 389
-# Set to true if ldap server supports TLS
-use_ssl = false
-# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS)
-start_tls = false
-# set to true if you want to skip ssl cert validation
-ssl_skip_verify = false
-# set to the path to your root CA certificate or leave unset to use system defaults
-# root_ca_cert = "/path/to/certificate.crt"
-# Authentication against LDAP servers requiring client certificates
-# client_cert = "/path/to/client.crt"
-# client_key = "/path/to/client.key"
-
-# Search user bind dn
-bind_dn = "cn=admin,dc=grafana,dc=org"
-# Search user bind password
-# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
-bind_password = 'grafana'
-
-# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)"
-# Allow login from email or username, example "(|(sAMAccountName=%s)(userPrincipalName=%s))"
-search_filter = "(cn=%s)"
-
-# An array of base dns to search through
-search_base_dns = ["dc=grafana,dc=org"]
-
-# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
-# This is done by enabling group_search_filter below. You must also set member_of= "cn"
-# in [servers.attributes] below.
- -## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) -# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" -## An array of the base DNs to search through for groups. Typically uses ou=groups -# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] - -# Specify names of the ldap attributes your ldap uses -[servers.attributes] -name = "givenName" -surname = "sn" -username = "cn" -member_of = "memberOf" -email = "email" - -# Map ldap groups to grafana org roles -[[servers.group_mappings]] -group_dn = "cn=admins,dc=grafana,dc=org" -org_role = "Admin" -# To make user an instance admin (Grafana Admin) uncomment line below -# grafana_admin = true -# The Grafana organization database id, optional, if left out the default org (id 1) will be used. Setting this allows for multiple group_dn's to be assigned to the same org_role provided the org_id differs -# org_id = 1 - -[[servers.group_mappings]] -group_dn = "cn=users,dc=grafana,dc=org" -org_role = "Editor" - -[[servers.group_mappings]] -# If you want to match all (or no ldap groups) then you can use wildcard -group_dn = "*" -org_role = "Viewer" - -``` - -## Bind & Bind Password - -By default the configuration expects you to specify a bind DN and bind password. This should be a read only user that can perform LDAP searches. -When the user DN is found a second bind is performed with the user provided username & password (in the normal Grafana login form). - -```bash -bind_dn = "cn=admin,dc=grafana,dc=org" -bind_password = "grafana" -``` - -### Single Bind Example - -If you can provide a single bind expression that matches all possible users, you can skip the second bind and bind against the user DN directly. -This allows you to not specify a bind_password in the configuration file. - -```bash -bind_dn = "cn=%s,o=users,dc=grafana,dc=org" -``` - -In this case you skip providing a `bind_password` and instead provide a `bind_dn` value with a `%s` somewhere. This will be replaced with the username entered in on the Grafana login page. -The search filter and search bases settings are still needed to perform the LDAP search to retrieve the other LDAP information (like LDAP groups and email). - -## POSIX schema (no memberOf attribute) -If your ldap server does not support the memberOf attribute add these options: - -```toml -## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) -group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" -## An array of the base DNs to search through for groups. Typically uses ou=groups -group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] -``` - -Also change set `member_of = "cn"` in the `[servers.attributes]` section. - - -## LDAP to Grafana Org Role Sync - -### Mappings -In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization -and role. These will be synced every time the user logs in, with LDAP being -the authoritative source. So, if you change a user's role in the Grafana Org. -Users page, this change will be reset the next time the user logs in. If you -change the LDAP groups of a user, the change will take effect the next -time the user logs in. - -### Grafana Admin -with a servers.group_mappings section you can set grafana_admin = true or false to sync Grafana Admin permission. A Grafana server admin has admin access over all orgs & -users. 
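To debug the bind and search settings outside Grafana, the same bind-then-search sequence can be reproduced with a standard LDAP client. A sketch using the example values from the config above; `someuser` is a placeholder login:

```bash
# Reproduce Grafana's first step by hand: bind as the search user, then run
# the search_filter for a given login ("someuser" is a placeholder), asking
# for the memberOf attribute used by the group mappings.
ldapsearch -x -H ldap://127.0.0.1:389 \
  -D "cn=admin,dc=grafana,dc=org" -w grafana \
  -b "dc=grafana,dc=org" "(cn=someuser)" memberOf
```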
-
-### Priority
-The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the TOML config will be used.
-
-
-
diff --git a/docs/sources/installation/mac.md b/docs/sources/installation/mac.md
index fbc00c01737..336e46c895d 100644
--- a/docs/sources/installation/mac.md
+++ b/docs/sources/installation/mac.md
@@ -60,6 +60,8 @@ Then start Grafana using:
brew services start grafana
```

+The default login and password are `admin`/`admin`.
+
### Configuration

diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md
index 24c301c5763..5bf3b7ed745 100644
--- a/docs/sources/installation/rpm.md
+++ b/docs/sources/installation/rpm.md
@@ -115,6 +115,8 @@ This will start the `grafana-server` process as the `grafana` user, which is
created during package installation. The default HTTP port is `3000`, and
default user and group is `admin`.

+The default login and password are `admin`/`admin`.
+
To configure the Grafana server to start at boot time:

```bash
diff --git a/docs/sources/installation/upgrading.md b/docs/sources/installation/upgrading.md
index c72bb4c0921..a476a38c3c5 100644
--- a/docs/sources/installation/upgrading.md
+++ b/docs/sources/installation/upgrading.md
@@ -109,3 +109,11 @@ positioning system when you load them in v5. Dashboards saved in v5 will not wor
external panel plugins might need to be updated to work properly. For more details
on the new panel positioning system, [click here]({{< relref "reference/dashboard.md#panel-size-position" >}})
+
+## Upgrading to v5.2
+
+One of the database migrations included in this release will update all annotation timestamps from second to millisecond precision. If you have a large number of annotations, the database migration may take a long time to complete, which may cause problems if you use systemd to run Grafana.
+
+We've had one report where a setup using systemd, PostgreSQL and a large number of annotations (table size 1645 MB) took 8-20 minutes for the database migration to complete. However, systemd killed the grafana-server process after 90 seconds. Any database migration queries still in progress when systemd kills the grafana-server process continue to execute in the database until finished.
+
+If you're using systemd and have a large number of annotations, consider temporarily adjusting the systemd `TimeoutStartSec` setting to something high like `30m` before upgrading.
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md
index 572081a1c54..b17d625a76e 100644
--- a/docs/sources/installation/windows.md
+++ b/docs/sources/installation/windows.md
@@ -31,6 +31,9 @@ on windows. Edit `custom.ini` and uncomment the `http_port`
configuration option (`;` is the comment character in ini files) and change it to something like `8080` or similar.
That port should not require extra Windows privileges.

+The default login and password are `admin`/`admin`.
+
+
Start Grafana by executing `grafana-server.exe`, located in the `bin` directory, preferably from the
command line. If you want to run Grafana as windows service, download [NSSM](https://nssm.cc/).
It is very easy to add Grafana as a Windows diff --git a/docs/sources/plugins/developing/development.md b/docs/sources/plugins/developing/development.md index f2e70a50c6a..48410b06732 100644 --- a/docs/sources/plugins/developing/development.md +++ b/docs/sources/plugins/developing/development.md @@ -10,7 +10,7 @@ weight = 1 # Developer Guide -You can extend Grafana by writing your own plugins and then share then with other users in [our plugin repository](https://grafana.com/plugins). +You can extend Grafana by writing your own plugins and then share them with other users in [our plugin repository](https://grafana.com/plugins). ## Short version @@ -33,7 +33,7 @@ There are two blog posts about authoring a plugin that might also be of interest ## What languages? Since everything turns into javascript it's up to you to choose which language you want. That said it's probably a good idea to choose es6 or typescript since -we use es6 classes in Grafana. So it's easier to get inspiration from the Grafana repo is you choose one of those languages. +we use es6 classes in Grafana. So it's easier to get inspiration from the Grafana repo if you choose one of those languages. ## Buildscript @@ -60,7 +60,6 @@ and [apps]({{< relref "apps.md" >}}) plugins in the documentation. The Grafana SDK is quite small so far and can be found here: - [SDK file in Grafana](https://github.com/grafana/grafana/blob/master/public/app/plugins/sdk.ts) -- [SDK Readme](https://github.com/grafana/grafana/blob/master/public/app/plugins/plugin_api.md) The SDK contains three different plugin classes: PanelCtrl, MetricsPanelCtrl and QueryCtrl. For plugins of the panel type, the module.js file should export one of these. There are some extra classes for [data sources]({{< relref "datasources.md" >}}). diff --git a/docs/sources/project/building_from_source.md b/docs/sources/project/building_from_source.md index 64e67a22bae..eed05f05fa6 100644 --- a/docs/sources/project/building_from_source.md +++ b/docs/sources/project/building_from_source.md @@ -13,7 +13,7 @@ dev environment. Grafana ships with its own required backend server; also comple ## Dependencies -- [Go 1.10](https://golang.org/dl/) +- [Go (Latest Stable)](https://golang.org/dl/) - [Git](https://git-scm.com/downloads) - [NodeJS LTS](https://nodejs.org/download/) - node-gyp is the Node.js native addon build tool and it requires extra dependencies: python 2.7, make and GCC. These are already installed for most Linux distros and MacOS. See the Building On Windows section or the [node-gyp installation instructions](https://github.com/nodejs/node-gyp#installation) for more details. diff --git a/docs/sources/reference/annotations.md b/docs/sources/reference/annotations.md index bfc104ef522..8732c8c709b 100644 --- a/docs/sources/reference/annotations.md +++ b/docs/sources/reference/annotations.md @@ -45,8 +45,11 @@ can still show them if you add a new **Annotation Query** and filter by tags. Bu ### Query by tag You can create new annotation queries that fetch annotations from the native annotation store via the `-- Grafana --` data source and by setting *Filter by* to `Tags`. Specify at least -one tag. For example create an annotation query name `outages` and specify a tag named `outage`. This query will show all annotations you create (from any dashboard or via API) that -have the `outage` tag. +one tag. For example create an annotation query name `outages` and specify a tag named `outage`. 
This query will show all annotations you create (from any dashboard or via API) that have the `outage` tag. By default, if you add multiple tags in the annotation query, Grafana will only show annotations that have all the tags you supplied. You can invert the behavior by enabling `Match any`, which means that Grafana will show annotations that contain at least one of the tags you supplied.
+
+In Grafana v5.3+ it's possible to use template variables in the tag query. So if you have a dashboard showing stats for different services and a template variable that dictates which services to show, you can now use the same template variable in your annotation query to only show annotations for those services.
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}

## Querying other data sources

diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md
index 7f86465312c..403dabba8ae 100644
--- a/docs/sources/reference/templating.md
+++ b/docs/sources/reference/templating.md
@@ -90,6 +90,7 @@ Type | Description
*Custom* | Define the variable options manually using a comma separated list.
*Constant* | Define a hidden constant. Useful for metric path prefixes for dashboards you want to share. During dashboard export, constant variables will be made into an import option.
*Ad hoc filters* | Very special kind of variable that only works with some data sources, InfluxDB & Elasticsearch currently. It allows you to add key/value filters that will automatically be added to all metric queries that use the specified data source.
+*Text box* | This variable type will display as a free text input field with an optional default value.

### Query options

@@ -245,7 +246,7 @@ Grafana has global built-in variables that can be used in expressions in the que

### The $__interval Variable

-This $__interval variable is similar to the `auto` interval variable that is described above. It can be used as a parameter to group by time (for InfluxDB), Date histogram interval (for Elasticsearch) or as a *summarize* function parameter (for Graphite).
+This $__interval variable is similar to the `auto` interval variable that is described above. It can be used as a parameter to group by time (for InfluxDB, MySQL, Postgres, MSSQL), Date histogram interval (for Elasticsearch) or as a *summarize* function parameter (for Graphite).

Grafana automatically calculates an interval that can be used to group by time in queries. When there are more data points than can be shown on a graph then queries can be made more efficient by grouping by a larger interval. It is more efficient to group by 1 day than by 10s when looking at 3 months of data and the graph will look the same and the query will be faster. The `$__interval` is calculated using the time range and the width of the graph (the number of pixels).

diff --git a/docs/sources/tutorials/ha_setup.md b/docs/sources/tutorials/ha_setup.md
index 9ae2989f6e6..f141392e223 100644
--- a/docs/sources/tutorials/ha_setup.md
+++ b/docs/sources/tutorials/ha_setup.md
@@ -22,13 +22,13 @@ Setting up Grafana for high availability is fairly simple. It comes down to two

First, you need to do is to setup MySQL or Postgres on another server and configure Grafana to use that database.
You can find the configuration for doing that in the [[database]]({{< relref "configuration.md" >}}#database) section in the grafana config.

-Grafana will now persist all long term data in the database.
How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on for the database your using. +Grafana will now persist all long term data in the database. How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on for the database you're using. ## User sessions -The second thing to consider is how to deal with user sessions and how to configure your load balancer infront of Grafana. -Grafana support two says of storing session data locally on disk or in a database/cache-server. -If you want to store sessions on disk you can use `sticky sessions` in your load balanacer. If you prefer to store session data in a database/cache-server +The second thing to consider is how to deal with user sessions and how to configure your load balancer in front of Grafana. +Grafana supports two ways of storing session data: locally on disk or in a database/cache-server. +If you want to store sessions on disk you can use `sticky sessions` in your load balancer. If you prefer to store session data in a database/cache-server you can use any stateless routing strategy in your load balancer (ex round robin or least connections). ### Sticky sessions diff --git a/docs/versions.json b/docs/versions.json index caefbe198d6..48962a783ae 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,5 +1,6 @@ [ - { "version": "v5.2", "path": "/", "archived": false, "current": true }, + { "version": "v5.3", "path": "/", "archived": false, "current": true }, + { "version": "v5.2", "path": "/v5.2", "archived": true }, { "version": "v5.1", "path": "/v5.1", "archived": true }, { "version": "v5.0", "path": "/v5.0", "archived": true }, { "version": "v4.6", "path": "/v4.6", "archived": true }, diff --git a/examples/README.md b/examples/README.md deleted file mode 100644 index 75f1f9a9a86..00000000000 --- a/examples/README.md +++ /dev/null @@ -1,5 +0,0 @@ -## Example plugin implementations - -datasource:[simple-json-datasource](https://github.com/grafana/simple-json-datasource) -app: [example-app](https://github.com/grafana/example-app) -panel: [grafana-piechart-panel](https://github.com/grafana/piechart-panel) diff --git a/examples/alerting-dashboard.json b/examples/alerting-dashboard.json deleted file mode 100644 index 744460d7847..00000000000 --- a/examples/alerting-dashboard.json +++ /dev/null @@ -1,800 +0,0 @@ -{ - "__inputs": [ - { - "name": "DS_GRAPHITE", - "label": "graphite", - "description": "", - "type": "datasource", - "pluginId": "graphite", - "pluginName": "Graphite" - } - ], - "__requires": [ - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "" - }, - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "3.1.0" - }, - { - "type": "datasource", - "id": "graphite", - "name": "Graphite", - "version": "1.0.0" - } - ], - "id": null, - "title": "Alerting example", - "tags": [], - "style": "dark", - "timezone": "browser", - "editable": true, - "hideControls": false, - "sharedCrosshair": false, - "rows": [ - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 355 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Critical alert panel", - "notifications": [], - "severity": "critical" - }, - 
"alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "grid": {}, - "id": 1, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - }, - { - "refId": "B", - "target": "aliasByNode(scale(statsd.$apa.counters.session_start.*.count, 10), 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "line": true, - "op": "gt", - "value": 355 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Warning panel alert", - "notifications": [], - "severity": "warning" - }, - "alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 2, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "warning", - "fill": true, - "fillColor": "rgba(235, 138, 14, 0.12)", - "line": true, - "lineColor": "rgba(247, 149, 32, 0.60)", - "op": "gt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 1 - ], - "type": "lt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "count" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "No datapoints", - "notifications": [], - "severity": "critical" - }, - "alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": 
"${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 20, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "value": 1, - "op": "lt", - "fill": true, - "line": true, - "colorMode": "critical" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Count datapoints", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "Row" - }, - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "lt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Alert below value", - "notifications": [], - "severity": "critical" - }, - "alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 17, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "lt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Alert below value", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 10, - 80 - ], - "type": "outside_range" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Alert is outside range", - "notifications": [], - "severity": "critical" - }, - "alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - 
"fill": 1, - "id": 18, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "lt", - "value": 10 - }, - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 80 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Alert is outside range", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 60, - 80 - ], - "type": "within_range" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Alert is within range", - "notifications": [], - "severity": "critical" - }, - "alerting": {}, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 19, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 60 - }, - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "lt", - "value": 80 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Alert is within range", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "New row" - } - ], - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - 
"time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [ - { - "current": { - "text": "fakesite", - "value": "fakesite" - }, - "datasource": null, - "hide": 0, - "includeAll": false, - "multi": false, - "name": "apa", - "options": [ - { - "selected": true, - "text": "fakesite", - "value": "fakesite" - } - ], - "query": "fakesite", - "refresh": 0, - "type": "custom" - } - ] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 15, - "links": [], - "gnetId": null -} \ No newline at end of file diff --git a/examples/alerting-multiple-alerts.json b/examples/alerting-multiple-alerts.json deleted file mode 100644 index e6e729ecc06..00000000000 --- a/examples/alerting-multiple-alerts.json +++ /dev/null @@ -1,2216 +0,0 @@ -{ - "__inputs": [ - { - "name": "DS_GRAPHITE", - "label": "graphite", - "description": "", - "type": "datasource", - "pluginId": "graphite", - "pluginName": "Graphite" - } - ], - "__requires": [ - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "" - }, - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "3.1.0" - }, - { - "type": "datasource", - "id": "graphite", - "name": "Graphite", - "version": "1.0.0" - } - ], - "id": null, - "title": "Dashboard with many alerts", - "tags": [], - "style": "dark", - "timezone": "browser", - "editable": true, - "hideControls": false, - "sharedCrosshair": false, - "rows": [ - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 30 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "sum" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Critical alert panel", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "grid": {}, - "id": 1, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 30 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 30 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "sum" - }, - "type": "query" - } - ], - "enabled": true, - 
"frequency": "60s", - "handler": 1, - "name": "Critical alert panel", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "grid": {}, - "id": 5, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 30 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 30 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "sum" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Critical alert panel", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "grid": {}, - "id": 6, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 30 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 30 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "sum" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Critical alert panel", - "notifications": [], - "severity": 
"critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "grid": {}, - "id": 8, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 30 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "Row" - }, - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Warning panel alert", - "notifications": [], - "severity": "warning" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 2, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "warning", - "fill": true, - "fillColor": "rgba(235, 138, 14, 0.12)", - "line": true, - "lineColor": "rgba(247, 149, 32, 0.60)", - "op": "gt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Warning panel alert", - "notifications": [], - "severity": "warning" - }, - 
"aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 3, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "warning", - "fill": true, - "fillColor": "rgba(235, 138, 14, 0.12)", - "line": true, - "lineColor": "rgba(247, 149, 32, 0.60)", - "op": "gt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Warning panel alert", - "notifications": [], - "severity": "warning" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 4, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "warning", - "fill": true, - "fillColor": "rgba(235, 138, 14, 0.12)", - "line": true, - "lineColor": "rgba(247, 149, 32, 0.60)", - "op": "gt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 20 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "60s", - "handler": 1, - "name": "Warning panel alert", - "notifications": [], - "severity": "warning" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 7, - "isNew": true, 
- "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "warning", - "fill": true, - "fillColor": "rgba(235, 138, 14, 0.12)", - "line": true, - "lineColor": "rgba(247, 149, 32, 0.60)", - "op": "gt", - "value": 20 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "New row" - }, - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 50 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Fast Critical panel alert", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 9, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 50 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 50 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Fast Critical panel alert", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 10, - "isNew": true, - "legend": { - 
"avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 50 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 50 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Fast Critical panel alert", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 11, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 50 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "conditions": [ - { - "evaluator": { - "params": [ - 50 - ], - "type": "gt" - }, - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "params": [], - "type": "avg" - }, - "type": "query" - } - ], - "enabled": true, - "frequency": "10s", - "handler": 1, - "name": "Fast Critical panel alert", - "notifications": [], - "severity": "critical" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 12, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false 
- }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "colorMode": "critical", - "fill": true, - "fillColor": "rgba(234, 112, 112, 0.12)", - "line": true, - "lineColor": "rgba(237, 46, 24, 0.60)", - "op": "gt", - "value": 50 - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Critical panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "New row" - }, - { - "collapse": false, - "editable": true, - "height": "250px", - "panels": [ - { - "alert": { - "enabled": true, - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "gt", - "params": [ - 10 - ] - } - } - ], - "severity": "warning", - "frequency": "1s", - "handler": 1, - "notifications": [], - "name": "Fast Warning panel alert" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 13, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "value": 10, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "warning", - "fillColor": "rgba(235, 138, 14, 0.12)", - "lineColor": "rgba(247, 149, 32, 0.60)" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "enabled": true, - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "gt", - "params": [ - 10 - ] - } - } - ], - "severity": "warning", - "frequency": "1s", - "handler": 1, - "notifications": [], - "name": "Fast Warning panel alert" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 14, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": 
true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "value": 10, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "warning", - "fillColor": "rgba(235, 138, 14, 0.12)", - "lineColor": "rgba(247, 149, 32, 0.60)" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "enabled": true, - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "gt", - "params": [ - 10 - ] - } - } - ], - "severity": "warning", - "frequency": "1s", - "handler": 1, - "notifications": [], - "name": "Fast Warning panel alert" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 15, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "value": 10, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "warning", - "fillColor": "rgba(235, 138, 14, 0.12)", - "lineColor": "rgba(247, 149, 32, 0.60)" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "alert": { - "enabled": true, - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "gt", - "params": [ - 10 - ] - } - } - ], - "severity": "warning", - "frequency": "1s", - "handler": 1, - "notifications": [], - "name": "Fast Warning panel alert" - }, - "aliasColors": {}, - "bars": false, - "datasource": "${DS_GRAPHITE}", - "editable": true, - "error": false, - "fill": 1, - "id": 16, - "isNew": true, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": 
false, - "renderer": "flot", - "seriesOverrides": [], - "span": 3, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)" - } - ], - "thresholds": [ - { - "value": 10, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "warning", - "fillColor": "rgba(235, 138, 14, 0.12)", - "lineColor": "rgba(247, 149, 32, 0.60)" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Fast Warning panel", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "show": true - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "title": "New row" - }, - { - "title": "New row", - "height": "250px", - "editable": true, - "collapse": false, - "panels": [ - { - "title": "Alert below value", - "error": false, - "span": 3, - "editable": true, - "type": "graph", - "isNew": true, - "id": 17, - "targets": [ - { - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)", - "refId": "A" - } - ], - "datasource": "${DS_GRAPHITE}", - "renderer": "flot", - "yaxes": [ - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - }, - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - } - ], - "xaxis": { - "show": true - }, - "alert": { - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "lt", - "params": [ - 20 - ] - } - } - ], - "severity": "critical", - "frequency": "60s", - "handler": 1, - "notifications": [], - "name": "Alert below value", - "enabled": true - }, - "lines": true, - "fill": 1, - "linewidth": 2, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true, - "sort": 0, - "msResolution": false - }, - "timeFrom": null, - "timeShift": null, - "aliasColors": {}, - "seriesOverrides": [], - "thresholds": [ - { - "value": 20, - "op": "lt", - "fill": true, - "line": true, - "colorMode": "critical", - "fillColor": "rgba(234, 112, 112, 0.12)", - "lineColor": "rgba(237, 46, 24, 0.60)" - } - ], - "links": [] - }, - { - "title": "Alert is outside range", - "error": false, - "span": 3, - "editable": true, - "type": "graph", - "isNew": true, - "id": 18, - "targets": [ - { - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)", - "refId": "A" - } - ], - "datasource": "${DS_GRAPHITE}", - "renderer": "flot", - "yaxes": [ - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - }, - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - } - ], - "xaxis": { - "show": true - }, - "alert": { - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "outside_range", - "params": [ - 10, - 
80 - ] - } - } - ], - "severity": "critical", - "frequency": "10s", - "handler": 1, - "notifications": [], - "name": "Alert is outside range", - "enabled": true - }, - "lines": true, - "fill": 1, - "linewidth": 2, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true, - "sort": 0, - "msResolution": false - }, - "timeFrom": null, - "timeShift": null, - "aliasColors": {}, - "seriesOverrides": [], - "thresholds": [ - { - "value": 10, - "op": "lt", - "fill": true, - "line": true, - "colorMode": "critical", - "fillColor": "rgba(234, 112, 112, 0.12)", - "lineColor": "rgba(237, 46, 24, 0.60)" - }, - { - "value": 80, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "critical", - "fillColor": "rgba(234, 112, 112, 0.12)", - "lineColor": "rgba(237, 46, 24, 0.60)" - } - ], - "links": [] - }, - { - "title": "Alert is within range", - "error": false, - "span": 3, - "editable": true, - "type": "graph", - "isNew": true, - "id": 19, - "targets": [ - { - "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)", - "refId": "A" - } - ], - "datasource": "${DS_GRAPHITE}", - "renderer": "flot", - "yaxes": [ - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - }, - { - "label": null, - "show": true, - "logBase": 1, - "min": null, - "max": null, - "format": "short" - } - ], - "xaxis": { - "show": true - }, - "alert": { - "conditions": [ - { - "type": "query", - "query": { - "params": [ - "A", - "5m", - "now" - ] - }, - "reducer": { - "type": "avg", - "params": [] - }, - "evaluator": { - "type": "within_range", - "params": [ - 60, - 80 - ] - } - } - ], - "severity": "critical", - "frequency": "10s", - "handler": 1, - "notifications": [], - "name": "Alert is within range", - "enabled": true - }, - "lines": true, - "fill": 1, - "linewidth": 2, - "points": false, - "pointradius": 5, - "bars": false, - "stack": false, - "percentage": false, - "legend": { - "show": true, - "values": false, - "min": false, - "max": false, - "current": false, - "total": false, - "avg": false - }, - "nullPointMode": "connected", - "steppedLine": false, - "tooltip": { - "value_type": "cumulative", - "shared": true, - "sort": 0, - "msResolution": false - }, - "timeFrom": null, - "timeShift": null, - "aliasColors": {}, - "seriesOverrides": [], - "thresholds": [ - { - "value": 60, - "op": "gt", - "fill": true, - "line": true, - "colorMode": "critical", - "fillColor": "rgba(234, 112, 112, 0.12)", - "lineColor": "rgba(237, 46, 24, 0.60)" - }, - { - "value": 80, - "op": "lt", - "fill": true, - "line": true, - "colorMode": "critical", - "fillColor": "rgba(234, 112, 112, 0.12)", - "lineColor": "rgba(237, 46, 24, 0.60)" - } - ], - "links": [] - } - ] - } - ], - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "templating": { - "list": [] - }, - "annotations": { - "list": [] - }, - "schemaVersion": 13, - "version": 50, - "links": [], - "gnetId": null -} \ No newline at end of file diff --git a/jest.config.js b/jest.config.js index 
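Note: every alert in the dashboard JSON removed above follows the same condition shape — a query (refId plus a relative time window), a reducer, and an evaluator (gt, lt, outside_range, or within_range). A minimal Go sketch that decodes one such condition; the types are illustrative stand-ins, not Grafana's internal alerting model:

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative mirror of the "conditions" entries in the deleted dashboard:
// query -> reducer -> evaluator. Grafana's real model lives in pkg/services/alerting.
type Condition struct {
	Type  string `json:"type"`
	Query struct {
		Params []string `json:"params"` // [refId, from, to], e.g. ["A", "5m", "now"]
	} `json:"query"`
	Reducer struct {
		Type   string        `json:"type"` // e.g. "avg"
		Params []interface{} `json:"params"`
	} `json:"reducer"`
	Evaluator struct {
		Type   string    `json:"type"`   // "gt", "lt", "outside_range", "within_range"
		Params []float64 `json:"params"` // one threshold, or two for the range types
	} `json:"evaluator"`
}

func main() {
	raw := `{"type":"query","query":{"params":["A","5m","now"]},"reducer":{"type":"avg","params":[]},"evaluator":{"type":"outside_range","params":[10,80]}}`
	var c Condition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("reduce %s of %s over %s, alert when %s %v\n",
		c.Reducer.Type, c.Query.Params[0], c.Query.Params[1], c.Evaluator.Type, c.Evaluator.Params)
}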
a5cd3416f75..cac634fbf10 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,13 +1,8 @@ module.exports = { verbose: false, - "globals": { - "ts-jest": { - "tsConfigFile": "tsconfig.json" - } - }, "transform": { - "^.+\\.tsx?$": "/node_modules/ts-jest/preprocessor.js" + "^.+\\.(ts|tsx)$": "ts-jest" }, "moduleDirectories": ["node_modules", "public"], "roots": [ diff --git a/latest.json b/latest.json index 7b36131fea2..4355e9a64b7 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "5.2.3", - "testing": "5.2.3" + "stable": "5.3.1", + "testing": "5.3.1" } diff --git a/package.json b/package.json index 9cc47ff71b8..b4c70c5b3a8 100644 --- a/package.json +++ b/package.json @@ -4,18 +4,20 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.3.0-pre1", + "version": "5.4.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" }, "devDependencies": { "@types/d3": "^4.10.1", - "@types/enzyme": "^2.8.9", - "@types/jest": "^21.1.4", + "@types/enzyme": "^3.1.13", + "@types/jest": "^23.3.2", "@types/node": "^8.0.31", - "@types/react": "^16.0.25", - "@types/react-dom": "^16.0.3", + "@types/react": "^16.4.14", + "@types/react-custom-scrollbars": "^4.0.5", + "@types/react-dom": "^16.0.7", + "@types/react-select": "^2.0.4", "angular-mocks": "1.6.6", "autoprefixer": "^6.4.0", "axios": "^0.17.1", @@ -25,15 +27,15 @@ "babel-preset-es2015": "^6.24.1", "clean-webpack-plugin": "^0.1.19", "css-loader": "^0.28.7", - "enzyme": "^3.1.0", - "enzyme-adapter-react-16": "^1.0.1", - "enzyme-to-json": "^3.3.0", + "enzyme": "^3.6.0", + "enzyme-adapter-react-16": "^1.5.0", + "enzyme-to-json": "^3.3.4", "es6-promise": "^3.0.2", "es6-shim": "^0.35.3", "expect.js": "~0.2.0", "expose-loader": "^0.7.3", "file-loader": "^1.1.11", - "fork-ts-checker-webpack-plugin": "^0.4.2", + "fork-ts-checker-webpack-plugin": "^0.4.9", "gaze": "^1.1.2", "glob": "~7.0.0", "grunt": "1.0.1", @@ -55,11 +57,10 @@ "html-webpack-harddisk-plugin": "^0.2.0", "html-webpack-plugin": "^3.2.0", "husky": "^0.14.3", - "jest": "^22.0.4", + "jest": "^23.6.0", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", "mini-css-extract-plugin": "^0.4.0", - "mobx-react-devtools": "^4.2.15", "mocha": "^4.0.1", "ng-annotate-loader": "^0.6.1", "ng-annotate-webpack-plugin": "^0.3.0", @@ -71,22 +72,22 @@ "postcss-loader": "^2.0.6", "postcss-reporter": "^5.0.0", "prettier": "1.9.2", - "react-hot-loader": "^4.2.0", - "react-test-renderer": "^16.0.0", + "react-hot-loader": "^4.3.6", + "react-test-renderer": "^16.5.0", "sass-lint": "^1.10.2", "sass-loader": "^7.0.1", "sinon": "1.17.6", "style-loader": "^0.21.0", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-jest": "^22.4.6", - "ts-loader": "^4.3.0", + "ts-jest": "^23.10.4", + "ts-loader": "^5.1.0", "tslib": "^1.9.3", "tslint": "^5.8.0", "tslint-loader": "^3.5.3", - "typescript": "^2.6.2", + "typescript": "^3.0.3", "uglifyjs-webpack-plugin": "^1.2.7", - "webpack": "^4.8.0", + "webpack": "4.19.1", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", "webpack-cli": "^2.1.4", @@ -100,8 +101,7 @@ "watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js", "build": "grunt build", "test": "grunt test", - "test:coverage": "grunt test --coverage=true", - "lint": "tslint -c tslint.json --project tsconfig.json --type-check", + "lint": "tslint -c tslint.json --project tsconfig.json", "jest": "jest --notify --watch", "api-tests": "jest --notify --watch --config=tests/api/jest.js", "precommit": 
"lint-staged && grunt precommit" @@ -132,6 +132,7 @@ "angular-native-dragdrop": "1.2.2", "angular-route": "1.6.6", "angular-sanitize": "1.6.6", + "babel-jest": "^23.6.0", "babel-polyfill": "^6.26.0", "baron": "^3.0.3", "brace": "^0.10.0", @@ -144,28 +145,32 @@ "immutable": "^3.8.2", "jquery": "^3.2.1", "lodash": "^4.17.10", - "mobx": "^3.4.1", - "mobx-react": "^4.3.5", - "mobx-state-tree": "^1.3.1", "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", "prismjs": "^1.6.0", - "prop-types": "^15.6.0", + "prop-types": "^15.6.2", "rc-cascader": "^0.14.0", - "react": "^16.2.0", - "react-dom": "^16.2.0", + "react": "^16.5.0", + "react-custom-scrollbars": "^4.2.1", + "react-dom": "^16.5.0", "react-grid-layout": "0.16.6", "react-highlight-words": "^0.10.0", "react-popper": "^0.7.5", - "react-select": "^1.1.0", + "react-redux": "^5.0.7", + "react-select": "2.1.0", "react-sizeme": "^2.3.6", + "react-table": "^6.8.6", "react-transition-group": "^2.2.1", + "redux": "^4.0.0", + "redux-logger": "^3.0.6", + "redux-thunk": "^2.3.0", "remarkable": "^1.7.1", "rst2html": "github:thoward/rst2html#990cb89", "rxjs": "^5.4.3", "slate": "^0.33.4", "slate-plain-serializer": "^0.5.10", + "slate-prism": "^0.5.0", "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 60013fe2b10..a936d696207 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -192,14 +192,7 @@ func GetAlertNotifications(c *m.ReqContext) Response { result := make([]*dtos.AlertNotification, 0) for _, notification := range query.Result { - result = append(result, &dtos.AlertNotification{ - Id: notification.Id, - Name: notification.Name, - Type: notification.Type, - IsDefault: notification.IsDefault, - Created: notification.Created, - Updated: notification.Updated, - }) + result = append(result, dtos.NewAlertNotification(notification)) } return JSON(200, result) @@ -215,7 +208,7 @@ func GetAlertNotificationByID(c *m.ReqContext) Response { return Error(500, "Failed to get alert notifications", err) } - return JSON(200, query.Result) + return JSON(200, dtos.NewAlertNotification(query.Result)) } func CreateAlertNotification(c *m.ReqContext, cmd m.CreateAlertNotificationCommand) Response { @@ -225,7 +218,7 @@ func CreateAlertNotification(c *m.ReqContext, cmd m.CreateAlertNotificationComma return Error(500, "Failed to create alert notification", err) } - return JSON(200, cmd.Result) + return JSON(200, dtos.NewAlertNotification(cmd.Result)) } func UpdateAlertNotification(c *m.ReqContext, cmd m.UpdateAlertNotificationCommand) Response { @@ -235,7 +228,7 @@ func UpdateAlertNotification(c *m.ReqContext, cmd m.UpdateAlertNotificationComma return Error(500, "Failed to update alert notification", err) } - return JSON(200, cmd.Result) + return JSON(200, dtos.NewAlertNotification(cmd.Result)) } func DeleteAlertNotification(c *m.ReqContext) Response { diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 55c9c954940..242b5531f51 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -24,6 +24,7 @@ func GetAnnotations(c *m.ReqContext) Response { Limit: c.QueryInt64("limit"), Tags: c.QueryStrings("tags"), Type: c.Query("type"), + MatchAny: c.QueryBool("matchAny"), } repo := annotations.GetRepository() diff --git a/pkg/api/api.go b/pkg/api/api.go index 906481bbb8a..c2739a66d6c 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -10,10 +10,10 @@ import ( ) func (hs *HTTPServer) registerRoutes() { 
- reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true}) - reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) - reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) - reqOrgAdmin := middleware.RoleAuth(m.ROLE_ADMIN) + reqSignedIn := middleware.ReqSignedIn + reqGrafanaAdmin := middleware.ReqGrafanaAdmin + reqEditorRole := middleware.ReqEditorRole + reqOrgAdmin := middleware.ReqOrgAdmin redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL() redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL() quota := middleware.Quota @@ -22,66 +22,66 @@ func (hs *HTTPServer) registerRoutes() { r := hs.RouteRegister // not logged in views - r.Get("/", reqSignedIn, Index) + r.Get("/", reqSignedIn, hs.Index) r.Get("/logout", Logout) r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost)) r.Get("/login/:name", quota("session"), OAuthLogin) - r.Get("/login", LoginView) - r.Get("/invite/:code", Index) + r.Get("/login", hs.LoginView) + r.Get("/invite/:code", hs.Index) // authed views - r.Get("/profile/", reqSignedIn, Index) - r.Get("/profile/password", reqSignedIn, Index) - r.Get("/profile/switch-org/:id", reqSignedIn, ChangeActiveOrgAndRedirectToHome) - r.Get("/org/", reqSignedIn, Index) - r.Get("/org/new", reqSignedIn, Index) - r.Get("/datasources/", reqSignedIn, Index) - r.Get("/datasources/new", reqSignedIn, Index) - r.Get("/datasources/edit/*", reqSignedIn, Index) - r.Get("/org/users", reqSignedIn, Index) - r.Get("/org/users/new", reqSignedIn, Index) - r.Get("/org/users/invite", reqSignedIn, Index) - r.Get("/org/teams", reqSignedIn, Index) - r.Get("/org/teams/*", reqSignedIn, Index) - r.Get("/org/apikeys/", reqSignedIn, Index) - r.Get("/dashboard/import/", reqSignedIn, Index) - r.Get("/configuration", reqGrafanaAdmin, Index) - r.Get("/admin", reqGrafanaAdmin, Index) - r.Get("/admin/settings", reqGrafanaAdmin, Index) - r.Get("/admin/users", reqGrafanaAdmin, Index) - r.Get("/admin/users/create", reqGrafanaAdmin, Index) - r.Get("/admin/users/edit/:id", reqGrafanaAdmin, Index) - r.Get("/admin/orgs", reqGrafanaAdmin, Index) - r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, Index) - r.Get("/admin/stats", reqGrafanaAdmin, Index) - - r.Get("/styleguide", reqSignedIn, Index) - - r.Get("/plugins", reqSignedIn, Index) - r.Get("/plugins/:id/edit", reqSignedIn, Index) - r.Get("/plugins/:id/page/:page", reqSignedIn, Index) - - r.Get("/d/:uid/:slug", reqSignedIn, Index) - r.Get("/d/:uid", reqSignedIn, Index) - r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardURL, Index) - r.Get("/dashboard/script/*", reqSignedIn, Index) - r.Get("/dashboard-solo/snapshot/*", Index) - r.Get("/d-solo/:uid/:slug", reqSignedIn, Index) - r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloURL, Index) - r.Get("/dashboard-solo/script/*", reqSignedIn, Index) - r.Get("/import/dashboard", reqSignedIn, Index) - r.Get("/dashboards/", reqSignedIn, Index) - r.Get("/dashboards/*", reqSignedIn, Index) - - r.Get("/explore", reqEditorRole, Index) - - r.Get("/playlists/", reqSignedIn, Index) - r.Get("/playlists/*", reqSignedIn, Index) - r.Get("/alerting/", reqSignedIn, Index) - r.Get("/alerting/*", reqSignedIn, Index) + r.Get("/profile/", reqSignedIn, hs.Index) + r.Get("/profile/password", reqSignedIn, hs.Index) + r.Get("/profile/switch-org/:id", reqSignedIn, hs.ChangeActiveOrgAndRedirectToHome) + r.Get("/org/", reqSignedIn, hs.Index) + r.Get("/org/new", 
reqSignedIn, hs.Index) + r.Get("/datasources/", reqSignedIn, hs.Index) + r.Get("/datasources/new", reqSignedIn, hs.Index) + r.Get("/datasources/edit/*", reqSignedIn, hs.Index) + r.Get("/org/users", reqSignedIn, hs.Index) + r.Get("/org/users/new", reqSignedIn, hs.Index) + r.Get("/org/users/invite", reqSignedIn, hs.Index) + r.Get("/org/teams", reqSignedIn, hs.Index) + r.Get("/org/teams/*", reqSignedIn, hs.Index) + r.Get("/org/apikeys/", reqSignedIn, hs.Index) + r.Get("/dashboard/import/", reqSignedIn, hs.Index) + r.Get("/configuration", reqGrafanaAdmin, hs.Index) + r.Get("/admin", reqGrafanaAdmin, hs.Index) + r.Get("/admin/settings", reqGrafanaAdmin, hs.Index) + r.Get("/admin/users", reqGrafanaAdmin, hs.Index) + r.Get("/admin/users/create", reqGrafanaAdmin, hs.Index) + r.Get("/admin/users/edit/:id", reqGrafanaAdmin, hs.Index) + r.Get("/admin/orgs", reqGrafanaAdmin, hs.Index) + r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, hs.Index) + r.Get("/admin/stats", reqGrafanaAdmin, hs.Index) + + r.Get("/styleguide", reqSignedIn, hs.Index) + + r.Get("/plugins", reqSignedIn, hs.Index) + r.Get("/plugins/:id/edit", reqSignedIn, hs.Index) + r.Get("/plugins/:id/page/:page", reqSignedIn, hs.Index) + + r.Get("/d/:uid/:slug", reqSignedIn, hs.Index) + r.Get("/d/:uid", reqSignedIn, hs.Index) + r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardURL, hs.Index) + r.Get("/dashboard/script/*", reqSignedIn, hs.Index) + r.Get("/dashboard-solo/snapshot/*", hs.Index) + r.Get("/d-solo/:uid/:slug", reqSignedIn, hs.Index) + r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloURL, hs.Index) + r.Get("/dashboard-solo/script/*", reqSignedIn, hs.Index) + r.Get("/import/dashboard", reqSignedIn, hs.Index) + r.Get("/dashboards/", reqSignedIn, hs.Index) + r.Get("/dashboards/*", reqSignedIn, hs.Index) + + r.Get("/explore", reqEditorRole, hs.Index) + + r.Get("/playlists/", reqSignedIn, hs.Index) + r.Get("/playlists/*", reqSignedIn, hs.Index) + r.Get("/alerting/", reqSignedIn, hs.Index) + r.Get("/alerting/*", reqSignedIn, hs.Index) // sign up - r.Get("/signup", Index) + r.Get("/signup", hs.Index) r.Get("/api/user/signup/options", Wrap(GetSignUpOptions)) r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), Wrap(SignUp)) r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2)) @@ -91,15 +91,15 @@ func (hs *HTTPServer) registerRoutes() { r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite)) // reset password - r.Get("/user/password/send-reset-email", Index) - r.Get("/user/password/reset", Index) + r.Get("/user/password/send-reset-email", hs.Index) + r.Get("/user/password/reset", hs.Index) r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail)) r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword)) // dashboard snapshots - r.Get("/dashboard/snapshot/*", Index) - r.Get("/dashboard/snapshots/", reqSignedIn, Index) + r.Get("/dashboard/snapshot/*", hs.Index) + r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index) // api for dashboard snapshots r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot) @@ -234,13 +234,13 @@ func (hs *HTTPServer) registerRoutes() { datasourceRoute.Get("/", Wrap(GetDataSources)) datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), Wrap(AddDataSource)) datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), Wrap(UpdateDataSource)) - 
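Note on the api.go hunk: turning Index and friends into methods on *HTTPServer lets route handlers reach injected state (hs.Cfg, hs.cache, the new HooksService) instead of package-level globals. A minimal net/http sketch of the same pattern; names are illustrative, not Grafana's:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

type Server struct {
	version string // stands in for hs.Cfg, hs.cache, hs.HooksService, ...
}

// Handler as a method: it can use the server's injected dependencies.
func (s *Server) Index(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "grafana %s", s.version)
}

func (s *Server) registerRoutes(mux *http.ServeMux) {
	mux.HandleFunc("/", s.Index) // was: mux.HandleFunc("/", Index)
}

func main() {
	s := &Server{version: "5.4.0-pre1"}
	mux := http.NewServeMux()
	s.registerRoutes(mux)

	rec := httptest.NewRecorder()
	mux.ServeHTTP(rec, httptest.NewRequest("GET", "/", nil))
	fmt.Println(rec.Body.String()) // grafana 5.4.0-pre1
}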
datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceByID)) + datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceById)) datasourceRoute.Delete("/name/:name", Wrap(DeleteDataSourceByName)) - datasourceRoute.Get("/:id", Wrap(GetDataSourceByID)) + datasourceRoute.Get("/:id", Wrap(GetDataSourceById)) datasourceRoute.Get("/name/:name", Wrap(GetDataSourceByName)) }, reqOrgAdmin) - apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIDByName), reqSignedIn) + apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIdByName), reqSignedIn) apiRoute.Get("/plugins", Wrap(GetPluginList)) apiRoute.Get("/plugins/:pluginId/settings", Wrap(GetPluginSettingByID)) @@ -320,7 +320,7 @@ func (hs *HTTPServer) registerRoutes() { apiRoute.Get("/search/", Search) // metrics - apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics)) + apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(hs.QueryMetrics)) apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios)) apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData)) apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk)) diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go index 5becf90ca35..6cf164285bf 100644 --- a/pkg/api/avatar/avatar.go +++ b/pkg/api/avatar/avatar.go @@ -97,15 +97,6 @@ type CacheServer struct { cache *gocache.Cache } -func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) { - for _, k := range keys { - if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil { - defaultValue = v - } - } - return defaultValue -} - func (this *CacheServer) Handler(ctx *macaron.Context) { urlPath := ctx.Req.URL.Path hash := urlPath[strings.LastIndex(urlPath, "/")+1:] diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index c2ab6dd9a1a..02248334b9c 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -6,6 +6,7 @@ import ( "os" "path" + "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/api/dtos" @@ -22,6 +23,10 @@ import ( "github.com/grafana/grafana/pkg/util" ) +const ( + anonString = "Anonymous" +) + func isDashboardStarredByUser(c *m.ReqContext, dashID int64) (bool, error) { if !c.IsSignedIn { return false, nil @@ -64,7 +69,7 @@ func GetDashboard(c *m.ReqContext) Response { } // Finding creator and last updater of the dashboard - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if dash.UpdatedBy > 0 { updater = getUserLogin(dash.UpdatedBy) } @@ -128,7 +133,7 @@ func getUserLogin(userID int64) string { query := m.GetUserByIdQuery{Id: userID} err := bus.Dispatch(&query) if err != nil { - return "Anonymous" + return anonString } return query.Result.Login } @@ -247,8 +252,8 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response { return Error(403, err.Error(), err) } - if err == m.ErrDashboardContainsInvalidAlertData { - return Error(500, "Invalid alert data. 
Cannot save dashboard", err) + if validationErr, ok := err.(alerting.ValidationError); ok { + return Error(422, validationErr.Error(), nil) } if err != nil { @@ -403,7 +408,7 @@ func GetDashboardVersion(c *m.ReqContext) Response { return Error(500, fmt.Sprintf("Dashboard version %d not found for dashboardId %d", query.Version, dashID), err) } - creator := "Anonymous" + creator := anonString if query.Result.CreatedBy > 0 { creator = getUserLogin(query.Result.CreatedBy) } diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index 283a9b5f12c..2726623c242 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/setting" @@ -725,7 +726,7 @@ func TestDashboardApiEndpoint(t *testing.T) { {SaveError: m.ErrDashboardVersionMismatch, ExpectedStatusCode: 412}, {SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400}, {SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400}, - {SaveError: m.ErrDashboardContainsInvalidAlertData, ExpectedStatusCode: 500}, + {SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422}, {SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500}, {SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500}, {SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400}, diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go index 33839ca985d..3bb2f236129 100644 --- a/pkg/api/dataproxy.go +++ b/pkg/api/dataproxy.go @@ -2,6 +2,7 @@ package api import ( "fmt" + "github.com/pkg/errors" "time" "github.com/grafana/grafana/pkg/api/pluginproxy" @@ -13,19 +14,34 @@ import ( const HeaderNameNoBackendCache = "X-Grafana-NoCache" -func (hs *HTTPServer) getDatasourceByID(id int64, orgID int64, nocache bool) (*m.DataSource, error) { +func (hs *HTTPServer) getDatasourceFromCache(id int64, c *m.ReqContext) (*m.DataSource, error) { + userPermissionsQuery := m.GetDataSourcePermissionsForUserQuery{ + User: c.SignedInUser, + } + if err := bus.Dispatch(&userPermissionsQuery); err != nil { + if err != bus.ErrHandlerNotFound { + return nil, err + } + } else { + permissionType, exists := userPermissionsQuery.Result[id] + if exists && permissionType != m.DsPermissionQuery { + return nil, errors.New("User not allowed to access datasource") + } + } + + nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true" cacheKey := fmt.Sprintf("ds-%d", id) if !nocache { if cached, found := hs.cache.Get(cacheKey); found { ds := cached.(*m.DataSource) - if ds.OrgId == orgID { + if ds.OrgId == c.OrgId { return ds, nil } } } - query := m.GetDataSourceByIdQuery{Id: id, OrgId: orgID} + query := m.GetDataSourceByIdQuery{Id: id, OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { return nil, err } @@ -37,9 +53,8 @@ func (hs *HTTPServer) getDatasourceByID(id int64, orgID int64, nocache bool) (*m func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) { c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer) - nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true" - - ds, err := hs.getDatasourceByID(c.ParamsInt64(":id"), c.OrgId, nocache) + dsId := c.ParamsInt64(":id") + ds, err := hs.getDatasourceFromCache(dsId, c) if err != nil { c.JsonApiErr(500, "Unable to load datasource meta data", err) @@ -53,7 
+68,21 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) { return } - proxyPath := c.Params("*") + // macaron does not include trailing slashes when resolving a wildcard path + proxyPath := ensureProxyPathTrailingSlash(c.Req.URL.Path, c.Params("*")) + proxy := pluginproxy.NewDataSourceProxy(ds, plugin, c, proxyPath) proxy.HandleRequest() } + +// ensureProxyPathTrailingSlash Check for a trailing slash in original path and makes +// sure that a trailing slash is added to proxy path, if not already exists. +func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string { + if len(proxyPath) > 1 { + if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' { + return proxyPath + "/" + } + } + + return proxyPath +} diff --git a/pkg/api/dataproxy_test.go b/pkg/api/dataproxy_test.go new file mode 100644 index 00000000000..a1d7cf68a37 --- /dev/null +++ b/pkg/api/dataproxy_test.go @@ -0,0 +1,19 @@ +package api + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestDataProxy(t *testing.T) { + Convey("Data proxy test", t, func() { + Convey("Should append trailing slash to proxy path if original path has a trailing slash", func() { + So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range/", "api/v1/query_range/"), ShouldEqual, "api/v1/query_range/") + }) + + Convey("Should not append trailing slash to proxy path if original path doesn't have a trailing slash", func() { + So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range", "api/v1/query_range"), ShouldEqual, "api/v1/query_range") + }) + }) +} diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go index 23dbb221d71..e7614614076 100644 --- a/pkg/api/datasources.go +++ b/pkg/api/datasources.go @@ -20,8 +20,8 @@ func GetDataSources(c *m.ReqContext) Response { result := make(dtos.DataSourceList, 0) for _, ds := range query.Result { dsItem := dtos.DataSourceListItemDTO{ - Id: ds.Id, OrgId: ds.OrgId, + Id: ds.Id, Name: ds.Name, Url: ds.Url, Type: ds.Type, @@ -49,7 +49,7 @@ func GetDataSources(c *m.ReqContext) Response { return JSON(200, &result) } -func GetDataSourceByID(c *m.ReqContext) Response { +func GetDataSourceById(c *m.ReqContext) Response { query := m.GetDataSourceByIdQuery{ Id: c.ParamsInt64(":id"), OrgId: c.OrgId, @@ -68,14 +68,14 @@ func GetDataSourceByID(c *m.ReqContext) Response { return JSON(200, &dtos) } -func DeleteDataSourceByID(c *m.ReqContext) Response { +func DeleteDataSourceById(c *m.ReqContext) Response { id := c.ParamsInt64(":id") if id <= 0 { return Error(400, "Missing valid datasource id", nil) } - ds, err := getRawDataSourceByID(id, c.OrgId) + ds, err := getRawDataSourceById(id, c.OrgId) if err != nil { return Error(400, "Failed to delete datasource", nil) } @@ -186,7 +186,7 @@ func fillWithSecureJSONData(cmd *m.UpdateDataSourceCommand) error { return nil } - ds, err := getRawDataSourceByID(cmd.Id, cmd.OrgId) + ds, err := getRawDataSourceById(cmd.Id, cmd.OrgId) if err != nil { return err } @@ -206,7 +206,7 @@ func fillWithSecureJSONData(cmd *m.UpdateDataSourceCommand) error { return nil } -func getRawDataSourceByID(id int64, orgID int64) (*m.DataSource, error) { +func getRawDataSourceById(id int64, orgID int64) (*m.DataSource, error) { query := m.GetDataSourceByIdQuery{ Id: id, OrgId: orgID, @@ -236,7 +236,7 @@ func GetDataSourceByName(c *m.ReqContext) Response { } // Get /api/datasources/id/:name -func GetDataSourceIDByName(c *m.ReqContext) Response { +func GetDataSourceIdByName(c *m.ReqContext) 
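Note on ensureProxyPathTrailingSlash: macaron resolves the "*" wildcard without a trailing slash, and some proxied APIs route /path and /path/ differently. Reproducing the helper verbatim so the behavior can be checked stand-alone:

package main

import "fmt"

// The helper from the hunk above, copied as-is.
func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string {
	if len(proxyPath) > 1 {
		if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' {
			return proxyPath + "/"
		}
	}
	return proxyPath
}

func main() {
	// The original request ended in "/", but c.Params("*") dropped it.
	fmt.Println(ensureProxyPathTrailingSlash(
		"/api/datasources/proxy/6/api/v1/query_range/",
		"api/v1/query_range")) // api/v1/query_range/
}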
Response { query := m.GetDataSourceByNameQuery{Name: c.Params(":name"), OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { diff --git a/pkg/api/dtos/alerting.go b/pkg/api/dtos/alerting.go index d30f2697f3f..c037831f341 100644 --- a/pkg/api/dtos/alerting.go +++ b/pkg/api/dtos/alerting.go @@ -1,35 +1,78 @@ package dtos import ( + "fmt" "time" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" ) type AlertRule struct { - Id int64 `json:"id"` - DashboardId int64 `json:"dashboardId"` - PanelId int64 `json:"panelId"` - Name string `json:"name"` - Message string `json:"message"` - State m.AlertStateType `json:"state"` - NewStateDate time.Time `json:"newStateDate"` - EvalDate time.Time `json:"evalDate"` - EvalData *simplejson.Json `json:"evalData"` - ExecutionError string `json:"executionError"` - Url string `json:"url"` - CanEdit bool `json:"canEdit"` + Id int64 `json:"id"` + DashboardId int64 `json:"dashboardId"` + PanelId int64 `json:"panelId"` + Name string `json:"name"` + Message string `json:"message"` + State models.AlertStateType `json:"state"` + NewStateDate time.Time `json:"newStateDate"` + EvalDate time.Time `json:"evalDate"` + EvalData *simplejson.Json `json:"evalData"` + ExecutionError string `json:"executionError"` + Url string `json:"url"` + CanEdit bool `json:"canEdit"` +} + +func formatShort(interval time.Duration) string { + var result string + + hours := interval / time.Hour + if hours > 0 { + result += fmt.Sprintf("%dh", hours) + } + + remaining := interval - (hours * time.Hour) + mins := remaining / time.Minute + if mins > 0 { + result += fmt.Sprintf("%dm", mins) + } + + remaining = remaining - (mins * time.Minute) + seconds := remaining / time.Second + if seconds > 0 { + result += fmt.Sprintf("%ds", seconds) + } + + return result +} + +func NewAlertNotification(notification *models.AlertNotification) *AlertNotification { + return &AlertNotification{ + Id: notification.Id, + Name: notification.Name, + Type: notification.Type, + IsDefault: notification.IsDefault, + Created: notification.Created, + Updated: notification.Updated, + Frequency: formatShort(notification.Frequency), + SendReminder: notification.SendReminder, + DisableResolveMessage: notification.DisableResolveMessage, + Settings: notification.Settings, + } } type AlertNotification struct { - Id int64 `json:"id"` - Name string `json:"name"` - Type string `json:"type"` - IsDefault bool `json:"isDefault"` - Created time.Time `json:"created"` - Updated time.Time `json:"updated"` + Id int64 `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + IsDefault bool `json:"isDefault"` + SendReminder bool `json:"sendReminder"` + DisableResolveMessage bool `json:"disableResolveMessage"` + Frequency string `json:"frequency"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` + Settings *simplejson.Json `json:"settings"` } type AlertTestCommand struct { @@ -39,7 +82,7 @@ type AlertTestCommand struct { type AlertTestResult struct { Firing bool `json:"firing"` - State m.AlertStateType `json:"state"` + State models.AlertStateType `json:"state"` ConditionEvals string `json:"conditionEvals"` TimeMs string `json:"timeMs"` Error string `json:"error,omitempty"` @@ -59,9 +102,12 @@ type EvalMatch struct { } type NotificationTestCommand struct { - Name string `json:"name"` - Type string `json:"type"` - Settings *simplejson.Json `json:"settings"` + Name string 
`json:"name"` + Type string `json:"type"` + SendReminder bool `json:"sendReminder"` + DisableResolveMessage bool `json:"disableResolveMessage"` + Frequency string `json:"frequency"` + Settings *simplejson.Json `json:"settings"` } type PauseAlertCommand struct { diff --git a/pkg/api/dtos/alerting_test.go b/pkg/api/dtos/alerting_test.go new file mode 100644 index 00000000000..f4c09f202cb --- /dev/null +++ b/pkg/api/dtos/alerting_test.go @@ -0,0 +1,35 @@ +package dtos + +import ( + "testing" + "time" +) + +func TestFormatShort(t *testing.T) { + tcs := []struct { + interval time.Duration + expected string + }{ + {interval: time.Hour, expected: "1h"}, + {interval: time.Hour + time.Minute, expected: "1h1m"}, + {interval: (time.Hour * 10) + time.Minute, expected: "10h1m"}, + {interval: (time.Hour * 10) + (time.Minute * 10) + time.Second, expected: "10h10m1s"}, + {interval: time.Minute * 10, expected: "10m"}, + } + + for _, tc := range tcs { + got := formatShort(tc.interval) + if got != tc.expected { + t.Errorf("expected %s got %s interval: %v", tc.expected, got, tc.interval) + } + + parsed, err := time.ParseDuration(tc.expected) + if err != nil { + t.Fatalf("could not parse expected duration") + } + + if parsed != tc.interval { + t.Errorf("expects the parsed duration to equal the interval. Got %v expected: %v", parsed, tc.interval) + } + } +} diff --git a/pkg/api/folder.go b/pkg/api/folder.go index f0cdff24d20..0e08343b556 100644 --- a/pkg/api/folder.go +++ b/pkg/api/folder.go @@ -95,7 +95,7 @@ func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder { canAdmin, _ := g.CanAdmin() // Finding creator and last updater of the folder - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if folder.CreatedBy > 0 { creator = getUserLogin(folder.CreatedBy) } diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 6e24e432535..880de338c8f 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -133,16 +133,6 @@ func TestFoldersApiEndpoint(t *testing.T) { }) } -func callGetFolderByUID(sc *scenarioContext) { - sc.handlerFunc = GetFolderByUID - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() -} - -func callDeleteFolder(sc *scenarioContext) { - sc.handlerFunc = DeleteFolder - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() -} - func callCreateFolder(sc *scenarioContext) { sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() } diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index da3c88566c1..43fa0c858fc 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -22,7 +22,20 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { return nil, err } - orgDataSources = query.Result + dsFilterQuery := m.DatasourcesPermissionFilterQuery{ + User: c.SignedInUser, + Datasources: query.Result, + } + + if err := bus.Dispatch(&dsFilterQuery); err != nil { + if err != bus.ErrHandlerNotFound { + return nil, err + } + + orgDataSources = query.Result + } else { + orgDataSources = dsFilterQuery.Result + } } datasources := make(map[string]interface{}) @@ -132,20 +145,22 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { } jsonObj := map[string]interface{}{ - "defaultDatasource": defaultDatasource, - "datasources": datasources, - "panels": panels, - "appSubUrl": setting.AppSubUrl, - "allowOrgCreate": (setting.AllowUserOrgCreate && c.IsSignedIn) || c.IsGrafanaAdmin, - "authProxyEnabled": 
setting.AuthProxyEnabled, - "ldapEnabled": setting.LdapEnabled, - "alertingEnabled": setting.AlertingEnabled, - "exploreEnabled": setting.ExploreEnabled, - "googleAnalyticsId": setting.GoogleAnalyticsId, - "disableLoginForm": setting.DisableLoginForm, - "externalUserMngInfo": setting.ExternalUserMngInfo, - "externalUserMngLinkUrl": setting.ExternalUserMngLinkUrl, - "externalUserMngLinkName": setting.ExternalUserMngLinkName, + "defaultDatasource": defaultDatasource, + "datasources": datasources, + "panels": panels, + "appSubUrl": setting.AppSubUrl, + "allowOrgCreate": (setting.AllowUserOrgCreate && c.IsSignedIn) || c.IsGrafanaAdmin, + "authProxyEnabled": setting.AuthProxyEnabled, + "ldapEnabled": setting.LdapEnabled, + "alertingEnabled": setting.AlertingEnabled, + "alertingErrorOrTimeout": setting.AlertingErrorOrTimeout, + "alertingNoDataOrNullValues": setting.AlertingNoDataOrNullValues, + "exploreEnabled": setting.ExploreEnabled, + "googleAnalyticsId": setting.GoogleAnalyticsId, + "disableLoginForm": setting.DisableLoginForm, + "externalUserMngInfo": setting.ExternalUserMngInfo, + "externalUserMngLinkUrl": setting.ExternalUserMngLinkUrl, + "externalUserMngLinkName": setting.ExternalUserMngLinkName, "buildInfo": map[string]interface{}{ "version": setting.BuildVersion, "commit": setting.BuildCommit, diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 0de63ce5e08..858b3c5a8c5 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -28,6 +28,7 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/hooks" "github.com/grafana/grafana/pkg/services/rendering" "github.com/grafana/grafana/pkg/setting" ) @@ -52,6 +53,7 @@ type HTTPServer struct { Bus bus.Bus `inject:""` RenderService rendering.Service `inject:""` Cfg *setting.Cfg `inject:""` + HooksService *hooks.HooksService `inject:""` } func (hs *HTTPServer) Init() error { @@ -184,7 +186,7 @@ func (hs *HTTPServer) applyRoutes() { // then custom app proxy routes hs.initAppPluginRoutes(hs.macaron) // lastly not found route - hs.macaron.NotFound(NotFoundHandler) + hs.macaron.NotFound(hs.NotFoundHandler) } func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { @@ -233,6 +235,10 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { } func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) { + if !hs.Cfg.MetricsEndpointEnabled { + return + } + if ctx.Req.Method != "GET" || ctx.Req.URL.Path != "/metrics" { return } diff --git a/pkg/api/index.go b/pkg/api/index.go index ea10940d3ba..9f867d51cad 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -11,7 +11,13 @@ import ( "github.com/grafana/grafana/pkg/setting" ) -func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { +const ( + // Themes + lightName = "light" + darkName = "dark" +) + +func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { settings, err := getFrontendSettingsMap(c) if err != nil { return nil, err @@ -60,7 +66,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { OrgRole: c.OrgRole, GravatarUrl: dtos.GetGravatarUrl(c.Email), IsGrafanaAdmin: c.IsGrafanaAdmin, - LightTheme: prefs.Theme == "light", + LightTheme: prefs.Theme == lightName, Timezone: prefs.Timezone, Locale: locale, HelpFlags1: c.HelpFlags1, @@ -88,9 +94,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { } themeURLParam := c.Query("theme") - if themeURLParam == "light" { + if 
themeURLParam == lightName { data.User.LightTheme = true - data.Theme = "light" + data.Theme = lightName + } else if themeURLParam == darkName { + data.User.LightTheme = false + data.Theme = darkName } if hasEditPermissionInFoldersQuery.Result { @@ -341,11 +350,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { }, }) + hs.HooksService.RunIndexDataHooks(&data) return &data, nil } -func Index(c *m.ReqContext) { - data, err := setIndexViewData(c) +func (hs *HTTPServer) Index(c *m.ReqContext) { + data, err := hs.setIndexViewData(c) if err != nil { c.Handle(500, "Failed to get settings", err) return @@ -353,13 +363,13 @@ func Index(c *m.ReqContext) { c.HTML(200, "index", data) } -func NotFoundHandler(c *m.ReqContext) { +func (hs *HTTPServer) NotFoundHandler(c *m.ReqContext) { if c.IsApiRequest() { c.JsonApiErr(404, "Not found", nil) return } - data, err := setIndexViewData(c) + data, err := hs.setIndexViewData(c) if err != nil { c.Handle(500, "Failed to get settings", err) return diff --git a/pkg/api/live/conn.go b/pkg/api/live/conn.go index f2a041d7631..0fae7f75b73 100644 --- a/pkg/api/live/conn.go +++ b/pkg/api/live/conn.go @@ -70,7 +70,7 @@ func (c *connection) readPump() { func (c *connection) handleMessage(message []byte) { json, err := simplejson.NewJson(message) if err != nil { - log.Error(3, "Unreadable message on websocket channel:", err) + log.Error(3, "Unreadable message on websocket channel. error: %v", err) } msgType := json.Get("action").MustString() diff --git a/pkg/api/live/hub.go b/pkg/api/live/hub.go index 37ab5667e55..9708bc515d1 100644 --- a/pkg/api/live/hub.go +++ b/pkg/api/live/hub.go @@ -37,9 +37,6 @@ func newHub() *hub { } } -func (h *hub) removeConnection() { -} - func (h *hub) run(ctx context.Context) { for { select { diff --git a/pkg/api/login.go b/pkg/api/login.go index 632d04e37f1..1083f89adfd 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -17,8 +17,8 @@ const ( ViewIndex = "index" ) -func LoginView(c *m.ReqContext) { - viewData, err := setIndexViewData(c) +func (hs *HTTPServer) LoginView(c *m.ReqContext) { + viewData, err := hs.setIndexViewData(c) if err != nil { c.Handle(500, "Failed to get settings", err) return diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index f2bc79df7ad..cb80bd346b8 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -13,21 +13,21 @@ import ( ) // POST /api/tsdb/query -func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { +func (hs *HTTPServer) QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To) if len(reqDto.Queries) == 0 { return Error(400, "No queries found in query", nil) } - dsID, err := reqDto.Queries[0].Get("datasourceId").Int64() + datasourceId, err := reqDto.Queries[0].Get("datasourceId").Int64() if err != nil { return Error(400, "Query missing datasourceId", nil) } - dsQuery := m.GetDataSourceByIdQuery{Id: dsID, OrgId: c.OrgId} - if err := bus.Dispatch(&dsQuery); err != nil { - return Error(500, "failed to fetch data source", err) + ds, err := hs.getDatasourceFromCache(datasourceId, c) + if err != nil { + return Error(500, "Unable to load datasource meta data", err) } request := &tsdb.TsdbQuery{TimeRange: timeRange} @@ -38,11 +38,11 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { MaxDataPoints: query.Get("maxDataPoints").MustInt64(100), IntervalMs: query.Get("intervalMs").MustInt64(1000), Model: query, - DataSource: dsQuery.Result, + DataSource: ds, }) } - resp, err 
:= tsdb.HandleRequest(context.Background(), dsQuery.Result, request) + resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request) if err != nil { return Error(500, "Metric request error", err) } diff --git a/pkg/api/org_users.go b/pkg/api/org_users.go index 4e2ed36431e..e750662c764 100644 --- a/pkg/api/org_users.go +++ b/pkg/api/org_users.go @@ -45,7 +45,7 @@ func addOrgUserHelper(cmd m.AddOrgUserCommand) Response { // GET /api/org/users func GetOrgUsersForCurrentOrg(c *m.ReqContext) Response { - return getOrgUsersHelper(c.OrgId, c.Params("query"), c.ParamsInt("limit")) + return getOrgUsersHelper(c.OrgId, c.Query("query"), c.QueryInt("limit")) } // GET /api/orgs/:orgId/users @@ -102,26 +102,32 @@ func updateOrgUserHelper(cmd m.UpdateOrgUserCommand) Response { // DELETE /api/org/users/:userId func RemoveOrgUserForCurrentOrg(c *m.ReqContext) Response { - userID := c.ParamsInt64(":userId") - return removeOrgUserHelper(c.OrgId, userID) + return removeOrgUserHelper(&m.RemoveOrgUserCommand{ + UserId: c.ParamsInt64(":userId"), + OrgId: c.OrgId, + ShouldDeleteOrphanedUser: true, + }) } // DELETE /api/orgs/:orgId/users/:userId func RemoveOrgUser(c *m.ReqContext) Response { - userID := c.ParamsInt64(":userId") - orgID := c.ParamsInt64(":orgId") - return removeOrgUserHelper(orgID, userID) + return removeOrgUserHelper(&m.RemoveOrgUserCommand{ + UserId: c.ParamsInt64(":userId"), + OrgId: c.ParamsInt64(":orgId"), + }) } -func removeOrgUserHelper(orgID int64, userID int64) Response { - cmd := m.RemoveOrgUserCommand{OrgId: orgID, UserId: userID} - - if err := bus.Dispatch(&cmd); err != nil { +func removeOrgUserHelper(cmd *m.RemoveOrgUserCommand) Response { + if err := bus.Dispatch(cmd); err != nil { if err == m.ErrLastOrgAdmin { return Error(400, "Cannot remove last organization admin", nil) } return Error(500, "Failed to remove user from organization", err) } + if cmd.UserWasDeleted { + return Success("User deleted") + } + return Success("User removed from organization") } diff --git a/pkg/api/pluginproxy/access_token_provider.go b/pkg/api/pluginproxy/access_token_provider.go new file mode 100644 index 00000000000..22407823ff9 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider.go @@ -0,0 +1,171 @@ +package pluginproxy + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "sync" + "time" + + "golang.org/x/oauth2" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "golang.org/x/oauth2/jwt" +) + +var ( + tokenCache = tokenCacheType{ + cache: map[string]*jwtToken{}, + } + oauthJwtTokenCache = oauthJwtTokenCacheType{ + cache: map[string]*oauth2.Token{}, + } +) + +type tokenCacheType struct { + cache map[string]*jwtToken + sync.Mutex +} + +type oauthJwtTokenCacheType struct { + cache map[string]*oauth2.Token + sync.Mutex +} + +type accessTokenProvider struct { + route *plugins.AppPluginRoute + datasourceId int64 + datasourceVersion int +} + +type jwtToken struct { + ExpiresOn time.Time `json:"-"` + ExpiresOnString string `json:"expires_on"` + AccessToken string `json:"access_token"` +} + +func newAccessTokenProvider(ds *models.DataSource, pluginRoute *plugins.AppPluginRoute) *accessTokenProvider { + return &accessTokenProvider{ + datasourceId: ds.Id, + datasourceVersion: ds.Version, + route: pluginRoute, + } +} + +func (provider *accessTokenProvider) getAccessToken(data templateData) (string, error) { + tokenCache.Lock() + defer tokenCache.Unlock() + if cachedToken, found := 
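Note on the metrics.go hunk just above: passing c.Req.Context() instead of context.Background() into tsdb.HandleRequest ties the TSDB query's lifetime to the HTTP request, so a client that disconnects (or times out) cancels in-flight work. A minimal illustration of that cancellation behavior:

package main

import (
	"context"
	"fmt"
	"time"
)

// handleRequest stands in for a downstream query that honors ctx.
func handleRequest(ctx context.Context) error {
	select {
	case <-time.After(5 * time.Second): // pretend this is the TSDB query
		return nil
	case <-ctx.Done(): // client went away or the request timed out
		return ctx.Err()
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
	defer cancel()
	fmt.Println(handleRequest(ctx)) // context deadline exceeded
}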
tokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { + logger.Info("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + urlInterpolated, err := interpolateString(provider.route.TokenAuth.Url, data) + if err != nil { + return "", err + } + + params := make(url.Values) + for key, value := range provider.route.TokenAuth.Params { + interpolatedParam, err := interpolateString(value, data) + if err != nil { + return "", err + } + params.Add(key, interpolatedParam) + } + + getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) + getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") + getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) + + resp, err := client.Do(getTokenReq) + if err != nil { + return "", err + } + + defer resp.Body.Close() + + var token jwtToken + if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { + return "", err + } + + expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) + token.ExpiresOn = time.Unix(expiresOnEpoch, 0) + tokenCache.cache[provider.getAccessTokenCacheKey()] = &token + + logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) + + return token.AccessToken, nil +} + +func (provider *accessTokenProvider) getJwtAccessToken(ctx context.Context, data templateData) (string, error) { + oauthJwtTokenCache.Lock() + defer oauthJwtTokenCache.Unlock() + if cachedToken, found := oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.Expiry.After(time.Now().Add(time.Second * 10)) { + logger.Debug("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + conf := &jwt.Config{} + + if val, ok := provider.route.JwtTokenAuth.Params["client_email"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.Email = interpolatedVal + } + + if val, ok := provider.route.JwtTokenAuth.Params["private_key"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.PrivateKey = []byte(interpolatedVal) + } + + if val, ok := provider.route.JwtTokenAuth.Params["token_uri"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.TokenURL = interpolatedVal + } + + conf.Scopes = provider.route.JwtTokenAuth.Scopes + + token, err := getTokenSource(conf, ctx) + if err != nil { + return "", err + } + + oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()] = token + + logger.Info("Got new access token", "ExpiresOn", token.Expiry) + + return token.AccessToken, nil +} + +var getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + tokenSrc := conf.TokenSource(ctx) + token, err := tokenSrc.Token() + if err != nil { + return nil, err + } + + return token, nil +} + +func (provider *accessTokenProvider) getAccessTokenCacheKey() string { + return fmt.Sprintf("%v_%v_%v_%v", provider.datasourceId, provider.datasourceVersion, provider.route.Path, provider.route.Method) +} diff --git a/pkg/api/pluginproxy/access_token_provider_test.go b/pkg/api/pluginproxy/access_token_provider_test.go new file mode 100644 index 00000000000..e75748e4660 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider_test.go @@ -0,0 +1,94 @@ +package pluginproxy + +import ( + "context" + "testing" + "time" + + "github.com/grafana/grafana/pkg/models" + 
"github.com/grafana/grafana/pkg/plugins" + . "github.com/smartystreets/goconvey/convey" + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" +) + +func TestAccessToken(t *testing.T) { + Convey("Plugin with JWT token auth route", t, func() { + pluginRoute := &plugins.AppPluginRoute{ + Path: "pathwithjwttoken1", + Url: "https://api.jwt.io/some/path", + Method: "GET", + JwtTokenAuth: &plugins.JwtTokenAuth{ + Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token", + Scopes: []string{ + "https://www.testapi.com/auth/monitoring.read", + "https://www.testapi.com/auth/cloudplatformprojects.readonly", + }, + Params: map[string]string{ + "token_uri": "{{.JsonData.tokenUri}}", + "client_email": "{{.JsonData.clientEmail}}", + "private_key": "{{.SecureJsonData.privateKey}}", + }, + }, + } + + templateData := templateData{ + JsonData: map[string]interface{}{ + "clientEmail": "test@test.com", + "tokenUri": "login.url.com/token", + }, + SecureJsonData: map[string]string{ + "privateKey": "testkey", + }, + } + + ds := &models.DataSource{Id: 1, Version: 2} + + Convey("should fetch token using jwt private key", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "abc"}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + + So(token, ShouldEqual, "abc") + }) + + Convey("should set jwt config values", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + So(conf.Email, ShouldEqual, "test@test.com") + So(conf.PrivateKey, ShouldResemble, []byte("testkey")) + So(len(conf.Scopes), ShouldEqual, 2) + So(conf.Scopes[0], ShouldEqual, "https://www.testapi.com/auth/monitoring.read") + So(conf.Scopes[1], ShouldEqual, "https://www.testapi.com/auth/cloudplatformprojects.readonly") + So(conf.TokenURL, ShouldEqual, "login.url.com/token") + + return &oauth2.Token{AccessToken: "abc"}, nil + } + + provider := newAccessTokenProvider(ds, pluginRoute) + _, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + }) + + Convey("should use cached token on second call", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{ + AccessToken: "abc", + Expiry: time.Now().Add(1 * time.Minute)}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token1, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token1, ShouldEqual, "abc") + + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "error: cache not used"}, nil + } + token2, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token2, ShouldEqual, "abc") + }) + }) +} diff --git a/pkg/api/pluginproxy/ds_auth_provider.go b/pkg/api/pluginproxy/ds_auth_provider.go new file mode 100644 index 00000000000..c68da839d13 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider.go @@ -0,0 +1,93 @@ +package pluginproxy + +import ( + "bytes" + "context" + "fmt" + "net/http" + "net/url" + "strings" + "text/template" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/util" +) + +//ApplyRoute should use the plugin route data to set auth headers and custom headers +func ApplyRoute(ctx context.Context, req 
*http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *m.DataSource) { + proxyPath = strings.TrimPrefix(proxyPath, route.Path) + + data := templateData{ + JsonData: ds.JsonData.Interface().(map[string]interface{}), + SecureJsonData: ds.SecureJsonData.Decrypt(), + } + + interpolatedURL, err := interpolateString(route.Url, data) + if err != nil { + logger.Error("Error interpolating proxy url", "error", err) + return + } + + routeURL, err := url.Parse(interpolatedURL) + if err != nil { + logger.Error("Error parsing plugin route url", "error", err) + return + } + + req.URL.Scheme = routeURL.Scheme + req.URL.Host = routeURL.Host + req.Host = routeURL.Host + req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxyPath) + + if err := addHeaders(&req.Header, route, data); err != nil { + logger.Error("Failed to render plugin headers", "error", err) + } + + tokenProvider := newAccessTokenProvider(ds, route) + + if route.TokenAuth != nil { + if token, err := tokenProvider.getAccessToken(data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + + if route.JwtTokenAuth != nil { + if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + logger.Info("Requesting", "url", req.URL.String()) + +} + +func interpolateString(text string, data templateData) (string, error) { + t, err := template.New("content").Parse(text) + if err != nil { + return "", fmt.Errorf("could not parse template %s", text) + } + + var contentBuf bytes.Buffer + err = t.Execute(&contentBuf, data) + if err != nil { + return "", fmt.Errorf("failed to execute template %s", text) + } + + return contentBuf.String(), nil +} + +func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { + for _, header := range route.Headers { + interpolated, err := interpolateString(header.Content, data) + if err != nil { + return err + } + reqHeaders.Add(header.Name, interpolated) + } + + return nil +} diff --git a/pkg/api/pluginproxy/ds_auth_provider_test.go b/pkg/api/pluginproxy/ds_auth_provider_test.go new file mode 100644 index 00000000000..9bd98a339e5 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider_test.go @@ -0,0 +1,21 @@ +package pluginproxy + +import ( + "testing" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestDsAuthProvider(t *testing.T) { + Convey("When interpolating string", t, func() { + data := templateData{ + SecureJsonData: map[string]string{ + "Test": "0asd+asd", + }, + } + + interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data) + So(err, ShouldBeNil) + So(interpolated, ShouldEqual, "0asd+asd") + }) +} diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index fb2cab9b9b1..0c000058e4b 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -2,7 +2,6 @@ package pluginproxy import ( "bytes" - "encoding/json" "errors" "fmt" "io/ioutil" @@ -12,7 +11,6 @@ import ( "net/url" "strconv" "strings" - "text/template" "time" "github.com/opentracing/opentracing-go" @@ -25,17 +23,10 @@ import ( ) var ( - logger = log.New("data-proxy-log") - tokenCache = map[string]*jwtToken{} - client = newHTTPClient() + logger = log.New("data-proxy-log") + client = newHTTPClient() ) -type jwtToken struct { - ExpiresOn time.Time `json:"-"` - ExpiresOnString string `json:"expires_on"` - AccessToken string `json:"access_token"` -} - type DataSourceProxy struct { ds *m.DataSource ctx *m.ReqContext @@ -162,7 +153,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } else { req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath) } - if proxy.ds.BasicAuth { req.Header.Del("Authorization") req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword)) @@ -219,7 +209,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } if proxy.route != nil { - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) } } } @@ -311,120 +301,3 @@ func checkWhiteList(c *m.ReqContext, host string) bool { return true } - -func (proxy *DataSourceProxy) applyRoute(req *http.Request) { - proxy.proxyPath = strings.TrimPrefix(proxy.proxyPath, proxy.route.Path) - - data := templateData{ - JsonData: proxy.ds.JsonData.Interface().(map[string]interface{}), - SecureJsonData: proxy.ds.SecureJsonData.Decrypt(), - } - - interpolatedURL, err := interpolateString(proxy.route.Url, data) - if err != nil { - logger.Error("Error interpolating proxy url", "error", err) - return - } - - routeURL, err := url.Parse(interpolatedURL) - if err != nil { - logger.Error("Error parsing plugin route url", "error", err) - return - } - - req.URL.Scheme = routeURL.Scheme - req.URL.Host = routeURL.Host - req.Host = routeURL.Host - req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxy.proxyPath) - - if err := addHeaders(&req.Header, proxy.route, data); err != nil { - logger.Error("Failed to render plugin headers", "error", err) - } - - if proxy.route.TokenAuth != nil { - if token, err := proxy.getAccessToken(data); err != nil { - logger.Error("Failed to get access token", "error", err) - } else { - req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) - } - } - - logger.Info("Requesting", "url", req.URL.String()) -} - -func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) { - if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found { - if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { - logger.Info("Using token from cache") - return cachedToken.AccessToken, nil - } - } - - urlInterpolated, err := interpolateString(proxy.route.TokenAuth.Url, data) - if err != nil { - return "", err - } - - params := make(url.Values) - 
for key, value := range proxy.route.TokenAuth.Params { - interpolatedParam, err := interpolateString(value, data) - if err != nil { - return "", err - } - params.Add(key, interpolatedParam) - } - - getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) - getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") - getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) - - resp, err := client.Do(getTokenReq) - if err != nil { - return "", err - } - - defer resp.Body.Close() - - var token jwtToken - if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { - return "", err - } - - expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) - token.ExpiresOn = time.Unix(expiresOnEpoch, 0) - tokenCache[proxy.getAccessTokenCacheKey()] = &token - - logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) - return token.AccessToken, nil -} - -func (proxy *DataSourceProxy) getAccessTokenCacheKey() string { - return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method) -} - -func interpolateString(text string, data templateData) (string, error) { - t, err := template.New("content").Parse(text) - if err != nil { - return "", fmt.Errorf("could not parse template %s", text) - } - - var contentBuf bytes.Buffer - err = t.Execute(&contentBuf, data) - if err != nil { - return "", fmt.Errorf("failed to execute template %s", text) - } - - return contentBuf.String(), nil -} - -func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { - for _, header := range route.Headers { - interpolated, err := interpolateString(header.Content, data) - if err != nil { - return err - } - reqHeaders.Add(header.Name, interpolated) - } - - return nil -} diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index e6d05872787..ab0effb298f 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -83,7 +83,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method") proxy.route = plugin.Routes[0] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and update url", func() { So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method") @@ -94,7 +94,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path and has dynamic url", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method") proxy.route = plugin.Routes[3] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and interpolate the url", func() { So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method") @@ -188,7 +188,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json) proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy1.route = plugin.Routes[0] - proxy1.applyRoute(req) + ApplyRoute(proxy1.ctx.Req.Context(), req, proxy1.proxyPath, proxy1.route, proxy1.ds) authorizationHeaderCall1 = req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -202,7 +202,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json2) proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2") proxy2.route = plugin.Routes[1] - 
proxy2.applyRoute(req) + ApplyRoute(proxy2.ctx.Req.Context(), req, proxy2.proxyPath, proxy2.route, proxy2.ds) authorizationHeaderCall2 = req.Header.Get("Authorization") @@ -217,7 +217,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient([]byte{}) proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy3.route = plugin.Routes[0] - proxy3.applyRoute(req) + ApplyRoute(proxy3.ctx.Req.Context(), req, proxy3.proxyPath, proxy3.route, proxy3.ds) authorizationHeaderCall3 := req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -331,18 +331,6 @@ func TestDSRouteRule(t *testing.T) { }) }) - Convey("When interpolating string", func() { - data := templateData{ - SecureJsonData: map[string]string{ - "Test": "0asd+asd", - }, - } - - interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data) - So(err, ShouldBeNil) - So(interpolated, ShouldEqual, "0asd+asd") - }) - Convey("When proxying a data source with custom headers specified", func() { plugin := &plugins.DataSourcePlugin{} @@ -374,6 +362,23 @@ func TestDSRouteRule(t *testing.T) { }) }) + Convey("When proxying a custom datasource", func() { + plugin := &plugins.DataSourcePlugin{} + ds := &m.DataSource{ + Type: "custom-datasource", + Url: "http://host/root/", + } + ctx := &m.ReqContext{} + proxy := NewDataSourceProxy(ds, plugin, ctx, "/path/to/folder/") + req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil) + So(err, ShouldBeNil) + + proxy.getDirector()(req) + + Convey("Should keep user request (including trailing slash)", func() { + So(req.URL.String(), ShouldEqual, "http://host/root/path/to/folder/") + }) + }) }) } diff --git a/pkg/api/render.go b/pkg/api/render.go index b8ef6cc5cb6..cf672af9bea 100644 --- a/pkg/api/render.go +++ b/pkg/api/render.go @@ -41,15 +41,16 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) { } result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{ - Width: width, - Height: height, - Timeout: time.Duration(timeout) * time.Second, - OrgId: c.OrgId, - UserId: c.UserId, - OrgRole: c.OrgRole, - Path: c.Params("*") + queryParams, - Timezone: queryReader.Get("tz", ""), - Encoding: queryReader.Get("encoding", ""), + Width: width, + Height: height, + Timeout: time.Duration(timeout) * time.Second, + OrgId: c.OrgId, + UserId: c.UserId, + OrgRole: c.OrgRole, + Path: c.Params("*") + queryParams, + Timezone: queryReader.Get("tz", ""), + Encoding: queryReader.Get("encoding", ""), + ConcurrentLimit: 30, }) if err != nil && err == rendering.ErrTimeout { diff --git a/pkg/api/team_members.go b/pkg/api/team_members.go index 60a170a8c31..5b5970de6ad 100644 --- a/pkg/api/team_members.go +++ b/pkg/api/team_members.go @@ -4,6 +4,7 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -17,6 +18,11 @@ func GetTeamMembers(c *m.ReqContext) Response { for _, member := range query.Result { member.AvatarUrl = dtos.GetGravatarUrl(member.Email) + member.Labels = []string{} + + if setting.IsEnterprise && setting.LdapEnabled && member.External { + member.Labels = append(member.Labels, "LDAP") + } } return JSON(200, query.Result) diff --git a/pkg/api/user.go b/pkg/api/user.go index 4b916202e65..7116ad83f3f 100644 --- a/pkg/api/user.go +++ b/pkg/api/user.go @@ -177,17 +177,17 @@ func UserSetUsingOrg(c *m.ReqContext) Response { } // GET /profile/switch-org/:id -func 
ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) { +func (hs *HTTPServer) ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) { orgID := c.ParamsInt64(":id") if !validateUsingOrg(c.UserId, orgID) { - NotFoundHandler(c) + hs.NotFoundHandler(c) } cmd := m.SetUsingOrgCommand{UserId: c.UserId, OrgId: orgID} if err := bus.Dispatch(&cmd); err != nil { - NotFoundHandler(c) + hs.NotFoundHandler(c) } c.Redirect(setting.AppSubUrl + "/") diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index 5e69559b9fa..902fd415977 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -6,6 +6,7 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" @@ -24,6 +25,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine := &sqlstore.SqlStore{} engine.Cfg = cfg + engine.Bus = bus.GetBus() engine.Init() if err := command(cmd); err != nil { diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 9bdb73a5858..f88bb9bbfff 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -112,7 +112,7 @@ func SelectVersion(plugin m.Plugin, version string) (m.Version, error) { } } - return m.Version{}, errors.New("Could not find the version your looking for") + return m.Version{}, errors.New("Could not find the version you're looking for") } func RemoveGitBuildFromName(pluginName, filename string) string { @@ -152,7 +152,7 @@ func downloadFile(pluginName, filePath, url string) (err error) { return err } - r, err := zip.NewReader(bytes.NewReader(body), resp.ContentLength) + r, err := zip.NewReader(bytes.NewReader(body), int64(len(body))) if err != nil { return err } diff --git a/pkg/cmd/grafana-cli/commands/upgrade_command.go b/pkg/cmd/grafana-cli/commands/upgrade_command.go index 355ccab3d1c..396371d3577 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_command.go @@ -16,7 +16,7 @@ func upgradeCommand(c CommandLine) error { return err } - v, err2 := s.GetPlugin(localPlugin.Id, c.RepoDirectory()) + v, err2 := s.GetPlugin(pluginName, c.RepoDirectory()) if err2 != nil { return err2 @@ -24,9 +24,9 @@ func upgradeCommand(c CommandLine) error { if ShouldUpgrade(localPlugin.Info.Version, v) { s.RemoveInstalledPlugin(pluginsDir, pluginName) - return InstallPlugin(localPlugin.Id, "", c) + return InstallPlugin(pluginName, "", c) } - logger.Infof("%s %s is up to date \n", color.GreenString("✔"), localPlugin.Id) + logger.Infof("%s %s is up to date \n", color.GreenString("✔"), pluginName) return nil } diff --git a/pkg/cmd/grafana-cli/services/services.go b/pkg/cmd/grafana-cli/services/services.go index e743d42022c..338975bc130 100644 --- a/pkg/cmd/grafana-cli/services/services.go +++ b/pkg/cmd/grafana-cli/services/services.go @@ -63,7 +63,7 @@ func ListAllPlugins(repoUrl string) (m.PluginRepo, error) { var data m.PluginRepo err = json.Unmarshal(body, &data) if err != nil { - logger.Info("Failed to unmarshal graphite response error: %v", err) + logger.Info("Failed to unmarshal plugin repo response error:", err) return m.PluginRepo{}, err } @@ -140,7 +140,7 @@ func GetPlugin(pluginId, repoUrl string) (m.Plugin, error) { var data m.Plugin err = 
json.Unmarshal(body, &data) if err != nil { - logger.Info("Failed to unmarshal graphite response error: %v", err) + logger.Info("Failed to unmarshal plugin repo response error:", err) return m.Plugin{}, err } diff --git a/pkg/cmd/grafana-cli/utils/grafana_path.go b/pkg/cmd/grafana-cli/utils/grafana_path.go index afb622bbb93..5f5c944f52b 100644 --- a/pkg/cmd/grafana-cli/utils/grafana_path.go +++ b/pkg/cmd/grafana-cli/utils/grafana_path.go @@ -42,6 +42,8 @@ func returnOsDefault(currentOs string) string { return "/usr/local/var/lib/grafana/plugins" case "freebsd": return "/var/db/grafana/plugins" + case "openbsd": + return "/var/grafana/plugins" default: //"linux" return "/var/lib/grafana/plugins" } diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index f00e6bba0fd..06c07a2887c 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -29,6 +29,7 @@ import ( _ "github.com/grafana/grafana/pkg/tsdb/opentsdb" _ "github.com/grafana/grafana/pkg/tsdb/postgres" _ "github.com/grafana/grafana/pkg/tsdb/prometheus" + _ "github.com/grafana/grafana/pkg/tsdb/stackdriver" _ "github.com/grafana/grafana/pkg/tsdb/testdata" ) @@ -96,13 +97,17 @@ func main() { func listenToSystemSignals(server *GrafanaServerImpl) { signalChan := make(chan os.Signal, 1) - ignoreChan := make(chan os.Signal, 1) + sighupChan := make(chan os.Signal, 1) - signal.Notify(ignoreChan, syscall.SIGHUP) - signal.Notify(signalChan, os.Interrupt, os.Kill, syscall.SIGTERM) + signal.Notify(sighupChan, syscall.SIGHUP) + signal.Notify(signalChan, os.Interrupt, syscall.SIGTERM) - select { - case sig := <-signalChan: - server.Shutdown(fmt.Sprintf("System signal: %s", sig)) + for { + select { + case <-sighupChan: + log.Reload() + case sig := <-signalChan: + server.Shutdown(fmt.Sprintf("System signal: %s", sig)) + } } } diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go index 3c0ac5b8884..b37763931c8 100644 --- a/pkg/components/imguploader/azureblobuploader.go +++ b/pkg/components/imguploader/azureblobuploader.go @@ -52,7 +52,7 @@ func (az *AzureBlobUploader) Upload(ctx context.Context, imageDiskPath string) ( } randomFileName := util.GetRandomString(30) + ".png" // upload image - az.log.Debug("Uploading image to azure_blob", "conatiner_name", az.container_name, "blob_name", randomFileName) + az.log.Debug("Uploading image to azure_blob", "container_name", az.container_name, "blob_name", randomFileName) resp, err := blob.FileUpload(az.container_name, randomFileName, file) if err != nil { return "", err @@ -127,8 +127,6 @@ type xmlError struct { const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT" const version = "2017-04-17" -var client = &http.Client{} - type StorageClient struct { Auth *Auth Transport http.RoundTripper @@ -274,10 +272,10 @@ func (a *Auth) canonicalizedHeaders(req *http.Request) string { } } - splitted := strings.Split(buffer.String(), "\n") - sort.Strings(splitted) + split := strings.Split(buffer.String(), "\n") + sort.Strings(split) - return strings.Join(splitted, "\n") + return strings.Join(split, "\n") } /* @@ -313,8 +311,8 @@ func (a *Auth) canonicalizedResource(req *http.Request) string { buffer.WriteString(fmt.Sprintf("\n%s:%s", key, strings.Join(values, ","))) } - splitted := strings.Split(buffer.String(), "\n") - sort.Strings(splitted) + split := strings.Split(buffer.String(), "\n") + sort.Strings(split) - return strings.Join(splitted, "\n") + return strings.Join(split, "\n") } diff --git 
a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go index 62196357c61..9c8af21e39e 100644 --- a/pkg/components/imguploader/s3uploader.go +++ b/pkg/components/imguploader/s3uploader.go @@ -2,12 +2,15 @@ package imguploader import ( "context" + "fmt" "os" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds" + "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds" + "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/ec2metadata" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/session" @@ -50,7 +53,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, SecretAccessKey: u.secretKey, }}, &credentials.EnvProvider{}, - &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}, + remoteCredProvider(sess), }) cfg := &aws.Config{ Region: aws.String(u.region), @@ -60,7 +63,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, s3_endpoint, _ := endpoints.DefaultResolver().EndpointFor("s3", u.region) key := u.path + util.GetRandomString(20) + ".png" image_url := s3_endpoint.URL + "/" + u.bucket + "/" + key - log.Debug("Uploading image to s3", "url = ", image_url) + log.Debug("Uploading image to s3. url = %s", image_url) file, err := os.Open(imageDiskPath) if err != nil { @@ -85,3 +88,27 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, } return image_url, nil } + +func remoteCredProvider(sess *session.Session) credentials.Provider { + ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") + + if len(ecsCredURI) > 0 { + return ecsCredProvider(sess, ecsCredURI) + } + return ec2RoleProvider(sess) +} + +func ecsCredProvider(sess *session.Session, uri string) credentials.Provider { + const host = `169.254.170.2` + + d := defaults.Get() + return endpointcreds.NewProviderClient( + *d.Config, + d.Handlers, + fmt.Sprintf("http://%s%s", host, uri), + func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute }) +} + +func ec2RoleProvider(sess *session.Session) credentials.Provider { + return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute} +} diff --git a/pkg/components/null/float.go b/pkg/components/null/float.go index 4f783f2c584..9082c831084 100644 --- a/pkg/components/null/float.go +++ b/pkg/components/null/float.go @@ -8,6 +8,10 @@ import ( "strconv" ) +const ( + nullString = "null" +) + // Float is a nullable float64. // It does not consider zero values to be null. // It will decode to null, not zero, if null. @@ -68,7 +72,7 @@ func (f *Float) UnmarshalJSON(data []byte) error { // It will return an error if the input is not an integer, blank, or "null". func (f *Float) UnmarshalText(text []byte) error { str := string(text) - if str == "" || str == "null" { + if str == "" || str == nullString { f.Valid = false return nil } @@ -82,7 +86,7 @@ func (f *Float) UnmarshalText(text []byte) error { // It will encode null if this Float is null. func (f Float) MarshalJSON() ([]byte, error) { if !f.Valid { - return []byte("null"), nil + return []byte(nullString), nil } return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil } @@ -100,7 +104,7 @@ func (f Float) MarshalText() ([]byte, error) { // It will encode a blank string if this Float is null. 
func (f Float) String() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%1.3f", f.Float64) @@ -109,7 +113,7 @@ func (f Float) String() string { // FullString returns float as string in full precision func (f Float) FullString() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%f", f.Float64) diff --git a/pkg/components/simplejson/simplejson.go b/pkg/components/simplejson/simplejson.go index 85e2f955943..35e305eb414 100644 --- a/pkg/components/simplejson/simplejson.go +++ b/pkg/components/simplejson/simplejson.go @@ -256,7 +256,7 @@ func (j *Json) StringArray() ([]string, error) { // MustArray guarantees the return of a `[]interface{}` (with optional default) // -// useful when you want to interate over array values in a succinct manner: +// useful when you want to iterate over array values in a succinct manner: // for i, v := range js.Get("results").MustArray() { // fmt.Println(i, v) // } @@ -281,7 +281,7 @@ func (j *Json) MustArray(args ...[]interface{}) []interface{} { // MustMap guarantees the return of a `map[string]interface{}` (with optional default) // -// useful when you want to interate over map values in a succinct manner: +// useful when you want to iterate over map values in a succinct manner: // for k, v := range js.Get("dictionary").MustMap() { // fmt.Println(k, v) // } @@ -329,7 +329,7 @@ func (j *Json) MustString(args ...string) string { // MustStringArray guarantees the return of a `[]string` (with optional default) // -// useful when you want to interate over array values in a succinct manner: +// useful when you want to iterate over array values in a succinct manner: // for i, s := range js.Get("results").MustStringArray() { // fmt.Println(i, s) // } diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go index 6e3461da8a8..1d8bbce03f3 100644 --- a/pkg/extensions/main.go +++ b/pkg/extensions/main.go @@ -1,3 +1,7 @@ package extensions +import ( + _ "gopkg.in/square/go-jose.v2" +) + var IsEnterprise bool = false diff --git a/pkg/log/file.go b/pkg/log/file.go index d137adbf3de..b8430dc6086 100644 --- a/pkg/log/file.go +++ b/pkg/log/file.go @@ -236,3 +236,20 @@ func (w *FileLogWriter) Close() { func (w *FileLogWriter) Flush() { w.mw.fd.Sync() } + +// Reload file logger +func (w *FileLogWriter) Reload() { + // block Logger's io.Writer + w.mw.Lock() + defer w.mw.Unlock() + + // Close + fd := w.mw.fd + fd.Close() + + // Open again + err := w.StartLogger() + if err != nil { + fmt.Fprintf(os.Stderr, "Reload StartLogger: %s\n", err) + } +} diff --git a/pkg/log/handlers.go b/pkg/log/handlers.go index 14a96fdcdb4..804d8fcbd70 100644 --- a/pkg/log/handlers.go +++ b/pkg/log/handlers.go @@ -3,3 +3,7 @@ package log type DisposableHandler interface { Close() } + +type ReloadableHandler interface { + Reload() +} diff --git a/pkg/log/log.go b/pkg/log/log.go index 0e6874e1b4b..8f0522748ef 100644 --- a/pkg/log/log.go +++ b/pkg/log/log.go @@ -21,10 +21,12 @@ import ( var Root log15.Logger var loggersToClose []DisposableHandler +var loggersToReload []ReloadableHandler var filters map[string]log15.Lvl func init() { loggersToClose = make([]DisposableHandler, 0) + loggersToReload = make([]ReloadableHandler, 0) Root = log15.Root() Root.SetHandler(log15.DiscardHandler()) } @@ -103,7 +105,7 @@ func Critical(skip int, format string, v ...interface{}) { } func Fatal(skip int, format string, v ...interface{}) { - Root.Crit(fmt.Sprintf(format, v)) + Root.Crit(fmt.Sprintf(format, v...)) Close() os.Exit(1) } @@ -115,6 +117,12 @@ func 
Close() { loggersToClose = make([]DisposableHandler, 0) } +func Reload() { + for _, logger := range loggersToReload { + logger.Reload() + } +} + func GetLogLevelFor(name string) Lvl { if level, ok := filters[name]; ok { switch level { @@ -230,6 +238,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) { fileHandler.Init() loggersToClose = append(loggersToClose, fileHandler) + loggersToReload = append(loggersToReload, fileHandler) handler = fileHandler case "syslog": sysLogHandler := NewSyslog(sec, format) diff --git a/pkg/login/auth.go b/pkg/login/auth.go index 215a22cde33..991fa72fd54 100644 --- a/pkg/login/auth.go +++ b/pkg/login/auth.go @@ -2,7 +2,6 @@ package login import ( "errors" - "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" ) @@ -14,6 +13,7 @@ var ( ErrProviderDeniedRequest = errors.New("Login provider denied login request") ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter") ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. Login for user temporarily blocked") + ErrPasswordEmpty = errors.New("No password provided.") ErrUsersQuotaReached = errors.New("Users quota reached") ErrGettingUserQuota = errors.New("Error getting user quota") ) @@ -28,6 +28,10 @@ func AuthenticateUser(query *m.LoginUserQuery) error { return err } + if err := validatePasswordSet(query.Password); err != nil { + return err + } + err := loginUsingGrafanaDB(query) if err == nil || (err != m.ErrUserNotFound && err != ErrInvalidCredentials) { return err @@ -52,3 +56,10 @@ func AuthenticateUser(query *m.LoginUserQuery) error { return err } +func validatePasswordSet(password string) error { + if len(password) == 0 { + return ErrPasswordEmpty + } + + return nil +} diff --git a/pkg/login/auth_test.go b/pkg/login/auth_test.go index 932125c410e..a4cd8284cdd 100644 --- a/pkg/login/auth_test.go +++ b/pkg/login/auth_test.go @@ -10,6 +10,24 @@ import ( func TestAuthenticateUser(t *testing.T) { Convey("Authenticate user", t, func() { + authScenario("When a user authenticates without setting a password", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(nil, sc) + mockLoginUsingLdap(false, nil, sc) + + loginQuery := m.LoginUserQuery{ + Username: "user", + Password: "", + } + err := AuthenticateUser(&loginQuery) + + Convey("login should fail", func() { + So(sc.grafanaLoginWasCalled, ShouldBeFalse) + So(sc.ldapLoginWasCalled, ShouldBeFalse) + So(err, ShouldEqual, ErrPasswordEmpty) + }) + }) + authScenario("When a user authenticates having too many login attempts", func(sc *authScenarioContext) { mockLoginAttemptValidation(ErrTooManyLoginAttempts, sc) mockLoginUsingGrafanaDB(nil, sc) diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go index a421e3ebe0a..1262c1cc44f 100644 --- a/pkg/login/ext_user.go +++ b/pkg/login/ext_user.go @@ -35,7 +35,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { limitReached, err := quota.QuotaReached(cmd.ReqContext, "user") if err != nil { - log.Warn("Error getting user quota", "err", err) + log.Warn("Error getting user quota. 
error: %v", err) return ErrGettingUserQuota } if limitReached { @@ -135,7 +135,7 @@ func updateUser(user *m.User, extUser *m.ExternalUserInfo) error { return nil } - log.Debug("Syncing user info", "id", user.Id, "update", updateCmd) + log.Debug2("Syncing user info", "id", user.Id, "update", updateCmd) return bus.Dispatch(updateCmd) } diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index 053778e8deb..4c71ab3cd5f 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -185,7 +185,9 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo if ldapUser.isMemberOf(group.GroupDN) { extUser.OrgRoles[group.OrgId] = group.OrgRole - extUser.IsGrafanaAdmin = group.IsGrafanaAdmin + if extUser.IsGrafanaAdmin == nil || *extUser.IsGrafanaAdmin == false { + extUser.IsGrafanaAdmin = group.IsGrafanaAdmin + } } } @@ -326,15 +328,19 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { a.log.Info("Searching for user's groups", "filter", filter) + // support old way of reading settings + groupIdAttribute := a.server.Attr.MemberOf + // but prefer dn attribute if default settings are used + if groupIdAttribute == "" || groupIdAttribute == "memberOf" { + groupIdAttribute = "dn" + } + groupSearchReq := ldap.SearchRequest{ BaseDN: groupSearchBase, Scope: ldap.ScopeWholeSubtree, DerefAliases: ldap.NeverDerefAliases, - Attributes: []string{ - // Here MemberOf would be the thing that identifies the group, which is normally 'cn' - a.server.Attr.MemberOf, - }, - Filter: filter, + Attributes: []string{groupIdAttribute}, + Filter: filter, } groupSearchResult, err = a.conn.Search(&groupSearchReq) @@ -344,7 +350,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { if len(groupSearchResult.Entries) > 0 { for i := range groupSearchResult.Entries { - memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i)) + memberOf = append(memberOf, getLdapAttrN(groupIdAttribute, groupSearchResult, i)) } break } diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go index 7ebfbc79ba8..40791a509db 100644 --- a/pkg/login/ldap_settings.go +++ b/pkg/login/ldap_settings.go @@ -48,7 +48,7 @@ type LdapAttributeMap struct { type LdapGroupToOrgRole struct { GroupDN string `toml:"group_dn"` OrgId int64 `toml:"org_id"` - IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatability) + IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatibility) OrgRole m.RoleType `toml:"org_role"` } diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index a8d9f7308fa..9a514fdb6f3 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -61,6 +61,23 @@ var ( M_Grafana_Version *prometheus.GaugeVec ) +func newCounterVecStartingAtZero(opts prometheus.CounterOpts, labels []string, labelValues ...string) *prometheus.CounterVec { + counter := prometheus.NewCounterVec(opts, labels) + + for _, label := range labelValues { + counter.WithLabelValues(label).Add(0) + } + + return counter +} + +func newCounterStartingAtZero(opts prometheus.CounterOpts, labelValues ...string) prometheus.Counter { + counter := prometheus.NewCounter(opts) + counter.Add(0) + + return counter +} + func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", @@ -68,32 +85,27 @@ func init() { Namespace: exporterName, }) - M_Page_Status = prometheus.NewCounterVec( + httpStatusCodes := 
[]string{"200", "404", "500", "unknown"} + M_Page_Status = newCounterVecStartingAtZero( prometheus.CounterOpts{ Name: "page_response_status_total", Help: "page http response status", Namespace: exporterName, - }, - []string{"code"}, - ) + }, []string{"code"}, httpStatusCodes...) - M_Api_Status = prometheus.NewCounterVec( + M_Api_Status = newCounterVecStartingAtZero( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, - }, - []string{"code"}, - ) + }, []string{"code"}, httpStatusCodes...) - M_Proxy_Status = prometheus.NewCounterVec( + M_Proxy_Status = newCounterVecStartingAtZero( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, - }, - []string{"code"}, - ) + }, []string{"code"}, httpStatusCodes...) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ @@ -111,19 +123,19 @@ func init() { []string{"handler", "statuscode", "method"}, ) - M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) - M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) - M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, @@ -147,49 +159,49 @@ func init() { Namespace: exporterName, }) - M_Api_Admin_User_Create = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) - M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) - M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) - M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) - M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) - M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) - M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{ 
Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) - M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ + M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, @@ -207,25 +219,25 @@ func init() { Namespace: exporterName, }, []string{"type"}) - M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ + M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, }) - M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ + M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) - M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_data_total", Help: "counter for getting metric data time series from aws", Namespace: exporterName, }) - M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ + M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, @@ -350,7 +362,7 @@ func getEdition() string { } } -func sendUsageStats() { +func sendUsageStats(oauthProviders map[string]bool) { if !setting.ReportingEnabled { return } @@ -440,6 +452,34 @@ func sendUsageStats() { metrics["stats.ds_access.other."+access+".count"] = count } + anStats := models.GetAlertNotifierUsageStatsQuery{} + if err := bus.Dispatch(&anStats); err != nil { + metricsLogger.Error("Failed to get alert notification stats", "error", err) + return + } + + for _, stats := range anStats.Result { + metrics["stats.alert_notifiers."+stats.Type+".count"] = stats.Count + } + + authTypes := map[string]bool{} + authTypes["anonymous"] = setting.AnonymousEnabled + authTypes["basic_auth"] = setting.BasicAuthEnabled + authTypes["ldap"] = setting.LdapEnabled + authTypes["auth_proxy"] = setting.AuthProxyEnabled + + for provider, enabled := range oauthProviders { + authTypes["oauth_"+provider] = enabled + } + + for authType, enabled := range authTypes { + enabledValue := 0 + if enabled { + enabledValue = 1 + } + metrics["stats.auth_enabled."+authType+".count"] = enabledValue + } + out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) diff --git a/pkg/metrics/metrics_test.go b/pkg/metrics/metrics_test.go index 8d88e03d106..43739221f1e 100644 --- a/pkg/metrics/metrics_test.go +++ b/pkg/metrics/metrics_test.go @@ -115,6 +115,24 @@ func TestMetrics(t *testing.T) { return nil }) + var getAlertNotifierUsageStatsQuery *models.GetAlertNotifierUsageStatsQuery + bus.AddHandler("test", func(query *models.GetAlertNotifierUsageStatsQuery) error { + query.Result = []*models.NotifierUsageStats{ + { + Type: "slack", + Count: 1, + }, + { + Type: "webhook", + Count: 2, + }, + } + + getAlertNotifierUsageStatsQuery = query + + return nil + }) + var wg sync.WaitGroup var responseBuffer *bytes.Buffer var req *http.Request @@ -129,11 +147,19 @@ func TestMetrics(t *testing.T) { })) usageStatsURL = ts.URL - sendUsageStats() + 
oauthProviders := map[string]bool{ + "github": true, + "gitlab": true, + "google": true, + "generic_oauth": true, + "grafana_com": true, + } + + sendUsageStats(oauthProviders) Convey("Given reporting not enabled and sending usage stats", func() { setting.ReportingEnabled = false - sendUsageStats() + sendUsageStats(oauthProviders) Convey("Should not gather stats or call http endpoint", func() { So(getSystemStatsQuery, ShouldBeNil) @@ -146,8 +172,13 @@ func TestMetrics(t *testing.T) { Convey("Given reporting enabled and sending usage stats", func() { setting.ReportingEnabled = true setting.BuildVersion = "5.0.0" + setting.AnonymousEnabled = true + setting.BasicAuthEnabled = true + setting.LdapEnabled = true + setting.AuthProxyEnabled = true + wg.Add(1) - sendUsageStats() + sendUsageStats(oauthProviders) Convey("Should gather stats and call http endpoint", func() { if waitTimeout(&wg, 2*time.Second) { @@ -157,6 +188,7 @@ func TestMetrics(t *testing.T) { So(getSystemStatsQuery, ShouldNotBeNil) So(getDataSourceStatsQuery, ShouldNotBeNil) So(getDataSourceAccessStatsQuery, ShouldNotBeNil) + So(getAlertNotifierUsageStatsQuery, ShouldNotBeNil) So(req, ShouldNotBeNil) So(req.Method, ShouldEqual, http.MethodPost) So(req.Header.Get("Content-Type"), ShouldEqual, "application/json") @@ -198,6 +230,19 @@ func TestMetrics(t *testing.T) { So(metrics.Get("stats.ds_access."+models.DS_PROMETHEUS+".proxy.count").MustInt(), ShouldEqual, 3) So(metrics.Get("stats.ds_access.other.direct.count").MustInt(), ShouldEqual, 6+7) So(metrics.Get("stats.ds_access.other.proxy.count").MustInt(), ShouldEqual, 4+8) + + So(metrics.Get("stats.alert_notifiers.slack.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.alert_notifiers.webhook.count").MustInt(), ShouldEqual, 2) + + So(metrics.Get("stats.auth_enabled.anonymous.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.basic_auth.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.ldap.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.auth_proxy.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.oauth_github.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.oauth_gitlab.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.oauth_google.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.oauth_generic_oauth.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.auth_enabled.oauth_grafana_com.count").MustInt(), ShouldEqual, 1) }) }) diff --git a/pkg/metrics/service.go b/pkg/metrics/service.go index ec38e0acfec..d2c0c815da9 100644 --- a/pkg/metrics/service.go +++ b/pkg/metrics/service.go @@ -28,9 +28,9 @@ func init() { type InternalMetricsService struct { Cfg *setting.Cfg `inject:""` - enabled bool intervalSeconds int64 graphiteCfg *graphitebridge.Config + oauthProviders map[string]bool } func (im *InternalMetricsService) Init() error { @@ -61,7 +61,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error { for { select { case <-onceEveryDayTick.C: - sendUsageStats() + sendUsageStats(im.oauthProviders) case <-everyMinuteTicker.C: updateTotalStats() case <-ctx.Done(): diff --git a/pkg/metrics/settings.go b/pkg/metrics/settings.go index 58b84a7192f..18b9e78d6ff 100644 --- a/pkg/metrics/settings.go +++ b/pkg/metrics/settings.go @@ -5,6 +5,8 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/social" + "github.com/grafana/grafana/pkg/metrics/graphitebridge" "github.com/grafana/grafana/pkg/setting" 
"github.com/prometheus/client_golang/prometheus" @@ -16,17 +18,14 @@ func (im *InternalMetricsService) readSettings() error { return fmt.Errorf("Unable to find metrics config section %v", err) } - im.enabled = section.Key("enabled").MustBool(false) im.intervalSeconds = section.Key("interval_seconds").MustInt64(10) - if !im.enabled { - return nil - } - if err := im.parseGraphiteSettings(); err != nil { return fmt.Errorf("Unable to parse metrics graphite section, %v", err) } + im.oauthProviders = social.GetOAuthProviders(im.Cfg) + return nil } diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index 144a0ae3a69..29bd305b336 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -36,7 +36,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { // initialize session if err := ctx.Session.Start(ctx.Context); err != nil { - log.Error(3, "Failed to start session", err) + log.Error(3, "Failed to start session. error %v", err) return false } @@ -146,12 +146,12 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { if getRequestUserId(ctx) > 0 && getRequestUserId(ctx) != query.Result.UserId { // remove session if err := ctx.Session.Destory(ctx.Context); err != nil { - log.Error(3, "Failed to destroy session, err") + log.Error(3, "Failed to destroy session. error: %v", err) } // initialize a new session if err := ctx.Session.Start(ctx.Context); err != nil { - log.Error(3, "Failed to start session", err) + log.Error(3, "Failed to start session. error: %v", err) } } diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 475dce089b1..3e83a60f94b 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -14,6 +14,13 @@ import ( "github.com/grafana/grafana/pkg/util" ) +var ( + ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) + ReqSignedIn = Auth(&AuthOptions{ReqSignedIn: true}) + ReqEditorRole = RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) + ReqOrgAdmin = RoleAuth(m.ROLE_ADMIN) +) + func GetContextHandler() macaron.Handler { return func(c *macaron.Context) { ctx := &m.ReqContext{ diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 87c23a7b49a..1830b3eb161 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -435,11 +435,6 @@ func (sc *scenarioContext) withValidApiKey() *scenarioContext { return sc } -func (sc *scenarioContext) withInvalidApiKey() *scenarioContext { - sc.apiKey = "nvalidhhhhds" - return sc -} - func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext { sc.authHeader = authHeader return sc diff --git a/pkg/models/alert.go b/pkg/models/alert.go index fba2aa63df9..ba1fc0779ba 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -75,7 +75,7 @@ type Alert struct { EvalData *simplejson.Json NewStateDate time.Time - StateChanges int + StateChanges int64 Created time.Time Updated time.Time @@ -156,7 +156,7 @@ type SetAlertStateCommand struct { Error string EvalData *simplejson.Json - Timestamp time.Time + Result Alert } //Queries diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go index 87b515f370c..e0fd12937ed 100644 --- a/pkg/models/alert_notifications.go +++ b/pkg/models/alert_notifications.go @@ -1,38 +1,63 @@ package models import ( + "errors" "time" "github.com/grafana/grafana/pkg/components/simplejson" ) +var ( + ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified") + 
ErrAlertNotificationStateNotFound = errors.New("alert notification state not found") + ErrAlertNotificationStateVersionConflict = errors.New("alert notification state update version conflict") + ErrAlertNotificationStateAlreadyExist = errors.New("alert notification state already exists.") +) + +type AlertNotificationStateType string + +var ( + AlertNotificationStatePending = AlertNotificationStateType("pending") + AlertNotificationStateCompleted = AlertNotificationStateType("completed") + AlertNotificationStateUnknown = AlertNotificationStateType("unknown") +) + type AlertNotification struct { - Id int64 `json:"id"` - OrgId int64 `json:"-"` - Name string `json:"name"` - Type string `json:"type"` - IsDefault bool `json:"isDefault"` - Settings *simplejson.Json `json:"settings"` - Created time.Time `json:"created"` - Updated time.Time `json:"updated"` + Id int64 `json:"id"` + OrgId int64 `json:"-"` + Name string `json:"name"` + Type string `json:"type"` + SendReminder bool `json:"sendReminder"` + DisableResolveMessage bool `json:"disableResolveMessage"` + Frequency time.Duration `json:"frequency"` + IsDefault bool `json:"isDefault"` + Settings *simplejson.Json `json:"settings"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` } type CreateAlertNotificationCommand struct { - Name string `json:"name" binding:"Required"` - Type string `json:"type" binding:"Required"` - IsDefault bool `json:"isDefault"` - Settings *simplejson.Json `json:"settings"` + Name string `json:"name" binding:"Required"` + Type string `json:"type" binding:"Required"` + SendReminder bool `json:"sendReminder"` + DisableResolveMessage bool `json:"disableResolveMessage"` + Frequency string `json:"frequency"` + IsDefault bool `json:"isDefault"` + Settings *simplejson.Json `json:"settings"` OrgId int64 `json:"-"` Result *AlertNotification } type UpdateAlertNotificationCommand struct { - Id int64 `json:"id" binding:"Required"` - Name string `json:"name" binding:"Required"` - Type string `json:"type" binding:"Required"` - IsDefault bool `json:"isDefault"` - Settings *simplejson.Json `json:"settings" binding:"Required"` + Id int64 `json:"id" binding:"Required"` + Name string `json:"name" binding:"Required"` + Type string `json:"type" binding:"Required"` + SendReminder bool `json:"sendReminder"` + DisableResolveMessage bool `json:"disableResolveMessage"` + Frequency string `json:"frequency"` + IsDefault bool `json:"isDefault"` + Settings *simplejson.Json `json:"settings" binding:"Required"` OrgId int64 `json:"-"` Result *AlertNotification @@ -63,3 +88,35 @@ type GetAllAlertNotificationsQuery struct { Result []*AlertNotification } + +type AlertNotificationState struct { + Id int64 + OrgId int64 + AlertId int64 + NotifierId int64 + State AlertNotificationStateType + Version int64 + UpdatedAt int64 + AlertRuleStateUpdatedVersion int64 +} + +type SetAlertNotificationStateToPendingCommand struct { + Id int64 + AlertRuleStateUpdatedVersion int64 + Version int64 + + ResultVersion int64 +} + +type SetAlertNotificationStateToCompleteCommand struct { + Id int64 + Version int64 +} + +type GetOrCreateNotificationStateQuery struct { + OrgId int64 + AlertId int64 + NotifierId int64 + + Result *AlertNotificationState +} diff --git a/pkg/models/dashboards.go b/pkg/models/dashboards.go index 4b84d840113..e8aebb1d1f4 100644 --- a/pkg/models/dashboards.go +++ b/pkg/models/dashboards.go @@ -21,7 +21,6 @@ var ( ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else") ErrDashboardTitleEmpty = 
errors.New("Dashboard title cannot be empty") ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder") - ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. Cannot save dashboard") ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data") ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists") ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id") diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index b7e3e3eaa17..b71d17ec0d1 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -22,6 +22,7 @@ const ( DS_MSSQL = "mssql" DS_ACCESS_DIRECT = "direct" DS_ACCESS_PROXY = "proxy" + DS_STACKDRIVER = "stackdriver" ) var ( @@ -29,6 +30,7 @@ var ( ErrDataSourceNameExists = errors.New("Data source with same name already exists") ErrDataSourceUpdatingOldVersion = errors.New("Trying to update old version of datasource") ErrDatasourceIsReadOnly = errors.New("Data source is readonly. Can only be updated from configuration.") + ErrDataSourceAccessDenied = errors.New("Data source access denied") ) type DsAccess string @@ -59,23 +61,23 @@ type DataSource struct { } var knownDatasourcePlugins = map[string]bool{ - DS_ES: true, - DS_GRAPHITE: true, - DS_INFLUXDB: true, - DS_INFLUXDB_08: true, - DS_KAIROSDB: true, - DS_CLOUDWATCH: true, - DS_PROMETHEUS: true, - DS_OPENTSDB: true, - DS_POSTGRES: true, - DS_MYSQL: true, - DS_MSSQL: true, - "opennms": true, - "abhisant-druid-datasource": true, - "dalmatinerdb-datasource": true, - "gnocci": true, - "zabbix": true, - "alexanderzobnin-zabbix-datasource": true, + DS_ES: true, + DS_GRAPHITE: true, + DS_INFLUXDB: true, + DS_INFLUXDB_08: true, + DS_KAIROSDB: true, + DS_CLOUDWATCH: true, + DS_PROMETHEUS: true, + DS_OPENTSDB: true, + DS_POSTGRES: true, + DS_MYSQL: true, + DS_MSSQL: true, + DS_STACKDRIVER: true, + "opennms": true, + "abhisant-druid-datasource": true, + "dalmatinerdb-datasource": true, + "gnocci": true, + "zabbix": true, "newrelic-app": true, "grafana-datadog-datasource": true, "grafana-simple-json": true, @@ -88,6 +90,7 @@ var knownDatasourcePlugins = map[string]bool{ "ayoungprogrammer-finance-datasource": true, "monasca-datasource": true, "vertamedia-clickhouse-datasource": true, + "alexanderzobnin-zabbix-datasource": true, } func IsKnownDataSourcePlugin(dsType string) bool { @@ -165,6 +168,7 @@ type DeleteDataSourceByNameCommand struct { type GetDataSourcesQuery struct { OrgId int64 + User *SignedInUser Result []*DataSource } @@ -185,6 +189,31 @@ type GetDataSourceByNameQuery struct { } // --------------------- -// EVENTS -type DataSourceCreatedEvent struct { +// Permissions +// --------------------- + +type DsPermissionType int + +const ( + DsPermissionNoAccess DsPermissionType = iota + DsPermissionQuery +) + +func (p DsPermissionType) String() string { + names := map[int]string{ + int(DsPermissionQuery): "Query", + int(DsPermissionNoAccess): "No Access", + } + return names[int(p)] +} + +type GetDataSourcePermissionsForUserQuery struct { + User *SignedInUser + Result map[int64]DsPermissionType +} + +type DatasourcesPermissionFilterQuery struct { + User *SignedInUser + Datasources []*DataSource + Result []*DataSource } diff --git a/pkg/models/org_user.go b/pkg/models/org_user.go index 9231d18cfd6..b6ecd924e9a 100644 --- a/pkg/models/org_user.go +++ b/pkg/models/org_user.go @@ -72,8 +72,10 @@ type OrgUser struct { // COMMANDS type RemoveOrgUserCommand struct { - 
UserId int64 - OrgId int64 + UserId int64 + OrgId int64 + ShouldDeleteOrphanedUser bool + UserWasDeleted bool } type AddOrgUserCommand struct { diff --git a/pkg/models/stats.go b/pkg/models/stats.go index 4cd50d37463..d3e145dedf4 100644 --- a/pkg/models/stats.go +++ b/pkg/models/stats.go @@ -40,6 +40,15 @@ type GetDataSourceAccessStatsQuery struct { Result []*DataSourceAccessStats } +type NotifierUsageStats struct { + Type string + Count int64 +} + +type GetAlertNotifierUsageStatsQuery struct { + Result []*NotifierUsageStats +} + type AdminStats struct { Users int `json:"users"` Orgs int `json:"orgs"` diff --git a/pkg/models/team_member.go b/pkg/models/team_member.go index 9434dad8ecd..dd64787f465 100644 --- a/pkg/models/team_member.go +++ b/pkg/models/team_member.go @@ -12,10 +12,11 @@ var ( // TeamMember model type TeamMember struct { - Id int64 - OrgId int64 - TeamId int64 - UserId int64 + Id int64 + OrgId int64 + TeamId int64 + UserId int64 + External bool Created time.Time Updated time.Time @@ -25,9 +26,10 @@ type TeamMember struct { // COMMANDS type AddTeamMemberCommand struct { - UserId int64 `json:"userId" binding:"Required"` - OrgId int64 `json:"-"` - TeamId int64 `json:"-"` + UserId int64 `json:"userId" binding:"Required"` + OrgId int64 `json:"-"` + TeamId int64 `json:"-"` + External bool `json:"-"` } type RemoveTeamMemberCommand struct { @@ -40,20 +42,23 @@ type RemoveTeamMemberCommand struct { // QUERIES type GetTeamMembersQuery struct { - OrgId int64 - TeamId int64 - UserId int64 - Result []*TeamMemberDTO + OrgId int64 + TeamId int64 + UserId int64 + External bool + Result []*TeamMemberDTO } // ---------------------- // Projections and DTOs type TeamMemberDTO struct { - OrgId int64 `json:"orgId"` - TeamId int64 `json:"teamId"` - UserId int64 `json:"userId"` - Email string `json:"email"` - Login string `json:"login"` - AvatarUrl string `json:"avatarUrl"` + OrgId int64 `json:"orgId"` + TeamId int64 `json:"teamId"` + UserId int64 `json:"userId"` + External bool `json:"-"` + Email string `json:"email"` + Login string `json:"login"` + AvatarUrl string `json:"avatarUrl"` + Labels []string `json:"labels"` } diff --git a/pkg/plugins/app_plugin.go b/pkg/plugins/app_plugin.go index b070ba592f0..922b2444b7b 100644 --- a/pkg/plugins/app_plugin.go +++ b/pkg/plugins/app_plugin.go @@ -23,12 +23,13 @@ type AppPlugin struct { } type AppPluginRoute struct { - Path string `json:"path"` - Method string `json:"method"` - ReqRole models.RoleType `json:"reqRole"` - Url string `json:"url"` - Headers []AppPluginRouteHeader `json:"headers"` - TokenAuth *JwtTokenAuth `json:"tokenAuth"` + Path string `json:"path"` + Method string `json:"method"` + ReqRole models.RoleType `json:"reqRole"` + Url string `json:"url"` + Headers []AppPluginRouteHeader `json:"headers"` + TokenAuth *JwtTokenAuth `json:"tokenAuth"` + JwtTokenAuth *JwtTokenAuth `json:"jwtTokenAuth"` } type AppPluginRouteHeader struct { @@ -36,8 +37,11 @@ type AppPluginRouteHeader struct { Content string `json:"content"` } +// JwtTokenAuth struct is both for normal Token Auth and JWT Token Auth with +// an uploaded JWT file. 
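+// The route config selects the auth mode by key: "tokenAuth" for plain
+// token auth, "jwtTokenAuth" for JWT auth with an uploaded key file; the
+// new Scopes field is aimed at the JWT case. A sketch of a plugin.json
+// route using it, derived from the json tags below (URLs and values are
+// illustrative, not part of this patch):
+//
+//	"routes": [{
+//		"path": "api",
+//		"url": "https://api.example.com",
+//		"jwtTokenAuth": {
+//			"url": "https://oauth2.example.com/token",
+//			"scopes": ["https://api.example.com/auth/read"],
+//			"params": { "token_uri": "{{.JsonData.tokenUri}}" }
+//		}
+//	}]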
type JwtTokenAuth struct { Url string `json:"url"` + Scopes []string `json:"scopes"` Params map[string]string `json:"params"` } diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go index 6f31b49f99d..ca8dfcd515c 100644 --- a/pkg/plugins/dashboard_importer_test.go +++ b/pkg/plugins/dashboard_importer_test.go @@ -35,7 +35,7 @@ func TestDashboardImport(t *testing.T) { So(cmd.Result, ShouldNotBeNil) resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty() - expectedBytes, _ := ioutil.ReadFile("../../tests/test-app/dashboards/connections_result.json") + expectedBytes, _ := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json") expectedJson, _ := simplejson.NewJson(expectedBytes) expectedStr, _ := expectedJson.EncodePretty() @@ -89,7 +89,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) { Convey("Given a plugin", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.test-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/dashboards_test.go index c422a1431c0..6fc6ace0e00 100644 --- a/pkg/plugins/dashboards_test.go +++ b/pkg/plugins/dashboards_test.go @@ -16,7 +16,7 @@ func TestPluginDashboards(t *testing.T) { Convey("When asking plugin dashboard info", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.test-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go index ebe11ed32d4..616d4541bec 100644 --- a/pkg/plugins/dashboards_updater.go +++ b/pkg/plugins/dashboards_updater.go @@ -48,11 +48,7 @@ func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64) Path: pluginDashInfo.Path, } - if err := bus.Dispatch(&updateCmd); err != nil { - return err - } - - return nil + return bus.Dispatch(&updateCmd) } func syncPluginDashboards(pluginDef *PluginBase, orgId int64) { diff --git a/pkg/plugins/plugins_test.go b/pkg/plugins/plugins_test.go index fa68ae4389d..d16e6abb4c7 100644 --- a/pkg/plugins/plugins_test.go +++ b/pkg/plugins/plugins_test.go @@ -30,7 +30,7 @@ func TestPluginScans(t *testing.T) { Convey("When reading app plugin definition", t, func() { setting.Raw = ini.Empty() sec, _ := setting.Raw.NewSection("plugin.nginx-app") - sec.NewKey("path", "../../tests/test-app") + sec.NewKey("path", "testdata/test-app") pm := &PluginManager{} err := pm.Init() diff --git a/tests/datasource-test/module.js b/pkg/plugins/testdata/datasource-test/module.js similarity index 100% rename from tests/datasource-test/module.js rename to pkg/plugins/testdata/datasource-test/module.js diff --git a/tests/datasource-test/plugin.json b/pkg/plugins/testdata/datasource-test/plugin.json similarity index 100% rename from tests/datasource-test/plugin.json rename to pkg/plugins/testdata/datasource-test/plugin.json diff --git a/tests/test-app/dashboards/connections.json b/pkg/plugins/testdata/test-app/dashboards/connections.json similarity index 100% rename from tests/test-app/dashboards/connections.json rename to pkg/plugins/testdata/test-app/dashboards/connections.json diff --git a/tests/test-app/dashboards/connections_result.json b/pkg/plugins/testdata/test-app/dashboards/connections_result.json similarity index 100% rename from tests/test-app/dashboards/connections_result.json 
rename to pkg/plugins/testdata/test-app/dashboards/connections_result.json diff --git a/tests/test-app/dashboards/memory.json b/pkg/plugins/testdata/test-app/dashboards/memory.json similarity index 100% rename from tests/test-app/dashboards/memory.json rename to pkg/plugins/testdata/test-app/dashboards/memory.json diff --git a/tests/test-app/plugin.json b/pkg/plugins/testdata/test-app/plugin.json similarity index 100% rename from tests/test-app/plugin.json rename to pkg/plugins/testdata/test-app/plugin.json diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go index 8d7ca57f010..eef593d39e2 100644 --- a/pkg/services/alerting/conditions/evaluator.go +++ b/pkg/services/alerting/conditions/evaluator.go @@ -2,6 +2,7 @@ package conditions import ( "encoding/json" + "fmt" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" @@ -31,12 +32,12 @@ type ThresholdEvaluator struct { func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { - return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"} + return nil, fmt.Errorf("Evaluator missing threshold parameter") } firstParam, ok := params[0].(json.Number) if !ok { - return nil, alerting.ValidationError{Reason: "Evaluator has invalid parameter"} + return nil, fmt.Errorf("Evaluator has invalid parameter") } defaultEval := &ThresholdEvaluator{Type: typ} @@ -107,7 +108,7 @@ func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { typ := model.Get("type").MustString() if typ == "" { - return nil, alerting.ValidationError{Reason: "Evaluator missing type property"} + return nil, fmt.Errorf("Evaluator missing type property") } if inSlice(typ, defaultTypes) { @@ -122,7 +123,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { return &NoValueEvaluator{}, nil } - return nil, alerting.ValidationError{Reason: "Evaluator invalid evaluator type: " + typ} + return nil, fmt.Errorf("Evaluator invalid evaluator type: %s", typ) } func inSlice(a string, list []string) bool { diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index e1c1bfacb2e..edfab2dedee 100644 --- a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -82,12 +82,12 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, if collapsed && collapsedJSON.MustBool() { // extract alerts from sub panels for collapsed panels - als, err := e.getAlertFromPanels(panel, validateAlertFunc) + alertSlice, err := e.getAlertFromPanels(panel, validateAlertFunc) if err != nil { return nil, err } - alerts = append(alerts, als...) + alerts = append(alerts, alertSlice...) continue } @@ -99,7 +99,7 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, panelID, err := panel.Get("id").Int64() if err != nil { - return nil, fmt.Errorf("panel id is required. 
err %v", err) + return nil, ValidationError{Reason: "A numeric panel id property is missing"} } // backward compatibility check, can be removed later @@ -145,7 +145,8 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, datasource, err := e.lookupDatasourceID(dsName) if err != nil { - return nil, err + e.log.Debug("Error looking up datasource", "error", err) + return nil, ValidationError{Reason: fmt.Sprintf("Data source used by alert rule not found, alertName=%v, datasource=%s", alert.Name, dsName)} } jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id) @@ -166,8 +167,7 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, } if !validateAlertFunc(alert) { - e.log.Debug("Invalid Alert Data. Dashboard, Org or Panel ID is not correct", "alertName", alert.Name, "panelId", alert.PanelId) - return nil, m.ErrDashboardContainsInvalidAlertData + return nil, ValidationError{Reason: fmt.Sprintf("Panel id is not correct, alertName=%v, panelId=%v", alert.Name, alert.PanelId)} } alerts = append(alerts, alert) diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index c7212e48174..e2dc01a1181 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -258,7 +258,7 @@ func TestAlertRuleExtraction(t *testing.T) { Convey("Should fail on save", func() { _, err := extractor.GetAlerts() - So(err, ShouldEqual, m.ErrDashboardContainsInvalidAlertData) + So(err.Error(), ShouldEqual, "Alert validation error: Panel id is not correct, alertName=Influxdb, panelId=1") }) }) }) diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index 18f969ba1b9..040d0991861 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -1,6 +1,11 @@ package alerting -import "time" +import ( + "context" + "time" + + "github.com/grafana/grafana/pkg/models" +) type EvalHandler interface { Eval(evalContext *EvalContext) @@ -15,17 +20,27 @@ type Notifier interface { Notify(evalContext *EvalContext) error GetType() string NeedsImage() bool - ShouldNotify(evalContext *EvalContext) bool + + // ShouldNotify checks this evaluation should send an alert notification + ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool GetNotifierId() int64 GetIsDefault() bool + GetSendReminder() bool + GetDisableResolveMessage() bool + GetFrequency() time.Duration +} + +type notifierState struct { + notifier Notifier + state *models.AlertNotificationState } -type NotifierSlice []Notifier +type notifierStateSlice []*notifierState -func (notifiers NotifierSlice) ShouldUploadImage() bool { - for _, notifier := range notifiers { - if notifier.NeedsImage() { +func (notifiers notifierStateSlice) ShouldUploadImage() bool { + for _, ns := range notifiers { + if ns.notifier.NeedsImage() { return true } } diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index f4e0a0f434f..9ce50eadd6b 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -4,13 +4,12 @@ import ( "errors" "fmt" - "golang.org/x/sync/errgroup" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/imguploader" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/services/rendering" + "github.com/grafana/grafana/pkg/setting" m "github.com/grafana/grafana/pkg/models" ) @@ -40,35 +39,82 
@@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifiers, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) if err != nil { return err } - if len(notifiers) == 0 { + if len(notifierStates) == 0 { return nil } - if notifiers.ShouldUploadImage() { + if notifierStates.ShouldUploadImage() { if err = n.uploadImage(context); err != nil { n.log.Error("Failed to upload alert panel image.", "error", err) } } - return n.sendNotifications(context, notifiers) + return n.sendNotifications(context, notifierStates) } -func (n *notificationService) sendNotifications(context *EvalContext, notifiers []Notifier) error { - g, _ := errgroup.WithContext(context.Ctx) +func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { + notifier := notifierState.notifier + + n.log.Debug("Sending notification", "type", notifier.GetType(), "id", notifier.GetNotifierId(), "isDefault", notifier.GetIsDefault()) + metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() + + err := notifier.Notify(evalContext) + + if err != nil { + n.log.Error("failed to send notification", "id", notifier.GetNotifierId(), "error", err) + } + + if evalContext.IsTestRun { + return nil + } - for _, notifier := range notifiers { - not := notifier //avoid updating scope variable in go routine - n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault()) - metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc() - g.Go(func() error { return not.Notify(context) }) + cmd := &m.SetAlertNotificationStateToCompleteCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, } - return g.Wait() + return bus.DispatchCtx(evalContext.Ctx, cmd) +} + +func (n *notificationService) sendNotification(evalContext *EvalContext, notifierState *notifierState) error { + if !evalContext.IsTestRun { + setPendingCmd := &m.SetAlertNotificationStateToPendingCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, + AlertRuleStateUpdatedVersion: evalContext.Rule.StateChanges, + } + + err := bus.DispatchCtx(evalContext.Ctx, setPendingCmd) + if err == m.ErrAlertNotificationStateVersionConflict { + return nil + } + + if err != nil { + return err + } + + // We need to update state version to be able to log + // unexpected version conflicts when marking notifications as ok + notifierState.state.Version = setPendingCmd.ResultVersion + } + + return n.sendAndMarkAsComplete(evalContext, notifierState) +} + +func (n *notificationService) sendNotifications(evalContext *EvalContext, notifierStates notifierStateSlice) error { + for _, notifierState := range notifierStates { + err := n.sendNotification(evalContext, notifierState) + if err != nil { + n.log.Error("failed to send notification", "id", notifierState.notifier.GetNotifierId(), "error", err) + } + } + + return nil } func (n *notificationService) uploadImage(context *EvalContext) (err error) { @@ -78,11 +124,12 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } renderOpts := rendering.Opts{ - Width: 1000, - Height: 500, - Timeout: alertTimeout / 2, - OrgId: context.Rule.OrgId, - OrgRole: m.ROLE_ADMIN, + Width: 1000, + Height: 500, + Timeout: alertTimeout / 2, + OrgId: context.Rule.OrgId, + OrgRole: 
m.ROLE_ADMIN, + ConcurrentLimit: setting.AlertingRenderLimit, } ref, err := context.GetDashboardUID() @@ -110,21 +157,38 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return nil } -func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, context *EvalContext) (NotifierSlice, error) { +func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (notifierStateSlice, error) { query := &m.GetAlertNotificationsToSendQuery{OrgId: orgId, Ids: notificationIds} if err := bus.Dispatch(query); err != nil { return nil, err } - var result []Notifier + var result notifierStateSlice for _, notification := range query.Result { not, err := n.createNotifierFor(notification) if err != nil { - return nil, err + n.log.Error("Could not create notifier", "notifier", notification.Id, "error", err) + continue + } + + query := &m.GetOrCreateNotificationStateQuery{ + NotifierId: notification.Id, + AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgId, + } + + err = bus.DispatchCtx(evalContext.Ctx, query) + if err != nil { + n.log.Error("Could not get notification state.", "notifier", notification.Id, "error", err) + continue } - if not.ShouldNotify(context) { - result = append(result, not) + + if not.ShouldNotify(evalContext.Ctx, evalContext, query.Result) { + result = append(result, ¬ifierState{ + notifier: not, + state: query.Result, + }) } } diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index d449167de13..2caa4d5ab58 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -1,6 +1,7 @@ package notifiers import ( + "context" "time" "github.com/grafana/grafana/pkg/bus" @@ -33,7 +34,7 @@ func NewAlertmanagerNotifier(model *m.AlertNotification) (alerting.Notifier, err } return &AlertmanagerNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, log: log.New("alerting.notifier.prometheus-alertmanager"), }, nil @@ -45,7 +46,7 @@ type AlertmanagerNotifier struct { log log.Logger } -func (this *AlertmanagerNotifier) ShouldNotify(evalContext *alerting.EvalContext) bool { +func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *m.AlertNotificationState) bool { this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. 
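// The interface rework above means each notifier now receives a context and
// its per-notifier AlertNotificationState in ShouldNotify, instead of
// deciding from the EvalContext alone. A minimal sketch of a notifier
// against the new interface, following the constructor pattern used by the
// notifiers below (MyNotifier and its names are illustrative, not part of
// this patch; imports are as in those files):

type MyNotifier struct {
	NotifierBase // embeds the state-aware default ShouldNotify
	log log.Logger
}

func NewMyNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
	return &MyNotifier{
		// NewNotifierBase now takes the whole model, so SendReminder,
		// DisableResolveMessage and Frequency are picked up in one place.
		NotifierBase: NewNotifierBase(model),
		log:          log.New("alerting.notifier.my"),
	}, nil
}

func (mn *MyNotifier) Notify(evalContext *alerting.EvalContext) error {
	mn.log.Info("Sending notification", "ruleId", evalContext.Rule.Id)
	return nil
}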
diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index 868db3aec79..d141d6cd257 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -1,50 +1,95 @@ package notifiers import ( - "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" + "context" + "time" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) +const ( + triggMetrString = "Triggered metrics:\n\n" +) + type NotifierBase struct { - Name string - Type string - Id int64 - IsDeault bool - UploadImage bool + Name string + Type string + Id int64 + IsDeault bool + UploadImage bool + SendReminder bool + DisableResolveMessage bool + Frequency time.Duration + + log log.Logger } -func NewNotifierBase(id int64, isDefault bool, name, notifierType string, model *simplejson.Json) NotifierBase { +func NewNotifierBase(model *models.AlertNotification) NotifierBase { uploadImage := true - value, exist := model.CheckGet("uploadImage") + value, exist := model.Settings.CheckGet("uploadImage") if exist { uploadImage = value.MustBool() } return NotifierBase{ - Id: id, - Name: name, - IsDeault: isDefault, - Type: notifierType, - UploadImage: uploadImage, + Id: model.Id, + Name: model.Name, + IsDeault: model.IsDefault, + Type: model.Type, + UploadImage: uploadImage, + SendReminder: model.SendReminder, + DisableResolveMessage: model.DisableResolveMessage, + Frequency: model.Frequency, + log: log.New("alerting.notifier." + model.Name), } } -func defaultShouldNotify(context *alerting.EvalContext) bool { +// ShouldNotify checks whether this evaluation should send an alert notification +func (n *NotifierBase) ShouldNotify(ctx context.Context, context *alerting.EvalContext, notifierState *models.AlertNotificationState) bool { // Only notify on state change. - if context.PrevAlertState == context.Rule.State { + if context.PrevAlertState == context.Rule.State && !n.SendReminder { return false } + + if context.PrevAlertState == context.Rule.State && n.SendReminder { + // Do not notify if the reminder interval has not elapsed + lastNotify := time.Unix(notifierState.UpdatedAt, 0) + if notifierState.UpdatedAt != 0 && lastNotify.Add(n.Frequency).After(time.Now()) { + return false + } + + // Do not notify if alert state is OK or pending even on repeated notify + if context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending { + return false + } + } + + // Do not notify when we become OK for the first time.
- if (context.PrevAlertState == m.AlertStatePending) && (context.Rule.State == m.AlertStateOK) { + if context.PrevAlertState == models.AlertStatePending && context.Rule.State == models.AlertStateOK { + return false + } + + // Do not notify on an OK -> pending transition + if context.PrevAlertState == models.AlertStateOK && context.Rule.State == models.AlertStatePending { return false } - return true -} -func (n *NotifierBase) ShouldNotify(context *alerting.EvalContext) bool { - return defaultShouldNotify(context) + // Do not notify if the notification state is pending and it was updated within the last minute + if notifierState.State == models.AlertNotificationStatePending { + lastUpdated := time.Unix(notifierState.UpdatedAt, 0) + if lastUpdated.Add(1 * time.Minute).After(time.Now()) { + return false + } + } + + // Do not notify when state is OK if DisableResolveMessage is set to true + if context.Rule.State == models.AlertStateOK && n.DisableResolveMessage { + return false + } + + return true } func (n *NotifierBase) GetType() string { @@ -62,3 +107,15 @@ func (n *NotifierBase) GetNotifierId() int64 { func (n *NotifierBase) GetIsDefault() bool { return n.IsDeault } + +func (n *NotifierBase) GetSendReminder() bool { + return n.SendReminder +} + +func (n *NotifierBase) GetDisableResolveMessage() bool { + return n.DisableResolveMessage +} + +func (n *NotifierBase) GetFrequency() time.Duration { + return n.Frequency +} diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go index b7142d144cc..5062828cb4f 100644 --- a/pkg/services/alerting/notifiers/base_test.go +++ b/pkg/services/alerting/notifiers/base_test.go @@ -3,6 +3,7 @@ package notifiers import ( "context" "testing" + "time" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" @@ -10,47 +11,178 @@ import ( .
"github.com/smartystreets/goconvey/convey" ) +func TestShouldSendAlertNotification(t *testing.T) { + tnow := time.Now() + + tcs := []struct { + name string + prevState m.AlertStateType + newState m.AlertStateType + sendReminder bool + frequency time.Duration + state *m.AlertNotificationState + + expect bool + }{ + { + name: "pending -> ok should not trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStatePending, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "ok -> alerting should trigger an notification", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "ok -> pending should not trigger an notification", + newState: m.AlertStatePending, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "ok -> ok should not trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "ok -> ok with reminder should not trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStateOK, + sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "alerting -> ok should trigger an notification", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> ok should trigger an notification when reminders enabled", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and no state should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and last notification sent 1 minute ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: false, + }, + { + name: "alerting -> alerting with reminder and last notifciation sent 11 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-11 * time.Minute).Unix()}, + + expect: true, + }, + { + name: "OK -> alerting with notifciation state pending and updated 30 seconds ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-30 * time.Second).Unix()}, + + expect: false, + }, + { + name: "OK -> alerting with notifciation state pending and updated 2 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-2 * time.Minute).Unix()}, + + expect: true, + }, + } + + for _, tc := range tcs { + evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{ + State: tc.prevState, 
+ }) + + evalContext.Rule.State = tc.newState + nb := &NotifierBase{SendReminder: tc.sendReminder, Frequency: tc.frequency} + + if nb.ShouldNotify(evalContext.Ctx, evalContext, tc.state) != tc.expect { + t.Errorf("failed test %s.\n expected \n%+v \nto return: %v", tc.name, tc, tc.expect) + } + } +} + func TestBaseNotifier(t *testing.T) { - Convey("Base notifier tests", t, func() { - Convey("default constructor for notifiers", func() { - bJson := simplejson.New() + Convey("default constructor for notifiers", t, func() { + bJson := simplejson.New() - Convey("can parse false value", func() { - bJson.Set("uploadImage", false) + model := &m.AlertNotification{ + Id: 1, + Name: "name", + Type: "email", + Settings: bJson, + } - base := NewNotifierBase(1, false, "name", "email", bJson) - So(base.UploadImage, ShouldBeFalse) - }) + Convey("can parse false value", func() { + bJson.Set("uploadImage", false) - Convey("can parse true value", func() { - bJson.Set("uploadImage", true) + base := NewNotifierBase(model) + So(base.UploadImage, ShouldBeFalse) + }) - base := NewNotifierBase(1, false, "name", "email", bJson) - So(base.UploadImage, ShouldBeTrue) - }) + Convey("can parse true value", func() { + bJson.Set("uploadImage", true) + + base := NewNotifierBase(model) + So(base.UploadImage, ShouldBeTrue) + }) - Convey("default value should be true for backwards compatibility", func() { - base := NewNotifierBase(1, false, "name", "email", bJson) - So(base.UploadImage, ShouldBeTrue) - }) + Convey("default value should be true for backwards compatibility", func() { + base := NewNotifierBase(model) + So(base.UploadImage, ShouldBeTrue) }) - Convey("should notify", func() { - Convey("pending -> ok", func() { - context := alerting.NewEvalContext(context.TODO(), &alerting.Rule{ - State: m.AlertStatePending, - }) - context.Rule.State = m.AlertStateOK - So(defaultShouldNotify(context), ShouldBeFalse) - }) - - Convey("ok -> alerting", func() { - context := alerting.NewEvalContext(context.TODO(), &alerting.Rule{ - State: m.AlertStateOK, - }) - context.Rule.State = m.AlertStateAlerting - So(defaultShouldNotify(context), ShouldBeTrue) - }) + Convey("default value should be false for backwards compatibility", func() { + base := NewNotifierBase(model) + So(base.DisableResolveMessage, ShouldBeFalse) }) }) } diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go index 14eacef5831..738e43af2d2 100644 --- a/pkg/services/alerting/notifiers/dingding.go +++ b/pkg/services/alerting/notifiers/dingding.go @@ -32,7 +32,7 @@ func NewDingDingNotifier(model *m.AlertNotification) (alerting.Notifier, error) } return &DingDingNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, log: log.New("alerting.notifier.dingding"), }, nil diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go index 3ffa7484870..57d9d438fa2 100644 --- a/pkg/services/alerting/notifiers/discord.go +++ b/pkg/services/alerting/notifiers/discord.go @@ -39,7 +39,7 @@ func NewDiscordNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &DiscordNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), WebhookURL: url, log: log.New("alerting.notifier.discord"), }, nil diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index 
562ffbe1269..17b88f7d97f 100644 --- a/pkg/services/alerting/notifiers/email.go +++ b/pkg/services/alerting/notifiers/email.go @@ -52,7 +52,7 @@ func NewEmailNotifier(model *m.AlertNotification) (alerting.Notifier, error) { }) return &EmailNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Addresses: addresses, log: log.New("alerting.notifier.email"), }, nil diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go index 58e1b7bd71e..388cec79597 100644 --- a/pkg/services/alerting/notifiers/hipchat.go +++ b/pkg/services/alerting/notifiers/hipchat.go @@ -59,7 +59,7 @@ func NewHipChatNotifier(model *models.AlertNotification) (alerting.Notifier, err roomId := model.Settings.Get("roomid").MustString() return &HipChatNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, ApiKey: apikey, RoomId: roomId, @@ -125,7 +125,7 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { case models.AlertStateOK: color = "green" case models.AlertStateNoData: - color = "grey" + color = "gray" case models.AlertStateAlerting: color = "red" } diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index 92f6489106b..a8a424c87a7 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -43,7 +43,7 @@ func NewKafkaNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &KafkaNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Endpoint: endpoint, Topic: topic, log: log.New("alerting.notifier.kafka"), @@ -61,7 +61,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { state := evalContext.Rule.State - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index 4814662f3a9..9e3888b8f95 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -39,7 +39,7 @@ func NewLINENotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &LineNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Token: token, log: log.New("alerting.notifier.line"), }, nil diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index f0f5142cf05..629968b5102 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -56,7 +56,7 @@ func NewOpsGenieNotifier(model *m.AlertNotification) (alerting.Notifier, error) } return &OpsGenieNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), ApiKey: apiKey, ApiUrl: apiUrl, AutoClose: autoClose, @@ -95,7 +95,7 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err return err } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = 
customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index 02219b2203d..9f6ce3c2dc8 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -51,7 +51,7 @@ func NewPagerdutyNotifier(model *m.AlertNotification) (alerting.Notifier, error) } return &PagerdutyNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Key: key, AutoResolve: autoResolve, log: log.New("alerting.notifier.pagerduty"), @@ -76,7 +76,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { if evalContext.Rule.State == m.AlertStateOK { eventType = "resolve" } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/pushover.go b/pkg/services/alerting/notifiers/pushover.go index cbe9e16801a..55dc02c5f4a 100644 --- a/pkg/services/alerting/notifiers/pushover.go +++ b/pkg/services/alerting/notifiers/pushover.go @@ -99,7 +99,7 @@ func NewPushoverNotifier(model *m.AlertNotification) (alerting.Notifier, error) return nil, alerting.ValidationError{Reason: "API token not given"} } return &PushoverNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), UserKey: userKey, ApiToken: apiToken, Priority: priority, diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go index 9f77801d458..21d5d3d9d9e 100644 --- a/pkg/services/alerting/notifiers/sensu.go +++ b/pkg/services/alerting/notifiers/sensu.go @@ -51,7 +51,7 @@ func NewSensuNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &SensuNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, User: model.Settings.Get("username").MustString(), Source: model.Settings.Get("source").MustString(), diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index c1dadba414d..374b49ea957 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ -78,7 +78,7 @@ func NewSlackNotifier(model *m.AlertNotification) (alerting.Notifier, error) { uploadImage := model.Settings.Get("uploadImage").MustBool(true) return &SlackNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, Recipient: recipient, Mention: mention, diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 4e34e16ab51..2dad11285b4 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -33,7 +33,7 @@ func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &TeamsNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, log: log.New("alerting.notifier.teams"), }, nil @@ -74,7 +74,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } message := "" - if evalContext.Rule.State != 
m.AlertStateOK { //dont add message when going back to alert state ok. + if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok. message = evalContext.Rule.Message } @@ -96,14 +96,26 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { }, }, "text": message, - "potentialAction": []map[string]interface{}{ + }, + }, + "potentialAction": []map[string]interface{}{ + { + "@context": "http://schema.org", + "@type": "OpenUri", + "name": "View Rule", + "targets": []map[string]interface{}{ + { + "os": "default", "uri": ruleUrl, + }, + }, + }, + { + "@context": "http://schema.org", + "@type": "OpenUri", + "name": "View Graph", + "targets": []map[string]interface{}{ { - "@context": "http://schema.org", - "@type": "ViewAction", - "name": "View Rule", - "target": []string{ - ruleUrl, - }, + "os": "default", "uri": evalContext.ImagePublicUrl, }, }, }, diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index ca24c996914..6c47c92972c 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -78,7 +78,7 @@ func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error) } return &TelegramNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), BotToken: botToken, ChatID: chatId, UploadImage: uploadImage, @@ -127,7 +127,13 @@ func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.Eval var err error imageFile, err = os.Open(evalContext.ImageOnDiskPath) - defer imageFile.Close() + defer func() { + err := imageFile.Close() + if err != nil { + log.Error2("Could not close Telegram inline image.", "err", err) + } + }() + if err != nil { return nil, err } @@ -216,7 +222,7 @@ func appendIfPossible(message string, extra string, sizeLimit int) string { if len(extra)+len(message) <= sizeLimit { return message + extra } - log.Debug("Line too long for image caption.", "value", extra) + log.Debug("Line too long for image caption. 
value: %s", extra) return message } diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go index 98c8d884ad0..911323ae9d1 100644 --- a/pkg/services/alerting/notifiers/telegram_test.go +++ b/pkg/services/alerting/notifiers/telegram_test.go @@ -1,6 +1,7 @@ package notifiers import ( + "context" "testing" "github.com/grafana/grafana/pkg/components/simplejson" @@ -52,11 +53,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("generateCaption should generate a message with all pertinent details", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdef", "") So(len(caption), ShouldBeLessThanOrEqualTo, 200) @@ -68,11 +70,12 @@ func TestTelegramNotifier(t *testing.T) { Convey("When generating a message", func() { Convey("URL should be skipped if it's too long", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdefaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", @@ -85,11 +88,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Message should be trimmed if it's too long", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. 
Yes siree that's it.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", @@ -101,11 +105,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Metrics should be skipped if they don't fit", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go index e4ffffc9108..28a62fade17 100644 --- a/pkg/services/alerting/notifiers/threema.go +++ b/pkg/services/alerting/notifiers/threema.go @@ -106,7 +106,7 @@ func NewThreemaNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &ThreemaNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), GatewayID: gatewayID, RecipientID: recipientID, APISecret: apiSecret, diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index a753ca3cbf6..3093aec9957 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -51,7 +51,7 @@ func NewVictoropsNotifier(model *models.AlertNotification) (alerting.Notifier, e } return &VictoropsNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), URL: url, AutoResolve: autoResolve, log: log.New("alerting.notifier.victorops"), diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go index 4c97ed2b75e..4045e496af9 100644 --- a/pkg/services/alerting/notifiers/webhook.go +++ b/pkg/services/alerting/notifiers/webhook.go @@ -47,7 +47,7 @@ func NewWebHookNotifier(model *m.AlertNotification) (alerting.Notifier, error) { } return &WebhookNotifier{ - NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + NotifierBase: NewNotifierBase(model), Url: url, User: model.Settings.Get("username").MustString(), Password: model.Settings.Get("password").MustString(), diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index c57b28c7c3e..420ffeb9a55 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -67,6 +67,12 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } handler.log.Error("Failed to save state", "error", err) + } else { + + // StateChanges is used for de duping alert notifications + // when two servers are raising. This makes sure that the server + // with the last state change always sends a notification. 
+ evalContext.Rule.StateChanges = cmd.Result.StateChanges } // save annotation @@ -89,6 +95,5 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } handler.notifier.SendIfNeeded(evalContext) - return nil } diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index 018d138dbe4..999611f15c4 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -23,6 +23,8 @@ type Rule struct { State m.AlertStateType Conditions []Condition Notifications []int64 + + StateChanges int64 } type ValidationError struct { @@ -34,13 +36,13 @@ type ValidationError struct { } func (e ValidationError) Error() string { - extraInfo := "" + extraInfo := e.Reason if e.Alertid != 0 { extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid) } if e.PanelId != 0 { - extraInfo = fmt.Sprintf("%s PanelId: %v ", extraInfo, e.PanelId) + extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelId) } if e.DashboardId != 0 { @@ -48,10 +50,10 @@ func (e ValidationError) Error() string { } if e.Err != nil { - return fmt.Sprintf("%s %s%s", e.Err.Error(), e.Reason, extraInfo) + return fmt.Sprintf("Alert validation error: %s%s", e.Err.Error(), extraInfo) } - return fmt.Sprintf("Failed to extract alert.Reason: %s %s", e.Reason, extraInfo) + return fmt.Sprintf("Alert validation error: %s", extraInfo) } var ( @@ -100,6 +102,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { model.State = ruleDef.State model.NoDataState = m.NoDataOption(ruleDef.Settings.Get("noDataState").MustString("no_data")) model.ExecutionErrorState = m.ExecutionErrorOption(ruleDef.Settings.Get("executionErrorState").MustString("alerting")) + model.StateChanges = ruleDef.StateChanges for _, v := range ruleDef.Settings.Get("notifications").MustArray() { jsonModel := simplejson.NewFromAny(v) @@ -125,7 +128,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { } if len(model.Conditions) == 0 { - return nil, fmt.Errorf("Alert is missing conditions") + return nil, ValidationError{Reason: "Alert is missing conditions"} } return model, nil diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 8421360b5ed..8aa1b80aa22 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -39,7 +39,7 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error { return err } - return notifier.sendNotifications(createTestEvalContext(cmd), []Notifier{notifiers}) + return notifier.sendNotifications(createTestEvalContext(cmd), notifierStateSlice{{notifier: notifiers}}) } func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { diff --git a/pkg/services/alerting/ticker.go b/pkg/services/alerting/ticker.go index 5ce19b1b232..8cee2653ee9 100644 --- a/pkg/services/alerting/ticker.go +++ b/pkg/services/alerting/ticker.go @@ -37,10 +37,6 @@ func NewTicker(last time.Time, initialOffset time.Duration, c clock.Clock) *Tick return t } -func (t *Ticker) updateOffset(offset time.Duration) { - t.newOffset <- offset -} - func (t *Ticker) run() { for { next := t.last.Add(time.Duration(1) * time.Second) diff --git a/pkg/services/annotations/annotations.go b/pkg/services/annotations/annotations.go index 9b490169d3b..60a92aa897a 100644 --- a/pkg/services/annotations/annotations.go +++ b/pkg/services/annotations/annotations.go @@ -21,6 +21,7 @@ type ItemQuery struct { RegionId int64 `json:"regionId"` Tags []string `json:"tags"` Type string `json:"type"` + MatchAny bool 
`json:"matchAny"` Limit int64 `json:"limit"` } diff --git a/pkg/services/cleanup/cleanup.go b/pkg/services/cleanup/cleanup.go index 521601a358b..c15ae8ef36c 100644 --- a/pkg/services/cleanup/cleanup.go +++ b/pkg/services/cleanup/cleanup.go @@ -73,7 +73,7 @@ func (srv *CleanUpService) cleanUpTmpFiles() { } } - srv.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "keept", len(files)) + srv.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "kept", len(files)) } func (srv *CleanUpService) shouldCleanupTempFile(filemtime time.Time, now time.Time) bool { diff --git a/pkg/services/dashboards/dashboard_service.go b/pkg/services/dashboards/dashboard_service.go index 278421e6be7..8eb7f4a6e72 100644 --- a/pkg/services/dashboards/dashboard_service.go +++ b/pkg/services/dashboards/dashboard_service.go @@ -5,6 +5,7 @@ import ( "time" "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/guardian" "github.com/grafana/grafana/pkg/util" @@ -25,7 +26,9 @@ type DashboardProvisioningService interface { // NewService factory for creating a new dashboard service var NewService = func() DashboardService { - return &dashboardServiceImpl{} + return &dashboardServiceImpl{ + log: log.New("dashboard-service"), + } } // NewProvisioningService factory for creating a new dashboard provisioning service @@ -45,6 +48,7 @@ type SaveDashboardDTO struct { type dashboardServiceImpl struct { orgId int64 user *models.SignedInUser + log log.Logger } func (dr *dashboardServiceImpl) GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) { @@ -89,7 +93,7 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO, } if err := bus.Dispatch(&validateAlertsCmd); err != nil { - return nil, models.ErrDashboardContainsInvalidAlertData + return nil, err } } diff --git a/pkg/services/dashboards/dashboard_service_test.go b/pkg/services/dashboards/dashboard_service_test.go index f9d487f625c..b8300a5af8d 100644 --- a/pkg/services/dashboards/dashboard_service_test.go +++ b/pkg/services/dashboards/dashboard_service_test.go @@ -117,12 +117,12 @@ func TestDashboardService(t *testing.T) { }) bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { - return errors.New("error") + return errors.New("Alert validation error") }) dto.Dashboard = models.NewDashboard("Dash") _, err := service.SaveDashboard(dto) - So(err, ShouldEqual, models.ErrDashboardContainsInvalidAlertData) + So(err.Error(), ShouldEqual, "Alert validation error") }) }) diff --git a/pkg/services/hooks/hooks.go b/pkg/services/hooks/hooks.go new file mode 100644 index 00000000000..c51650cf6c9 --- /dev/null +++ b/pkg/services/hooks/hooks.go @@ -0,0 +1,30 @@ +package hooks + +import ( + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/registry" +) + +type IndexDataHook func(indexData *dtos.IndexViewData) + +type HooksService struct { + indexDataHooks []IndexDataHook +} + +func init() { + registry.RegisterService(&HooksService{}) +} + +func (srv *HooksService) Init() error { + return nil +} + +func (srv *HooksService) AddIndexDataHook(hook IndexDataHook) { + srv.indexDataHooks = append(srv.indexDataHooks, hook) +} + +func (srv *HooksService) RunIndexDataHooks(indexData *dtos.IndexViewData) { + for _, hook := range srv.indexDataHooks { + hook(indexData) + } +} diff --git a/pkg/services/notifications/notifications_test.go 
b/pkg/services/notifications/notifications_test.go index 504c10c22ec..d54b70e704f 100644 --- a/pkg/services/notifications/notifications_test.go +++ b/pkg/services/notifications/notifications_test.go @@ -9,12 +9,6 @@ import ( . "github.com/smartystreets/goconvey/convey" ) -type testTriggeredAlert struct { - ActualValue float64 - Name string - State string -} - func TestNotifications(t *testing.T) { Convey("Given the notifications service", t, func() { diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index 7508550838f..bfef06b558e 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -83,7 +83,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { } if dashboards[i].UpdateIntervalSeconds == 0 { - dashboards[i].UpdateIntervalSeconds = 3 + dashboards[i].UpdateIntervalSeconds = 10 } } diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index df0d2ae038e..d386e42349d 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -70,7 +70,7 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds.Options), ShouldEqual, 1) So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds.DisableDeletion, ShouldBeTrue) - So(ds.UpdateIntervalSeconds, ShouldEqual, 10) + So(ds.UpdateIntervalSeconds, ShouldEqual, 15) ds2 := cfg[1] So(ds2.Name, ShouldEqual, "default") @@ -81,5 +81,5 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds2.Options), ShouldEqual, 1) So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds2.DisableDeletion, ShouldBeFalse) - So(ds2.UpdateIntervalSeconds, ShouldEqual, 3) + So(ds2.UpdateIntervalSeconds, ShouldEqual, 10) } diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index ef27ba97235..ea093860f3e 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -43,26 +43,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade log.Warn("[Deprecated] The folder property is deprecated. 
Please use path instead.") } - if _, err := os.Stat(path); os.IsNotExist(err) { - log.Error("Cannot read directory", "error", err) - } - - copy := path - path, err := filepath.Abs(path) - if err != nil { - log.Error("Could not create absolute path ", "path", path) - } - - path, err = filepath.EvalSymlinks(path) - if err != nil { - log.Error("Failed to read content of symlinked path: %s", path) - } - - if path == "" { - path = copy - log.Info("falling back to original path due to EvalSymlink/Abs failure") - } - return &fileReader{ Cfg: cfg, Path: path, @@ -99,7 +79,8 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error { } func (fr *fileReader) startWalkingDisk() error { - if _, err := os.Stat(fr.Path); err != nil { + resolvedPath := fr.resolvePath(fr.Path) + if _, err := os.Stat(resolvedPath); err != nil { if os.IsNotExist(err) { return err } @@ -116,7 +97,7 @@ func (fr *fileReader) startWalkingDisk() error { } filesFoundOnDisk := map[string]os.FileInfo{} - err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk)) + err = filepath.Walk(resolvedPath, createWalkFn(filesFoundOnDisk)) if err != nil { return err } @@ -156,7 +137,7 @@ func (fr *fileReader) deleteDashboardIfFileIsMissing(provisionedDashboardRefs ma cmd := &models.DeleteDashboardCommand{OrgId: fr.Cfg.OrgId, Id: dashboardId} err := bus.Dispatch(cmd) if err != nil { - fr.log.Error("failed to delete dashboard", "id", cmd.Id) + fr.log.Error("failed to delete dashboard", "id", cmd.Id, "error", err) } } } @@ -344,6 +325,29 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, }, nil } +func (fr *fileReader) resolvePath(path string) string { + if _, err := os.Stat(path); os.IsNotExist(err) { + fr.log.Error("Cannot read directory", "error", err) + } + + copy := path + path, err := filepath.Abs(path) + if err != nil { + fr.log.Error("Could not create absolute path ", "path", path) + } + + path, err = filepath.EvalSymlinks(path) + if err != nil { + fr.log.Error("Failed to read content of symlinked path: %s", path) + } + + if path == "" { + path = copy + fr.log.Info("falling back to original path due to EvalSymlink/Abs failure") + } + return path +} + type provisioningMetadata struct { uid string title string diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go index 9d4cdae8609..77f488ebcfb 100644 --- a/pkg/services/provisioning/dashboards/file_reader_linux_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go @@ -30,10 +30,11 @@ func TestProvsionedSymlinkedFolder(t *testing.T) { want, err := filepath.Abs(containingId) if err != nil { - t.Errorf("expected err to be nill") + t.Errorf("expected err to be nil") } - if reader.Path != want { - t.Errorf("got %s want %s", reader.Path, want) + resolvedPath := reader.resolvePath(reader.Path) + if resolvedPath != want { + t.Errorf("got %s want %s", resolvedPath, want) } } diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index bdc1e95aafe..fe849816553 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -67,7 +67,8 @@ func TestCreatingNewDashboardFileReader(t *testing.T) { reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) So(err, ShouldBeNil) - So(filepath.IsAbs(reader.Path), ShouldBeTrue) + resolvedPath := reader.resolvePath(reader.Path) + 
So(filepath.IsAbs(resolvedPath), ShouldBeTrue) }) }) } diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml index e26c329f87c..c43c4a14c53 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -6,7 +6,7 @@ providers: folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml index 69a317fb396..8b7b8991759 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml @@ -3,7 +3,7 @@ folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml index 1bb9cb53b45..b532c9012ec 100644 --- a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml +++ b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml @@ -4,7 +4,7 @@ # org_id: 1 # # list of datasources to insert/update depending -# # whats available in the datbase +# # what's available in the database #datasources: # # name of the datasource. 
Required # - name: Graphite diff --git a/pkg/services/rendering/http_mode.go b/pkg/services/rendering/http_mode.go index 9084ca27353..40259c44746 100644 --- a/pkg/services/rendering/http_mode.go +++ b/pkg/services/rendering/http_mode.go @@ -2,6 +2,7 @@ package rendering import ( "context" + "fmt" "io" "net" "net/http" @@ -20,14 +21,13 @@ var netTransport = &http.Transport{ TLSHandshakeTimeout: 5 * time.Second, } +var netClient = &http.Client{ + Transport: netTransport, +} + func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*RenderResult, error) { filePath := rs.getFilePathForNewImage() - var netClient = &http.Client{ - Timeout: opts.Timeout, - Transport: netTransport, - } - rendererUrl, err := url.Parse(rs.Cfg.RendererUrl) if err != nil { return nil, err @@ -35,10 +35,10 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend queryParams := rendererUrl.Query() queryParams.Add("url", rs.getURL(opts.Path)) - queryParams.Add("renderKey", rs.getRenderKey(opts.UserId, opts.OrgId, opts.OrgRole)) + queryParams.Add("renderKey", rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole)) queryParams.Add("width", strconv.Itoa(opts.Width)) queryParams.Add("height", strconv.Itoa(opts.Height)) - queryParams.Add("domain", rs.getLocalDomain()) + queryParams.Add("domain", rs.domain) queryParams.Add("timezone", isoTimeOffsetToPosixTz(opts.Timezone)) queryParams.Add("encoding", opts.Encoding) queryParams.Add("timeout", strconv.Itoa(int(opts.Timeout.Seconds()))) @@ -49,20 +49,48 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend return nil, err } + reqContext, cancel := context.WithTimeout(ctx, opts.Timeout+time.Second*2) + defer cancel() + + req = req.WithContext(reqContext) + // make request to renderer server resp, err := netClient.Do(req) if err != nil { - return nil, err + rs.log.Error("Failed to send request to remote rendering service.", "error", err) + return nil, fmt.Errorf("Failed to send request to remote rendering service. %s", err) } // save response to file defer resp.Body.Close() + + // check for timeout first + if reqContext.Err() == context.DeadlineExceeded { + rs.log.Info("Rendering timed out") + return nil, ErrTimeout + } + + // if we didn't get a 200 response, something went wrong. + if resp.StatusCode != http.StatusOK { + rs.log.Error("Remote rendering request failed", "error", resp.Status) + return nil, fmt.Errorf("Remote rendering request failed. %d: %s", resp.StatusCode, resp.Status) + } + out, err := os.Create(filePath) if err != nil { return nil, err } defer out.Close() - io.Copy(out, resp.Body) + _, err = io.Copy(out, resp.Body) + if err != nil { + // check that we didn't timeout while receiving the response. + if reqContext.Err() == context.DeadlineExceeded { + rs.log.Info("Rendering timed out") + return nil, ErrTimeout + } + rs.log.Error("Remote rendering request failed", "error", err) + return nil, fmt.Errorf("Remote rendering request failed. 
%s", err) + } return &RenderResult{FilePath: filePath}, err } diff --git a/pkg/services/rendering/interface.go b/pkg/services/rendering/interface.go index 85c139cfc04..39cb1ada0f5 100644 --- a/pkg/services/rendering/interface.go +++ b/pkg/services/rendering/interface.go @@ -13,15 +13,16 @@ var ErrNoRenderer = errors.New("No renderer plugin found nor is an external rend var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found") type Opts struct { - Width int - Height int - Timeout time.Duration - OrgId int64 - UserId int64 - OrgRole models.RoleType - Path string - Encoding string - Timezone string + Width int + Height int + Timeout time.Duration + OrgId int64 + UserId int64 + OrgRole models.RoleType + Path string + Encoding string + Timezone string + ConcurrentLimit int } type RenderResult struct { diff --git a/pkg/services/rendering/phantomjs.go b/pkg/services/rendering/phantomjs.go index 87ccaf6b5d2..1bd7489c153 100644 --- a/pkg/services/rendering/phantomjs.go +++ b/pkg/services/rendering/phantomjs.go @@ -49,7 +49,7 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) ( fmt.Sprintf("width=%v", opts.Width), fmt.Sprintf("height=%v", opts.Height), fmt.Sprintf("png=%v", pngPath), - fmt.Sprintf("domain=%v", rs.getLocalDomain()), + fmt.Sprintf("domain=%v", rs.domain), fmt.Sprintf("timeout=%v", opts.Timeout.Seconds()), fmt.Sprintf("renderKey=%v", renderKey), } diff --git a/pkg/services/rendering/plugin_mode.go b/pkg/services/rendering/plugin_mode.go index 550779ad7c3..58fef2b095f 100644 --- a/pkg/services/rendering/plugin_mode.go +++ b/pkg/services/rendering/plugin_mode.go @@ -77,10 +77,10 @@ func (rs *RenderingService) renderViaPlugin(ctx context.Context, opts Opts) (*Re Height: int32(opts.Height), FilePath: pngPath, Timeout: int32(opts.Timeout.Seconds()), - RenderKey: rs.getRenderKey(opts.UserId, opts.OrgId, opts.OrgRole), + RenderKey: rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole), Encoding: opts.Encoding, Timezone: isoTimeOffsetToPosixTz(opts.Timezone), - Domain: rs.getLocalDomain(), + Domain: rs.domain, }) if err != nil { diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go index 799aecc3e88..0b4f23e93b4 100644 --- a/pkg/services/rendering/rendering.go +++ b/pkg/services/rendering/rendering.go @@ -3,6 +3,8 @@ package rendering import ( "context" "fmt" + "net/url" + "os" "path/filepath" plugin "github.com/hashicorp/go-plugin" @@ -22,17 +24,37 @@ func init() { } type RenderingService struct { - log log.Logger - pluginClient *plugin.Client - grpcPlugin pluginModel.RendererPlugin - pluginInfo *plugins.RendererPlugin - renderAction renderFunc + log log.Logger + pluginClient *plugin.Client + grpcPlugin pluginModel.RendererPlugin + pluginInfo *plugins.RendererPlugin + renderAction renderFunc + domain string + inProgressCount int Cfg *setting.Cfg `inject:""` } func (rs *RenderingService) Init() error { rs.log = log.New("rendering") + + // ensure ImagesDir exists + err := os.MkdirAll(rs.Cfg.ImagesDir, 0700) + if err != nil { + return err + } + + // set value used for domain attribute of renderKey cookie + if rs.Cfg.RendererUrl != "" { + // RendererCallbackUrl has already been passed, it won't generate an error. 
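As an aside on the renderViaHttp changes above: a single context deadline bounds the connect, the wait for headers, and the body copy, and context.DeadlineExceeded is checked at each stage so a slow renderer surfaces as a distinct timeout error rather than a generic I/O error. Below is a minimal, self-contained sketch of that shape using only the standard library; the fetchToFile and errTimeout names are illustrative stand-ins, while the two-second grace period mirrors opts.Timeout+time.Second*2 in the diff.

```go
package example

import (
	"context"
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"time"
)

var errTimeout = errors.New("rendering timed out")

// fetchToFile mirrors the shape of renderViaHttp above: one context
// deadline bounds the dial, the wait for headers, and the body copy,
// and context.DeadlineExceeded is mapped to a distinct timeout error.
func fetchToFile(ctx context.Context, rawURL, filePath string, timeout time.Duration) error {
	// Allow a small grace period beyond the renderer's own timeout,
	// as the diff does with opts.Timeout+time.Second*2.
	reqCtx, cancel := context.WithTimeout(ctx, timeout+2*time.Second)
	defer cancel()

	req, err := http.NewRequest("GET", rawURL, nil)
	if err != nil {
		return err
	}

	resp, err := http.DefaultClient.Do(req.WithContext(reqCtx))
	if err != nil {
		return fmt.Errorf("request failed: %v", err)
	}
	defer resp.Body.Close()

	// The deadline may have expired while waiting for headers.
	if reqCtx.Err() == context.DeadlineExceeded {
		return errTimeout
	}
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %d: %s", resp.StatusCode, resp.Status)
	}

	out, err := os.Create(filePath)
	if err != nil {
		return err
	}
	defer out.Close()

	if _, err := io.Copy(out, resp.Body); err != nil {
		// Distinguish a mid-body timeout from an ordinary copy error.
		if reqCtx.Err() == context.DeadlineExceeded {
			return errTimeout
		}
		return err
	}
	return nil
}
```

The hunk continues below with the domain fallback that the renderKey cookie comment introduces.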
+ u, _ := url.Parse(rs.Cfg.RendererCallbackUrl) + rs.domain = u.Hostname() + } else if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR { + rs.domain = setting.HttpAddr + } else { + rs.domain = "localhost" + } + return nil } @@ -69,6 +91,18 @@ func (rs *RenderingService) Run(ctx context.Context) error { } func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) { + if rs.inProgressCount > opts.ConcurrentLimit { + return &RenderResult{ + FilePath: filepath.Join(setting.HomePath, "public/img/rendering_limit.png"), + }, nil + } + + defer func() { + rs.inProgressCount -= 1 + }() + + rs.inProgressCount += 1 + if rs.renderAction != nil { return rs.renderAction(ctx, opts) } else { @@ -82,16 +116,17 @@ func (rs *RenderingService) getFilePathForNewImage() string { } func (rs *RenderingService) getURL(path string) string { - // &render=1 signals to the legacy redirect layer to - return fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, rs.getLocalDomain(), setting.HttpPort, path) -} + if rs.Cfg.RendererUrl != "" { + // The backend rendering service can potentially be remote. + // So we need to use the root_url to ensure the rendering service + // can reach this Grafana instance. -func (rs *RenderingService) getLocalDomain() string { - if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR { - return setting.HttpAddr - } + // &render=1 signals to the legacy redirect layer to + return fmt.Sprintf("%s%s&render=1", rs.Cfg.RendererCallbackUrl, path) - return "localhost" + } + // &render=1 signals to the legacy redirect layer to + return fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, rs.domain, setting.HttpPort, path) } func (rs *RenderingService) getRenderKey(orgId, userId int64, orgRole models.RoleType) string { diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index af911dc22e6..2f17402b80c 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -40,7 +40,7 @@ func GetAlertById(query *m.GetAlertByIdQuery) error { func GetAllAlertQueryHandler(query *m.GetAllAlertsQuery) error { var alerts []*m.Alert - err := x.Sql("select * from alert").Find(&alerts) + err := x.SQL("select * from alert").Find(&alerts) if err != nil { return err } @@ -60,6 +60,10 @@ func deleteAlertByIdInternal(alertId int64, reason string, sess *DBSession) erro return err } + if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_id = ?", alertId); err != nil { + return err + } + return nil } @@ -190,7 +194,7 @@ func updateAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *DBS alert.Updated = timeNow() alert.State = alertToUpdate.State sess.MustCols("message") - _, err := sess.Id(alert.Id).Update(alert) + _, err := sess.ID(alert.Id).Update(alert) if err != nil { return err } @@ -249,7 +253,7 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { return inTransaction(func(sess *DBSession) error { alert := m.Alert{} - if has, err := sess.Id(cmd.AlertId).Get(&alert); err != nil { + if has, err := sess.ID(cmd.AlertId).Get(&alert); err != nil { return err } else if !has { return fmt.Errorf("Could not find alert") @@ -275,6 +279,8 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { } sess.ID(alert.Id).Update(&alert) + + cmd.Result = alert return nil }) } diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index 651241f7714..afe6269510f 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -2,6 +2,8 @@ 
package sqlstore import ( "bytes" + "context" + "errors" "fmt" "strings" "time" @@ -17,13 +19,23 @@ func init() { bus.AddHandler("sql", DeleteAlertNotification) bus.AddHandler("sql", GetAlertNotificationsToSend) bus.AddHandler("sql", GetAllAlertNotifications) + bus.AddHandlerCtx("sql", GetOrCreateAlertNotificationState) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToCompleteCommand) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToPendingCommand) } func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error { return inTransaction(func(sess *DBSession) error { sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?" - _, err := sess.Exec(sql, cmd.OrgId, cmd.Id) - return err + if _, err := sess.Exec(sql, cmd.OrgId, cmd.Id); err != nil { + return err + } + + if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_notification_state.org_id = ? AND alert_notification_state.notifier_id = ?", cmd.OrgId, cmd.Id); err != nil { + return err + } + + return nil }) } @@ -53,7 +65,10 @@ func GetAlertNotificationsToSend(query *m.GetAlertNotificationsToSendQuery) erro alert_notification.created, alert_notification.updated, alert_notification.settings, - alert_notification.is_default + alert_notification.is_default, + alert_notification.disable_resolve_message, + alert_notification.send_reminder, + alert_notification.frequency FROM alert_notification `) @@ -91,7 +106,10 @@ func getAlertNotificationInternal(query *m.GetAlertNotificationsQuery, sess *DBS alert_notification.created, alert_notification.updated, alert_notification.settings, - alert_notification.is_default + alert_notification.is_default, + alert_notification.disable_resolve_message, + alert_notification.send_reminder, + alert_notification.frequency FROM alert_notification `) @@ -111,7 +129,7 @@ func getAlertNotificationInternal(query *m.GetAlertNotificationsQuery, sess *DBS } results := make([]*m.AlertNotification, 0) - if err := sess.Sql(sql.String(), params...).Find(&results); err != nil { + if err := sess.SQL(sql.String(), params...).Find(&results); err != nil { return err } @@ -137,17 +155,32 @@ func CreateAlertNotificationCommand(cmd *m.CreateAlertNotificationCommand) error return fmt.Errorf("Alert notification name %s already exists", cmd.Name) } + var frequency time.Duration + if cmd.SendReminder { + if cmd.Frequency == "" { + return m.ErrNotificationFrequencyNotFound + } + + frequency, err = time.ParseDuration(cmd.Frequency) + if err != nil { + return err + } + } + alertNotification := &m.AlertNotification{ - OrgId: cmd.OrgId, - Name: cmd.Name, - Type: cmd.Type, - Settings: cmd.Settings, - Created: time.Now(), - Updated: time.Now(), - IsDefault: cmd.IsDefault, + OrgId: cmd.OrgId, + Name: cmd.Name, + Type: cmd.Type, + Settings: cmd.Settings, + SendReminder: cmd.SendReminder, + DisableResolveMessage: cmd.DisableResolveMessage, + Frequency: frequency, + Created: time.Now(), + Updated: time.Now(), + IsDefault: cmd.IsDefault, } - if _, err = sess.Insert(alertNotification); err != nil { + if _, err = sess.MustCols("send_reminder").Insert(alertNotification); err != nil { return err } @@ -179,16 +212,152 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { current.Name = cmd.Name current.Type = cmd.Type current.IsDefault = cmd.IsDefault + current.SendReminder = cmd.SendReminder + current.DisableResolveMessage = cmd.DisableResolveMessage + + if current.SendReminder { + if cmd.Frequency == "" { + return 
m.ErrNotificationFrequencyNotFound + } - sess.UseBool("is_default") + frequency, err := time.ParseDuration(cmd.Frequency) + if err != nil { + return err + } + + current.Frequency = frequency + } + + sess.UseBool("is_default", "send_reminder", "disable_resolve_message") if affected, err := sess.ID(cmd.Id).Update(current); err != nil { return err } else if affected == 0 { - return fmt.Errorf("Could not find alert notification") + return fmt.Errorf("Could not update alert notification") } cmd.Result = &current return nil }) } + +func SetAlertNotificationStateToCompleteCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToCompleteCommand) error { + return inTransactionCtx(ctx, func(sess *DBSession) error { + version := cmd.Version + var current m.AlertNotificationState + sess.ID(cmd.Id).Get(&current) + + newVersion := cmd.Version + 1 + + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ? + WHERE + id = ?` + + _, err := sess.Exec(sql, m.AlertNotificationStateCompleted, newVersion, timeNow().Unix(), cmd.Id) + + if err != nil { + return err + } + + if current.Version != version { + sqlog.Error("notification state out of sync. the notification is marked as complete but has been modified between set as pending and completion.", "notifierId", current.NotifierId) + } + + return nil + }) +} + +func SetAlertNotificationStateToPendingCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToPendingCommand) error { + return withDbSession(ctx, func(sess *DBSession) error { + newVersion := cmd.Version + 1 + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ?, + alert_rule_state_updated_version = ? + WHERE + id = ? AND + (version = ? OR alert_rule_state_updated_version < ?)` + + res, err := sess.Exec(sql, + m.AlertNotificationStatePending, + newVersion, + timeNow().Unix(), + cmd.AlertRuleStateUpdatedVersion, + cmd.Id, + cmd.Version, + cmd.AlertRuleStateUpdatedVersion) + + if err != nil { + return err + } + + affected, _ := res.RowsAffected() + if affected == 0 { + return m.ErrAlertNotificationStateVersionConflict + } + + cmd.ResultVersion = newVersion + + return nil + }) +} + +func GetOrCreateAlertNotificationState(ctx context.Context, cmd *m.GetOrCreateNotificationStateQuery) error { + return inTransactionCtx(ctx, func(sess *DBSession) error { + nj := &m.AlertNotificationState{} + + exist, err := getAlertNotificationState(sess, cmd, nj) + + // if exists, return it, otherwise create it with default values + if err != nil { + return err + } + + if exist { + cmd.Result = nj + return nil + } + + notificationState := &m.AlertNotificationState{ + OrgId: cmd.OrgId, + AlertId: cmd.AlertId, + NotifierId: cmd.NotifierId, + State: m.AlertNotificationStateUnknown, + UpdatedAt: timeNow().Unix(), + } + + if _, err := sess.Insert(notificationState); err != nil { + if dialect.IsUniqueConstraintViolation(err) { + exist, err = getAlertNotificationState(sess, cmd, nj) + + if err != nil { + return err + } + + if !exist { + return errors.New("Should not happen") + } + + cmd.Result = nj + return nil + } + + return err + } + + cmd.Result = notificationState + return nil + }) +} + +func getAlertNotificationState(sess *DBSession, cmd *m.GetOrCreateNotificationStateQuery, nj *m.AlertNotificationState) (bool, error) { + return sess. + Where("alert_notification_state.org_id = ?", cmd.OrgId). + Where("alert_notification_state.alert_id = ?", cmd.AlertId). + Where("alert_notification_state.notifier_id = ?", cmd.NotifierId).
+ Get(nj) +} diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go index 2dbf9de5ca8..629a6292eb5 100644 --- a/pkg/services/sqlstore/alert_notification_test.go +++ b/pkg/services/sqlstore/alert_notification_test.go @@ -1,20 +1,146 @@ package sqlstore import ( + "context" "testing" + "time" "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" . "github.com/smartystreets/goconvey/convey" ) func TestAlertNotificationSQLAccess(t *testing.T) { Convey("Testing Alert notification sql access", t, func() { InitTestDB(t) - var err error + + Convey("Alert notification state", func() { + var alertID int64 = 7 + var orgID int64 = 5 + var notifierID int64 = 10 + oldTimeNow := timeNow + now := time.Date(2018, 9, 30, 0, 0, 0, 0, time.UTC) + timeNow = func() time.Time { return now } + + Convey("Get no existing state should create a new state", func() { + query := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query) + So(err, ShouldBeNil) + So(query.Result, ShouldNotBeNil) + So(query.Result.State, ShouldEqual, "unknown") + So(query.Result.Version, ShouldEqual, 0) + So(query.Result.UpdatedAt, ShouldEqual, now.Unix()) + + Convey("Get existing state should not create a new state", func() { + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query2) + So(err, ShouldBeNil) + So(query2.Result, ShouldNotBeNil) + So(query2.Result.Id, ShouldEqual, query.Result.Id) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) + + Convey("Update existing state to pending with correct version should update database", func() { + s := *query.Result + + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + So(cmd.ResultVersion, ShouldEqual, 1) + + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query2) + So(err, ShouldBeNil) + So(query2.Result.Version, ShouldEqual, 1) + So(query2.Result.State, ShouldEqual, models.AlertNotificationStatePending) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + + Convey("Update existing state to completed should update database", func() { + s := *query.Result + setStateCmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: cmd.ResultVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &setStateCmd) + So(err, ShouldBeNil) + + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, 2) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) + + Convey("Update existing state to completed should update database. 
regardless of version", func() { + s := *query.Result + unknownVersion := int64(1000) + cmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: unknownVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, unknownVersion+1) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) + }) + + Convey("Update existing state to pending with incorrect version should return version mismatch error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.NotifierId, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldEqual, models.ErrAlertNotificationStateVersionConflict) + }) + + Convey("Updating existing state to pending with incorrect version since alert rule state update version is higher", func() { + s := *query.Result + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: 1000, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + So(cmd.ResultVersion, ShouldEqual, 1) + }) + + Convey("different version and same alert state change version should return error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldNotBeNil) + }) + }) + + Reset(func() { + timeNow = oldTimeNow + }) + }) Convey("Alert notifications should be empty", func() { - cmd := &m.GetAlertNotificationsQuery{ + cmd := &models.GetAlertNotificationsQuery{ OrgId: 2, Name: "email", } @@ -24,19 +150,76 @@ func TestAlertNotificationSQLAccess(t *testing.T) { So(cmd.Result, ShouldBeNil) }) + Convey("Cannot save alert notifier with send reminder = true", func() { + cmd := &models.CreateAlertNotificationCommand{ + Name: "ops", + Type: "email", + OrgId: 1, + SendReminder: true, + Settings: simplejson.New(), + } + + Convey("and missing frequency", func() { + err := CreateAlertNotificationCommand(cmd) + So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) + }) + + Convey("invalid frequency", func() { + cmd.Frequency = "invalid duration" + + err := CreateAlertNotificationCommand(cmd) + So(err.Error(), ShouldEqual, "time: invalid duration invalid duration") + }) + }) + + Convey("Cannot update alert notifier with send reminder = false", func() { + cmd := &models.CreateAlertNotificationCommand{ + Name: "ops update", + Type: "email", + OrgId: 1, + SendReminder: false, + Settings: simplejson.New(), + } + + err := CreateAlertNotificationCommand(cmd) + So(err, ShouldBeNil) + + updateCmd := &models.UpdateAlertNotificationCommand{ + Id: cmd.Result.Id, + SendReminder: true, + } + + Convey("and missing frequency", func() { + err := UpdateAlertNotification(updateCmd) + So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) + }) + + Convey("invalid frequency", func() { + 
updateCmd.Frequency = "invalid duration" + + err := UpdateAlertNotification(updateCmd) + So(err, ShouldNotBeNil) + So(err.Error(), ShouldEqual, "time: invalid duration invalid duration") + }) + }) + Convey("Can save Alert Notification", func() { - cmd := &m.CreateAlertNotificationCommand{ - Name: "ops", - Type: "email", - OrgId: 1, - Settings: simplejson.New(), + cmd := &models.CreateAlertNotificationCommand{ + Name: "ops", + Type: "email", + OrgId: 1, + SendReminder: true, + Frequency: "10s", + Settings: simplejson.New(), } - err = CreateAlertNotificationCommand(cmd) + err := CreateAlertNotificationCommand(cmd) So(err, ShouldBeNil) So(cmd.Result.Id, ShouldNotEqual, 0) So(cmd.Result.OrgId, ShouldNotEqual, 0) So(cmd.Result.Type, ShouldEqual, "email") + So(cmd.Result.Frequency, ShouldEqual, 10*time.Second) + So(cmd.Result.DisableResolveMessage, ShouldBeFalse) Convey("Cannot save Alert Notification with the same name", func() { err = CreateAlertNotificationCommand(cmd) @@ -44,26 +227,45 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can update alert notification", func() { - newCmd := &m.UpdateAlertNotificationCommand{ - Name: "NewName", - Type: "webhook", - OrgId: cmd.Result.OrgId, - Settings: simplejson.New(), - Id: cmd.Result.Id, + newCmd := &models.UpdateAlertNotificationCommand{ + Name: "NewName", + Type: "webhook", + OrgId: cmd.Result.OrgId, + SendReminder: true, + DisableResolveMessage: true, + Frequency: "60s", + Settings: simplejson.New(), + Id: cmd.Result.Id, } err := UpdateAlertNotification(newCmd) So(err, ShouldBeNil) So(newCmd.Result.Name, ShouldEqual, "NewName") + So(newCmd.Result.Frequency, ShouldEqual, 60*time.Second) + So(newCmd.Result.DisableResolveMessage, ShouldBeTrue) + }) + + Convey("Can update alert notification to disable sending of reminders", func() { + newCmd := &models.UpdateAlertNotificationCommand{ + Name: "NewName", + Type: "webhook", + OrgId: cmd.Result.OrgId, + SendReminder: false, + Settings: simplejson.New(), + Id: cmd.Result.Id, + } + err := UpdateAlertNotification(newCmd) + So(err, ShouldBeNil) + So(newCmd.Result.SendReminder, ShouldBeFalse) }) }) Convey("Can search using an array of ids", func() { - cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, Settings: simplejson.New()} - cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, Settings: simplejson.New()} - cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, Settings: simplejson.New()} - cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, Settings: simplejson.New()} + cmd1 := models.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd2 := models.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd3 := models.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd4 := models.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, Settings: simplejson.New()} + otherOrg := models.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: 
simplejson.New()} So(CreateAlertNotificationCommand(&cmd1), ShouldBeNil) So(CreateAlertNotificationCommand(&cmd2), ShouldBeNil) @@ -72,7 +274,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { So(CreateAlertNotificationCommand(&otherOrg), ShouldBeNil) Convey("search", func() { - query := &m.GetAlertNotificationsToSendQuery{ + query := &models.GetAlertNotificationsToSendQuery{ Ids: []int64{cmd1.Result.Id, cmd2.Result.Id, 112341231}, OrgId: 1, } @@ -83,7 +285,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("all", func() { - query := &m.GetAllAlertNotificationsQuery{ + query := &models.GetAllAlertNotificationsQuery{ OrgId: 1, } diff --git a/pkg/services/sqlstore/annotation.go b/pkg/services/sqlstore/annotation.go index a65bc136554..274481baeca 100644 --- a/pkg/services/sqlstore/annotation.go +++ b/pkg/services/sqlstore/annotation.go @@ -110,7 +110,7 @@ func (r *SqlAnnotationRepo) Update(item *annotations.Item) error { existing.Tags = item.Tags - _, err = sess.Table("annotation").Id(existing.Id).Cols("epoch", "text", "region_id", "updated", "tags").Update(existing) + _, err = sess.Table("annotation").ID(existing.Id).Cols("epoch", "text", "region_id", "updated", "tags").Update(existing) return err }) } @@ -211,7 +211,12 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I ) `, strings.Join(keyValueFilters, " OR ")) - sql.WriteString(fmt.Sprintf(" AND (%s) = %d ", tagsSubQuery, len(tags))) + if query.MatchAny { + sql.WriteString(fmt.Sprintf(" AND (%s) > 0 ", tagsSubQuery)) + } else { + sql.WriteString(fmt.Sprintf(" AND (%s) = %d ", tagsSubQuery, len(tags))) + } + } } @@ -223,7 +228,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I items := make([]*annotations.ItemDTO, 0) - if err := x.Sql(sql.String(), params...).Find(&items); err != nil { + if err := x.SQL(sql.String(), params...).Find(&items); err != nil { return nil, err } diff --git a/pkg/services/sqlstore/annotation_test.go b/pkg/services/sqlstore/annotation_test.go index c0d267f2578..d3459527e7d 100644 --- a/pkg/services/sqlstore/annotation_test.go +++ b/pkg/services/sqlstore/annotation_test.go @@ -78,7 +78,31 @@ func TestAnnotations(t *testing.T) { So(err, ShouldBeNil) So(annotation2.Id, ShouldBeGreaterThan, 0) - Convey("Can query for annotation", func() { + globalAnnotation1 := &annotations.Item{ + OrgId: 1, + UserId: 1, + Text: "deploy", + Type: "", + Epoch: 15, + Tags: []string{"deploy"}, + } + err = repo.Save(globalAnnotation1) + So(err, ShouldBeNil) + So(globalAnnotation1.Id, ShouldBeGreaterThan, 0) + + globalAnnotation2 := &annotations.Item{ + OrgId: 1, + UserId: 1, + Text: "rollback", + Type: "", + Epoch: 17, + Tags: []string{"rollback"}, + } + err = repo.Save(globalAnnotation2) + So(err, ShouldBeNil) + So(globalAnnotation2.Id, ShouldBeGreaterThan, 0) + + Convey("Can query for annotation by dashboard id", func() { items, err := repo.Find(&annotations.ItemQuery{ OrgId: 1, DashboardId: 1, @@ -165,7 +189,7 @@ func TestAnnotations(t *testing.T) { OrgId: 1, DashboardId: 1, From: 1, - To: 15, + To: 15, //this will exclude the second test annotation Tags: []string{"outage", "error"}, }) @@ -173,6 +197,19 @@ func TestAnnotations(t *testing.T) { So(items, ShouldHaveLength, 1) }) + Convey("Should find two annotations using partial match", func() { + items, err := repo.Find(&annotations.ItemQuery{ + OrgId: 1, + From: 1, + To: 25, + MatchAny: true, + Tags: []string{"rollback", "deploy"}, + }) + + So(err, ShouldBeNil) + So(items, ShouldHaveLength, 2) + 
}) + Convey("Should find one when all key value tag filters does match", func() { items, err := repo.Find(&annotations.ItemQuery{ OrgId: 1, diff --git a/pkg/services/sqlstore/dashboard.go b/pkg/services/sqlstore/dashboard.go index aff532bb3b5..e43279208e7 100644 --- a/pkg/services/sqlstore/dashboard.go +++ b/pkg/services/sqlstore/dashboard.go @@ -225,7 +225,7 @@ func findDashboards(query *search.FindPersistedDashboardsQuery) ([]DashboardSear var res []DashboardSearchProjection sql, params := sb.ToSql() - err := x.Sql(sql, params...).Find(&res) + err := x.SQL(sql, params...).Find(&res) if err != nil { return nil, err } @@ -295,10 +295,11 @@ func GetDashboardTags(query *m.GetDashboardTagsQuery) error { FROM dashboard INNER JOIN dashboard_tag on dashboard_tag.dashboard_id = dashboard.id WHERE dashboard.org_id=? - GROUP BY term` + GROUP BY term + ORDER BY term` query.Result = make([]*m.DashboardTagCloudItem, 0) - sess := x.Sql(sql, query.OrgId) + sess := x.SQL(sql, query.OrgId) err := sess.Find(&query.Result) return err } @@ -412,7 +413,7 @@ func GetDashboardPermissionsForUser(query *m.GetDashboardPermissionsForUserQuery params = append(params, query.UserId) params = append(params, dialect.BooleanStr(false)) - err := x.Sql(sql, params...).Find(&query.Result) + err := x.SQL(sql, params...).Find(&query.Result) for _, p := range query.Result { p.PermissionName = p.Permission.String() @@ -631,7 +632,7 @@ func HasEditPermissionInFolders(query *m.HasEditPermissionInFoldersQuery) error } resp := make([]*folderCount, 0) - if err := x.Sql(builder.GetSqlString(), builder.params...).Find(&resp); err != nil { + if err := x.SQL(builder.GetSqlString(), builder.params...).Find(&resp); err != nil { return err } diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go index a9658f7ab76..a4e76aca340 100644 --- a/pkg/services/sqlstore/dashboard_service_integration_test.go +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -932,29 +932,6 @@ func TestIntegratedDashboardService(t *testing.T) { }) } -type scenarioContext struct { - dashboardGuardianMock *guardian.FakeDashboardGuardian -} - -type scenarioFunc func(c *scenarioContext) - -func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { - Convey(desc, func() { - origNewDashboardGuardian := guardian.New - guardian.MockDashboardGuardian(mock) - - sc := &scenarioContext{ - dashboardGuardianMock: mock, - } - - defer func() { - guardian.New = origNewDashboardGuardian - }() - - fn(sc) - }) -} - type dashboardPermissionScenarioContext struct { dashboardGuardianMock *guardian.FakeDashboardGuardian } diff --git a/pkg/services/sqlstore/datasource.go b/pkg/services/sqlstore/datasource.go index 00d520bcfc6..7f70e5c25fc 100644 --- a/pkg/services/sqlstore/datasource.go +++ b/pkg/services/sqlstore/datasource.go @@ -27,6 +27,7 @@ func GetDataSourceById(query *m.GetDataSourceByIdQuery) error { datasource := m.DataSource{OrgId: query.OrgId, Id: query.Id} has, err := x.Get(&datasource) + if err != nil { return err } diff --git a/pkg/services/sqlstore/migrations/alert_mig.go b/pkg/services/sqlstore/migrations/alert_mig.go index 2a364d5f464..198a47b50ff 100644 --- a/pkg/services/sqlstore/migrations/alert_mig.go +++ b/pkg/services/sqlstore/migrations/alert_mig.go @@ -65,6 +65,16 @@ func addAlertMigrations(mg *Migrator) { mg.AddMigration("Add column is_default", NewAddColumnMigration(alert_notification, &Column{ Name: "is_default", Type: DB_Bool, 
Nullable: false, Default: "0", })) + mg.AddMigration("Add column frequency", NewAddColumnMigration(alert_notification, &Column{ + Name: "frequency", Type: DB_BigInt, Nullable: true, + })) + mg.AddMigration("Add column send_reminder", NewAddColumnMigration(alert_notification, &Column{ + Name: "send_reminder", Type: DB_Bool, Nullable: true, Default: "0", + })) + mg.AddMigration("Add column disable_resolve_message", NewAddColumnMigration(alert_notification, &Column{ + Name: "disable_resolve_message", Type: DB_Bool, Nullable: false, Default: "0", + })) + mg.AddMigration("add index alert_notification org_id & name", NewAddIndexMigration(alert_notification, alert_notification.Indices[0])) mg.AddMigration("Update alert table charset", NewTableCharsetMigration("alert", []*Column{ @@ -82,4 +92,45 @@ func addAlertMigrations(mg *Migrator) { {Name: "type", Type: DB_NVarchar, Length: 255, Nullable: false}, {Name: "settings", Type: DB_Text, Nullable: false}, })) + + notification_journal := Table{ + Name: "alert_notification_journal", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "org_id", Type: DB_BigInt, Nullable: false}, + {Name: "alert_id", Type: DB_BigInt, Nullable: false}, + {Name: "notifier_id", Type: DB_BigInt, Nullable: false}, + {Name: "sent_at", Type: DB_BigInt, Nullable: false}, + {Name: "success", Type: DB_Bool, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: IndexType}, + }, + } + + mg.AddMigration("create notification_journal table v1", NewAddTableMigration(notification_journal)) + mg.AddMigration("add index notification_journal org_id & alert_id & notifier_id", NewAddIndexMigration(notification_journal, notification_journal.Indices[0])) + + mg.AddMigration("drop alert_notification_journal", NewDropTableMigration("alert_notification_journal")) + + alert_notification_state := Table{ + Name: "alert_notification_state", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "org_id", Type: DB_BigInt, Nullable: false}, + {Name: "alert_id", Type: DB_BigInt, Nullable: false}, + {Name: "notifier_id", Type: DB_BigInt, Nullable: false}, + {Name: "state", Type: DB_NVarchar, Length: 50, Nullable: false}, + {Name: "version", Type: DB_BigInt, Nullable: false}, + {Name: "updated_at", Type: DB_BigInt, Nullable: false}, + {Name: "alert_rule_state_updated_version", Type: DB_BigInt, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: UniqueIndex}, + }, + } + + mg.AddMigration("create alert_notification_state table v1", NewAddTableMigration(alert_notification_state)) + mg.AddMigration("add index alert_notification_state org_id & alert_id & notifier_id", + NewAddIndexMigration(alert_notification_state, alert_notification_state.Indices[0])) } diff --git a/pkg/services/sqlstore/migrations/annotation_mig.go b/pkg/services/sqlstore/migrations/annotation_mig.go index d231d3283e2..49920dee490 100644 --- a/pkg/services/sqlstore/migrations/annotation_mig.go +++ b/pkg/services/sqlstore/migrations/annotation_mig.go @@ -105,7 +105,7 @@ func addAnnotationMig(mg *Migrator) { })) // - // Convert epoch saved as seconds to miliseconds + // Convert epoch saved as seconds to milliseconds // updateEpochSql := "UPDATE annotation SET epoch = (epoch*1000) where epoch < 9999999999" mg.AddMigration("Convert existing annotations from seconds to milliseconds", NewRawSqlMigration(updateEpochSql)) diff --git 
a/pkg/services/sqlstore/migrations/team_mig.go b/pkg/services/sqlstore/migrations/team_mig.go index 9800d27f8ab..34c46ad13cf 100644 --- a/pkg/services/sqlstore/migrations/team_mig.go +++ b/pkg/services/sqlstore/migrations/team_mig.go @@ -51,4 +51,7 @@ func addTeamMigrations(mg *Migrator) { Name: "email", Type: DB_NVarchar, Nullable: true, Length: 190, })) + mg.AddMigration("Add column external to team_member table", NewAddColumnMigration(teamMemberV1, &Column{ + Name: "external", Type: DB_Bool, Nullable: true, + })) } diff --git a/pkg/services/sqlstore/migrations/user_mig.go b/pkg/services/sqlstore/migrations/user_mig.go index 400033aaa33..e273cb7d542 100644 --- a/pkg/services/sqlstore/migrations/user_mig.go +++ b/pkg/services/sqlstore/migrations/user_mig.go @@ -134,7 +134,7 @@ type TempUserDTO struct { func (m *AddMissingUserSaltAndRandsMigration) Exec(sess *xorm.Session, mg *Migrator) error { users := make([]*TempUserDTO, 0) - err := sess.Sql(fmt.Sprintf("SELECT id, login from %s WHERE rands = ''", mg.Dialect.Quote("user"))).Find(&users) + err := sess.SQL(fmt.Sprintf("SELECT id, login from %s WHERE rands = ''", mg.Dialect.Quote("user"))).Find(&users) if err != nil { return err } diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 427d102b280..506a01c3ed8 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -44,6 +44,8 @@ type Dialect interface { CleanDB() error NoOpSql() string + + IsUniqueConstraintViolation(err error) bool } func NewDialect(engine *xorm.Engine) Dialect { diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go index 1ed16871c15..7daa4597430 100644 --- a/pkg/services/sqlstore/migrator/mysql_dialect.go +++ b/pkg/services/sqlstore/migrator/mysql_dialect.go @@ -5,6 +5,8 @@ import ( "strconv" "strings" + "github.com/VividCortex/mysqlerr" + "github.com/go-sql-driver/mysql" "github.com/go-xorm/xorm" ) @@ -125,3 +127,13 @@ func (db *Mysql) CleanDB() error { return nil } + +func (db *Mysql) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*mysql.MySQLError); ok { + if driverErr.Number == mysqlerr.ER_DUP_ENTRY { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go index eae9ad3ca3f..ab8812a1e26 100644 --- a/pkg/services/sqlstore/migrator/postgres_dialect.go +++ b/pkg/services/sqlstore/migrator/postgres_dialect.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/go-xorm/xorm" + "github.com/lib/pq" ) type Postgres struct { @@ -136,3 +137,13 @@ func (db *Postgres) CleanDB() error { return nil } + +func (db *Postgres) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*pq.Error); ok { + if driverErr.Code == "23505" { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go index 01082b95c88..446e3fcef12 100644 --- a/pkg/services/sqlstore/migrator/sqlite_dialect.go +++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/go-xorm/xorm" + sqlite3 "github.com/mattn/go-sqlite3" ) type Sqlite3 struct { @@ -82,3 +83,13 @@ func (db *Sqlite3) DropIndexSql(tableName string, index *Index) string { func (db *Sqlite3) CleanDB() error { return nil } + +func (db *Sqlite3) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := 
err.(sqlite3.Error); ok { + if driverErr.ExtendedCode == sqlite3.ErrConstraintUnique { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/org.go b/pkg/services/sqlstore/org.go index 8931f1cf0f5..e36a80322d8 100644 --- a/pkg/services/sqlstore/org.go +++ b/pkg/services/sqlstore/org.go @@ -133,7 +133,7 @@ func UpdateOrg(cmd *m.UpdateOrgCommand) error { Updated: time.Now(), } - affectedRows, err := sess.Id(cmd.OrgId).Update(&org) + affectedRows, err := sess.ID(cmd.OrgId).Update(&org) if err != nil { return err @@ -166,7 +166,7 @@ func UpdateOrgAddress(cmd *m.UpdateOrgAddressCommand) error { Updated: time.Now(), } - if _, err := sess.Id(cmd.OrgId).Update(&org); err != nil { + if _, err := sess.ID(cmd.OrgId).Update(&org); err != nil { return err } diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go index af8500707d5..c02686c24ba 100644 --- a/pkg/services/sqlstore/org_test.go +++ b/pkg/services/sqlstore/org_test.go @@ -182,6 +182,21 @@ func TestAccountDataAccess(t *testing.T) { }) }) + Convey("Removing user from org should delete user completely if in no other org", func() { + // make sure ac2 has no org + err := DeleteOrg(&m.DeleteOrgCommand{Id: ac2.OrgId}) + So(err, ShouldBeNil) + + // remove ac2 from the ac1 org + remCmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id, ShouldDeleteOrphanedUser: true} + err = RemoveOrgUser(&remCmd) + So(err, ShouldBeNil) + So(remCmd.UserWasDeleted, ShouldBeTrue) + + err = GetSignedInUser(&m.GetSignedInUserQuery{UserId: ac2.Id}) + So(err, ShouldEqual, m.ErrUserNotFound) + }) + Convey("Cannot delete last admin org user", func() { cmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac1.Id} err := RemoveOrgUser(&cmd) diff --git a/pkg/services/sqlstore/org_users.go b/pkg/services/sqlstore/org_users.go index aad72cdacb4..abbc320020e 100644 --- a/pkg/services/sqlstore/org_users.go +++ b/pkg/services/sqlstore/org_users.go @@ -21,7 +21,7 @@ func AddOrgUser(cmd *m.AddOrgUserCommand) error { return inTransaction(func(sess *DBSession) error { // check if user exists var user m.User - if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil { + if exists, err := sess.ID(cmd.UserId).Get(&user); err != nil { return err } else if !exists { return m.ErrUserNotFound @@ -85,7 +85,7 @@ func UpdateOrgUser(cmd *m.UpdateOrgUserCommand) error { orgUser.Role = cmd.Role orgUser.Updated = time.Now() - _, err = sess.Id(orgUser.Id).Update(&orgUser) + _, err = sess.ID(orgUser.Id).Update(&orgUser) if err != nil { return err } @@ -138,7 +138,7 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error { return inTransaction(func(sess *DBSession) error { // check if user exists var user m.User - if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil { + if exists, err := sess.ID(cmd.UserId).Get(&user); err != nil { return err } else if !exists { return m.ErrUserNotFound @@ -157,6 +157,12 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error { } } + // validate that after delete there is at least one user with admin role in org + if err := validateOneAdminLeftInOrg(cmd.OrgId, sess); err != nil { + return err + } + + // check user other orgs and update user current org var userOrgs []*m.UserOrgDTO sess.Table("org_user") sess.Join("INNER", "org", "org_user.org_id=org.id") @@ -168,22 +174,31 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error { return err } - hasCurrentOrgSet := false - for _, userOrg := range userOrgs { - if user.OrgId == userOrg.OrgId { - hasCurrentOrgSet = true - break + if len(userOrgs) >
0 { + hasCurrentOrgSet := false + for _, userOrg := range userOrgs { + if user.OrgId == userOrg.OrgId { + hasCurrentOrgSet = true + break + } } - } - if !hasCurrentOrgSet && len(userOrgs) > 0 { - err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId) - if err != nil { + if !hasCurrentOrgSet { + err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId) + if err != nil { + return err + } + } + } else if cmd.ShouldDeleteOrphanedUser { + // no other orgs, delete the full user + if err := deleteUserInTransaction(sess, &m.DeleteUserCommand{UserId: user.Id}); err != nil { return err } + + cmd.UserWasDeleted = true } - return validateOneAdminLeftInOrg(cmd.OrgId, sess) + return nil }) } diff --git a/pkg/services/sqlstore/plugin_setting.go b/pkg/services/sqlstore/plugin_setting.go index 676d26fad56..8fbf1b6be1c 100644 --- a/pkg/services/sqlstore/plugin_setting.go +++ b/pkg/services/sqlstore/plugin_setting.go @@ -26,7 +26,7 @@ func GetPluginSettings(query *m.GetPluginSettingsQuery) error { params = append(params, query.OrgId) } - sess := x.Sql(sql, params...) + sess := x.SQL(sql, params...) query.Result = make([]*m.PluginSettingInfoDTO, 0) return sess.Find(&query.Result) } @@ -100,7 +100,7 @@ func UpdatePluginSetting(cmd *m.UpdatePluginSettingCmd) error { pluginSetting.Pinned = cmd.Pinned pluginSetting.PluginVersion = cmd.PluginVersion - _, err = sess.Id(pluginSetting.Id).Update(&pluginSetting) + _, err = sess.ID(pluginSetting.Id).Update(&pluginSetting) return err }) } diff --git a/pkg/services/sqlstore/preferences.go b/pkg/services/sqlstore/preferences.go index 885837764fc..04e787971d9 100644 --- a/pkg/services/sqlstore/preferences.go +++ b/pkg/services/sqlstore/preferences.go @@ -94,7 +94,7 @@ func SavePreferences(cmd *m.SavePreferencesCommand) error { prefs.Theme = cmd.Theme prefs.Updated = time.Now() prefs.Version += 1 - _, err = sess.Id(prefs.Id).AllCols().Update(&prefs) + _, err = sess.ID(prefs.Id).AllCols().Update(&prefs) return err }) } diff --git a/pkg/services/sqlstore/quota.go b/pkg/services/sqlstore/quota.go index 539555ddc50..7005b341268 100644 --- a/pkg/services/sqlstore/quota.go +++ b/pkg/services/sqlstore/quota.go @@ -38,7 +38,7 @@ func GetOrgQuotaByTarget(query *m.GetOrgQuotaByTargetQuery) error { //get quota used. rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where org_id=?", dialect.Quote(query.Target)) resp := make([]*targetCount, 0) - if err := x.Sql(rawSql, query.OrgId).Find(&resp); err != nil { + if err := x.SQL(rawSql, query.OrgId).Find(&resp); err != nil { return err } @@ -81,7 +81,7 @@ func GetOrgQuotas(query *m.GetOrgQuotasQuery) error { //get quota used. rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where org_id=?", dialect.Quote(q.Target)) resp := make([]*targetCount, 0) - if err := x.Sql(rawSql, q.OrgId).Find(&resp); err != nil { + if err := x.SQL(rawSql, q.OrgId).Find(&resp); err != nil { return err } result[i] = &m.OrgQuotaDTO{ @@ -116,7 +116,7 @@ func UpdateOrgQuota(cmd *m.UpdateOrgQuotaCmd) error { } } else { //update existing quota entry in the DB. - if _, err := sess.Id(quota.Id).Update(&quota); err != nil { + if _, err := sess.ID(quota.Id).Update(&quota); err != nil { return err } } @@ -140,7 +140,7 @@ func GetUserQuotaByTarget(query *m.GetUserQuotaByTargetQuery) error { //get quota used.
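Both UpdateOrgQuota and UpdateUserQuota in the hunks above follow the same get-then-insert-or-update shape around sess.ID(quota.Id).Update(&quota). A condensed sketch of that pattern follows, assuming go-xorm's session API; the trimmed Quota struct and the upsertQuota helper are illustrative, not code from this change.

```go
package example

import (
	"time"

	"github.com/go-xorm/xorm"
)

// Quota is a trimmed-down stand-in for the quota model used above.
type Quota struct {
	Id      int64
	OrgId   int64
	Target  string
	Limit   int64
	Updated time.Time
}

// upsertQuota condenses the UpdateOrgQuota/UpdateUserQuota shape: query by
// the natural key, then either insert a new row or update the existing one
// by primary key. It expects a session already running in a transaction.
func upsertQuota(sess *xorm.Session, orgId int64, target string, limit int64) error {
	quota := Quota{OrgId: orgId, Target: target}
	has, err := sess.Get(&quota) // fills quota when a row matches OrgId+Target
	if err != nil {
		return err
	}

	quota.Limit = limit
	quota.Updated = time.Now()

	if !has {
		// no quota entry for this org/target yet: insert one
		_, err = sess.Insert(&quota)
		return err
	}

	// update the existing quota entry in the DB, addressed by primary key
	_, err = sess.ID(quota.Id).Update(&quota)
	return err
}
```

As with any read-then-write upsert, two concurrent callers can both miss on the read and try to insert; the alert_notification_state code earlier in this diff guards against exactly that by catching dialect-specific unique-constraint violations (IsUniqueConstraintViolation) and re-reading the row.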
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where user_id=?", dialect.Quote(query.Target)) resp := make([]*targetCount, 0) - if err := x.Sql(rawSql, query.UserId).Find(&resp); err != nil { + if err := x.SQL(rawSql, query.UserId).Find(&resp); err != nil { return err } @@ -183,7 +183,7 @@ func GetUserQuotas(query *m.GetUserQuotasQuery) error { //get quota used. rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where user_id=?", dialect.Quote(q.Target)) resp := make([]*targetCount, 0) - if err := x.Sql(rawSql, q.UserId).Find(&resp); err != nil { + if err := x.SQL(rawSql, q.UserId).Find(&resp); err != nil { return err } result[i] = &m.UserQuotaDTO{ @@ -218,7 +218,7 @@ func UpdateUserQuota(cmd *m.UpdateUserQuotaCmd) error { } } else { //update existing quota entry in the DB. - if _, err := sess.Id(quota.Id).Update(&quota); err != nil { + if _, err := sess.ID(quota.Id).Update(&quota); err != nil { return err } } @@ -231,7 +231,7 @@ func GetGlobalQuotaByTarget(query *m.GetGlobalQuotaByTargetQuery) error { //get quota used. rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s", dialect.Quote(query.Target)) resp := make([]*targetCount, 0) - if err := x.Sql(rawSql).Find(&resp); err != nil { + if err := x.SQL(rawSql).Find(&resp); err != nil { return err } diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 13d706b6198..f904b44c3c8 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -53,6 +53,7 @@ type SqlStore struct { dbCfg DatabaseConfig engine *xorm.Engine log log.Logger + Dialect migrator.Dialect skipEnsureAdmin bool } @@ -106,7 +107,7 @@ func (ss *SqlStore) inTransactionWithRetryCtx(ctx context.Context, callback dbTr if len(sess.events) > 0 { for _, e := range sess.events { if err = bus.Publish(e); err != nil { - log.Error(3, "Failed to publish event after commit", err) + log.Error(3, "Failed to publish event after commit.
error: %v", err) } } } @@ -125,10 +126,12 @@ func (ss *SqlStore) Init() error { } ss.engine = engine + ss.Dialect = migrator.NewDialect(ss.engine) // temporarily still set global var x = engine - dialect = migrator.NewDialect(x) + dialect = ss.Dialect + migrator := migrator.NewMigrator(x) migrations.AddMigrations(migrator) @@ -233,7 +236,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) { case migrator.SQLITE: // special case for tests if !filepath.IsAbs(ss.dbCfg.Path) { - ss.dbCfg.Path = filepath.Join(setting.DataPath, ss.dbCfg.Path) + ss.dbCfg.Path = filepath.Join(ss.Cfg.DataPath, ss.dbCfg.Path) } os.MkdirAll(path.Dir(ss.dbCfg.Path), os.ModePerm) cnnstr = "file:" + ss.dbCfg.Path + "?cache=shared&mode=rwc" @@ -347,7 +350,11 @@ func InitTestDB(t *testing.T) *SqlStore { t.Fatalf("Failed to init test database: %v", err) } - dialect = migrator.NewDialect(engine) + sqlstore.Dialect = migrator.NewDialect(engine) + + // temp global var until we get rid of global vars + dialect = sqlstore.Dialect + if err := dialect.CleanDB(); err != nil { t.Fatalf("Failed to clean test db %v", err) } diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go index 6db481bf06b..2cec86e7239 100644 --- a/pkg/services/sqlstore/stats.go +++ b/pkg/services/sqlstore/stats.go @@ -13,11 +13,19 @@ func init() { bus.AddHandler("sql", GetDataSourceStats) bus.AddHandler("sql", GetDataSourceAccessStats) bus.AddHandler("sql", GetAdminStats) + bus.AddHandlerCtx("sql", GetAlertNotifiersUsageStats) bus.AddHandlerCtx("sql", GetSystemUserCountStats) } var activeUserTimeLimit = time.Hour * 24 * 30 +func GetAlertNotifiersUsageStats(ctx context.Context, query *m.GetAlertNotifierUsageStatsQuery) error { + var rawSql = `SELECT COUNT(*) as count, type FROM alert_notification GROUP BY type` + query.Result = make([]*m.NotifierUsageStats, 0) + err := x.SQL(rawSql).Find(&query.Result) + return err +} + func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error { var rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type` query.Result = make([]*m.DataSourceStats, 0) diff --git a/pkg/services/sqlstore/stats_test.go b/pkg/services/sqlstore/stats_test.go index dae24952d17..6949a0dbda2 100644 --- a/pkg/services/sqlstore/stats_test.go +++ b/pkg/services/sqlstore/stats_test.go @@ -36,5 +36,11 @@ func TestStatsDataAccess(t *testing.T) { err := GetDataSourceAccessStats(&query) So(err, ShouldBeNil) }) + + Convey("Get alert notifier stats should not result in error", func() { + query := m.GetAlertNotifierUsageStatsQuery{} + err := GetAlertNotifiersUsageStats(context.Background(), &query) + So(err, ShouldBeNil) + }) }) } diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 72955df9a6a..a3010a086e5 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -74,7 +74,7 @@ func UpdateTeam(cmd *m.UpdateTeamCommand) error { sess.MustCols("email") - affectedRows, err := sess.Id(cmd.Id).Update(&team) + affectedRows, err := sess.ID(cmd.Id).Update(&team) if err != nil { return err @@ -169,7 +169,7 @@ func SearchTeams(query *m.SearchTeamsQuery) error { sql.WriteString(dialect.LimitOffset(int64(query.Limit), int64(offset))) } - if err := x.Sql(sql.String(), params...).Find(&query.Result.Teams); err != nil { + if err := x.SQL(sql.String(), params...).Find(&query.Result.Teams); err != nil { return err } @@ -196,7 +196,7 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { sql.WriteString(` WHERE team.org_id = ?
and team.id = ?`) var team m.TeamDTO - exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team) + exists, err := x.SQL(sql.String(), query.OrgId, query.Id).Get(&team) if err != nil { return err @@ -220,7 +220,7 @@ func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`) sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`) - err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result) + err := x.SQL(sql.String(), query.OrgId, query.UserId).Find(&query.Result) return err } @@ -240,11 +240,12 @@ func AddTeamMember(cmd *m.AddTeamMemberCommand) error { } entity := m.TeamMember{ - OrgId: cmd.OrgId, - TeamId: cmd.TeamId, - UserId: cmd.UserId, - Created: time.Now(), - Updated: time.Now(), + OrgId: cmd.OrgId, + TeamId: cmd.TeamId, + UserId: cmd.UserId, + External: cmd.External, + Created: time.Now(), + Updated: time.Now(), } _, err := sess.Insert(&entity) @@ -289,7 +290,10 @@ func GetTeamMembers(query *m.GetTeamMembersQuery) error { if query.UserId != 0 { sess.Where("team_member.user_id=?", query.UserId) } - sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login") + if query.External { + sess.Where("team_member.external=?", dialect.BooleanStr(true)) + } + sess.Cols("team_member.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login", "team_member.external") sess.Asc("user.login", "user.email") err := sess.Find(&query.Result) diff --git a/pkg/services/sqlstore/team_test.go b/pkg/services/sqlstore/team_test.go index abaa973957d..8f243617262 100644 --- a/pkg/services/sqlstore/team_test.go +++ b/pkg/services/sqlstore/team_test.go @@ -50,13 +50,29 @@ func TestTeamCommandsAndQueries(t *testing.T) { err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[0]}) So(err, ShouldBeNil) + err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[1], External: true}) + So(err, ShouldBeNil) q1 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id} err = GetTeamMembers(q1) So(err, ShouldBeNil) + So(q1.Result, ShouldHaveLength, 2) So(q1.Result[0].TeamId, ShouldEqual, team1.Id) So(q1.Result[0].Login, ShouldEqual, "loginuser0") So(q1.Result[0].OrgId, ShouldEqual, testOrgId) + So(q1.Result[1].TeamId, ShouldEqual, team1.Id) + So(q1.Result[1].Login, ShouldEqual, "loginuser1") + So(q1.Result[1].OrgId, ShouldEqual, testOrgId) + So(q1.Result[1].External, ShouldEqual, true) + + q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id, External: true} + err = GetTeamMembers(q2) + So(err, ShouldBeNil) + So(q2.Result, ShouldHaveLength, 1) + So(q2.Result[0].TeamId, ShouldEqual, team1.Id) + So(q2.Result[0].Login, ShouldEqual, "loginuser1") + So(q2.Result[0].OrgId, ShouldEqual, testOrgId) + So(q2.Result[0].External, ShouldEqual, true) }) Convey("Should be able to search for teams", func() { diff --git a/pkg/services/sqlstore/temp_user.go b/pkg/services/sqlstore/temp_user.go index e93ba2fd641..f13752f8038 100644 --- a/pkg/services/sqlstore/temp_user.go +++ b/pkg/services/sqlstore/temp_user.go @@ -96,7 +96,7 @@ func GetTempUsersQuery(query *m.GetTempUsersQuery) error { rawSql += " ORDER BY tu.created desc" query.Result = make([]*m.TempUserDTO, 0) - sess := x.Sql(rawSql, params...) + sess := x.SQL(rawSql, params...) 
err := sess.Find(&query.Result) return err } @@ -121,7 +121,7 @@ func GetTempUserByCode(query *m.GetTempUserByCodeQuery) error { WHERE tu.code=?` var tempUser m.TempUserDTO - sess := x.Sql(rawSql, query.Code) + sess := x.SQL(rawSql, query.Code) has, err := sess.Get(&tempUser) if err != nil { diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go index eccd37f9a43..edf29fffb8f 100644 --- a/pkg/services/sqlstore/transactions.go +++ b/pkg/services/sqlstore/transactions.go @@ -89,7 +89,7 @@ func inTransactionWithRetryCtx(ctx context.Context, callback dbTransactionFunc, if len(sess.events) > 0 { for _, e := range sess.events { if err = bus.Publish(e); err != nil { - log.Error(3, "Failed to publish event after commit", err) + log.Error(3, "Failed to publish event after commit. error: %v", err) } } } diff --git a/pkg/services/sqlstore/transactions_test.go b/pkg/services/sqlstore/transactions_test.go index 937649921ba..041359cf1d3 100644 --- a/pkg/services/sqlstore/transactions_test.go +++ b/pkg/services/sqlstore/transactions_test.go @@ -10,10 +10,6 @@ import ( . "github.com/smartystreets/goconvey/convey" ) -type testQuery struct { - result bool -} - var ProvokedError = errors.New("testing error.") func TestTransaction(t *testing.T) { @@ -39,7 +35,7 @@ func TestTransaction(t *testing.T) { So(err, ShouldEqual, models.ErrInvalidApiKey) }) - Convey("wont update if one handler fails", func() { + Convey("won't update if one handler fails", func() { err := ss.InTransaction(context.Background(), func(ctx context.Context) error { err := DeleteApiKeyCtx(ctx, deleteApiKeyCmd) if err != nil { diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 5d1b827e79f..72d5654a777 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -240,7 +240,7 @@ func UpdateUser(cmd *m.UpdateUserCommand) error { Updated: time.Now(), } - if _, err := sess.Id(cmd.UserId).Update(&user); err != nil { + if _, err := sess.ID(cmd.UserId).Update(&user); err != nil { return err } @@ -264,22 +264,19 @@ func ChangeUserPassword(cmd *m.ChangeUserPasswordCommand) error { Updated: time.Now(), } - _, err := sess.Id(cmd.UserId).Update(&user) + _, err := sess.ID(cmd.UserId).Update(&user) return err }) } func UpdateUserLastSeenAt(cmd *m.UpdateUserLastSeenAtCommand) error { return inTransaction(func(sess *DBSession) error { - if cmd.UserId <= 0 { - } - user := m.User{ Id: cmd.UserId, LastSeenAt: time.Now(), } - _, err := sess.Id(cmd.UserId).Update(&user) + _, err := sess.ID(cmd.UserId).Update(&user) return err }) } @@ -310,7 +307,7 @@ func setUsingOrgInTransaction(sess *DBSession, userID int64, orgID int64) error OrgId: orgID, } - _, err := sess.Id(userID).Update(&user) + _, err := sess.ID(userID).Update(&user) return err } @@ -372,11 +369,11 @@ func GetSignedInUser(query *m.GetSignedInUserQuery) error { sess := x.Table("user") if query.UserId > 0 { - sess.Sql(rawSql+"WHERE u.id=?", query.UserId) + sess.SQL(rawSql+"WHERE u.id=?", query.UserId) } else if query.Login != "" { - sess.Sql(rawSql+"WHERE u.login=?", query.Login) + sess.SQL(rawSql+"WHERE u.login=?", query.Login) } else if query.Email != "" { - sess.Sql(rawSql+"WHERE u.email=?", query.Email) + sess.SQL(rawSql+"WHERE u.email=?", query.Email) } var user m.SignedInUser @@ -448,35 +445,39 @@ func SearchUsers(query *m.SearchUsersQuery) error { func DeleteUser(cmd *m.DeleteUserCommand) error { return inTransaction(func(sess *DBSession) error { - deletes := []string{ - "DELETE FROM star WHERE user_id = ?", - 
"DELETE FROM " + dialect.Quote("user") + " WHERE id = ?", - "DELETE FROM org_user WHERE user_id = ?", - "DELETE FROM dashboard_acl WHERE user_id = ?", - "DELETE FROM preferences WHERE user_id = ?", - "DELETE FROM team_member WHERE user_id = ?", - "DELETE FROM user_auth WHERE user_id = ?", - } + return deleteUserInTransaction(sess, cmd) + }) +} - for _, sql := range deletes { - _, err := sess.Exec(sql, cmd.UserId) - if err != nil { - return err - } +func deleteUserInTransaction(sess *DBSession, cmd *m.DeleteUserCommand) error { + deletes := []string{ + "DELETE FROM star WHERE user_id = ?", + "DELETE FROM " + dialect.Quote("user") + " WHERE id = ?", + "DELETE FROM org_user WHERE user_id = ?", + "DELETE FROM dashboard_acl WHERE user_id = ?", + "DELETE FROM preferences WHERE user_id = ?", + "DELETE FROM team_member WHERE user_id = ?", + "DELETE FROM user_auth WHERE user_id = ?", + } + + for _, sql := range deletes { + _, err := sess.Exec(sql, cmd.UserId) + if err != nil { + return err } + } - return nil - }) + return nil } func UpdateUserPermissions(cmd *m.UpdateUserPermissionsCommand) error { return inTransaction(func(sess *DBSession) error { user := m.User{} - sess.Id(cmd.UserId).Get(&user) + sess.ID(cmd.UserId).Get(&user) user.IsAdmin = cmd.IsGrafanaAdmin sess.UseBool("is_admin") - _, err := sess.Id(user.Id).Update(&user) + _, err := sess.ID(user.Id).Update(&user) return err }) } @@ -490,7 +491,7 @@ func SetUserHelpFlag(cmd *m.SetUserHelpFlagCommand) error { Updated: time.Now(), } - _, err := sess.Id(cmd.UserId).Cols("help_flags1").Update(&user) + _, err := sess.ID(cmd.UserId).Cols("help_flags1").Update(&user) return err }) } diff --git a/pkg/services/sqlstore/user_auth_test.go b/pkg/services/sqlstore/user_auth_test.go index 5ad93dc7a3b..a0dd714fe6f 100644 --- a/pkg/services/sqlstore/user_auth_test.go +++ b/pkg/services/sqlstore/user_auth_test.go @@ -16,7 +16,6 @@ func TestUserAuth(t *testing.T) { Convey("Given 5 users", t, func() { var err error var cmd *m.CreateUserCommand - users := []m.User{} for i := 0; i < 5; i++ { cmd = &m.CreateUserCommand{ Email: fmt.Sprint("user", i, "@test.com"), @@ -25,7 +24,6 @@ func TestUserAuth(t *testing.T) { } err = CreateUser(context.Background(), cmd) So(err, ShouldBeNil) - users = append(users, cmd.Result) } Reset(func() { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index eb61568261d..58901e55c6b 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -54,14 +54,11 @@ var ( ApplicationName string // Paths - LogsPath string HomePath string - DataPath string PluginsPath string CustomInitPath = "conf/custom.ini" // Log settings. 
- LogModes []string LogConfigs []util.DynMap // Http server options @@ -164,8 +161,11 @@ var ( Quota QuotaSettings // Alerting - AlertingEnabled bool - ExecuteAlerts bool + AlertingEnabled bool + ExecuteAlerts bool + AlertingRenderLimit int + AlertingErrorOrTimeout string + AlertingNoDataOrNullValues string // Explore UI ExploreEnabled bool @@ -184,22 +184,35 @@ var ( ImageUploadProvider string ) +// TODO move all global vars to this struct type Cfg struct { Raw *ini.File + // HTTP Server Settings + AppUrl string + AppSubUrl string + // Paths ProvisioningPath string + DataPath string + LogsPath string // SMTP email settings Smtp SmtpSettings // Rendering - ImagesDir string - PhantomDir string - RendererUrl string + ImagesDir string + PhantomDir string + RendererUrl string + RendererCallbackUrl string + RendererLimit int + RendererLimitAlerting int + DisableBruteForceLoginProtection bool TempDataLifetime time.Duration + + MetricsEndpointEnabled bool } type CommandLineArgs struct { @@ -324,7 +337,7 @@ func getCommandLineProperties(args []string) map[string]string { trimmed := strings.TrimPrefix(arg, "cfg:") parts := strings.Split(trimmed, "=") if len(parts) != 2 { - log.Fatal(3, "Invalid command line argument", arg) + log.Fatal(3, "Invalid command line argument. argument: %v", arg) return nil } @@ -402,7 +415,7 @@ func loadSpecifedConfigFile(configFile string, masterFile *ini.File) error { return nil } -func loadConfiguration(args *CommandLineArgs) (*ini.File, error) { +func (cfg *Cfg) loadConfiguration(args *CommandLineArgs) (*ini.File, error) { var err error // load config defaults @@ -433,7 +446,7 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) { // load specified config file err = loadSpecifedConfigFile(args.Config, parsedFile) if err != nil { - initLogging(parsedFile) + cfg.initLogging(parsedFile) log.Fatal(3, err.Error()) } @@ -450,8 +463,8 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) { evalConfigValues(parsedFile) // update data path and logging config - DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath) - initLogging(parsedFile) + cfg.DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath) + cfg.initLogging(parsedFile) return parsedFile, err } @@ -508,7 +521,7 @@ func NewCfg() *Cfg { func (cfg *Cfg) Load(args *CommandLineArgs) error { setHomePath(args) - iniFile, err := loadConfiguration(args) + iniFile, err := cfg.loadConfiguration(args) if err != nil { return err } @@ -529,6 +542,8 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { cfg.ProvisioningPath = makeAbsolute(iniFile.Section("paths").Key("provisioning").String(), HomePath) server := iniFile.Section("server") AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server) + cfg.AppUrl = AppUrl + cfg.AppSubUrl = AppSubUrl Protocol = HTTP if server.Key("protocol").MustString("http") == "https" { @@ -641,9 +656,22 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { // Rendering renderSec := iniFile.Section("rendering") cfg.RendererUrl = renderSec.Key("server_url").String() - cfg.ImagesDir = filepath.Join(DataPath, "png") + cfg.RendererCallbackUrl = renderSec.Key("callback_url").String() + if cfg.RendererCallbackUrl == "" { + cfg.RendererCallbackUrl = AppUrl + } else { + if cfg.RendererCallbackUrl[len(cfg.RendererCallbackUrl)-1] != '/' { + cfg.RendererCallbackUrl += "/" + } + _, err := url.Parse(cfg.RendererCallbackUrl) + if err != nil { + log.Fatal(4, "Invalid callback_url(%s): %s", cfg.RendererCallbackUrl, err) + } + } + 
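Note: the callback_url handling added above normalizes a configured renderer callback URL to end with a trailing slash before validating it. A minimal standalone sketch of that normalization, assuming a non-empty input; normalizeCallbackURL is an illustrative name, not a function in this patch:

```
package main

import (
	"fmt"
	"net/url"
)

// normalizeCallbackURL mirrors the trailing-slash handling in the hunk
// above: append "/" if missing, then sanity-check with url.Parse.
// The function name is illustrative only.
func normalizeCallbackURL(raw string) (string, error) {
	if raw != "" && raw[len(raw)-1] != '/' {
		raw += "/"
	}
	if _, err := url.Parse(raw); err != nil {
		return "", err
	}
	return raw, nil
}

func main() {
	u, _ := normalizeCallbackURL("http://myserver/renderer")
	fmt.Println(u) // http://myserver/renderer/
}
```

This matches the new setting_test.go case further down ("Reading callback_url should add trailing slash").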
cfg.ImagesDir = filepath.Join(cfg.DataPath, "png") cfg.PhantomDir = filepath.Join(HomePath, "tools/phantomjs") cfg.TempDataLifetime = iniFile.Section("paths").Key("temp_data_lifetime").MustDuration(time.Second * 3600 * 24) + cfg.MetricsEndpointEnabled = iniFile.Section("metrics").Key("enabled").MustBool(true) analytics := iniFile.Section("analytics") ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true) @@ -659,6 +687,9 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { alerting := iniFile.Section("alerting") AlertingEnabled = alerting.Key("enabled").MustBool(true) ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true) + AlertingRenderLimit = alerting.Key("concurrent_render_limit").MustInt(5) + AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting") + AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data") explore := iniFile.Section("explore") ExploreEnabled = explore.Key("enabled").MustBool(false) @@ -695,7 +726,7 @@ func (cfg *Cfg) readSessionConfig() { SessionOptions.IDLength = 16 if SessionOptions.Provider == "file" { - SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, DataPath) + SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, cfg.DataPath) os.MkdirAll(path.Dir(SessionOptions.ProviderConfig), os.ModePerm) } @@ -706,15 +737,15 @@ func (cfg *Cfg) readSessionConfig() { SessionConnMaxLifetime = cfg.Raw.Section("session").Key("conn_max_lifetime").MustInt64(14400) } -func initLogging(file *ini.File) { +func (cfg *Cfg) initLogging(file *ini.File) { // split on comma - LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), ",") + logModes := strings.Split(file.Section("log").Key("mode").MustString("console"), ",") // also try space - if len(LogModes) == 1 { - LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ") + if len(logModes) == 1 { + logModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ") } - LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath) - log.ReadLoggingConfig(LogModes, LogsPath, file) + cfg.LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath) + log.ReadLoggingConfig(logModes, cfg.LogsPath, file) } func (cfg *Cfg) LogConfigSources() { @@ -738,8 +769,8 @@ func (cfg *Cfg) LogConfigSources() { } logger.Info("Path Home", "path", HomePath) - logger.Info("Path Data", "path", DataPath) - logger.Info("Path Logs", "path", LogsPath) + logger.Info("Path Data", "path", cfg.DataPath) + logger.Info("Path Logs", "path", cfg.LogsPath) logger.Info("Path Plugins", "path", PluginsPath) logger.Info("Path Provisioning", "path", cfg.ProvisioningPath) logger.Info("App mode " + Env) diff --git a/pkg/setting/setting_oauth.go b/pkg/setting/setting_oauth.go index ee2e812415b..93b1ab6f101 100644 --- a/pkg/setting/setting_oauth.go +++ b/pkg/setting/setting_oauth.go @@ -5,6 +5,7 @@ type OAuthInfo struct { Scopes []string AuthUrl, TokenUrl string Enabled bool + EmailAttributeName string AllowedDomains []string HostedDomain string ApiUrl string diff --git a/pkg/setting/setting_test.go b/pkg/setting/setting_test.go index 9de22c86811..72dbe2378c7 100644 --- a/pkg/setting/setting_test.go +++ b/pkg/setting/setting_test.go @@ -20,6 +20,7 @@ func TestLoadingSettings(t *testing.T) { So(err, ShouldBeNil) So(AdminUser, ShouldEqual, "admin") + So(cfg.RendererCallbackUrl, ShouldEqual, "http://localhost:3000/") }) Convey("Should be able to override via 
environment variables", func() { @@ -29,8 +30,8 @@ func TestLoadingSettings(t *testing.T) { cfg.Load(&CommandLineArgs{HomePath: "../../"}) So(AdminUser, ShouldEqual, "superduper") - So(DataPath, ShouldEqual, filepath.Join(HomePath, "data")) - So(LogsPath, ShouldEqual, filepath.Join(DataPath, "log")) + So(cfg.DataPath, ShouldEqual, filepath.Join(HomePath, "data")) + So(cfg.LogsPath, ShouldEqual, filepath.Join(cfg.DataPath, "log")) }) Convey("Should replace password when defined in environment", func() { @@ -75,8 +76,8 @@ func TestLoadingSettings(t *testing.T) { HomePath: "../../", Args: []string{`cfg:paths.data=c:\tmp\data`, `cfg:paths.logs=c:\tmp\logs`}, }) - So(DataPath, ShouldEqual, `c:\tmp\data`) - So(LogsPath, ShouldEqual, `c:\tmp\logs`) + So(cfg.DataPath, ShouldEqual, `c:\tmp\data`) + So(cfg.LogsPath, ShouldEqual, `c:\tmp\logs`) } else { cfg := NewCfg() cfg.Load(&CommandLineArgs{ @@ -84,8 +85,8 @@ func TestLoadingSettings(t *testing.T) { Args: []string{"cfg:paths.data=/tmp/data", "cfg:paths.logs=/tmp/logs"}, }) - So(DataPath, ShouldEqual, "/tmp/data") - So(LogsPath, ShouldEqual, "/tmp/logs") + So(cfg.DataPath, ShouldEqual, "/tmp/data") + So(cfg.LogsPath, ShouldEqual, "/tmp/logs") } }) @@ -96,7 +97,7 @@ func TestLoadingSettings(t *testing.T) { Args: []string{ "cfg:default.server.domain=test2", }, - Config: filepath.Join(HomePath, "tests/config-files/override.ini"), + Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"), }) So(Domain, ShouldEqual, "test2") @@ -107,20 +108,20 @@ func TestLoadingSettings(t *testing.T) { cfg := NewCfg() cfg.Load(&CommandLineArgs{ HomePath: "../../", - Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"), + Config: filepath.Join(HomePath, "pkg/setting/testdata/override_windows.ini"), Args: []string{`cfg:default.paths.data=c:\tmp\data`}, }) - So(DataPath, ShouldEqual, `c:\tmp\override`) + So(cfg.DataPath, ShouldEqual, `c:\tmp\override`) } else { cfg := NewCfg() cfg.Load(&CommandLineArgs{ HomePath: "../../", - Config: filepath.Join(HomePath, "tests/config-files/override.ini"), + Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"), Args: []string{"cfg:default.paths.data=/tmp/data"}, }) - So(DataPath, ShouldEqual, "/tmp/override") + So(cfg.DataPath, ShouldEqual, "/tmp/override") } }) @@ -129,20 +130,20 @@ func TestLoadingSettings(t *testing.T) { cfg := NewCfg() cfg.Load(&CommandLineArgs{ HomePath: "../../", - Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"), + Config: filepath.Join(HomePath, "pkg/setting/testdata/override_windows.ini"), Args: []string{`cfg:paths.data=c:\tmp\data`}, }) - So(DataPath, ShouldEqual, `c:\tmp\data`) + So(cfg.DataPath, ShouldEqual, `c:\tmp\data`) } else { cfg := NewCfg() cfg.Load(&CommandLineArgs{ HomePath: "../../", - Config: filepath.Join(HomePath, "tests/config-files/override.ini"), + Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"), Args: []string{"cfg:paths.data=/tmp/data"}, }) - So(DataPath, ShouldEqual, "/tmp/data") + So(cfg.DataPath, ShouldEqual, "/tmp/data") } }) @@ -155,7 +156,7 @@ func TestLoadingSettings(t *testing.T) { Args: []string{"cfg:paths.data=${GF_DATA_PATH}"}, }) - So(DataPath, ShouldEqual, `c:\tmp\env_override`) + So(cfg.DataPath, ShouldEqual, `c:\tmp\env_override`) } else { os.Setenv("GF_DATA_PATH", "/tmp/env_override") cfg := NewCfg() @@ -164,7 +165,7 @@ func TestLoadingSettings(t *testing.T) { Args: []string{"cfg:paths.data=${GF_DATA_PATH}"}, }) - So(DataPath, ShouldEqual, "/tmp/env_override") + 
So(cfg.DataPath, ShouldEqual, "/tmp/env_override") } }) @@ -178,5 +179,15 @@ func TestLoadingSettings(t *testing.T) { So(InstanceName, ShouldEqual, hostname) }) + Convey("Reading callback_url should add trailing slash", func() { + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ + HomePath: "../../", + Args: []string{"cfg:rendering.callback_url=http://myserver/renderer"}, + }) + + So(cfg.RendererCallbackUrl, ShouldEqual, "http://myserver/renderer/") + }) + }) } diff --git a/tests/config-files/override.ini b/pkg/setting/testdata/override.ini similarity index 100% rename from tests/config-files/override.ini rename to pkg/setting/testdata/override.ini diff --git a/tests/config-files/override_windows.ini b/pkg/setting/testdata/override_windows.ini similarity index 100% rename from tests/config-files/override_windows.ini rename to pkg/setting/testdata/override_windows.ini diff --git a/pkg/social/generic_oauth.go b/pkg/social/generic_oauth.go index 8c02076096d..a97d58334c7 100644 --- a/pkg/social/generic_oauth.go +++ b/pkg/social/generic_oauth.go @@ -20,6 +20,7 @@ type SocialGenericOAuth struct { allowedOrganizations []string apiUrl string allowSignup bool + emailAttributeName string teamIds []int } @@ -264,8 +265,9 @@ func (s *SocialGenericOAuth) extractEmail(data *UserInfoJson) string { return data.Email } - if data.Attributes["email:primary"] != nil { - return data.Attributes["email:primary"][0] + emails, ok := data.Attributes[s.emailAttributeName] + if ok && len(emails) != 0 { + return emails[0] } if data.Upn != "" { diff --git a/pkg/social/social.go b/pkg/social/social.go index 2be71514629..8918507f3b9 100644 --- a/pkg/social/social.go +++ b/pkg/social/social.go @@ -46,35 +46,39 @@ func (e *Error) Error() string { return e.s } +const ( + grafanaCom = "grafana_com" +) + var ( SocialBaseUrl = "/login/" SocialMap = make(map[string]SocialConnector) + allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", grafanaCom} ) func NewOAuthService() { setting.OAuthService = &setting.OAuther{} setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo) - allOauthes := []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"} - for _, name := range allOauthes { sec := setting.Raw.Section("auth." 
+ name) info := &setting.OAuthInfo{ - ClientId: sec.Key("client_id").String(), - ClientSecret: sec.Key("client_secret").String(), - Scopes: util.SplitString(sec.Key("scopes").String()), - AuthUrl: sec.Key("auth_url").String(), - TokenUrl: sec.Key("token_url").String(), - ApiUrl: sec.Key("api_url").String(), - Enabled: sec.Key("enabled").MustBool(), - AllowedDomains: util.SplitString(sec.Key("allowed_domains").String()), - HostedDomain: sec.Key("hosted_domain").String(), - AllowSignup: sec.Key("allow_sign_up").MustBool(), - Name: sec.Key("name").MustString(name), - TlsClientCert: sec.Key("tls_client_cert").String(), - TlsClientKey: sec.Key("tls_client_key").String(), - TlsClientCa: sec.Key("tls_client_ca").String(), - TlsSkipVerify: sec.Key("tls_skip_verify_insecure").MustBool(), + ClientId: sec.Key("client_id").String(), + ClientSecret: sec.Key("client_secret").String(), + Scopes: util.SplitString(sec.Key("scopes").String()), + AuthUrl: sec.Key("auth_url").String(), + TokenUrl: sec.Key("token_url").String(), + ApiUrl: sec.Key("api_url").String(), + Enabled: sec.Key("enabled").MustBool(), + EmailAttributeName: sec.Key("email_attribute_name").String(), + AllowedDomains: util.SplitString(sec.Key("allowed_domains").String()), + HostedDomain: sec.Key("hosted_domain").String(), + AllowSignup: sec.Key("allow_sign_up").MustBool(), + Name: sec.Key("name").MustString(name), + TlsClientCert: sec.Key("tls_client_cert").String(), + TlsClientKey: sec.Key("tls_client_key").String(), + TlsClientCa: sec.Key("tls_client_ca").String(), + TlsSkipVerify: sec.Key("tls_skip_verify_insecure").MustBool(), } if !info.Enabled { @@ -82,7 +86,7 @@ func NewOAuthService() { } if name == "grafananet" { - name = "grafana_com" + name = grafanaCom } setting.OAuthService.OAuthInfos[name] = info @@ -153,12 +157,13 @@ func NewOAuthService() { allowedDomains: info.AllowedDomains, apiUrl: info.ApiUrl, allowSignup: info.AllowSignup, + emailAttributeName: info.EmailAttributeName, teamIds: sec.Key("team_ids").Ints(","), allowedOrganizations: util.SplitString(sec.Key("allowed_organizations").String()), } } - if name == "grafana_com" { + if name == grafanaCom { config = oauth2.Config{ ClientID: info.ClientId, ClientSecret: info.ClientSecret, @@ -170,7 +175,7 @@ func NewOAuthService() { Scopes: info.Scopes, } - SocialMap["grafana_com"] = &SocialGrafanaCom{ + SocialMap[grafanaCom] = &SocialGrafanaCom{ SocialBase: &SocialBase{ Config: &config, log: logger, @@ -182,3 +187,26 @@ func NewOAuthService() { } } } + +// GetOAuthProviders returns available oauth providers and if they're enabled or not +var GetOAuthProviders = func(cfg *setting.Cfg) map[string]bool { + result := map[string]bool{} + + if cfg == nil || cfg.Raw == nil { + return result + } + + for _, name := range allOauthes { + if name == "grafananet" { + name = grafanaCom + } + + sec := cfg.Raw.Section("auth." 
+ name) + if sec == nil { + continue + } + result[name] = sec.Key("enabled").MustBool() + } + + return result +} diff --git a/pkg/tracing/tracing.go b/pkg/tracing/tracing.go index 61f45af3635..fd7258b7a0a 100644 --- a/pkg/tracing/tracing.go +++ b/pkg/tracing/tracing.go @@ -58,7 +58,8 @@ func (ts *TracingService) parseSettings() { func (ts *TracingService) initGlobalTracer() error { cfg := jaegercfg.Configuration{ - Disabled: !ts.enabled, + ServiceName: "grafana", + Disabled: !ts.enabled, Sampler: &jaegercfg.SamplerConfig{ Type: ts.samplerType, Param: ts.samplerParam, @@ -78,7 +79,7 @@ func (ts *TracingService) initGlobalTracer() error { options = append(options, jaegercfg.Tag(tag, value)) } - tracer, closer, err := cfg.New("grafana", options...) + tracer, closer, err := cfg.NewTracer(options...) if err != nil { return err } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 92352a51315..437457df52a 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -86,9 +86,10 @@ func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc } func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ + results := &tsdb.Response{ Results: make(map[string]*tsdb.QueryResult), } + resultChan := make(chan *tsdb.QueryResult, len(queryContext.Queries)) eg, ectx := errgroup.WithContext(ctx) @@ -102,10 +103,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo RefId := queryContext.Queries[i].RefId query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - result.Results[RefId] = &tsdb.QueryResult{ + results.Results[RefId] = &tsdb.QueryResult{ Error: err, } - return result, nil + return results, nil } query.RefId = RefId @@ -118,10 +119,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo } if query.Id == "" && query.Expression != "" { - result.Results[query.RefId] = &tsdb.QueryResult{ + results.Results[query.RefId] = &tsdb.QueryResult{ Error: fmt.Errorf("Invalid query: id should be set if using expression"), } - return result, nil + return results, nil } eg.Go(func() error { @@ -129,10 +130,14 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } - result.Results[queryRes.RefId] = queryRes if err != nil { - result.Results[queryRes.RefId].Error = err + resultChan <- &tsdb.QueryResult{ + RefId: query.RefId, + Error: err, + } + return nil } + resultChan <- queryRes return nil }) } @@ -146,10 +151,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo return err } for _, queryRes := range queryResponses { - result.Results[queryRes.RefId] = queryRes if err != nil { - result.Results[queryRes.RefId].Error = err + queryRes.Error = err } + resultChan <- queryRes } return nil }) @@ -159,8 +164,12 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo if err := eg.Wait(); err != nil { return nil, err } + close(resultChan) + for result := range resultChan { + results.Results[result.RefId] = result + } - return result, nil + return results, nil } func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { @@ -196,7 +205,7 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch 
params.ExtendedStatistics = query.ExtendedStatistics } - // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + // 1 minute resolution metrics are stored for 15 days, 15 * 24 * 60 = 21600 if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { return nil, errors.New("too long query period") } @@ -267,9 +276,9 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi ScanBy: aws.String("TimestampAscending"), } for _, query := range queries { - // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + // 1 minute resolution metrics are stored for 15 days, 15 * 24 * 60 = 21600 if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { - return nil, errors.New("too long query period") + return queryResponses, errors.New("too long query period") } mdq := &cloudwatch.MetricDataQuery{ @@ -362,6 +371,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi } queryRes.Series = append(queryRes.Series, &series) + queryRes.Meta = simplejson.New() queryResponses = append(queryResponses, queryRes) } @@ -565,6 +575,12 @@ func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatch } queryRes.Series = append(queryRes.Series, &series) + queryRes.Meta = simplejson.New() + if len(resp.Datapoints) > 0 && resp.Datapoints[0].Unit != nil { + if unit, ok := cloudwatchUnitMappings[*resp.Datapoints[0].Unit]; ok { + queryRes.Meta.Set("unit", unit) + } + } } return queryRes, nil diff --git a/pkg/tsdb/cloudwatch/cloudwatch_test.go b/pkg/tsdb/cloudwatch/cloudwatch_test.go index 719edba08ba..32b8c910f2b 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch_test.go +++ b/pkg/tsdb/cloudwatch/cloudwatch_test.go @@ -71,6 +71,7 @@ func TestCloudWatch(t *testing.T) { "p50.00": aws.Float64(30.0), "p90.00": aws.Float64(40.0), }, + Unit: aws.String("Seconds"), }, }, } @@ -103,6 +104,7 @@ func TestCloudWatch(t *testing.T) { So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String()) So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String()) So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String()) + So(queryRes.Meta.Get("unit").MustString(), ShouldEqual, "s") }) Convey("terminate gap of data points", func() { @@ -118,6 +120,7 @@ func TestCloudWatch(t *testing.T) { "p50.00": aws.Float64(30.0), "p90.00": aws.Float64(40.0), }, + Unit: aws.String("Seconds"), }, { Timestamp: aws.Time(timestamp.Add(60 * time.Second)), @@ -127,6 +130,7 @@ func TestCloudWatch(t *testing.T) { "p50.00": aws.Float64(40.0), "p90.00": aws.Float64(50.0), }, + Unit: aws.String("Seconds"), }, { Timestamp: aws.Time(timestamp.Add(180 * time.Second)), @@ -136,6 +140,7 @@ func TestCloudWatch(t *testing.T) { "p50.00": aws.Float64(50.0), "p90.00": aws.Float64(60.0), }, + Unit: aws.String("Seconds"), }, }, } diff --git a/pkg/tsdb/cloudwatch/constants.go b/pkg/tsdb/cloudwatch/constants.go new file mode 100644 index 00000000000..23817b1d133 --- /dev/null +++ b/pkg/tsdb/cloudwatch/constants.go @@ -0,0 +1,30 @@ +package cloudwatch + +var cloudwatchUnitMappings = map[string]string{ + "Seconds": "s", + "Microseconds": "µs", + "Milliseconds": "ms", + "Bytes": "bytes", + "Kilobytes": "kbytes", + "Megabytes": "mbytes", + "Gigabytes": "gbytes", + //"Terabytes": "", + "Bits": "bits", + //"Kilobits": "", + //"Megabits": "", + //"Gigabits": "", + //"Terabits": "", + "Percent": "percent", + //"Count": "", +
"Bytes/Second": "Bps", + "Kilobytes/Second": "KBs", + "Megabytes/Second": "MBs", + "Gigabytes/Second": "GBs", + //"Terabytes/Second": "", + "Bits/Second": "bps", + "Kilobits/Second": "Kbits", + "Megabits/Second": "Mbits", + "Gigabits/Second": "Gbits", + //"Terabits/Second": "", + //"Count/Second": "", +} diff --git a/pkg/tsdb/cloudwatch/credentials.go b/pkg/tsdb/cloudwatch/credentials.go index 8b32c76daa3..165f8fdbe97 100644 --- a/pkg/tsdb/cloudwatch/credentials.go +++ b/pkg/tsdb/cloudwatch/credentials.go @@ -42,8 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) { accessKeyId := "" secretAccessKey := "" sessionToken := "" - var expiration *time.Time - expiration = nil + var expiration *time.Time = nil if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 { params := &sts.AssumeRoleInput{ RoleArn: aws.String(dsInfo.AssumeRoleArn), diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index ef1b53eaf1b..b74af76f09a 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -234,10 +234,37 @@ func parseMultiSelectValue(input string) []string { // Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { regions := []string{ - "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1", - "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", + "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-southeast-1", "ap-southeast-2", "ca-central-1", + "eu-central-1", "eu-north-1", "eu-west-1", "eu-west-2", "eu-west-3", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-west-1", "us-west-2", + "cn-north-1", "cn-northwest-1", "us-gov-east-1", "us-gov-west-1", "us-isob-east-1", "us-iso-east-1", } + err := e.ensureClientSession("us-east-1") + if err != nil { + return nil, err + } + r, err := e.ec2Svc.DescribeRegions(&ec2.DescribeRegionsInput{}) + if err != nil { + // ignore error for backward compatibility + plog.Error("Failed to get regions", "error", err) + } else { + for _, region := range r.Regions { + exists := false + + for _, existingRegion := range regions { + if existingRegion == *region.RegionName { + exists = true + break + } + } + + if !exists { + regions = append(regions, *region.RegionName) + } + } + } + sort.Strings(regions) + result := make([]suggestData, 0) for _, region := range regions { result = append(result, suggestData{Text: region, Value: region}) @@ -466,6 +493,9 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, return nil, errors.New("invalid attribute path") } v = v.FieldByName(key) + if !v.IsValid() { + return nil, errors.New("invalid attribute path") + } } if attr, ok := v.Interface().(*string); ok { data = *attr diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go index dff626a79eb..4ebe0db8f89 100644 --- a/pkg/tsdb/elasticsearch/client/client.go +++ b/pkg/tsdb/elasticsearch/client/client.go @@ -138,13 +138,13 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte, } body := string(reqBody) - body = strings.Replace(body, "$__interval_ms", 
strconv.FormatInt(r.interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) + body = strings.Replace(body, "$__interval_ms", strconv.FormatInt(r.interval.Milliseconds(), 10), -1) body = strings.Replace(body, "$__interval", r.interval.Text, -1) payload.WriteString(body + "\n") } - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Encoded batch requests to json", "took", elapsed) return payload.Bytes(), nil @@ -187,7 +187,7 @@ func (c *baseClientImpl) executeRequest(method, uriPath string, body []byte) (*h start := time.Now() defer func() { - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Executed request", "took", elapsed) }() return ctxhttp.Do(c.ctx, httpClient, req) @@ -215,7 +215,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch return nil, err } - elapsed := time.Now().Sub(start) + elapsed := time.Since(start) clientLog.Debug("Decoded multisearch json response", "took", elapsed) msr.Status = res.StatusCode diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go index 11d1cdb1d71..540a999688a 100644 --- a/pkg/tsdb/elasticsearch/client/client_test.go +++ b/pkg/tsdb/elasticsearch/client/client_test.go @@ -25,7 +25,7 @@ func TestClient(t *testing.T) { JsonData: simplejson.NewFromAny(make(map[string]interface{})), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -36,11 +36,11 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) - Convey("When unspported version set should return error", func() { + Convey("When unsupported version set should return error", func() { ds := &models.DataSource{ JsonData: simplejson.NewFromAny(map[string]interface{}{ "esVersion": 6, @@ -48,7 +48,7 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -60,7 +60,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 2) }) @@ -73,7 +73,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 5) }) @@ -86,7 +86,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 56) }) diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go index 2b833ce78d3..4c577a2c31d 100644 --- a/pkg/tsdb/elasticsearch/client/search_request.go +++ b/pkg/tsdb/elasticsearch/client/search_request.go @@ -56,9 +56,7 @@ func (b *SearchRequestBuilder) Build() (*SearchRequest, error) { if err != nil { return nil, err } - for _, agg := range aggArray { - sr.Aggs = append(sr.Aggs, agg) - } + sr.Aggs = append(sr.Aggs, aggArray...) 
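Note: the change above (and the matching one in the aggregation builder below) replaces an element-by-element copy loop with a single variadic append, which is behavior-preserving in Go. A minimal standalone sketch of the equivalence, using illustrative slice values:

```
package main

import "fmt"

func main() {
	aggs := []string{"terms"}
	children := []string{"date_histogram", "avg"}

	// The removed form:
	//   for _, c := range children { aggs = append(aggs, c) }
	// is equivalent to the variadic form the patch uses:
	aggs = append(aggs, children...)

	fmt.Println(aggs) // [terms date_histogram avg]
}
```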
} } @@ -112,7 +110,7 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder { return b.queryBuilder } -// Agg initaite and returns a new aggregation builder +// Agg initiates and returns a new aggregation builder func (b *SearchRequestBuilder) Agg() AggBuilder { aggBuilder := newAggBuilder() b.aggBuilders = append(b.aggBuilders, aggBuilder) @@ -300,9 +298,7 @@ func (b *aggBuilderImpl) Build() (AggArray, error) { return nil, err } - for _, childAgg := range childAggs { - agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAgg) - } + agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAggs...) } aggs = append(aggs, agg) diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 7bdab60389c..0837c3dd9d5 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -13,6 +13,19 @@ import ( "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" ) +const ( + // Metric types + countType = "count" + percentilesType = "percentiles" + extendedStatsType = "extended_stats" + // Bucket types + dateHistType = "date_histogram" + histogramType = "histogram" + filtersType = "filters" + termsType = "terms" + geohashGridType = "geohash_grid" +) + type responseParser struct { Responses []*es.SearchResponse Targets []*Query @@ -81,7 +94,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu } if depth == maxDepth { - if aggDef.Type == "date_histogram" { + if aggDef.Type == dateHistType { err = rp.processMetrics(esAgg, target, series, props) } else { err = rp.processAggregationDocs(esAgg, aggDef, target, table, props) @@ -92,7 +105,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu } else { for _, b := range esAgg.Get("buckets").MustArray() { bucket := simplejson.NewFromAny(b) - newProps := make(map[string]string, 0) + newProps := make(map[string]string) for k, v := range props { newProps[k] = v @@ -122,7 +135,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu for _, bucketKey := range bucketKeys { bucket := simplejson.NewFromAny(buckets[bucketKey]) - newProps := make(map[string]string, 0) + newProps := make(map[string]string) for k, v := range props { newProps[k] = v @@ -149,7 +162,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } switch metric.Type { - case "count": + case countType: newSeries := tsdb.TimeSeries{ Tags: make(map[string]string), } @@ -164,10 +177,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, for k, v := range props { newSeries.Tags[k] = v } - newSeries.Tags["metric"] = "count" + newSeries.Tags["metric"] = countType *series = append(*series, &newSeries) - case "percentiles": + case percentilesType: buckets := esAgg.Get("buckets").MustArray() if len(buckets) == 0 { break @@ -198,7 +211,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } *series = append(*series, &newSeries) } - case "extended_stats": + case extendedStatsType: buckets := esAgg.Get("buckets").MustArray() metaKeys := make([]string, 0) @@ -312,10 +325,9 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef for _, metric := range target.Metrics { switch metric.Type { - case "count": + case countType: addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count"))) - break - case "extended_stats": + case extendedStatsType: metaKeys := make([]string, 0) meta :=
metric.Meta.MustMap() for k := range meta { @@ -355,7 +367,6 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef } addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value"))) - break } } @@ -368,7 +379,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) { var histogram *BucketAgg for _, bucketAgg := range target.BucketAggs { - if bucketAgg.Type == "date_histogram" { + if bucketAgg.Type == dateHistType { histogram = bucketAgg break } diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go index c9bb05dd09a..869e23e21ce 100644 --- a/pkg/tsdb/elasticsearch/time_series_query.go +++ b/pkg/tsdb/elasticsearch/time_series_query.go @@ -75,15 +75,15 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { // iterate backwards to create aggregations bottom-down for _, bucketAgg := range q.BucketAggs { switch bucketAgg.Type { - case "date_histogram": + case dateHistType: aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to) - case "histogram": + case histogramType: aggBuilder = addHistogramAgg(aggBuilder, bucketAgg) - case "filters": + case filtersType: aggBuilder = addFiltersAgg(aggBuilder, bucketAgg) - case "terms": + case termsType: aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics) - case "geohash_grid": + case geohashGridType: aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg) } } @@ -171,6 +171,10 @@ func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*Metr } else { a.Size = 500 } + if a.Size == 0 { + a.Size = 500 + } + if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil { a.MinDocCount = &minDocCount } diff --git a/pkg/tsdb/elasticsearch/time_series_query_test.go b/pkg/tsdb/elasticsearch/time_series_query_test.go index 49bf5f5bc75..fe8ae0fa8f2 100644 --- a/pkg/tsdb/elasticsearch/time_series_query_test.go +++ b/pkg/tsdb/elasticsearch/time_series_query_test.go @@ -60,7 +60,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) { _, err := executeTsdbQuery(c, `{ "timeField": "@timestamp", "bucketAggs": [ - { "type": "terms", "field": "@host", "id": "2" }, + { "type": "terms", "field": "@host", "id": "2", "settings": { "size": "0", "order": "asc" } }, { "type": "date_histogram", "field": "@timestamp", "id": "3" } ], "metrics": [{"type": "count", "id": "1" }] @@ -69,7 +69,9 @@ func TestExecuteTimeSeriesQuery(t *testing.T) { sr := c.multisearchRequests[0].Requests[0] firstLevel := sr.Aggs[0] So(firstLevel.Key, ShouldEqual, "2") - So(firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Field, ShouldEqual, "@host") + termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation) + So(termsAgg.Field, ShouldEqual, "@host") + So(termsAgg.Size, ShouldEqual, 500) secondLevel := firstLevel.Aggregation.Aggs[0] So(secondLevel.Key, ShouldEqual, "3") So(secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp") diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go index 0637a5bbb44..7cb8f0ecd82 100644 --- a/pkg/tsdb/influxdb/query.go +++ b/pkg/tsdb/influxdb/query.go @@ -4,7 +4,6 @@ import ( "fmt" "strconv" "strings" - "time" "regexp" @@ -34,7 +33,7 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) { res = strings.Replace(res, "$timeFilter", query.renderTimeFilter(queryContext), -1) res = strings.Replace(res, "$interval", 
interval.Text, -1) - res = strings.Replace(res, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) + res = strings.Replace(res, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1) res = strings.Replace(res, "$__interval", interval.Text, -1) return res, nil } diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go index f1270560269..cc1358a72d7 100644 --- a/pkg/tsdb/influxdb/query_test.go +++ b/pkg/tsdb/influxdb/query_test.go @@ -158,7 +158,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) { So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" < 10001`) }) - Convey("can render number greather then condition tags", func() { + Convey("can render number greater than condition tags", func() { query := &Query{Tags: []*Tag{{Operator: ">", Value: "10001", Key: "key"}}} So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" > 10001`) diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval.go index 49904f27a37..fd6adee39d7 100644 --- a/pkg/tsdb/interval.go +++ b/pkg/tsdb/interval.go @@ -49,6 +49,10 @@ func NewIntervalCalculator(opt *IntervalOptions) *intervalCalculator { return calc } +func (i *Interval) Milliseconds() int64 { + return i.Value.Nanoseconds() / int64(time.Millisecond) +} + func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval { to := timerange.MustGetTo().UnixNano() from := timerange.MustGetFrom().UnixNano() diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index caba043e7b6..0a260f7ad70 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -13,12 +13,13 @@ const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type msSqlMacroEngine struct { + *tsdb.SqlMacroEngineBase timeRange *tsdb.TimeRange query *tsdb.Query } func newMssqlMacroEngine() tsdb.SqlMacroEngine { - return &msSqlMacroEngine{} + return &msSqlMacroEngine{SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase()} } func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { @@ -27,7 +28,7 @@ func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa rExp, _ := regexp.Compile(sExpr) var macroError error - sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { + sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { args := strings.Split(groups[2], ",") for i, arg := range args { args[i] = strings.Trim(arg, " ") @@ -47,23 +48,6 @@ func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa return sql, nil } -func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { - result := "" - lastIndex := 0 - - for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) { - groups := []string{} - for i := 0; i < len(v); i += 2 { - groups = append(groups, str[v[i]:v[i+1]]) - } - - result += str[lastIndex:v[0]] + repl(groups) - lastIndex = v[1] - } - - return result + str[lastIndex:] -} - func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": @@ -82,10 +66,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return fmt.Sprintf("'%s'",
m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -112,10 +92,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 8e0973b750c..7456238efa4 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -111,20 +111,6 @@ func TestMacroEngine(t *testing.T) { So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) @@ -132,20 +118,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')") @@ -171,40 +143,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select 
time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -219,40 +163,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index 72e57d03fa0..469d6baa5de 100644 --- a/pkg/tsdb/mssql/mssql.go +++ b/pkg/tsdb/mssql/mssql.go @@ -52,13 +52,18 @@ func generateConnectionString(datasource *models.DataSource) string { } server, port := hostParts[0], hostParts[1] - return fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;", + encrypt := datasource.JsonData.Get("encrypt").MustString("false") + connStr := fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;", server, port, datasource.Database, datasource.User, password, ) + if encrypt != "false" { + connStr += fmt.Sprintf("encrypt=%s;", encrypt) + } + return connStr } type mssqlRowTransformer struct { diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index 30d1da3bda1..c3d4470603d 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -1,6 +1,7 @@ package mssql import ( + "context" "fmt" "math/rand" "strings" @@ -35,6 +36,11 @@ func TestMSSQL(t *testing.T) { return x, nil } + origInterpolate := tsdb.Interpolate + tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + return sql, nil + } + endpoint, err := newMssqlQueryEndpoint(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, @@ -47,6 +53,7 @@ func TestMSSQL(t *testing.T) { Reset(func() { sess.Close() tsdb.NewXormEngine = origXormEngine + tsdb.Interpolate = origInterpolate }) 
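Note: the test setup above stubs the package-level tsdb.Interpolate function variable and restores the original in Reset, so most cases skip macro expansion while selected cases opt back in via origInterpolate. A minimal sketch of that stub-and-restore pattern, with illustrative names not taken from the patch:

```
package example

import "testing"

// interpolate stands in for a package-level function variable such as
// tsdb.Interpolate; the variable and test names here are illustrative.
var interpolate = func(sql string) string { return sql }

func TestWithStubbedInterpolate(t *testing.T) {
	orig := interpolate
	interpolate = func(sql string) string { return "stubbed" } // stub out macro expansion
	defer func() { interpolate = orig }()                      // restore, like the Reset(...) block above

	if got := interpolate("SELECT 1"); got != "stubbed" {
		t.Fatalf("stub not in effect, got %q", got)
	}
}
```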
Convey("Given a table with different native data types", func() { @@ -122,7 +129,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) @@ -212,7 +219,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -259,7 +266,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -295,6 +302,40 @@ func TestMSSQL(t *testing.T) { }) + Convey("When doing a metric query using timeGroup and $__interval", func() { + mockInterpolate := tsdb.Interpolate + tsdb.Interpolate = origInterpolate + + Reset(func() { + tsdb.Interpolate = mockInterpolate + }) + + Convey("Should replace $__interval", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, $__interval) ORDER BY 1", + "format": "time_series", + }), + RefId: "A", + }, + }, + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT FLOOR(DATEDIFF(second, '1970-01-01', time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time)/60)*60 ORDER BY 1") + }) + }) + Convey("When doing a metric query using timeGroup with float fill enabled", func() { query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ @@ -312,7 +353,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -401,7 +442,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -423,7 +464,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -445,7 +486,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -467,7 +508,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -489,7 +530,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + 
resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -511,7 +552,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -533,7 +574,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -555,7 +596,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -577,7 +618,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -600,7 +641,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -623,7 +664,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -635,6 +676,30 @@ func TestMSSQL(t *testing.T) { So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo") }) + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) + Convey("Given a stored procedure that takes @from and @to in epoch time", func() { sql := ` IF object_id('sp_test_epoch') IS NOT NULL @@ -679,9 +744,11 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using stored procedure should return correct result", func() { + tsdb.Interpolate = origInterpolate query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ { + DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `DECLARE @from int = $__unixEpochFrom(), @@ -699,7 +766,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) 
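The `timeFrom,timeTo,unixEpochFrom,unixEpochTo` test added above exercises the new global substitutions rather than the per-dialect macro engines: with the fake time range ending at 2018-03-15T13:00:00Z, `$__timeFrom()` expands to `'2018-03-15T12:55:00Z'` and `$__unixEpochFrom()` to `1521118500`. A condensed sketch of that replacement step (the full `Interpolate` added in `sql_engine.go` further down in this diff also handles `$__interval` and `$__interval_ms`; the helper name here is hypothetical):

```go
package tsdb

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// interpolateRangeMacros expands the parameterless time-range macros with
// plain string replacement, mirroring the global Interpolate in sql_engine.go.
func interpolateRangeMacros(sql string, from, to time.Time) string {
	sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", from.UTC().Format(time.RFC3339)), -1)
	sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", to.UTC().Format(time.RFC3339)), -1)
	sql = strings.Replace(sql, "$__unixEpochFrom()", strconv.FormatInt(from.Unix(), 10), -1)
	sql = strings.Replace(sql, "$__unixEpochTo()", strconv.FormatInt(to.Unix(), 10), -1)
	return sql
}
```

Because these macros take no arguments, simple string replacement suffices; the argument-taking macros such as `$__timeFilter(col)` still go through each dialect's regex-based engine.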
So(queryResult.Error, ShouldBeNil) @@ -756,9 +823,11 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using stored procedure should return correct result", func() { + tsdb.Interpolate = origInterpolate query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ { + DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `DECLARE @from int = $__unixEpochFrom(), @@ -776,7 +845,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -852,7 +921,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -875,7 +944,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -901,7 +970,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -931,7 +1000,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -961,7 +1030,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -991,7 +1060,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1019,7 +1088,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1047,7 +1116,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 0dabdd7c283..a037aa9277a 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -9,17 +9,17 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -//const rsString = `(?:"([^"]*)")`; const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type mySqlMacroEngine struct { + *tsdb.SqlMacroEngineBase timeRange *tsdb.TimeRange query *tsdb.Query } func newMysqlMacroEngine() tsdb.SqlMacroEngine { - return &mySqlMacroEngine{} + return &mySqlMacroEngine{SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase()} } func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { @@ 
-28,7 +28,7 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa rExp, _ := regexp.Compile(sExpr) var macroError error - sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { + sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { args := strings.Split(groups[2], ",") for i, arg := range args { args[i] = strings.Trim(arg, " ") @@ -48,23 +48,6 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa return sql, nil } -func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { - result := "" - lastIndex := 0 - - for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) { - groups := []string{} - for i := 0; i < len(v); i += 2 { - groups = append(groups, str[v[i]:v[i+1]]) - } - - result += str[lastIndex:v[0]] + repl(groups) - lastIndex = v[1] - } - - return result + str[lastIndex:] -} - func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__timeEpoch", "__time": @@ -78,10 +61,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -108,10 +87,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index fe153ca3e2d..3c9a5a26c94 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -63,20 +63,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) @@ -84,20 +70,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err 
:= engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')") @@ -123,40 +95,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -171,40 +115,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index 645f6b49bbb..35b03e489a0 100644 --- 
a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -5,6 +5,7 @@ import ( "fmt" "reflect" "strconv" + "strings" "github.com/go-sql-driver/mysql" "github.com/go-xorm/core" @@ -20,10 +21,14 @@ func init() { func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { logger := log.New("tsdb.mysql") + protocol := "tcp" + if strings.HasPrefix(datasource.Url, "/") { + protocol = "unix" + } cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true", datasource.User, datasource.Password, - "tcp", + protocol, datasource.Url, datasource.Database, ) diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index ca6df8e360e..476e3ba6586 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -1,6 +1,7 @@ package mysql import ( + "context" "fmt" "math/rand" "strings" @@ -42,6 +43,11 @@ func TestMySQL(t *testing.T) { return x, nil } + origInterpolate := tsdb.Interpolate + tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + return sql, nil + } + endpoint, err := newMysqlQueryEndpoint(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, @@ -54,6 +60,7 @@ func TestMySQL(t *testing.T) { Reset(func() { sess.Close() tsdb.NewXormEngine = origXormEngine + tsdb.Interpolate = origInterpolate }) Convey("Given a table with different native data types", func() { @@ -123,7 +130,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -211,7 +218,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -258,7 +265,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -295,6 +302,40 @@ func TestMySQL(t *testing.T) { }) + Convey("When doing a metric query using timeGroup and $__interval", func() { + mockInterpolate := tsdb.Interpolate + tsdb.Interpolate = origInterpolate + + Reset(func() { + tsdb.Interpolate = mockInterpolate + }) + + Convey("Should replace $__interval", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", + "format": "time_series", + }), + RefId: "A", + }, + }, + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT UNIX_TIMESTAMP(time) DIV 60 * 60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1") + }) + }) + Convey("When doing a metric query using timeGroup with value fill enabled", func() { query := &tsdb.TsdbQuery{ Queries: 
[]*tsdb.Query{ @@ -312,7 +353,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -338,7 +379,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -433,7 +474,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -455,7 +496,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -477,7 +518,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -499,7 +540,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -521,7 +562,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -543,7 +584,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -565,7 +606,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -587,7 +628,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -609,7 +650,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -631,7 +672,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -653,7 +694,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -676,7 +717,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := 
resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -701,7 +742,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -712,6 +753,30 @@ func TestMySQL(t *testing.T) { }) }) + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) + Convey("Given a table with event data", func() { type event struct { TimeSec int64 @@ -762,7 +827,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -785,7 +850,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -811,7 +876,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -841,7 +906,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -871,7 +936,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -901,7 +966,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -929,7 +994,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -957,7 +1022,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/postgres/macros.go 
b/pkg/tsdb/postgres/macros.go index 0a2ea1d2af6..0fa5d8077e1 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -9,18 +9,21 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -//const rsString = `(?:"([^"]*)")`; const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type postgresMacroEngine struct { + *tsdb.SqlMacroEngineBase timeRange *tsdb.TimeRange query *tsdb.Query timescaledb bool } func newPostgresMacroEngine(timescaledb bool) tsdb.SqlMacroEngine { - return &postgresMacroEngine{timescaledb: timescaledb} + return &postgresMacroEngine{ + SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase(), + timescaledb: timescaledb, + } } func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { @@ -29,7 +32,7 @@ func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.Tim rExp, _ := regexp.Compile(sExpr) var macroError error - sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { + sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { // detect if $__timeGroup is supposed to add AS time for pre 5.3 compatibility // if there is a ',' directly after the macro call $__timeGroup is probably used @@ -66,23 +69,6 @@ func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.Tim return sql, nil } -func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { - result := "" - lastIndex := 0 - - for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) { - groups := []string{} - for i := 0; i < len(v); i += 2 { - groups = append(groups, str[v[i]:v[i+1]]) - } - - result += str[lastIndex:v[0]] + repl(groups) - lastIndex = v[1] - } - - return result + str[lastIndex:] -} - func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": @@ -101,10 +87,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) @@ -136,10 +118,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index b0b7a28ddd4..8a3699f82b2 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -44,13 +44,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), 
to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - Convey("interpolate __timeGroup function pre 5.3 compatibility", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value") @@ -102,13 +95,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)") }) - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) @@ -116,20 +102,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')") @@ -155,40 +127,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -203,40 +147,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select 
$__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 4e05f676682..c381938aead 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -1,6 +1,7 @@ package postgres import ( + "context" "fmt" "math/rand" "strings" @@ -43,6 +44,11 @@ func TestPostgres(t *testing.T) { return x, nil } + origInterpolate := tsdb.Interpolate + tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + return sql, nil + } + endpoint, err := newPostgresQueryEndpoint(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, @@ -55,6 +61,7 @@ func TestPostgres(t *testing.T) { Reset(func() { sess.Close() tsdb.NewXormEngine = origXormEngine + tsdb.Interpolate = origInterpolate }) Convey("Given a table with different native data types", func() { @@ -111,7 +118,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -191,7 +198,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -222,6 +229,40 @@ func TestPostgres(t *testing.T) { } }) + Convey("When doing a metric query using timeGroup and $__interval", func() { + mockInterpolate := tsdb.Interpolate + tsdb.Interpolate = origInterpolate + + Reset(func() { + tsdb.Interpolate = mockInterpolate + }) + + Convey("Should replace $__interval", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", + "format": "time_series", + }), + RefId: "A", + }, + }, + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT 
floor(extract(epoch from time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1") + }) + }) + Convey("When doing a metric query using timeGroup with NULL fill enabled", func() { query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ @@ -239,7 +280,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -293,7 +334,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -320,7 +361,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -410,7 +451,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -432,7 +473,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -454,7 +495,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -476,7 +517,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -498,7 +539,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -520,7 +561,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -542,7 +583,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -564,7 +605,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -586,7 +627,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -609,7 +650,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) 
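Comparing the three dialects' expected output for `$__timeGroup(time, $__interval)` shows they are the same arithmetic in different spellings: MSSQL's `FLOOR(DATEDIFF(second, '1970-01-01', time)/60)*60`, MySQL's `UNIX_TIMESTAMP(time) DIV 60 * 60`, and Postgres's `floor(extract(epoch from time)/60)*60` each truncate a Unix timestamp down to the start of its interval-sized bucket. A tiny illustration of that bucketing (hypothetical helper, not part of the diff):

```go
package main

import "fmt"

// truncateToBucket floors an epoch timestamp to its interval boundary;
// integer division discards the remainder, exactly like the generated SQL.
func truncateToBucket(epochSeconds, intervalSeconds int64) int64 {
	return epochSeconds / intervalSeconds * intervalSeconds
}

func main() {
	// 2018-03-15T12:57:13Z with a 1m (60s) interval lands in the 12:57:00 bucket.
	fmt.Println(truncateToBucket(1521118633, 60)) // prints 1521118620
}
```

The 60 in each expected string comes from the interval calculator: these tests use a 5-minute range with the 60-second minimum interval, so `$__interval` resolves to `1m`.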
queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -634,7 +675,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -643,6 +684,30 @@ func TestPostgres(t *testing.T) { So(queryResult.Series[0].Name, ShouldEqual, "valueOne") So(queryResult.Series[1].Name, ShouldEqual, "valueTwo") }) + + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(context.Background(), nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) }) Convey("Given a table with event data", func() { @@ -695,7 +760,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -718,7 +783,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -744,7 +809,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -774,7 +839,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -804,7 +869,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -834,7 +899,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -862,7 +927,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -890,7 +955,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, 
ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go index bf9fe9f152c..83bb683fccf 100644 --- a/pkg/tsdb/prometheus/prometheus.go +++ b/pkg/tsdb/prometheus/prometheus.go @@ -92,12 +92,12 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc return nil, err } - querys, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) + queries, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) if err != nil { return nil, err } - for _, query := range querys { + for _, query := range queries { timeRange := apiv1.Range{ Start: query.Start, End: query.End, diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 454853c7cc8..1a4e2bd3943 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -6,6 +6,7 @@ import ( "database/sql" "fmt" "math" + "regexp" "strconv" "strings" "sync" @@ -43,6 +44,8 @@ var engineCache = engineCacheType{ versions: make(map[int64]int), } +var sqlIntervalCalculator = NewIntervalCalculator(nil) + var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) { return xorm.NewEngine(driverName, connectionString) } @@ -95,8 +98,12 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo return nil, err } - engine.SetMaxOpenConns(10) - engine.SetMaxIdleConns(10) + maxOpenConns := config.Datasource.JsonData.Get("maxOpenConns").MustInt(0) + engine.SetMaxOpenConns(maxOpenConns) + maxIdleConns := config.Datasource.JsonData.Get("maxIdleConns").MustInt(2) + engine.SetMaxIdleConns(maxIdleConns) + connMaxLifetime := config.Datasource.JsonData.Get("connMaxLifetime").MustInt(14400) + engine.SetConnMaxLifetime(time.Duration(connMaxLifetime) * time.Second) engineCache.versions[config.Datasource.Id] = config.Datasource.Version engineCache.cache[config.Datasource.Id] = engine @@ -113,9 +120,7 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, Results: make(map[string]*QueryResult), } - session := e.engine.NewSession() - defer session.Close() - db := session.DB() + var wg sync.WaitGroup for _, query := range tsdbQuery.Queries { rawSQL := query.Model.Get("rawSql").MustString() @@ -126,43 +131,79 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId} result.Results[query.RefId] = queryResult - rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL) + // global substitutions + rawSQL, err := Interpolate(query, tsdbQuery.TimeRange, rawSQL) if err != nil { queryResult.Error = err continue } - queryResult.Meta.Set("sql", rawSQL) - - rows, err := db.Query(rawSQL) + // datasource specific substitutions + rawSQL, err = e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL) if err != nil { queryResult.Error = err continue } - defer rows.Close() + queryResult.Meta.Set("sql", rawSQL) + + wg.Add(1) - format := query.Model.Get("format").MustString("time_series") + go func(rawSQL string, query *Query, queryResult *QueryResult) { + defer wg.Done() + session := e.engine.NewSession() + defer session.Close() + db := session.DB() - switch format { - case "time_series": - err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery) + rows, err := db.Query(rawSQL) if err != nil { queryResult.Error = err - continue + return } - case "table": - err := e.transformToTable(query, rows, queryResult, tsdbQuery) - if err != nil { - 
queryResult.Error = err - continue + + defer rows.Close() + + format := query.Model.Get("format").MustString("time_series") + + switch format { + case "time_series": + err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery) + if err != nil { + queryResult.Error = err + return + } + case "table": + err := e.transformToTable(query, rows, queryResult, tsdbQuery) + if err != nil { + queryResult.Error = err + return + } } - } + }(rawSQL, query, queryResult) } + wg.Wait() return result, nil } +// global macros/substitutions for all sql datasources +var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string, error) { + minInterval, err := GetIntervalFrom(query.DataSource, query.Model, time.Second*60) + if err != nil { + return sql, nil + } + interval := sqlIntervalCalculator.Calculate(timeRange, minInterval) + + sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1) + sql = strings.Replace(sql, "$__interval", interval.Text, -1) + sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1) + sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1) + sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1) + sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1) + + return sql, nil +} + func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error { columnNames, err := rows.Columns() columnCount := len(columnNames) @@ -589,3 +630,26 @@ func SetupFillmode(query *Query, interval time.Duration, fillmode string) error return nil } + +type SqlMacroEngineBase struct{} + +func NewSqlMacroEngineBase() *SqlMacroEngineBase { + return &SqlMacroEngineBase{} +} + +func (m *SqlMacroEngineBase) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { + result := "" + lastIndex := 0 + + for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) { + groups := []string{} + for i := 0; i < len(v); i += 2 { + groups = append(groups, str[v[i]:v[i+1]]) + } + + result += str[lastIndex:v[0]] + repl(groups) + lastIndex = v[1] + } + + return result + str[lastIndex:] +} diff --git a/pkg/tsdb/sql_engine_test.go b/pkg/tsdb/sql_engine_test.go index 854734fac31..bfcc82aac47 100644 --- a/pkg/tsdb/sql_engine_test.go +++ b/pkg/tsdb/sql_engine_test.go @@ -1,10 +1,13 @@ package tsdb import ( + "fmt" "testing" "time" "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" . 
"github.com/smartystreets/goconvey/convey" ) @@ -14,6 +17,63 @@ func TestSqlEngine(t *testing.T) { dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC) earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC) + Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { + from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) + to := from.Add(5 * time.Minute) + timeRange := NewFakeTimeRange("5m", "now", to) + query := &Query{DataSource: &models.DataSource{}, Model: simplejson.New()} + + Convey("interpolate $__interval", func() { + sql, err := Interpolate(query, timeRange, "select $__interval ") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select 1m ") + }) + + Convey("interpolate $__interval in $__timeGroup", func() { + sql, err := Interpolate(query, timeRange, "select $__timeGroupAlias(time,$__interval)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select $__timeGroupAlias(time,1m)") + }) + + Convey("interpolate $__interval_ms", func() { + sql, err := Interpolate(query, timeRange, "select $__interval_ms ") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select 60000 ") + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := Interpolate(query, timeRange, "select $__timeFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := Interpolate(query, timeRange, "select $__timeTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) + + }) + Convey("Given row values with time.Time as time columns", func() { var nilPointer *time.Time diff --git a/pkg/tsdb/stackdriver/annotation_query.go b/pkg/tsdb/stackdriver/annotation_query.go new file mode 100644 index 00000000000..db35171ad70 --- /dev/null +++ b/pkg/tsdb/stackdriver/annotation_query.go @@ -0,0 +1,120 @@ +package stackdriver + +import ( + "context" + "strconv" + "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" +) + +func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + firstQuery := tsdbQuery.Queries[0] + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + queryRes, resp, err := e.executeQuery(ctx, queries[0], tsdbQuery) + if err != nil { + return nil, err + } + title := firstQuery.Model.Get("title").MustString() + text := firstQuery.Model.Get("text").MustString() + tags := firstQuery.Model.Get("tags").MustString() + err = e.parseToAnnotations(queryRes, resp, queries[0], title, text, tags) + result.Results[firstQuery.RefId] = queryRes + + return result, err +} + +func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery, title string, text string, tags string) error { + annotations := make([]map[string]string, 0) + + for _, series := range data.TimeSeries { + 
// reverse the order to be ascending + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := strconv.FormatFloat(point.Value.DoubleValue, 'f', 6, 64) + if series.ValueType == "STRING" { + value = point.Value.StringValue + } + annotation := make(map[string]string) + annotation["time"] = point.Interval.EndTime.UTC().Format(time.RFC3339) + annotation["title"] = formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotation["tags"] = tags + annotation["text"] = formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotations = append(annotations, annotation) + } + } + + transformAnnotationToTable(annotations, queryRes) + return nil +} + +func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) { + table := &tsdb.Table{ + Columns: make([]tsdb.TableColumn, 4), + Rows: make([]tsdb.RowValues, 0), + } + table.Columns[0].Text = "time" + table.Columns[1].Text = "title" + table.Columns[2].Text = "tags" + table.Columns[3].Text = "text" + + for _, r := range data { + values := make([]interface{}, 4) + values[0] = r["time"] + values[1] = r["title"] + values[2] = r["tags"] + values[3] = r["text"] + table.Rows = append(table.Rows, values) + } + result.Tables = append(result.Tables, table) + result.Meta.Set("rowCount", len(data)) + slog.Info("anno", "len", len(data)) +} + +func formatAnnotationText(annotationText string, pointValue string, metricType string, metricLabels map[string]string, resourceLabels map[string]string) string { + result := legendKeyFormat.ReplaceAllFunc([]byte(annotationText), func(in []byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + if metaPartName == "metric.value" { + return []byte(pointValue) + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) + + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} diff --git a/pkg/tsdb/stackdriver/annotation_query_test.go b/pkg/tsdb/stackdriver/annotation_query_test.go new file mode 100644 index 00000000000..8229470d665 --- /dev/null +++ b/pkg/tsdb/stackdriver/annotation_query_test.go @@ -0,0 +1,33 @@ +package stackdriver + +import ( + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestStackdriverAnnotationQuery(t *testing.T) { + Convey("Stackdriver Annotation Query Executor", t, func() { + executor := &StackdriverExecutor{} + Convey("When parsing the stackdriver api response", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + query := &StackdriverQuery{} + err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag") + So(err, ShouldBeNil) + + Convey("Should return annotations table", func() { + So(len(res.Tables), ShouldEqual, 1) + So(len(res.Tables[0].Rows), ShouldEqual, 9) + So(res.Tables[0].Rows[0][1], ShouldEqual, "atitle collector-asia-east-1 9.856650") + So(res.Tables[0].Rows[0][3], ShouldEqual, "atext asia-east1-a") + }) + }) + }) +} diff --git a/pkg/tsdb/stackdriver/stackdriver.go b/pkg/tsdb/stackdriver/stackdriver.go new file mode 100644 index 00000000000..96242dfdec4 --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver.go @@ -0,0 +1,567 @@ +package stackdriver + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "math" + "net/http" + "net/url" + "path" + "regexp" + "strconv" + "strings" + "time" + + "golang.org/x/net/context/ctxhttp" + + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/opentracing/opentracing-go" +) + +var ( + slog log.Logger + legendKeyFormat *regexp.Regexp + metricNameFormat *regexp.Regexp +) + +// StackdriverExecutor executes queries for the Stackdriver datasource +type StackdriverExecutor struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +// NewStackdriverExecutor initializes a http client +func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + httpClient, err := dsInfo.GetHttpClient() + if err != nil { + return nil, err + } + + return &StackdriverExecutor{ + httpClient: httpClient, + dsInfo: dsInfo, + }, nil +} + +func init() { + slog = log.New("tsdb.stackdriver") + tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor) + legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) + metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.googleapis\.com/(.+)`) +} + +// Query takes in the frontend queries, parses them into the Stackdriver query format +// executes the queries against the Stackdriver API and parses the response into +// the time series or table format +func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + var result *tsdb.Response + var err error + queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("") + + switch queryType { + case "annotationQuery": + result, err = e.executeAnnotationQuery(ctx, tsdbQuery) + case "timeSeriesQuery": + fallthrough + default: + result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery) + } + + return result, err +} + +func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + 
Results: make(map[string]*tsdb.QueryResult), + } + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery) + if err != nil { + return nil, err + } + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} + +func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) { + stackdriverQueries := []*StackdriverQuery{} + + startTime, err := tsdbQuery.TimeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := tsdbQuery.TimeRange.ParseTo() + if err != nil { + return nil, err + } + + durationSeconds := int(endTime.Sub(startTime).Seconds()) + + for _, query := range tsdbQuery.Queries { + var target string + + metricType := query.Model.Get("metricType").MustString() + filterParts := query.Model.Get("filters").MustArray() + + params := url.Values{} + params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339)) + params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339)) + params.Add("filter", buildFilterString(metricType, filterParts)) + params.Add("view", query.Model.Get("view").MustString("FULL")) + setAggParams(¶ms, query, durationSeconds) + + target = params.Encode() + + if setting.Env == setting.DEV { + slog.Debug("Stackdriver request", "params", params) + } + + groupBys := query.Model.Get("groupBys").MustArray() + groupBysAsStrings := make([]string, 0) + for _, groupBy := range groupBys { + groupBysAsStrings = append(groupBysAsStrings, groupBy.(string)) + } + + aliasBy := query.Model.Get("aliasBy").MustString() + + stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{ + Target: target, + Params: params, + RefID: query.RefId, + GroupBys: groupBysAsStrings, + AliasBy: aliasBy, + }) + } + + return stackdriverQueries, nil +} + +func reverse(s string) string { + chars := []rune(s) + for i, j := 0, len(chars)-1; i < j; i, j = i+1, j-1 { + chars[i], chars[j] = chars[j], chars[i] + } + return string(chars) +} + +func interpolateFilterWildcards(value string) string { + re := regexp.MustCompile("[*]") + matches := len(re.FindAllStringIndex(value, -1)) + if matches == 2 && strings.HasSuffix(value, "*") && strings.HasPrefix(value, "*") { + value = strings.Replace(value, "*", "", -1) + value = fmt.Sprintf(`has_substring("%s")`, value) + } else if matches == 1 && strings.HasPrefix(value, "*") { + value = strings.Replace(value, "*", "", 1) + value = fmt.Sprintf(`ends_with("%s")`, value) + } else if matches == 1 && strings.HasSuffix(value, "*") { + value = reverse(strings.Replace(reverse(value), "*", "", 1)) + value = fmt.Sprintf(`starts_with("%s")`, value) + } else if matches != 0 { + re := regexp.MustCompile(`[-\/^$+?.()|[\]{}]`) + value = string(re.ReplaceAllFunc([]byte(value), func(in []byte) []byte { + return []byte(strings.Replace(string(in), string(in), `\\`+string(in), 1)) + })) + value = strings.Replace(value, "*", ".*", -1) + value = strings.Replace(value, `"`, `\\"`, -1) + value = fmt.Sprintf(`monitoring.regex.full_match("^%s$")`, value) + } + + return value +} + +func buildFilterString(metricType string, filterParts []interface{}) string { + filterString := "" + for i, part := range filterParts { + mod := i % 4 + if part == "AND" { + filterString += " " + } else if mod == 2 { + operator := filterParts[i-1] + if operator == "=~" || operator == "!=~" { + filterString = 
reverse(strings.Replace(reverse(filterString), "~", "", 1)) + filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part) + } else if strings.Contains(part.(string), "*") { + filterString += interpolateFilterWildcards(part.(string)) + } else { + filterString += fmt.Sprintf(`"%s"`, part) + } + } else { + filterString += part.(string) + } + } + return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ") +} + +func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) { + primaryAggregation := query.Model.Get("primaryAggregation").MustString() + perSeriesAligner := query.Model.Get("perSeriesAligner").MustString() + alignmentPeriod := query.Model.Get("alignmentPeriod").MustString() + + if primaryAggregation == "" { + primaryAggregation = "REDUCE_NONE" + } + + if perSeriesAligner == "" { + perSeriesAligner = "ALIGN_MEAN" + } + + if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" { + alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0)) + alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + "s" + } + + if alignmentPeriod == "stackdriver-auto" { + alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0)) + if alignmentPeriodValue < 60*60*23 { + alignmentPeriod = "+60s" + } else if alignmentPeriodValue < 60*60*24*6 { + alignmentPeriod = "+300s" + } else { + alignmentPeriod = "+3600s" + } + } + + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod), 10, 64) + if err != nil || seconds > 3600 { + alignmentPeriod = "+3600s" + } + + params.Add("aggregation.crossSeriesReducer", primaryAggregation) + params.Add("aggregation.perSeriesAligner", perSeriesAligner) + params.Add("aggregation.alignmentPeriod", alignmentPeriod) + + groupBys := query.Model.Get("groupBys").MustArray() + if len(groupBys) > 0 { + for i := 0; i < len(groupBys); i++ { + params.Add("aggregation.groupByFields", groupBys[i].(string)) + } + } +} + +func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) { + queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + alignmentPeriod, ok := req.URL.Query()["aggregation.alignmentPeriod"] + + if ok { + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod[0]), 10, 64) + if err == nil { + queryResult.Meta.Set("alignmentPeriod", seconds) + } + } + + span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query") + span.SetTag("target", query.Target) + span.SetTag("from", tsdbQuery.TimeRange.From) + span.SetTag("until", tsdbQuery.TimeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + return queryResult, data, nil +} + +func (e 
*StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return StackdriverResponse{}, err + } + + if res.StatusCode/100 != 2 { + slog.Error("Request failed", "status", res.Status, "body", string(body)) + return StackdriverResponse{}, fmt.Errorf(string(body)) + } + + var data StackdriverResponse + err = json.Unmarshal(body, &data) + if err != nil { + slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body)) + return StackdriverResponse{}, err + } + + return data, nil +} + +func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error { + metricLabels := make(map[string][]string) + resourceLabels := make(map[string][]string) + + for _, series := range data.TimeSeries { + points := make([]tsdb.TimePoint, 0) + + defaultMetricName := series.Metric.Type + + for key, value := range series.Metric.Labels { + if !containsLabel(metricLabels[key], value) { + metricLabels[key] = append(metricLabels[key], value) + } + if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) { + defaultMetricName += " " + value + } + } + + for key, value := range series.Resource.Labels { + if !containsLabel(resourceLabels[key], value) { + resourceLabels[key] = append(resourceLabels[key], value) + } + if containsLabel(query.GroupBys, "resource.label."+key) { + defaultMetricName += " " + value + } + } + + // reverse the order to be ascending + if series.ValueType != "DISTRIBUTION" { + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := point.Value.DoubleValue + + if series.ValueType == "INT64" { + parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64) + if err == nil { + value = parsedValue + } + } + + if series.ValueType == "BOOL" { + if point.Value.BoolValue { + value = 1 + } else { + value = 0 + } + } + + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000)) + } + + metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query) + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: metricName, + Points: points, + }) + } else { + buckets := make(map[int]*tsdb.TimeSeries) + + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + if len(point.Value.DistributionValue.BucketCounts) == 0 { + continue + } + maxKey := 0 + for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ { + value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64) + if err != nil { + continue + } + if _, ok := buckets[i]; !ok { + // set lower bounds + // https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution + bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i) + additionalLabels := map[string]string{"bucket": bucketBound} + buckets[i] = &tsdb.TimeSeries{ + Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query), + Points: make([]tsdb.TimePoint, 0), + } + if maxKey < i { + maxKey = i + } + } + buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000)) + } + + // fill empty bucket + for i := 0; i < maxKey; i++ { + if _, ok := 
buckets[i]; !ok { + bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i) + additionalLabels := map[string]string{"bucket": bucketBound} + buckets[i] = &tsdb.TimeSeries{ + Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query), + Points: make([]tsdb.TimePoint, 0), + } + } + } + } + for i := 0; i < len(buckets); i++ { + queryRes.Series = append(queryRes.Series, buckets[i]) + } + } + } + + queryRes.Meta.Set("resourceLabels", resourceLabels) + queryRes.Meta.Set("metricLabels", metricLabels) + queryRes.Meta.Set("groupBys", query.GroupBys) + + return nil +} + +func containsLabel(labels []string, newLabel string) bool { + for _, val := range labels { + if val == newLabel { + return true + } + } + return false +} + +func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string { + if query.AliasBy == "" { + return defaultMetricName + } + + result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in []byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) + + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + if val, exists := additionalLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} + +func replaceWithMetricPart(metaPartName string, metricType string) []byte { + // https://cloud.google.com/monitoring/api/v3/metrics-details#label_names + shortMatches := metricNameFormat.FindStringSubmatch(metricType) + + if metaPartName == "metric.name" { + if len(shortMatches) > 0 { + return []byte(shortMatches[2]) + } + } + + if metaPartName == "metric.service" { + if len(shortMatches) > 0 { + return []byte(shortMatches[1]) + } + } + + return nil +} + +func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string { + bucketBound := "0" + if n == 0 { + return bucketBound + } + + if bucketOptions.LinearBuckets != nil { + bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10) + } else if bucketOptions.ExponentialBuckets != nil { + bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10) + } else if bucketOptions.ExplicitBuckets != nil { + bucketBound = strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[(n-1)], 10) + } + return bucketBound +} + +func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil) + if err != nil { + slog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Stackdriver") + } + projectName := dsInfo.JsonData.Get("defaultProject").MustString() + proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries") + + var stackdriverRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "stackdriver" { + stackdriverRoute = route + break + } + } + + pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo) + + return req, nil +} diff --git a/pkg/tsdb/stackdriver/stackdriver_test.go b/pkg/tsdb/stackdriver/stackdriver_test.go new file mode 100644 index 00000000000..784bf4a7fbb --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver_test.go @@ -0,0 +1,490 @@ +package stackdriver + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "math" + "strconv" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestStackdriver(t *testing.T) { + Convey("Stackdriver", t, func() { + executor := &StackdriverExecutor{} + + Convey("Parse queries from frontend and build Stackdriver API queries", func() { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := &tsdb.TsdbQuery{ + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "view": "FULL", + "aliasBy": "testalias", + "type": "timeSeriesQuery", + }), + RefId: "A", + }, + }, + } + + Convey("and query has no aggregation set", func() { + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + So(queries[0].AliasBy, ShouldEqual, "testalias") + }) + + Convey("and query has filters", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(len(queries), ShouldEqual, 1) + So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2"`) + }) + + Convey("and alignmentPeriod is set to grafana-auto", func() { + Convey("and IntervalMs is larger than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 1000000 + 
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`) + }) + Convey("and IntervalMs is less than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 30000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + }) + + Convey("and alignmentPeriod is set to stackdriver-auto", func() { + Convey("and range is two hours", func() { + tsdbQuery.TimeRange.From = "1538033322461" + tsdbQuery.TimeRange.To = "1538040522461" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 22 hours", func() { + tsdbQuery.TimeRange.From = "1538034524922" + tsdbQuery.TimeRange.To = "1538113724922" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 23 hours", func() { + tsdbQuery.TimeRange.From = "1538034567985" + tsdbQuery.TimeRange.To = "1538117367985" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`) + }) + + Convey("and range is 7 days", func() { + tsdbQuery.TimeRange.From = "1538036324073" + tsdbQuery.TimeRange.To = "1538641124073" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + }) + + Convey("and alignmentPeriod is set in frontend", func() { + Convey("and alignment period is too big", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+360000s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + + Convey("and alignment period is within accepted range", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+600s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`) + }) + }) + + Convey("and query has aggregation mean set", func() { + tsdbQuery.Queries[0].Model = 
simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "primaryAggregation": "REDUCE_MEAN", + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_MEAN&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_MEAN") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + Convey("and query has group bys", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "primaryAggregation": "REDUCE_NONE", + "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 8) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1") + So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + }) + + Convey("Parse stackdriver response in the time series format", func() { + Convey("when data from query aggregated to one time series", func() { + data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 1) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(len(res.Series), ShouldEqual, 1) + So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count") + So(len(res.Series[0].Points), ShouldEqual, 3) + + Convey("timestamps should be in ascending order", func() { + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536670020000) + + 
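+					// the fixture lists points newest-first; parseResponse walks them
+					// backwards, so each successive point carries a later timestamp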
So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05) + So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536670080000) + + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667) + So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536670260000) + }) + }) + + Convey("when data from query with no aggregation", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1") + }) + + Convey("Should parse to time series", func() { + So(len(res.Series[0].Points), ShouldEqual, 3) + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145) + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676) + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369) + }) + + Convey("Should add meta for labels to the response", func() { + metricLabels := res.Meta.Get("metricLabels").Interface().(map[string][]string) + So(metricLabels, ShouldNotBeNil) + So(len(metricLabels["instance_name"]), ShouldEqual, 3) + So(metricLabels["instance_name"][0], ShouldEqual, "collector-asia-east-1") + So(metricLabels["instance_name"][1], ShouldEqual, "collector-europe-west-1") + So(metricLabels["instance_name"][2], ShouldEqual, "collector-us-east-1") + + resourceLabels := res.Meta.Get("resourceLabels").Interface().(map[string][]string) + So(resourceLabels, ShouldNotBeNil) + So(len(resourceLabels["zone"]), ShouldEqual, 3) + So(resourceLabels["zone"][0], ShouldEqual, "asia-east1-a") + So(resourceLabels["zone"][1], ShouldEqual, "europe-west1-b") + So(resourceLabels["zone"][2], ShouldEqual, "us-east1-b") + + So(len(resourceLabels["project_id"]), ShouldEqual, 1) + So(resourceLabels["project_id"][0], ShouldEqual, "grafana-prod") + }) + }) + + Convey("when data from query with no aggregation and group bys", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add instance name and zone labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b") + }) + }) + + Convey("when data from query with no aggregation and alias by", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := 
&tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+
+				Convey("and the alias pattern is for metric type, a metric label and a resource label", func() {
+
+					query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
+					err = executor.parseResponse(res, data, query)
+					So(err, ShouldBeNil)
+
+					Convey("Should format the legend with metric type, instance name and zone", func() {
+						So(len(res.Series), ShouldEqual, 3)
+						So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a")
+						So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b")
+						So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b")
+					})
+				})
+
+				Convey("and the alias pattern is for metric name", func() {
+
+					query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
+					err = executor.parseResponse(res, data, query)
+					So(err, ShouldBeNil)
+
+					Convey("Should format the legend with metric name and service", func() {
+						So(len(res.Series), ShouldEqual, 3)
+						So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+						So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+						So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+					})
+				})
+			})
+
+			Convey("when data from query is distribution", func() {
+				data, err := loadTestFile("./test-data/3-series-response-distribution.json")
+				So(err, ShouldBeNil)
+				So(len(data.TimeSeries), ShouldEqual, 1)
+
+				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+				query := &StackdriverQuery{AliasBy: "{{bucket}}"}
+				err = executor.parseResponse(res, data, query)
+				So(err, ShouldBeNil)
+
+				So(len(res.Series), ShouldEqual, 11)
+				for i := 0; i < 11; i++ {
+					if i == 0 {
+						So(res.Series[i].Name, ShouldEqual, "0")
+					} else {
+						So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
+					}
+					So(len(res.Series[i].Points), ShouldEqual, 3)
+				}
+
+				Convey("timestamps should be in ascending order", func() {
+					So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536668940000)
+					So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536669000000)
+					So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
+				})
+
+				Convey("value should be correct", func() {
+					So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
+					So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
+					So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
+					So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
+					So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
+					So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
+				})
+			})
+
+		})
+
+		Convey("when interpolating filter wildcards", func() {
+			Convey("and wildcard is used in the beginning and the end of the word", func() {
+				Convey("and there's no wildcard in the middle of the word", func() {
+					value := interpolateFilterWildcards("*-central1*")
+					So(value, ShouldEqual, `has_substring("-central1")`)
+				})
+				Convey("and there is a wildcard in the middle of the word", func() {
+					value := interpolateFilterWildcards("*-cent*ral1*")
+					So(value, ShouldNotStartWith, `has_substring`)
+				})
+			})
+
+			Convey("and wildcard is used in the beginning of the word", func() {
+				Convey("and there is not a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*-central1")
+					So(value, ShouldEqual, `ends_with("-central1")`)
+				})
+				Convey("and there is a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*-cent*al1")
+					So(value, ShouldNotStartWith, `ends_with`)
+				})
+			})
+
+			Convey("and wildcard is used at the end of the word", func() {
+				Convey("and there is not a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("us-central*")
+					So(value, ShouldEqual, `starts_with("us-central")`)
+				})
+				Convey("and there is a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*us-central*")
+					So(value, ShouldNotStartWith, `starts_with`)
+				})
+			})
+
+			Convey("and wildcard is used in the middle of the word", func() {
+				Convey("and there is only one wildcard", func() {
+					value := interpolateFilterWildcards("us-ce*tral1-b")
+					So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`)
+				})
+
+				Convey("and there is more than one wildcard", func() {
+					value := interpolateFilterWildcards("us-ce*tra*1-b")
+					So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`)
+				})
+			})
+
+			Convey("and wildcard is used in the middle of the word and in the beginning of the word", func() {
+				value := interpolateFilterWildcards("*s-ce*tral1-b")
+				So(value, ShouldEqual, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`)
+			})
+
+			Convey("and wildcard is used in the middle of the word and in the ending of the word", func() {
+				value := interpolateFilterWildcards("us-ce*tral1-*")
+				So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`)
+			})
+
+			Convey("and no wildcard is used", func() {
+				value := interpolateFilterWildcards("us-central1-a}")
+				So(value, ShouldEqual, `us-central1-a}`)
+			})
+		})
+
+		Convey("when building filter string", func() {
+			Convey("and there's no regex operator", func() {
+				Convey("and there are wildcards in a filter value", func() {
+					filterParts := []interface{}{"zone", "=", "*-central1*"}
+					value := buildFilterString("somemetrictype", filterParts)
+					So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`)
+				})
+
+				Convey("and there are no wildcards in any filter value", func() {
+					filterParts := []interface{}{"zone", "!=", "us-central1-a"}
+					value := buildFilterString("somemetrictype", filterParts)
+					So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`)
+				})
+			})
+
+			Convey("and there is a regex operator", func() {
+				filterParts := []interface{}{"zone", "=~", "us-central1-a~"}
+				value := buildFilterString("somemetrictype", filterParts)
+				Convey("it should remove the ~ character from the operator that belongs to the value", func() {
+					So(value, ShouldNotContainSubstring, `=~`)
+					So(value, ShouldContainSubstring, `zone=`)
+				})
+
+				Convey("it should insert monitoring.regex.full_match before filter value", func() {
+					So(value, ShouldContainSubstring, `zone=monitoring.regex.full_match("us-central1-a~")`)
+				})
+			})
+		})
+	})
+}
+
+func loadTestFile(path string) (StackdriverResponse, error) {
+	var data StackdriverResponse
+
+	jsonBody, err := ioutil.ReadFile(path)
+	if err != nil {
+		return data, err
+	}
+	err = json.Unmarshal(jsonBody, &data)
+	return 
data, err +} diff --git a/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json new file mode 100644 index 00000000000..e1a84583cc4 --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json @@ -0,0 +1,46 @@ +{ + "timeSeries": [ + { + "metric": { + "type": "serviceruntime.googleapis.com\/api\/request_count" + }, + "resource": { + "type": "consumed_api", + "labels": { + "project_id": "grafana-prod" + } + }, + "metricKind": "GAUGE", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:51:00Z", + "endTime": "2018-09-11T12:51:00Z" + }, + "value": { + "doubleValue": 1.0666666666667 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:48:00Z", + "endTime": "2018-09-11T12:48:00Z" + }, + "value": { + "doubleValue": 1.05 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:47:00Z", + "endTime": "2018-09-11T12:47:00Z" + }, + "value": { + "doubleValue": 0.05 + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json new file mode 100644 index 00000000000..da615a168bf --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json @@ -0,0 +1,145 @@ +{ + "timeSeries": [ + { + "metric": { + "labels": { + "instance_name": "collector-asia-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "1119268429530133111", + "zone": "asia-east1-a", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 9.7730520330369 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 9.7323568146676 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.8566497180145 + } + } + ] + }, + { + "metric": { + "labels": { + "instance_name": "collector-europe-west-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "22241654114540837222", + "zone": "europe-west1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 8.8210971239023 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 8.9689492364414 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.0238475054502 + } + } + ] + }, + { + "metric": { + "labels": { + "instance_name": "collector-us-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "3332264424035095333", + "zone": "us-east1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 
30.807846801355 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 30.903974115849 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 30.829426143318 + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json b/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json new file mode 100644 index 00000000000..8603f78eab4 --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json @@ -0,0 +1,112 @@ +{ + "timeSeries": [ + { + "metric": { + "type": "loadbalancing.googleapis.com\/https\/backend_latencies" + }, + "resource": { + "type": "https_lb_rule", + "labels": { + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DISTRIBUTION", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "distributionValue": { + "count": "1", + "bucketOptions": { + "exponentialBuckets": { + "numFiniteBuckets": 10, + "growthFactor": 2, + "scale": 1 + } + }, + "bucketCounts": [ + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "1", + "0" + ] + } + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "distributionValue": { + "count": "1", + "bucketOptions": { + "exponentialBuckets": { + "numFiniteBuckets": 10, + "growthFactor": 2, + "scale": 1 + } + }, + "bucketCounts": [ + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "1" + ] + } + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "distributionValue": { + "count": "3", + "bucketOptions": { + "exponentialBuckets": { + "numFiniteBuckets": 10, + "growthFactor": 2, + "scale": 1 + } + }, + "bucketCounts": [ + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "0", + "1", + "1", + "1" + ] + } + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/types.go b/pkg/tsdb/stackdriver/types.go new file mode 100644 index 00000000000..3821ce7ceda --- /dev/null +++ b/pkg/tsdb/stackdriver/types.go @@ -0,0 +1,75 @@ +package stackdriver + +import ( + "net/url" + "time" +) + +// StackdriverQuery is the query that Grafana sends from the frontend +type StackdriverQuery struct { + Target string + Params url.Values + RefID string + GroupBys []string + AliasBy string +} + +type StackdriverBucketOptions struct { + LinearBuckets *struct { + NumFiniteBuckets int64 `json:"numFiniteBuckets"` + Width int64 `json:"width"` + Offset int64 `json:"offset"` + } `json:"linearBuckets"` + ExponentialBuckets *struct { + NumFiniteBuckets int64 `json:"numFiniteBuckets"` + GrowthFactor float64 `json:"growthFactor"` + Scale float64 `json:"scale"` + } `json:"exponentialBuckets"` + ExplicitBuckets *struct { + Bounds []int64 `json:"bounds"` + } `json:"explicitBuckets"` +} + +// StackdriverResponse is the data returned from the external Google Stackdriver API +type StackdriverResponse struct { + TimeSeries []struct { + Metric struct { + Labels map[string]string `json:"labels"` + Type string `json:"type"` + } `json:"metric"` + Resource struct { + Type string `json:"type"` + Labels map[string]string `json:"labels"` + } `json:"resource"` + MetricKind string `json:"metricKind"` + ValueType string `json:"valueType"` + Points []struct { + Interval struct { + StartTime time.Time 
`json:"startTime"` + EndTime time.Time `json:"endTime"` + } `json:"interval"` + Value struct { + DoubleValue float64 `json:"doubleValue"` + StringValue string `json:"stringValue"` + BoolValue bool `json:"boolValue"` + IntValue string `json:"int64Value"` + DistributionValue struct { + Count string `json:"count"` + Mean float64 `json:"mean"` + SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"` + Range struct { + Min int `json:"min"` + Max int `json:"max"` + } `json:"range"` + BucketOptions StackdriverBucketOptions `json:"bucketOptions"` + BucketCounts []string `json:"bucketCounts"` + Examplars []struct { + Value float64 `json:"value"` + Timestamp string `json:"timestamp"` + // attachments + } `json:"examplars"` + } `json:"distributionValue"` + } `json:"value"` + } `json:"points"` + } `json:"timeSeries"` +} diff --git a/pkg/tsdb/testdata/scenarios.go b/pkg/tsdb/testdata/scenarios.go index e907fa8aae0..421a907b5e9 100644 --- a/pkg/tsdb/testdata/scenarios.go +++ b/pkg/tsdb/testdata/scenarios.go @@ -95,27 +95,20 @@ func init() { Id: "random_walk", Name: "Random Walk", - Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { - timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() - to := tsdbQuery.TimeRange.GetToAsMsEpoch() - - series := newSeriesForQuery(query) - - points := make(tsdb.TimeSeriesPoints, 0) - walker := rand.Float64() * 100 - - for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { - points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) - - walker += rand.Float64() - 0.5 - timeWalkerMs += query.IntervalMs - } - - series.Points = points + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + return getRandomWalk(query, context) + }, + }) - queryRes := tsdb.NewQueryResult() - queryRes.Series = append(queryRes.Series, series) - return queryRes + registerScenario(&Scenario{ + Id: "slow_query", + Name: "Slow Query", + StringInput: "5s", + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + stringInput := query.Model.Get("stringInput").MustString() + parsedInterval, _ := time.ParseDuration(stringInput) + time.Sleep(parsedInterval) + return getRandomWalk(query, context) }, }) @@ -221,6 +214,57 @@ func init() { return queryRes }, }) + + registerScenario(&Scenario{ + Id: "table_static", + Name: "Table Static", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() + to := context.TimeRange.GetToAsMsEpoch() + + table := tsdb.Table{ + Columns: []tsdb.TableColumn{ + {Text: "Time"}, + {Text: "Message"}, + {Text: "Description"}, + {Text: "Value"}, + }, + Rows: []tsdb.RowValues{}, + } + for i := int64(0); i < 10 && timeWalkerMs < to; i++ { + table.Rows = append(table.Rows, tsdb.RowValues{float64(timeWalkerMs), "This is a message", "Description", 23.1}) + timeWalkerMs += query.IntervalMs + } + + queryRes := tsdb.NewQueryResult() + queryRes.Tables = append(queryRes.Tables, &table) + return queryRes + }, + }) +} + +func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() + to := tsdbQuery.TimeRange.GetToAsMsEpoch() + + series := newSeriesForQuery(query) + + points := make(tsdb.TimeSeriesPoints, 0) + walker := rand.Float64() * 100 + + for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { + points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) + + walker += rand.Float64() - 
0.5 + timeWalkerMs += query.IntervalMs + } + + series.Points = points + + queryRes := tsdb.NewQueryResult() + queryRes.Series = append(queryRes.Series, series) + return queryRes } func registerScenario(scenario *Scenario) { diff --git a/pkg/util/md5_test.go b/pkg/util/md5_test.go index 1338d42bb51..43c685b8763 100644 --- a/pkg/util/md5_test.go +++ b/pkg/util/md5_test.go @@ -3,14 +3,14 @@ package util import "testing" func TestMd5Sum(t *testing.T) { - input := "dont hash passwords with md5" + input := "don't hash passwords with md5" have, err := Md5SumString(input) if err != nil { t.Fatal("expected err to be nil") } - want := "2d6a56c82d09d374643b926d3417afba" + want := "dd1f7fdb3466c0d09c2e839d1f1530f8" if have != want { t.Fatalf("expected: %s got: %s", want, have) } diff --git a/public/app/app.ts b/public/app/app.ts index d9e31018af9..298bf5609cd 100644 --- a/public/app/app.ts +++ b/public/app/app.ts @@ -21,7 +21,7 @@ import _ from 'lodash'; import moment from 'moment'; // add move to lodash for backward compatabiltiy -_.move = function(array, fromIndex, toIndex) { +_.move = (array, fromIndex, toIndex) => { array.splice(toIndex, 0, array.splice(fromIndex, 1)[0]); return array; }; @@ -29,7 +29,11 @@ _.move = function(array, fromIndex, toIndex) { import { coreModule, registerAngularDirectives } from './core/core'; import { setupAngularRoutes } from './routes/routes'; -declare var System: any; +// import symlinked extensions +const extensionsIndex = (require as any).context('.', true, /extensions\/index.ts/); +extensionsIndex.keys().forEach(key => { + extensionsIndex(key); +}); export class GrafanaApp { registerFunctions: any; @@ -76,9 +80,9 @@ export class GrafanaApp { $provide.decorator('$http', [ '$delegate', '$templateCache', - function($delegate, $templateCache) { + ($delegate, $templateCache) => { const get = $delegate.get; - $delegate.get = function(url, config) { + $delegate.get = (url, config) => { if (url.match(/\.html$/)) { // some template's already exist in the cache if (!$templateCache.get(url)) { @@ -105,9 +109,9 @@ export class GrafanaApp { 'react', ]; - const module_types = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes']; + const moduleTypes = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes']; - _.each(module_types, type => { + _.each(moduleTypes, type => { const moduleName = 'grafana.' 
+ type; this.useModule(angular.module(moduleName, [])); }); @@ -119,7 +123,7 @@ export class GrafanaApp { coreModule.config(setupAngularRoutes); registerAngularDirectives(); - const preBootRequires = [System.import('app/features/all')]; + const preBootRequires = [import('app/features/all')]; Promise.all(preBootRequires) .then(() => { @@ -135,7 +139,7 @@ export class GrafanaApp { this.preBootModules = null; }); }) - .catch(function(err) { + .catch(err => { console.log('Application boot failed:', err); }); } diff --git a/public/app/containers/AlertRuleList/AlertRuleList.test.tsx b/public/app/containers/AlertRuleList/AlertRuleList.test.tsx deleted file mode 100644 index f88ff4522d4..00000000000 --- a/public/app/containers/AlertRuleList/AlertRuleList.test.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import React from 'react'; -import moment from 'moment'; -import { AlertRuleList } from './AlertRuleList'; -import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv, createNavTree } from 'test/mocks/common'; -import { mount } from 'enzyme'; -import toJson from 'enzyme-to-json'; - -describe('AlertRuleList', () => { - let page, store; - - beforeAll(() => { - backendSrv.get.mockReturnValue( - Promise.resolve([ - { - id: 11, - dashboardId: 58, - panelId: 3, - name: 'Panel Title alert', - state: 'ok', - newStateDate: moment() - .subtract(5, 'minutes') - .format(), - evalData: {}, - executionError: '', - url: 'd/ufkcofof/my-goal', - canEdit: true, - }, - ]) - ); - - store = RootStore.create( - {}, - { - backendSrv: backendSrv, - navTree: createNavTree('alerting', 'alert-list'), - } - ); - - page = mount(); - }); - - it('should call api to get rules', () => { - expect(backendSrv.get.mock.calls[0][0]).toEqual('/api/alerts'); - }); - - it('should render 1 rule', () => { - page.update(); - const ruleNode = page.find('.alert-rule-item'); - expect(toJson(ruleNode)).toMatchSnapshot(); - }); - - it('toggle state should change pause rule if not paused', async () => { - backendSrv.post.mockReturnValue( - Promise.resolve({ - state: 'paused', - }) - ); - - page.find('.fa-pause').simulate('click'); - - // wait for api call to resolve - await Promise.resolve(); - page.update(); - - expect(store.alertList.rules[0].state).toBe('paused'); - expect(page.find('.fa-play')).toHaveLength(1); - }); -}); diff --git a/public/app/containers/AlertRuleList/AlertRuleList.tsx b/public/app/containers/AlertRuleList/AlertRuleList.tsx deleted file mode 100644 index 668136dee6f..00000000000 --- a/public/app/containers/AlertRuleList/AlertRuleList.tsx +++ /dev/null @@ -1,178 +0,0 @@ -import React from 'react'; -import { hot } from 'react-hot-loader'; -import classNames from 'classnames'; -import { inject, observer } from 'mobx-react'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import { AlertRule } from 'app/stores/AlertListStore/AlertListStore'; -import appEvents from 'app/core/app_events'; -import ContainerProps from 'app/containers/ContainerProps'; -import Highlighter from 'react-highlight-words'; - -@inject('view', 'nav', 'alertList') -@observer -export class AlertRuleList extends React.Component { - stateFilters = [ - { text: 'All', value: 'all' }, - { text: 'OK', value: 'ok' }, - { text: 'Not OK', value: 'not_ok' }, - { text: 'Alerting', value: 'alerting' }, - { text: 'No Data', value: 'no_data' }, - { text: 'Paused', value: 'paused' }, - ]; - - constructor(props) { - super(props); - - this.props.nav.load('alerting', 'alert-list'); - this.fetchRules(); - } - - onStateFilterChanged = evt => { - 
-    this.props.view.updateQuery({ state: evt.target.value });
-    this.fetchRules();
-  };
-
-  fetchRules() {
-    this.props.alertList.loadRules({
-      state: this.props.view.query.get('state') || 'all',
-    });
-  }
-
-  onOpenHowTo = () => {
-    appEvents.emit('show-modal', {
-      src: 'public/app/features/alerting/partials/alert_howto.html',
-      modalClass: 'confirm-modal',
-      model: {},
-    });
-  };
-
-  onSearchQueryChange = evt => {
-    this.props.alertList.setSearchQuery(evt.target.value);
-  };
-
-  render() {
-    const { nav, alertList } = this.props;
-
-    return (
- -
-
-
- -
-
- - -
- -
-
- - - -
-
    - {alertList.filteredRules.map(rule => ( - - ))} -
-
-
-
- ); - } -} - -function AlertStateFilterOption({ text, value }) { - return ( - - ); -} - -export interface AlertRuleItemProps { - rule: AlertRule; - search: string; -} - -@observer -export class AlertRuleItem extends React.Component { - toggleState = () => { - this.props.rule.togglePaused(); - }; - - renderText(text: string) { - return ( - - ); - } - - render() { - const { rule } = this.props; - - const stateClass = classNames({ - fa: true, - 'fa-play': rule.isPaused, - 'fa-pause': !rule.isPaused, - }); - - const ruleUrl = `${rule.url}?panelId=${rule.panelId}&fullscreen=true&edit=true&tab=alert`; - - return ( -
[JSX lost in extraction: a list item with a pause/play toggle using stateClass and toggleState, a link to ruleUrl showing the rule name through renderText, the state text with "for {rule.stateAge}", and rule.info rendered through renderText when present]
  • - ); - } -} - -export default hot(module)(AlertRuleList); diff --git a/public/app/containers/ContainerProps.ts b/public/app/containers/ContainerProps.ts deleted file mode 100644 index 97889278fdc..00000000000 --- a/public/app/containers/ContainerProps.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { SearchStore } from './../stores/SearchStore/SearchStore'; -import { ServerStatsStore } from './../stores/ServerStatsStore/ServerStatsStore'; -import { NavStore } from './../stores/NavStore/NavStore'; -import { PermissionsStore } from './../stores/PermissionsStore/PermissionsStore'; -import { AlertListStore } from './../stores/AlertListStore/AlertListStore'; -import { ViewStore } from './../stores/ViewStore/ViewStore'; -import { FolderStore } from './../stores/FolderStore/FolderStore'; - -interface ContainerProps { - search: typeof SearchStore.Type; - serverStats: typeof ServerStatsStore.Type; - nav: typeof NavStore.Type; - alertList: typeof AlertListStore.Type; - permissions: typeof PermissionsStore.Type; - view: typeof ViewStore.Type; - folder: typeof FolderStore.Type; - backendSrv: any; -} - -export default ContainerProps; diff --git a/public/app/containers/Explore/Table.tsx b/public/app/containers/Explore/Table.tsx deleted file mode 100644 index cbb3ab11f4e..00000000000 --- a/public/app/containers/Explore/Table.tsx +++ /dev/null @@ -1,84 +0,0 @@ -import React, { PureComponent } from 'react'; -import TableModel from 'app/core/table_model'; - -const EMPTY_TABLE = new TableModel(); - -interface TableProps { - className?: string; - data: TableModel; - loading: boolean; - onClickCell?: (columnKey: string, rowValue: string) => void; -} - -interface SFCCellProps { - columnIndex: number; - onClickCell?: (columnKey: string, rowValue: string, columnIndex: number, rowIndex: number, table: TableModel) => void; - rowIndex: number; - table: TableModel; - value: string; -} - -function Cell(props: SFCCellProps) { - const { columnIndex, rowIndex, table, value, onClickCell } = props; - const column = table.columns[columnIndex]; - if (column && column.filterable && onClickCell) { - const onClick = event => { - event.preventDefault(); - onClickCell(column.text, value, columnIndex, rowIndex, table); - }; - return ( - - - {value} - - - ); - } - return {value}; -} - -export default class Table extends PureComponent { - render() { - const { className = '', data, loading, onClickCell } = this.props; - const tableModel = data || EMPTY_TABLE; - if (!loading && data && data.rows.length === 0) { - return ( - - - - - - - - - - - -
[JSX lost in extraction: an empty-state table reading "Table" / "The queries returned no data for a table.", and the main <table> whose header maps tableModel.columns to cells rendering {col.text} and whose body maps tableModel.rows to <Cell> elements]
    - ); - } -} diff --git a/public/app/containers/Explore/Wrapper.tsx b/public/app/containers/Explore/Wrapper.tsx deleted file mode 100644 index 6bdbd7cc42f..00000000000 --- a/public/app/containers/Explore/Wrapper.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import React, { PureComponent } from 'react'; - -import Explore from './Explore'; - -export default class Wrapper extends PureComponent { - state = { - initialState: null, - split: false, - }; - - handleChangeSplit = (split, initialState) => { - this.setState({ split, initialState }); - }; - - render() { - // State overrides for props from first Explore - const { initialState, split } = this.state; - return ( -
[JSX lost in extraction: a wrapper element rendering one <Explore> pane and, when split is true, a second <Explore> seeded from initialState]
    - ); - } -} diff --git a/public/app/containers/Explore/slate-plugins/prism/index.tsx b/public/app/containers/Explore/slate-plugins/prism/index.tsx deleted file mode 100644 index d185518790f..00000000000 --- a/public/app/containers/Explore/slate-plugins/prism/index.tsx +++ /dev/null @@ -1,123 +0,0 @@ -import React from 'react'; -import Prism from 'prismjs'; - -const TOKEN_MARK = 'prism-token'; - -export function setPrismTokens(language, field, values, alias = 'variable') { - Prism.languages[language][field] = { - alias, - pattern: new RegExp(`(?:^|\\s)(${values.join('|')})(?:$|\\s)`), - }; -} - -/** - * Code-highlighting plugin based on Prism and - * https://github.com/ianstormtaylor/slate/blob/master/examples/code-highlighting/index.js - * - * (Adapted to handle nested grammar definitions.) - */ - -export default function PrismPlugin({ definition, language }) { - if (definition) { - // Don't override exising modified definitions - Prism.languages[language] = Prism.languages[language] || definition; - } - - return { - /** - * Render a Slate mark with appropiate CSS class names - * - * @param {Object} props - * @return {Element} - */ - - renderMark(props) { - const { children, mark } = props; - // Only apply spans to marks identified by this plugin - if (mark.type !== TOKEN_MARK) { - return undefined; - } - const className = `token ${mark.data.get('types')}`; - return {children}; - }, - - /** - * Decorate code blocks with Prism.js highlighting. - * - * @param {Node} node - * @return {Array} - */ - - decorateNode(node) { - if (node.type !== 'paragraph') { - return []; - } - - const texts = node.getTexts().toArray(); - const tstring = texts.map(t => t.text).join('\n'); - const grammar = Prism.languages[language]; - const tokens = Prism.tokenize(tstring, grammar); - const decorations = []; - let startText = texts.shift(); - let endText = startText; - let startOffset = 0; - let endOffset = 0; - let start = 0; - - function processToken(token, acc?) { - // Accumulate token types down the tree - const types = `${acc || ''} ${token.type || ''} ${token.alias || ''}`; - - // Add mark for token node - if (typeof token === 'string' || typeof token.content === 'string') { - startText = endText; - startOffset = endOffset; - - const content = typeof token === 'string' ? 
token : token.content; - const newlines = content.split('\n').length - 1; - const length = content.length - newlines; - const end = start + length; - - let available = startText.text.length - startOffset; - let remaining = length; - - endOffset = startOffset + remaining; - - while (available < remaining) { - endText = texts.shift(); - remaining = length - available; - available = endText.text.length; - endOffset = remaining; - } - - // Inject marks from up the tree (acc) as well - if (typeof token !== 'string' || acc) { - const range = { - anchorKey: startText.key, - anchorOffset: startOffset, - focusKey: endText.key, - focusOffset: endOffset, - marks: [{ type: TOKEN_MARK, data: { types } }], - }; - - decorations.push(range); - } - - start = end; - } else if (token.content && token.content.length) { - // Tokens can be nested - for (const subToken of token.content) { - processToken(subToken, types); - } - } - } - - // Process top-level tokens - for (const token of tokens) { - processToken(token); - } - - return decorations; - }, - }; -} diff --git a/public/app/containers/Explore/utils/query.ts b/public/app/containers/Explore/utils/query.ts deleted file mode 100644 index d774f619a30..00000000000 --- a/public/app/containers/Explore/utils/query.ts +++ /dev/null @@ -1,14 +0,0 @@ -export function generateQueryKey(index = 0) { - return `Q-${Date.now()}-${Math.random()}-${index}`; -} - -export function ensureQueries(queries?) { - if (queries && typeof queries === 'object' && queries.length > 0 && typeof queries[0] === 'string') { - return queries.map((query, i) => ({ key: generateQueryKey(i), query })); - } - return [{ key: generateQueryKey(), query: '' }]; -} - -export function hasQuery(queries) { - return queries.some(q => q.query); -} diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx deleted file mode 100644 index 072908d2b8e..00000000000 --- a/public/app/containers/ManageDashboards/FolderPermissions.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import React, { Component } from 'react'; -import { hot } from 'react-hot-loader'; -import { inject, observer } from 'mobx-react'; -import { toJS } from 'mobx'; -import ContainerProps from 'app/containers/ContainerProps'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import Permissions from 'app/core/components/Permissions/Permissions'; -import Tooltip from 'app/core/components/Tooltip/Tooltip'; -import PermissionsInfo from 'app/core/components/Permissions/PermissionsInfo'; -import AddPermissions from 'app/core/components/Permissions/AddPermissions'; -import SlideDown from 'app/core/components/Animations/SlideDown'; - -@inject('nav', 'folder', 'view', 'permissions') -@observer -export class FolderPermissions extends Component { - constructor(props) { - super(props); - this.handleAddPermission = this.handleAddPermission.bind(this); - } - - componentDidMount() { - this.loadStore(); - } - - componentWillUnmount() { - const { permissions } = this.props; - permissions.hideAddPermissions(); - } - - loadStore() { - const { nav, folder, view } = this.props; - return folder.load(view.routeParams.get('uid') as string).then(res => { - view.updatePathAndQuery(`${res.url}/permissions`, {}, {}); - return nav.initFolderNav(toJS(folder.folder), 'manage-folder-permissions'); - }); - } - - handleAddPermission() { - const { permissions } = this.props; - permissions.toggleAddPermissions(); - } - - render() { - const { nav, folder, permissions, backendSrv } = this.props; - - 
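[sketch, not part of this diff: the contract of the removed Explore utils/query.ts helpers shown above]

  import { ensureQueries, hasQuery } from 'app/containers/Explore/utils/query';

  // Plain string arrays (older URL state) become keyed query objects.
  const fromUrl = ensureQueries(['up', 'rate(http_requests_total[5m])']);
  // -> [{ key: 'Q-<ts>-<rand>-0', query: 'up' }, { key: 'Q-<ts>-<rand>-1', query: 'rate(http_requests_total[5m])' }]

  // Anything else falls back to a single empty query.
  const fallback = ensureQueries(undefined);
  // -> [{ key: 'Q-<ts>-<rand>-0', query: '' }]

  hasQuery(fromUrl);  // true: at least one non-empty query string
  hasQuery(fallback); // false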
if (!folder.folder || !nav.main) { - return

<h2>Loading</h2>; - } - - const dashboardId = folder.folder.id; - - return ( -
[JSX lost in extraction: a <PageHeader>, a "Folder Permissions" heading with a <Tooltip>/<PermissionsInfo> hint, an Add Permission button wired to handleAddPermission, an <AddPermissions> form inside a <SlideDown>, and a <Permissions> list for the folder (dashboardId, isFolder, backendSrv)]
    - ); - } -} - -export default hot(module)(FolderPermissions); diff --git a/public/app/containers/ManageDashboards/FolderSettings.test.tsx b/public/app/containers/ManageDashboards/FolderSettings.test.tsx deleted file mode 100644 index bed3d569bcc..00000000000 --- a/public/app/containers/ManageDashboards/FolderSettings.test.tsx +++ /dev/null @@ -1,84 +0,0 @@ -import React from 'react'; -import { FolderSettings } from './FolderSettings'; -import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv } from 'test/mocks/common'; -import { shallow } from 'enzyme'; - -describe('FolderSettings', () => { - let wrapper; - let page; - - beforeAll(() => { - backendSrv.getFolderByUid.mockReturnValue( - Promise.resolve({ - id: 1, - uid: 'uid', - title: 'Folder Name', - url: '/dashboards/f/uid/folder-name', - canSave: true, - version: 1, - }) - ); - - const store = RootStore.create( - { - view: { - path: 'asd', - query: {}, - routeParams: { - uid: 'uid-str', - }, - }, - }, - { - backendSrv: backendSrv, - } - ); - - wrapper = shallow(); - page = wrapper.dive(); - return page - .instance() - .loadStore() - .then(() => { - page.update(); - }); - }); - - it('should set the title input field', () => { - const titleInput = page.find('.gf-form-input'); - expect(titleInput).toHaveLength(1); - expect(titleInput.prop('value')).toBe('Folder Name'); - }); - - it('should update title and enable save button when changed', () => { - const titleInput = page.find('.gf-form-input'); - const disabledSubmitButton = page.find('button[type="submit"]'); - expect(disabledSubmitButton.prop('disabled')).toBe(true); - - titleInput.simulate('change', { target: { value: 'New Title' } }); - - const updatedTitleInput = page.find('.gf-form-input'); - expect(updatedTitleInput.prop('value')).toBe('New Title'); - const enabledSubmitButton = page.find('button[type="submit"]'); - expect(enabledSubmitButton.prop('disabled')).toBe(false); - }); - - it('should disable save button if title is changed back to old title', () => { - const titleInput = page.find('.gf-form-input'); - - titleInput.simulate('change', { target: { value: 'Folder Name' } }); - - const enabledSubmitButton = page.find('button[type="submit"]'); - expect(enabledSubmitButton.prop('disabled')).toBe(true); - }); - - it('should disable save button if title is changed to empty string', () => { - const titleInput = page.find('.gf-form-input'); - - titleInput.simulate('change', { target: { value: '' } }); - - const enabledSubmitButton = page.find('button[type="submit"]'); - expect(enabledSubmitButton.prop('disabled')).toBe(true); - }); -}); diff --git a/public/app/containers/ManageDashboards/FolderSettings.tsx b/public/app/containers/ManageDashboards/FolderSettings.tsx deleted file mode 100644 index 88830356563..00000000000 --- a/public/app/containers/ManageDashboards/FolderSettings.tsx +++ /dev/null @@ -1,160 +0,0 @@ -import React from 'react'; -import { hot } from 'react-hot-loader'; -import { inject, observer } from 'mobx-react'; -import { toJS } from 'mobx'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import ContainerProps from 'app/containers/ContainerProps'; -import { getSnapshot } from 'mobx-state-tree'; -import appEvents from 'app/core/app_events'; - -@inject('nav', 'folder', 'view') -@observer -export class FolderSettings extends React.Component { - formSnapshot: any; - - componentDidMount() { - this.loadStore(); - } - - loadStore() { - const { nav, folder, view } = this.props; - - return 
folder.load(view.routeParams.get('uid') as string).then(res => { - this.formSnapshot = getSnapshot(folder); - view.updatePathAndQuery(`${res.url}/settings`, {}, {}); - - return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); - }); - } - - onTitleChange(evt) { - this.props.folder.setTitle(this.getFormSnapshot().folder.title, evt.target.value); - } - - getFormSnapshot() { - if (!this.formSnapshot) { - this.formSnapshot = getSnapshot(this.props.folder); - } - - return this.formSnapshot; - } - - save(evt) { - if (evt) { - evt.stopPropagation(); - evt.preventDefault(); - } - - const { nav, folder, view } = this.props; - - folder - .saveFolder({ overwrite: false }) - .then(newUrl => { - view.updatePathAndQuery(newUrl, {}, {}); - - appEvents.emit('dashboard-saved'); - appEvents.emit('alert-success', ['Folder saved']); - }) - .then(() => { - return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); - }) - .catch(this.handleSaveFolderError.bind(this)); - } - - delete(evt) { - if (evt) { - evt.stopPropagation(); - evt.preventDefault(); - } - - const { folder, view } = this.props; - const title = folder.folder.title; - - appEvents.emit('confirm-modal', { - title: 'Delete', - text: `Do you want to delete this folder and all its dashboards?`, - icon: 'fa-trash', - yesText: 'Delete', - onConfirm: () => { - return folder.deleteFolder().then(() => { - appEvents.emit('alert-success', ['Folder Deleted', `${title} has been deleted`]); - view.updatePathAndQuery('dashboards', '', ''); - }); - }, - }); - } - - handleSaveFolderError(err) { - if (err.data && err.data.status === 'version-mismatch') { - err.isHandled = true; - - const { nav, folder, view } = this.props; - - appEvents.emit('confirm-modal', { - title: 'Conflict', - text: 'Someone else has updated this folder.', - text2: 'Would you still like to save this folder?', - yesText: 'Save & Overwrite', - icon: 'fa-warning', - onConfirm: () => { - folder - .saveFolder({ overwrite: true }) - .then(newUrl => { - view.updatePathAndQuery(newUrl, {}, {}); - - appEvents.emit('dashboard-saved'); - appEvents.emit('alert-success', ['Folder saved']); - }) - .then(() => { - return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); - }); - }, - }); - } - } - - render() { - const { nav, folder } = this.props; - - if (!folder.folder || !nav.main) { - return

<h2>Loading</h2>; - } - - return ( -
[JSX lost in extraction: a <PageHeader>, a "Folder Settings" heading, and a form wired to save() with the folder-title input bound to onTitleChange, a Save button, and a Delete button wired to delete()]
    - ); - } -} - -export default hot(module)(FolderSettings); diff --git a/public/app/containers/ServerStats/ServerStats.test.tsx b/public/app/containers/ServerStats/ServerStats.test.tsx deleted file mode 100644 index a329a47527d..00000000000 --- a/public/app/containers/ServerStats/ServerStats.test.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import React from 'react'; -import renderer from 'react-test-renderer'; -import { ServerStats } from './ServerStats'; -import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv, createNavTree } from 'test/mocks/common'; - -describe('ServerStats', () => { - it('Should render table with stats', done => { - backendSrv.get.mockReturnValue( - Promise.resolve({ - dashboards: 10, - }) - ); - - const store = RootStore.create( - {}, - { - backendSrv: backendSrv, - navTree: createNavTree('cfg', 'admin', 'server-stats'), - } - ); - - const page = renderer.create(); - - setTimeout(() => { - expect(page.toJSON()).toMatchSnapshot(); - done(); - }); - }); -}); diff --git a/public/app/containers/ServerStats/ServerStats.tsx b/public/app/containers/ServerStats/ServerStats.tsx deleted file mode 100644 index 63e78996041..00000000000 --- a/public/app/containers/ServerStats/ServerStats.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import React from 'react'; -import { hot } from 'react-hot-loader'; -import { inject, observer } from 'mobx-react'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import ContainerProps from 'app/containers/ContainerProps'; - -@inject('nav', 'serverStats') -@observer -export class ServerStats extends React.Component { - constructor(props) { - super(props); - const { nav, serverStats } = this.props; - - nav.load('cfg', 'admin', 'server-stats'); - serverStats.load(); - } - - render() { - const { nav, serverStats } = this.props; - return ( -
[JSX lost in extraction: a <PageHeader> and a stats <table> with "Name" / "Value" headers whose body maps serverStats.stats through StatItem]
    - ); - } -} - -function StatItem(stat) { - return ( - - {stat.name} - {stat.value} - - ); -} - -export default hot(module)(ServerStats); diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx deleted file mode 100644 index d0feee75184..00000000000 --- a/public/app/containers/Teams/TeamList.tsx +++ /dev/null @@ -1,111 +0,0 @@ -import React from 'react'; -import { hot } from 'react-hot-loader'; -import { inject, observer } from 'mobx-react'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import { NavStore } from 'app/stores/NavStore/NavStore'; -import { TeamsStore, Team } from 'app/stores/TeamsStore/TeamsStore'; -import { BackendSrv } from 'app/core/services/backend_srv'; -import DeleteButton from 'app/core/components/DeleteButton/DeleteButton'; - -interface Props { - nav: typeof NavStore.Type; - teams: typeof TeamsStore.Type; - backendSrv: BackendSrv; -} - -@inject('nav', 'teams') -@observer -export class TeamList extends React.Component { - constructor(props) { - super(props); - - this.props.nav.load('cfg', 'teams'); - this.fetchTeams(); - } - - fetchTeams() { - this.props.teams.loadTeams(); - } - - deleteTeam(team: Team) { - this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); - } - - onSearchQueryChange = evt => { - this.props.teams.setSearchQuery(evt.target.value); - }; - - renderTeamMember(team: Team): JSX.Element { - const teamUrl = `org/teams/edit/${team.id}`; - - return ( - - - - - - - - {team.name} - - - {team.email} - - - {team.memberCount} - - - this.deleteTeam(team)} /> - - - ); - } - - render() { - const { nav, teams } = this.props; - return ( -
[JSX lost in extraction: a <PageHeader>, a search input wired to onSearchQueryChange, and a <table> with "Name" / "Email" / "Members" headers whose body maps teams.filteredTeams through renderTeamMember]
    - ); - } -} - -export default hot(module)(TeamList); diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx deleted file mode 100644 index 2abc9c51535..00000000000 --- a/public/app/containers/Teams/TeamPages.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import React from 'react'; -import _ from 'lodash'; -import { hot } from 'react-hot-loader'; -import { inject, observer } from 'mobx-react'; -import config from 'app/core/config'; -import PageHeader from 'app/core/components/PageHeader/PageHeader'; -import { NavStore } from 'app/stores/NavStore/NavStore'; -import { TeamsStore, Team } from 'app/stores/TeamsStore/TeamsStore'; -import { ViewStore } from 'app/stores/ViewStore/ViewStore'; -import TeamMembers from './TeamMembers'; -import TeamSettings from './TeamSettings'; -import TeamGroupSync from './TeamGroupSync'; - -interface Props { - nav: typeof NavStore.Type; - teams: typeof TeamsStore.Type; - view: typeof ViewStore.Type; -} - -@inject('nav', 'teams', 'view') -@observer -export class TeamPages extends React.Component { - isSyncEnabled: boolean; - currentPage: string; - - constructor(props) { - super(props); - - this.isSyncEnabled = config.buildInfo.isEnterprise; - this.currentPage = this.getCurrentPage(); - - this.loadTeam(); - } - - async loadTeam() { - const { teams, nav, view } = this.props; - - await teams.loadById(view.routeParams.get('id')); - - nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled); - } - - getCurrentTeam(): Team { - const { teams, view } = this.props; - return teams.map.get(view.routeParams.get('id')); - } - - getCurrentPage() { - const pages = ['members', 'settings', 'groupsync']; - const currentPage = this.props.view.routeParams.get('page'); - return _.includes(pages, currentPage) ? currentPage : pages[0]; - } - - render() { - const { nav } = this.props; - const currentTeam = this.getCurrentTeam(); - - if (!nav.main) { - return null; - } - - return ( -
[JSX lost in extraction: a <PageHeader> and, once currentTeam is loaded, <TeamMembers>, <TeamSettings> or <TeamGroupSync> selected by this.currentPage, the last only when isSyncEnabled]
    - ); - } -} - -export default hot(module)(TeamPages); diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx deleted file mode 100644 index 0de60a0b16c..00000000000 --- a/public/app/containers/Teams/TeamSettings.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import React from 'react'; -import { hot } from 'react-hot-loader'; -import { observer } from 'mobx-react'; -import { Team } from 'app/stores/TeamsStore/TeamsStore'; -import { Label } from 'app/core/components/Forms/Forms'; - -interface Props { - team: Team; -} - -@observer -export class TeamSettings extends React.Component { - constructor(props) { - super(props); - } - - onChangeName = evt => { - this.props.team.setName(evt.target.value); - }; - - onChangeEmail = evt => { - this.props.team.setEmail(evt.target.value); - }; - - onUpdate = evt => { - evt.preventDefault(); - this.props.team.update(); - }; - - render() { - return ( -
[JSX lost in extraction: a "Team Settings" heading and a form wired to onUpdate, with Name and Email inputs wired to onChangeName / onChangeEmail and an Update button]
    - ); - } -} - -export default hot(module)(TeamSettings); diff --git a/public/app/core/actions/index.ts b/public/app/core/actions/index.ts new file mode 100644 index 00000000000..451a13dae99 --- /dev/null +++ b/public/app/core/actions/index.ts @@ -0,0 +1,4 @@ +import { updateLocation } from './location'; +import { updateNavIndex, UpdateNavIndexAction } from './navModel'; + +export { updateLocation, updateNavIndex, UpdateNavIndexAction }; diff --git a/public/app/core/actions/location.ts b/public/app/core/actions/location.ts new file mode 100644 index 00000000000..6f7ac67363e --- /dev/null +++ b/public/app/core/actions/location.ts @@ -0,0 +1,13 @@ +import { LocationUpdate } from 'app/types'; + +export type Action = UpdateLocationAction; + +export interface UpdateLocationAction { + type: 'UPDATE_LOCATION'; + payload: LocationUpdate; +} + +export const updateLocation = (location: LocationUpdate): UpdateLocationAction => ({ + type: 'UPDATE_LOCATION', + payload: location, +}); diff --git a/public/app/core/actions/navModel.ts b/public/app/core/actions/navModel.ts new file mode 100644 index 00000000000..a40a0e880ee --- /dev/null +++ b/public/app/core/actions/navModel.ts @@ -0,0 +1,17 @@ +import { NavModelItem } from '../../types'; + +export enum ActionTypes { + UpdateNavIndex = 'UPDATE_NAV_INDEX', +} + +export type Action = UpdateNavIndexAction; + +export interface UpdateNavIndexAction { + type: ActionTypes.UpdateNavIndex; + payload: NavModelItem; +} + +export const updateNavIndex = (item: NavModelItem): UpdateNavIndexAction => ({ + type: ActionTypes.UpdateNavIndex, + payload: item, +}); diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index a4439509f8e..6974d40aac8 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -2,21 +2,19 @@ import { react2AngularDirective } from 'app/core/utils/react2angular'; import { PasswordStrength } from './components/PasswordStrength'; import PageHeader from './components/PageHeader/PageHeader'; import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; -import LoginBackground from './components/Login/LoginBackground'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; -import DashboardPermissions from './components/Permissions/DashboardPermissions'; +import { SideMenu } from './components/sidemenu/SideMenu'; export function registerAngularDirectives() { react2AngularDirective('passwordStrength', PasswordStrength, ['password']); + react2AngularDirective('sidemenu', SideMenu, []); react2AngularDirective('pageHeader', PageHeader, ['model', 'noTabs']); react2AngularDirective('emptyListCta', EmptyListCTA, ['model']); - react2AngularDirective('loginBackground', LoginBackground, []); react2AngularDirective('searchResult', SearchResult, []); react2AngularDirective('tagFilter', TagFilter, [ 'tags', ['onSelect', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); - react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/CustomScrollbar/CustomScrollbar.test.tsx b/public/app/core/components/CustomScrollbar/CustomScrollbar.test.tsx new file mode 100644 index 00000000000..4edcf7313db --- /dev/null +++ b/public/app/core/components/CustomScrollbar/CustomScrollbar.test.tsx @@ -0,0 +1,16 @@ +import React from 'react'; +import renderer from 'react-test-renderer'; +import CustomScrollbar from 
'./CustomScrollbar'; + +describe('CustomScrollbar', () => { + it('renders correctly', () => { + const tree = renderer + .create( + +

<CustomScrollbar> + <p>Scrollable content</p> + </CustomScrollbar>
    + ) + .toJSON(); + expect(tree).toMatchSnapshot(); + }); +}); diff --git a/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx b/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx new file mode 100644 index 00000000000..9b9a9c4d02a --- /dev/null +++ b/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx @@ -0,0 +1,46 @@ +import React, { PureComponent } from 'react'; +import Scrollbars from 'react-custom-scrollbars'; + +interface Props { + customClassName?: string; + autoHide?: boolean; + autoHideTimeout?: number; + autoHideDuration?: number; + hideTracksWhenNotNeeded?: boolean; +} + +/** + * Wraps component into component from `react-custom-scrollbars` + */ +class CustomScrollbar extends PureComponent { + static defaultProps: Partial = { + customClassName: 'custom-scrollbars', + autoHide: true, + autoHideTimeout: 200, + autoHideDuration: 200, + hideTracksWhenNotNeeded: false, + }; + + render() { + const { customClassName, children, ...scrollProps } = this.props; + + return ( +
<Scrollbars className={customClassName} renderTrackHorizontal={props => <div {...props} className="track-horizontal" />} + renderTrackVertical={props => <div {...props} className="track-vertical" />} + renderThumbHorizontal={props => <div {...props} className="thumb-horizontal" />} + renderThumbVertical={props => <div {...props} className="thumb-vertical" />} + renderView={props => <div {...props} className="view" />
    } + {...scrollProps} + > + {children} + + ); + } +} + +export default CustomScrollbar; diff --git a/public/app/core/components/CustomScrollbar/__snapshots__/CustomScrollbar.test.tsx.snap b/public/app/core/components/CustomScrollbar/__snapshots__/CustomScrollbar.test.tsx.snap new file mode 100644 index 00000000000..37d8cea45be --- /dev/null +++ b/public/app/core/components/CustomScrollbar/__snapshots__/CustomScrollbar.test.tsx.snap @@ -0,0 +1,86 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`CustomScrollbar renders correctly 1`] = ` +
[snapshot body lost in extraction: the serialized scroller markup, i.e. the view, track and thumb <div>s wrapping "Scrollable content"]
    +`; diff --git a/public/app/core/components/LayoutSelector/LayoutSelector.tsx b/public/app/core/components/LayoutSelector/LayoutSelector.tsx new file mode 100644 index 00000000000..d9e00102438 --- /dev/null +++ b/public/app/core/components/LayoutSelector/LayoutSelector.tsx @@ -0,0 +1,39 @@ +import React, { SFC } from 'react'; + +export type LayoutMode = LayoutModes.Grid | LayoutModes.List; + +export enum LayoutModes { + Grid = 'grid', + List = 'list', +} + +interface Props { + mode: LayoutMode; + onLayoutModeChanged: (mode: LayoutMode) => {}; +} + +const LayoutSelector: SFC = props => { + const { mode, onLayoutModeChanged } = props; + return ( +
[JSX lost in extraction: two toggle buttons calling onLayoutModeChanged(LayoutModes.List) and onLayoutModeChanged(LayoutModes.Grid), with the active one matching mode]
    + ); +}; + +export default LayoutSelector; diff --git a/public/app/core/components/Login/LoginBackground.tsx b/public/app/core/components/Login/LoginBackground.tsx deleted file mode 100644 index 83e228ab6e0..00000000000 --- a/public/app/core/components/Login/LoginBackground.tsx +++ /dev/null @@ -1,1240 +0,0 @@ -import React, { Component } from 'react'; - -const xCount = 50; -const yCount = 50; - -function Cell({ x, y, flipIndex }) { - const index = (y * xCount) + x; - const bgColor1 = getColor(x, y); - return ( -
    - ); -} - -function getRandomInt(min, max) { - min = Math.ceil(min); - max = Math.floor(max); - return Math.floor(Math.random() * (max - min)) + min; //The maximum is exclusive and the minimum is inclusive -} - -export default class LoginBackground extends Component { - cancelInterval: any; - - constructor(props) { - super(props); - - this.state = { - flipIndex: null, - }; - - this.flipElements = this.flipElements.bind(this); - } - - flipElements() { - const elementIndexToFlip = getRandomInt(0, (xCount * yCount) - 1); - this.setState(prevState => { - return { - ...prevState, - flipIndex: elementIndexToFlip, - }; - }); - } - - componentWillMount() { - this.cancelInterval = setInterval(this.flipElements, 3000); - } - - componentWillUnmount() { - clearInterval(this.cancelInterval); - } - - render() { - console.log('re-render!', this.state.flipIndex); - - return ( -
<div> - {Array.from(Array(yCount)).map((el, y) => { - return ( - <div key={y}> - {Array.from(Array(xCount)).map((el2, x) => { - return ( - <Cell key={x} x={x} y={y} flipIndex={this.state.flipIndex} /> - ); - })} - </div> - ); - })} - </div>
    - ); - } -} - -function getColor(x, y) { - const colors = [ - '#14161A', - '#111920', - '#121E27', - '#13212B', - '#122029', - '#101C24', - '#0F1B23', - '#0F1B22', - '#111C24', - '#101A22', - '#101A21', - '#111D25', - '#101E27', - '#101D26', - '#101B23', - '#11191E', - '#131519', - '#131518', - '#101B21', - '#121F29', - '#10232D', - '#11212B', - '#0E1C25', - '#0E1C24', - '#111F29', - '#11222B', - '#101E28', - '#102028', - '#111F2A', - '#11202A', - '#11191F', - '#121417', - '#12191D', - '#101D25', - '#11212C', - '#10242F', - '#0F212B', - '#0F1E27', - '#0F1D26', - '#0F1F29', - '#0F2029', - '#11232E', - '#10212B', - '#10222C', - '#0F202A', - '#112530', - '#10252F', - '#0F242E', - '#10222D', - '#10202A', - '#0F1C24', - '#0F1E28', - '#0F212A', - '#0F222B', - '#14171A', - '#0F1A20', - '#0F1C25', - '#10232E', - '#0E202A', - '#0E1E27', - '#0E1D26', - '#0F202B', - '#11232F', - '#102632', - '#102530', - '#122430', - '#0F1B21', - '#0F212C', - '#0E1F29', - '#112531', - '#0F2734', - '#0F2835', - '#0D1B23', - '#0F1A21', - '#0F1A23', - '#0F1D27', - '#0F222D', - '#102430', - '#102531', - '#10222E', - '#0F232D', - '#0E2633', - '#0E2734', - '#0F2834', - '#0E2835', - '#0F2633', - '#0F2532', - '#0E1A22', - '#0D1C24', - '#0F2735', - '#0F2937', - '#102A38', - '#112938', - '#102A39', - '#0F2A38', - '#102836', - '#0E1B23', - '#0F2938', - '#102A3A', - '#102D3D', - '#0F3040', - '#102D3E', - '#0F2E3E', - '#112C3B', - '#102B3B', - '#102B3A', - '#102D3C', - '#0F2A39', - '#0F2634', - '#0E2029', - '#0E1A21', - '#0F2B39', - '#0F2D3D', - '#0F2F40', - '#0E3142', - '#113445', - '#122431', - '#102E3E', - '#0F3345', - '#0E2F40', - '#0F3143', - '#102C3C', - '#0F2B3A', - '#0F1F28', - '#0F3344', - '#113548', - '#113C51', - '#144258', - '#103A4E', - '#103A4F', - '#103547', - '#10364A', - '#103649', - '#0F3448', - '#102C3A', - '#0F2836', - '#103447', - '#0F384C', - '#123F55', - '#15445A', - '#133F55', - '#103B50', - '#113E54', - '#103446', - '#0F3A4F', - '#0F3548', - '#0D3142', - '#102C3B', - '#0E2937', - '#103D52', - '#0E3544', - '#184C65', - '#154760', - '#14435B', - '#15465F', - '#124159', - '#0F3D53', - '#103C51', - '#0F3447', - '#0E3243', - '#113143', - '#113D53', - '#184B64', - '#184D67', - '#184C66', - '#174A63', - '#15455C', - '#13425A', - '#14445A', - '#10384C', - '#0E3446', - '#10181E', - '#103243', - '#0F384D', - '#14455C', - '#164761', - '#164C66', - '#1D627D', - '#12425A', - '#164A63', - '#14465D', - '#13435A', - '#0A2B38', - '#0F3446', - '#0D2F40', - '#0D2F3F', - '#0F2531', - '#102937', - '#10384B', - '#0F3649', - '#184E68', - '#1A5472', - '#184D68', - '#154A63', - '#19506B', - '#19536F', - '#1A4F69', - '#144760', - '#114058', - '#0E3A4F', - '#0E3547', - '#0C3042', - '#0E1B24', - '#11222C', - '#154C65', - '#1A5776', - '#1B5675', - '#113847', - '#1A5371', - '#194E68', - '#0E2D3D', - '#112D3B', - '#113D52', - '#18516D', - '#1A5979', - '#1B5878', - '#19526E', - '#1A526E', - '#13435B', - '#0F3E55', - '#0B374C', - '#0E3448', - '#0D2E3F', - '#0F2B3B', - '#112E3E', - '#113B50', - '#15465D', - '#1A526F', - '#1E5E81', - '#1D5B7B', - '#1A5777', - '#154456', - '#113949', - '#0D394E', - '#0F3549', - '#0F2C3B', - '#0E2733', - '#112E3D', - '#123D52', - '#10394C', - '#1B5674', - '#1A5370', - '#144861', - '#104058', - '#104159', - '#0E384C', - '#0D2D3D', - '#0E2533', - '#112C3A', - '#1B5979', - '#1B5C7D', - '#1A5675', - '#104057', - '#0F3C51', - '#11425A', - '#0E394D', - '#0C3243', - '#0E2735', - '#112F3E', - '#134158', - '#1D5E7F', - '#1D6083', - '#1C5877', - '#1A5573', - '#184D66', - '#164962', - '#0F3D54', - '#0E3D53', - 
'#0E3447', - '#0F2A3A', - '#0F2936', - '#101F28', - '#103040', - '#124056', - '#164E69', - '#144B64', - '#164D66', - '#0F3E54', - '#0E3B51', - '#0D3346', - '#0E1F27', - '#124158', - '#164961', - '#0E3C52', - '#19506C', - '#0F2C3C', - '#0E3244', - '#0E2A39', - '#0E2938', - '#113040', - '#134057', - '#1A5471', - '#154B63', - '#1C597A', - '#164760', - '#10374B', - '#0E374C', - '#0E384D', - '#11242F', - '#10394D', - '#18526E', - '#154B65', - '#103F55', - '#0D3345', - '#102532', - '#102029', - '#113142', - '#1B5973', - '#1A516B', - '#1C5979', - '#1C5A7A', - '#184A65', - '#164C65', - '#0D3041', - '#123142', - '#123E54', - '#1B5877', - '#1A5574', - '#1C5878', - '#13435C', - '#0F374B', - '#0C3143', - '#112F40', - '#123C51', - '#174E68', - '#1D5C7D', - '#14465F', - '#0F3F56', - '#0B3041', - '#123243', - '#15435B', - '#19516D', - '#1D5D7E', - '#1C5C7D', - '#184F69', - '#11374B', - '#103E54', - '#0E3143', - '#0F2D3C', - '#11242E', - '#133445', - '#1A5674', - '#1D6184', - '#1F658B', - '#0D3A50', - '#0C374B', - '#154862', - '#164B64', - '#154961', - '#0D384D', - '#102631', - '#113242', - '#134259', - '#185270', - '#1D6386', - '#1E678C', - '#1C5978', - '#0D3549', - '#0F2632', - '#184961', - '#1D5E80', - '#1E6488', - '#1F678D', - '#1E5B7C', - '#164862', - '#19526D', - '#113C52', - '#15455E', - '#0F2F3F', - '#144259', - '#194D67', - '#1D6991', - '#195777', - '#19516C', - '#103F56', - '#144660', - '#0D2E3E', - '#10212A', - '#113141', - '#16455C', - '#1D5B7C', - '#1F6589', - '#1E668C', - '#1E5F81', - '#0F3B50', - '#0D3244', - '#164A64', - '#184E69', - '#0E364A', - '#0E2E3E', - '#10222B', - '#19475E', - '#1B5A7B', - '#1E5D7F', - '#1E678D', - '#1E6184', - '#19506A', - '#1B5370', - '#1B5573', - '#0E3041', - '#122E3E', - '#16455B', - '#195370', - '#1D6489', - '#1D6B93', - '#164A65', - '#154A64', - '#1A5572', - '#1D6082', - '#1F6286', - '#1D6C94', - '#1E709A', - '#174A65', - '#1B526F', - '#1E6589', - '#1D6384', - '#0D3143', - '#0E2F3F', - '#174760', - '#1F6487', - '#1D668C', - '#0D2F41', - '#103B4F', - '#1C5C7E', - '#1F688F', - '#1C5B7C', - '#164D68', - '#1D6285', - '#0D364A', - '#1D5A7A', - '#1E6990', - '#1D6488', - '#18516B', - '#1A506B', - '#0E3B50', - '#0E3548', - '#124259', - '#13455C', - '#14485F', - '#1E5C7D', - '#122D3C', - '#1E6E98', - '#1E6A91', - '#1E6286', - '#1E6C95', - '#1D6990', - '#101F29', - '#174A62', - '#10394E', - '#1D6D96', - '#1E688E', - '#1D6E97', - '#1E6C94', - '#0E394E', - '#112B39', - '#195270', - '#1E668B', - '#1E6386', - '#1D6385', - '#0C3142', - '#1E6083', - '#1E729C', - '#1F709A', - '#1E6F98', - '#1D5F81', - '#1F688D', - '#1C6488', - '#1D6588', - '#1C6A93', - '#1E658B', - '#1F6C95', - '#0D3C52', - '#1C6385', - '#1E5F82', - '#0E3D54', - '#0F3244', - '#18485F', - '#1E6991', - '#1C5B7B', - '#1F6082', - '#0F3346', - '#18536F', - '#114056', - '#1D6B92', - '#1B5776', - '#0F3C52', - '#1E6890', - '#1F688E', - '#0C394E', - '#0F1D25', - '#1F6386', - '#1E688D', - '#1F6488', - '#20668C', - '#1D5978', - '#0F3D52', - '#0F1E26', - '#13465F', - '#0D374C', - '#1B5C7C', - '#0E1A23', - '#0F374A', - '#1B5574', - '#0F394C', - '#0E2A38', - '#102A37', - '#18506B', - '#1E5A7A', - '#0F3245', - '#0E2E3F', - '#1E678E', - '#1C5D7E', - '#1A5A7A', - '#0E2837', - '#102733', - '#0F3B51', - '#15475E', - '#1E6B93', - '#1E648A', - '#194961', - '#0F3A4E', - '#0E1D25', - '#194F69', - '#103345', - '#0F394D', - '#102B39', - '#103E55', - '#1B5572', - '#164861', - '#174861', - '#113B4F', - '#102936', - '#0F3041', - '#174961', - '#113E53', - '#134056', - '#124057', - '#194B63', - '#0E364B', - '#15445B', - '#16475E', - 
'#102F3F', - '#16485F', - '#0F2E3D', - '#101920', - '#12222C', - '#122C3B', - '#144157', - '#123B50', - '#16465D', - '#184960', - '#112B3A', - '#12232F', - '#132430', - '#113344', - '#11394C', - '#113649', - '#11364A', - '#133F56', - '#121D25', - '#112733', - '#112A38', - '#0F1F2A', - '#113447', - '#113A4E', - '#0F222C', - '#13222B', - '#112836', - '#102F3E', - '#113243', - '#123445', - '#12374B', - '#121E26', - '#122531', - '#11303F', - '#0D1D25', - '#102835', - '#112834', - '#101C23', - '#111C23', - '#12212B', - '#11222D', - '#0E1B22', - '#0E1D27', - '#121C22', - '#12202A', - '#101A20', - '#13191E', - '#111E28', - '#11212D', - '#0F1B24', - '#0F1C23', - '#13181D', - '#15171A', - '#121D23', - '#121F27', - '#111E27', - '#101B22', - '#121F28', - '#111E26', - '#101D24', - '#111C22', - '#12161E', - '#101925', - '#121E2D', - '#112033', - '#111E2F', - '#0F1B29', - '#0F1A28', - '#101B2A', - '#0E1A27', - '#101C2B', - '#111D2D', - '#111D2B', - '#0F1B28', - '#101923', - '#13161D', - '#13161C', - '#0F1A26', - '#101E2F', - '#112235', - '#102031', - '#0F1B2A', - '#112031', - '#102032', - '#101D2E', - '#121F2F', - '#112133', - '#101E30', - '#101F30', - '#102336', - '#101B2C', - '#0F1C2B', - '#111E2E', - '#0F2134', - '#102236', - '#0F2133', - '#101F31', - '#0F2438', - '#102337', - '#102235', - '#102133', - '#11171E', - '#101F2F', - '#102030', - '#102234', - '#102132', - '#12181F', - '#0F1A25', - '#0F2135', - '#0F1F30', - '#0F1C2D', - '#101D2C', - '#0F2033', - '#0E2338', - '#0F2237', - '#0F2236', - '#0B243B', - '#0D2338', - '#0E1A26', - '#0F1D2E', - '#0F2032', - '#0D2339', - '#0B253F', - '#0A253F', - '#0A253E', - '#0C2439', - '#0E1925', - '#0E2135', - '#0F2235', - '#0A243A', - '#08253E', - '#09253E', - '#0A263F', - '#0A243C', - '#0B233B', - '#0E1A28', - '#0D1A26', - '#09253F', - '#0A2743', - '#0B2844', - '#0B2641', - '#0A2744', - '#0A2844', - '#0B2743', - '#092745', - '#0F2337', - '#101D2D', - '#092743', - '#092846', - '#0E2B4C', - '#102E4F', - '#0E2C4D', - '#0B2A49', - '#082947', - '#0D2B4B', - '#0C2A4A', - '#092946', - '#082845', - '#0C2B4B', - '#0F2D4E', - '#103051', - '#133257', - '#0E2D4E', - '#143156', - '#112F51', - '#0B243A', - '#082744', - '#092844', - '#123054', - '#143359', - '#173A64', - '#183F6E', - '#173F6D', - '#153961', - '#163962', - '#133358', - '#15345B', - '#14345A', - '#102F50', - '#0A2948', - '#082844', - '#092641', - '#16375F', - '#193C69', - '#174170', - '#173E6B', - '#163A63', - '#173D69', - '#183D6A', - '#15365E', - '#112E50', - '#0A2A49', - '#082743', - '#0E1927', - '#173C68', - '#13487E', - '#164476', - '#174375', - '#193F6F', - '#173B66', - '#163B65', - '#082A48', - '#0A2641', - '#09243C', - '#174171', - '#14477C', - '#124980', - '#14487F', - '#174374', - '#15467B', - '#184172', - '#17406F', - '#184070', - '#163C67', - '#16355D', - '#123256', - '#0E1B29', - '#0F1923', - '#113052', - '#184274', - '#164579', - '#13477C', - '#193E6D', - '#0A243E', - '#0B233A', - '#0D1A29', - '#0B2742', - '#17365E', - '#163860', - '#124A84', - '#095191', - '#114A83', - '#0D4D8A', - '#0C4D8C', - '#104B85', - '#15477E', - '#174477', - '#183862', - '#0A233A', - '#092947', - '#09243D', - '#173963', - '#194173', - '#085396', - '#085394', - '#114B87', - '#144983', - '#094F8E', - '#075090', - '#0F4C89', - '#215287', - '#0E1A29', - '#184376', - '#0C4D8B', - '#07549A', - '#0A4E8D', - '#0F4C88', - '#0A4E8C', - '#174273', - '#193C6A', - '#0B2948', - '#0B2C4B', - '#0C4E8D', - '#1259A4', - '#0C579E', - '#0D4D8B', - '#095397', - '#085397', - '#085295', - '#144880', - '#173861', - '#15335A', - '#0F2C4D', - 
'#0C2949', - '#0B4E8D', - '#08559C', - '#07508F', - '#154578', - '#17365F', - '#122F53', - '#111D2C', - '#092A48', - '#08559D', - '#08559E', - '#0C56A1', - '#164271', - '#163E6A', - '#194071', - '#082642', - '#0F1E30', - '#0D2D4D', - '#114C87', - '#0E59A3', - '#135BA6', - '#085498', - '#085497', - '#095192', - '#0E4D8B', - '#0C4E8A', - '#134982', - '#17457B', - '#121F2E', - '#183E6C', - '#153E69', - '#07508E', - '#173F6C', - '#193D6B', - '#112D4F', - '#0A243B', - '#072946', - '#111E2D', - '#0B2740', - '#10497F', - '#17406E', - '#084F8D', - '#104A80', - '#0E2E4F', - '#143358', - '#16365D', - '#0A2742', - '#13477B', - '#154474', - '#104C86', - '#095291', - '#0B4F8E', - '#114A80', - '#095090', - '#075296', - '#163760', - '#2D6DB5', - '#0C2843', - '#0C233A', - '#153A62', - '#14467A', - '#075498', - '#085293', - '#09263F', - '#122030', - '#09559D', - '#0F4B83', - '#08549A', - '#14375D', - '#085499', - '#075499', - '#0A243D', - '#143E68', - '#10497E', - '#074F8E', - '#085496', - '#0C58A3', - '#065499', - '#085190', - '#0A2B4A', - '#104C88', - '#0D4F8E', - '#0F58A2', - '#0B569B', - '#0D58A1', - '#134A81', - '#09559C', - '#0A5293', - '#114B86', - '#0D2C4C', - '#103255', - '#16457A', - '#074F8C', - '#07559C', - '#185DA9', - '#1D61AD', - '#175CA8', - '#16406D', - '#153C65', - '#0E243A', - '#144679', - '#085192', - '#1A5EAC', - '#1D61AE', - '#11497F', - '#12487E', - '#0C243C', - '#123155', - '#0F59A3', - '#1B5FAB', - '#1E61AD', - '#145CA4', - '#0E599F', - '#11497E', - '#094F8D', - '#15345A', - '#134A85', - '#165CA8', - '#2263AF', - '#124466', - '#0A518F', - '#08569D', - '#16416F', - '#0B2B4A', - '#124A83', - '#0C57A2', - '#1E60AD', - '#1E62AE', - '#165DA8', - '#1059A4', - '#15406C', - '#0A4F8E', - '#12365A', - '#0A5191', - '#16355C', - '#1C5EAB', - '#155CA7', - '#085292', - '#174478', - '#153258', - '#111F2F', - '#174272', - '#1159A5', - '#1C5EAC', - '#2F74BB', - '#0C58A2', - '#0D59A3', - '#14477D', - '#132F53', - '#155BA6', - '#195FAA', - '#2366B1', - '#2967B2', - '#14477E', - '#1B5EAB', - '#175DA8', - '#0F4C86', - '#065090', - '#1C5FAC', - '#185CA8', - '#0D58A3', - '#0C4E8C', - '#134981', - '#14416D', - '#0F5AA5', - '#1F63AF', - '#114B88', - '#09508E', - '#0A569D', - '#195DAA', - '#0F1D2F', - '#1059A2', - '#0E599E', - '#2063AF', - '#1F63AE', - '#1A5EAA', - '#0C57A0', - '#195EAA', - '#1A5EA9', - '#0E4E8A', - '#12487D', - '#185DAA', - '#175EAA', - '#0A508E', - '#1559A6', - '#0E58A3', - '#095399', - '#0B4E8B', - '#0B569F', - '#0C57A1', - '#2967B1', - '#2365B0', - '#2163AE', - '#1A5DAA', - '#195EAB', - '#1E5FAC', - '#2564AF', - '#2767B1', - '#2766B1', - '#0D5A9F', - '#2062AE', - '#1F61AD', - '#195FAB', - '#0D4E8D', - '#173760', - '#111D2E', - '#09518F', - '#1A5FAC', - '#135BA7', - '#085291', - '#183761', - '#0B2845', - '#113457', - '#075393', - '#185EA9', - '#2B69B3', - '#2A67B2', - '#2867B1', - '#155DA8', - '#135CA6', - '#135AA5', - '#114980', - '#2566B1', - '#2064AF', - '#2364AF', - '#13365B', - '#154475', - '#08549B', - '#164373', - '#085392', - '#144576', - '#12497E', - '#0E5392', - '#135BA3', - '#0C5395', - '#0C5291', - '#0E579C', - '#0E5290', - '#134C83', - '#2163AC', - '#195CA6', - '#0D4E8C', - '#082945', - '#133256', - '#0E2F50', - '#105AA6', - '#134677', - '#144475', - '#145BA7', - '#154270', - '#1D60AD', - '#09569B', - '#09243E', - '#134A86', - '#0E59A4', - '#0A4E8B', - '#0E4B83', - '#1D5EAC', - '#101C2A', - '#134A84', - '#0E518F', - '#145CA7', - '#0E5699', - '#145BA5', - '#095292', - '#15416E', - '#153D67', - '#153F6B', - '#125AA5', - '#16406E', - '#0E1B27', - '#0D4F8C', - '#0F58A3', - 
'#114A82', - '#09569C', - '#0C2339', - '#0E1B28', - '#0D59A4', - '#07559D', - '#08569E', - '#095190', - '#0B253E', - '#0C2B49', - '#2264AF', - '#09549A', - '#09569F', - '#163D68', - '#0C263F', - '#143960', - '#183A65', - '#075496', - '#0C579F', - '#085191', - '#102438', - '#075295', - '#082946', - '#102437', - '#0C2642', - '#101C29', - '#0C253E', - '#15355C', - '#0B2E4D', - '#0F3253', - '#154577', - '#16335B', - '#0F1925', - '#0C2742', - '#0B2946', - '#0E2C4B', - '#0E2B48', - '#0E2237', - '#102237', - '#0B253D', - '#0A2946', - '#0C2841', - '#0D2A47', - '#0C2C4A', - '#08253F', - '#08243D', - '#111C2B', - '#0C2844', - '#0C2945', - '#0D243A', - '#122134', - '#0B2642', - '#113154', - '#113255', - '#0A2642', - '#0A2945', - '#0B263F', - '#0D2E4E', - '#0F1E2E', - '#0A2845', - '#0D2439', - '#0F1A29', - '#101C2E', - '#111923', - '#13181F', - '#111D2F', - '#111F30', - '#121E30', - '#121E2E', - '#101B27', - '#101A27', - '#13171F', - ]; - - // let randX = getRandomInt(0, x); - // let randY = getRandomInt(0, y); - // let randIndex = randY * xCount + randX; - - return colors[(y*xCount + x) % colors.length]; -} diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx new file mode 100644 index 00000000000..9faf07f18d1 --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import OrgActionBar, { Props } from './OrgActionBar'; + +const setup = (propOverrides?: object) => { + const props: Props = { + searchQuery: '', + setSearchQuery: jest.fn(), + target: '_blank', + linkButton: { href: 'some/url', title: 'test' }, + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.tsx new file mode 100644 index 00000000000..8fc34a018e1 --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.tsx @@ -0,0 +1,44 @@ +import React, { PureComponent } from 'react'; +import LayoutSelector, { LayoutMode } from '../LayoutSelector/LayoutSelector'; + +export interface Props { + searchQuery: string; + layoutMode?: LayoutMode; + onSetLayoutMode?: (mode: LayoutMode) => {}; + setSearchQuery: (value: string) => {}; + linkButton: { href: string; title: string }; + target?: string; +} + +export default class OrgActionBar extends PureComponent { + render() { + const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery, target } = this.props; + const linkProps = { href: linkButton.href, target: undefined }; + + if (target) { + linkProps.target = target; + } + + return ( +
[JSX lost in extraction: a search input bound to searchQuery / setSearchQuery, a <LayoutSelector> wired to mode => onSetLayoutMode(mode), and a link button spreading linkProps and showing linkButton.title]
    + + ); + } +} diff --git a/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap new file mode 100644 index 00000000000..dc53e7863ea --- /dev/null +++ b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap @@ -0,0 +1,39 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
[snapshot body lost in extraction: the serialized action-bar markup with the search input, layout selector and link button]
    + +`; diff --git a/public/app/core/components/PageHeader/PageHeader.tsx b/public/app/core/components/PageHeader/PageHeader.tsx index b7bef2495bb..c176095afa4 100644 --- a/public/app/core/components/PageHeader/PageHeader.tsx +++ b/public/app/core/components/PageHeader/PageHeader.tsx @@ -1,9 +1,7 @@ import React from 'react'; -import { observer } from 'mobx-react'; -import { NavModel, NavModelItem } from '../../nav_model_srv'; +import { NavModel, NavModelItem } from 'app/types'; import classNames from 'classnames'; import appEvents from 'app/core/app_events'; -import { toJS } from 'mobx'; export interface Props { model: NavModel; @@ -81,7 +79,6 @@ const Navigation = ({ main }: { main: NavModelItem }) => { ); }; -@observer export default class PageHeader extends React.Component { constructor(props) { super(props); @@ -148,7 +145,7 @@ export default class PageHeader extends React.Component { return null; } - const main = toJS(model.main); // Convert to JS if its a mobx observable + const main = model.main; return (
    diff --git a/public/app/core/components/PageLoader/PageLoader.tsx b/public/app/core/components/PageLoader/PageLoader.tsx new file mode 100644 index 00000000000..dcb67dde220 --- /dev/null +++ b/public/app/core/components/PageLoader/PageLoader.tsx @@ -0,0 +1,17 @@ +import React, { SFC } from 'react'; + +interface Props { + pageName: string; +} + +const PageLoader: SFC = ({ pageName }) => { + const loadingText = `Loading ${pageName}...`; + return ( +
[JSX lost in extraction: a wrapper <div> showing {loadingText}]
    + ); +}; + +export default PageLoader; diff --git a/public/app/core/components/PermissionList/AddPermission.tsx b/public/app/core/components/PermissionList/AddPermission.tsx new file mode 100644 index 00000000000..71cc937ddfa --- /dev/null +++ b/public/app/core/components/PermissionList/AddPermission.tsx @@ -0,0 +1,144 @@ +import React, { Component } from 'react'; +import { UserPicker } from 'app/core/components/Picker/UserPicker'; +import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; +import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; +import { User } from 'app/types'; +import { + dashboardPermissionLevels, + dashboardAclTargets, + AclTarget, + PermissionLevel, + NewDashboardAclItem, + OrgRole, +} from 'app/types/acl'; + +export interface Props { + onAddPermission: (item: NewDashboardAclItem) => void; + onCancel: () => void; +} + +class AddPermissions extends Component { + static defaultProps = { + showPermissionLevels: true, + }; + + constructor(props) { + super(props); + this.state = this.getCleanState(); + } + + getCleanState() { + return { + userId: 0, + teamId: 0, + type: AclTarget.Team, + permission: PermissionLevel.View, + }; + } + + onTypeChanged = evt => { + const type = evt.target.value as AclTarget; + + switch (type) { + case AclTarget.User: + case AclTarget.Team: + this.setState({ type: type, userId: 0, teamId: 0, role: undefined }); + break; + case AclTarget.Editor: + this.setState({ type: type, userId: 0, teamId: 0, role: OrgRole.Editor }); + break; + case AclTarget.Viewer: + this.setState({ type: type, userId: 0, teamId: 0, role: OrgRole.Viewer }); + break; + } + }; + + onUserSelected = (user: User) => { + this.setState({ userId: user && !Array.isArray(user) ? user.id : 0 }); + }; + + onTeamSelected = (team: Team) => { + this.setState({ teamId: team && !Array.isArray(team) ? team.id : 0 }); + }; + + onPermissionChanged = (permission: OptionWithDescription) => { + this.setState({ permission: permission.value }); + }; + + onSubmit = async evt => { + evt.preventDefault(); + await this.props.onAddPermission(this.state); + this.setState(this.getCleanState()); + }; + + isValid() { + switch (this.state.type) { + case AclTarget.Team: + return this.state.teamId > 0; + case AclTarget.User: + return this.state.userId > 0; + } + return true; + } + + render() { + const { onCancel } = this.props; + const newItem = this.state; + const pickerClassName = 'width-20'; + const isValid = this.isValid(); + return ( +
[JSX lost in extraction: a close button wired to onCancel, an "Add Permission For" heading, a form wired to onSubmit containing a target-type <select> wired to onTypeChanged, a <UserPicker> when newItem.type is AclTarget.User, a <TeamPicker> when it is AclTarget.Team, a <DescriptionPicker> wired to onPermissionChanged, and a Save button disabled while !isValid]
    + ); + } +} + +export default AddPermissions; diff --git a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx b/public/app/core/components/PermissionList/DisabledPermissionListItem.tsx similarity index 84% rename from public/app/core/components/Permissions/DisabledPermissionsListItem.tsx rename to public/app/core/components/PermissionList/DisabledPermissionListItem.tsx index d65595dae66..ff679f67ae2 100644 --- a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx +++ b/public/app/core/components/PermissionList/DisabledPermissionListItem.tsx @@ -1,6 +1,6 @@ import React, { Component } from 'react'; import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; -import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; +import { dashboardPermissionLevels } from 'app/types/acl'; export interface Props { item: any; @@ -24,11 +24,11 @@ export default class DisabledPermissionListItem extends Component {
    {}} - value={item.permission} disabled={true} - className={'gf-form-input--form-dropdown-right'} + className={'gf-form-select-box__control--menu-right'} + value={item.permission} />
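[sketch, not part of this diff: how the re-pointed DescriptionPicker can be driven after this rename; dashboardPermissionLevels now comes from app/types/acl instead of the MobX PermissionsStore. Props other than those visible in the hunk above (notably optionsWithDesc) are assumptions.]

  import React from 'react';
  import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
  import { dashboardPermissionLevels } from 'app/types/acl';

  // Read-only permission display, mirroring DisabledPermissionListItem above.
  const ReadOnlyPermission = ({ permission }: { permission: number }) => (
    <DescriptionPicker
      optionsWithDesc={dashboardPermissionLevels}
      onSelected={() => {}}
      disabled={true}
      className={'gf-form-select-box__control--menu-right'}
      value={permission}
    />
  );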
    diff --git a/public/app/core/components/Permissions/PermissionsList.tsx b/public/app/core/components/PermissionList/PermissionList.tsx similarity index 55% rename from public/app/core/components/Permissions/PermissionsList.tsx rename to public/app/core/components/PermissionList/PermissionList.tsx index 7e64de012e4..772baa0c274 100644 --- a/public/app/core/components/Permissions/PermissionsList.tsx +++ b/public/app/core/components/PermissionList/PermissionList.tsx @@ -1,21 +1,20 @@ -import React, { Component } from 'react'; -import PermissionsListItem from './PermissionsListItem'; -import DisabledPermissionsListItem from './DisabledPermissionsListItem'; -import { observer } from 'mobx-react'; -import { FolderInfo } from './FolderInfo'; +import React, { PureComponent } from 'react'; +import PermissionsListItem from './PermissionListItem'; +import DisabledPermissionsListItem from './DisabledPermissionListItem'; +import { FolderInfo } from 'app/types'; +import { DashboardAcl } from 'app/types/acl'; export interface Props { - permissions: any[]; - removeItem: any; - permissionChanged: any; - fetching: boolean; + items: DashboardAcl[]; + onRemoveItem: (item: DashboardAcl) => void; + onPermissionChanged: any; + isFetching: boolean; folderInfo?: FolderInfo; } -@observer -class PermissionsList extends Component { +class PermissionList extends PureComponent { render() { - const { permissions, removeItem, permissionChanged, fetching, folderInfo } = this.props; + const { items, onRemoveItem, onPermissionChanged, isFetching, folderInfo } = this.props; return ( @@ -28,19 +27,18 @@ class PermissionsList extends Component { icon: 'fa fa-fw fa-street-view', }} /> - {permissions.map((item, idx) => { + {items.map((item, idx) => { return ( ); })} - {fetching === true && permissions.length < 1 ? ( + {isFetching === true && items.length < 1 ? ( ) : null} - {fetching === false && permissions.length < 1 ? ( + {isFetching === false && items.length < 1 ? 
(
+
+
+
+
+
+
+
+    );
+  }
+}
diff --git a/public/app/core/components/Permissions/PermissionsInfo.tsx b/public/app/core/components/PermissionList/PermissionsInfo.tsx
similarity index 100%
rename from public/app/core/components/Permissions/PermissionsInfo.tsx
rename to public/app/core/components/PermissionList/PermissionsInfo.tsx
diff --git a/public/app/core/components/Permissions/AddPermissions.test.tsx b/public/app/core/components/Permissions/AddPermissions.test.tsx
deleted file mode 100644
index c6d1ab381b8..00000000000
--- a/public/app/core/components/Permissions/AddPermissions.test.tsx
+++ /dev/null
@@ -1,90 +0,0 @@
-import React from 'react';
-import { shallow } from 'enzyme';
-import AddPermissions from './AddPermissions';
-import { RootStore } from 'app/stores/RootStore/RootStore';
-import { getBackendSrv } from 'app/core/services/backend_srv';
-
-jest.mock('app/core/services/backend_srv', () => ({
-  getBackendSrv: () => {
-    return {
-      get: () => {
-        return Promise.resolve([
-          { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' },
-          { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' },
-        ]);
-      },
-      post: jest.fn(() => Promise.resolve({})),
-    };
-  },
-}));
-
-describe('AddPermissions', () => {
-  let wrapper;
-  let store;
-  let instance;
-  const backendSrv: any = getBackendSrv();
-
-  beforeAll(() => {
-    store = RootStore.create({}, { backendSrv: backendSrv });
-    wrapper = shallow();
-    instance = wrapper.instance();
-    return store.permissions.load(1, true, false);
-  });
-
-  describe('when permission for a user is added', () => {
-    it('should save permission to db', () => {
-      const evt = {
-        target: {
-          value: 'User',
-        },
-      };
-      const userItem = {
-        id: 2,
-        login: 'user2',
-      };
-
-      instance.onTypeChanged(evt);
-      instance.onUserSelected(userItem);
-
-      wrapper.update();
-
-      expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false);
-
-      wrapper.find('form').simulate('submit', { preventDefault() {} });
-
-      expect(backendSrv.post.mock.calls.length).toBe(1);
-      expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions');
-    });
-  });
-
-  describe('when permission for team is added', () => {
-    it('should save permission to db', () => {
-      const evt = {
-        target: {
-          value: 'Group',
-        },
-      };
-
-      const teamItem = {
-        id: 2,
-        name: 'ug1',
-      };
-
-      instance.onTypeChanged(evt);
-      instance.onTeamSelected(teamItem);
-
-      wrapper.update();
-
-      expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false);
-
-      wrapper.find('form').simulate('submit', { preventDefault() {} });
-
-      expect(backendSrv.post.mock.calls.length).toBe(1);
-      expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions');
-    });
-  });
-
-  afterEach(() => {
-    backendSrv.post.mockClear();
-  });
-});
diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx
deleted file mode 100644
index 289e27aa731..00000000000
--- a/public/app/core/components/Permissions/AddPermissions.tsx
+++ /dev/null
@@ -1,128 +0,0 @@
-import React, { Component } from 'react';
-import { observer } from 'mobx-react';
-import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore';
-import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
-import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker';
-import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker';
-import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore';
-
-export interface Props {
-  permissions: any;
-}
-
-@observer
-class AddPermissions extends Component {
-  constructor(props) {
-    super(props);
-  }
-
-  componentWillMount() {
-    const { permissions } = this.props;
-    permissions.resetNewType();
-  }
-
-  onTypeChanged = evt => {
-    const { value } = evt.target;
-    const { permissions } = this.props;
-
-    permissions.setNewType(value);
-  };
-
-  onUserSelected = (user: User) => {
-    const { permissions } = this.props;
-    if (!user) {
-      permissions.newItem.setUser(null, null);
-      return;
-    }
-    return permissions.newItem.setUser(user.id, user.login, user.avatarUrl);
-  };
-
-  onTeamSelected = (team: Team) => {
-    const { permissions } = this.props;
-    if (!team) {
-      permissions.newItem.setTeam(null, null);
-      return;
-    }
-    return permissions.newItem.setTeam(team.id, team.name, team.avatarUrl);
-  };
-
-  onPermissionChanged = (permission: OptionWithDescription) => {
-    const { permissions } = this.props;
-    return permissions.newItem.setPermission(permission.value);
-  };
-
-  resetNewType() {
-    const { permissions } = this.props;
-    return permissions.resetNewType();
-  }
-
-  onSubmit = evt => {
-    evt.preventDefault();
-    const { permissions } = this.props;
-    permissions.addStoreItem();
-  };
-
-  render() {
-    const { permissions } = this.props;
-    const newItem = permissions.newItem;
-    const pickerClassName = 'width-20';
-
-    const isValid = newItem.isValid();
-
-    return (
-
-
-
-        Add Permission For
-
-
-
-
-
-
-
-        {newItem.type === 'User' ? (
-
-
-        ) : null}
-
-        {newItem.type === 'Group' ? (
-
-
-        ) : null}
-
-
-
-
-
-
-
-
-
-    );
-  }
-}
-
-export default AddPermissions;
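The deleted AddPermissions.test.tsx above hinges on jest's module-factory mocking: the whole backend_srv module is replaced before the component is imported, so the code under test talks to canned promises instead of real HTTP. A minimal self-contained sketch of that pattern (fixture trimmed to the fields the deleted test used; the assertion is illustrative, not the original test body):

```ts
import { getBackendSrv } from 'app/core/services/backend_srv';

// jest hoists this factory above the imports, so anything that imports
// backend_srv receives the fake defined here instead of the real service.
jest.mock('app/core/services/backend_srv', () => ({
  getBackendSrv: () => ({
    // Canned ACL entries mirroring the deleted test's fixtures.
    get: jest.fn(() =>
      Promise.resolve([{ id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }])
    ),
    post: jest.fn(() => Promise.resolve({})),
  }),
}));

test('posts to the dashboard ACL endpoint', async () => {
  const backendSrv: any = getBackendSrv();
  await backendSrv.post('/api/dashboards/id/1/permissions', {});
  expect(backendSrv.post).toHaveBeenCalledTimes(1);
});
```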
    -
    -
    -

    Permissions

    - - - -
    - -
    -
    - - - - -
    - ); - } -} - -export default DashboardPermissions; diff --git a/public/app/core/components/Permissions/FolderInfo.ts b/public/app/core/components/Permissions/FolderInfo.ts deleted file mode 100644 index d4a6020bb71..00000000000 --- a/public/app/core/components/Permissions/FolderInfo.ts +++ /dev/null @@ -1,5 +0,0 @@ -export interface FolderInfo { - id: number; - title: string; - url: string; -} diff --git a/public/app/core/components/Permissions/Permissions.tsx b/public/app/core/components/Permissions/Permissions.tsx deleted file mode 100644 index d17899c891f..00000000000 --- a/public/app/core/components/Permissions/Permissions.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import React, { Component } from 'react'; -import PermissionsList from './PermissionsList'; -import { observer } from 'mobx-react'; -import { FolderInfo } from './FolderInfo'; - -export interface DashboardAcl { - id?: number; - dashboardId?: number; - userId?: number; - userLogin?: string; - userEmail?: string; - teamId?: number; - team?: string; - permission?: number; - permissionName?: string; - role?: string; - icon?: string; - name?: string; - inherited?: boolean; - sortRank?: number; -} - -export interface Props { - dashboardId: number; - folderInfo?: FolderInfo; - permissions?: any; - isFolder: boolean; - backendSrv: any; -} - -@observer -class Permissions extends Component { - constructor(props) { - super(props); - const { dashboardId, isFolder, folderInfo } = this.props; - this.permissionChanged = this.permissionChanged.bind(this); - this.typeChanged = this.typeChanged.bind(this); - this.removeItem = this.removeItem.bind(this); - this.loadStore(dashboardId, isFolder, folderInfo && folderInfo.id === 0); - } - - loadStore(dashboardId, isFolder, isInRoot = false) { - return this.props.permissions.load(dashboardId, isFolder, isInRoot); - } - - permissionChanged(index: number, permission: number, permissionName: string) { - const { permissions } = this.props; - permissions.updatePermissionOnIndex(index, permission, permissionName); - } - - removeItem(index: number) { - const { permissions } = this.props; - permissions.removeStoreItem(index); - } - - resetNewType() { - const { permissions } = this.props; - permissions.resetNewType(); - } - - typeChanged(evt) { - const { value } = evt.target; - const { permissions, dashboardId } = this.props; - - if (value === 'Viewer' || value === 'Editor') { - permissions.addStoreItem({ permission: 1, role: value, dashboardId: dashboardId }, dashboardId); - this.resetNewType(); - return; - } - - permissions.setNewType(value); - } - - render() { - const { permissions, folderInfo } = this.props; - - return ( -
    - -
    - ); - } -} - -export default Permissions; diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx deleted file mode 100644 index a17aa8c04df..00000000000 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import React from 'react'; -import { observer } from 'mobx-react'; -import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; -import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; - -const setClassNameHelper = inherited => { - return inherited ? 'gf-form-disabled' : ''; -}; - -function ItemAvatar({ item }) { - if (item.userAvatarUrl) { - return ; - } - if (item.teamAvatarUrl) { - return ; - } - if (item.role === 'Editor') { - return ; - } - - return ; -} - -function ItemDescription({ item }) { - if (item.userId) { - return (User); - } - if (item.teamId) { - return (Team); - } - return (Role); -} - -export default observer(({ item, removeItem, permissionChanged, itemIndex, folderInfo }) => { - const handleRemoveItem = evt => { - evt.preventDefault(); - removeItem(itemIndex); - }; - - const handleChangePermission = permissionOption => { - permissionChanged(itemIndex, permissionOption.value, permissionOption.label); - }; - - const inheritedFromRoot = item.dashboardId === -1 && !item.inherited; - - return ( -
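In the deleted Permissions component every ACL mutation goes through the MobX store, and the non-obvious part is the branching in typeChanged: picking Viewer or Editor adds a role-based entry immediately (permission 1 = View), while picking a user or group type only switches the form into picker mode. The same logic in isolation, with the store typed loosely as in the original:

```ts
// Extracted from the deleted typeChanged() above; `permissions` is the
// MobX PermissionsStore instance the component received as a prop.
function typeChanged(value: string, permissions: any, dashboardId: number) {
  if (value === 'Viewer' || value === 'Editor') {
    // Role entries need no further input: add a View-level entry right away.
    permissions.addStoreItem({ permission: 1, role: value, dashboardId }, dashboardId);
    permissions.resetNewType();
    return;
  }
  // User/Group entries switch the form into picker mode instead.
  permissions.setNewType(value);
}
```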
diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx
deleted file mode 100644
index a17aa8c04df..00000000000
--- a/public/app/core/components/Permissions/PermissionsListItem.tsx
+++ /dev/null
@@ -1,91 +0,0 @@
-import React from 'react';
-import { observer } from 'mobx-react';
-import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
-import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore';
-
-const setClassNameHelper = inherited => {
-  return inherited ? 'gf-form-disabled' : '';
-};
-
-function ItemAvatar({ item }) {
-  if (item.userAvatarUrl) {
-    return ;
-  }
-  if (item.teamAvatarUrl) {
-    return ;
-  }
-  if (item.role === 'Editor') {
-    return ;
-  }
-
-  return ;
-}
-
-function ItemDescription({ item }) {
-  if (item.userId) {
-    return (User);
-  }
-  if (item.teamId) {
-    return (Team);
-  }
-  return (Role);
-}
-
-export default observer(({ item, removeItem, permissionChanged, itemIndex, folderInfo }) => {
-  const handleRemoveItem = evt => {
-    evt.preventDefault();
-    removeItem(itemIndex);
-  };
-
-  const handleChangePermission = permissionOption => {
-    permissionChanged(itemIndex, permissionOption.value, permissionOption.label);
-  };
-
-  const inheritedFromRoot = item.dashboardId === -1 && !item.inherited;
-
-  return (
-
-
-
-
-
-
-
-
-  );
-});
diff --git a/public/app/core/components/Picker/DescriptionOption.tsx b/public/app/core/components/Picker/DescriptionOption.tsx
index 1bcb7100489..9ddf13f7532 100644
--- a/public/app/core/components/Picker/DescriptionOption.tsx
+++ b/public/app/core/components/Picker/DescriptionOption.tsx
@@ -1,56 +1,25 @@
-import React, { Component } from 'react';
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
 
-export interface Props {
-  onSelect: any;
-  onFocus: any;
-  option: any;
-  isFocused: any;
-  className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps {
+  data: any;
 }
 
-class DescriptionOption extends Component {
-  constructor(props) {
-    super(props);
-    this.handleMouseDown = this.handleMouseDown.bind(this);
-    this.handleMouseEnter = this.handleMouseEnter.bind(this);
-    this.handleMouseMove = this.handleMouseMove.bind(this);
-  }
-
-  handleMouseDown(event) {
-    event.preventDefault();
-    event.stopPropagation();
-    this.props.onSelect(this.props.option, event);
-  }
-
-  handleMouseEnter(event) {
-    this.props.onFocus(this.props.option, event);
-  }
-
-  handleMouseMove(event) {
-    if (this.props.isFocused) {
-      return;
-    }
-    this.props.onFocus(this.props.option, event);
-  }
-
-  render() {
-    const { option, children, className } = this.props;
-    return (
diff --git a/public/app/core/components/Permissions/PermissionsList.tsx b/public/app/core/components/PermissionList/PermissionList.tsx
        Loading permissions...
@@ -48,7 +46,7 @@ class PermissionsList extends Component {
        No permissions are set. Will only be accessible by admins.
@@ -61,4 +59,4 @@ class PermissionsList extends Component {
   }
 }
 
-export default PermissionsList;
+export default PermissionList;
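The DescriptionOption rewrite above drops the hand-rolled mouse handling in favor of react-select v2's replaceable-components API: you wrap components.Option and render your own body inside it. The new render body was lost in extraction, so the following is a hypothetical reconstruction of the usual pattern, not the file's actual markup; only the imports and the ExtendedOptionProps workaround come from the diff itself.

```tsx
import React from 'react';
import { components } from 'react-select';
import { OptionProps } from 'react-select/lib/components/Option';

// https://github.com/JedWatson/react-select/issues/3038
interface ExtendedOptionProps extends OptionProps {
  data: any;
}

// Hypothetical body: delegate events and selection styling to the stock
// Option, then add a description row. `data.description` is an assumed field.
export const DescriptionOption = (props: ExtendedOptionProps) => {
  const { data, children } = props;
  return (
    <components.Option {...props}>
      <div className="description-picker-option__button">
        <span>{children}</span>
        <div className="gf-form-help-icon-text">{data.description}</div>
      </div>
    </components.Option>
  );
};
```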
diff --git a/public/app/core/components/PermissionList/PermissionListItem.tsx b/public/app/core/components/PermissionList/PermissionListItem.tsx
new file mode 100644
index 00000000000..56b6114d236
--- /dev/null
+++ b/public/app/core/components/PermissionList/PermissionListItem.tsx
@@ -0,0 +1,100 @@
+import React, { PureComponent } from 'react';
+import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
+import { dashboardPermissionLevels, DashboardAcl, PermissionLevel } from 'app/types/acl';
+import { FolderInfo } from 'app/types';
+
+const setClassNameHelper = inherited => {
+  return inherited ? 'gf-form-disabled' : '';
+};
+
+function ItemAvatar({ item }) {
+  if (item.userAvatarUrl) {
+    return ;
+  }
+  if (item.teamAvatarUrl) {
+    return ;
+  }
+  if (item.role === 'Editor') {
+    return ;
+  }
+
+  return ;
+}
+
+function ItemDescription({ item }) {
+  if (item.userId) {
+    return (User);
+  }
+  if (item.teamId) {
+    return (Team);
+  }
+  return (Role);
+}
+
+interface Props {
+  item: DashboardAcl;
+  onRemoveItem: (item: DashboardAcl) => void;
+  onPermissionChanged: (item: DashboardAcl, level: PermissionLevel) => void;
+  folderInfo?: FolderInfo;
+}
+
+export default class PermissionsListItem extends PureComponent<Props> {
+  onPermissionChanged = option => {
+    this.props.onPermissionChanged(this.props.item, option.value as PermissionLevel);
+  };
+
+  onRemoveItem = () => {
+    this.props.onRemoveItem(this.props.item);
+  };
+
+  render() {
+    const { item, folderInfo } = this.props;
+    const inheritedFromRoot = item.dashboardId === -1 && !item.inherited;
+
+    return (
+
+
+          {item.name}
+
+          {item.inherited &&
+            folderInfo && (
+
+                Inherited from folder{' '}
+
+                  {folderInfo.title}
+                {' '}
+
+            )}
+          {inheritedFromRoot && Default Permission}
+          Can
+
+
+
+
+          {!item.inherited ? (
+
+
+          ) : (
+
+          )}
+
-
-
-          {item.name}
-
-          {item.inherited &&
-            folderInfo && (
-
-                Inherited from folder{' '}
-
-                  {folderInfo.title}
-                {' '}
-
-            )}
-          {inheritedFromRoot && Default Permission}
-          Can
-
-
-
-
-          {!item.inherited ? (
-
-
-          ) : (
-
-          )}
-
NameValue
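Compared with the observer component it replaces, the new PermissionListItem is purely presentational: it never mutates a store by index, it just reports the affected DashboardAcl through the two typed callbacks in its Props. A hypothetical parent wiring (only the prop names and types come from the diff; the handler bodies are placeholders for whatever the real container dispatches):

```tsx
import React from 'react';
import PermissionListItem from 'app/core/components/PermissionList/PermissionListItem';
import { DashboardAcl, PermissionLevel } from 'app/types/acl';

// Placeholder handlers standing in for the container's real dispatch calls.
const removeItem = (item: DashboardAcl) => console.log('remove ACL entry', item.id);
const updateItem = (item: DashboardAcl, level: PermissionLevel) =>
  console.log('set ACL entry', item.id, 'to level', level);

export const PermissionRows = ({ items }: { items: DashboardAcl[] }) => (
  <tbody>
    {items.map(item => (
      <PermissionListItem key={item.id} item={item} onRemoveItem={removeItem} onPermissionChanged={updateItem} />
    ))}
  </tbody>
);
```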