Merge branch 'master' into 14773-light-theme-page-bg

pull/14774/head
ijin08 6 years ago
commit 7133b79928
  1. .circleci/config.yml (10)
  2. CHANGELOG.md (8)
  3. Dockerfile (2)
  4. README.md (2)
  5. appveyor.yml (2)
  6. conf/defaults.ini (4)
  7. conf/sample.ini (7)
  8. devenv/docker/blocks/alert_webhook_listener/Dockerfile (7)
  9. devenv/docker/blocks/alert_webhook_listener/docker-compose.yaml (5)
  10. devenv/docker/blocks/alert_webhook_listener/main.go (24)
  11. docs/sources/auth/auth-proxy.md (3)
  12. docs/sources/auth/generic-oauth.md (11)
  13. docs/sources/http_api/admin.md (6)
  14. docs/sources/installation/configuration.md (6)
  15. docs/sources/installation/debian.md (19)
  16. docs/sources/installation/rpm.md (22)
  17. jest.config.js (4)
  18. package.json (57)
  19. packages/grafana-build/README.md (4)
  20. packages/grafana-build/package.json (13)
  21. packages/grafana-ui/README.md (3)
  22. packages/grafana-ui/package.json (33)
  23. packages/grafana-ui/src/components/DeleteButton/DeleteButton.test.tsx (9)
  24. packages/grafana-ui/src/components/DeleteButton/DeleteButton.tsx (20)
  25. packages/grafana-ui/src/components/DeleteButton/_DeleteButton.scss (0)
  26. packages/grafana-ui/src/components/index.scss (1)
  27. packages/grafana-ui/src/components/index.ts (1)
  28. packages/grafana-ui/src/forms/GfFormLabel/GfFormLabel.tsx (23)
  29. packages/grafana-ui/src/forms/index.ts (1)
  30. packages/grafana-ui/src/index.scss (1)
  31. packages/grafana-ui/src/index.ts (5)
  32. packages/grafana-ui/src/types/index.ts (3)
  33. packages/grafana-ui/src/types/jquery.d.ts (17)
  34. packages/grafana-ui/src/types/panel.ts (31)
  35. packages/grafana-ui/src/types/series.ts (53)
  36. packages/grafana-ui/src/types/time.ts (17)
  37. packages/grafana-ui/src/utils/index.ts (1)
  38. packages/grafana-ui/src/utils/processTimeSeries.ts (174)
  39. packages/grafana-ui/src/visualizations/Graph/Graph.tsx (15)
  40. packages/grafana-ui/src/visualizations/index.ts (1)
  41. packages/grafana-ui/tsconfig.json (18)
  42. packages/grafana-ui/tslint.json (3)
  43. pkg/api/dashboard_snapshot.go (120)
  44. pkg/api/dashboard_snapshot_test.go (87)
  45. pkg/api/plugins.go (8)
  46. pkg/components/dashdiffs/formatter_json.go (49)
  47. pkg/login/ldap.go (2)
  48. pkg/middleware/auth_proxy.go (28)
  49. pkg/middleware/middleware_test.go (90)
  50. pkg/models/dashboard_snapshot.go (4)
  51. pkg/plugins/datasource_plugin.go (12)
  52. pkg/services/alerting/notifier.go (6)
  53. pkg/services/alerting/test_notification.go (2)
  54. pkg/services/dashboards/dashboard_service.go (4)
  55. pkg/services/notifications/webhook.go (6)
  56. pkg/services/session/mysql.go (25)
  57. pkg/services/sqlstore/dashboard_snapshot.go (2)
  58. pkg/services/sqlstore/datasource.go (4)
  59. pkg/services/sqlstore/login_attempt.go (8)
  60. pkg/services/sqlstore/migrations/dashboard_snapshot_mig.go (4)
  61. pkg/services/sqlstore/migrator/conditions.go (41)
  62. pkg/services/sqlstore/migrator/dialect.go (8)
  63. pkg/services/sqlstore/migrator/migrations.go (22)
  64. pkg/services/sqlstore/migrator/migrator.go (24)
  65. pkg/services/sqlstore/migrator/mysql_dialect.go (18)
  66. pkg/services/sqlstore/migrator/postgres_dialect.go (6)
  67. pkg/services/sqlstore/migrator/sqlite_dialect.go (7)
  68. pkg/services/sqlstore/sqlstore.go (13)
  69. pkg/services/sqlstore/user.go (7)
  70. pkg/services/sqlstore/user_test.go (23)
  71. pkg/setting/setting_oauth.go (1)
  72. pkg/social/social.go (6)
  73. pkg/tsdb/cloudwatch/credentials.go (3)
  74. pkg/tsdb/influxdb/model_parser.go (2)
  75. pkg/tsdb/influxdb/model_parser_test.go (2)
  76. pkg/tsdb/influxdb/models.go (1)
  77. pkg/tsdb/influxdb/query.go (10)
  78. pkg/tsdb/influxdb/query_test.go (14)
  79. pkg/tsdb/postgres/macros.go (6)
  80. pkg/tsdb/postgres/macros_test.go (23)
  81. public/app/core/components/CustomScrollbar/CustomScrollbar.tsx (6)
  82. public/app/core/components/CustomScrollbar/__snapshots__/CustomScrollbar.test.tsx.snap (8)
  83. public/app/core/components/EmptyListCTA/EmptyListCTA.tsx (2)
  84. public/app/core/components/ErrorBoundary/ErrorBoundary.tsx (44)
  85. public/app/core/components/Form/Element.tsx (43)
  86. public/app/core/components/Form/Label.tsx (19)
  87. public/app/core/components/Form/index.ts (2)
  88. public/app/core/components/PluginHelp/PluginHelp.tsx (83)
  89. public/app/core/components/ToggleButtonGroup/ToggleButtonGroup.tsx (6)
  90. public/app/core/components/Tooltip/Popper.tsx (12)
  91. public/app/core/components/Tooltip/withPopper.tsx (5)
  92. public/app/core/components/code_editor/code_editor.ts (4)
  93. public/app/core/components/json_explorer/helpers.ts (2)
  94. public/app/core/components/json_explorer/json_explorer.ts (2)
  95. public/app/core/components/sidemenu/BottomNavLinks.test.tsx (2)
  96. public/app/core/components/sidemenu/__snapshots__/BottomNavLinks.test.tsx.snap (2)
  97. public/app/core/directives/tags.ts (4)
  98. public/app/core/live/live_srv.ts (2)
  99. public/app/core/logs_model.ts (13)
  100. public/app/core/services/backend_srv.ts (4)
  Some files were not shown because too many files have changed in this diff.

@ -19,7 +19,7 @@ version: 2
jobs:
mysql-integration-test:
docker:
- image: circleci/golang:1.11
- image: circleci/golang:1.11.4
- image: circleci/mysql:5.6-ram
environment:
MYSQL_ROOT_PASSWORD: rootpass
@ -39,7 +39,7 @@ jobs:
postgres-integration-test:
docker:
- image: circleci/golang:1.11
- image: circleci/golang:1.11.4
- image: circleci/postgres:9.3-ram
environment:
POSTGRES_USER: grafanatest
@ -74,7 +74,7 @@ jobs:
gometalinter:
docker:
- image: circleci/golang:1.11
- image: circleci/golang:1.11.4
environment:
# we need CGO because of go-sqlite3
CGO_ENABLED: 1
@ -117,7 +117,7 @@ jobs:
test-backend:
docker:
- image: circleci/golang:1.11
- image: circleci/golang:1.11.4
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@ -175,7 +175,7 @@ jobs:
build:
docker:
- image: grafana/build-container:1.2.1
- image: grafana/build-container:1.2.2
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@ -2,6 +2,7 @@
### New Features
* **Alerting**: Adds support for Google Hangouts Chat notifications [#11221](https://github.com/grafana/grafana/issues/11221), thx [@PatrickSchuster](https://github.com/PatrickSchuster)
* **Snapshots**: Enable deletion of public snapshot [#14109](https://github.com/grafana/grafana/issues/14109)
### Minor
@ -13,6 +14,13 @@
* **Templating**: Escaping "Custom" template variables [#13754](https://github.com/grafana/grafana/issues/13754), thx [@IntegersOfK](https://github.com/IntegersOfK)
* **Admin**: When multiple user invitations, all links are the same as the first user who was invited [#14483](https://github.com/grafana/grafana/issues/14483)
* **LDAP**: Upgrade go-ldap to v3 [#14548](https://github.com/grafana/grafana/issues/14548)
* **Proxy whitelist**: Add CIDR capability to auth_proxy whitelist [#14546](https://github.com/grafana/grafana/issues/14546), thx [@jacobrichard](https://github.com/jacobrichard)
* **OAuth**: Support OAuth providers that are not RFC6749 compliant [#14562](https://github.com/grafana/grafana/issues/14562), thx [@tdabasinskas](https://github.com/tdabasinskas)
* **Units**: Add blood glucose level units mg/dL and mmol/L [#14519](https://github.com/grafana/grafana/issues/14519), thx [@kjedamzik](https://github.com/kjedamzik)
* **Stackdriver**: Aggregating series returns more than one series [#14581](https://github.com/grafana/grafana/issues/14581) and [#13914](https://github.com/grafana/grafana/issues/13914), thx [@kinok](https://github.com/kinok)
### Bug fixes
* **Search**: Fix for issue with scrolling the "tags filter" dropdown, fixes [#14486](https://github.com/grafana/grafana/issues/14486)
# 5.4.2 (2018-12-13)

@ -1,5 +1,5 @@
# Golang build container
FROM golang:1.11
FROM golang:1.11.4
WORKDIR $GOPATH/src/github.com/grafana/grafana

@ -90,7 +90,7 @@ Choose this option to build on platforms other than linux/amd64 and/or not have
The resulting image will be tagged as `grafana/grafana:dev`
Notice: If you are using Docker for MacOS, be sure to let limit of Memory bigger than 2 GiB (at docker -> Perferences -> Advanced), otherwize you may faild at `grunt build`
Notice: If you are using Docker for MacOS, be sure to set the Memory limit to more than 2 GiB (at Docker -> Preferences -> Advanced), otherwise `grunt build` may fail
### Dev config

@ -7,7 +7,7 @@ clone_folder: c:\gopath\src\github.com\grafana\grafana
environment:
nodejs_version: "8"
GOPATH: C:\gopath
GOVERSION: 1.11
GOVERSION: 1.11.4
install:
- rmdir c:\go /s /q

@ -103,6 +103,9 @@ server_cert_name =
# For "sqlite3" only, path relative to data_path setting
path = grafana.db
# For "sqlite3" only. cache mode setting used for connecting to the database
cache_mode = private
#################################### Session #############################
[session]
# Either "memory", "file", "redis", "mysql", "postgres", "memcache", default is "file"
@ -335,6 +338,7 @@ tls_skip_verify_insecure = false
tls_client_cert =
tls_client_key =
tls_client_ca =
send_client_credentials_via_post = false
#################################### Basic Auth ##########################
[auth.basic]

@ -99,6 +99,9 @@
# Set to true to log the sql calls and execution times.
log_queries =
# For "sqlite3" only. cache mode setting used for connecting to the database. (private, shared)
;cache_mode = private
#################################### Session ####################################
[session]
# Either "memory", "file", "redis", "mysql", "postgres", default is "file"
@ -284,6 +287,10 @@ log_queries =
;tls_client_key =
;tls_client_ca =
; Set to true to enable sending client_id and client_secret via POST body instead of Basic authentication HTTP header
; This might be required if the OAuth provider is not RFC6749 compliant, only supporting credentials passed via POST payload
;send_client_credentials_via_post = false
#################################### Grafana.com Auth ####################
[auth.grafana_com]
;enabled = false

@ -0,0 +1,7 @@
FROM golang:latest
ADD main.go /
WORKDIR /
RUN go build -o main .
EXPOSE 3010
ENTRYPOINT ["/main"]

@ -0,0 +1,5 @@
alert_webhook_listener:
build: docker/blocks/alert_webhook_listener
network_mode: host
ports:
- "3010:3010"

@ -0,0 +1,24 @@
package main
import (
"fmt"
"io"
"io/ioutil"
"net/http"
)
func hello(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
return
}
line := fmt.Sprintf("webbhook: -> %s", string(body))
fmt.Println(line)
io.WriteString(w, line)
}
func main() {
http.HandleFunc("/", hello)
http.ListenAndServe(":3010", nil)
}

@ -31,9 +31,10 @@ auto_sign_up = true
ldap_sync_ttl = 60
# Limit where auth proxy requests come from by configuring a list of IP addresses.
# This can be used to prevent users spoofing the X-WEBAUTH-USER header.
# Example `whitelist = 192.168.1.1, 192.168.1.0/24, 2001::23, 2001::0/120`
whitelist =
# Optionally define more headers to sync other user attributes
# Example `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL``
# Example `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL`
headers =
```
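For reference, the CIDR matching behind this whitelist comes down to parsing each entry as a network and testing containment with Go's standard library. Below is a minimal standalone sketch, not the middleware code itself (it promotes bare IPv6 entries to /128 hosts, whereas the `pkg/middleware/auth_proxy.go` hunk further down appends `/32` to every bare entry):

```go
package main

import (
	"fmt"
	"net"
	"strings"
)

// ipInWhitelist reports whether sourceIP falls inside any whitelist entry.
// Entries may be plain IPs ("192.168.1.1") or CIDR blocks ("192.168.1.0/24", "2001::0/120").
func ipInWhitelist(sourceIP string, whitelist string) bool {
	source := net.ParseIP(sourceIP)
	for _, entry := range strings.Split(whitelist, ",") {
		entry = strings.TrimSpace(entry)
		// Promote a bare address to a single-host network so ParseCIDR accepts it.
		if !strings.Contains(entry, "/") {
			if strings.Contains(entry, ":") {
				entry += "/128" // IPv6 host
			} else {
				entry += "/32" // IPv4 host
			}
		}
		_, network, err := net.ParseCIDR(entry)
		if err != nil {
			continue // skip malformed entries
		}
		if network.Contains(source) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(ipInWhitelist("192.168.1.10", "192.168.1.0/24, 2001::0/120")) // true
	fmt.Println(ipInWhitelist("192.168.3.1", "192.168.1.0/24, 2001::0/120"))  // false
}
```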

@ -209,6 +209,17 @@ allowed_organizations =
token_url = https://<your domain>.my.centrify.com/OAuth2/Token/<Application ID>
```
## Set up OAuth2 with non-compliant providers
Some OAuth2 providers might not support `client_id` and `client_secret` passed via the Basic Authentication HTTP header, which
results in an `invalid_client` error. To allow Grafana to authenticate against these types of providers, the client identifiers must be
sent via the POST body, which can be enabled via the following settings:
```bash
[auth.generic_oauth]
send_client_credentials_via_post = true
```
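To make the difference concrete, here is a rough sketch of the two ways a client can present its credentials to a token endpoint. It uses plain `net/http` rather than Grafana's OAuth code, and the endpoint URL, authorization code, and credentials are placeholders:

```go
package main

import (
	"net/http"
	"net/url"
	"strings"
)

const tokenURL = "https://provider.example.com/oauth2/token" // placeholder endpoint

// RFC 6749 default: client_id/client_secret in the Basic Authentication header.
func tokenRequestBasicAuth(clientID, clientSecret string) (*http.Request, error) {
	form := url.Values{"grant_type": {"authorization_code"}, "code": {"<auth code>"}}
	req, err := http.NewRequest("POST", tokenURL, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.SetBasicAuth(clientID, clientSecret)
	return req, nil
}

// What send_client_credentials_via_post enables: credentials travel in the POST body instead.
func tokenRequestPostBody(clientID, clientSecret string) (*http.Request, error) {
	form := url.Values{
		"grant_type":    {"authorization_code"},
		"code":          {"<auth code>"},
		"client_id":     {clientID},
		"client_secret": {clientSecret},
	}
	req, err := http.NewRequest("POST", tokenURL, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	return req, nil
}

func main() {
	_, _ = tokenRequestBasicAuth("my-client", "my-secret")
	_, _ = tokenRequestPostBody("my-client", "my-secret")
}
```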
<hr>

@ -285,7 +285,7 @@ Content-Type: application/json
HTTP/1.1 200
Content-Type: application/json
{message: "User permissions updated"}
{"message": "User permissions updated"}
```
## Delete global User
@ -308,7 +308,7 @@ Content-Type: application/json
HTTP/1.1 200
Content-Type: application/json
{message: "User deleted"}
{"message": "User deleted"}
```
## Pause all alerts
@ -339,5 +339,5 @@ JSON Body schema:
HTTP/1.1 200
Content-Type: application/json
{state: "new state", message: "alerts pause/un paused", "alertsAffected": 100}
{"state": "new state", "message": "alerts pause/un paused", "alertsAffected": 100}
```

@ -250,6 +250,12 @@ Sets the maximum amount of time a connection may be reused. The default is 14400
Set to `true` to log the sql calls and execution times.
### cache_mode
For "sqlite3" only. [Shared cache](https://www.sqlite.org/sharedcache.html) setting used for connecting to the database. (private, shared)
Defaults to private.
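Under the hood this maps to SQLite's shared-cache mode. A purely illustrative sketch of what the setting typically translates to at the driver level, assuming the `mattn/go-sqlite3` driver and a local `grafana.db`; this is not Grafana's actual connection-string handling:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// cache_mode = private  ->  cache=private in the DSN (the default)
	// cache_mode = shared   ->  cache=shared, letting connections share one page cache
	db, err := sql.Open("sqlite3", "file:grafana.db?cache=shared")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```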
<hr />
## [security]

@ -34,32 +34,29 @@ sudo dpkg -i grafana_<version>_amd64.deb
Example:
```bash
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.4_amd64.deb
wget https://dl.grafana.com/oss/release/grafana_5.4.2_amd64.deb
sudo apt-get install -y adduser libfontconfig
sudo dpkg -i grafana_5.1.4_amd64.deb
sudo dpkg -i grafana_5.4.2_amd64.deb
```
## APT Repository
Add the following line to your `/etc/apt/sources.list` file.
Create a file `/etc/apt/sources.list.d/grafana.list` and add the following to it.
```bash
deb https://packagecloud.io/grafana/stable/debian/ stretch main
deb https://packages.grafana.com/oss/deb stable main
```
Use the above line even if you are on Ubuntu or another Debian version.
There is also a testing repository if you want beta or release
candidates.
There is a separate repository if you want beta releases.
```bash
deb https://packagecloud.io/grafana/testing/debian/ stretch main
deb https://packages.grafana.com/oss/deb beta main
```
Then add the [Package Cloud](https://packagecloud.io/grafana) key. This
allows you to install signed packages.
Use the above line even if you are on Ubuntu or another Debian version. Then add our gpg key. This allows you to install signed packages.
```bash
curl https://packagecloud.io/gpg.key | sudo apt-key add -
curl https://packages.grafana.com/gpg.key | sudo apt-key add -
```
Update your Apt repositories and install Grafana

@ -32,7 +32,7 @@ $ sudo yum install <rpm package url>
Example:
```bash
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
$ sudo yum install https://dl.grafana.com/oss/release/grafana-5.4.2-1.x86_64.rpm
```
Or install manually using `rpm`. First execute
@ -44,7 +44,7 @@ $ wget <rpm package url>
Example:
```bash
$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
$ wget https://dl.grafana.com/oss/release/grafana-5.4.2-1.x86_64.rpm
```
### On CentOS / Fedora / Redhat:
@ -67,19 +67,27 @@ Add the following to a new file at `/etc/yum.repos.d/grafana.repo`
```bash
[grafana]
name=grafana
baseurl=https://packagecloud.io/grafana/stable/el/7/$basearch
baseurl=https://packages.grafana.com/oss/rpm
repo_gpgcheck=1
enabled=1
gpgcheck=1
gpgkey=https://packagecloud.io/gpg.key https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana
gpgkey=https://packages.grafana.com/gpg.key
sslverify=1
sslcacert=/etc/pki/tls/certs/ca-bundle.crt
```
There is also a testing repository if you want beta or release candidates.
There is a separate repository if you want beta releases.
```bash
baseurl=https://packagecloud.io/grafana/testing/el/7/$basearch
[grafana]
name=grafana
baseurl=https://packages.grafana.com/oss/rpm-beta
repo_gpgcheck=1
enabled=1
gpgcheck=1
gpgkey=https://packages.grafana.com/gpg.key
sslverify=1
sslcacert=/etc/pki/tls/certs/ca-bundle.crt
```
Then install Grafana via the `yum` command.
@ -91,7 +99,7 @@ $ sudo yum install grafana
### RPM GPG Key
The RPMs are signed, you can verify the signature with this [public GPG
key](https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana).
key](https://packages.grafana.com/gpg.key).
## Package details

@ -6,7 +6,9 @@ module.exports = {
},
"moduleDirectories": ["node_modules", "public"],
"roots": [
"<rootDir>/public"
"<rootDir>/public/app",
"<rootDir>/public/test",
"<rootDir>/packages"
],
"testRegex": "(\\.|/)(test)\\.(jsx?|tsx?)$",
"moduleFileExtensions": [

@ -1,4 +1,5 @@
{
"private": true,
"author": {
"name": "Torkel Ödegaard",
"company": "Grafana Labs"
@ -11,14 +12,16 @@
},
"devDependencies": {
"@babel/core": "^7.1.2",
"@rtsao/plugin-proposal-class-properties": "^7.0.1-patch.1",
"@babel/plugin-syntax-dynamic-import": "^7.0.0",
"@babel/preset-env": "^7.1.0",
"@babel/preset-react": "^7.0.0",
"@babel/preset-typescript": "^7.1.0",
"@rtsao/plugin-proposal-class-properties": "^7.0.1-patch.1",
"@types/classnames": "^2.2.6",
"@types/d3": "^4.10.1",
"@types/enzyme": "^3.1.13",
"@types/jest": "^23.3.2",
"@types/jquery": "^1.10.35",
"@types/node": "^8.0.31",
"@types/react": "^16.7.6",
"@types/react-custom-scrollbars": "^4.0.5",
@ -49,15 +52,12 @@
"grunt-cli": "~1.2.0",
"grunt-contrib-clean": "~1.0.0",
"grunt-contrib-compress": "^1.3.0",
"grunt-contrib-concat": "^1.0.1",
"grunt-contrib-copy": "~1.0.0",
"grunt-contrib-cssmin": "~1.0.2",
"grunt-exec": "^1.0.1",
"grunt-newer": "^1.3.0",
"grunt-notify": "^0.4.5",
"grunt-postcss": "^0.8.0",
"grunt-sass": "^2.0.0",
"grunt-sass-lint": "^0.2.2",
"grunt-sass-lint": "^0.2.4",
"grunt-usemin": "3.1.1",
"grunt-webpack": "^3.0.2",
"html-loader": "^0.5.1",
@ -73,6 +73,7 @@
"ng-annotate-webpack-plugin": "^0.3.0",
"ngtemplate-loader": "^2.0.1",
"npm": "^5.4.2",
"node-sass": "^4.11.0",
"optimize-css-assets-webpack-plugin": "^4.0.2",
"phantomjs-prebuilt": "^2.1.15",
"postcss-browser-reporter": "^0.5.0",
@ -92,6 +93,7 @@
"tslib": "^1.9.3",
"tslint": "^5.8.0",
"tslint-loader": "^3.5.3",
"tslint-react": "^3.6.0",
"typescript": "^3.0.3",
"uglifyjs-webpack-plugin": "^1.2.7",
"webpack": "4.19.1",
@ -108,15 +110,30 @@
"watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js",
"build": "grunt build",
"test": "grunt test",
"lint": "tslint -c tslint.json --project tsconfig.json",
"tslint": "tslint -c tslint.json --project tsconfig.json",
"typecheck": "tsc --noEmit",
"jest": "jest --notify --watch",
"api-tests": "jest --notify --watch --config=tests/api/jest.js",
"precommit": "lint-staged && grunt precommit"
"precommit": "grunt precommit"
},
"husky": {
"hooks": {
"pre-commit": "lint-staged && grunt precommit"
}
},
"lint-staged": {
"*.{ts,tsx}": ["prettier --write", "git add"],
"*.scss": ["prettier --write", "git add"],
"*pkg/**/*.go": ["gofmt -w -s", "git add"]
"*.{ts,tsx}": [
"prettier --write",
"git add"
],
"*.scss": [
"prettier --write",
"git add"
],
"*pkg/**/*.go": [
"gofmt -w -s",
"git add"
]
},
"prettier": {
"trailingComma": "es5",
@ -126,6 +143,7 @@
"license": "Apache-2.0",
"dependencies": {
"@babel/polyfill": "^7.0.0",
"@torkelo/react-select": "2.1.1",
"angular": "1.6.6",
"angular-bindonce": "0.3.1",
"angular-native-dragdrop": "1.2.2",
@ -133,7 +151,7 @@
"angular-sanitize": "1.6.6",
"baron": "^3.0.3",
"brace": "^0.10.0",
"classnames": "^2.2.5",
"classnames": "^2.2.6",
"clipboard": "^1.7.1",
"d3": "^4.11.0",
"d3-scale-chromatic": "^1.3.0",
@ -152,10 +170,9 @@
"react-custom-scrollbars": "^4.2.1",
"react-dom": "^16.6.3",
"react-grid-layout": "0.16.6",
"react-popper": "^1.3.0",
"react-highlight-words": "0.11.0",
"react-popper": "^1.3.0",
"react-redux": "^5.0.7",
"@torkelo/react-select": "2.1.1",
"react-sizeme": "^2.3.6",
"react-table": "^6.8.6",
"react-transition-group": "^2.2.1",
@ -165,18 +182,26 @@
"redux-thunk": "^2.3.0",
"remarkable": "^1.7.1",
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "^5.4.3",
"rxjs": "^6.3.3",
"slate": "^0.33.4",
"slate-plain-serializer": "^0.5.10",
"slate-prism": "^0.5.0",
"slate-react": "^0.12.4",
"tether": "^1.4.0",
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
"tinycolor2": "^1.4.1",
"tslint-react": "^3.6.0"
"tinycolor2": "^1.4.1"
},
"resolutions": {
"caniuse-db": "1.0.30000772",
"**/@types/react": "16.7.6"
},
"workspaces": {
"packages": [
"packages/*"
],
"nohoist": [
"**/@types/*",
"**/@types/*/**"
]
}
}

@ -0,0 +1,4 @@
# Shared build scripts
Shared build scripts for plugins & internal packages.

@ -0,0 +1,13 @@
{
"name": "@grafana/build",
"private": true,
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"tslint": "echo \"Nothing to do\"",
"typecheck": "echo \"Nothing to do\""
},
"author": "",
"license": "ISC"
}

@ -0,0 +1,3 @@
# Grafana (WIP) shared component library
Used by internal & external plugins.

@ -0,0 +1,33 @@
{
"name": "@grafana/ui",
"version": "1.0.0",
"description": "",
"main": "src/index.ts",
"scripts": {
"tslint": "tslint -c tslint.json --project tsconfig.json",
"typecheck": "tsc --noEmit"
},
"author": "",
"license": "ISC",
"dependencies": {
"@torkelo/react-select": "2.1.1",
"classnames": "^2.2.5",
"jquery": "^3.2.1",
"lodash": "^4.17.10",
"moment": "^2.22.2",
"react": "^16.6.3",
"react-dom": "^16.6.3",
"react-highlight-words": "0.11.0",
"react-popper": "^1.3.0",
"react-transition-group": "^2.2.1",
"react-virtualized": "^9.21.0"
},
"devDependencies": {
"@types/jest": "^23.3.2",
"@types/lodash": "^4.14.119",
"@types/react": "^16.7.6",
"@types/classnames": "^2.2.6",
"@types/jquery": "^1.10.35",
"typescript": "^3.2.2"
}
}

@ -1,10 +1,10 @@
import React from 'react';
import DeleteButton from './DeleteButton';
import { DeleteButton } from './DeleteButton';
import { shallow } from 'enzyme';
describe('DeleteButton', () => {
let wrapper;
let deleted;
let wrapper: any;
let deleted: any;
beforeAll(() => {
deleted = false;
@ -12,7 +12,8 @@ describe('DeleteButton', () => {
function deleteItem() {
deleted = true;
}
wrapper = shallow(<DeleteButton onConfirmDelete={() => deleteItem()} />);
wrapper = shallow(<DeleteButton onConfirm={() => deleteItem()} />);
});
it('should show confirm delete when clicked', () => {

@ -1,19 +1,19 @@
import React, { PureComponent } from 'react';
import React, { PureComponent, SyntheticEvent } from 'react';
export interface DeleteButtonProps {
onConfirmDelete();
interface Props {
onConfirm(): void;
}
export interface DeleteButtonStates {
interface State {
showConfirm: boolean;
}
export default class DeleteButton extends PureComponent<DeleteButtonProps, DeleteButtonStates> {
state: DeleteButtonStates = {
export class DeleteButton extends PureComponent<Props, State> {
state: State = {
showConfirm: false,
};
onClickDelete = event => {
onClickDelete = (event: SyntheticEvent) => {
if (event) {
event.preventDefault();
}
@ -23,7 +23,7 @@ export default class DeleteButton extends PureComponent<DeleteButtonProps, Delet
});
};
onClickCancel = event => {
onClickCancel = (event: SyntheticEvent) => {
if (event) {
event.preventDefault();
}
@ -33,7 +33,7 @@ export default class DeleteButton extends PureComponent<DeleteButtonProps, Delet
};
render() {
const onClickConfirm = this.props.onConfirmDelete;
const { onConfirm } = this.props;
let showConfirm;
let showDeleteButton;
@ -55,7 +55,7 @@ export default class DeleteButton extends PureComponent<DeleteButtonProps, Delet
<a className="btn btn-small" onClick={this.onClickCancel}>
Cancel
</a>
<a className="btn btn-danger btn-small" onClick={onClickConfirm}>
<a className="btn btn-danger btn-small" onClick={onConfirm}>
Confirm Delete
</a>
</span>

@ -0,0 +1 @@
@import 'DeleteButton/DeleteButton';

@ -0,0 +1 @@
export { DeleteButton } from './DeleteButton/DeleteButton';

@ -0,0 +1,23 @@
import React, { SFC, ReactNode } from 'react';
import classNames from 'classnames';
interface Props {
children: ReactNode;
htmlFor?: string;
className?: string;
isFocused?: boolean;
isInvalid?: boolean;
}
export const GfFormLabel: SFC<Props> = ({ children, isFocused, isInvalid, className, htmlFor, ...rest }) => {
const classes = classNames('gf-form-label', className, {
'gf-form-label--is-focused': isFocused,
'gf-form-label--is-invalid': isInvalid,
});
return (
<label className={classes} {...rest} htmlFor={htmlFor}>
{children}
</label>
);
};

@ -0,0 +1 @@
export { GfFormLabel } from './GfFormLabel/GfFormLabel';

@ -0,0 +1 @@
@import 'components/index';

@ -0,0 +1,5 @@
export * from './components';
export * from './visualizations';
export * from './types';
export * from './utils';
export * from './forms';

@ -0,0 +1,3 @@
export * from './series';
export * from './time';
export * from './panel';

@ -0,0 +1,17 @@
interface JQueryPlot {
(element: HTMLElement | JQuery, data: any, options: any): void;
plugins: any[];
}
interface JQueryStatic {
plot: JQueryPlot;
}
interface JQuery {
place_tt: any;
modal: any;
tagsinput: any;
typeahead: any;
accessKey: any;
tooltip: any;
}

@ -0,0 +1,31 @@
import { TimeSeries, LoadingState } from './series';
import { TimeRange } from './time';
export interface PanelProps<T = any> {
timeSeries: TimeSeries[];
timeRange: TimeRange;
loading: LoadingState;
options: T;
renderCounter: number;
width: number;
height: number;
}
export interface PanelOptionsProps<T = any> {
options: T;
onChange: (options: T) => void;
}
export interface PanelSize {
width: number;
height: number;
}
export interface PanelMenuItem {
type?: 'submenu' | 'divider';
text?: string;
iconClassName?: string;
onClick?: () => void;
shortcut?: string;
subMenu?: PanelMenuItem[];
}

@ -0,0 +1,53 @@
export enum LoadingState {
NotStarted = 'NotStarted',
Loading = 'Loading',
Done = 'Done',
Error = 'Error',
}
export type TimeSeriesValue = number | null;
export type TimeSeriesPoints = TimeSeriesValue[][];
export interface TimeSeries {
target: string;
datapoints: TimeSeriesPoints;
unit?: string;
}
/** View model projection of a time series */
export interface TimeSeriesVM {
label: string;
color: string;
data: TimeSeriesValue[][];
stats: TimeSeriesStats;
}
export interface TimeSeriesStats {
total: number | null;
max: number | null;
min: number | null;
logmin: number;
avg: number | null;
current: number | null;
first: number | null;
delta: number;
diff: number | null;
range: number | null;
timeStep: number;
count: number;
allIsNull: boolean;
allIsZero: boolean;
}
export enum NullValueMode {
Null = 'null',
Ignore = 'connected',
AsZero = 'null as zero',
}
/** View model projection of many time series */
export interface TimeSeriesVMs {
[index: number]: TimeSeriesVM;
length: number;
}

@ -0,0 +1,17 @@
import { Moment } from 'moment';
export interface RawTimeRange {
from: Moment | string;
to: Moment | string;
}
export interface TimeRange {
from: Moment;
to: Moment;
raw: RawTimeRange;
}
export interface IntervalValues {
interval: string; // 10s,5m
intervalMs: number;
}

@ -0,0 +1 @@
export * from './processTimeSeries';

@ -0,0 +1,174 @@
// Libraries
import _ from 'lodash';
// Types
import { TimeSeries, TimeSeriesVMs, NullValueMode, TimeSeriesValue } from '../types';
interface Options {
timeSeries: TimeSeries[];
nullValueMode: NullValueMode;
colorPalette: string[];
}
export function processTimeSeries({ timeSeries, nullValueMode, colorPalette }: Options): TimeSeriesVMs {
const vmSeries = timeSeries.map((item, index) => {
const colorIndex = index % colorPalette.length;
const label = item.target;
const result = [];
// stat defaults
let total = 0;
let max: TimeSeriesValue = -Number.MAX_VALUE;
let min: TimeSeriesValue = Number.MAX_VALUE;
let logmin = Number.MAX_VALUE;
let avg: TimeSeriesValue = null;
let current: TimeSeriesValue = null;
let first: TimeSeriesValue = null;
let delta: TimeSeriesValue = 0;
let diff: TimeSeriesValue = null;
let range: TimeSeriesValue = null;
let timeStep = Number.MAX_VALUE;
let allIsNull = true;
let allIsZero = true;
const ignoreNulls = nullValueMode === NullValueMode.Ignore;
const nullAsZero = nullValueMode === NullValueMode.AsZero;
let currentTime: TimeSeriesValue = null;
let currentValue: TimeSeriesValue = null;
let nonNulls = 0;
let previousTime: TimeSeriesValue = null;
let previousValue = 0;
let previousDeltaUp = true;
for (let i = 0; i < item.datapoints.length; i++) {
currentValue = item.datapoints[i][0];
currentTime = item.datapoints[i][1];
if (typeof currentTime !== 'number') {
continue;
}
if (typeof currentValue !== 'number') {
continue;
}
// Due to missing values we could have different timeStep all along the series
// so we have to find the minimum one (could occur with aggregators such as ZimSum)
if (previousTime !== null && currentTime !== null) {
const currentStep = currentTime - previousTime;
if (currentStep < timeStep) {
timeStep = currentStep;
}
}
previousTime = currentTime;
if (currentValue === null) {
if (ignoreNulls) {
continue;
}
if (nullAsZero) {
currentValue = 0;
}
}
if (currentValue !== null) {
if (_.isNumber(currentValue)) {
total += currentValue;
allIsNull = false;
nonNulls++;
}
if (currentValue > max) {
max = currentValue;
}
if (currentValue < min) {
min = currentValue;
}
if (first === null) {
first = currentValue;
} else {
if (previousValue > currentValue) {
// counter reset
previousDeltaUp = false;
if (i === item.datapoints.length - 1) {
// reset on last
delta += currentValue;
}
} else {
if (previousDeltaUp) {
delta += currentValue - previousValue; // normal increment
} else {
delta += currentValue; // account for counter reset
}
previousDeltaUp = true;
}
}
previousValue = currentValue;
if (currentValue < logmin && currentValue > 0) {
logmin = currentValue;
}
if (currentValue !== 0) {
allIsZero = false;
}
}
result.push([currentTime, currentValue]);
}
if (max === -Number.MAX_VALUE) {
max = null;
}
if (min === Number.MAX_VALUE) {
min = null;
}
if (result.length && !allIsNull) {
avg = total / nonNulls;
current = result[result.length - 1][1];
if (current === null && result.length > 1) {
current = result[result.length - 2][1];
}
}
if (max !== null && min !== null) {
range = max - min;
}
if (current !== null && first !== null) {
diff = current - first;
}
const count = result.length;
return {
data: result,
label: label,
color: colorPalette[colorIndex],
stats: {
total,
min,
max,
current,
logmin,
avg,
diff,
delta,
timeStep,
range,
count,
first,
allIsZero,
allIsNull,
},
};
});
return vmSeries;
}

@ -1,11 +1,9 @@
// Libraries
import $ from 'jquery';
import React, { PureComponent } from 'react';
import 'vendor/flot/jquery.flot';
import 'vendor/flot/jquery.flot.time';
// Types
import { TimeRange, TimeSeriesVMs } from 'app/types';
import { TimeRange, TimeSeriesVMs } from '../../types';
interface GraphProps {
timeSeries: TimeSeriesVMs;
@ -24,7 +22,7 @@ export class Graph extends PureComponent<GraphProps> {
showBars: false,
};
element: HTMLElement;
element: HTMLElement | null;
componentDidUpdate() {
this.draw();
@ -35,6 +33,10 @@ export class Graph extends PureComponent<GraphProps> {
}
draw() {
if (this.element === null) {
return;
}
const { width, timeSeries, timeRange, showLines, showBars, showPoints } = this.props;
if (!width) {
@ -76,7 +78,7 @@ export class Graph extends PureComponent<GraphProps> {
max: max,
label: 'Datetime',
ticks: ticks,
timeformat: time_format(ticks, min, max),
timeformat: timeFormat(ticks, min, max),
},
grid: {
minBorderMargin: 0,
@ -96,6 +98,7 @@ export class Graph extends PureComponent<GraphProps> {
$.plot(this.element, timeSeries, flotOptions);
} catch (err) {
console.log('Graph rendering error', err, flotOptions, timeSeries);
throw new Error('Error rendering panel');
}
}
@ -109,7 +112,7 @@ export class Graph extends PureComponent<GraphProps> {
}
// Copied from graph.ts
function time_format(ticks, min, max) {
function timeFormat(ticks: number, min: number, max: number): string {
if (min && max && ticks) {
const range = max - min;
const secPerTick = range / ticks / 1000;

@ -0,0 +1 @@
export { Graph } from './Graph/Graph';

@ -0,0 +1,18 @@
{
"extends": "../../tsconfig.json",
"include": [
"src/**/*.ts",
"src/**/*.tsx"
],
"exclude": [
"dist"
],
"compilerOptions": {
"rootDir": ".",
"module": "esnext",
"outDir": "dist",
"declaration": true,
"noImplicitAny": true,
"strictNullChecks": true
}
}

@ -0,0 +1,3 @@
{
"extends": "../../tslint.json"
}

@ -1,10 +1,15 @@
package api
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/metrics"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/guardian"
@ -12,6 +17,11 @@ import (
"github.com/grafana/grafana/pkg/util"
)
var client = &http.Client{
Timeout: time.Second * 5,
Transport: &http.Transport{Proxy: http.ProxyFromEnvironment},
}
func GetSharingOptions(c *m.ReqContext) {
c.JSON(200, util.DynMap{
"externalSnapshotURL": setting.ExternalSnapshotUrl,
@ -20,26 +30,79 @@ func GetSharingOptions(c *m.ReqContext) {
})
}
type CreateExternalSnapshotResponse struct {
Key string `json:"key"`
DeleteKey string `json:"deleteKey"`
Url string `json:"url"`
DeleteUrl string `json:"deleteUrl"`
}
func createExternalDashboardSnapshot(cmd m.CreateDashboardSnapshotCommand) (*CreateExternalSnapshotResponse, error) {
var createSnapshotResponse CreateExternalSnapshotResponse
message := map[string]interface{}{
"name": cmd.Name,
"expires": cmd.Expires,
"dashboard": cmd.Dashboard,
}
messageBytes, err := simplejson.NewFromAny(message).Encode()
if err != nil {
return nil, err
}
response, err := client.Post(setting.ExternalSnapshotUrl+"/api/snapshots", "application/json", bytes.NewBuffer(messageBytes))
if err != nil {
return nil, err
}
defer response.Body.Close()
if response.StatusCode != 200 {
return nil, fmt.Errorf("Create external snapshot response status code %d", response.StatusCode)
}
if err := json.NewDecoder(response.Body).Decode(&createSnapshotResponse); err != nil {
return nil, err
}
return &createSnapshotResponse, nil
}
// POST /api/snapshots
func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotCommand) {
if cmd.Name == "" {
cmd.Name = "Unnamed snapshot"
}
var url string
cmd.ExternalUrl = ""
cmd.OrgId = c.OrgId
cmd.UserId = c.UserId
if cmd.External {
// external snapshot ref requires key and delete key
if cmd.Key == "" || cmd.DeleteKey == "" {
c.JsonApiErr(400, "Missing key and delete key for external snapshot", nil)
if !setting.ExternalEnabled {
c.JsonApiErr(403, "External dashboard creation is disabled", nil)
return
}
response, err := createExternalDashboardSnapshot(cmd)
if err != nil {
c.JsonApiErr(500, "Failed to create external snaphost", err)
return
}
cmd.OrgId = -1
cmd.UserId = -1
url = response.Url
cmd.Key = response.Key
cmd.DeleteKey = response.DeleteKey
cmd.ExternalUrl = response.Url
cmd.ExternalDeleteUrl = response.DeleteUrl
cmd.Dashboard = simplejson.New()
metrics.M_Api_Dashboard_Snapshot_External.Inc()
} else {
cmd.Key = util.GetRandomString(32)
cmd.DeleteKey = util.GetRandomString(32)
cmd.OrgId = c.OrgId
cmd.UserId = c.UserId
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
}
@ -51,7 +114,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
c.JSON(200, util.DynMap{
"key": cmd.Key,
"deleteKey": cmd.DeleteKey,
"url": setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key),
"url": url,
"deleteUrl": setting.ToAbsUrl("api/snapshots-delete/" + cmd.DeleteKey),
})
}
@ -91,6 +154,33 @@ func GetDashboardSnapshot(c *m.ReqContext) {
c.JSON(200, dto)
}
func deleteExternalDashboardSnapshot(externalUrl string) error {
response, err := client.Get(externalUrl)
if err != nil {
return err
}
defer response.Body.Close()
if response.StatusCode == 200 {
return nil
}
// Gracefully ignore "snapshot not found" errors as they could have already
// been removed either via the cleanup script or by request.
if response.StatusCode == 500 {
var respJson map[string]interface{}
if err := json.NewDecoder(response.Body).Decode(&respJson); err != nil {
return err
}
if respJson["message"] == "Failed to get dashboard snapshot" {
return nil
}
}
return fmt.Errorf("Unexpected response when deleting external snapshot. Status code: %d", response.StatusCode)
}
// GET /api/snapshots-delete/:deleteKey
func DeleteDashboardSnapshotByDeleteKey(c *m.ReqContext) Response {
key := c.Params(":deleteKey")
@ -102,6 +192,13 @@ func DeleteDashboardSnapshotByDeleteKey(c *m.ReqContext) Response {
return Error(500, "Failed to get dashboard snapshot", err)
}
if query.Result.External {
err := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)
if err != nil {
return Error(500, "Failed to delete external dashboard", err)
}
}
cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}
if err := bus.Dispatch(cmd); err != nil {
@ -138,6 +235,13 @@ func DeleteDashboardSnapshot(c *m.ReqContext) Response {
return Error(403, "Access denied to this snapshot", nil)
}
if query.Result.External {
err := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)
if err != nil {
return Error(500, "Failed to delete external dashboard", err)
}
}
cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}
if err := bus.Dispatch(cmd); err != nil {
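As a usage illustration of the endpoint above, here is a minimal client that posts a snapshot and reads back the keys. The host, API key, and dashboard payload are placeholders, and error handling is kept to the bare minimum:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type snapshotResponse struct {
	Key       string `json:"key"`
	DeleteKey string `json:"deleteKey"`
	Url       string `json:"url"`
	DeleteUrl string `json:"deleteUrl"`
}

func main() {
	// Placeholder dashboard model; a real call would send an exported dashboard JSON.
	payload := map[string]interface{}{
		"name":      "demo snapshot",
		"expires":   3600,
		"dashboard": map[string]interface{}{"title": "demo"},
	}
	body, _ := json.Marshal(payload)

	// Placeholder host and API key.
	req, _ := http.NewRequest("POST", "http://localhost:3000/api/snapshots", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <api key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out snapshotResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("view:", out.Url, "delete:", out.DeleteUrl)
}
```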

@ -1,6 +1,9 @@
package api
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
@ -13,13 +16,17 @@ import (
func TestDashboardSnapshotApiEndpoint(t *testing.T) {
Convey("Given a single snapshot", t, func() {
var externalRequest *http.Request
jsonModel, _ := simplejson.NewJson([]byte(`{"id":100}`))
mockSnapshotResult := &m.DashboardSnapshot{
Id: 1,
Key: "12345",
DeleteKey: "54321",
Dashboard: jsonModel,
Expires: time.Now().Add(time.Duration(1000) * time.Second),
UserId: 999999,
External: true,
}
bus.AddHandler("test", func(query *m.GetDashboardSnapshotQuery) error {
@ -45,13 +52,25 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
return nil
})
setupRemoteServer := func(fn func(http.ResponseWriter, *http.Request)) *httptest.Server {
return httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
fn(rw, r)
}))
}
Convey("When user has editor role and is not in the ACL", func() {
Convey("Should not be able to delete snapshot", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
externalRequest = req
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshot
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
So(sc.resp.Code, ShouldEqual, 403)
So(externalRequest, ShouldBeNil)
})
})
})
@ -59,6 +78,12 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
Convey("When user is anonymous", func() {
Convey("Should be able to delete snapshot by deleteKey", func() {
anonymousUserScenario("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:deleteKey", func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
rw.WriteHeader(200)
externalRequest = req
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshotByDeleteKey
sc.fakeReqWithParams("GET", sc.url, map[string]string{"deleteKey": "12345"}).exec()
@ -67,6 +92,10 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
So(err, ShouldBeNil)
So(respJSON.Get("message").MustString(), ShouldStartWith, "Snapshot deleted")
So(externalRequest.Method, ShouldEqual, http.MethodGet)
So(fmt.Sprintf("http://%s", externalRequest.Host), ShouldEqual, ts.URL)
So(externalRequest.URL.EscapedPath(), ShouldEqual, "/")
})
})
})
@ -79,6 +108,12 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
Convey("Should be able to delete a snapshot", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
rw.WriteHeader(200)
externalRequest = req
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshot
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
@ -87,6 +122,8 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
So(err, ShouldBeNil)
So(respJSON.Get("message").MustString(), ShouldStartWith, "Snapshot deleted")
So(fmt.Sprintf("http://%s", externalRequest.Host), ShouldEqual, ts.URL)
So(externalRequest.URL.EscapedPath(), ShouldEqual, "/")
})
})
})
@ -94,6 +131,7 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
Convey("When user is editor and is the creator of the snapshot", func() {
aclMockResp = []*m.DashboardAclInfoDTO{}
mockSnapshotResult.UserId = TestUserID
mockSnapshotResult.External = false
Convey("Should be able to delete a snapshot", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
@ -108,5 +146,54 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) {
})
})
})
Convey("When deleting an external snapshot", func() {
aclMockResp = []*m.DashboardAclInfoDTO{}
mockSnapshotResult.UserId = TestUserID
Convey("Should gracefully delete local snapshot when remote snapshot has already been removed", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
rw.Write([]byte(`{"message":"Failed to get dashboard snapshot"}`))
rw.WriteHeader(500)
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshot
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})
})
Convey("Should fail to delete local snapshot when an unexpected 500 error occurs", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
rw.WriteHeader(500)
rw.Write([]byte(`{"message":"Unexpected"}`))
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshot
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
So(sc.resp.Code, ShouldEqual, 500)
})
})
Convey("Should fail to delete local snapshot when an unexpected remote error occurs", func() {
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) {
ts := setupRemoteServer(func(rw http.ResponseWriter, req *http.Request) {
rw.WriteHeader(404)
})
mockSnapshotResult.ExternalDeleteUrl = ts.URL
sc.handlerFunc = DeleteDashboardSnapshot
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
So(sc.resp.Code, ShouldEqual, 500)
})
})
})
})
}

@ -164,6 +164,14 @@ func GetPluginMarkdown(c *m.ReqContext) Response {
return Error(500, "Could not get markdown file", err)
}
// fallback try readme
if len(content) == 0 {
content, err = plugins.GetPluginMarkdown(pluginID, "readme")
if err != nil {
return Error(501, "Could not get markdown file", err)
}
}
resp := Respond(200, content)
resp.Header("Content-Type", "text/plain; charset=utf-8")
return resp

@ -206,10 +206,9 @@ func (f *JSONFormatter) processObject(object map[string]interface{}, deltas []di
// Added
for _, delta := range deltas {
switch delta.(type) {
switch delta := delta.(type) {
case *diff.Added:
d := delta.(*diff.Added)
f.printRecursive(d.Position.String(), d.Value, ChangeAdded)
f.printRecursive(delta.Position.String(), delta.Value, ChangeAdded)
}
}
@ -222,9 +221,8 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
if len(matchedDeltas) > 0 {
for _, matchedDelta := range matchedDeltas {
switch matchedDelta.(type) {
switch matchedDelta := matchedDelta.(type) {
case *diff.Object:
d := matchedDelta.(*diff.Object)
switch value.(type) {
case map[string]interface{}:
//ok
@ -238,7 +236,7 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
f.print("{")
f.closeLine()
f.push(positionStr, len(o), false)
f.processObject(o, d.Deltas)
f.processObject(o, matchedDelta.Deltas)
f.pop()
f.newLine(ChangeNil)
f.print("}")
@ -246,7 +244,6 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
f.closeLine()
case *diff.Array:
d := matchedDelta.(*diff.Array)
switch value.(type) {
case []interface{}:
//ok
@ -260,7 +257,7 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
f.print("[")
f.closeLine()
f.push(positionStr, len(a), true)
f.processArray(a, d.Deltas)
f.processArray(a, matchedDelta.Deltas)
f.pop()
f.newLine(ChangeNil)
f.print("]")
@ -268,27 +265,23 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
f.closeLine()
case *diff.Added:
d := matchedDelta.(*diff.Added)
f.printRecursive(positionStr, d.Value, ChangeAdded)
f.printRecursive(positionStr, matchedDelta.Value, ChangeAdded)
f.size[len(f.size)-1]++
case *diff.Modified:
d := matchedDelta.(*diff.Modified)
savedSize := f.size[len(f.size)-1]
f.printRecursive(positionStr, d.OldValue, ChangeOld)
f.printRecursive(positionStr, matchedDelta.OldValue, ChangeOld)
f.size[len(f.size)-1] = savedSize
f.printRecursive(positionStr, d.NewValue, ChangeNew)
f.printRecursive(positionStr, matchedDelta.NewValue, ChangeNew)
case *diff.TextDiff:
savedSize := f.size[len(f.size)-1]
d := matchedDelta.(*diff.TextDiff)
f.printRecursive(positionStr, d.OldValue, ChangeOld)
f.printRecursive(positionStr, matchedDelta.OldValue, ChangeOld)
f.size[len(f.size)-1] = savedSize
f.printRecursive(positionStr, d.NewValue, ChangeNew)
f.printRecursive(positionStr, matchedDelta.NewValue, ChangeNew)
case *diff.Deleted:
d := matchedDelta.(*diff.Deleted)
f.printRecursive(positionStr, d.Value, ChangeDeleted)
f.printRecursive(positionStr, matchedDelta.Value, ChangeDeleted)
default:
return errors.New("Unknown Delta type detected")
@ -305,13 +298,13 @@ func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, posi
func (f *JSONFormatter) searchDeltas(deltas []diff.Delta, position diff.Position) (results []diff.Delta) {
results = make([]diff.Delta, 0)
for _, delta := range deltas {
switch delta.(type) {
switch typedDelta := delta.(type) {
case diff.PostDelta:
if delta.(diff.PostDelta).PostPosition() == position {
if typedDelta.PostPosition() == position {
results = append(results, delta)
}
case diff.PreDelta:
if delta.(diff.PreDelta).PrePosition() == position {
if typedDelta.PrePosition() == position {
results = append(results, delta)
}
default:
@ -417,20 +410,19 @@ func (f *JSONFormatter) print(a string) {
}
func (f *JSONFormatter) printRecursive(name string, value interface{}, change ChangeType) {
switch value.(type) {
switch value := value.(type) {
case map[string]interface{}:
f.newLine(change)
f.printKey(name)
f.print("{")
f.closeLine()
m := value.(map[string]interface{})
size := len(m)
size := len(value)
f.push(name, size, false)
keys := sortKeys(m)
keys := sortKeys(value)
for _, key := range keys {
f.printRecursive(key, m[key], change)
f.printRecursive(key, value[key], change)
}
f.pop()
@ -445,10 +437,9 @@ func (f *JSONFormatter) printRecursive(name string, value interface{}, change Ch
f.print("[")
f.closeLine()
s := value.([]interface{})
size := len(s)
size := len(value)
f.push("", size, true)
for _, item := range s {
for _, item := range value {
f.printRecursive("", item, change)
}
f.pop()
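The pattern applied throughout this file is Go's typed switch: binding the asserted value in the `switch` statement removes the repeated `delta.(*diff.Xxx)` type assertions inside each case. A tiny self-contained sketch of the idiom, with made-up delta types:

```go
package main

import "fmt"

type Added struct{ Value interface{} }
type Deleted struct{ Value interface{} }

func describe(delta interface{}) {
	// The switch variable is re-declared with the concrete type in each case,
	// so no further type assertion is needed inside the case body.
	switch delta := delta.(type) {
	case *Added:
		fmt.Println("added:", delta.Value)
	case *Deleted:
		fmt.Println("deleted:", delta.Value)
	default:
		fmt.Println("unknown delta type")
	}
}

func main() {
	describe(&Added{Value: 42})
	describe(&Deleted{Value: "old"})
}
```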

@ -292,6 +292,8 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
Filter: strings.Replace(a.server.SearchFilter, "%s", ldap.EscapeFilter(username), -1),
}
a.log.Debug("Ldap Search For User Request", "info", spew.Sdump(searchReq))
searchResult, err = a.conn.Search(&searchReq)
if err != nil {
return nil, err

@ -198,17 +198,31 @@ func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error
}
proxies := strings.Split(setting.AuthProxyWhitelist, ",")
sourceIP, _, err := net.SplitHostPort(remoteAddr)
if err != nil {
return err
var proxyObjs []*net.IPNet
for _, proxy := range proxies {
proxyObjs = append(proxyObjs, coerceProxyAddress(proxy))
}
// Compare allowed IP addresses to actual address
for _, proxyIP := range proxies {
if sourceIP == strings.TrimSpace(proxyIP) {
sourceIP, _, _ := net.SplitHostPort(remoteAddr)
sourceObj := net.ParseIP(sourceIP)
for _, proxyObj := range proxyObjs {
if proxyObj.Contains(sourceObj) {
return nil
}
}
return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP)
}
func coerceProxyAddress(proxyAddr string) *net.IPNet {
proxyAddr = strings.TrimSpace(proxyAddr)
if !strings.Contains(proxyAddr, "/") {
proxyAddr = strings.Join([]string{proxyAddr, "32"}, "/")
}
_, network, err := net.ParseCIDR(proxyAddr)
if err != nil {
fmt.Println(err)
}
return network
}

@ -271,6 +271,23 @@ func TestMiddlewareContext(t *testing.T) {
})
})
middlewareScenario("When auth_proxy is enabled and IPv4 request RemoteAddr is not within trusted CIDR block", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.0/24, 2001::0/120"
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.RemoteAddr = "192.168.3.1:12345"
sc.exec()
Convey("should return 407 status code", func() {
So(sc.resp.Code, ShouldEqual, 407)
So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.3.1 is not from the authentication proxy")
})
})
middlewareScenario("When auth_proxy is enabled and IPv6 request RemoteAddr is not trusted", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
@ -288,6 +305,23 @@ func TestMiddlewareContext(t *testing.T) {
})
})
middlewareScenario("When auth_proxy is enabled and IPv6 request RemoteAddr is not within trusted CIDR block", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.0/24, 2001::0/120"
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.RemoteAddr = "[2001:23]:12345"
sc.exec()
Convey("should return 407 status code", func() {
So(sc.resp.Code, ShouldEqual, 407)
So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 2001:23 is not from the authentication proxy")
})
})
middlewareScenario("When auth_proxy is enabled and request RemoteAddr is trusted", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
@ -316,6 +350,62 @@ func TestMiddlewareContext(t *testing.T) {
})
})
middlewareScenario("When auth_proxy is enabled and IPv4 request RemoteAddr is within trusted CIDR block", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.0/24, 2001::0/120"
bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
return nil
})
bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
cmd.Result = &m.User{Id: 33}
return nil
})
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.RemoteAddr = "192.168.1.10:12345"
sc.exec()
Convey("Should init context with user info", func() {
So(sc.context.IsSignedIn, ShouldBeTrue)
So(sc.context.UserId, ShouldEqual, 33)
So(sc.context.OrgId, ShouldEqual, 4)
})
})
middlewareScenario("When auth_proxy is enabled and IPv6 request RemoteAddr is within trusted CIDR block", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.0/24, 2001::0/120"
bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
return nil
})
bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
cmd.Result = &m.User{Id: 33}
return nil
})
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.RemoteAddr = "[2001::23]:12345"
sc.exec()
Convey("Should init context with user info", func() {
So(sc.context.IsSignedIn, ShouldBeTrue)
So(sc.context.UserId, ShouldEqual, 33)
So(sc.context.OrgId, ShouldEqual, 4)
})
})
middlewareScenario("When session exists for previous user, create a new session", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"

@ -16,6 +16,7 @@ type DashboardSnapshot struct {
UserId int64
External bool
ExternalUrl string
ExternalDeleteUrl string
Expires time.Time
Created time.Time
@ -49,6 +50,9 @@ type CreateDashboardSnapshotCommand struct {
// these are passed when storing an external snapshot ref
External bool `json:"external"`
ExternalUrl string `json:"-"`
ExternalDeleteUrl string `json:"-"`
Key string `json:"key"`
DeleteKey string `json:"deleteKey"`

@ -3,10 +3,8 @@ package plugins
import (
"context"
"encoding/json"
"os"
"os/exec"
"path"
"path/filepath"
"time"
"github.com/grafana/grafana-plugin-model/go/datasource"
@ -29,7 +27,6 @@ type DataSourcePlugin struct {
QueryOptions map[string]bool `json:"queryOptions,omitempty"`
BuiltIn bool `json:"builtIn,omitempty"`
Mixed bool `json:"mixed,omitempty"`
HasQueryHelp bool `json:"hasQueryHelp,omitempty"`
Routes []*AppPluginRoute `json:"routes"`
Backend bool `json:"backend,omitempty"`
@ -48,15 +45,6 @@ func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error {
return err
}
// look for help markdown
helpPath := filepath.Join(p.PluginDir, "QUERY_HELP.md")
if _, err := os.Stat(helpPath); os.IsNotExist(err) {
helpPath = filepath.Join(p.PluginDir, "query_help.md")
}
if _, err := os.Stat(helpPath); err == nil {
p.HasQueryHelp = true
}
DataSources[p.Id] = p
return nil
}

@ -166,7 +166,7 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []
var result notifierStateSlice
for _, notification := range query.Result {
not, err := n.createNotifierFor(notification)
not, err := InitNotifier(notification)
if err != nil {
n.log.Error("Could not create notifier", "notifier", notification.Id, "error", err)
continue
@ -195,7 +195,8 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []
return result, nil
}
func (n *notificationService) createNotifierFor(model *m.AlertNotification) (Notifier, error) {
// InitNotifier instantiates a new notifier based on the model
func InitNotifier(model *m.AlertNotification) (Notifier, error) {
notifierPlugin, found := notifierFactories[model.Type]
if !found {
return nil, errors.New("Unsupported notification type")
@ -208,6 +209,7 @@ type NotifierFactory func(notification *m.AlertNotification) (Notifier, error)
var notifierFactories = make(map[string]*NotifierPlugin)
// RegisterNotifier registers a notifier
func RegisterNotifier(plugin *NotifierPlugin) {
notifierFactories[plugin.Type] = plugin
}

@ -32,7 +32,7 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error {
Settings: cmd.Settings,
}
notifiers, err := notifier.createNotifierFor(model)
notifiers, err := InitNotifier(model)
if err != nil {
log.Error2("Failed to create notifier", "error", err.Error())

@ -76,7 +76,7 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO,
return nil, models.ErrDashboardFolderCannotHaveParent
}
if dash.IsFolder && strings.ToLower(dash.Title) == strings.ToLower(models.RootFolderName) {
if dash.IsFolder && strings.EqualFold(dash.Title, models.RootFolderName) {
return nil, models.ErrDashboardFolderNameExists
}
@ -175,7 +175,9 @@ func (dr *dashboardServiceImpl) SaveProvisionedDashboard(dto *SaveDashboardDTO,
dto.User = &models.SignedInUser{
UserId: 0,
OrgRole: models.ROLE_ADMIN,
OrgId: dto.OrgId,
}
cmd, err := dr.buildSaveDashboardCommand(dto, true, false)
if err != nil {
return nil, err

@ -4,6 +4,7 @@ import (
"bytes"
"context"
"fmt"
"io"
"io/ioutil"
"net"
"net/http"
@ -69,11 +70,14 @@ func (ns *NotificationService) sendWebRequestSync(ctx context.Context, webhook *
return err
}
defer resp.Body.Close()
if resp.StatusCode/100 == 2 {
// flushing the body enables the transport to reuse the same connection
io.Copy(ioutil.Discard, resp.Body)
return nil
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err

@ -33,14 +33,18 @@ type MysqlStore struct {
sid string
lock sync.RWMutex
data map[interface{}]interface{}
expiry int64
dirty bool
}
// NewMysqlStore creates and returns a mysql session store.
func NewMysqlStore(c *sql.DB, sid string, kv map[interface{}]interface{}) *MysqlStore {
func NewMysqlStore(c *sql.DB, sid string, kv map[interface{}]interface{}, expiry int64) *MysqlStore {
return &MysqlStore{
c: c,
sid: sid,
data: kv,
expiry: expiry,
dirty: false,
}
}
@ -50,6 +54,7 @@ func (s *MysqlStore) Set(key, val interface{}) error {
defer s.lock.Unlock()
s.data[key] = val
s.dirty = true
return nil
}
@ -67,6 +72,7 @@ func (s *MysqlStore) Delete(key interface{}) error {
defer s.lock.Unlock()
delete(s.data, key)
s.dirty = true
return nil
}
@ -77,13 +83,20 @@ func (s *MysqlStore) ID() string {
// Release releases resource and save data to provider.
func (s *MysqlStore) Release() error {
newExpiry := time.Now().Unix()
if !s.dirty && (s.expiry+60) >= newExpiry {
return nil
}
data, err := session.EncodeGob(s.data)
if err != nil {
return err
}
_, err = s.c.Exec("UPDATE session SET data=?, expiry=? WHERE `key`=?",
data, time.Now().Unix(), s.sid)
data, newExpiry, s.sid)
s.dirty = false
s.expiry = newExpiry
return err
}
@ -93,6 +106,7 @@ func (s *MysqlStore) Flush() error {
defer s.lock.Unlock()
s.data = make(map[interface{}]interface{})
s.dirty = true
return nil
}
@ -117,11 +131,12 @@ func (p *MysqlProvider) Init(expire int64, connStr string) (err error) {
// Read returns raw session store by session ID.
func (p *MysqlProvider) Read(sid string) (session.RawStore, error) {
expiry := time.Now().Unix()
var data []byte
err := p.c.QueryRow("SELECT data FROM session WHERE `key`=?", sid).Scan(&data)
err := p.c.QueryRow("SELECT data,expiry FROM session WHERE `key`=?", sid).Scan(&data, &expiry)
if err == sql.ErrNoRows {
_, err = p.c.Exec("INSERT INTO session(`key`,data,expiry) VALUES(?,?,?)",
sid, "", time.Now().Unix())
sid, "", expiry)
}
if err != nil {
return nil, err
@ -137,7 +152,7 @@ func (p *MysqlProvider) Read(sid string) (session.RawStore, error) {
}
}
return NewMysqlStore(p.c, sid, kv), nil
return NewMysqlStore(p.c, sid, kv, expiry), nil
}
// Exist returns true if session with given ID exists.
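
The new dirty flag and cached expiry let Release skip the UPDATE when nothing changed and the stored expiry is less than a minute old, which cuts write traffic for idle sessions. A simplified sketch of just that guard, with the actual SQL replaced by a stubbed persist callback:

package main

import (
	"fmt"
	"time"
)

// store is a pared-down stand-in for MysqlStore: just the fields the
// Release guard needs.
type store struct {
	data   map[string]string
	expiry int64 // unix seconds recorded at the last persisted write
	dirty  bool  // set by Set/Delete/Flush, cleared on release
}

// release persists the session only when something changed or the stored
// expiry is more than 60 seconds old, mirroring the guard in the diff.
func (s *store) release(persist func(data map[string]string, expiry int64) error) error {
	now := time.Now().Unix()
	if !s.dirty && s.expiry+60 >= now {
		return nil // clean and fresh: skip the UPDATE entirely
	}
	if err := persist(s.data, now); err != nil {
		return err
	}
	s.dirty = false
	s.expiry = now
	return nil
}

func main() {
	s := &store{data: map[string]string{"k": "v"}, expiry: time.Now().Unix()}
	err := s.release(func(d map[string]string, e int64) error {
		fmt.Println("persisted", d, e) // not reached here: session is clean and fresh
		return nil
	})
	fmt.Println("release error:", err)
}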

@ -53,6 +53,8 @@ func CreateDashboardSnapshot(cmd *m.CreateDashboardSnapshotCommand) error {
OrgId: cmd.OrgId,
UserId: cmd.UserId,
External: cmd.External,
ExternalUrl: cmd.ExternalUrl,
ExternalDeleteUrl: cmd.ExternalDeleteUrl,
Dashboard: cmd.Dashboard,
Expires: expires,
Created: time.Now(),

@ -53,14 +53,14 @@ func GetDataSourceByName(query *m.GetDataSourceByNameQuery) error {
}
func GetDataSources(query *m.GetDataSourcesQuery) error {
sess := x.Limit(1000, 0).Where("org_id=?", query.OrgId).Asc("name")
sess := x.Limit(5000, 0).Where("org_id=?", query.OrgId).Asc("name")
query.Result = make([]*m.DataSource, 0)
return sess.Find(&query.Result)
}
func GetAllDataSources(query *m.GetAllDataSourcesQuery) error {
sess := x.Limit(1000, 0).Asc("name")
sess := x.Limit(5000, 0).Asc("name")
query.Result = make([]*m.DataSource, 0)
return sess.Find(&query.Result)

@ -78,14 +78,14 @@ func GetUserLoginAttemptCount(query *m.GetUserLoginAttemptCountQuery) error {
}
func toInt64(i interface{}) int64 {
switch i.(type) {
switch i := i.(type) {
case []byte:
n, _ := strconv.ParseInt(string(i.([]byte)), 10, 64)
n, _ := strconv.ParseInt(string(i), 10, 64)
return n
case int:
return int64(i.(int))
return int64(i)
case int64:
return i.(int64)
return i
}
return 0
}

@ -60,4 +60,8 @@ func addDashboardSnapshotMigrations(mg *Migrator) {
{Name: "external_url", Type: DB_NVarchar, Length: 255, Nullable: false},
{Name: "dashboard", Type: DB_MediumText, Nullable: false},
}))
mg.AddMigration("Add column external_delete_url to dashboard_snapshots table", NewAddColumnMigration(snapshotV5, &Column{
Name: "external_delete_url", Type: DB_NVarchar, Length: 255, Nullable: true,
}))
}

@ -2,12 +2,47 @@ package migrator
type MigrationCondition interface {
Sql(dialect Dialect) (string, []interface{})
IsFulfilled(results []map[string][]byte) bool
}
type IfTableExistsCondition struct {
type ExistsMigrationCondition struct{}
func (c *ExistsMigrationCondition) IsFulfilled(results []map[string][]byte) bool {
return len(results) >= 1
}
type NotExistsMigrationCondition struct{}
func (c *NotExistsMigrationCondition) IsFulfilled(results []map[string][]byte) bool {
return len(results) == 0
}
type IfIndexExistsCondition struct {
ExistsMigrationCondition
TableName string
IndexName string
}
func (c *IfIndexExistsCondition) Sql(dialect Dialect) (string, []interface{}) {
return dialect.IndexCheckSql(c.TableName, c.IndexName)
}
type IfIndexNotExistsCondition struct {
NotExistsMigrationCondition
TableName string
IndexName string
}
func (c *IfIndexNotExistsCondition) Sql(dialect Dialect) (string, []interface{}) {
return dialect.IndexCheckSql(c.TableName, c.IndexName)
}
type IfColumnNotExistsCondition struct {
NotExistsMigrationCondition
TableName string
ColumnName string
}
func (c *IfTableExistsCondition) Sql(dialect Dialect) (string, []interface{}) {
return dialect.TableCheckSql(c.TableName)
func (c *IfColumnNotExistsCondition) Sql(dialect Dialect) (string, []interface{}) {
return dialect.ColumnCheckSql(c.TableName, c.ColumnName)
}
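
Conditions are now split into two halves: Sql returns a dialect-specific existence query and IsFulfilled interprets its result set (rows present for the *Exists variants, no rows for the *NotExists ones). A self-contained sketch of how a caller could evaluate such a condition; the interface shape follows the diff, the SQL string and stubbed result are illustrative:

package main

import "fmt"

// condition mirrors the MigrationCondition split above: Sql produces a
// dialect-specific existence query, IsFulfilled interprets its rows.
type condition interface {
	Sql() (string, []interface{})
	IsFulfilled(results []map[string][]byte) bool
}

// ifColumnNotExists is fulfilled when the check query returns no rows.
type ifColumnNotExists struct{ table, column string }

func (c ifColumnNotExists) Sql() (string, []interface{}) {
	// Illustrative MySQL-flavoured check; each dialect supplies its own SQL.
	return "SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME=? AND COLUMN_NAME=?",
		[]interface{}{c.table, c.column}
}

func (c ifColumnNotExists) IsFulfilled(results []map[string][]byte) bool {
	return len(results) == 0
}

func main() {
	var c condition = ifColumnNotExists{table: "dashboard_snapshot", column: "external_delete_url"}
	sql, args := c.Sql()
	// A real migrator would run sql/args against the database; stub no rows.
	var rows []map[string][]byte
	fmt.Println(sql, args)
	fmt.Println("run the migration?", c.IsFulfilled(rows)) // true: column missing
}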

@ -29,10 +29,12 @@ type Dialect interface {
DropTable(tableName string) string
DropIndexSql(tableName string, index *Index) string
TableCheckSql(tableName string) (string, []interface{})
RenameTable(oldName string, newName string) string
UpdateTableSql(tableName string, columns []*Column) string
IndexCheckSql(tableName, indexName string) (string, []interface{})
ColumnCheckSql(tableName, columnName string) (string, []interface{})
ColString(*Column) string
ColStringNoPk(*Column) string
@ -182,6 +184,10 @@ func (db *BaseDialect) RenameTable(oldName string, newName string) string {
return fmt.Sprintf("ALTER TABLE %s RENAME TO %s", quote(oldName), quote(newName))
}
func (db *BaseDialect) ColumnCheckSql(tableName, columnName string) (string, []interface{}) {
return "", nil
}
func (db *BaseDialect) DropIndexSql(tableName string, index *Index) string {
quote := db.dialect.Quote
name := index.XName(tableName)

@ -85,7 +85,9 @@ type AddColumnMigration struct {
}
func NewAddColumnMigration(table Table, col *Column) *AddColumnMigration {
return &AddColumnMigration{tableName: table.Name, column: col}
m := &AddColumnMigration{tableName: table.Name, column: col}
m.Condition = &IfColumnNotExistsCondition{TableName: table.Name, ColumnName: col.Name}
return m
}
func (m *AddColumnMigration) Table(tableName string) *AddColumnMigration {
@ -109,7 +111,9 @@ type AddIndexMigration struct {
}
func NewAddIndexMigration(table Table, index *Index) *AddIndexMigration {
return &AddIndexMigration{tableName: table.Name, index: index}
m := &AddIndexMigration{tableName: table.Name, index: index}
m.Condition = &IfIndexNotExistsCondition{TableName: table.Name, IndexName: index.XName(table.Name)}
return m
}
func (m *AddIndexMigration) Table(tableName string) *AddIndexMigration {
@ -128,7 +132,9 @@ type DropIndexMigration struct {
}
func NewDropIndexMigration(table Table, index *Index) *DropIndexMigration {
return &DropIndexMigration{tableName: table.Name, index: index}
m := &DropIndexMigration{tableName: table.Name, index: index}
m.Condition = &IfIndexExistsCondition{TableName: table.Name, IndexName: index.XName(table.Name)}
return m
}
func (m *DropIndexMigration) Sql(dialect Dialect) string {
@ -179,11 +185,6 @@ func NewRenameTableMigration(oldName string, newName string) *RenameTableMigrati
return &RenameTableMigration{oldName: oldName, newName: newName}
}
func (m *RenameTableMigration) IfTableExists(tableName string) *RenameTableMigration {
m.Condition = &IfTableExistsCondition{TableName: tableName}
return m
}
func (m *RenameTableMigration) Rename(oldName string, newName string) *RenameTableMigration {
m.oldName = oldName
m.newName = newName
@ -212,11 +213,6 @@ func NewCopyTableDataMigration(targetTable string, sourceTable string, colMap ma
return m
}
func (m *CopyTableDataMigration) IfTableExists(tableName string) *CopyTableDataMigration {
m.Condition = &IfTableExistsCondition{TableName: tableName}
return m
}
func (m *CopyTableDataMigration) Sql(d Dialect) string {
return d.CopyTableData(m.sourceTable, m.targetTable, m.sourceCols, m.targetCols)
}
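
Because the constructors above attach their own guard, every AddColumn/AddIndex/DropIndex migration is idempotent by construction instead of relying on call sites to remember IfTableExists-style helpers. The same design idea in a stripped-down, runnable sketch (names and types here are illustrative, not the migrator package's own):

package main

import "fmt"

// guard is the precondition a migration carries with it.
type guard interface{ fulfilled(alreadyApplied bool) bool }

// notExists is satisfied only when the object is not there yet.
type notExists struct{}

func (notExists) fulfilled(alreadyApplied bool) bool { return !alreadyApplied }

// migration bundles its own guard, set by the constructor rather than
// by every call site (the shape of NewAddColumnMigration above).
type migration struct {
	name string
	cond guard
}

func newAddColumnMigration(table, column string) migration {
	return migration{
		name: fmt.Sprintf("add column %s to %s", column, table),
		cond: notExists{},
	}
}

func run(m migration, alreadyApplied bool) {
	if !m.cond.fulfilled(alreadyApplied) {
		fmt.Println("skip:", m.name)
		return
	}
	fmt.Println("exec:", m.name)
}

func main() {
	m := newAddColumnMigration("dashboard_snapshot", "external_delete_url")
	run(m, false) // first run: executes
	run(m, true)  // re-run against an upgraded schema: skipped
}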

@ -94,8 +94,6 @@ func (mg *Migrator) Start() error {
Timestamp: time.Now(),
}
mg.Logger.Debug("Executing", "sql", sql)
err := mg.inTransaction(func(sess *xorm.Session) error {
err := mg.exec(m, sess)
if err != nil {
@ -123,18 +121,30 @@ func (mg *Migrator) exec(m Migration, sess *xorm.Session) error {
condition := m.GetCondition()
if condition != nil {
sql, args := condition.Sql(mg.Dialect)
results, err := sess.SQL(sql).Query(args...)
if err != nil || len(results) == 0 {
mg.Logger.Debug("Skipping migration condition not fulfilled", "id", m.Id())
return sess.Rollback()
if sql != "" {
mg.Logger.Debug("Executing migration condition sql", "id", m.Id(), "sql", sql, "args", args)
results, err := sess.SQL(sql, args...).Query()
if err != nil {
mg.Logger.Error("Executing migration condition failed", "id", m.Id(), "error", err)
return err
}
if !condition.IsFulfilled(results) {
mg.Logger.Warn("Skipping migration: Already executed, but not recorded in migration log", "id", m.Id())
return nil
}
}
}
var err error
if codeMigration, ok := m.(CodeMigration); ok {
mg.Logger.Debug("Executing code migration", "id", m.Id())
err = codeMigration.Exec(sess, mg)
} else {
_, err = sess.Exec(m.Sql(mg.Dialect))
sql := m.Sql(mg.Dialect)
mg.Logger.Debug("Executing sql migration", "id", m.Id(), "sql", sql)
_, err = sess.Exec(sql)
}
if err != nil {

@ -90,12 +90,6 @@ func (db *Mysql) SqlType(c *Column) string {
return res
}
func (db *Mysql) TableCheckSql(tableName string) (string, []interface{}) {
args := []interface{}{"grafana", tableName}
sql := "SELECT `TABLE_NAME` from `INFORMATION_SCHEMA`.`TABLES` WHERE `TABLE_SCHEMA`=? and `TABLE_NAME`=?"
return sql, args
}
func (db *Mysql) UpdateTableSql(tableName string, columns []*Column) string {
var statements = []string{}
@ -108,6 +102,18 @@ func (db *Mysql) UpdateTableSql(tableName string, columns []*Column) string {
return "ALTER TABLE " + db.Quote(tableName) + " " + strings.Join(statements, ", ") + ";"
}
func (db *Mysql) IndexCheckSql(tableName, indexName string) (string, []interface{}) {
args := []interface{}{tableName, indexName}
sql := "SELECT 1 FROM " + db.Quote("INFORMATION_SCHEMA") + "." + db.Quote("STATISTICS") + " WHERE " + db.Quote("TABLE_SCHEMA") + " = DATABASE() AND " + db.Quote("TABLE_NAME") + "=? AND " + db.Quote("INDEX_NAME") + "=?"
return sql, args
}
func (db *Mysql) ColumnCheckSql(tableName, columnName string) (string, []interface{}) {
args := []interface{}{tableName, columnName}
sql := "SELECT 1 FROM " + db.Quote("INFORMATION_SCHEMA") + "." + db.Quote("COLUMNS") + " WHERE " + db.Quote("TABLE_SCHEMA") + " = DATABASE() AND " + db.Quote("TABLE_NAME") + "=? AND " + db.Quote("COLUMN_NAME") + "=?"
return sql, args
}
func (db *Mysql) CleanDB() error {
tables, _ := db.engine.DBMetas()
sess := db.engine.NewSession()

@ -101,9 +101,9 @@ func (db *Postgres) SqlType(c *Column) string {
return res
}
func (db *Postgres) TableCheckSql(tableName string) (string, []interface{}) {
args := []interface{}{"grafana", tableName}
sql := "SELECT table_name FROM information_schema.tables WHERE table_schema=? and table_name=?"
func (db *Postgres) IndexCheckSql(tableName, indexName string) (string, []interface{}) {
args := []interface{}{tableName, indexName}
sql := "SELECT 1 FROM " + db.Quote("pg_indexes") + " WHERE" + db.Quote("tablename") + "=? AND " + db.Quote("indexname") + "=?"
return sql, args
}

@ -68,9 +68,10 @@ func (db *Sqlite3) SqlType(c *Column) string {
}
}
func (db *Sqlite3) TableCheckSql(tableName string) (string, []interface{}) {
args := []interface{}{tableName}
return "SELECT name FROM sqlite_master WHERE type='table' and name = ?", args
func (db *Sqlite3) IndexCheckSql(tableName, indexName string) (string, []interface{}) {
args := []interface{}{tableName, indexName}
sql := "SELECT 1 FROM " + db.Quote("sqlite_master") + " WHERE " + db.Quote("type") + "='index' AND " + db.Quote("tbl_name") + "=? AND " + db.Quote("name") + "=?"
return sql, args
}
func (db *Sqlite3) DropIndexSql(tableName string, index *Index) string {

@ -243,7 +243,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) {
ss.dbCfg.Path = filepath.Join(ss.Cfg.DataPath, ss.dbCfg.Path)
}
os.MkdirAll(path.Dir(ss.dbCfg.Path), os.ModePerm)
cnnstr = "file:" + ss.dbCfg.Path + "?cache=shared&mode=rwc"
cnnstr = fmt.Sprintf("file:%s?cache=%s&mode=rwc", ss.dbCfg.Path, ss.dbCfg.CacheMode)
default:
return "", fmt.Errorf("Unknown database type: %s", ss.dbCfg.Type)
}
@ -319,6 +319,8 @@ func (ss *SqlStore) readConfig() {
ss.dbCfg.ClientCertPath = sec.Key("client_cert_path").String()
ss.dbCfg.ServerCertName = sec.Key("server_cert_name").String()
ss.dbCfg.Path = sec.Key("path").MustString("data/grafana.db")
ss.dbCfg.CacheMode = sec.Key("cache_mode").MustString("private")
}
func InitTestDB(t *testing.T) *SqlStore {
@ -391,7 +393,13 @@ func IsTestDbPostgres() bool {
}
type DatabaseConfig struct {
Type, Host, Name, User, Pwd, Path, SslMode string
Type string
Host string
Name string
User string
Pwd string
Path string
SslMode string
CaCertPath string
ClientKeyPath string
ClientCertPath string
@ -400,4 +408,5 @@ type DatabaseConfig struct {
MaxOpenConn int
MaxIdleConn int
ConnMaxLifetime int
CacheMode string
}
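
cache_mode feeds straight into the SQLite DSN, so the shipped default of private gives each connection its own page cache while shared restores the previous behaviour. A small sketch of assembling that connection string; the helper name is illustrative, the format string and default mirror the diff:

package main

import "fmt"

// buildSqliteDSN mirrors the connection-string change above: the cache mode
// comes from the new cache_mode setting instead of being hard-coded.
func buildSqliteDSN(path, cacheMode string) string {
	if cacheMode == "" {
		cacheMode = "private" // default read via sec.Key("cache_mode").MustString("private")
	}
	return fmt.Sprintf("file:%s?cache=%s&mode=rwc", path, cacheMode)
}

func main() {
	fmt.Println(buildSqliteDSN("data/grafana.db", ""))       // file:data/grafana.db?cache=private&mode=rwc
	fmt.Println(buildSqliteDSN("data/grafana.db", "shared")) // file:data/grafana.db?cache=shared&mode=rwc
}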

@ -345,8 +345,12 @@ func GetUserOrgList(query *m.GetUserOrgListQuery) error {
return err
}
func newSignedInUserCacheKey(orgID, userID int64) string {
return fmt.Sprintf("signed-in-user-%d-%d", userID, orgID)
}
func (ss *SqlStore) GetSignedInUserWithCache(query *m.GetSignedInUserQuery) error {
cacheKey := fmt.Sprintf("signed-in-user-%d-%d", query.UserId, query.OrgId)
cacheKey := newSignedInUserCacheKey(query.OrgId, query.UserId)
if cached, found := ss.CacheService.Get(cacheKey); found {
query.Result = cached.(*m.SignedInUser)
return nil
@ -357,6 +361,7 @@ func (ss *SqlStore) GetSignedInUserWithCache(query *m.GetSignedInUserQuery) erro
return err
}
cacheKey = newSignedInUserCacheKey(query.Result.OrgId, query.UserId)
ss.CacheService.Set(cacheKey, query.Result, time.Second*5)
return nil
}
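
The cache key is now built in one place from (orgID, userID), and after a miss the result is re-keyed with the org the lookup actually resolved, which is what makes OrgId=0 ("use the active org") requests cacheable. A hedged, generic sketch of that read-through pattern with the 5-second TTL; the cache and user types are stand-ins, not Grafana's CacheService:

package main

import (
	"fmt"
	"sync"
	"time"
)

type user struct{ OrgID, UserID int64 }

type entry struct {
	val user
	exp time.Time
}

// cache is a tiny stand-in for a TTL cache service.
type cache struct {
	mu sync.Mutex
	m  map[string]entry
}

func (c *cache) get(k string) (user, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	e, ok := c.m[k]
	if !ok || time.Now().After(e.exp) {
		return user{}, false
	}
	return e.val, true
}

func (c *cache) set(k string, v user, ttl time.Duration) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[k] = entry{val: v, exp: time.Now().Add(ttl)}
}

func cacheKey(orgID, userID int64) string {
	return fmt.Sprintf("signed-in-user-%d-%d", userID, orgID)
}

// getSignedInUser resolves a user, caching under the org id the lookup
// actually returned (orgID==0 means "use the user's active org").
func getSignedInUser(c *cache, orgID, userID int64, lookup func() user) user {
	if u, ok := c.get(cacheKey(orgID, userID)); ok {
		return u
	}
	u := lookup()
	c.set(cacheKey(u.OrgID, u.UserID), u, 5*time.Second)
	return u
}

func main() {
	c := &cache{m: map[string]entry{}}
	u := getSignedInUser(c, 0, 42, func() user { return user{OrgID: 7, UserID: 42} })
	fmt.Println(u, "cached under", cacheKey(7, 42))
}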

@ -13,7 +13,7 @@ import (
func TestUserDataAccess(t *testing.T) {
Convey("Testing DB", t, func() {
InitTestDB(t)
ss := InitTestDB(t)
Convey("Creating a user", func() {
cmd := &m.CreateUserCommand{
@ -153,6 +153,27 @@ func TestUserDataAccess(t *testing.T) {
So(prefsQuery.Result.UserId, ShouldEqual, 0)
})
})
Convey("when retreiving signed in user for orgId=0 result should return active org id", func() {
ss.CacheService.Flush()
query := &m.GetSignedInUserQuery{OrgId: users[1].OrgId, UserId: users[1].Id}
err := ss.GetSignedInUserWithCache(query)
So(err, ShouldBeNil)
So(query.Result, ShouldNotBeNil)
So(query.OrgId, ShouldEqual, users[1].OrgId)
err = SetUsingOrg(&m.SetUsingOrgCommand{UserId: users[1].Id, OrgId: users[0].OrgId})
So(err, ShouldBeNil)
query = &m.GetSignedInUserQuery{OrgId: 0, UserId: users[1].Id}
err = ss.GetSignedInUserWithCache(query)
So(err, ShouldBeNil)
So(query.Result, ShouldNotBeNil)
So(query.Result.OrgId, ShouldEqual, users[0].OrgId)
cacheKey := newSignedInUserCacheKey(query.Result.OrgId, query.UserId)
_, found := ss.CacheService.Get(cacheKey)
So(found, ShouldBeTrue)
})
})
})

@ -15,6 +15,7 @@ type OAuthInfo struct {
TlsClientKey string
TlsClientCa string
TlsSkipVerify bool
SendClientCredentialsViaPost bool
}
type OAuther struct {

@ -79,12 +79,18 @@ func NewOAuthService() {
TlsClientKey: sec.Key("tls_client_key").String(),
TlsClientCa: sec.Key("tls_client_ca").String(),
TlsSkipVerify: sec.Key("tls_skip_verify_insecure").MustBool(),
SendClientCredentialsViaPost: sec.Key("send_client_credentials_via_post").MustBool(),
}
if !info.Enabled {
continue
}
// Handle clients that do not properly support Basic auth headers and require client_id/client_secret to be passed in the POST payload
if info.SendClientCredentialsViaPost {
oauth2.RegisterBrokenAuthHeaderProvider(info.TokenUrl)
}
if name == "grafananet" {
name = grafanaCom
}
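
send_client_credentials_via_post exists because some OAuth token endpoints reject HTTP Basic auth and only accept client_id/client_secret in the request body; registering the token URL tells golang.org/x/oauth2 to use the POST style for that endpoint. A minimal sketch of wiring a config flag to that call; the struct and endpoint URL are illustrative, and newer x/oauth2 releases deprecate the call in favour of Endpoint.AuthStyle:

package main

import (
	"fmt"

	"golang.org/x/oauth2"
)

// oauthInfo is a cut-down stand-in for Grafana's OAuthInfo.
type oauthInfo struct {
	TokenURL                     string
	SendClientCredentialsViaPost bool
}

// applyAuthStyle mirrors the conditional in the diff: registering the token
// endpoint makes x/oauth2 send client_id/client_secret in the POST body
// instead of a Basic auth header. (Newer x/oauth2 releases deprecate this
// call in favour of setting Endpoint.AuthStyle = oauth2.AuthStyleInParams.)
func applyAuthStyle(info oauthInfo) {
	if info.SendClientCredentialsViaPost {
		oauth2.RegisterBrokenAuthHeaderProvider(info.TokenURL)
	}
}

func main() {
	info := oauthInfo{
		TokenURL:                     "https://auth.example.com/oauth/token", // illustrative endpoint
		SendClientCredentialsViaPost: true,                                   // send_client_credentials_via_post = true
	}
	applyAuthStyle(info)
	fmt.Println("token endpoint registered for POST-style client credentials")
}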

@ -3,7 +3,6 @@ package cloudwatch
import (
"fmt"
"os"
"strings"
"sync"
"time"
@ -43,7 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
secretAccessKey := ""
sessionToken := ""
var expiration *time.Time = nil
if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
if dsInfo.AuthType == "arn" {
params := &sts.AssumeRoleInput{
RoleArn: aws.String(dsInfo.AssumeRoleArn),
RoleSessionName: aws.String("GrafanaSession"),

@ -16,6 +16,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
rawQuery := model.Get("query").MustString("")
useRawQuery := model.Get("rawQuery").MustBool(false)
alias := model.Get("alias").MustString("")
tz := model.Get("tz").MustString("")
measurement := model.Get("measurement").MustString("")
@ -55,6 +56,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
Interval: parsedInterval,
Alias: alias,
UseRawQuery: useRawQuery,
Tz: tz,
}, nil
}

@ -41,6 +41,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
}
],
"measurement": "logins.count",
"tz": "Europe/Paris",
"policy": "default",
"refId": "B",
"resultFormat": "time_series",
@ -115,6 +116,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.GroupBy), ShouldEqual, 3)
So(len(res.Selects), ShouldEqual, 3)
So(len(res.Tags), ShouldEqual, 2)
So(res.Tz, ShouldEqual, "Europe/Paris")
So(res.Interval, ShouldEqual, time.Second*20)
So(res.Alias, ShouldEqual, "serie alias")
})

@ -13,6 +13,7 @@ type Query struct {
UseRawQuery bool
Alias string
Interval time.Duration
Tz string
}
type Tag struct {

@ -26,6 +26,7 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
res += query.renderWhereClause()
res += query.renderTimeFilter(queryContext)
res += query.renderGroupBy(queryContext)
res += query.renderTz()
}
calculator := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
@ -154,3 +155,12 @@ func (query *Query) renderGroupBy(queryContext *tsdb.TsdbQuery) string {
return groupBy
}
func (query *Query) renderTz() string {
tz := query.Tz
if tz == "" {
return ""
} else {
return fmt.Sprintf(" tz('%s')", tz)
}
}
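
End to end, the tz value parsed from the query model becomes a trailing tz('<location>') clause on the generated InfluxQL, shifting GROUP BY time() bucket boundaries into that time zone. A tiny sketch of the rendering step on its own; the base query string is taken from the test below:

package main

import "fmt"

// renderTz appends an InfluxQL tz() clause when a time zone is set,
// matching the renderTz added in the diff.
func renderTz(tz string) string {
	if tz == "" {
		return ""
	}
	return fmt.Sprintf(" tz('%s')", tz)
}

func main() {
	base := `SELECT mean("value") FROM "cpu" WHERE time > now() - 5m GROUP BY time(5s)`
	fmt.Println(base + renderTz("Europe/Paris"))
	// -> SELECT mean("value") FROM "cpu" WHERE time > now() - 5m GROUP BY time(5s) tz('Europe/Paris')
}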

@ -47,6 +47,20 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "policy"."cpu" WHERE time > now() - 5m GROUP BY time(10s) fill(null)`)
})
Convey("can build query with tz", func() {
query := &Query{
Selects: []*Select{{*qp1, *qp2}},
Measurement: "cpu",
GroupBy: []*QueryPart{groupBy1},
Tz: "Europe/Paris",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
So(err, ShouldBeNil)
So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "cpu" WHERE time > now() - 5m GROUP BY time(5s) tz('Europe/Paris')`)
})
Convey("can build query with group bys", func() {
query := &Query{
Selects: []*Select{{*qp1, *qp2}},

@ -86,11 +86,11 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339Nano), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339Nano)), nil
case "__timeFrom":
return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339Nano)), nil
case "__timeTo":
return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339Nano)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

@ -41,7 +41,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339Nano), to.Format(time.RFC3339Nano)))
})
Convey("interpolate __timeFrom function", func() {
@ -138,7 +138,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339Nano), to.Format(time.RFC3339Nano)))
})
Convey("interpolate __unixEpochFilter function", func() {
@ -158,7 +158,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339Nano), to.Format(time.RFC3339Nano)))
})
Convey("interpolate __unixEpochFilter function", func() {
@ -168,5 +168,22 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
})
Convey("Given a time range between 1960-02-01 07:00:00.5 and 1980-02-03 08:00:00.5", func() {
from := time.Date(1960, 2, 1, 7, 0, 0, 500e6, time.UTC)
to := time.Date(1980, 2, 3, 8, 0, 0, 500e6, time.UTC)
timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10))
So(from.Format(time.RFC3339Nano), ShouldEqual, "1960-02-01T07:00:00.5Z")
So(to.Format(time.RFC3339Nano), ShouldEqual, "1980-02-03T08:00:00.5Z")
Convey("interpolate __timeFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339Nano), to.Format(time.RFC3339Nano)))
})
})
})
}

@ -28,8 +28,10 @@ class CustomScrollbar extends PureComponent<Props> {
<Scrollbars
className={customClassName}
autoHeight={true}
autoHeightMin={'inherit'}
autoHeightMax={'inherit'}
// These autoHeightMin & autoHeightMax options affect Firefox and Chrome differently.
// Before, these were set to 'inherit', but that caused cut-off legends in Firefox.
autoHeightMin={'0'}
autoHeightMax={'100%'}
renderTrackHorizontal={props => <div {...props} className="track-horizontal" />}
renderTrackVertical={props => <div {...props} className="track-vertical" />}
renderThumbHorizontal={props => <div {...props} className="thumb-horizontal" />}

@ -6,8 +6,8 @@ exports[`CustomScrollbar renders correctly 1`] = `
style={
Object {
"height": "auto",
"maxHeight": "inherit",
"minHeight": "inherit",
"maxHeight": "100%",
"minHeight": "0",
"overflow": "hidden",
"position": "relative",
"width": "100%",
@ -23,8 +23,8 @@ exports[`CustomScrollbar renders correctly 1`] = `
"left": undefined,
"marginBottom": 0,
"marginRight": 0,
"maxHeight": "calc(inherit + 0px)",
"minHeight": "calc(inherit + 0px)",
"maxHeight": "calc(100% + 0px)",
"minHeight": "calc(0 + 0px)",
"overflow": "scroll",
"position": "relative",
"right": undefined,

@ -24,12 +24,14 @@ class EmptyListCTA extends Component<Props, any> {
<i className={buttonIcon} />
{buttonTitle}
</a>
{proTip && (
<div className="empty-list-cta__pro-tip">
<i className="fa fa-rocket" /> ProTip: {proTip}
<a className="text-link empty-list-cta__pro-tip-link" href={proTipLink} target={proTipTarget}>
{proTipLinkTitle}
</a>
</div>
)}
</div>
);
}

@ -0,0 +1,44 @@
import { Component } from 'react';
interface ErrorInfo {
componentStack: string;
}
interface RenderProps {
error: Error;
errorInfo: ErrorInfo;
}
interface Props {
children: (r: RenderProps) => JSX.Element;
}
interface State {
error: Error;
errorInfo: ErrorInfo;
}
class ErrorBoundary extends Component<Props, State> {
readonly state: State = {
error: null,
errorInfo: null,
};
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
this.setState({
error: error,
errorInfo: errorInfo
});
}
render() {
const { children } = this.props;
const { error, errorInfo } = this.state;
return children({
error,
errorInfo,
});
}
}
export default ErrorBoundary;

@ -1,43 +0,0 @@
import React, { PureComponent, ReactNode, ReactElement } from 'react';
import { Label } from './Label';
import { uniqueId } from 'lodash';
interface Props {
label?: ReactNode;
labelClassName?: string;
id?: string;
children: ReactElement<any>;
}
export class Element extends PureComponent<Props> {
elementId: string = this.props.id || uniqueId('form-element-');
get elementLabel() {
const { label, labelClassName } = this.props;
if (label) {
return (
<Label htmlFor={this.elementId} className={labelClassName}>
{label}
</Label>
);
}
return null;
}
get children() {
const { children } = this.props;
return React.cloneElement(children, { id: this.elementId });
}
render() {
return (
<div className="our-custom-wrapper-class">
{this.elementLabel}
{this.children}
</div>
);
}
}

@ -1,19 +0,0 @@
import React, { PureComponent, ReactNode } from 'react';
interface Props {
children: ReactNode;
htmlFor?: string;
className?: string;
}
export class Label extends PureComponent<Props> {
render() {
const { children, htmlFor, className } = this.props;
return (
<label className={`custom-label-class ${className || ''}`} htmlFor={htmlFor}>
{children}
</label>
);
}
}

@ -1,3 +1 @@
export { Element } from './Element';
export { Input } from './Input';
export { Label } from './Label';

@ -0,0 +1,83 @@
import React, { PureComponent } from 'react';
import Remarkable from 'remarkable';
import { getBackendSrv } from '../../services/backend_srv';
interface Props {
plugin: {
name: string;
id: string;
};
type: string;
}
interface State {
isError: boolean;
isLoading: boolean;
help: string;
}
export class PluginHelp extends PureComponent<Props, State> {
state = {
isError: false,
isLoading: false,
help: '',
};
componentDidMount(): void {
this.loadHelp();
}
constructPlaceholderInfo() {
return 'No plugin help or readme markdown file was found';
}
loadHelp = () => {
const { plugin, type } = this.props;
this.setState({ isLoading: true });
getBackendSrv()
.get(`/api/plugins/${plugin.id}/markdown/${type}`)
.then(response => {
const markdown = new Remarkable();
const helpHtml = markdown.render(response);
if (response === '' && type === 'help') {
this.setState({
isError: false,
isLoading: false,
help: this.constructPlaceholderInfo(),
});
} else {
this.setState({
isError: false,
isLoading: false,
help: helpHtml,
});
}
})
.catch(() => {
this.setState({
isError: true,
isLoading: false,
});
});
};
render() {
const { type } = this.props;
const { isError, isLoading, help } = this.state;
if (isLoading) {
return <h2>Loading help...</h2>;
}
if (isError) {
return <h3>Error occurred when loading help</h3>;
}
if (type === 'panel_help' && help === '') {
}
return <div className="markdown-html" dangerouslySetInnerHTML={{ __html: help }} />;
}
}

@ -52,7 +52,11 @@ export const ToggleButton: SFC<ToggleButtonProps> = ({
);
if (tooltip) {
return <Tooltip content={tooltip}>{button}</Tooltip>;
return (
<Tooltip content={tooltip} placement="bottom">
{button}
</Tooltip>
);
} else {
return button;
}

@ -3,6 +3,11 @@ import Portal from 'app/core/components/Portal/Portal';
import { Manager, Popper as ReactPopper, Reference } from 'react-popper';
import Transition from 'react-transition-group/Transition';
export enum Themes {
Default = 'popper__background--default',
Error = 'popper__background--error',
}
const defaultTransitionStyles = {
transition: 'opacity 200ms linear',
opacity: 0,
@ -21,13 +26,16 @@ interface Props {
placement?: any;
content: string | ((props: any) => JSX.Element);
refClassName?: string;
theme?: Themes;
}
class Popper extends PureComponent<Props> {
render() {
const { children, renderContent, show, placement, refClassName } = this.props;
const { children, renderContent, show, placement, refClassName, theme } = this.props;
const { content } = this.props;
const popperBackgroundClassName = 'popper__background' + (theme ? ' ' + theme : '');
return (
<Manager>
<Reference>
@ -53,7 +61,7 @@ class Popper extends PureComponent<Props> {
data-placement={placement}
className="popper"
>
<div className="popper__background">
<div className={popperBackgroundClassName}>
{renderContent(content)}
<div ref={arrowProps.ref} data-placement={placement} className="popper__arrow" />
</div>

@ -1,5 +1,5 @@
import React from 'react';
import { Themes } from './Popper';
export interface UsingPopperProps {
showPopper: (prevState: object) => void;
hidePopper: (prevState: object) => void;
@ -9,6 +9,7 @@ export interface UsingPopperProps {
content: string | ((props: any) => JSX.Element);
className?: string;
refClassName?: string;
theme?: Themes;
}
interface Props {
@ -16,6 +17,7 @@ interface Props {
className?: string;
refClassName?: string;
content: string | ((props: any) => JSX.Element);
theme?: Themes;
}
interface State {
@ -71,7 +73,6 @@ export default function withPopper(WrappedComponent) {
render() {
const { show, placement } = this.state;
const className = this.props.className || '';
return (
<WrappedComponent
{...this.props}

@ -50,7 +50,7 @@ const DEFAULT_THEME_LIGHT = 'ace/theme/textmate';
const DEFAULT_MODE = 'text';
const DEFAULT_MAX_LINES = 10;
const DEFAULT_TAB_SIZE = 2;
const DEFAULT_BEHAVIOURS = true;
const DEFAULT_BEHAVIORS = true;
const DEFAULT_SNIPPETS = true;
const editorTemplate = `<div></div>`;
@ -61,7 +61,7 @@ function link(scope, elem, attrs) {
const maxLines = attrs.maxLines || DEFAULT_MAX_LINES;
const showGutter = attrs.showGutter !== undefined;
const tabSize = attrs.tabSize || DEFAULT_TAB_SIZE;
const behavioursEnabled = attrs.behavioursEnabled ? attrs.behavioursEnabled === 'true' : DEFAULT_BEHAVIOURS;
const behavioursEnabled = attrs.behavioursEnabled ? attrs.behavioursEnabled === 'true' : DEFAULT_BEHAVIORS;
const snippetsEnabled = attrs.snippetsEnabled ? attrs.snippetsEnabled === 'true' : DEFAULT_SNIPPETS;
// Initialize editor

@ -1,5 +1,5 @@
// Based on work https://github.com/mohsen1/json-formatter-js
// Licence MIT, Copyright (c) 2015 Mohsen Azimi
// License MIT, Copyright (c) 2015 Mohsen Azimi
/*
* Escapes `"` characters from string

@ -1,5 +1,5 @@
// Based on work https://github.com/mohsen1/json-formatter-js
// Licence MIT, Copyright (c) 2015 Mohsen Azimi
// License MIT, Copyright (c) 2015 Mohsen Azimi
import { isObject, getObjectName, getType, getValuePreview, cssClass, createElement } from './helpers';

@ -36,7 +36,7 @@ describe('Render', () => {
expect(wrapper).toMatchSnapshot();
});
it('should render organisation switcher', () => {
it('should render organization switcher', () => {
const wrapper = setup({
link: {
showOrgSwitcher: true,

@ -73,7 +73,7 @@ exports[`Render should render component 1`] = `
</div>
`;
exports[`Render should render organisation switcher 1`] = `
exports[`Render should render organization switcher 1`] = `
<div
className="sidemenu-item dropdown dropup"
>

@ -69,7 +69,7 @@ function bootstrapTagsinput() {
},
});
select.on('itemAdded', event => {
select.on('itemAdded', (event: any) => {
if (scope.model.indexOf(event.item) === -1) {
scope.model.push(event.item);
if (scope.onTagsUpdated) {
@ -85,7 +85,7 @@ function bootstrapTagsinput() {
setColor(event.item, tagElement);
});
select.on('itemRemoved', event => {
select.on('itemRemoved', (event: any) => {
const idx = scope.model.indexOf(event.item);
if (idx !== -1) {
scope.model.splice(idx, 1);

@ -1,7 +1,7 @@
import _ from 'lodash';
import config from 'app/core/config';
import { Observable } from 'rxjs/Observable';
import { Observable } from 'rxjs';
export class LiveSrv {
conn: any;

@ -2,14 +2,23 @@ import _ from 'lodash';
import { TimeSeries } from 'app/core/core';
import colors, { getThemeColor } from 'app/core/utils/colors';
/**
* Mapping of log level abbreviations to canonical log levels.
* Supported levels are reduced to limit color variation.
*/
export enum LogLevel {
emerg = 'critical',
alert = 'critical',
crit = 'critical',
critical = 'critical',
warn = 'warning',
warning = 'warning',
err = 'error',
eror = 'error',
error = 'error',
info = 'info',
notice = 'info',
dbug = 'debug',
debug = 'debug',
trace = 'trace',
unkown = 'unkown',
@ -81,7 +90,9 @@ export interface LogsStream {
export interface LogsStreamEntry {
line: string;
timestamp: string;
ts: string;
// Legacy, was renamed to ts
timestamp?: string;
}
export interface LogsStreamLabels {

@ -5,7 +5,7 @@ import { DashboardModel } from 'app/features/dashboard/dashboard_model';
export class BackendSrv {
private inFlightRequests = {};
private HTTP_REQUEST_CANCELLED = -1;
private HTTP_REQUEST_CANCELED = -1;
private noBackendCache: boolean;
/** @ngInject */
@ -178,7 +178,7 @@ export class BackendSrv {
return response;
})
.catch(err => {
if (err.status === this.HTTP_REQUEST_CANCELLED) {
if (err.status === this.HTTP_REQUEST_CANCELED) {
throw { err, cancelled: true };
}
