Explorar el Código

Merge branch 'master' into react-panels-step1

Torkel Ödegaard hace 7 años
padre
commit
8ba865bf57
Se han modificado 100 ficheros con 1587 adiciones y 845 borrados
  1. 3 3
      .circleci/config.yml
  2. 0 22
      .github/CONTRIBUTING.md
  3. 1 0
      .gitignore
  4. 25 2
      CHANGELOG.md
  5. 56 0
      CONTRIBUTING.md
  6. 14 2
      Gopkg.lock
  7. 4 0
      Gopkg.toml
  8. 2 2
      PLUGIN_DEV.md
  9. 1 1
      devenv/docker/blocks/influxdb/docker-compose.yaml
  10. 2 2
      docs/sources/administration/permissions.md
  11. 6 3
      docs/sources/administration/provisioning.md
  12. 3 0
      docs/sources/auth/generic-oauth.md
  13. 3 0
      docs/sources/auth/github.md
  14. 3 0
      docs/sources/auth/gitlab.md
  15. 3 0
      docs/sources/auth/google.md
  16. 1 1
      docs/sources/auth/overview.md
  17. 1 1
      docs/sources/contribute/cla.md
  18. 8 1
      docs/sources/features/datasources/mssql.md
  19. 8 1
      docs/sources/features/datasources/mysql.md
  20. 1 1
      docs/sources/features/datasources/opentsdb.md
  21. 6 0
      docs/sources/features/datasources/postgres.md
  22. 75 30
      docs/sources/features/datasources/stackdriver.md
  23. 1 1
      docs/sources/features/panels/alertlist.md
  24. 1 1
      docs/sources/features/panels/heatmap.md
  25. 1 1
      docs/sources/guides/whats-new-in-v2-5.md
  26. 1 1
      docs/sources/guides/whats-new-in-v2.md
  27. 2 2
      docs/sources/guides/whats-new-in-v3-1.md
  28. 1 1
      docs/sources/guides/whats-new-in-v3.md
  29. 1 1
      docs/sources/guides/whats-new-in-v4-2.md
  30. 2 2
      docs/sources/guides/whats-new-in-v4-5.md
  31. 1 1
      docs/sources/guides/whats-new-in-v4-6.md
  32. 74 0
      docs/sources/guides/whats-new-in-v5-3.md
  33. 1 1
      docs/sources/http_api/alerting.md
  34. 1 1
      docs/sources/http_api/dashboard_versions.md
  35. 17 9
      docs/sources/index.md
  36. 2 0
      docs/sources/installation/debian.md
  37. 2 0
      docs/sources/installation/mac.md
  38. 2 0
      docs/sources/installation/rpm.md
  39. 3 0
      docs/sources/installation/windows.md
  40. 3 1
      docs/sources/reference/annotations.md
  41. 1 0
      docs/sources/reference/templating.md
  42. 2 2
      docs/sources/tutorials/ha_setup.md
  43. 2 2
      docs/versions.json
  44. 2 2
      latest.json
  45. 3 2
      package.json
  46. 4 4
      pkg/api/api.go
  47. 15 1
      pkg/api/dataproxy.go
  48. 19 0
      pkg/api/dataproxy_test.go
  49. 14 7
      pkg/api/index.go
  50. 16 10
      pkg/api/org_users.go
  51. 17 0
      pkg/api/pluginproxy/ds_proxy_test.go
  52. 4 0
      pkg/extensions/main.go
  53. 7 0
      pkg/middleware/middleware.go
  54. 4 2
      pkg/models/org_user.go
  55. 15 0
      pkg/services/sqlstore/org_test.go
  56. 25 10
      pkg/services/sqlstore/org_users.go
  57. 1 1
      pkg/services/sqlstore/sqlstore.go
  58. 20 16
      pkg/services/sqlstore/user.go
  59. 24 18
      pkg/setting/setting.go
  60. 12 12
      pkg/setting/setting_test.go
  61. 13 3
      pkg/tsdb/cloudwatch/cloudwatch.go
  62. 5 0
      pkg/tsdb/cloudwatch/cloudwatch_test.go
  63. 30 0
      pkg/tsdb/cloudwatch/constants.go
  64. 4 0
      pkg/tsdb/elasticsearch/time_series_query.go
  65. 4 2
      pkg/tsdb/elasticsearch/time_series_query_test.go
  66. 0 8
      pkg/tsdb/mssql/macros.go
  67. 0 84
      pkg/tsdb/mssql/macros_test.go
  68. 28 0
      pkg/tsdb/mssql/mssql_test.go
  69. 0 8
      pkg/tsdb/mysql/macros.go
  70. 0 84
      pkg/tsdb/mysql/macros_test.go
  71. 25 1
      pkg/tsdb/mysql/mysql_test.go
  72. 0 8
      pkg/tsdb/postgres/macros.go
  73. 0 84
      pkg/tsdb/postgres/macros_test.go
  74. 24 0
      pkg/tsdb/postgres/postgres_test.go
  75. 39 23
      pkg/tsdb/sql_engine.go
  76. 29 0
      pkg/tsdb/sql_engine_test.go
  77. 138 31
      pkg/tsdb/stackdriver/stackdriver.go
  78. 133 0
      pkg/tsdb/stackdriver/stackdriver_test.go
  79. 112 0
      pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json
  80. 36 4
      pkg/tsdb/stackdriver/types.go
  81. 6 2
      public/app/app.ts
  82. 1 0
      public/app/core/components/OrgActionBar/OrgActionBar.test.tsx
  83. 8 2
      public/app/core/components/OrgActionBar/OrgActionBar.tsx
  84. 17 0
      public/app/core/components/PageLoader/PageLoader.tsx
  85. 5 15
      public/app/core/components/PermissionList/AddPermission.tsx
  86. 2 2
      public/app/core/components/PermissionList/DisabledPermissionListItem.tsx
  87. 2 2
      public/app/core/components/PermissionList/PermissionListItem.tsx
  88. 18 49
      public/app/core/components/Picker/DescriptionOption.tsx
  89. 26 18
      public/app/core/components/Picker/DescriptionPicker.tsx
  90. 15 0
      public/app/core/components/Picker/IndicatorsContainer.tsx
  91. 18 0
      public/app/core/components/Picker/NoOptionsMessage.tsx
  92. 20 4
      public/app/core/components/Picker/PickerOption.test.tsx
  93. 17 49
      public/app/core/components/Picker/PickerOption.tsx
  94. 23 0
      public/app/core/components/Picker/ResetStyles.tsx
  95. 30 25
      public/app/core/components/Picker/TeamPicker.tsx
  96. 26 23
      public/app/core/components/Picker/UserPicker.tsx
  97. 12 13
      public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
  98. 85 55
      public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
  99. 85 55
      public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap
  100. 2 7
      public/app/core/components/TagFilter/TagBadge.tsx

+ 3 - 3
.circleci/config.yml

@@ -126,7 +126,7 @@ jobs:
 
   build-all:
     docker:
-     - image: grafana/build-container:1.1.0
+     - image: grafana/build-container:1.2.0
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -173,7 +173,7 @@ jobs:
 
   build:
     docker:
-     - image: grafana/build-container:1.1.0
+     - image: grafana/build-container:1.2.0
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -232,7 +232,7 @@ jobs:
 
   build-enterprise:
     docker:
-     - image: grafana/build-container:v0.1
+     - image: grafana/build-container:1.2.0
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout

+ 0 - 22
.github/CONTRIBUTING.md

@@ -1,22 +0,0 @@
-Follow the setup guide in README.md
-
-### Rebuild frontend assets on source change
-```
-yarn watch
-```
-
-### Rerun tests on source change
-```
-yarn jest
-```
-
-### Run tests for backend assets before commit
-```
-test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
-```
-
-### Run tests for frontend assets before commit
-```
-yarn test
-go test -v ./pkg/...
-```

+ 1 - 0
.gitignore

@@ -54,6 +54,7 @@ profile.cov
 /pkg/cmd/grafana-server/grafana-server
 /pkg/cmd/grafana-server/debug
 /pkg/extensions
+/public/app/extensions
 debug.test
 /examples/*/dist
 /packaging/**/*.rpm

+ 25 - 2
CHANGELOG.md

@@ -1,4 +1,27 @@
-# 5.3.0 (unreleased)
+# 5.4.0 (unreleased)
+
+### New Features
+
+* **Postgres/MySQL/MSSQL**: Adds support for configuration of max open/idle connections and connection max lifetime. Also, panels with multiple SQL queries will now be executed concurrently [#11711](https://github.com/grafana/grafana/issues/11711), thx [@connection-reset](https://github.com/connection-reset)
+
+### Minor
+
+* **Datasource Proxy**: Keep trailing slash for datasource proxy requests [#13326](https://github.com/grafana/grafana/pull/13326), thx [@ryantxu](https://github.com/ryantxu)
+* **Elasticsearch**: Fix no limit size in terms aggregation for alerting queries [#13172](https://github.com/grafana/grafana/issues/13172), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino)
+
+### Breaking changes
+
+* Postgres/MySQL/MSSQL datasources now per default uses `max open connections` = `unlimited` (earlier 10), `max idle connections` = `2` (earlier 10) and `connection max lifetime` = `4` hours (earlier unlimited)
+
+# 5.3.1 (unreleased)
+
+* **Render**: Fix PhantomJS render of graph panel when legend displayed as table to the right [#13616](https://github.com/grafana/grafana/issues/13616)
+
+# 5.3.0 (2018-10-10)
+
+* **Stackdriver**: Filter wildcards and regex matching are not yet supported [#13495](https://github.com/grafana/grafana/issues/13495)
+* **Stackdriver**: Support the distribution metric type for heatmaps [#13559](https://github.com/grafana/grafana/issues/13559)
+* **Cloudwatch**: Automatically set graph yaxis unit [#13575](https://github.com/grafana/grafana/issues/13575), thx [@mtanda](https://github.com/mtanda)
 
 # 5.3.0-beta3 (2018-10-03)
 
@@ -45,7 +68,7 @@
 * **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
 * **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
 * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
-* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
+* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
 * **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
 * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
 

+ 56 - 0
CONTRIBUTING.md

@@ -0,0 +1,56 @@
+
+# Contributing
+
+Grafana uses GitHub to manage contributions.
+Contributions take the form of pull requests that will be reviewed by the core team.
+
+* If you are a new contributor see: [Steps to Contribute](#steps-to-contribute)
+
+* If you have a trivial fix or improvement, go ahead and create a pull request.
+
+* If you plan to do something more involved, discuss your idea on the respective [issue](https://github.com/grafana/grafana/issues) or create a [new issue](https://github.com/grafana/grafana/issues/new) if it does not exist. This will avoid unnecessary work and surely give you and us a good deal of inspiration. 
+
+
+## Steps to Contribute
+
+Should you wish to work on a GitHub issue, check first if it is not already assigned to someone. If it is free, you claim it by commenting on the issue that you want to work on it. This is to prevent duplicated efforts from contributors on the same issue.
+
+Please check the [`beginner friendly`](https://github.com/grafana/grafana/issues?q=is%3Aopen+is%3Aissue+label%3A%22beginner+friendly%22) label to find issues that are good for getting started. If you have questions about one of the issues, with or without the tag, please comment on them and one of the core team or the original poster will clarify it.
+
+
+
+## Setup
+
+Follow the setup guide in README.md
+
+### Rebuild frontend assets on source change
+```
+yarn watch
+```
+
+### Rerun tests on source change
+```
+yarn jest
+```
+
+### Run tests for backend assets before commit
+```
+test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
+```
+
+### Run tests for frontend assets before commit
+```
+yarn test
+go test -v ./pkg/...
+```
+
+
+## Pull Request Checklist
+
+* Branch from the master branch and, if needed, rebase to the current master branch before submitting your pull request. If it doesn't merge cleanly with master you may be asked to rebase your changes.
+
+* Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests).
+
+* If your patch is not getting reviewed or you need a specific person to review it, you can @-reply a reviewer asking for a review in the pull request or a comment.
+
+* Add tests relevant to the fixed bug or new feature.

+ 14 - 2
Gopkg.lock

@@ -264,7 +264,7 @@
   branch = "master"
   name = "github.com/hashicorp/yamux"
   packages = ["."]
-  revision = "2658be15c5f05e76244154714161f17e3e77de2e"
+  revision = "7221087c3d281fda5f794e28c2ea4c6e4d5c4558"
 
 [[projects]]
   name = "github.com/inconshreveable/log15"
@@ -507,6 +507,8 @@
   branch = "master"
   name = "golang.org/x/crypto"
   packages = [
+    "ed25519",
+    "ed25519/internal/edwards25519",
     "md4",
     "pbkdf2"
   ]
@@ -670,6 +672,16 @@
   revision = "e6179049628164864e6e84e973cfb56335748dea"
   version = "v2.3.2"
 
+[[projects]]
+  name = "gopkg.in/square/go-jose.v2"
+  packages = [
+    ".",
+    "cipher",
+    "json"
+  ]
+  revision = "ef984e69dd356202fd4e4910d4d9c24468bdf0b8"
+  version = "v2.1.9"
+
 [[projects]]
   name = "gopkg.in/yaml.v2"
   packages = ["."]
@@ -679,6 +691,6 @@
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "6e9458f912a5f0eb3430b968f1b4dbc4e3b7671b282cf4fe1573419a6d9ba0d4"
+  inputs-digest = "6f7f271afd27f78b7d8ebe27436fee72c9925fb82a978bdc57fde44e01f3ca51"
   solver-name = "gps-cdcl"
   solver-version = 1

+ 4 - 0
Gopkg.toml

@@ -207,3 +207,7 @@ ignored = [
 [[constraint]]
   name = "github.com/VividCortex/mysqlerr"
   branch = "master"
+
+[[constraint]]
+  name = "gopkg.in/square/go-jose.v2"
+  version = "2.1.9"

+ 2 - 2
PLUGIN_DEV.md

@@ -6,8 +6,8 @@ upgrading Grafana please check here before creating an issue.
 
 ## Links
 
-- [Datasource plugin written in typescript](https://github.com/grafana/typescript-template-datasource)
-- [Simple json dataource plugin](https://github.com/grafana/simple-json-datasource)
+- [Datasource plugin written in TypeScript](https://github.com/grafana/typescript-template-datasource)
+- [Simple JSON datasource plugin](https://github.com/grafana/simple-json-datasource)
 - [Plugin development guide](http://docs.grafana.org/plugins/developing/development/)
 - [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack)
 

+ 1 - 1
devenv/docker/blocks/influxdb/docker-compose.yaml

@@ -6,7 +6,7 @@
       - "8083:8083"
       - "8086:8086"
     volumes:
-      - ./blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf
+      - ./docker/blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf
 
   fake-influxdb-data:
     image: grafana/fake-data-gen

+ 2 - 2
docs/sources/administration/permissions.md

@@ -55,7 +55,7 @@ This admin flag makes a user a `Super Admin`. This means they can access the `Se
 {{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}
 
 For dashboards and dashboard folders there is a **Permissions** page that make it possible to
-remove the default role based permssions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.
+remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.
 
 You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**.
 
@@ -102,7 +102,7 @@ Permissions for a dashboard:
 
 Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins.
 
-- **View**: Can only view existing dashboars/folders.
+- **View**: Can only view existing dashboards/folders.
 - You cannot override permissions for users with **Org Admin Role**
 - A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
 

+ 6 - 3
docs/sources/administration/provisioning.md

@@ -123,7 +123,7 @@ datasources:
   withCredentials:
   # <bool> mark as default datasource. Max one per org
   isDefault:
-  # <map> fields that will be converted to json and stored in json_data
+  # <map> fields that will be converted to json and stored in jsonData
   jsonData:
      graphiteVersion: "1.1"
      tlsAuth: true
@@ -147,7 +147,7 @@ Please refer to each datasource documentation for specific provisioning examples
 
 #### Json Data
 
-Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use.
+Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `jsonData` field. Here are the most common settings that the core datasources use.
 
 | Name | Type | Datasource | Description |
 | ---- | ---- | ---- | ---- |
@@ -168,6 +168,9 @@ Since not all datasources have the same configuration settings we only have the
 | sslmode | string | PostgreSQL | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
 | postgresVersion | number | PostgreSQL | Postgres version as a number (903/904/905/906/1000) meaning v9.3, v9.4, ..., v10 |
 | timescaledb | boolean | PostgreSQL | Enable usage of TimescaleDB extension |
+| maxOpenConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of open connections to the database (Grafana v5.4+) |
+| maxIdleConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of connections in the idle connection pool (Grafana v5.4+) |
+| connMaxLifetime | number | MySQL, PostgreSQL & MSSQL | Maximum amount of time in seconds a connection may be reused (Grafana v5.4+) |
 
 #### Secure Json Data
 
@@ -217,7 +220,7 @@ Note: The JSON shown in input field and when using `Copy JSON to Clipboard` and/
 
 {{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="500px" class="docs-image--no-shadow" >}}
 
-### Reuseable Dashboard Urls
+### Reusable Dashboard Urls
 
 If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier.
 When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.

+ 3 - 0
docs/sources/auth/generic-oauth.md

@@ -17,6 +17,9 @@ can find examples using Okta, BitBucket, OneLogin and Azure.
 
 This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
 
+You may have to set the `root_url` option of `[server]` for the callback URL to be 
+correct. For example in case you are serving Grafana behind a proxy.
+
 Example config:
 
 ```bash

+ 3 - 0
docs/sources/auth/github.md

@@ -46,6 +46,9 @@ team_ids =
 allowed_organizations =
 ```
 
+You may have to set the `root_url` option of `[server]` for the callback URL to be 
+correct. For example in case you are serving Grafana behind a proxy.
+
 Restart the Grafana back-end. You should now see a GitHub login button
 on the login page. You can now login or sign up with your GitHub
 accounts.

+ 3 - 0
docs/sources/auth/gitlab.md

@@ -58,6 +58,9 @@ api_url = https://gitlab.com/api/v4
 allowed_groups =
 ```
 
+You may have to set the `root_url` option of `[server]` for the callback URL to be 
+correct. For example in case you are serving Grafana behind a proxy.
+
 Restart the Grafana backend for your changes to take effect.
 
 If you use your own instance of GitLab instead of `gitlab.com`, adjust

+ 3 - 0
docs/sources/auth/google.md

@@ -45,6 +45,9 @@ allowed_domains = mycompany.com mycompany.org
 allow_sign_up = true
 ```
 
+You may have to set the `root_url` option of `[server]` for the callback URL to be 
+correct. For example in case you are serving Grafana behind a proxy.
+
 Restart the Grafana back-end. You should now see a Google login button
 on the login page. You can now login or sign up with your Google
 accounts. The `allowed_domains` option is optional, and domains were separated by space.

+ 1 - 1
docs/sources/auth/overview.md

@@ -58,7 +58,7 @@ If you change your organization name in the Grafana UI this setting needs to be
 ### Basic authentication
 
 Basic auth is enabled by default and works with the built in Grafana user password authentication system and LDAP
-authenticaten integration.
+authentication integration.
 
 To disable basic auth:
 

+ 1 - 1
docs/sources/contribute/cla.md

@@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]
 <br>
 <br>
 <br>
-This CLA agreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)
+This CLA agreement is based on the [Harmony Contributor Agreement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)

+ 8 - 1
docs/sources/features/datasources/mssql.md

@@ -32,6 +32,9 @@ Name | Description
 *Database* | Name of your MSSQL database.
 *User* | Database user's login/username
 *Password* | Database user's password
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+).
 
 ### Min time interval
 
@@ -225,7 +228,7 @@ When above query are used in a graph panel the result will be two series named `
 
 {{< docs-imagebox img="/img/docs/v51/mssql_time_series_two.png" class="docs-image--no-shadow docs-image--right" >}}
 
-**Example with multiple `value` culumns:**
+**Example with multiple `value` columns:**
 
 ```sql
 SELECT
@@ -585,6 +588,10 @@ datasources:
     url: localhost:1433
     database: grafana
     user: grafana
+    jsonData:
+      maxOpenConns: 0         # Grafana v5.4+
+      maxIdleConns: 2         # Grafana v5.4+
+      connMaxLifetime: 14400  # Grafana v5.4+
     secureJsonData:
       password: "Password!"
 

+ 8 - 1
docs/sources/features/datasources/mysql.md

@@ -35,6 +35,9 @@ Name | Description
 *Database* | Name of your MySQL database.
 *User* | Database user's login/username
 *Password* | Database user's password
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours. This should always be lower than configured [wait_timeout](https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_wait_timeout) in MySQL (Grafana v5.4+).
 
 ### Min time interval
 
@@ -59,7 +62,7 @@ Identifier | Description
 The database user you specify when you add the data source should only be granted SELECT permissions on
 the specified database & tables you want to query. Grafana does not validate that the query is safe. The query
 could include any SQL statement. For example, statements like `USE otherdb;` and `DROP TABLE user;` would be
-executed. To protect against this we **Highly** recommmend you create a specific mysql user with restricted permissions.
+executed. To protect against this we **Highly** recommend you create a specific mysql user with restricted permissions.
 
 Example:
 
@@ -316,4 +319,8 @@ datasources:
     database: grafana
     user: grafana
     password: password
+    jsonData:
+      maxOpenConns: 0         # Grafana v5.4+
+      maxIdleConns: 2         # Grafana v5.4+
+      connMaxLifetime: 14400  # Grafana v5.4+
 ```

+ 1 - 1
docs/sources/features/datasources/opentsdb.md

@@ -84,7 +84,7 @@ Some examples are mentioned below to make nested template queries work successfu
 Query | Description
 ------------ | -------------
 *tag_values(cpu, hostname, env=$env)*  | Return tag values for cpu metric, selected env tag value and tag key hostname
-*tag_values(cpu, hostanme, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname
+*tag_values(cpu, hostname, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname
 
 For details on OpenTSDB metric queries checkout the official [OpenTSDB documentation](http://opentsdb.net/docs/build/html/index.html)
 

+ 6 - 0
docs/sources/features/datasources/postgres.md

@@ -31,6 +31,9 @@ Name | Description
 *User* | Database user's login/username
 *Password* | Database user's password
 *SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+).
 *Version* | This option determines which functions are available in the query builder (only available in Grafana 5.3+).
 *TimescaleDB* | TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use `time_bucket` in the `$__timeGroup` macro and display TimescaleDB specific aggregate functions in the query builder (only available in Grafana 5.3+).
 
@@ -374,6 +377,9 @@ datasources:
       password: "Password!"
     jsonData:
       sslmode: "disable" # disable/require/verify-ca/verify-full
+      maxOpenConns: 0         # Grafana v5.4+
+      maxIdleConns: 2         # Grafana v5.4+
+      connMaxLifetime: 14400  # Grafana v5.4+
       postgresVersion: 903 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
       timescaledb: false
 ```

+ 75 - 30
docs/sources/features/datasources/stackdriver.md

@@ -22,7 +22,7 @@ Grafana ships with built-in support for Google Stackdriver. Just add it as a dat
 1. Open the side menu by clicking the Grafana icon in the top header.
 2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
 3. Click the `+ Add data source` button in the top header.
-4. Select `Stackdriver` from the *Type* dropdown.
+4. Select `Stackdriver` from the _Type_ dropdown.
 5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file.
 
 > NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization.
@@ -43,38 +43,58 @@ To authenticate with the Stackdriver API, you need to create a Google Cloud Plat
 
 The following APIs need to be enabled first:
 
-- [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com)
-- [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com)
+* [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com)
+* [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com)
 
 Click on the links above and click the `Enable` button:
 
-![Enable GCP APIs](/img/docs/v54/stackdriver_enable_api.png)
+{{< docs-imagebox img="/img/docs/v53/stackdriver_enable_api.png" class="docs-image--no-shadow" caption="Enable GCP APIs" >}}
 
 #### Create a GCP Service Account for a Project
 
 1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials).
 2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option.
 
-    ![Create service account button](/img/docs/v54/stackdriver_create_service_account_button.png)
+    {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_button.png" class="docs-image--no-shadow" caption="Create service account button" >}}
+
 3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option:
 
-    ![Create service account key](/img/docs/v54/stackdriver_create_service_account_key.png)
+    {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_key.png" class="docs-image--no-shadow" caption="Create service account key" >}}
+
 4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown:
 
-    ![Choose role](/img/docs/v54/stackdriver_service_account_choose_role.png)
+    {{< docs-imagebox img="/img/docs/v53/stackdriver_service_account_choose_role.png" class="docs-image--no-shadow" caption="Choose role" >}}
+  
 5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data.
 6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file.
-    
-    ![Choose role](/img/docs/v54/stackdriver_grafana_upload_key.png)
+
+    {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_upload_key.png" class="docs-image--no-shadow" caption="Upload service key file to Grafana" >}}
+
 7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file!
-    
-    ![Choose role](/img/docs/v54/stackdriver_grafana_key_uploaded.png)
+
+    {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_key_uploaded.png" class="docs-image--no-shadow" caption="Service key file is uploaded to Grafana" >}}
 
 ## Metric Query Editor
 
-Choose a metric from the `Metric` dropdown.
+{{< docs-imagebox img="/img/docs/v53/stackdriver_query_editor.png" max-width= "400px" class="docs-image--right" >}}
+
+The Stackdriver query editor allows you to select metrics, group/aggregate by labels and by time, and use filters to specify which time series you want in the results.
+
+Begin by choosing a `Service` and then a metric from the `Metric` dropdown. Use the plus and minus icons in the filter and group by sections to add/remove filters or group by clauses.
+
+Stackdriver metrics can be of different kinds (GAUGE, DELTA, CUMULATIVE) and these kinds have support for different aggregation options (reducers and aligners). The Grafana query editor shows the list of available aggregation methods for a selected metric and sets a default reducer and aligner when you select the metric. Units for the Y-axis are also automatically selected by the query editor.
+
+### Filter
+
+To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`. You can remove the filter by clicking on the filter name and select `--remove filter--`.
 
-To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`
+#### Simple wildcards
+
+When the operator is set to `=` or `!=` it is possible to add wildcards to the filter value field. E.g `us-*` will capture all values that starts with "us-" and `*central-a` will capture all values that ends with "central-a". `*-central-*` captures all values that has the substring of -central-. Simple wildcards are less expensive than regular expressions.
+
+#### Regular expressions
+
+When the operator is set to `=~` or `!=~` it is possible to add regular expressions to the filter value field. E.g `us-central[1-3]-[af]` would match all values that starts with "us-central", is followed by a number in the range of 1 to 3, a dash and then either an "a" or an "f". Leading and trailing slashes are not needed when creating regular expressions.
 
 ### Aggregation
 
@@ -87,9 +107,9 @@ The `Aligner` field allows you to align multiple time series after the same grou
 The `Alignment Period` groups a metric by time if an aggregation is chosen. The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI).
 The option is called `Stackdriver auto` and the defaults are:
 
-- 1m for time ranges < 23 hours
-- 5m for time ranges >= 23 hours and < 6 days
-- 1h for time ranges >= 6 days
+* 1m for time ranges < 23 hours
+* 5m for time ranges >= 23 hours and < 6 days
+* 1h for time ranges >= 6 days
 
 The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable).
 
@@ -105,20 +125,20 @@ The Alias By field allows you to control the format of the legend keys. The defa
 
 #### Metric Type Patterns
 
-Alias Pattern | Description | Example Result
------------------ | ---------------------------- | -------------
-`{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization`
-`{{metric.name}}` | returns the metric name part | `instance/cpu/utilization`
-`{{metric.service}}` | returns the service part | `compute`
+| Alias Pattern        | Description                  | Example Result                                    |
+| -------------------- | ---------------------------- | ------------------------------------------------- |
+| `{{metric.type}}`    | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}`    | returns the metric name part | `instance/cpu/utilization`                        |
+| `{{metric.service}}` | returns the service part     | `compute`                                         |
 
 #### Label Patterns
 
 In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.
 
-Alias Pattern Format | Description | Alias Pattern Example | Example Result
----------------------- | ---------------------------------- | ---------------------------- | -------------
-`{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod`
-`{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b`
+| Alias Pattern Format     | Description                      | Alias Pattern Example            | Example Result   |
+| ------------------------ | -------------------------------- | -------------------------------- | ---------------- |
+| `{{metric.label.xxx}}`   | returns the metric label value   | `{{metric.label.instance_name}}` | `grafana-1-prod` |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}`        | `us-east1-b`     |
 
 Example Alias By: `{{metric.type}} - {{metric.labels.instance_name}}`
 
@@ -141,15 +161,34 @@ Writing variable queries is not supported yet.
 
 There are two syntaxes:
 
-- `$<varname>`  Example: rate(http_requests_total{job=~"$job"}[5m])
-- `[[varname]]` Example: rate(http_requests_total{job=~"[[job]]"}[5m])
+* `$<varname>` Example: `metric.label.$metric_label`
+* `[[varname]]` Example: `metric.label.[[metric_label]]`
 
-Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the *Multi-value* or *Include all value* options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.
+Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the _Multi-value_ or _Include all value_ options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.
 
 ## Annotations
 
+{{< docs-imagebox img="/img/docs/v53/stackdriver_annotations_query_editor.png" max-width="400px" class="docs-image--right" >}}
+
 [Annotations]({{< relref "reference/annotations.md" >}}) allows you to overlay rich event information on top of graphs. You add annotation
-queries via the Dashboard menu / Annotations view.
+queries via the Dashboard menu / Annotations view. Annotation rendering is expensive so it is important to limit the number of rows returned. There is no support for showing Stackdriver annotations and events yet but it works well with [custom metrics](https://cloud.google.com/monitoring/custom-metrics/) in Stackdriver.
+
+With the query editor for annotations, you can select a metric and filters. The `Title` and `Text` fields support templating and can use data returned from the query. For example, the Title field could have the following text:
+
+`{{metric.type}} has value: {{metric.value}}`
+
+Example Result: `monitoring.googleapis.com/uptime_check/http_status has value: 502`
+
+### Patterns for the Annotation Query Editor
+
+| Alias Pattern Format     | Description                      | Alias Pattern Example            | Example Result                                    |
+| ------------------------ | -------------------------------- | -------------------------------- | ------------------------------------------------- |
+| `{{metric.value}}`       | value of the metric/point        | `{{metric.value}}`               | `555`                                             |
+| `{{metric.type}}`        | returns the full Metric Type     | `{{metric.type}}`                | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}`        | returns the metric name part     | `{{metric.name}}`                | `instance/cpu/utilization`                        |
+| `{{metric.service}}`     | returns the service part         | `{{metric.service}}`             | `compute`                                         |
+| `{{metric.label.xxx}}`   | returns the metric label value   | `{{metric.label.instance_name}}` | `grafana-1-prod`                                  |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}`        | `us-east1-b`                                      |
 
 ## Configure the Datasource with Provisioning
 
@@ -163,9 +202,15 @@ apiVersion: 1
 datasources:
   - name: Stackdriver
     type: stackdriver
+    access: proxy
     jsonData:
       tokenUri: https://oauth2.googleapis.com/token
       clientEmail: stackdriver@myproject.iam.gserviceaccount.com
     secureJsonData:
-      privateKey: "<contents of your Service Account JWT Key file>"
+      privateKey: |
+        -----BEGIN PRIVATE KEY-----
+        POSEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCb1u1Srw8ICYHS
+        ...
+        yA+23427282348234=
+        -----END PRIVATE KEY-----
 ```

+ 1 - 1
docs/sources/features/panels/alertlist.md

@@ -22,6 +22,6 @@ The alert list panel allows you to display your dashboards alerts. The list can
 
 1. **Show**: Lets you choose between current state or recent state changes.
 2. **Max Items**: Max items set the maximum of items in a list.
-3. **Sort Order**: Lets you sort your list alphabeticaly(asc/desc) or by importance.
+3. **Sort Order**: Lets you sort your list alphabetically (asc/desc) or by importance.
 4. **Alerts From This Dashboard**: Shows alerts only from the dashboard the alert list is in.
 5. **State Filter**: Here you can filter your list by one or more parameters.

+ 1 - 1
docs/sources/features/panels/heatmap.md

@@ -80,7 +80,7 @@ the upper or lower bound of the interval.
 There are a number of datasources supporting histogram over time like Elasticsearch (by using a Histogram bucket
 aggregation) or Prometheus (with [histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) metric type
 and *Format as* option set to Heatmap). But generally, any datasource could be used if it meets the requirements:
-returns series with names representing bucket bound or returns sereis sorted by the bound in ascending order.
+returns series with names representing bucket bound or returns series sorted by the bound in ascending order.
 
 With Elasticsearch you control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis).
 

+ 1 - 1
docs/sources/guides/whats-new-in-v2-5.md

@@ -25,7 +25,7 @@ correctly in UTC mode.
 <br>
 
 This release brings a fully featured query editor for Elasticsearch. You will now be able to visualize
-logs or any kind of data stored in Elasticserarch. The query editor allows you to build both simple
+logs or any kind of data stored in Elasticsearch. The query editor allows you to build both simple
 and complex queries for logs or metrics.
 
 - Compute metrics from your documents, supported Elasticsearch aggregations:

+ 1 - 1
docs/sources/guides/whats-new-in-v2.md

@@ -34,7 +34,7 @@ Organizations via a role. That role can be:
 
 There are currently no permissions on individual dashboards.
 
-Read more about Grafanas new user model on the [Admin section](../reference/admin/)
+Read more about Grafana's new user model on the [Admin section](../reference/admin/)
 
 ## Dashboard Snapshot sharing
 

+ 2 - 2
docs/sources/guides/whats-new-in-v3-1.md

@@ -21,7 +21,7 @@ The export feature is now accessed from the share menu.
 Dashboards exported from Grafana 3.1 are now more portable and easier for others to import than before.
 The export process extracts information data source types used by panels and adds these to a new `inputs`
 section in the dashboard json. So when you or another person tries to import the dashboard they will be asked to
-select data source and optional metrix prefix options.
+select data source and optional metric prefix options.
 
 <img src="/img/docs/v31/import_step1.png">
 
@@ -53,7 +53,7 @@ Grafana url to share with a colleague without having to use the Share modal.
 
 ## Internal metrics
 
-Do you want metrics about viewing metrics? Ofc you do! In this release we added support for sending metrics about Grafana to graphite.
+Do you want metrics about viewing metrics? Of course you do! In this release we added support for sending metrics about Grafana to graphite.
 You can configure interval and server in the config file.
 
 ## Logging

+ 1 - 1
docs/sources/guides/whats-new-in-v3.md

@@ -197,7 +197,7 @@ you can install it manually from [Grafana.com](https://grafana.com)
 ## Plugin showcase
 
 Discovering and installing plugins is very quick and easy with Grafana 3.0 and [Grafana.com](https://grafana.com). Here
-are a couple that I incurage you try!
+are a couple that I encourage you to try!
 
 #### [Clock Panel](https://grafana.com/plugins/grafana-clock-panel)
 Supports both current time and count down mode.

+ 1 - 1
docs/sources/guides/whats-new-in-v4-2.md

@@ -45,7 +45,7 @@ We might add more global built in variables in the future and if we do we will p
 
 ### Dedupe alert notifications when running multiple servers
 
-In this release we will dedupe alert notificiations when you are running multiple servers.
+In this release we will dedupe alert notifications when you are running multiple servers.
 This makes it possible to run alerting on multiple servers and only get one notification.
 
 We currently solve this with sql transactions which puts some limitations for how many servers you can use to execute the same rules.

+ 2 - 2
docs/sources/guides/whats-new-in-v4-5.md

@@ -45,7 +45,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
 ### Enhancements
 
 * **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
-* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
+* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
 * **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
 - **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
 
@@ -53,7 +53,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
 
 * **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit.
 
-This option is now rennamed (and moved to Options sub section above your queries):
+This option is now renamed (and moved to Options sub section above your queries):
 ![image|519x120](upload://ySjHOVpavV6yk9LHQxL9nq2HIsT.png)
 
 Datas source selection & options & help are now above your metric queries.

+ 1 - 1
docs/sources/guides/whats-new-in-v4-6.md

@@ -61,7 +61,7 @@ This makes exploring and filtering Prometheus data much easier.
 ### Minor Changes
 
 * **SMTP**: Make it possible to set specific EHLO for smtp client. [#9319](https://github.com/grafana/grafana/issues/9319)
-* **Dataproxy**: Allow grafan to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
+* **Dataproxy**: Allow Grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
 * **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367)
 * **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739)
 * **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8)

+ 74 - 0
docs/sources/guides/whats-new-in-v5-3.md

@@ -12,6 +12,80 @@ weight = -9
 
 # What's New in Grafana v5.3
 
+Grafana v5.3 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements.
+
+- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) as a core datasource!
+- [TV mode]({{< relref "#tv-and-kiosk-mode" >}}) is improved and more accessible
+- [Alerting]({{< relref "#notification-reminders" >}}) with notification reminders
+- [Postgres]({{< relref "#postgres-query-builder" >}}) gets a new query builder!
+- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for Gitlab is improved
+- [Annotations]({{< relref "#annotations" >}}) with template variable filtering
+- [Variables]({{< relref "#variables" >}}) with free text support
+
+## Google Stackdriver
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver-with-heatmap.png"  max-width= "600px" class="docs-image--no-shadow docs-image--right" >}}
+
+Grafana v5.3 ships with built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) and enables you to visualize your Stackdriver metrics in Grafana. 
+
+Getting started with the plugin is easy. Simply create a GCE Service account that has access to the Stackdriver API scope, download the Service Account key file from Google and upload it on the Stackdriver datasource config page in Grafana and you should have a secure server-to-server authentication setup. Like other core plugins, Stackdriver has built-in support for alerting. It also comes with support for heatmaps and basic variables.
+
+If you're already accustomed to the Stackdriver Metrics Explorer UI, you'll notice that there are a lot of similarities to the query editor in Grafana. It is possible to add filters using wildcards and regular expressions. You can do Group By, Primary Aggregation and Alignment.
+
+Alias By allows you to format the legend the way you want, and it's a feature that is not yet present in the Metrics Explorer. Two other features that are only supported in the Grafana plugin are the abilities to manually set the Alignment Period in the query editor and to add Annotations queries.
+
+The Grafana Stackdriver plugin comes with support for automatic unit detection. Grafana will try to map the Stackdriver unit type to a corresponding unit type in Grafana, and if successful the panel Y-axes will be updated accordingly to display the correct unit of measure. This is the first core plugin to provide support for unit detection, and it is our intention to provide support for this in other core plugins in the near future.
+
+The datasource is still in the `beta` phase, meaning it's currently in active development and is still missing one important feature - templating queries.
+Please try it out, but be aware that it might be subject to changes and possible bugs. We would love to hear your feedback.
+
+Please read [Using Google Stackdriver in Grafana](/features/datasources/stackdriver/) for more detailed information on how to get started and use it.
+
+## TV and Kiosk Mode
+
+{{< docs-imagebox img="/img/docs/v53/tv_mode_still.png" max-width="600px" class="docs-image--no-shadow docs-image--right" animated-gif="/img/docs/v53/tv_mode.gif" >}}
+
+We've improved the TV & kiosk mode to make it easier to use. There's now an icon in the top bar that will let you cycle through the different view modes.
+
+1. In the first view mode, the sidebar and most of the buttons in the top bar will be hidden.
+2. In the second view mode, the top bar is completely hidden so that only the dashboard itself is shown.
+3. Hit the escape key to go back to the default view mode.
+
+When switching view modes, the url will be updated to reflect the view mode selected. This allows a dashboard to be opened with a
+certain view mode enabled. Additionally, this also enables [playlists](/reference/playlist) to be started with a certain view mode enabled.
+
+<div class="clearfix"></div>
+
+## Notification Reminders
+
+Do you use Grafana alerting and have some notifications that are more important than others? Then it's possible to set reminders so that you continue to be alerted until the problem is fixed. This is done on the notification channel itself and will affect all alerts that use that channel.
+For additional examples of why reminders might be useful for you, see [multiple series](/alerting/rules/#multiple-series).
+
+Learn how to enable and configure reminders [here](/alerting/notifications/#send-reminders).
+
+## Postgres Query Builder
+
+Grafana 5.3 comes with a new graphical query builder for Postgres. This brings Postgres integration more in line with some of the other datasources and makes it easier for both advanced users and beginners to work with timeseries in Postgres. Learn more about it in the [documentation](/features/datasources/postgres/#query-editor).
+
+{{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}}
+
+## Improved OAuth Support for Gitlab
+
+Grafana 5.3 comes with a new OAuth integration for Gitlab that enables configuration to only allow users that are a member of certain Gitlab groups to authenticate. This makes it possible to use Gitlab OAuth with Grafana in a shared environment without giving everyone access to Grafana.
+Learn how to enable and configure it in the [documentation](/auth/gitlab/).
+
+## Annotations
+
+Grafana 5.3 brings improved support for [native annotations](/reference/annotations/#native-annotations) and makes it possible to use template variables when filtering by tags.
+Learn more about it in the [documentation](/reference/annotations/#query-by-tag).
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}
+
+## Variables
+
+Grafana 5.3 ships with a brand new variable type named `Text box` which makes it easier and more convenient to provide free text input to a variable.
+This new variable type will display as a free text input field with an optional prefilled default value.
+
 ## Changelog
 
 Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list

+ 1 - 1
docs/sources/http_api/alerting.md

@@ -227,7 +227,7 @@ Content-Type: application/json
 
 ## Create alert notification
 
-You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.
+You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.
 
 `POST /api/alert-notifications`
 

+ 1 - 1
docs/sources/http_api/dashboard_versions.md

@@ -291,7 +291,7 @@ Content-Type: text/html; charset=UTF-8
 </p>
 ```
 
-The response is a textual respresentation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
+The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
 
 Status Codes:
 

+ 17 - 9
docs/sources/index.md

@@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]
         <h4>Provisioning</h4>
         <p>A guide to help you automate your Grafana setup & configuration.</p>
     </a>
-    <a href="{{< relref "guides/whats-new-in-v5-2.md" >}}" class="nav-cards__item nav-cards__item--guide">
-        <h4>What's new in v5.2</h4>
-        <p>Article on all the new cool features and enhancements in v5.2</p>
+    <a href="{{< relref "guides/whats-new-in-v5-3.md" >}}" class="nav-cards__item nav-cards__item--guide">
+        <h4>What's new in v5.3</h4>
+        <p>Article on all the new cool features and enhancements in v5.3</p>
     </a>
     <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
         <h4>Screencasts</h4>
@@ -88,9 +88,13 @@ aliases = ["v1.1", "guides/reference/admin"]
       <img src="/img/docs/logos/icon_prometheus.svg" >
       <h5>Prometheus</h5>
     </a>
-    <a href="{{< relref "features/datasources/opentsdb.md" >}}" class="nav-cards__item nav-cards__item--ds">
-      <img src="/img/docs/logos/icon_opentsdb.png" >
-      <h5>OpenTSDB</h5>
+    <a href="{{< relref "features/datasources/stackdriver.md" >}}" class="nav-cards__item nav-cards__item--ds">
+      <img src="/img/docs/logos/stackdriver_logo.png">
+      <h5>Google Stackdriver</h5>
+    </a>
+    <a href="{{< relref "features/datasources/cloudwatch.md" >}}" class="nav-cards__item nav-cards__item--ds">
+      <img src="/img/docs/logos/icon_cloudwatch.svg">
+      <h5>Cloudwatch</h5>
     </a>
     <a href="{{< relref "features/datasources/mysql.md" >}}" class="nav-cards__item nav-cards__item--ds">
       <img src="/img/docs/logos/icon_mysql.png" >
@@ -100,8 +104,12 @@ aliases = ["v1.1", "guides/reference/admin"]
       <img src="/img/docs/logos/icon_postgres.svg" >
       <h5>Postgres</h5>
     </a>
-    <a href="{{< relref "features/datasources/cloudwatch.md" >}}" class="nav-cards__item nav-cards__item--ds">
-      <img src="/img/docs/logos/icon_cloudwatch.svg">
-      <h5>Cloudwatch</h5>
+    <a href="{{< relref "features/datasources/mssql.md" >}}" class="nav-cards__item nav-cards__item--ds">
+      <img src="/img/docs/logos/sql_server_logo.svg">
+      <h5>Microsoft SQL Server</h5>
+    </a>
+    <a href="{{< relref "features/datasources/opentsdb.md" >}}" class="nav-cards__item nav-cards__item--ds">
+      <img src="/img/docs/logos/icon_opentsdb.png" >
+      <h5>OpenTSDB</h5>
     </a>
 </div>

+ 2 - 0
docs/sources/installation/debian.md

@@ -100,6 +100,8 @@ This will start the `grafana-server` process as the `grafana` user,
 which was created during the package installation. The default HTTP port
 is `3000` and default user and group is `admin`.
 
+Default login and password: `admin`/`admin`
+
 To configure the Grafana server to start at boot time:
 
 ```bash

+ 2 - 0
docs/sources/installation/mac.md

@@ -60,6 +60,8 @@ Then start Grafana using:
 brew services start grafana
 ```
 
+Default login and password: `admin`/`admin`
+
 
 ### Configuration
 

+ 2 - 0
docs/sources/installation/rpm.md

@@ -115,6 +115,8 @@ This will start the `grafana-server` process as the `grafana` user,
 which is created during package installation. The default HTTP port is
 `3000`, and default user and group is `admin`.
 
+Default login and password: `admin`/`admin`
+
 To configure the Grafana server to start at boot time:
 
 ```bash

+ 3 - 0
docs/sources/installation/windows.md

@@ -31,6 +31,9 @@ on windows. Edit `custom.ini` and uncomment the `http_port`
 configuration option (`;` is the comment character in ini files) and change it to something like `8080` or similar.
 That port should not require extra Windows privileges.
 
+Default login and password: `admin`/`admin`
+
+
 Start Grafana by executing `grafana-server.exe`, located in the `bin` directory, preferably from the
 command line. If you want to run Grafana as windows service, download
 [NSSM](https://nssm.cc/). It is very easy to add Grafana as a Windows

+ 3 - 1
docs/sources/reference/annotations.md

@@ -47,7 +47,9 @@ can still show them if you add a new **Annotation Query** and filter by tags. Bu
 You can create new annotation queries that fetch annotations from the native annotation store via the `-- Grafana --` data source and by setting *Filter by* to `Tags`. Specify at least
 one tag. For example create an annotation query name `outages` and specify a tag named `outage`. This query will show all annotations you create (from any dashboard or via API) that have the `outage` tag. By default, if you add multiple tags in the annotation query, Grafana will only show annotations that have all the tags you supplied. You can invert the behavior by enabling `Match any` which means that Grafana will show annotations that contains at least one of the tags you supplied.
 
-In 5.4+ it's possible to use template variables in the tag query. So if you have a dashboard showing stats for different services and an template variable that dictates which services to show, you can now use the same template variable in your annotation query to only show annotations for those services.
+In Grafana v5.3+ it's possible to use template variables in the tag query. So if you have a dashboard showing stats for different services and a template variable that dictates which services to show, you can now use the same template variable in your annotation query to only show annotations for those services.
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}
 
 ## Querying other data sources
 

+ 1 - 0
docs/sources/reference/templating.md

@@ -90,6 +90,7 @@ Type | Description
 *Custom* | Define the variable options manually using a comma separated list.
 *Constant* | Define a hidden constant. Useful for metric path prefixes for dashboards you want to share. During dashboard export, constant variables will be made into an import option.
 *Ad hoc filters* | Very special kind of variable that only works with some data sources, InfluxDB & Elasticsearch currently. It allows you to add key/value filters that will automatically be added to all metric queries that use the specified data source.
+*Text box* | This variable type will display as a free text input field with an optional default value.
 
 ### Query options
 

+ 2 - 2
docs/sources/tutorials/ha_setup.md

@@ -26,9 +26,9 @@ Grafana will now persist all long term data in the database. How to configure th
 
 ## User sessions
 
-The second thing to consider is how to deal with user sessions and how to configure your load balancer infront of Grafana.
+The second thing to consider is how to deal with user sessions and how to configure your load balancer in front of Grafana.
 Grafana supports two ways of storing session data: locally on disk or in a database/cache-server.
-If you want to store sessions on disk you can use `sticky sessions` in your load balanacer. If you prefer to store session data in a database/cache-server
+If you want to store sessions on disk you can use `sticky sessions` in your load balancer. If you prefer to store session data in a database/cache-server
 you can use any stateless routing strategy in your load balancer (ex round robin or least connections).
 
 ### Sticky sessions

+ 2 - 2
docs/versions.json

@@ -1,6 +1,6 @@
 [
-  { "version": "v5.3", "path": "/v5.3", "archived": false, "current": false },
-  { "version": "v5.2", "path": "/", "archived": false, "current": true },
+  { "version": "v5.3", "path": "/", "archived": false, "current": true },
+  { "version": "v5.2", "path": "/v5.2", "archived": true },
   { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },
   { "version": "v4.6", "path": "/v4.6", "archived": true },

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
-  "stable": "5.2.4",
-  "testing": "5.2.4"
+  "stable": "5.3.0",
+  "testing": "5.3.0"
 }

+ 3 - 2
package.json

@@ -17,6 +17,7 @@
     "@types/react": "^16.4.14",
     "@types/react-custom-scrollbars": "^4.0.5",
     "@types/react-dom": "^16.0.7",
+    "@types/react-select": "^2.0.4",
     "angular-mocks": "1.6.6",
     "autoprefixer": "^6.4.0",
     "axios": "^0.17.1",
@@ -86,7 +87,7 @@
     "tslint-loader": "^3.5.3",
     "typescript": "^3.0.3",
     "uglifyjs-webpack-plugin": "^1.2.7",
-    "webpack": "^4.8.0",
+    "webpack": "4.19.1",
     "webpack-bundle-analyzer": "^2.9.0",
     "webpack-cleanup-plugin": "^0.5.1",
     "webpack-cli": "^2.1.4",
@@ -157,7 +158,7 @@
     "react-highlight-words": "^0.10.0",
     "react-popper": "^0.7.5",
     "react-redux": "^5.0.7",
-    "react-select": "^1.1.0",
+    "react-select": "2.1.0",
     "react-sizeme": "^2.3.6",
     "react-transition-group": "^2.2.1",
     "redux": "^4.0.0",

+ 4 - 4
pkg/api/api.go

@@ -10,10 +10,10 @@ import (
 )
 
 func (hs *HTTPServer) registerRoutes() {
-	reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true})
-	reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
-	reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
-	reqOrgAdmin := middleware.RoleAuth(m.ROLE_ADMIN)
+	reqSignedIn := middleware.ReqSignedIn
+	reqGrafanaAdmin := middleware.ReqGrafanaAdmin
+	reqEditorRole := middleware.ReqEditorRole
+	reqOrgAdmin := middleware.ReqOrgAdmin
 	redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
 	redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
 	quota := middleware.Quota

+ 15 - 1
pkg/api/dataproxy.go

@@ -51,7 +51,21 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
 		return
 	}
 
-	proxyPath := c.Params("*")
+	// macaron does not include trailing slashes when resolving a wildcard path
+	proxyPath := ensureProxyPathTrailingSlash(c.Req.URL.Path, c.Params("*"))
+
 	proxy := pluginproxy.NewDataSourceProxy(ds, plugin, c, proxyPath)
 	proxy.HandleRequest()
 }
+
+// ensureProxyPathTrailingSlash checks for a trailing slash in the original
+// path and makes sure that a trailing slash is added to the proxy path, if it
+// does not already exist.
+func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string {
+	if len(proxyPath) > 1 {
+		if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' {
+			return proxyPath + "/"
+		}
+	}
+
+	return proxyPath
+}

+ 19 - 0
pkg/api/dataproxy_test.go

@@ -0,0 +1,19 @@
+package api
+
+import (
+	"testing"
+
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestDataProxy(t *testing.T) {
+	Convey("Data proxy test", t, func() {
+		Convey("Should append trailing slash to proxy path if original path has a trailing slash", func() {
+			So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range/", "api/v1/query_range/"), ShouldEqual, "api/v1/query_range/")
+		})
+
+		Convey("Should not append trailing slash to proxy path if original path doesn't have a trailing slash", func() {
+			So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range", "api/v1/query_range"), ShouldEqual, "api/v1/query_range")
+		})
+	})
+}

+ 14 - 7
pkg/api/index.go

@@ -316,6 +316,19 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
 		}
 
 		if c.IsGrafanaAdmin {
+			children := []*dtos.NavLink{
+				{Text: "Users", Id: "global-users", Url: setting.AppSubUrl + "/admin/users", Icon: "gicon gicon-user"},
+				{Text: "Orgs", Id: "global-orgs", Url: setting.AppSubUrl + "/admin/orgs", Icon: "gicon gicon-org"},
+				{Text: "Settings", Id: "server-settings", Url: setting.AppSubUrl + "/admin/settings", Icon: "gicon gicon-preferences"},
+				{Text: "Stats", Id: "server-stats", Url: setting.AppSubUrl + "/admin/stats", Icon: "fa fa-fw fa-bar-chart"},
+			}
+
+			if setting.IsEnterprise {
+				children = append(children, &dtos.NavLink{Text: "Licensing", Id: "licensing", Url: setting.AppSubUrl + "/admin/licensing", Icon: "fa fa-fw fa-unlock-alt"})
+			}
+
+			children = append(children, &dtos.NavLink{Text: "Style Guide", Id: "styleguide", Url: setting.AppSubUrl + "/styleguide", Icon: "fa fa-fw fa-eyedropper"})
+
 			cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{
 				Text:         "Server Admin",
 				HideFromTabs: true,
@@ -323,13 +336,7 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
 				Id:           "admin",
 				Icon:         "gicon gicon-shield",
 				Url:          setting.AppSubUrl + "/admin/users",
-				Children: []*dtos.NavLink{
-					{Text: "Users", Id: "global-users", Url: setting.AppSubUrl + "/admin/users", Icon: "gicon gicon-user"},
-					{Text: "Orgs", Id: "global-orgs", Url: setting.AppSubUrl + "/admin/orgs", Icon: "gicon gicon-org"},
-					{Text: "Settings", Id: "server-settings", Url: setting.AppSubUrl + "/admin/settings", Icon: "gicon gicon-preferences"},
-					{Text: "Stats", Id: "server-stats", Url: setting.AppSubUrl + "/admin/stats", Icon: "fa fa-fw fa-bar-chart"},
-					{Text: "Style Guide", Id: "styleguide", Url: setting.AppSubUrl + "/styleguide", Icon: "fa fa-fw fa-eyedropper"},
-				},
+				Children:     children,
 			})
 		}
 

+ 16 - 10
pkg/api/org_users.go

@@ -45,7 +45,7 @@ func addOrgUserHelper(cmd m.AddOrgUserCommand) Response {
 
 // GET /api/org/users
 func GetOrgUsersForCurrentOrg(c *m.ReqContext) Response {
-	return getOrgUsersHelper(c.OrgId, c.Params("query"), c.ParamsInt("limit"))
+	return getOrgUsersHelper(c.OrgId, c.Query("query"), c.QueryInt("limit"))
 }
 
 // GET /api/orgs/:orgId/users
@@ -102,26 +102,32 @@ func updateOrgUserHelper(cmd m.UpdateOrgUserCommand) Response {
 
 // DELETE /api/org/users/:userId
 func RemoveOrgUserForCurrentOrg(c *m.ReqContext) Response {
-	userID := c.ParamsInt64(":userId")
-	return removeOrgUserHelper(c.OrgId, userID)
+	return removeOrgUserHelper(&m.RemoveOrgUserCommand{
+		UserId:                   c.ParamsInt64(":userId"),
+		OrgId:                    c.OrgId,
+		ShouldDeleteOrphanedUser: true,
+	})
 }
 
 // DELETE /api/orgs/:orgId/users/:userId
 func RemoveOrgUser(c *m.ReqContext) Response {
-	userID := c.ParamsInt64(":userId")
-	orgID := c.ParamsInt64(":orgId")
-	return removeOrgUserHelper(orgID, userID)
+	return removeOrgUserHelper(&m.RemoveOrgUserCommand{
+		UserId: c.ParamsInt64(":userId"),
+		OrgId:  c.ParamsInt64(":orgId"),
+	})
 }
 
-func removeOrgUserHelper(orgID int64, userID int64) Response {
-	cmd := m.RemoveOrgUserCommand{OrgId: orgID, UserId: userID}
-
-	if err := bus.Dispatch(&cmd); err != nil {
+func removeOrgUserHelper(cmd *m.RemoveOrgUserCommand) Response {
+	if err := bus.Dispatch(cmd); err != nil {
 		if err == m.ErrLastOrgAdmin {
 			return Error(400, "Cannot remove last organization admin", nil)
 		}
 		return Error(500, "Failed to remove user from organization", err)
 	}
 
+	if cmd.UserWasDeleted {
+		return Success("User deleted")
+	}
+
 	return Success("User removed from organization")
 }

+ 17 - 0
pkg/api/pluginproxy/ds_proxy_test.go

@@ -362,6 +362,23 @@ func TestDSRouteRule(t *testing.T) {
 			})
 		})
 
+		Convey("When proxying a custom datasource", func() {
+			plugin := &plugins.DataSourcePlugin{}
+			ds := &m.DataSource{
+				Type: "custom-datasource",
+				Url:  "http://host/root/",
+			}
+			ctx := &m.ReqContext{}
+			proxy := NewDataSourceProxy(ds, plugin, ctx, "/path/to/folder/")
+			req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+			So(err, ShouldBeNil)
+
+			proxy.getDirector()(req)
+
+			Convey("Should keep user request (including trailing slash)", func() {
+				So(req.URL.String(), ShouldEqual, "http://host/root/path/to/folder/")
+			})
+		})
 	})
 }
 

+ 4 - 0
pkg/extensions/main.go

@@ -1,3 +1,7 @@
 package extensions
 
+import (
+	_ "gopkg.in/square/go-jose.v2"
+)
+
 var IsEnterprise bool = false

+ 7 - 0
pkg/middleware/middleware.go

@@ -14,6 +14,13 @@ import (
 	"github.com/grafana/grafana/pkg/util"
 )
 
+var (
+	ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
+	ReqSignedIn     = Auth(&AuthOptions{ReqSignedIn: true})
+	ReqEditorRole   = RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
+	ReqOrgAdmin     = RoleAuth(m.ROLE_ADMIN)
+)
+
 func GetContextHandler() macaron.Handler {
 	return func(c *macaron.Context) {
 		ctx := &m.ReqContext{

+ 4 - 2
pkg/models/org_user.go

@@ -72,8 +72,10 @@ type OrgUser struct {
 // COMMANDS
 
 type RemoveOrgUserCommand struct {
-	UserId int64
-	OrgId  int64
+	UserId                   int64
+	OrgId                    int64
+	ShouldDeleteOrphanedUser bool
+	UserWasDeleted           bool
 }
 
 type AddOrgUserCommand struct {

+ 15 - 0
pkg/services/sqlstore/org_test.go

@@ -182,6 +182,21 @@ func TestAccountDataAccess(t *testing.T) {
 					})
 				})
 
+				Convey("Removing user from org should delete user completely if in no other org", func() {
+					// make sure ac2 has no org
+					err := DeleteOrg(&m.DeleteOrgCommand{Id: ac2.OrgId})
+					So(err, ShouldBeNil)
+
+					// remove ac2 from ac1's org
+					remCmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id, ShouldDeleteOrphanedUser: true}
+					err = RemoveOrgUser(&remCmd)
+					So(err, ShouldBeNil)
+					So(remCmd.UserWasDeleted, ShouldBeTrue)
+
+					err = GetSignedInUser(&m.GetSignedInUserQuery{UserId: ac2.Id})
+					So(err, ShouldEqual, m.ErrUserNotFound)
+				})
+
 				Convey("Cannot delete last admin org user", func() {
 					cmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac1.Id}
 					err := RemoveOrgUser(&cmd)

+ 25 - 10
pkg/services/sqlstore/org_users.go

@@ -157,6 +157,12 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
 			}
 		}
 
+		// validate that after delete there is at least one user with admin role in org
+		if err := validateOneAdminLeftInOrg(cmd.OrgId, sess); err != nil {
+			return err
+		}
+
+		// check the user's other orgs and update the user's current org
 		var userOrgs []*m.UserOrgDTO
 		sess.Table("org_user")
 		sess.Join("INNER", "org", "org_user.org_id=org.id")
@@ -168,22 +174,31 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
 			return err
 		}
 
-		hasCurrentOrgSet := false
-		for _, userOrg := range userOrgs {
-			if user.OrgId == userOrg.OrgId {
-				hasCurrentOrgSet = true
-				break
+		if len(userOrgs) > 0 {
+			hasCurrentOrgSet := false
+			for _, userOrg := range userOrgs {
+				if user.OrgId == userOrg.OrgId {
+					hasCurrentOrgSet = true
+					break
+				}
 			}
-		}
 
-		if !hasCurrentOrgSet && len(userOrgs) > 0 {
-			err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId)
-			if err != nil {
+			if !hasCurrentOrgSet {
+				err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId)
+				if err != nil {
+					return err
+				}
+			}
+		} else if cmd.ShouldDeleteOrphanedUser {
+			// no other orgs, delete the full user
+			if err := deleteUserInTransaction(sess, &m.DeleteUserCommand{UserId: user.Id}); err != nil {
 				return err
 			}
+
+			cmd.UserWasDeleted = true
 		}
 
-		return validateOneAdminLeftInOrg(cmd.OrgId, sess)
+		return nil
 	})
 }
 

+ 1 - 1
pkg/services/sqlstore/sqlstore.go

@@ -233,7 +233,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) {
 	case migrator.SQLITE:
 		// special case for tests
 		if !filepath.IsAbs(ss.dbCfg.Path) {
-			ss.dbCfg.Path = filepath.Join(setting.DataPath, ss.dbCfg.Path)
+			ss.dbCfg.Path = filepath.Join(ss.Cfg.DataPath, ss.dbCfg.Path)
 		}
 		os.MkdirAll(path.Dir(ss.dbCfg.Path), os.ModePerm)
 		cnnstr = "file:" + ss.dbCfg.Path + "?cache=shared&mode=rwc"

+ 20 - 16
pkg/services/sqlstore/user.go

@@ -445,25 +445,29 @@ func SearchUsers(query *m.SearchUsersQuery) error {
 
 func DeleteUser(cmd *m.DeleteUserCommand) error {
 	return inTransaction(func(sess *DBSession) error {
-		deletes := []string{
-			"DELETE FROM star WHERE user_id = ?",
-			"DELETE FROM " + dialect.Quote("user") + " WHERE id = ?",
-			"DELETE FROM org_user WHERE user_id = ?",
-			"DELETE FROM dashboard_acl WHERE user_id = ?",
-			"DELETE FROM preferences WHERE user_id = ?",
-			"DELETE FROM team_member WHERE user_id = ?",
-			"DELETE FROM user_auth WHERE user_id = ?",
-		}
+		return deleteUserInTransaction(sess, cmd)
+	})
+}
 
-		for _, sql := range deletes {
-			_, err := sess.Exec(sql, cmd.UserId)
-			if err != nil {
-				return err
-			}
+func deleteUserInTransaction(sess *DBSession, cmd *m.DeleteUserCommand) error {
+	deletes := []string{
+		"DELETE FROM star WHERE user_id = ?",
+		"DELETE FROM " + dialect.Quote("user") + " WHERE id = ?",
+		"DELETE FROM org_user WHERE user_id = ?",
+		"DELETE FROM dashboard_acl WHERE user_id = ?",
+		"DELETE FROM preferences WHERE user_id = ?",
+		"DELETE FROM team_member WHERE user_id = ?",
+		"DELETE FROM user_auth WHERE user_id = ?",
+	}
+
+	for _, sql := range deletes {
+		_, err := sess.Exec(sql, cmd.UserId)
+		if err != nil {
+			return err
 		}
+	}
 
-		return nil
-	})
+	return nil
 }
 
 func UpdateUserPermissions(cmd *m.UpdateUserPermissionsCommand) error {

+ 24 - 18
pkg/setting/setting.go

@@ -54,14 +54,11 @@ var (
 	ApplicationName string
 
 	// Paths
-	LogsPath       string
 	HomePath       string
-	DataPath       string
 	PluginsPath    string
 	CustomInitPath = "conf/custom.ini"
 
 	// Log settings.
-	LogModes   []string
 	LogConfigs []util.DynMap
 
 	// Http server options
@@ -187,11 +184,18 @@ var (
 	ImageUploadProvider string
 )
 
+// TODO move all global vars to this struct
 type Cfg struct {
 	Raw *ini.File
 
+	// HTTP Server Settings
+	AppUrl    string
+	AppSubUrl string
+
 	// Paths
 	ProvisioningPath string
+	DataPath         string
+	LogsPath         string
 
 	// SMTP email settings
 	Smtp SmtpSettings
@@ -413,7 +417,7 @@ func loadSpecifedConfigFile(configFile string, masterFile *ini.File) error {
 	return nil
 }
 
-func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
+func (cfg *Cfg) loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
 	var err error
 
 	// load config defaults
@@ -444,7 +448,7 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
 	// load specified config file
 	err = loadSpecifedConfigFile(args.Config, parsedFile)
 	if err != nil {
-		initLogging(parsedFile)
+		cfg.initLogging(parsedFile)
 		log.Fatal(3, err.Error())
 	}
 
@@ -461,8 +465,8 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
 	evalConfigValues(parsedFile)
 
 	// update data path and logging config
-	DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath)
-	initLogging(parsedFile)
+	cfg.DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath)
+	cfg.initLogging(parsedFile)
 
 	return parsedFile, err
 }
@@ -519,7 +523,7 @@ func NewCfg() *Cfg {
 func (cfg *Cfg) Load(args *CommandLineArgs) error {
 	setHomePath(args)
 
-	iniFile, err := loadConfiguration(args)
+	iniFile, err := cfg.loadConfiguration(args)
 	if err != nil {
 		return err
 	}
@@ -540,6 +544,8 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
 	cfg.ProvisioningPath = makeAbsolute(iniFile.Section("paths").Key("provisioning").String(), HomePath)
 	server := iniFile.Section("server")
 	AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server)
+	cfg.AppUrl = AppUrl
+	cfg.AppSubUrl = AppSubUrl
 
 	Protocol = HTTP
 	if server.Key("protocol").MustString("http") == "https" {
@@ -664,7 +670,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
 			log.Fatal(4, "Invalid callback_url(%s): %s", cfg.RendererCallbackUrl, err)
 		}
 	}
-	cfg.ImagesDir = filepath.Join(DataPath, "png")
+	cfg.ImagesDir = filepath.Join(cfg.DataPath, "png")
 	cfg.PhantomDir = filepath.Join(HomePath, "tools/phantomjs")
 	cfg.TempDataLifetime = iniFile.Section("paths").Key("temp_data_lifetime").MustDuration(time.Second * 3600 * 24)
 	cfg.MetricsEndpointEnabled = iniFile.Section("metrics").Key("enabled").MustBool(true)
@@ -725,7 +731,7 @@ func (cfg *Cfg) readSessionConfig() {
 	SessionOptions.IDLength = 16
 
 	if SessionOptions.Provider == "file" {
-		SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, DataPath)
+		SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, cfg.DataPath)
 		os.MkdirAll(path.Dir(SessionOptions.ProviderConfig), os.ModePerm)
 	}
 
@@ -736,15 +742,15 @@ func (cfg *Cfg) readSessionConfig() {
 	SessionConnMaxLifetime = cfg.Raw.Section("session").Key("conn_max_lifetime").MustInt64(14400)
 }
 
-func initLogging(file *ini.File) {
+func (cfg *Cfg) initLogging(file *ini.File) {
 	// split on comma
-	LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), ",")
+	logModes := strings.Split(file.Section("log").Key("mode").MustString("console"), ",")
 	// also try space
-	if len(LogModes) == 1 {
-		LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ")
+	if len(logModes) == 1 {
+		logModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ")
 	}
-	LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath)
-	log.ReadLoggingConfig(LogModes, LogsPath, file)
+	cfg.LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath)
+	log.ReadLoggingConfig(logModes, cfg.LogsPath, file)
 }
 
 func (cfg *Cfg) LogConfigSources() {
@@ -768,8 +774,8 @@ func (cfg *Cfg) LogConfigSources() {
 	}
 
 	logger.Info("Path Home", "path", HomePath)
-	logger.Info("Path Data", "path", DataPath)
-	logger.Info("Path Logs", "path", LogsPath)
+	logger.Info("Path Data", "path", cfg.DataPath)
+	logger.Info("Path Logs", "path", cfg.LogsPath)
 	logger.Info("Path Plugins", "path", PluginsPath)
 	logger.Info("Path Provisioning", "path", cfg.ProvisioningPath)
 	logger.Info("App mode " + Env)

+ 12 - 12
pkg/setting/setting_test.go

@@ -30,8 +30,8 @@ func TestLoadingSettings(t *testing.T) {
 			cfg.Load(&CommandLineArgs{HomePath: "../../"})
 
 			So(AdminUser, ShouldEqual, "superduper")
-			So(DataPath, ShouldEqual, filepath.Join(HomePath, "data"))
-			So(LogsPath, ShouldEqual, filepath.Join(DataPath, "log"))
+			So(cfg.DataPath, ShouldEqual, filepath.Join(HomePath, "data"))
+			So(cfg.LogsPath, ShouldEqual, filepath.Join(cfg.DataPath, "log"))
 		})
 
 		Convey("Should replace password when defined in environment", func() {
@@ -76,8 +76,8 @@ func TestLoadingSettings(t *testing.T) {
 					HomePath: "../../",
 					Args:     []string{`cfg:paths.data=c:\tmp\data`, `cfg:paths.logs=c:\tmp\logs`},
 				})
-				So(DataPath, ShouldEqual, `c:\tmp\data`)
-				So(LogsPath, ShouldEqual, `c:\tmp\logs`)
+				So(cfg.DataPath, ShouldEqual, `c:\tmp\data`)
+				So(cfg.LogsPath, ShouldEqual, `c:\tmp\logs`)
 			} else {
 				cfg := NewCfg()
 				cfg.Load(&CommandLineArgs{
@@ -85,8 +85,8 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{"cfg:paths.data=/tmp/data", "cfg:paths.logs=/tmp/logs"},
 				})
 
-				So(DataPath, ShouldEqual, "/tmp/data")
-				So(LogsPath, ShouldEqual, "/tmp/logs")
+				So(cfg.DataPath, ShouldEqual, "/tmp/data")
+				So(cfg.LogsPath, ShouldEqual, "/tmp/logs")
 			}
 		})
 
@@ -112,7 +112,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{`cfg:default.paths.data=c:\tmp\data`},
 				})
 
-				So(DataPath, ShouldEqual, `c:\tmp\override`)
+				So(cfg.DataPath, ShouldEqual, `c:\tmp\override`)
 			} else {
 				cfg := NewCfg()
 				cfg.Load(&CommandLineArgs{
@@ -121,7 +121,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{"cfg:default.paths.data=/tmp/data"},
 				})
 
-				So(DataPath, ShouldEqual, "/tmp/override")
+				So(cfg.DataPath, ShouldEqual, "/tmp/override")
 			}
 		})
 
@@ -134,7 +134,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{`cfg:paths.data=c:\tmp\data`},
 				})
 
-				So(DataPath, ShouldEqual, `c:\tmp\data`)
+				So(cfg.DataPath, ShouldEqual, `c:\tmp\data`)
 			} else {
 				cfg := NewCfg()
 				cfg.Load(&CommandLineArgs{
@@ -143,7 +143,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{"cfg:paths.data=/tmp/data"},
 				})
 
-				So(DataPath, ShouldEqual, "/tmp/data")
+				So(cfg.DataPath, ShouldEqual, "/tmp/data")
 			}
 		})
 
@@ -156,7 +156,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{"cfg:paths.data=${GF_DATA_PATH}"},
 				})
 
-				So(DataPath, ShouldEqual, `c:\tmp\env_override`)
+				So(cfg.DataPath, ShouldEqual, `c:\tmp\env_override`)
 			} else {
 				os.Setenv("GF_DATA_PATH", "/tmp/env_override")
 				cfg := NewCfg()
@@ -165,7 +165,7 @@ func TestLoadingSettings(t *testing.T) {
 					Args:     []string{"cfg:paths.data=${GF_DATA_PATH}"},
 				})
 
-				So(DataPath, ShouldEqual, "/tmp/env_override")
+				So(cfg.DataPath, ShouldEqual, "/tmp/env_override")
 			}
 		})
 

+ 13 - 3
pkg/tsdb/cloudwatch/cloudwatch.go

@@ -129,10 +129,13 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 			if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
 				return err
 			}
-			result.Results[queryRes.RefId] = queryRes
 			if err != nil {
-				result.Results[queryRes.RefId].Error = err
+				result.Results[query.RefId] = &tsdb.QueryResult{
+					Error: err,
+				}
+				return nil
 			}
+			result.Results[queryRes.RefId] = queryRes
 			return nil
 		})
 	}
@@ -269,7 +272,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
 	for _, query := range queries {
 		// 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600
 		if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) {
-			return nil, errors.New("too long query period")
+			return queryResponses, errors.New("too long query period")
 		}
 
 		mdq := &cloudwatch.MetricDataQuery{
@@ -362,6 +365,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
 		}
 
 		queryRes.Series = append(queryRes.Series, &series)
+		queryRes.Meta = simplejson.New()
 		queryResponses = append(queryResponses, queryRes)
 	}
 
@@ -565,6 +569,12 @@ func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatch
 		}
 
 		queryRes.Series = append(queryRes.Series, &series)
+		queryRes.Meta = simplejson.New()
+		if len(resp.Datapoints) > 0 && resp.Datapoints[0].Unit != nil {
+			if unit, ok := cloudwatchUnitMappings[*resp.Datapoints[0].Unit]; ok {
+				queryRes.Meta.Set("unit", unit)
+			}
+		}
 	}
 
 	return queryRes, nil

+ 5 - 0
pkg/tsdb/cloudwatch/cloudwatch_test.go

@@ -71,6 +71,7 @@ func TestCloudWatch(t *testing.T) {
 							"p50.00": aws.Float64(30.0),
 							"p90.00": aws.Float64(40.0),
 						},
+						Unit: aws.String("Seconds"),
 					},
 				},
 			}
@@ -103,6 +104,7 @@ func TestCloudWatch(t *testing.T) {
 			So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
 			So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
 			So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
+			So(queryRes.Meta.Get("unit").MustString(), ShouldEqual, "s")
 		})
 
 		Convey("terminate gap of data points", func() {
@@ -118,6 +120,7 @@ func TestCloudWatch(t *testing.T) {
 							"p50.00": aws.Float64(30.0),
 							"p90.00": aws.Float64(40.0),
 						},
+						Unit: aws.String("Seconds"),
 					},
 					{
 						Timestamp: aws.Time(timestamp.Add(60 * time.Second)),
@@ -127,6 +130,7 @@ func TestCloudWatch(t *testing.T) {
 							"p50.00": aws.Float64(40.0),
 							"p90.00": aws.Float64(50.0),
 						},
+						Unit: aws.String("Seconds"),
 					},
 					{
 						Timestamp: aws.Time(timestamp.Add(180 * time.Second)),
@@ -136,6 +140,7 @@ func TestCloudWatch(t *testing.T) {
 							"p50.00": aws.Float64(50.0),
 							"p90.00": aws.Float64(60.0),
 						},
+						Unit: aws.String("Seconds"),
 					},
 				},
 			}

+ 30 - 0
pkg/tsdb/cloudwatch/constants.go

@@ -0,0 +1,30 @@
+package cloudwatch
+
+var cloudwatchUnitMappings = map[string]string{
+	"Seconds":      "s",
+	"Microseconds": "µs",
+	"Milliseconds": "ms",
+	"Bytes":        "bytes",
+	"Kilobytes":    "kbytes",
+	"Megabytes":    "mbytes",
+	"Gigabytes":    "gbytes",
+	//"Terabytes":        "",
+	"Bits": "bits",
+	//"Kilobits":         "",
+	//"Megabits":         "",
+	//"Gigabits":         "",
+	//"Terabits":         "",
+	"Percent": "percent",
+	//"Count":            "",
+	"Bytes/Second":     "Bps",
+	"Kilobytes/Second": "KBs",
+	"Megabytes/Second": "MBs",
+	"Gigabytes/Second": "GBs",
+	//"Terabytes/Second": "",
+	"Bits/Second":     "bps",
+	"Kilobits/Second": "Kbits",
+	"Megabits/Second": "Mbits",
+	"Gigabits/Second": "Gbits",
+	//"Terabits/Second":  "",
+	//"Count/Second":     "",
+}

+ 4 - 0
pkg/tsdb/elasticsearch/time_series_query.go

@@ -171,6 +171,10 @@ func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*Metr
 		} else {
 			a.Size = 500
 		}
+		if a.Size == 0 {
+			a.Size = 500
+		}
+
 		if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
 			a.MinDocCount = &minDocCount
 		}

+ 4 - 2
pkg/tsdb/elasticsearch/time_series_query_test.go

@@ -60,7 +60,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
-					{ "type": "terms", "field": "@host", "id": "2" },
+					{ "type": "terms", "field": "@host", "id": "2", "settings": { "size": "0", "order": "asc" } },
 					{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
 				],
 				"metrics": [{"type": "count", "id": "1" }]
@@ -69,7 +69,9 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			sr := c.multisearchRequests[0].Requests[0]
 			firstLevel := sr.Aggs[0]
 			So(firstLevel.Key, ShouldEqual, "2")
-			So(firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Field, ShouldEqual, "@host")
+			termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
+			So(termsAgg.Field, ShouldEqual, "@host")
+			So(termsAgg.Size, ShouldEqual, 500)
 			secondLevel := firstLevel.Aggregation.Aggs[0]
 			So(secondLevel.Key, ShouldEqual, "3")
 			So(secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")

+ 0 - 8
pkg/tsdb/mssql/macros.go

@@ -66,10 +66,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -96,10 +92,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
-	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
-	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 0 - 84
pkg/tsdb/mssql/macros_test.go

@@ -111,20 +111,6 @@ func TestMacroEngine(t *testing.T) {
 				So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
 				So(err, ShouldBeNil)
@@ -132,20 +118,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
 			})
 
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
-
 			Convey("interpolate __unixEpochGroup function", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -171,40 +143,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -219,40 +163,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 	})
 }

+ 28 - 0
pkg/tsdb/mssql/mssql_test.go

@@ -676,6 +676,30 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
 			})
 
+			Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+				tsdb.Interpolate = origInterpolate
+				query := &tsdb.TsdbQuery{
+					TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+					Queries: []*tsdb.Query{
+						{
+							DataSource: &models.DataSource{JsonData: simplejson.New()},
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+				So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+			})
+
 			Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
 				sql := `
 						IF object_id('sp_test_epoch') IS NOT NULL
@@ -720,9 +744,11 @@ func TestMSSQL(t *testing.T) {
 				So(err, ShouldBeNil)
 
 				Convey("When doing a metric query using stored procedure should return correct result", func() {
+					tsdb.Interpolate = origInterpolate
 					query := &tsdb.TsdbQuery{
 						Queries: []*tsdb.Query{
 							{
+								DataSource: &models.DataSource{JsonData: simplejson.New()},
 								Model: simplejson.NewFromAny(map[string]interface{}{
 									"rawSql": `DECLARE
 											@from int = $__unixEpochFrom(),
@@ -797,9 +823,11 @@ func TestMSSQL(t *testing.T) {
 				So(err, ShouldBeNil)
 
 				Convey("When doing a metric query using stored procedure should return correct result", func() {
+					tsdb.Interpolate = origInterpolate
 					query := &tsdb.TsdbQuery{
 						Queries: []*tsdb.Query{
 							{
+								DataSource: &models.DataSource{JsonData: simplejson.New()},
 								Model: simplejson.NewFromAny(map[string]interface{}{
 									"rawSql": `DECLARE
 											@from int = $__unixEpochFrom(),

+ 0 - 8
pkg/tsdb/mysql/macros.go

@@ -61,10 +61,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -91,10 +87,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
-	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
-	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 0 - 84
pkg/tsdb/mysql/macros_test.go

@@ -63,20 +63,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
@@ -84,20 +70,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
 
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
-
 			Convey("interpolate __unixEpochGroup function", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -123,40 +95,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -171,40 +115,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 	})
 }

+ 25 - 1
pkg/tsdb/mysql/mysql_test.go

@@ -314,7 +314,7 @@ func TestMySQL(t *testing.T) {
 					query := &tsdb.TsdbQuery{
 						Queries: []*tsdb.Query{
 							{
-								DataSource: &models.DataSource{},
+								DataSource: &models.DataSource{JsonData: simplejson.New()},
 								Model: simplejson.NewFromAny(map[string]interface{}{
 									"rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
 									"format": "time_series",
@@ -753,6 +753,30 @@ func TestMySQL(t *testing.T) {
 			})
 		})
 
+		Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+			tsdb.Interpolate = origInterpolate
+			query := &tsdb.TsdbQuery{
+				TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+				Queries: []*tsdb.Query{
+					{
+						DataSource: &models.DataSource{JsonData: simplejson.New()},
+						Model: simplejson.NewFromAny(map[string]interface{}{
+							"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+							"format": "time_series",
+						}),
+						RefId: "A",
+					},
+				},
+			}
+
+			resp, err := endpoint.Query(nil, nil, query)
+			So(err, ShouldBeNil)
+			queryResult := resp.Results["A"]
+			So(queryResult.Error, ShouldBeNil)
+			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+		})
+
 		Convey("Given a table with event data", func() {
 			type event struct {
 				TimeSec     int64

+ 0 - 8
pkg/tsdb/postgres/macros.go

@@ -87,10 +87,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		}
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
-	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
@@ -122,10 +118,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
-	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
-	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 0 - 84
pkg/tsdb/postgres/macros_test.go

@@ -44,13 +44,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")
@@ -102,13 +95,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
 			})
 
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
@@ -116,20 +102,6 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
 
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
-
 			Convey("interpolate __unixEpochGroup function", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -155,40 +127,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -203,40 +147,12 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
 			})
-
-			Convey("interpolate __unixEpochFrom function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
-			})
-
-			Convey("interpolate __unixEpochTo function", func() {
-				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
-			})
 		})
 	})
 }

+ 24 - 0
pkg/tsdb/postgres/postgres_test.go

@@ -684,6 +684,30 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
 				So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
 			})
+
+			Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+				tsdb.Interpolate = origInterpolate
+				query := &tsdb.TsdbQuery{
+					TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+					Queries: []*tsdb.Query{
+						{
+							DataSource: &models.DataSource{JsonData: simplejson.New()},
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+				So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+			})
 		})
 
 		Convey("Given a table with event data", func() {

+ 39 - 23
pkg/tsdb/sql_engine.go

@@ -98,8 +98,12 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo
 		return nil, err
 	}
 
-	engine.SetMaxOpenConns(10)
-	engine.SetMaxIdleConns(10)
+	maxOpenConns := config.Datasource.JsonData.Get("maxOpenConns").MustInt(0)
+	engine.SetMaxOpenConns(maxOpenConns)
+	maxIdleConns := config.Datasource.JsonData.Get("maxIdleConns").MustInt(2)
+	engine.SetMaxIdleConns(maxIdleConns)
+	connMaxLifetime := config.Datasource.JsonData.Get("connMaxLifetime").MustInt(14400)
+	engine.SetConnMaxLifetime(time.Duration(connMaxLifetime) * time.Second)
 
 	engineCache.versions[config.Datasource.Id] = config.Datasource.Version
 	engineCache.cache[config.Datasource.Id] = engine
@@ -116,9 +120,7 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
 		Results: make(map[string]*QueryResult),
 	}
 
-	session := e.engine.NewSession()
-	defer session.Close()
-	db := session.DB()
+	var wg sync.WaitGroup
 
 	for _, query := range tsdbQuery.Queries {
 		rawSQL := query.Model.Get("rawSql").MustString()
@@ -145,31 +147,41 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
 
 		queryResult.Meta.Set("sql", rawSQL)
 
-		rows, err := db.Query(rawSQL)
-		if err != nil {
-			queryResult.Error = err
-			continue
-		}
+		wg.Add(1)
 
-		defer rows.Close()
+		go func(rawSQL string, query *Query, queryResult *QueryResult) {
+			defer wg.Done()
+			session := e.engine.NewSession()
+			defer session.Close()
+			db := session.DB()
 
-		format := query.Model.Get("format").MustString("time_series")
-
-		switch format {
-		case "time_series":
-			err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+			rows, err := db.Query(rawSQL)
 			if err != nil {
 				queryResult.Error = err
-				continue
+				return
 			}
-		case "table":
-			err := e.transformToTable(query, rows, queryResult, tsdbQuery)
-			if err != nil {
-				queryResult.Error = err
-				continue
+
+			defer rows.Close()
+
+			format := query.Model.Get("format").MustString("time_series")
+
+			switch format {
+			case "time_series":
+				err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+				if err != nil {
+					queryResult.Error = err
+					return
+				}
+			case "table":
+				err := e.transformToTable(query, rows, queryResult, tsdbQuery)
+				if err != nil {
+					queryResult.Error = err
+					return
+				}
 			}
-		}
+		}(rawSQL, query, queryResult)
 	}
+	wg.Wait()
 
 	return result, nil
 }
@@ -184,6 +196,10 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string,
 
 	sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
 	sql = strings.Replace(sql, "$__interval", interval.Text, -1)
+	sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
+	sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
+	sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
+	sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
 
 	return sql, nil
 }

+ 29 - 0
pkg/tsdb/sql_engine_test.go

@@ -1,6 +1,7 @@
 package tsdb
 
 import (
+	"fmt"
 	"testing"
 	"time"
 
@@ -43,6 +44,34 @@ func TestSqlEngine(t *testing.T) {
 				So(sql, ShouldEqual, "select 60000 ")
 			})
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
+			})
+
+			Convey("interpolate __unixEpochFrom function", func() {
+				sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
+			})
+
+			Convey("interpolate __unixEpochTo function", func() {
+				sql, err := Interpolate(query, timeRange, "select $__unixEpochTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
+			})
+
 		})
 
 		Convey("Given row values with time.Time as time columns", func() {

+ 138 - 31
pkg/tsdb/stackdriver/stackdriver.go

@@ -159,6 +159,39 @@ func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Stackd
 	return stackdriverQueries, nil
 }
 
+func reverse(s string) string {
+	chars := []rune(s)
+	for i, j := 0, len(chars)-1; i < j; i, j = i+1, j-1 {
+		chars[i], chars[j] = chars[j], chars[i]
+	}
+	return string(chars)
+}
+
+func interpolateFilterWildcards(value string) string {
+	re := regexp.MustCompile("[*]")
+	matches := len(re.FindAllStringIndex(value, -1))
+	if matches == 2 && strings.HasSuffix(value, "*") && strings.HasPrefix(value, "*") {
+		value = strings.Replace(value, "*", "", -1)
+		value = fmt.Sprintf(`has_substring("%s")`, value)
+	} else if matches == 1 && strings.HasPrefix(value, "*") {
+		value = strings.Replace(value, "*", "", 1)
+		value = fmt.Sprintf(`ends_with("%s")`, value)
+	} else if matches == 1 && strings.HasSuffix(value, "*") {
+		value = reverse(strings.Replace(reverse(value), "*", "", 1))
+		value = fmt.Sprintf(`starts_with("%s")`, value)
+	} else if matches != 0 {
+		re := regexp.MustCompile(`[-\/^$+?.()|[\]{}]`)
+		value = string(re.ReplaceAllFunc([]byte(value), func(in []byte) []byte {
+			return []byte(strings.Replace(string(in), string(in), `\\`+string(in), 1))
+		}))
+		value = strings.Replace(value, "*", ".*", -1)
+		value = strings.Replace(value, `"`, `\\"`, -1)
+		value = fmt.Sprintf(`monitoring.regex.full_match("^%s$")`, value)
+	}
+
+	return value
+}
+
 func buildFilterString(metricType string, filterParts []interface{}) string {
 	filterString := ""
 	for i, part := range filterParts {
@@ -166,7 +199,15 @@ func buildFilterString(metricType string, filterParts []interface{}) string {
 		if part == "AND" {
 			filterString += " "
 		} else if mod == 2 {
-			filterString += fmt.Sprintf(`"%s"`, part)
+			operator := filterParts[i-1]
+			if operator == "=~" || operator == "!=~" {
+				filterString = reverse(strings.Replace(reverse(filterString), "~", "", 1))
+				filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part)
+			} else if strings.Contains(part.(string), "*") {
+				filterString += interpolateFilterWildcards(part.(string))
+			} else {
+				filterString += fmt.Sprintf(`"%s"`, part)
+			}
 		} else {
 			filterString += part.(string)
 		}
@@ -300,29 +341,6 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
 	for _, series := range data.TimeSeries {
 		points := make([]tsdb.TimePoint, 0)
 
-		// reverse the order to be ascending
-		for i := len(series.Points) - 1; i >= 0; i-- {
-			point := series.Points[i]
-			value := point.Value.DoubleValue
-
-			if series.ValueType == "INT64" {
-				parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
-				if err == nil {
-					value = parsedValue
-				}
-			}
-
-			if series.ValueType == "BOOL" {
-				if point.Value.BoolValue {
-					value = 1
-				} else {
-					value = 0
-				}
-			}
-
-			points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
-		}
-
 		defaultMetricName := series.Metric.Type
 
 		for key, value := range series.Metric.Labels {
@@ -338,18 +356,87 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
 			if !containsLabel(resourceLabels[key], value) {
 				resourceLabels[key] = append(resourceLabels[key], value)
 			}
-
 			if containsLabel(query.GroupBys, "resource.label."+key) {
 				defaultMetricName += " " + value
 			}
 		}
 
-		metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query)
+		// reverse the order to be ascending
+		if series.ValueType != "DISTRIBUTION" {
+			for i := len(series.Points) - 1; i >= 0; i-- {
+				point := series.Points[i]
+				value := point.Value.DoubleValue
+
+				if series.ValueType == "INT64" {
+					parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
+					if err == nil {
+						value = parsedValue
+					}
+				}
 
-		queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
-			Name:   metricName,
-			Points: points,
-		})
+				if series.ValueType == "BOOL" {
+					if point.Value.BoolValue {
+						value = 1
+					} else {
+						value = 0
+					}
+				}
+
+				points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
+			}
+
+			metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
+
+			queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
+				Name:   metricName,
+				Points: points,
+			})
+		} else {
+			buckets := make(map[int]*tsdb.TimeSeries)
+
+			for i := len(series.Points) - 1; i >= 0; i-- {
+				point := series.Points[i]
+				if len(point.Value.DistributionValue.BucketCounts) == 0 {
+					continue
+				}
+				maxKey := 0
+				for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
+					value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
+					if err != nil {
+						continue
+					}
+					if _, ok := buckets[i]; !ok {
+						// set lower bounds
+						// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
+						bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+						additionalLabels := map[string]string{"bucket": bucketBound}
+						buckets[i] = &tsdb.TimeSeries{
+							Name:   formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+							Points: make([]tsdb.TimePoint, 0),
+						}
+						if maxKey < i {
+							maxKey = i
+						}
+					}
+					buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
+				}
+
+				// fill empty bucket
+				for i := 0; i < maxKey; i++ {
+					if _, ok := buckets[i]; !ok {
+						bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+						additionalLabels := map[string]string{"bucket": bucketBound}
+						buckets[i] = &tsdb.TimeSeries{
+							Name:   formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+							Points: make([]tsdb.TimePoint, 0),
+						}
+					}
+				}
+			}
+			for i := 0; i < len(buckets); i++ {
+				queryRes.Series = append(queryRes.Series, buckets[i])
+			}
+		}
 	}
 
 	queryRes.Meta.Set("resourceLabels", resourceLabels)
@@ -368,7 +455,7 @@ func containsLabel(labels []string, newLabel string) bool {
 	return false
 }
 
-func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string {
+func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
 	if query.AliasBy == "" {
 		return defaultMetricName
 	}
@@ -400,6 +487,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels
 			return []byte(val)
 		}
 
+		if val, exists := additionalLabels[metaPartName]; exists {
+			return []byte(val)
+		}
+
 		return in
 	})
 
@@ -425,6 +516,22 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte {
 	return nil
 }
 
+func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
+	bucketBound := "0"
+	if n == 0 {
+		return bucketBound
+	}
+
+	if bucketOptions.LinearBuckets != nil {
+		bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10)
+	} else if bucketOptions.ExponentialBuckets != nil {
+		bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
+	} else if bucketOptions.ExplicitBuckets != nil {
+		bucketBound = strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[(n-1)], 10)
+	}
+	return bucketBound
+}
+
 func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
 	u, _ := url.Parse(dsInfo.Url)
 	u.Path = path.Join(u.Path, "render")

+ 133 - 0
pkg/tsdb/stackdriver/stackdriver_test.go

@@ -4,6 +4,8 @@ import (
 	"encoding/json"
 	"fmt"
 	"io/ioutil"
+	"math"
+	"strconv"
 	"testing"
 	"time"
 
@@ -341,6 +343,137 @@ func TestStackdriver(t *testing.T) {
 					})
 				})
 			})
+
+			Convey("when data from query is distribution", func() {
+				data, err := loadTestFile("./test-data/3-series-response-distribution.json")
+				So(err, ShouldBeNil)
+				So(len(data.TimeSeries), ShouldEqual, 1)
+
+				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+				query := &StackdriverQuery{AliasBy: "{{bucket}}"}
+				err = executor.parseResponse(res, data, query)
+				So(err, ShouldBeNil)
+
+				So(len(res.Series), ShouldEqual, 11)
+				for i := 0; i < 11; i++ {
+					if i == 0 {
+						So(res.Series[i].Name, ShouldEqual, "0")
+					} else {
+						So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
+					}
+					So(len(res.Series[i].Points), ShouldEqual, 3)
+				}
+
+				Convey("timestamps should be in ascending order", func() {
+					So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536668940000)
+					So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536669000000)
+					So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
+				})
+
+				Convey("value should be correct", func() {
+					So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
+					So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
+					So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
+					So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
+					So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
+					So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
+					So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
+				})
+			})
+
+		})
+
+		Convey("when interpolating filter wildcards", func() {
+			Convey("and wildcard is used in the beginning and the end of the word", func() {
+				Convey("and theres no wildcard in the middle of the word", func() {
+					value := interpolateFilterWildcards("*-central1*")
+					So(value, ShouldEqual, `has_substring("-central1")`)
+				})
+				Convey("and there is a wildcard in the middle of the word", func() {
+					value := interpolateFilterWildcards("*-cent*ral1*")
+					So(value, ShouldNotStartWith, `has_substring`)
+				})
+			})
+
+			Convey("and wildcard is used in the beginning of the word", func() {
+				Convey("and there is not a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*-central1")
+					So(value, ShouldEqual, `ends_with("-central1")`)
+				})
+				Convey("and there is a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*-cent*al1")
+					So(value, ShouldNotStartWith, `ends_with`)
+				})
+			})
+
+			Convey("and wildcard is used at the end of the word", func() {
+				Convey("and there is not a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("us-central*")
+					So(value, ShouldEqual, `starts_with("us-central")`)
+				})
+				Convey("and there is a wildcard elsewhere in the word", func() {
+					value := interpolateFilterWildcards("*us-central*")
+					So(value, ShouldNotStartWith, `starts_with`)
+				})
+			})
+
+			Convey("and wildcard is used in the middle of the word", func() {
+				Convey("and there is only one wildcard", func() {
+					value := interpolateFilterWildcards("us-ce*tral1-b")
+					So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`)
+				})
+
+				Convey("and there is more than one wildcard", func() {
+					value := interpolateFilterWildcards("us-ce*tra*1-b")
+					So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`)
+				})
+			})
+
+			Convey("and wildcard is used in the middle of the word and in the beginning of the word", func() {
+				value := interpolateFilterWildcards("*s-ce*tral1-b")
+				So(value, ShouldEqual, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`)
+			})
+
+			Convey("and wildcard is used in the middle of the word and in the ending of the word", func() {
+				value := interpolateFilterWildcards("us-ce*tral1-*")
+				So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`)
+			})
+
+			Convey("and no wildcard is used", func() {
+				value := interpolateFilterWildcards("us-central1-a}")
+				So(value, ShouldEqual, `us-central1-a}`)
+			})
+		})
+
+		Convey("when building filter string", func() {
+			Convey("and theres no regex operator", func() {
+				Convey("and there are wildcards in a filter value", func() {
+					filterParts := []interface{}{"zone", "=", "*-central1*"}
+					value := buildFilterString("somemetrictype", filterParts)
+					So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`)
+				})
+
+				Convey("and there are no wildcards in any filter value", func() {
+					filterParts := []interface{}{"zone", "!=", "us-central1-a"}
+					value := buildFilterString("somemetrictype", filterParts)
+					So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`)
+				})
+			})
+
+			Convey("and there is a regex operator", func() {
+				filterParts := []interface{}{"zone", "=~", "us-central1-a~"}
+				value := buildFilterString("somemetrictype", filterParts)
+				Convey("it should remove the ~ character from the operator that belongs to the value", func() {
+					So(value, ShouldNotContainSubstring, `=~`)
+					So(value, ShouldContainSubstring, `zone=`)
+				})
+
+				Convey("it should insert monitoring.regex.full_match before filter value", func() {
+					So(value, ShouldContainSubstring, `zone=monitoring.regex.full_match("us-central1-a~")`)
+				})
+			})
 		})
 	})
 }

+ 112 - 0
pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json

@@ -0,0 +1,112 @@
+{
+  "timeSeries": [
+    {
+      "metric": {
+        "type": "loadbalancing.googleapis.com\/https\/backend_latencies"
+      },
+      "resource": {
+        "type": "https_lb_rule",
+        "labels": {
+          "project_id": "grafana-prod"
+        }
+      },
+      "metricKind": "DELTA",
+      "valueType": "DISTRIBUTION",
+      "points": [
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:30:00Z",
+            "endTime": "2018-09-11T12:31:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "1",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1",
+                "0"
+              ]
+            }
+          }
+        },
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:29:00Z",
+            "endTime": "2018-09-11T12:30:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "1",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1"
+              ]
+            }
+          }
+        },
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:28:00Z",
+            "endTime": "2018-09-11T12:29:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "3",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1",
+                "1",
+                "1"
+              ]
+            }
+          }
+        }
+      ]
+    }
+  ]
+}

+ 36 - 4
pkg/tsdb/stackdriver/types.go

@@ -14,6 +14,22 @@ type StackdriverQuery struct {
 	AliasBy  string
 }
 
+type StackdriverBucketOptions struct {
+	LinearBuckets *struct {
+		NumFiniteBuckets int64 `json:"numFiniteBuckets"`
+		Width            int64 `json:"width"`
+		Offset           int64 `json:"offset"`
+	} `json:"linearBuckets"`
+	ExponentialBuckets *struct {
+		NumFiniteBuckets int64   `json:"numFiniteBuckets"`
+		GrowthFactor     float64 `json:"growthFactor"`
+		Scale            float64 `json:"scale"`
+	} `json:"exponentialBuckets"`
+	ExplicitBuckets *struct {
+		Bounds []int64 `json:"bounds"`
+	} `json:"explicitBuckets"`
+}
+
 // StackdriverResponse is the data returned from the external Google Stackdriver API
 type StackdriverResponse struct {
 	TimeSeries []struct {
@@ -33,10 +49,26 @@ type StackdriverResponse struct {
 				EndTime   time.Time `json:"endTime"`
 			} `json:"interval"`
 			Value struct {
-				DoubleValue float64 `json:"doubleValue"`
-				StringValue string  `json:"stringValue"`
-				BoolValue   bool    `json:"boolValue"`
-				IntValue    string  `json:"int64Value"`
+				DoubleValue       float64 `json:"doubleValue"`
+				StringValue       string  `json:"stringValue"`
+				BoolValue         bool    `json:"boolValue"`
+				IntValue          string  `json:"int64Value"`
+				DistributionValue struct {
+					Count                 string  `json:"count"`
+					Mean                  float64 `json:"mean"`
+					SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
+					Range                 struct {
+						Min int `json:"min"`
+						Max int `json:"max"`
+					} `json:"range"`
+					BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
+					BucketCounts  []string                 `json:"bucketCounts"`
+					Examplars     []struct {
+						Value     float64 `json:"value"`
+						Timestamp string  `json:"timestamp"`
+						// attachments
+					} `json:"examplars"`
+				} `json:"distributionValue"`
 			} `json:"value"`
 		} `json:"points"`
 	} `json:"timeSeries"`

+ 6 - 2
public/app/app.ts

@@ -29,7 +29,11 @@ _.move = (array, fromIndex, toIndex) => {
 import { coreModule, registerAngularDirectives } from './core/core';
 import { setupAngularRoutes } from './routes/routes';
 
-declare var System: any;
+// import symlinked extensions
+const extensionsIndex = (require as any).context('.', true, /extensions\/index.ts/);
+extensionsIndex.keys().forEach(key => {
+  extensionsIndex(key);
+});
 
 export class GrafanaApp {
   registerFunctions: any;
@@ -119,7 +123,7 @@ export class GrafanaApp {
     coreModule.config(setupAngularRoutes);
     registerAngularDirectives();
 
-    const preBootRequires = [System.import('app/features/all')];
+    const preBootRequires = [import('app/features/all')];
 
     Promise.all(preBootRequires)
       .then(() => {

+ 1 - 0
public/app/core/components/OrgActionBar/OrgActionBar.test.tsx

@@ -6,6 +6,7 @@ const setup = (propOverrides?: object) => {
   const props: Props = {
     searchQuery: '',
     setSearchQuery: jest.fn(),
+    target: '_blank',
     linkButton: { href: 'some/url', title: 'test' },
   };
 

+ 8 - 2
public/app/core/components/OrgActionBar/OrgActionBar.tsx

@@ -7,11 +7,17 @@ export interface Props {
   onSetLayoutMode?: (mode: LayoutMode) => {};
   setSearchQuery: (value: string) => {};
   linkButton: { href: string; title: string };
+  target?: string;
 }
 
 export default class OrgActionBar extends PureComponent<Props> {
   render() {
-    const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery } = this.props;
+    const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery, target } = this.props;
+    const linkProps = { href: linkButton.href, target: undefined };
+
+    if (target) {
+      linkProps.target = target;
+    }
 
     return (
       <div className="page-action-bar">
@@ -29,7 +35,7 @@ export default class OrgActionBar extends PureComponent<Props> {
           <LayoutSelector mode={layoutMode} onLayoutModeChanged={(mode: LayoutMode) => onSetLayoutMode(mode)} />
         </div>
         <div className="page-action-bar__spacer" />
-        <a className="btn btn-success" href={linkButton.href} target="_blank">
+        <a className="btn btn-success" {...linkProps}>
           {linkButton.title}
         </a>
       </div>

+ 17 - 0
public/app/core/components/PageLoader/PageLoader.tsx

@@ -0,0 +1,17 @@
+import React, { SFC } from 'react';
+
+interface Props {
+  pageName: string;
+}
+
+const PageLoader: SFC<Props> = ({ pageName }) => {
+  const loadingText = `Loading ${pageName}...`;
+  return (
+    <div className="page-loader-wrapper">
+      <i className="page-loader-wrapper__spinner fa fa-spinner fa-spin" />
+      <div className="page-loader-wrapper__text">{loadingText}</div>
+    </div>
+  );
+};
+
+export default PageLoader;

+ 5 - 15
public/app/core/components/PermissionList/AddPermission.tsx

@@ -50,11 +50,11 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
   };
 
   onUserSelected = (user: User) => {
-    this.setState({ userId: user ? user.id : 0 });
+    this.setState({ userId: user && !Array.isArray(user) ? user.id : 0 });
   };
 
   onTeamSelected = (team: Team) => {
-    this.setState({ teamId: team ? team.id : 0 });
+    this.setState({ teamId: team && !Array.isArray(team) ? team.id : 0 });
   };
 
   onPermissionChanged = (permission: OptionWithDescription) => {
@@ -82,7 +82,6 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
     const newItem = this.state;
     const pickerClassName = 'width-20';
     const isValid = this.isValid();
-
     return (
       <div className="gf-form-inline cta-form">
         <button className="cta-form__close btn btn-transparent" onClick={onCancel}>
@@ -107,21 +106,13 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
 
             {newItem.type === AclTarget.User ? (
               <div className="gf-form">
-                <UserPicker
-                  onSelected={this.onUserSelected}
-                  value={newItem.userId.toString()}
-                  className={pickerClassName}
-                />
+                <UserPicker onSelected={this.onUserSelected} className={pickerClassName} />
               </div>
             ) : null}
 
             {newItem.type === AclTarget.Team ? (
               <div className="gf-form">
-                <TeamPicker
-                  onSelected={this.onTeamSelected}
-                  value={newItem.teamId.toString()}
-                  className={pickerClassName}
-                />
+                <TeamPicker onSelected={this.onTeamSelected} className={pickerClassName} />
               </div>
             ) : null}
 
@@ -129,9 +120,8 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
               <DescriptionPicker
                 optionsWithDesc={dashboardPermissionLevels}
                 onSelected={this.onPermissionChanged}
-                value={newItem.permission}
                 disabled={false}
-                className={'gf-form-input--form-dropdown-right'}
+                className={'gf-form-select-box__control--menu-right'}
               />
             </div>
 

+ 2 - 2
public/app/core/components/PermissionList/DisabledPermissionListItem.tsx

@@ -26,9 +26,9 @@ export default class DisabledPermissionListItem extends Component<Props, any> {
             <DescriptionPicker
               optionsWithDesc={dashboardPermissionLevels}
               onSelected={() => {}}
-              value={item.permission}
               disabled={true}
-              className={'gf-form-input--form-dropdown-right'}
+              className={'gf-form-select-box__control--menu-right'}
+              value={item.permission}
             />
           </div>
         </td>

+ 2 - 2
public/app/core/components/PermissionList/PermissionListItem.tsx

@@ -77,9 +77,9 @@ export default class PermissionsListItem extends PureComponent<Props> {
             <DescriptionPicker
               optionsWithDesc={dashboardPermissionLevels}
               onSelected={this.onPermissionChanged}
-              value={item.permission}
               disabled={item.inherited}
-              className={'gf-form-input--form-dropdown-right'}
+              className={'gf-form-select-box__control--menu-right'}
+              value={item.permission}
             />
           </div>
         </td>

+ 18 - 49
public/app/core/components/Picker/DescriptionOption.tsx

@@ -1,56 +1,25 @@
-import React, { Component } from 'react';
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
 
-export interface Props {
-  onSelect: any;
-  onFocus: any;
-  option: any;
-  isFocused: any;
-  className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps<any> {
+  data: any;
 }
 
-class DescriptionOption extends Component<Props, any> {
-  constructor(props) {
-    super(props);
-    this.handleMouseDown = this.handleMouseDown.bind(this);
-    this.handleMouseEnter = this.handleMouseEnter.bind(this);
-    this.handleMouseMove = this.handleMouseMove.bind(this);
-  }
-
-  handleMouseDown(event) {
-    event.preventDefault();
-    event.stopPropagation();
-    this.props.onSelect(this.props.option, event);
-  }
-
-  handleMouseEnter(event) {
-    this.props.onFocus(this.props.option, event);
-  }
-
-  handleMouseMove(event) {
-    if (this.props.isFocused) {
-      return;
-    }
-    this.props.onFocus(this.props.option, event);
-  }
-
-  render() {
-    const { option, children, className } = this.props;
-    return (
-      <button
-        onMouseDown={this.handleMouseDown}
-        onMouseEnter={this.handleMouseEnter}
-        onMouseMove={this.handleMouseMove}
-        title={option.title}
-        className={`description-picker-option__button btn btn-link ${className} width-19`}
-      >
+export const Option = (props: ExtendedOptionProps) => {
+  const { children, isSelected, data, className } = props;
+  return (
+    <components.Option {...props}>
+      <div className={`description-picker-option__button btn btn-link ${className}`}>
+        {isSelected && <i className="fa fa-check pull-right" aria-hidden="true" />}
         <div className="gf-form">{children}</div>
         <div className="gf-form">
-          <div className="muted width-17">{option.description}</div>
-          {className.indexOf('is-selected') > -1 && <i className="fa fa-check" aria-hidden="true" />}
+          <div className="muted width-17">{data.description}</div>
         </div>
-      </button>
-    );
-  }
-}
+      </div>
+    </components.Option>
+  );
+};
 
-export default DescriptionOption;
+export default Option;

+ 26 - 18
public/app/core/components/Picker/DescriptionPicker.tsx

@@ -1,44 +1,52 @@
 import React, { Component } from 'react';
 import Select from 'react-select';
 import DescriptionOption from './DescriptionOption';
+import IndicatorsContainer from './IndicatorsContainer';
+import ResetStyles from './ResetStyles';
+import NoOptionsMessage from './NoOptionsMessage';
+
+export interface OptionWithDescription {
+  value: any;
+  label: string;
+  description: string;
+}
 
 export interface Props {
   optionsWithDesc: OptionWithDescription[];
   onSelected: (permission) => void;
-  value: number;
   disabled: boolean;
   className?: string;
+  value?: any;
 }
 
-export interface OptionWithDescription {
-  value: any;
-  label: string;
-  description: string;
-}
+const getSelectedOption = (optionsWithDesc, value) => optionsWithDesc.find(option => option.value === value);
 
 class DescriptionPicker extends Component<Props, any> {
   constructor(props) {
     super(props);
-    this.state = {};
   }
 
   render() {
-    const { optionsWithDesc, onSelected, value, disabled, className } = this.props;
-
+    const { optionsWithDesc, onSelected, disabled, className, value } = this.props;
+    const selectedOption = getSelectedOption(optionsWithDesc, value);
     return (
       <div className="permissions-picker">
         <Select
-          value={value}
-          valueKey="value"
-          multi={false}
-          clearable={false}
-          labelKey="label"
+          placeholder="Choose"
+          classNamePrefix={`gf-form-select-box`}
+          className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`}
           options={optionsWithDesc}
+          components={{
+            Option: DescriptionOption,
+            IndicatorsContainer,
+            NoOptionsMessage,
+          }}
+          styles={ResetStyles}
+          isDisabled={disabled}
           onChange={onSelected}
-          className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`}
-          optionComponent={DescriptionOption}
-          placeholder="Choose"
-          disabled={disabled}
+          getOptionValue={i => i.value}
+          getOptionLabel={i => i.label}
+          value={selectedOption}
         />
       </div>
     );

+ 15 - 0
public/app/core/components/Picker/IndicatorsContainer.tsx

@@ -0,0 +1,15 @@
+import React from 'react';
+import { components } from 'react-select';
+
+export const IndicatorsContainer = props => {
+  const isOpen = props.selectProps.menuIsOpen;
+  return (
+    <components.IndicatorsContainer {...props}>
+      <span
+        className={`gf-form-select-box__select-arrow ${isOpen ? `gf-form-select-box__select-arrow--reversed` : ''}`}
+      />
+    </components.IndicatorsContainer>
+  );
+};
+
+export default IndicatorsContainer;

+ 18 - 0
public/app/core/components/Picker/NoOptionsMessage.tsx

@@ -0,0 +1,18 @@
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
+
+export interface Props {
+  children: Element;
+}
+
+export const PickerOption = (props: OptionProps<any>) => {
+  const { children, className } = props;
+  return (
+    <components.Option {...props}>
+      <div className={`description-picker-option__button btn btn-link ${className}`}>{children}</div>
+    </components.Option>
+  );
+};
+
+export default PickerOption;

+ 20 - 4
public/app/core/components/Picker/PickerOption.test.tsx

@@ -3,10 +3,26 @@ import renderer from 'react-test-renderer';
 import PickerOption from './PickerOption';
 
 const model = {
-  onSelect: () => {},
-  onFocus: () => {},
-  isFocused: () => {},
-  option: {
+  cx: jest.fn(),
+  clearValue: jest.fn(),
+  onSelect: jest.fn(),
+  getStyles: jest.fn(),
+  getValue: jest.fn(),
+  hasValue: true,
+  isMulti: false,
+  options: [],
+  selectOption: jest.fn(),
+  selectProps: {},
+  setValue: jest.fn(),
+  isDisabled: false,
+  isFocused: false,
+  isSelected: false,
+  innerRef: null,
+  innerProps: null,
+  label: 'Option label',
+  type: null,
+  children: 'Model title',
+  data: {
     title: 'Model title',
     avatarUrl: 'url/to/avatar',
     label: 'User picker label',

+ 17 - 49
public/app/core/components/Picker/PickerOption.tsx

@@ -1,54 +1,22 @@
-import React, { Component } from 'react';
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
 
-export interface Props {
-  onSelect: any;
-  onFocus: any;
-  option: any;
-  isFocused: any;
-  className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps<any> {
+  data: any;
 }
 
-class UserPickerOption extends Component<Props, any> {
-  constructor(props) {
-    super(props);
-    this.handleMouseDown = this.handleMouseDown.bind(this);
-    this.handleMouseEnter = this.handleMouseEnter.bind(this);
-    this.handleMouseMove = this.handleMouseMove.bind(this);
-  }
-
-  handleMouseDown(event) {
-    event.preventDefault();
-    event.stopPropagation();
-    this.props.onSelect(this.props.option, event);
-  }
-
-  handleMouseEnter(event) {
-    this.props.onFocus(this.props.option, event);
-  }
-
-  handleMouseMove(event) {
-    if (this.props.isFocused) {
-      return;
-    }
-    this.props.onFocus(this.props.option, event);
-  }
-
-  render() {
-    const { option, children, className } = this.props;
-
-    return (
-      <button
-        onMouseDown={this.handleMouseDown}
-        onMouseEnter={this.handleMouseEnter}
-        onMouseMove={this.handleMouseMove}
-        title={option.title}
-        className={`user-picker-option__button btn btn-link ${className}`}
-      >
-        <img src={option.avatarUrl} alt={option.label} className="user-picker-option__avatar" />
+export const PickerOption = (props: ExtendedOptionProps) => {
+  const { children, data, className } = props;
+  return (
+    <components.Option {...props}>
+      <div className={`description-picker-option__button btn btn-link ${className}`}>
+        {data.avatarUrl && <img src={data.avatarUrl} alt={data.label} className="user-picker-option__avatar" />}
         {children}
-      </button>
-    );
-  }
-}
+      </div>
+    </components.Option>
+  );
+};
 
-export default UserPickerOption;
+export default PickerOption;

+ 23 - 0
public/app/core/components/Picker/ResetStyles.tsx

@@ -0,0 +1,23 @@
+export default {
+  clearIndicator: () => ({}),
+  container: () => ({}),
+  control: () => ({}),
+  dropdownIndicator: () => ({}),
+  group: () => ({}),
+  groupHeading: () => ({}),
+  indicatorsContainer: () => ({}),
+  indicatorSeparator: () => ({}),
+  input: () => ({}),
+  loadingIndicator: () => ({}),
+  loadingMessage: () => ({}),
+  menu: () => ({}),
+  menuList: () => ({}),
+  multiValue: () => ({}),
+  multiValueLabel: () => ({}),
+  multiValueRemove: () => ({}),
+  noOptionsMessage: () => ({}),
+  option: () => ({}),
+  placeholder: () => ({}),
+  singleValue: () => ({}),
+  valueContainer: () => ({}),
+};

+ 30 - 25
public/app/core/components/Picker/TeamPicker.tsx

@@ -1,24 +1,26 @@
 import React, { Component } from 'react';
-import Select from 'react-select';
+import AsyncSelect from 'react-select/lib/Async';
 import PickerOption from './PickerOption';
 import { debounce } from 'lodash';
 import { getBackendSrv } from 'app/core/services/backend_srv';
+import ResetStyles from './ResetStyles';
+import IndicatorsContainer from './IndicatorsContainer';
+import NoOptionsMessage from './NoOptionsMessage';
+
+export interface Team {
+  id: number;
+  label: string;
+  name: string;
+  avatarUrl: string;
+}
 
 export interface Props {
   onSelected: (team: Team) => void;
-  value?: string;
   className?: string;
 }
 
 export interface State {
-  isLoading;
-}
-
-export interface Team {
-  id: number;
-  label: string;
-  name: string;
-  avatarUrl: string;
+  isLoading: boolean;
 }
 
 export class TeamPicker extends Component<Props, State> {
@@ -31,7 +33,7 @@ export class TeamPicker extends Component<Props, State> {
 
     this.debouncedSearch = debounce(this.search, 300, {
       leading: true,
-      trailing: false,
+      trailing: true,
     });
   }
 
@@ -39,7 +41,7 @@ export class TeamPicker extends Component<Props, State> {
     const backendSrv = getBackendSrv();
     this.setState({ isLoading: true });
 
-    return backendSrv.get(`/api/teams/search?perpage=50&page=1&query=${query}`).then(result => {
+    return backendSrv.get(`/api/teams/search?perpage=10&page=1&query=${query}`).then(result => {
       const teams = result.teams.map(team => {
         return {
           id: team.id,
@@ -50,31 +52,34 @@ export class TeamPicker extends Component<Props, State> {
       });
 
       this.setState({ isLoading: false });
-      return { options: teams };
+      return teams;
     });
   }
 
   render() {
-    const { onSelected, value, className } = this.props;
+    const { onSelected, className } = this.props;
     const { isLoading } = this.state;
-
     return (
       <div className="user-picker">
-        <Select.Async
-          valueKey="id"
-          multi={false}
-          labelKey="label"
-          cache={false}
+        <AsyncSelect
+          classNamePrefix={`gf-form-select-box`}
+          isMulti={false}
           isLoading={isLoading}
+          defaultOptions={true}
           loadOptions={this.debouncedSearch}
-          loadingPlaceholder="Loading..."
-          noResultsText="No teams found"
           onChange={onSelected}
           className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`}
-          optionComponent={PickerOption}
+          styles={ResetStyles}
+          components={{
+            Option: PickerOption,
+            IndicatorsContainer,
+            NoOptionsMessage,
+          }}
           placeholder="Select a team"
-          value={value}
-          autosize={true}
+          loadingMessage={() => 'Loading...'}
+          noOptionsMessage={() => 'No teams found'}
+          getOptionValue={i => i.id}
+          getOptionLabel={i => i.label}
         />
       </div>
     );

+ 26 - 23
public/app/core/components/Picker/UserPicker.tsx

@@ -1,13 +1,15 @@
 import React, { Component } from 'react';
-import Select from 'react-select';
+import AsyncSelect from 'react-select/lib/Async';
 import PickerOption from './PickerOption';
 import { debounce } from 'lodash';
 import { getBackendSrv } from 'app/core/services/backend_srv';
 import { User } from 'app/types';
+import ResetStyles from './ResetStyles';
+import IndicatorsContainer from './IndicatorsContainer';
+import NoOptionsMessage from './NoOptionsMessage';
 
 export interface Props {
   onSelected: (user: User) => void;
-  value?: string;
   className?: string;
 }
 
@@ -31,20 +33,17 @@ export class UserPicker extends Component<Props, State> {
 
   search(query?: string) {
     const backendSrv = getBackendSrv();
-
     this.setState({ isLoading: true });
 
     return backendSrv
       .get(`/api/org/users?query=${query}&limit=10`)
       .then(result => {
-        return {
-          options: result.map(user => ({
-            id: user.userId,
-            label: `${user.login} - ${user.email}`,
-            avatarUrl: user.avatarUrl,
-            login: user.login,
-          })),
-        };
+        return result.map(user => ({
+          id: user.userId,
+          label: `${user.login} - ${user.email}`,
+          avatarUrl: user.avatarUrl,
+          login: user.login,
+        }));
       })
       .finally(() => {
         this.setState({ isLoading: false });
@@ -52,26 +51,30 @@ export class UserPicker extends Component<Props, State> {
   }
 
   render() {
-    const { value, className } = this.props;
+    const { className, onSelected } = this.props;
     const { isLoading } = this.state;
 
     return (
       <div className="user-picker">
-        <Select.Async
-          valueKey="id"
-          multi={false}
-          labelKey="label"
-          cache={false}
+        <AsyncSelect
+          classNamePrefix={`gf-form-select-box`}
+          isMulti={false}
           isLoading={isLoading}
+          defaultOptions={true}
           loadOptions={this.debouncedSearch}
-          loadingPlaceholder="Loading..."
-          noResultsText="No users found"
-          onChange={this.props.onSelected}
+          onChange={onSelected}
           className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`}
-          optionComponent={PickerOption}
+          styles={ResetStyles}
+          components={{
+            Option: PickerOption,
+            IndicatorsContainer,
+            NoOptionsMessage,
+          }}
           placeholder="Select user"
-          value={value}
-          autosize={true}
+          loadingMessage={() => 'Loading...'}
+          noOptionsMessage={() => 'No users found'}
+          getOptionValue={i => i.id}
+          getOptionLabel={i => i.label}
         />
       </div>
     );

+ 12 - 13
public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap

@@ -1,17 +1,16 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP
 
 exports[`PickerOption renders correctly 1`] = `
-<button
-  className="user-picker-option__button btn btn-link class-for-user-picker"
-  onMouseDown={[Function]}
-  onMouseEnter={[Function]}
-  onMouseMove={[Function]}
-  title="Model title"
->
-  <img
-    alt="User picker label"
-    className="user-picker-option__avatar"
-    src="url/to/avatar"
-  />
-</button>
+<div>
+  <div
+    className="description-picker-option__button btn btn-link class-for-user-picker"
+  >
+    <img
+      alt="User picker label"
+      className="user-picker-option__avatar"
+      src="url/to/avatar"
+    />
+    Model title
+  </div>
+</div>
 `;

+ 85 - 55
public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap

@@ -5,85 +5,115 @@ exports[`TeamPicker renders correctly 1`] = `
   className="user-picker"
 >
   <div
-    className="Select gf-form-input gf-form-input--form-dropdown  is-clearable is-loading is-searchable Select--single"
+    className="css-0 gf-form-input gf-form-input--form-dropdown"
+    onKeyDown={[Function]}
   >
     <div
-      className="Select-control"
-      onKeyDown={[Function]}
+      className="css-0 gf-form-select-box__control"
       onMouseDown={[Function]}
       onTouchEnd={[Function]}
-      onTouchMove={[Function]}
-      onTouchStart={[Function]}
     >
       <div
-        className="Select-multi-value-wrapper"
-        id="react-select-2--value"
+        className="css-0 gf-form-select-box__value-container"
       >
         <div
-          className="Select-placeholder"
+          className="css-0 gf-form-select-box__placeholder"
         >
-          Loading...
+          Select a team
         </div>
         <div
-          className="Select-input"
-          style={
-            Object {
-              "display": "inline-block",
-            }
-          }
+          className="css-0"
         >
-          <input
-            aria-activedescendant="react-select-2--value"
-            aria-expanded="false"
-            aria-haspopup="false"
-            aria-owns=""
-            onBlur={[Function]}
-            onChange={[Function]}
-            onFocus={[Function]}
-            required={false}
-            role="combobox"
-            style={
-              Object {
-                "boxSizing": "content-box",
-                "width": "5px",
-              }
-            }
-            value=""
-          />
           <div
+            className="gf-form-select-box__input"
             style={
               Object {
-                "height": 0,
-                "left": 0,
-                "overflow": "scroll",
-                "position": "absolute",
-                "top": 0,
-                "visibility": "hidden",
-                "whiteSpace": "pre",
+                "display": "inline-block",
               }
             }
           >
-            
+            <input
+              aria-autocomplete="list"
+              autoCapitalize="none"
+              autoComplete="off"
+              autoCorrect="off"
+              disabled={false}
+              id="react-select-2-input"
+              onBlur={[Function]}
+              onChange={[Function]}
+              onFocus={[Function]}
+              spellCheck="false"
+              style={
+                Object {
+                  "background": 0,
+                  "border": 0,
+                  "boxSizing": "content-box",
+                  "color": "inherit",
+                  "fontSize": "inherit",
+                  "opacity": 1,
+                  "outline": 0,
+                  "padding": 0,
+                  "width": "1px",
+                }
+              }
+              tabIndex="0"
+              theme={
+                Object {
+                  "borderRadius": 4,
+                  "colors": Object {
+                    "danger": "#DE350B",
+                    "dangerLight": "#FFBDAD",
+                    "neutral0": "hsl(0, 0%, 100%)",
+                    "neutral10": "hsl(0, 0%, 90%)",
+                    "neutral20": "hsl(0, 0%, 80%)",
+                    "neutral30": "hsl(0, 0%, 70%)",
+                    "neutral40": "hsl(0, 0%, 60%)",
+                    "neutral5": "hsl(0, 0%, 95%)",
+                    "neutral50": "hsl(0, 0%, 50%)",
+                    "neutral60": "hsl(0, 0%, 40%)",
+                    "neutral70": "hsl(0, 0%, 30%)",
+                    "neutral80": "hsl(0, 0%, 20%)",
+                    "neutral90": "hsl(0, 0%, 10%)",
+                    "primary": "#2684FF",
+                    "primary25": "#DEEBFF",
+                    "primary50": "#B2D4FF",
+                    "primary75": "#4C9AFF",
+                  },
+                  "spacing": Object {
+                    "baseUnit": 4,
+                    "controlHeight": 38,
+                    "menuGutter": 8,
+                  },
+                }
+              }
+              type="text"
+              value=""
+            />
+            <div
+              style={
+                Object {
+                  "height": 0,
+                  "left": 0,
+                  "overflow": "scroll",
+                  "position": "absolute",
+                  "top": 0,
+                  "visibility": "hidden",
+                  "whiteSpace": "pre",
+                }
+              }
+            >
+              
+            </div>
           </div>
         </div>
       </div>
-      <span
-        aria-hidden="true"
-        className="Select-loading-zone"
-      >
-        <span
-          className="Select-loading"
-        />
-      </span>
-      <span
-        className="Select-arrow-zone"
-        onMouseDown={[Function]}
+      <div
+        className="css-0 gf-form-select-box__indicators"
       >
         <span
-          className="Select-arrow"
-          onMouseDown={[Function]}
+          className="gf-form-select-box__select-arrow "
         />
-      </span>
+      </div>
     </div>
   </div>
 </div>

+ 85 - 55
public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap

@@ -5,85 +5,115 @@ exports[`UserPicker renders correctly 1`] = `
   className="user-picker"
 >
   <div
-    className="Select gf-form-input gf-form-input--form-dropdown  is-clearable is-loading is-searchable Select--single"
+    className="css-0 gf-form-input gf-form-input--form-dropdown"
+    onKeyDown={[Function]}
   >
     <div
-      className="Select-control"
-      onKeyDown={[Function]}
+      className="css-0 gf-form-select-box__control"
       onMouseDown={[Function]}
       onTouchEnd={[Function]}
-      onTouchMove={[Function]}
-      onTouchStart={[Function]}
     >
       <div
-        className="Select-multi-value-wrapper"
-        id="react-select-2--value"
+        className="css-0 gf-form-select-box__value-container"
       >
         <div
-          className="Select-placeholder"
+          className="css-0 gf-form-select-box__placeholder"
         >
-          Loading...
+          Select user
         </div>
         <div
-          className="Select-input"
-          style={
-            Object {
-              "display": "inline-block",
-            }
-          }
+          className="css-0"
         >
-          <input
-            aria-activedescendant="react-select-2--value"
-            aria-expanded="false"
-            aria-haspopup="false"
-            aria-owns=""
-            onBlur={[Function]}
-            onChange={[Function]}
-            onFocus={[Function]}
-            required={false}
-            role="combobox"
-            style={
-              Object {
-                "boxSizing": "content-box",
-                "width": "5px",
-              }
-            }
-            value=""
-          />
           <div
+            className="gf-form-select-box__input"
             style={
               Object {
-                "height": 0,
-                "left": 0,
-                "overflow": "scroll",
-                "position": "absolute",
-                "top": 0,
-                "visibility": "hidden",
-                "whiteSpace": "pre",
+                "display": "inline-block",
               }
             }
           >
-            
+            <input
+              aria-autocomplete="list"
+              autoCapitalize="none"
+              autoComplete="off"
+              autoCorrect="off"
+              disabled={false}
+              id="react-select-2-input"
+              onBlur={[Function]}
+              onChange={[Function]}
+              onFocus={[Function]}
+              spellCheck="false"
+              style={
+                Object {
+                  "background": 0,
+                  "border": 0,
+                  "boxSizing": "content-box",
+                  "color": "inherit",
+                  "fontSize": "inherit",
+                  "opacity": 1,
+                  "outline": 0,
+                  "padding": 0,
+                  "width": "1px",
+                }
+              }
+              tabIndex="0"
+              theme={
+                Object {
+                  "borderRadius": 4,
+                  "colors": Object {
+                    "danger": "#DE350B",
+                    "dangerLight": "#FFBDAD",
+                    "neutral0": "hsl(0, 0%, 100%)",
+                    "neutral10": "hsl(0, 0%, 90%)",
+                    "neutral20": "hsl(0, 0%, 80%)",
+                    "neutral30": "hsl(0, 0%, 70%)",
+                    "neutral40": "hsl(0, 0%, 60%)",
+                    "neutral5": "hsl(0, 0%, 95%)",
+                    "neutral50": "hsl(0, 0%, 50%)",
+                    "neutral60": "hsl(0, 0%, 40%)",
+                    "neutral70": "hsl(0, 0%, 30%)",
+                    "neutral80": "hsl(0, 0%, 20%)",
+                    "neutral90": "hsl(0, 0%, 10%)",
+                    "primary": "#2684FF",
+                    "primary25": "#DEEBFF",
+                    "primary50": "#B2D4FF",
+                    "primary75": "#4C9AFF",
+                  },
+                  "spacing": Object {
+                    "baseUnit": 4,
+                    "controlHeight": 38,
+                    "menuGutter": 8,
+                  },
+                }
+              }
+              type="text"
+              value=""
+            />
+            <div
+              style={
+                Object {
+                  "height": 0,
+                  "left": 0,
+                  "overflow": "scroll",
+                  "position": "absolute",
+                  "top": 0,
+                  "visibility": "hidden",
+                  "whiteSpace": "pre",
+                }
+              }
+            >
+              
+            </div>
           </div>
         </div>
       </div>
-      <span
-        aria-hidden="true"
-        className="Select-loading-zone"
-      >
-        <span
-          className="Select-loading"
-        />
-      </span>
-      <span
-        className="Select-arrow-zone"
-        onMouseDown={[Function]}
+      <div
+        className="css-0 gf-form-select-box__indicators"
       >
         <span
-          className="Select-arrow"
-          onMouseDown={[Function]}
+          className="gf-form-select-box__select-arrow "
         />
-      </span>
+      </div>
     </div>
   </div>
 </div>

+ 2 - 7
public/app/core/components/TagFilter/TagBadge.tsx

@@ -5,17 +5,12 @@ export interface Props {
   label: string;
   removeIcon: boolean;
   count: number;
-  onClick: any;
+  onClick?: any;
 }
 
 export class TagBadge extends React.Component<Props, any> {
   constructor(props) {
     super(props);
-    this.onClick = this.onClick.bind(this);
-  }
-
-  onClick(event) {
-    this.props.onClick(event);
   }
 
   render() {
@@ -28,7 +23,7 @@ export class TagBadge extends React.Component<Props, any> {
     const countLabel = count !== 0 && <span className="tag-count-label">{`(${count})`}</span>;
 
     return (
-      <span className={`label label-tag`} onClick={this.onClick} style={tagStyle}>
+      <span className={`label label-tag`} style={tagStyle}>
         {removeIcon && <i className="fa fa-remove" />}
         {label} {countLabel}
       </span>

Algunos archivos no se mostraron porque demasiados archivos cambiaron en este cambio