Browse Source

Merge remote-tracking branch 'origin/master' into davkal/explore-rate-hinting

Marcus Efraimsson 7 năm trước
mục cha
commit
75db4d8eee
100 tập tin đã thay đổi với 1303 bổ sung và 309 xóa
  1. 1 1
      .bra.toml
  2. 7 0
      .circleci/config.yml
  3. 3 3
      .github/CONTRIBUTING.md
  4. 1 1
      .gitignore
  5. 0 13
      .jscs.json
  6. 0 37
      .jshintrc
  7. 20 3
      CHANGELOG.md
  8. 0 1
      Gruntfile.js
  9. 6 15
      README.md
  10. 15 0
      conf/defaults.ini
  11. 3 0
      conf/ldap.toml
  12. 4 0
      conf/sample.ini
  13. 1 1
      devenv/bulk-dashboards/bulk-dashboards.yaml
  14. 207 17
      devenv/dev-dashboards/datasource_tests_mssql_unittest.json
  15. 199 15
      devenv/dev-dashboards/datasource_tests_mysql_unittest.json
  16. 187 15
      devenv/dev-dashboards/datasource_tests_postgres_unittest.json
  17. 2 2
      devenv/setup.sh
  18. 2 0
      docs/sources/features/datasources/cloudwatch.md
  19. 2 2
      docs/sources/features/datasources/elasticsearch.md
  20. 2 0
      docs/sources/features/datasources/mssql.md
  21. 2 0
      docs/sources/features/datasources/mysql.md
  22. 4 0
      docs/sources/features/datasources/postgres.md
  23. 4 4
      docs/sources/features/datasources/prometheus.md
  24. 2 2
      docs/sources/guides/basic_concepts.md
  25. 0 1
      docs/sources/http_api/alerting.md
  26. 1 1
      docs/sources/http_api/dashboard.md
  27. 1 1
      docs/sources/http_api/folder.md
  28. 33 0
      docs/sources/http_api/user.md
  29. 103 1
      docs/sources/installation/configuration.md
  30. 8 3
      docs/sources/installation/docker.md
  31. 3 0
      docs/sources/installation/ldap.md
  32. 8 11
      docs/sources/project/building_from_source.md
  33. 23 9
      docs/sources/reference/templating.md
  34. 1 1
      jest.config.js
  35. 0 40
      karma.conf.js
  36. 1 15
      package.json
  37. 1 3
      packaging/docker/README.md
  38. 2 2
      packaging/docker/push_to_docker_hub.sh
  39. 7 1
      packaging/docker/run.sh
  40. 1 0
      pkg/api/api.go
  41. 9 2
      pkg/api/pluginproxy/ds_proxy.go
  42. 28 8
      pkg/api/pluginproxy/ds_proxy_test.go
  43. 15 0
      pkg/api/user.go
  44. 10 0
      pkg/login/ldap.go
  45. 2 0
      pkg/login/ldap_settings.go
  46. 1 0
      pkg/models/models.go
  47. 1 2
      pkg/services/alerting/notifier.go
  48. 1 1
      pkg/services/alerting/notifiers/slack.go
  49. 3 3
      pkg/services/provisioning/datasources/config_reader.go
  50. 14 0
      pkg/services/provisioning/datasources/config_reader_test.go
  51. 1 1
      pkg/services/provisioning/datasources/datasources.go
  52. 25 0
      pkg/services/provisioning/datasources/testdata/multiple-org-default/config.yaml
  53. 132 0
      pkg/social/gitlab_oauth.go
  54. 15 1
      pkg/social/social.go
  55. 24 16
      pkg/tsdb/mssql/macros.go
  56. 12 0
      pkg/tsdb/mssql/macros_test.go
  57. 24 15
      pkg/tsdb/mysql/macros.go
  58. 12 0
      pkg/tsdb/mysql/macros_test.go
  59. 35 20
      pkg/tsdb/postgres/macros.go
  60. 32 1
      pkg/tsdb/postgres/macros_test.go
  61. 3 1
      pkg/tsdb/postgres/postgres.go
  62. 1 1
      pkg/tsdb/postgres/postgres_test.go
  63. 21 0
      pkg/tsdb/sql_engine.go
  64. 0 0
      public/app/containers/AlertRuleList/AlertRuleList.test.tsx
  65. 0 0
      public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap
  66. 7 1
      public/app/containers/Explore/Explore.tsx
  67. 0 0
      public/app/containers/Explore/PromQueryField.test.tsx
  68. 1 1
      public/app/containers/Explore/QueryField.tsx
  69. 0 0
      public/app/containers/Explore/TimePicker.test.tsx
  70. 0 0
      public/app/containers/Explore/slate-plugins/braces.test.ts
  71. 0 0
      public/app/containers/Explore/slate-plugins/clear.test.ts
  72. 0 0
      public/app/containers/Explore/utils/prometheus.test.ts
  73. 0 0
      public/app/containers/ManageDashboards/FolderSettings.test.tsx
  74. 0 0
      public/app/containers/ServerStats/ServerStats.test.tsx
  75. 0 0
      public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap
  76. 3 2
      public/app/containers/Teams/TeamMembers.tsx
  77. 0 0
      public/app/core/components/DeleteButton/DeleteButton.test.tsx
  78. 0 0
      public/app/core/components/EmptyListCTA/EmptyListCTA.test.tsx
  79. 0 0
      public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.test.tsx.snap
  80. 0 0
      public/app/core/components/PageHeader/PageHeader.test.tsx
  81. 0 0
      public/app/core/components/Permissions/AddPermissions.test.tsx
  82. 0 0
      public/app/core/components/Picker/PickerOption.test.tsx
  83. 0 0
      public/app/core/components/Picker/TeamPicker.test.tsx
  84. 0 0
      public/app/core/components/Picker/UserPicker.test.tsx
  85. 0 0
      public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
  86. 0 0
      public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
  87. 0 0
      public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap
  88. 0 0
      public/app/core/components/Tooltip/Popover.test.tsx
  89. 0 0
      public/app/core/components/Tooltip/Tooltip.test.tsx
  90. 0 0
      public/app/core/components/Tooltip/__snapshots__/Popover.test.tsx.snap
  91. 0 0
      public/app/core/components/Tooltip/__snapshots__/Tooltip.test.tsx.snap
  92. 1 0
      public/app/core/components/help/help.ts
  93. 3 12
      public/app/core/services/keybindingSrv.ts
  94. 0 0
      public/app/core/specs/ColorPalette.test.tsx
  95. 0 0
      public/app/core/specs/PasswordStrength.test.tsx
  96. 0 0
      public/app/core/specs/__snapshots__/ColorPalette.test.tsx.snap
  97. 0 0
      public/app/core/specs/backend_srv.test.ts
  98. 0 0
      public/app/core/specs/datemath.test.ts
  99. 0 0
      public/app/core/specs/emitter.test.ts
  100. 0 0
      public/app/core/specs/file_export.test.ts

+ 1 - 1
.bra.toml

@@ -9,7 +9,7 @@ watch_dirs = [
 	"$WORKDIR/public/views",
 	"$WORKDIR/public/views",
 	"$WORKDIR/conf",
 	"$WORKDIR/conf",
 ]
 ]
-watch_exts = [".go", ".ini", ".toml"]
+watch_exts = [".go", ".ini", ".toml", ".template.html"]
 build_delay = 1500
 build_delay = 1500
 cmds = [
 cmds = [
   ["go", "run", "build.go", "-dev", "build-server"],
   ["go", "run", "build.go", "-dev", "build-server"],

+ 7 - 0
.circleci/config.yml

@@ -104,6 +104,7 @@ jobs:
       - run:
       - run:
           name: yarn install
           name: yarn install
           command: 'yarn install --pure-lockfile --no-progress'
           command: 'yarn install --pure-lockfile --no-progress'
+          no_output_timeout: 15m
       - save_cache:
       - save_cache:
           key: dependency-cache-{{ checksum "yarn.lock" }}
           key: dependency-cache-{{ checksum "yarn.lock" }}
           paths:
           paths:
@@ -146,6 +147,12 @@ jobs:
       - run:
       - run:
           name: sign packages
           name: sign packages
           command: './scripts/build/sign_packages.sh'
           command: './scripts/build/sign_packages.sh'
+      - run:
+          name: verify signed packages
+          command: |
+            mkdir -p ~/.rpmdb/pubkeys
+            curl -s https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana > ~/.rpmdb/pubkeys/grafana.key
+            ./scripts/build/verify_signed_packages.sh dist/*.rpm
       - run:
       - run:
           name: sha-sum packages
           name: sha-sum packages
           command: 'go run build.go sha-dist'
           command: 'go run build.go sha-dist'

+ 3 - 3
.github/CONTRIBUTING.md

@@ -2,12 +2,12 @@ Follow the setup guide in README.md
 
 
 ### Rebuild frontend assets on source change
 ### Rebuild frontend assets on source change
 ```
 ```
-grunt && grunt watch
+yarn watch
 ```
 ```
 
 
 ### Rerun tests on source change
 ### Rerun tests on source change
 ```
 ```
-grunt karma:dev
+yarn jest
 ```
 ```
 
 
 ### Run tests for backend assets before commit
 ### Run tests for backend assets before commit
@@ -17,6 +17,6 @@ test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)'
 
 
 ### Run tests for frontend assets before commit
 ### Run tests for frontend assets before commit
 ```
 ```
-npm test
+yarn test
 go test -v ./pkg/...
 go test -v ./pkg/...
 ```
 ```

+ 1 - 1
.gitignore

@@ -71,4 +71,4 @@ debug.test
 /vendor/**/appengine*
 /vendor/**/appengine*
 *.orig
 *.orig
 
 
-/devenv/dashboards/bulk-testing/*.json
+/devenv/bulk-dashboards/*.json

+ 0 - 13
.jscs.json

@@ -1,13 +0,0 @@
-{
-    "disallowImplicitTypeConversion": ["string"],
-    "disallowKeywords": ["with"],
-    "disallowMultipleLineBreaks": true,
-    "disallowMixedSpacesAndTabs": true,
-    "disallowTrailingWhitespace": true,
-    "requireSpacesInFunctionExpression": {
-        "beforeOpeningCurlyBrace": true
-    },
-    "disallowSpacesInsideArrayBrackets": true,
-    "disallowSpacesInsideParentheses": true,
-    "validateIndentation": 2
-}

+ 0 - 37
.jshintrc

@@ -1,37 +0,0 @@
-{
-  "browser": true,
-  "esversion": 6,
-  "bitwise":false,
-  "curly": true,
-  "eqnull": true,
-  "strict": false,
-  "devel": true,
-  "eqeqeq": true,
-  "forin": false,
-  "immed": true,
-  "supernew": true,
-  "expr": true,
-  "indent": 2,
-  "latedef": false,
-  "newcap": true,
-  "noarg": true,
-  "noempty": true,
-  "undef": true,
-  "boss": true,
-  "trailing": true,
-  "laxbreak": true,
-  "laxcomma": true,
-  "sub": true,
-  "unused": true,
-  "maxdepth": 6,
-  "maxlen": 140,
-
-  "globals": {
-    "System": true,
-    "Promise": true,
-    "define": true,
-    "require": true,
-    "Chromath": false,
-    "setImmediate": true
-  }
-}

+ 20 - 3
CHANGELOG.md

@@ -1,21 +1,26 @@
 # 5.3.0 (unreleased)
 # 5.3.0 (unreleased)
 
 
+* **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht)
 * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
 * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
 * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
 * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
 * **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622)
 * **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622)
 * **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
 * **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
 * **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
 * **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
+* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
+* **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi)
+* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
 
 
 ### Minor
 ### Minor
 
 
 * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
 * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
 * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
 * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
+* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705)
 * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
 * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
-* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
 * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
 * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
-* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
+* **Prometheus**: Add $__interval, $__interval_ms, $__range, $__range_s & $__range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
 * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
 * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
+* **Postgres/MySQL/MSSQL**: New $__unixEpochGroup and $__unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
@@ -25,7 +30,7 @@
 * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
 * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
 * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
-* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
+* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
 * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
 * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
 * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
 * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
 * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
 * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
@@ -33,13 +38,21 @@
 * **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z)
 * **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z)
 * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
 * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
 * **Table**: Fix link color when using light theme and thresholds in use [#12766](https://github.com/grafana/grafana/issues/12766)
 * **Table**: Fix link color when using light theme and thresholds in use [#12766](https://github.com/grafana/grafana/issues/12766)
+* **Table**: Fix for useless horizontal scrollbar for table panel [#9964](https://github.com/grafana/grafana/issues/9964)
+* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
 * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
 * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
 * **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
+* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
 * **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
 * **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
 * **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
 * **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
 * **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
 * **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
 * **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
 * **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
+* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
+* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
+* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
+* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
 
 
 ### Breaking changes
 ### Breaking changes
 
 
@@ -51,6 +64,10 @@ These are new features that's still being worked on and are in an experimental p
 
 
 * **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)
 * **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)
 
 
+### Tech
+
+* **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224)
+
 # 5.2.2 (2018-07-25)
 # 5.2.2 (2018-07-25)
 
 
 ### Minor
 ### Minor

+ 0 - 1
Gruntfile.js

@@ -1,4 +1,3 @@
-/* jshint node:true */
 'use strict';
 'use strict';
 module.exports = function (grunt) {
 module.exports = function (grunt) {
   var os = require('os');
   var os = require('os');

+ 6 - 15
README.md

@@ -43,7 +43,7 @@ To build the assets, rebuild on file change, and serve them by Grafana's webserv
 ```bash
 ```bash
 npm install -g yarn
 npm install -g yarn
 yarn install --pure-lockfile
 yarn install --pure-lockfile
-yarn run watch
+yarn watch
 ```
 ```
 
 
 Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them by webpack-dev-server (http://localhost:3333):
 Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them by webpack-dev-server (http://localhost:3333):
@@ -56,12 +56,7 @@ Note: HMR for Angular is not supported. If you edit files in the Angular part of
 
 
 Run tests
 Run tests
 ```bash
 ```bash
-yarn run jest
-```
-
-Run karma tests
-```bash
-yarn run karma
+yarn jest
 ```
 ```
 
 
 ### Recompile backend on source change
 ### Recompile backend on source change
@@ -98,17 +93,13 @@ In your custom.ini uncomment (remove the leading `;`) sign. And set `app_mode =
 #### Frontend
 #### Frontend
 Execute all frontend tests
 Execute all frontend tests
 ```bash
 ```bash
-yarn run test
+yarn test
 ```
 ```
 
 
-Writing & watching frontend tests (we have two test runners)
+Writing & watching frontend tests
 
 
-- jest for all new tests that do not require browser context (React+more)
-   - Start watcher: `yarn run jest`
-   - Jest will run all test files that end with the name ".jest.ts"
-- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
-  - Start watcher: `yarn run karma`
-  - Karma+Mocha runs all files that end with the name "_specs.ts".
+- Start watcher: `yarn jest`
+- Jest will run all test files that end with the name ".test.ts"
 
 
 #### Backend
 #### Backend
 ```bash
 ```bash

+ 15 - 0
conf/defaults.ini

@@ -270,6 +270,18 @@ api_url = https://api.github.com/user
 team_ids =
 team_ids =
 allowed_organizations =
 allowed_organizations =
 
 
+#################################### GitLab Auth #########################
+[auth.gitlab]
+enabled = false
+allow_sign_up = true
+client_id = some_id
+client_secret = some_secret
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups =
+
 #################################### Google Auth #########################
 #################################### Google Auth #########################
 [auth.google]
 [auth.google]
 enabled = false
 enabled = false
@@ -315,6 +327,9 @@ api_url =
 team_ids =
 team_ids =
 allowed_organizations =
 allowed_organizations =
 tls_skip_verify_insecure = false
 tls_skip_verify_insecure = false
+tls_client_cert =
+tls_client_key =
+tls_client_ca =
 
 
 #################################### Basic Auth ##########################
 #################################### Basic Auth ##########################
 [auth.basic]
 [auth.basic]

+ 3 - 0
conf/ldap.toml

@@ -15,6 +15,9 @@ start_tls = false
 ssl_skip_verify = false
 ssl_skip_verify = false
 # set to the path to your root CA certificate or leave unset to use system defaults
 # set to the path to your root CA certificate or leave unset to use system defaults
 # root_ca_cert = "/path/to/certificate.crt"
 # root_ca_cert = "/path/to/certificate.crt"
+# Authentication against LDAP servers requiring client certificates
+# client_cert = "/path/to/client.crt"
+# client_key = "/path/to/client.key"
 
 
 # Search user bind dn
 # Search user bind dn
 bind_dn = "cn=admin,dc=grafana,dc=org"
 bind_dn = "cn=admin,dc=grafana,dc=org"

+ 4 - 0
conf/sample.ini

@@ -272,6 +272,10 @@ log_queries =
 ;api_url = https://foo.bar/user
 ;api_url = https://foo.bar/user
 ;team_ids =
 ;team_ids =
 ;allowed_organizations =
 ;allowed_organizations =
+;tls_skip_verify_insecure = false
+;tls_client_cert =
+;tls_client_key =
+;tls_client_ca =
 
 
 #################################### Grafana.com Auth ####################
 #################################### Grafana.com Auth ####################
 [auth.grafana_com]
 [auth.grafana_com]

+ 1 - 1
devenv/bulk-dashboards/bulk-dashboards.yaml

@@ -5,5 +5,5 @@ providers:
    folder: 'Bulk dashboards'
    folder: 'Bulk dashboards'
    type: file
    type: file
    options:
    options:
-     path: devenv/dashboards/bulk-testing
+     path: devenv/bulk-dashboards
 
 

+ 207 - 17
devenv/dev-dashboards/datasource_tests_mssql_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "editable": true,
   "gnetId": null,
   "gnetId": null,
   "graphTooltip": 0,
   "graphTooltip": 0,
-  "iteration": 1533713720618,
+  "iteration": 1534507501976,
   "links": [],
   "links": [],
   "panels": [
   "panels": [
     {
     {
@@ -1197,6 +1197,196 @@
         "x": 0,
         "x": 0,
         "y": 27
         "y": 27
       },
       },
+      "id": 38,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "hideEmpty": false,
+        "hideZero": false,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(timeInt32, '$summarize'), \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(timeInt32) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__unixEpochGroup(timeInt32, '$summarize'), \n  measurement \nORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mssql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 27
+      },
+      "id": 39,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [
+        {
+          "alias": "MovingAverageValueOne",
+          "dashes": true,
+          "lines": false
+        },
+        {
+          "alias": "MovingAverageValueTwo",
+          "dashes": true,
+          "lines": false,
+          "yaxis": 1
+        }
+      ],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(timeInt32, '$summarize'), \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(timeInt32) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__unixEpochGroup(timeInt32, '$summarize')\nORDER BY 1",
+          "refId": "A"
+        },
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  time,\n  avg(valueOne) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueOne,\n  avg(valueTwo) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueTwo\nFROM\n  metric_values \nWHERE \n  $__timeFilter(time) AND \n  ($metric = 'ALL' OR measurement = $metric)\nORDER BY 1",
+          "refId": "B"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series without metric column using unixEpochGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mssql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 35
+      },
       "id": 4,
       "id": 4,
       "legend": {
       "legend": {
         "alignAsTable": true,
         "alignAsTable": true,
@@ -1282,7 +1472,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 27
+        "y": 35
       },
       },
       "id": 28,
       "id": 28,
       "legend": {
       "legend": {
@@ -1367,7 +1557,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 35
+        "y": 43
       },
       },
       "id": 19,
       "id": 19,
       "legend": {
       "legend": {
@@ -1454,7 +1644,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 35
+        "y": 43
       },
       },
       "id": 18,
       "id": 18,
       "legend": {
       "legend": {
@@ -1539,7 +1729,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 43
+        "y": 51
       },
       },
       "id": 17,
       "id": 17,
       "legend": {
       "legend": {
@@ -1626,7 +1816,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 43
+        "y": 51
       },
       },
       "id": 20,
       "id": 20,
       "legend": {
       "legend": {
@@ -1711,7 +1901,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 51
+        "y": 59
       },
       },
       "id": 29,
       "id": 29,
       "legend": {
       "legend": {
@@ -1798,7 +1988,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 51
+        "y": 59
       },
       },
       "id": 30,
       "id": 30,
       "legend": {
       "legend": {
@@ -1885,7 +2075,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 59
+        "y": 67
       },
       },
       "id": 14,
       "id": 14,
       "legend": {
       "legend": {
@@ -1973,7 +2163,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 59
+        "y": 67
       },
       },
       "id": 15,
       "id": 15,
       "legend": {
       "legend": {
@@ -2060,7 +2250,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 67
+        "y": 75
       },
       },
       "id": 25,
       "id": 25,
       "legend": {
       "legend": {
@@ -2148,7 +2338,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 67
+        "y": 75
       },
       },
       "id": 22,
       "id": 22,
       "legend": {
       "legend": {
@@ -2235,7 +2425,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 75
+        "y": 83
       },
       },
       "id": 21,
       "id": 21,
       "legend": {
       "legend": {
@@ -2323,7 +2513,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 75
+        "y": 83
       },
       },
       "id": 26,
       "id": 26,
       "legend": {
       "legend": {
@@ -2410,7 +2600,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 83
+        "y": 91
       },
       },
       "id": 23,
       "id": 23,
       "legend": {
       "legend": {
@@ -2498,7 +2688,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 83
+        "y": 91
       },
       },
       "id": 24,
       "id": 24,
       "legend": {
       "legend": {
@@ -2708,5 +2898,5 @@
   "timezone": "",
   "timezone": "",
   "title": "Datasource tests - MSSQL (unit test)",
   "title": "Datasource tests - MSSQL (unit test)",
   "uid": "GlAqcPgmz",
   "uid": "GlAqcPgmz",
-  "version": 10
+  "version": 2
 }
 }

+ 199 - 15
devenv/dev-dashboards/datasource_tests_mysql_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "editable": true,
   "gnetId": null,
   "gnetId": null,
   "graphTooltip": 0,
   "graphTooltip": 0,
-  "iteration": 1533714324007,
+  "iteration": 1534508678095,
   "links": [],
   "links": [],
   "panels": [
   "panels": [
     {
     {
@@ -1191,6 +1191,190 @@
         "x": 0,
         "x": 0,
         "y": 27
         "y": 27
       },
       },
+      "id": 38,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "hideEmpty": false,
+        "hideZero": false,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(timeInt32, '$summarize'), \n  measurement, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(timeInt32) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1, 2",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mysql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 27
+      },
+      "id": 39,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [
+        {
+          "alias": "MovingAverageValueOne",
+          "dashes": true,
+          "lines": false
+        },
+        {
+          "alias": "MovingAverageValueTwo",
+          "dashes": true,
+          "lines": false,
+          "yaxis": 1
+        }
+      ],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(timeInt32, '$summarize'), \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(timeInt32) AND\n  measurement in($metric)\nGROUP BY 1\nORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series without metric column using unixEpochGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mysql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 35
+      },
       "id": 4,
       "id": 4,
       "legend": {
       "legend": {
         "alignAsTable": true,
         "alignAsTable": true,
@@ -1276,7 +1460,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 27
+        "y": 35
       },
       },
       "id": 28,
       "id": 28,
       "legend": {
       "legend": {
@@ -1361,7 +1545,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 35
+        "y": 43
       },
       },
       "id": 19,
       "id": 19,
       "legend": {
       "legend": {
@@ -1448,7 +1632,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 35
+        "y": 43
       },
       },
       "id": 18,
       "id": 18,
       "legend": {
       "legend": {
@@ -1533,7 +1717,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 43
+        "y": 51
       },
       },
       "id": 17,
       "id": 17,
       "legend": {
       "legend": {
@@ -1620,7 +1804,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 43
+        "y": 51
       },
       },
       "id": 20,
       "id": 20,
       "legend": {
       "legend": {
@@ -1705,7 +1889,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 51
+        "y": 59
       },
       },
       "id": 14,
       "id": 14,
       "legend": {
       "legend": {
@@ -1793,7 +1977,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 51
+        "y": 59
       },
       },
       "id": 15,
       "id": 15,
       "legend": {
       "legend": {
@@ -1880,7 +2064,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 59
+        "y": 67
       },
       },
       "id": 25,
       "id": 25,
       "legend": {
       "legend": {
@@ -1968,7 +2152,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 59
+        "y": 67
       },
       },
       "id": 22,
       "id": 22,
       "legend": {
       "legend": {
@@ -2055,7 +2239,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 67
+        "y": 75
       },
       },
       "id": 21,
       "id": 21,
       "legend": {
       "legend": {
@@ -2143,7 +2327,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 67
+        "y": 75
       },
       },
       "id": 26,
       "id": 26,
       "legend": {
       "legend": {
@@ -2230,7 +2414,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 75
+        "y": 83
       },
       },
       "id": 23,
       "id": 23,
       "legend": {
       "legend": {
@@ -2318,7 +2502,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 75
+        "y": 83
       },
       },
       "id": 24,
       "id": 24,
       "legend": {
       "legend": {
@@ -2526,5 +2710,5 @@
   "timezone": "",
   "timezone": "",
   "title": "Datasource tests - MySQL (unittest)",
   "title": "Datasource tests - MySQL (unittest)",
   "uid": "Hmf8FDkmz",
   "uid": "Hmf8FDkmz",
-  "version": 9
+  "version": 2
 }
 }

+ 187 - 15
devenv/dev-dashboards/datasource_tests_postgres_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "editable": true,
   "gnetId": null,
   "gnetId": null,
   "graphTooltip": 0,
   "graphTooltip": 0,
-  "iteration": 1533714184500,
+  "iteration": 1534507993194,
   "links": [],
   "links": [],
   "panels": [
   "panels": [
     {
     {
@@ -1179,6 +1179,178 @@
         "x": 0,
         "x": 0,
         "y": 27
         "y": 27
       },
       },
+      "id": 38,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "hideEmpty": false,
+        "hideZero": false,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(\"timeInt32\", '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(\"timeInt32\") AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1, 2",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-postgres-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 27
+      },
+      "id": 39,
+      "legend": {
+        "alignAsTable": true,
+        "avg": true,
+        "current": true,
+        "max": true,
+        "min": true,
+        "rightSide": true,
+        "show": true,
+        "total": true,
+        "values": true
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT \n  $__unixEpochGroupAlias(\"timeInt32\", '$summarize'), \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__unixEpochFilter(\"timeInt32\") AND\n  measurement in($metric)\nGROUP BY 1\nORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Multiple series without metric column using timeGroup macro ($summarize)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-postgres-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 35
+      },
       "id": 4,
       "id": 4,
       "legend": {
       "legend": {
         "alignAsTable": true,
         "alignAsTable": true,
@@ -1264,7 +1436,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 27
+        "y": 35
       },
       },
       "id": 28,
       "id": 28,
       "legend": {
       "legend": {
@@ -1349,7 +1521,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 35
+        "y": 43
       },
       },
       "id": 19,
       "id": 19,
       "legend": {
       "legend": {
@@ -1436,7 +1608,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 35
+        "y": 43
       },
       },
       "id": 18,
       "id": 18,
       "legend": {
       "legend": {
@@ -1521,7 +1693,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 43
+        "y": 51
       },
       },
       "id": 17,
       "id": 17,
       "legend": {
       "legend": {
@@ -1608,7 +1780,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 43
+        "y": 51
       },
       },
       "id": 20,
       "id": 20,
       "legend": {
       "legend": {
@@ -1693,7 +1865,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 51
+        "y": 59
       },
       },
       "id": 14,
       "id": 14,
       "legend": {
       "legend": {
@@ -1781,7 +1953,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 51
+        "y": 59
       },
       },
       "id": 15,
       "id": 15,
       "legend": {
       "legend": {
@@ -1868,7 +2040,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 59
+        "y": 67
       },
       },
       "id": 25,
       "id": 25,
       "legend": {
       "legend": {
@@ -1956,7 +2128,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 59
+        "y": 67
       },
       },
       "id": 22,
       "id": 22,
       "legend": {
       "legend": {
@@ -2043,7 +2215,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 67
+        "y": 75
       },
       },
       "id": 21,
       "id": 21,
       "legend": {
       "legend": {
@@ -2131,7 +2303,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 67
+        "y": 75
       },
       },
       "id": 26,
       "id": 26,
       "legend": {
       "legend": {
@@ -2218,7 +2390,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 0,
         "x": 0,
-        "y": 75
+        "y": 83
       },
       },
       "id": 23,
       "id": 23,
       "legend": {
       "legend": {
@@ -2306,7 +2478,7 @@
         "h": 8,
         "h": 8,
         "w": 12,
         "w": 12,
         "x": 12,
         "x": 12,
-        "y": 75
+        "y": 83
       },
       },
       "id": 24,
       "id": 24,
       "legend": {
       "legend": {
@@ -2518,5 +2690,5 @@
   "timezone": "",
   "timezone": "",
   "title": "Datasource tests - Postgres (unittest)",
   "title": "Datasource tests - Postgres (unittest)",
   "uid": "vHQdlVziz",
   "uid": "vHQdlVziz",
-  "version": 9
+  "version": 1
 }
 }

+ 2 - 2
devenv/setup.sh

@@ -7,11 +7,11 @@ bulkDashboard() {
 		COUNTER=0
 		COUNTER=0
 		MAX=400
 		MAX=400
 		while [  $COUNTER -lt $MAX ]; do
 		while [  $COUNTER -lt $MAX ]; do
-				jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + {  uid: 'uid-${COUNTER}',  title: 'title-${COUNTER}' }"
+				jsonnet -o "bulk-dashboards/dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk-dashboards/bulkdash.jsonnet'; bulkDash + {  uid: 'uid-${COUNTER}',  title: 'title-${COUNTER}' }"
 				let COUNTER=COUNTER+1
 				let COUNTER=COUNTER+1
 		done
 		done
 
 
-		ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
+		ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
 }
 }
 
 
 requiresJsonnet() {
 requiresJsonnet() {

+ 2 - 0
docs/sources/features/datasources/cloudwatch.md

@@ -115,6 +115,8 @@ and `dimension keys/values`.
 In place of `region` you can specify `default` to use the default region configured in the datasource for the query,
 In place of `region` you can specify `default` to use the default region configured in the datasource for the query,
 e.g. `metrics(AWS/DynamoDB, default)` or `dimension_values(default, ..., ..., ...)`.
 e.g. `metrics(AWS/DynamoDB, default)` or `dimension_values(default, ..., ..., ...)`.
 
 
+Read more about the available dimensions in the [CloudWatch  Metrics and Dimensions Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CW_Support_For_AWS.html).
+
 Name | Description
 Name | Description
 ------- | --------
 ------- | --------
 *regions()* | Returns a list of regions AWS provides their service.
 *regions()* | Returns a list of regions AWS provides their service.

+ 2 - 2
docs/sources/features/datasources/elasticsearch.md

@@ -58,8 +58,8 @@ a time pattern for the index name or a wildcard.
 
 
 ### Elasticsearch version
 ### Elasticsearch version
 
 
-Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences how queries are composed. Currently only 2.x and 5.x
-are supported.
+Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences how queries are composed.
+Currently the versions available is 2.x, 5.x and 5.6+ where 5.6+ means a version of 5.6 or higher, 6.3.2 for example.
 
 
 ### Min time interval
 ### Min time interval
 A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example `1m` if your data is written every minute.
 A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example `1m` if your data is written every minute.

+ 2 - 0
docs/sources/features/datasources/mssql.md

@@ -88,6 +88,8 @@ Macro example | Description
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
+*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
+*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
 
 
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 
 

+ 2 - 0
docs/sources/features/datasources/mysql.md

@@ -71,6 +71,8 @@ Macro example | Description
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
+*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
+*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
 
 
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 
 

+ 4 - 0
docs/sources/features/datasources/postgres.md

@@ -31,6 +31,7 @@ Name | Description
 *User* | Database user's login/username
 *User* | Database user's login/username
 *Password* | Database user's password
 *Password* | Database user's password
 *SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
 *SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
+*TimescaleDB* | With this option enabled Grafana will use TimescaleDB features, e.g. use ```time_bucket``` for grouping by time (only available in Grafana 5.3+).
 
 
 ### Database User Permissions (Important!)
 ### Database User Permissions (Important!)
 
 
@@ -68,6 +69,8 @@ Macro example | Description
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
+*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
+*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
 
 
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
 
 
@@ -289,4 +292,5 @@ datasources:
       password: "Password!"
       password: "Password!"
     jsonData:
     jsonData:
       sslmode: "disable" # disable/require/verify-ca/verify-full
       sslmode: "disable" # disable/require/verify-ca/verify-full
+      timescaledb: false
 ```
 ```

+ 4 - 4
docs/sources/features/datasources/prometheus.md

@@ -78,9 +78,9 @@ For details of *metric names*, *label names* and *label values* are please refer
 
 
 #### Using interval and range variables
 #### Using interval and range variables
 
 
-> Support for `$__range` and `$__range_ms` only available from Grafana v5.3
+> Support for `$__range`, `$__range_s` and `$__range_ms` only available from Grafana v5.3
 
 
-It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
+It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range`, `$__range_s` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
 `label_values` function doesn't support queries.
 `label_values` function doesn't support queries.
 
 
 Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
 Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
@@ -94,10 +94,10 @@ Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instanc
 Regex: /"([^"]+)"/
 Regex: /"([^"]+)"/
 ```
 ```
 
 
-Populate a variable with the instances having a certain state over the time range shown in the dashboard:
+Populate a variable with the instances having a certain state over the time range shown in the dashboard, using the more precise `$__range_s`:
 
 
 ```
 ```
-Query: query_result(max_over_time(<metric>[$__range]) != <state>)
+Query: query_result(max_over_time(<metric>[${__range_s}s]) != <state>)
 Regex:
 Regex:
 ```
 ```
 
 

+ 2 - 2
docs/sources/guides/basic_concepts.md

@@ -54,7 +54,7 @@ We utilize a unit abstraction so that Grafana looks great on all screens both sm
 
 
  > Note: With MaxDataPoint functionality, Grafana can show you the perfect amount of datapoints no matter your resolution or time-range.
  > Note: With MaxDataPoint functionality, Grafana can show you the perfect amount of datapoints no matter your resolution or time-range.
 
 
-Utilize the [Repeating Row functionality](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) to dynamically create or remove entire Rows (that can be filled with Panels), based on the Template variables selected.
+Utilize the [Repeating Rows functionality](/reference/templating/#repeating-rows) to dynamically create or remove entire Rows (that can be filled with Panels), based on the Template variables selected.
 
 
 Rows can be collapsed by clicking on the Row Title. If you save a Dashboard with a Row collapsed, it will save in that state and will not preload those graphs until the row is expanded.
 Rows can be collapsed by clicking on the Row Title. If you save a Dashboard with a Row collapsed, it will save in that state and will not preload those graphs until the row is expanded.
 
 
@@ -72,7 +72,7 @@ Panels like the [Graph](/reference/graph/) panel allow you to graph as many metr
 
 
 Panels can be made more dynamic by utilizing [Dashboard Templating](/reference/templating/) variable strings within the panel configuration (including queries to your Data Source configured via the Query Editor).
 Panels can be made more dynamic by utilizing [Dashboard Templating](/reference/templating/) variable strings within the panel configuration (including queries to your Data Source configured via the Query Editor).
 
 
-Utilize the [Repeating Panel](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) functionality to dynamically create or remove Panels based on the [Templating Variables](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) selected.
+Utilize the [Repeating Panel](/reference/templating/#repeating-panels) functionality to dynamically create or remove Panels based on the [Templating Variables](/reference/templating/#repeating-panels) selected.
 
 
 The time range on Panels is normally what is set in the [Dashboard time picker](/reference/timerange/) but this can be overridden by utilizing [Panel specific time overrides](/reference/timerange/#panel-time-overrides-timeshift).
 The time range on Panels is normally what is set in the [Dashboard time picker](/reference/timerange/) but this can be overridden by utilizing [Panel specific time overrides](/reference/timerange/#panel-time-overrides-timeshift).
 
 

+ 0 - 1
docs/sources/http_api/alerting.md

@@ -59,7 +59,6 @@ Content-Type: application/json
     "panelId": 1,
     "panelId": 1,
     "name": "fire place sensor",
     "name": "fire place sensor",
     "state": "alerting",
     "state": "alerting",
-    "message": "Someone is trying to break in through the fire place",
     "newStateDate": "2018-05-14T05:55:20+02:00",
     "newStateDate": "2018-05-14T05:55:20+02:00",
     "evalDate": "0001-01-01T00:00:00Z",
     "evalDate": "0001-01-01T00:00:00Z",
     "evalData": null,
     "evalData": null,

+ 1 - 1
docs/sources/http_api/dashboard.md

@@ -85,7 +85,7 @@ Status Codes:
 - **403** – Access denied
 - **403** – Access denied
 - **412** – Precondition failed
 - **412** – Precondition failed
 
 
-The **412** status code is used for explaing that you cannot create the dashboard and why.
+The **412** status code is used for explaining that you cannot create the dashboard and why.
 There can be different reasons for this:
 There can be different reasons for this:
 
 
 - The dashboard has been changed by someone else, `status=version-mismatch`
 - The dashboard has been changed by someone else, `status=version-mismatch`

+ 1 - 1
docs/sources/http_api/folder.md

@@ -223,7 +223,7 @@ Status Codes:
 - **404** – Folder not found
 - **404** – Folder not found
 - **412** – Precondition failed
 - **412** – Precondition failed
 
 
-The **412** status code is used for explaing that you cannot update the folder and why.
+The **412** status code is used for explaining that you cannot update the folder and why.
 There can be different reasons for this:
 There can be different reasons for this:
 
 
 - The folder has been changed by someone else, `status=version-mismatch`
 - The folder has been changed by someone else, `status=version-mismatch`

+ 33 - 0
docs/sources/http_api/user.md

@@ -363,6 +363,39 @@ Content-Type: application/json
 ]
 ]
 ```
 ```
 
 
+## Teams that the current User is a member of
+
+`GET /api/user/teams`
+
+Returns a list of all teams that the current user is a member of.
+
+**Example Request**:
+
+```http
+GET /api/user/teams HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+[
+  {
+    "id": 1,
+    "orgId": 1,
+    "name": "MyTestTeam",
+    "email": "",
+    "avatarUrl": "\/avatar\/3f49c15916554246daa714b9bd0ee398",
+    "memberCount": 1
+  }
+]
+```
+
 ## Star a dashboard
 ## Star a dashboard
 
 
 `POST /api/user/stars/dashboard/:dashboardId`
 `POST /api/user/stars/dashboard/:dashboardId`

+ 103 - 1
docs/sources/installation/configuration.md

@@ -84,7 +84,7 @@ command line in the init.d script or the systemd service file.
 
 
 ### temp_data_lifetime
 ### temp_data_lifetime
 
 
-How long temporary images in `data` directory should be kept. Defaults to: `24h`. Supported modifiers: `h` (hours), 
+How long temporary images in `data` directory should be kept. Defaults to: `24h`. Supported modifiers: `h` (hours),
 `m` (minutes), for example: `168h`, `30m`, `10h30m`. Use `0` to never clean up temporary files.
 `m` (minutes), for example: `168h`, `30m`, `10h30m`. Use `0` to never clean up temporary files.
 
 
 ### logs
 ### logs
@@ -430,6 +430,108 @@ allowed_organizations = github google
 
 
 <hr>
 <hr>
 
 
+## [auth.gitlab]
+
+> Only available in Grafana v5.3+.
+
+You need to [create a GitLab OAuth
+application](https://docs.gitlab.com/ce/integration/oauth_provider.html).
+Choose a descriptive *Name*, and use the following *Redirect URI*:
+
+```
+https://grafana.example.com/login/gitlab
+```
+
+where `https://grafana.example.com` is the URL you use to connect to Grafana.
+Adjust it as needed if you don't use HTTPS or if you use a different port; for
+instance, if you access Grafana at `http://203.0.113.31:3000`, you should use
+
+```
+http://203.0.113.31:3000/login/gitlab
+```
+
+Finally, select *api* as the *Scope* and submit the form. Note that if you're
+not going to use GitLab groups for authorization (i.e. not setting
+`allowed_groups`, see below), you can select *read_user* instead of *api* as
+the *Scope*, thus giving a more restricted access to your GitLab API.
+
+You'll get an *Application Id* and a *Secret* in return; we'll call them
+`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this
+section.
+
+Add the following to your Grafana configuration file to enable GitLab
+authentication:
+
+```ini
+[auth.gitlab]
+enabled = false
+allow_sign_up = false
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups =
+```
+
+Restart the Grafana backend for your changes to take effect.
+
+If you use your own instance of GitLab instead of `gitlab.com`, adjust
+`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com`
+hostname with your own.
+
+With `allow_sign_up` set to `false`, only existing users will be able to login
+using their GitLab account, but with `allow_sign_up` set to `true`, *any* user
+who can authenticate on GitLab will be able to login on your Grafana instance;
+if you use the public `gitlab.com`, it means anyone in the world would be able
+to login on your Grafana instance.
+
+You can however limit access to only members of a given group or list of
+groups by setting the `allowed_groups` option.
+
+### allowed_groups
+
+To limit access to authenticated users that are members of one or more [GitLab
+groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups`
+to a comma- or space-separated list of groups. For instance, if you want to
+only give access to members of the `example` group, set
+
+
+```ini
+allowed_groups = example
+```
+
+If you want to also give access to members of the subgroup `bar`, which is in
+the group `foo`, set
+
+```ini
+allowed_groups = example, foo/bar
+```
+
+Note that in GitLab, the group or subgroup name doesn't always match its
+display name, especially if the display name contains spaces or special
+characters. Make sure you always use the group or subgroup name as it appears
+in the URL of the group or subgroup.
+
+Here's a complete example with `allow_sign_up` enabled, and access limited to
+the `example` and `foo/bar` groups:
+
+```ini
+[auth.gitlab]
+enabled = false
+allow_sign_up = true
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups = example, foo/bar
+```
+
+<hr>
+
 ## [auth.google]
 ## [auth.google]
 
 
 First, you need to create a Google OAuth Client:
 First, you need to create a Google OAuth Client:

+ 8 - 3
docs/sources/installation/docker.md

@@ -38,6 +38,8 @@ The back-end web server has a number of configuration options. Go to the
 [Configuration]({{< relref "configuration.md" >}}) page for details on all
 [Configuration]({{< relref "configuration.md" >}}) page for details on all
 those options.
 those options.
 
 
+> For any changes to `conf/grafana.ini` (or corresponding environment variables) to take effect you need to restart Grafana by restarting the Docker container.
+
 ## Running a Specific Version of Grafana
 ## Running a Specific Version of Grafana
 
 
 ```bash
 ```bash
@@ -49,10 +51,13 @@ $ docker run \
   grafana/grafana:5.1.0
   grafana/grafana:5.1.0
 ```
 ```
 
 
-## Running of the master branch
+## Running the master branch
+
+For every successful build of the master branch we update the `grafana/grafana:master` tag and create a new tag `grafana/grafana-dev:master-<commit hash>` with the hash of the git commit that was built. This means you can always get the latest version of Grafana.
+
+When running Grafana master in production we **strongly** recommend that you use the `grafana/grafana-dev:master-<commit hash>` tag as that will guarantee that you use a specific version of Grafana instead of whatever was the most recent commit at the time.
 
 
-For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
-the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the later since different machines might run different version of grafana if they pull the master tag at different times.
+For a list of available tags, check out [grafana/grafana](https://hub.docker.com/r/grafana/grafana/tags/) and [grafana/grafana-dev](https://hub.docker.com/r/grafana/grafana-dev/tags/). 
 
 
 ## Installing Plugins for Grafana
 ## Installing Plugins for Grafana
 
 

+ 3 - 0
docs/sources/installation/ldap.md

@@ -40,6 +40,9 @@ start_tls = false
 ssl_skip_verify = false
 ssl_skip_verify = false
 # set to the path to your root CA certificate or leave unset to use system defaults
 # set to the path to your root CA certificate or leave unset to use system defaults
 # root_ca_cert = "/path/to/certificate.crt"
 # root_ca_cert = "/path/to/certificate.crt"
+# Authentication against LDAP servers requiring client certificates
+# client_cert = "/path/to/client.crt"
+# client_key = "/path/to/client.key"
 
 
 # Search user bind dn
 # Search user bind dn
 bind_dn = "cn=admin,dc=grafana,dc=org"
 bind_dn = "cn=admin,dc=grafana,dc=org"

+ 8 - 11
docs/sources/project/building_from_source.md

@@ -57,7 +57,7 @@ For this you need nodejs (v.6+).
 ```bash
 ```bash
 npm install -g yarn
 npm install -g yarn
 yarn install --pure-lockfile
 yarn install --pure-lockfile
-npm run watch
+yarn watch
 ```
 ```
 
 
 ## Running Grafana Locally
 ## Running Grafana Locally
@@ -83,21 +83,18 @@ go get github.com/Unknwon/bra
 bra run
 bra run
 ```
 ```
 
 
-You'll also need to run `npm run watch` to watch for changes to the front-end (typescript, html, sass)
+You'll also need to run `yarn watch` to watch for changes to the front-end (typescript, html, sass)
 
 
 ### Running tests
 ### Running tests
 
 
-- You can run backend Golang tests using "go test ./pkg/...".
-- Execute all frontend tests with "npm run test"
+- You can run backend Golang tests using `go test ./pkg/...`.
+- Execute all frontend tests with `yarn test`
 
 
-Writing & watching frontend tests (we have two test runners)
+Writing & watching frontend tests
+
+- Start watcher: `yarn jest`
+- Jest will run all test files that end with the name ".test.ts"
 
 
-- jest for all new tests that do not require browser context (React+more)
-   - Start watcher: `npm run jest`
-   - Jest will run all test files that end with the name ".jest.ts"
-- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
-  - Start watcher: `npm run karma`
-  - Karma+Mocha runs all files that end with the name "_specs.ts".
 
 
 ## Creating optimized release packages
 ## Creating optimized release packages
 
 

+ 23 - 9
docs/sources/reference/templating.md

@@ -277,31 +277,45 @@ This variable is only available in the Singlestat panel and can be used in the p
 
 
 > Only available in Grafana v5.3+
 > Only available in Grafana v5.3+
 
 
-Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`.
+Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond and a second representation called `$__range_ms` and `$__range_s`.
 
 
 ## Repeating Panels
 ## Repeating Panels
 
 
 Template variables can be very useful to dynamically change your queries across a whole dashboard. If you want
 Template variables can be very useful to dynamically change your queries across a whole dashboard. If you want
 Grafana to dynamically create new panels or rows based on what values you have selected you can use the *Repeat* feature.
 Grafana to dynamically create new panels or rows based on what values you have selected you can use the *Repeat* feature.
 
 
-If you have a variable with `Multi-value` or `Include all value` options enabled you can choose one panel or one row and have Grafana repeat that row
-for every selected value. You find this option under the General tab in panel edit mode. Select the variable to repeat by, and a `min span`.
-The `min span` controls how small Grafana will make the panels (if you have many values selected). Grafana will automatically adjust the width of
-each repeated panel so that the whole row is filled. Currently, you cannot mix other panels on a row with a repeated panel.
+If you have a variable with `Multi-value` or `Include all value` options enabled you can choose one panel and have Grafana repeat that panel
+for every selected value. You find the *Repeat* feature under the *General tab* in panel edit mode.
+
+The `direction` controls how the panels will be arranged.
+
+By choosing `horizontal` the panels will be arranged side-by-side. Grafana will automatically adjust the width
+of each repeated panel so that the whole row is filled. Currently, you cannot mix other panels on a row with a repeated
+panel. Each panel will never be smaller than the provided `Min width` if you have many selected values.
+
+By choosing `vertical` the panels will be arranged from top to bottom in a column. The `Min width` doesn't have any effect in this case. The width of the repeated panels will be the same as of the first panel (the original template) being repeated.
 
 
 Only make changes to the first panel (the original template). To have the changes take effect on all panels you need to trigger a dynamic dashboard re-build.
 Only make changes to the first panel (the original template). To have the changes take effect on all panels you need to trigger a dynamic dashboard re-build.
 You can do this by either changing the variable value (that is the basis for the repeat) or reload the dashboard.
 You can do this by either changing the variable value (that is the basis for the repeat) or reload the dashboard.
 
 
 ## Repeating Rows
 ## Repeating Rows
 
 
-This option requires you to open the row options view. Hover over the row left side to trigger the row menu, in this menu click `Row Options`. This
-opens the row options view. Here you find a *Repeat* dropdown where you can select the variable to repeat by.
+As seen above with the *Panels* you can also repeat *Rows* if you have variables set with `Multi-value` or
+`Include all value` selection option.
+
+To enable this feature you need to first add a new *Row* using the *Add Panel* menu. Then by hovering the row title and
+clicking on the cog button, you will access the `Row Options` configuration panel. You can then select the variable
+you want to repeat the row for.
+
+It may be a good idea to use a variable in the row title as well.
+
+Example: [Repeated Rows Dashboard](http://play.grafana.org/dashboard/db/repeated-rows)
 
 
-### URL state
+## URL state
 
 
 Variable values are always synced to the URL using the syntax `var-<varname>=value`.
 Variable values are always synced to the URL using the syntax `var-<varname>=value`.
 
 
-### Examples
+## Examples
 
 
 - [Graphite Templated Dashboard](http://play.grafana.org/dashboard/db/graphite-templated-nested)
 - [Graphite Templated Dashboard](http://play.grafana.org/dashboard/db/graphite-templated-nested)
 - [Elasticsearch Templated Dashboard](http://play.grafana.org/dashboard/db/elasticsearch-templated)
 - [Elasticsearch Templated Dashboard](http://play.grafana.org/dashboard/db/elasticsearch-templated)

+ 1 - 1
jest.config.js

@@ -13,7 +13,7 @@ module.exports = {
   "roots": [
   "roots": [
     "<rootDir>/public"
     "<rootDir>/public"
   ],
   ],
-  "testRegex": "(\\.|/)(jest)\\.(jsx?|tsx?)$",
+  "testRegex": "(\\.|/)(test)\\.(jsx?|tsx?)$",
   "moduleFileExtensions": [
   "moduleFileExtensions": [
     "ts",
     "ts",
     "tsx",
     "tsx",

+ 0 - 40
karma.conf.js

@@ -1,40 +0,0 @@
-var webpack = require('webpack');
-var path = require('path');
-var webpackTestConfig = require('./scripts/webpack/webpack.test.js');
-
-module.exports = function(config) {
-
-  'use strict';
-
-  config.set({
-    frameworks: ['mocha', 'expect', 'sinon'],
-
-    // list of files / patterns to load in the browser
-    files: [
-      { pattern: 'public/test/index.ts', watched: false }
-    ],
-
-    preprocessors: {
-      'public/test/index.ts': ['webpack', 'sourcemap'],
-    },
-
-    webpack: webpackTestConfig,
-    webpackMiddleware: {
-      stats: 'minimal',
-    },
-
-    // list of files to exclude
-    exclude: [],
-    reporters: ['dots'],
-    port: 9876,
-    colors: true,
-    logLevel: config.LOG_INFO,
-    autoWatch: true,
-    browsers: ['PhantomJS'],
-    captureTimeout: 20000,
-    singleRun: true,
-    // autoWatchBatchDelay: 1000,
-    // browserNoActivityTimeout: 60000,
-  });
-
-};

+ 1 - 15
package.json

@@ -32,7 +32,6 @@
     "es6-shim": "^0.35.3",
     "es6-shim": "^0.35.3",
     "expect.js": "~0.2.0",
     "expect.js": "~0.2.0",
     "expose-loader": "^0.7.3",
     "expose-loader": "^0.7.3",
-    "extract-text-webpack-plugin": "^4.0.0-beta.0",
     "file-loader": "^1.1.11",
     "file-loader": "^1.1.11",
     "fork-ts-checker-webpack-plugin": "^0.4.2",
     "fork-ts-checker-webpack-plugin": "^0.4.2",
     "gaze": "^1.1.2",
     "gaze": "^1.1.2",
@@ -45,10 +44,7 @@
     "grunt-contrib-concat": "^1.0.1",
     "grunt-contrib-concat": "^1.0.1",
     "grunt-contrib-copy": "~1.0.0",
     "grunt-contrib-copy": "~1.0.0",
     "grunt-contrib-cssmin": "~1.0.2",
     "grunt-contrib-cssmin": "~1.0.2",
-    "grunt-contrib-jshint": "~1.1.0",
     "grunt-exec": "^1.0.1",
     "grunt-exec": "^1.0.1",
-    "grunt-jscs": "3.0.1",
-    "grunt-karma": "~2.0.0",
     "grunt-notify": "^0.4.5",
     "grunt-notify": "^0.4.5",
     "grunt-postcss": "^0.8.0",
     "grunt-postcss": "^0.8.0",
     "grunt-sass": "^2.0.0",
     "grunt-sass": "^2.0.0",
@@ -60,22 +56,13 @@
     "html-webpack-plugin": "^3.2.0",
     "html-webpack-plugin": "^3.2.0",
     "husky": "^0.14.3",
     "husky": "^0.14.3",
     "jest": "^22.0.4",
     "jest": "^22.0.4",
-    "jshint-stylish": "~2.2.1",
-    "karma": "1.7.0",
-    "karma-chrome-launcher": "~2.2.0",
-    "karma-expect": "~1.1.3",
-    "karma-mocha": "~1.3.0",
-    "karma-phantomjs-launcher": "1.0.4",
-    "karma-sinon": "^1.0.5",
-    "karma-sourcemap-loader": "^0.3.7",
-    "karma-webpack": "^3.0.0",
     "lint-staged": "^6.0.0",
     "lint-staged": "^6.0.0",
     "load-grunt-tasks": "3.5.2",
     "load-grunt-tasks": "3.5.2",
     "mini-css-extract-plugin": "^0.4.0",
     "mini-css-extract-plugin": "^0.4.0",
     "mobx-react-devtools": "^4.2.15",
     "mobx-react-devtools": "^4.2.15",
     "mocha": "^4.0.1",
     "mocha": "^4.0.1",
     "ng-annotate-loader": "^0.6.1",
     "ng-annotate-loader": "^0.6.1",
-    "ng-annotate-webpack-plugin": "^0.2.1-pre",
+    "ng-annotate-webpack-plugin": "^0.3.0",
     "ngtemplate-loader": "^2.0.1",
     "ngtemplate-loader": "^2.0.1",
     "npm": "^5.4.2",
     "npm": "^5.4.2",
     "optimize-css-assets-webpack-plugin": "^4.0.2",
     "optimize-css-assets-webpack-plugin": "^4.0.2",
@@ -115,7 +102,6 @@
     "test": "grunt test",
     "test": "grunt test",
     "test:coverage": "grunt test --coverage=true",
     "test:coverage": "grunt test --coverage=true",
     "lint": "tslint -c tslint.json --project tsconfig.json --type-check",
     "lint": "tslint -c tslint.json --project tsconfig.json --type-check",
-    "karma": "grunt karma:dev",
     "jest": "jest --notify --watch",
     "jest": "jest --notify --watch",
     "api-tests": "jest --notify --watch --config=tests/api/jest.js",
     "api-tests": "jest --notify --watch --config=tests/api/jest.js",
     "precommit": "lint-staged && grunt precommit"
     "precommit": "lint-staged && grunt precommit"

+ 1 - 3
packaging/docker/README.md

@@ -1,7 +1,5 @@
 # Grafana Docker image
 # Grafana Docker image
 
 
-[![CircleCI](https://circleci.com/gh/grafana/grafana-docker.svg?style=svg)](https://circleci.com/gh/grafana/grafana-docker)
-
 ## Running your Grafana container
 ## Running your Grafana container
 
 
 Start your container binding the external port `3000`.
 Start your container binding the external port `3000`.
@@ -42,4 +40,4 @@ Further documentation can be found at http://docs.grafana.org/installation/docke
 * Plugins dir (`/var/lib/grafana/plugins`) is no longer a separate volume
 * Plugins dir (`/var/lib/grafana/plugins`) is no longer a separate volume
 
 
 ### v3.1.1
 ### v3.1.1
-* Make it possible to install specific plugin version https://github.com/grafana/grafana-docker/issues/59#issuecomment-260584026
+* Make it possible to install specific plugin version https://github.com/grafana/grafana-docker/issues/59#issuecomment-260584026

+ 2 - 2
packaging/docker/push_to_docker_hub.sh

@@ -15,10 +15,10 @@ fi
 echo "pushing ${_docker_repo}:${_grafana_version}"
 echo "pushing ${_docker_repo}:${_grafana_version}"
 docker push "${_docker_repo}:${_grafana_version}"
 docker push "${_docker_repo}:${_grafana_version}"
 
 
-if echo "$_grafana_tag" | grep -q "^v"; then
+if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"; then
 	echo "pushing ${_docker_repo}:latest"
 	echo "pushing ${_docker_repo}:latest"
 	docker push "${_docker_repo}:latest"
 	docker push "${_docker_repo}:latest"
-else
+elif echo "$_grafana_tag" | grep -q "master"; then
 	echo "pushing grafana/grafana:master"
 	echo "pushing grafana/grafana:master"
 	docker push grafana/grafana:master
 	docker push grafana/grafana:master
 fi
 fi

+ 7 - 1
packaging/docker/run.sh

@@ -67,7 +67,13 @@ if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then
   IFS=','
   IFS=','
   for plugin in ${GF_INSTALL_PLUGINS}; do
   for plugin in ${GF_INSTALL_PLUGINS}; do
     IFS=$OLDIFS
     IFS=$OLDIFS
-    grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
+    if [[ $plugin =~ .*\;.* ]]; then
+        pluginUrl=$(echo "$plugin" | cut -d';' -f 1)
+        pluginWithoutUrl=$(echo "$plugin" | cut -d';' -f 2)
+        grafana-cli --pluginUrl "${pluginUrl}" --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${pluginWithoutUrl}
+    else
+        grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
+    fi
   done
   done
 fi
 fi
 
 

+ 1 - 0
pkg/api/api.go

@@ -120,6 +120,7 @@ func (hs *HTTPServer) registerRoutes() {
 			userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
 			userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
 			userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
 			userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
 			userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
 			userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
+			userRoute.Get("/teams", Wrap(GetSignedInUserTeamList))
 
 
 			userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
 			userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
 			userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))
 			userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))

+ 9 - 2
pkg/api/pluginproxy/ds_proxy.go

@@ -203,6 +203,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
 		req.Header.Del("X-Forwarded-Host")
 		req.Header.Del("X-Forwarded-Host")
 		req.Header.Del("X-Forwarded-Port")
 		req.Header.Del("X-Forwarded-Port")
 		req.Header.Del("X-Forwarded-Proto")
 		req.Header.Del("X-Forwarded-Proto")
+		req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
 
 
 		// set X-Forwarded-For header
 		// set X-Forwarded-For header
 		if req.RemoteAddr != "" {
 		if req.RemoteAddr != "" {
@@ -319,9 +320,15 @@ func (proxy *DataSourceProxy) applyRoute(req *http.Request) {
 		SecureJsonData: proxy.ds.SecureJsonData.Decrypt(),
 		SecureJsonData: proxy.ds.SecureJsonData.Decrypt(),
 	}
 	}
 
 
-	routeURL, err := url.Parse(proxy.route.Url)
+	interpolatedURL, err := interpolateString(proxy.route.Url, data)
 	if err != nil {
 	if err != nil {
-		logger.Error("Error parsing plugin route url")
+		logger.Error("Error interpolating proxy url", "error", err)
+		return
+	}
+
+	routeURL, err := url.Parse(interpolatedURL)
+	if err != nil {
+		logger.Error("Error parsing plugin route url", "error", err)
 		return
 		return
 	}
 	}
 
 

+ 28 - 8
pkg/api/pluginproxy/ds_proxy_test.go

@@ -49,6 +49,13 @@ func TestDSRouteRule(t *testing.T) {
 							{Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"},
 							{Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"},
 						},
 						},
 					},
 					},
+					{
+						Path: "api/common",
+						Url:  "{{.JsonData.dynamicUrl}}",
+						Headers: []plugins.AppPluginRouteHeader{
+							{Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"},
+						},
+					},
 				},
 				},
 			}
 			}
 
 
@@ -57,7 +64,8 @@ func TestDSRouteRule(t *testing.T) {
 
 
 			ds := &m.DataSource{
 			ds := &m.DataSource{
 				JsonData: simplejson.NewFromAny(map[string]interface{}{
 				JsonData: simplejson.NewFromAny(map[string]interface{}{
-					"clientId": "asd",
+					"clientId":   "asd",
+					"dynamicUrl": "https://dynamic.grafana.com",
 				}),
 				}),
 				SecureJsonData: map[string][]byte{
 				SecureJsonData: map[string][]byte{
 					"key": key,
 					"key": key,
@@ -83,6 +91,17 @@ func TestDSRouteRule(t *testing.T) {
 				})
 				})
 			})
 			})
 
 
+			Convey("When matching route path and has dynamic url", func() {
+				proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method")
+				proxy.route = plugin.Routes[3]
+				proxy.applyRoute(req)
+
+				Convey("should add headers and interpolate the url", func() {
+					So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method")
+					So(req.Header.Get("x-header"), ShouldEqual, "my secret 123")
+				})
+			})
+
 			Convey("Validating request", func() {
 			Convey("Validating request", func() {
 				Convey("plugin route with valid role", func() {
 				Convey("plugin route with valid role", func() {
 					proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method")
 					proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method")
@@ -212,20 +231,21 @@ func TestDSRouteRule(t *testing.T) {
 		})
 		})
 
 
 		Convey("When proxying graphite", func() {
 		Convey("When proxying graphite", func() {
+			setting.BuildVersion = "5.3.0"
 			plugin := &plugins.DataSourcePlugin{}
 			plugin := &plugins.DataSourcePlugin{}
 			ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
 			ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
 			ctx := &m.ReqContext{}
 			ctx := &m.ReqContext{}
 
 
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "/render")
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "/render")
+			req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+			So(err, ShouldBeNil)
 
 
-			requestURL, _ := url.Parse("http://grafana.com/sub")
-			req := http.Request{URL: requestURL}
-
-			proxy.getDirector()(&req)
+			proxy.getDirector()(req)
 
 
 			Convey("Can translate request url and path", func() {
 			Convey("Can translate request url and path", func() {
 				So(req.URL.Host, ShouldEqual, "graphite:8080")
 				So(req.URL.Host, ShouldEqual, "graphite:8080")
 				So(req.URL.Path, ShouldEqual, "/render")
 				So(req.URL.Path, ShouldEqual, "/render")
+				So(req.Header.Get("User-Agent"), ShouldEqual, "Grafana/5.3.0")
 			})
 			})
 		})
 		})
 
 
@@ -243,10 +263,10 @@ func TestDSRouteRule(t *testing.T) {
 			ctx := &m.ReqContext{}
 			ctx := &m.ReqContext{}
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "")
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "")
 
 
-			requestURL, _ := url.Parse("http://grafana.com/sub")
-			req := http.Request{URL: requestURL}
+			req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+			So(err, ShouldBeNil)
 
 
-			proxy.getDirector()(&req)
+			proxy.getDirector()(req)
 
 
 			Convey("Should add db to url", func() {
 			Convey("Should add db to url", func() {
 				So(req.URL.Path, ShouldEqual, "/db/site/")
 				So(req.URL.Path, ShouldEqual, "/db/site/")

+ 15 - 0
pkg/api/user.go

@@ -111,6 +111,21 @@ func GetSignedInUserOrgList(c *m.ReqContext) Response {
 	return getUserOrgList(c.UserId)
 	return getUserOrgList(c.UserId)
 }
 }
 
 
+// GET /api/user/teams
+func GetSignedInUserTeamList(c *m.ReqContext) Response {
+	query := m.GetTeamsByUserQuery{OrgId: c.OrgId, UserId: c.UserId}
+
+	if err := bus.Dispatch(&query); err != nil {
+		return Error(500, "Failed to get user teams", err)
+	}
+
+	for _, team := range query.Result {
+		team.AvatarUrl = dtos.GetGravatarUrlWithDefault(team.Email, team.Name)
+	}
+
+	return JSON(200, query.Result)
+}
+
 // GET /api/user/:id/orgs
 // GET /api/user/:id/orgs
 func GetUserOrgList(c *m.ReqContext) Response {
 func GetUserOrgList(c *m.ReqContext) Response {
 	return getUserOrgList(c.ParamsInt64(":id"))
 	return getUserOrgList(c.ParamsInt64(":id"))

+ 10 - 0
pkg/login/ldap.go

@@ -59,6 +59,13 @@ func (a *ldapAuther) Dial() error {
 			}
 			}
 		}
 		}
 	}
 	}
+	var clientCert tls.Certificate
+	if a.server.ClientCert != "" && a.server.ClientKey != "" {
+		clientCert, err = tls.LoadX509KeyPair(a.server.ClientCert, a.server.ClientKey)
+		if err != nil {
+			return err
+		}
+	}
 	for _, host := range strings.Split(a.server.Host, " ") {
 	for _, host := range strings.Split(a.server.Host, " ") {
 		address := fmt.Sprintf("%s:%d", host, a.server.Port)
 		address := fmt.Sprintf("%s:%d", host, a.server.Port)
 		if a.server.UseSSL {
 		if a.server.UseSSL {
@@ -67,6 +74,9 @@ func (a *ldapAuther) Dial() error {
 				ServerName:         host,
 				ServerName:         host,
 				RootCAs:            certPool,
 				RootCAs:            certPool,
 			}
 			}
+			if len(clientCert.Certificate) > 0 {
+				tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert)
+			}
 			if a.server.StartTLS {
 			if a.server.StartTLS {
 				a.conn, err = ldap.Dial("tcp", address)
 				a.conn, err = ldap.Dial("tcp", address)
 				if err == nil {
 				if err == nil {

+ 2 - 0
pkg/login/ldap_settings.go

@@ -21,6 +21,8 @@ type LdapServerConf struct {
 	StartTLS      bool             `toml:"start_tls"`
 	StartTLS      bool             `toml:"start_tls"`
 	SkipVerifySSL bool             `toml:"ssl_skip_verify"`
 	SkipVerifySSL bool             `toml:"ssl_skip_verify"`
 	RootCACert    string           `toml:"root_ca_cert"`
 	RootCACert    string           `toml:"root_ca_cert"`
+	ClientCert    string           `toml:"client_cert"`
+	ClientKey     string           `toml:"client_key"`
 	BindDN        string           `toml:"bind_dn"`
 	BindDN        string           `toml:"bind_dn"`
 	BindPassword  string           `toml:"bind_password"`
 	BindPassword  string           `toml:"bind_password"`
 	Attr          LdapAttributeMap `toml:"attributes"`
 	Attr          LdapAttributeMap `toml:"attributes"`

+ 1 - 0
pkg/models/models.go

@@ -8,4 +8,5 @@ const (
 	TWITTER
 	TWITTER
 	GENERIC
 	GENERIC
 	GRAFANA_COM
 	GRAFANA_COM
+	GITLAB
 )
 )

+ 1 - 2
pkg/services/alerting/notifier.go

@@ -3,7 +3,6 @@ package alerting
 import (
 import (
 	"errors"
 	"errors"
 	"fmt"
 	"fmt"
-	"time"
 
 
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/sync/errgroup"
 
 
@@ -81,7 +80,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
 	renderOpts := rendering.Opts{
 	renderOpts := rendering.Opts{
 		Width:   1000,
 		Width:   1000,
 		Height:  500,
 		Height:  500,
-		Timeout: time.Second * 30,
+		Timeout: alertTimeout / 2,
 		OrgId:   context.Rule.OrgId,
 		OrgId:   context.Rule.OrgId,
 		OrgRole: m.ROLE_ADMIN,
 		OrgRole: m.ROLE_ADMIN,
 	}
 	}

+ 1 - 1
pkg/services/alerting/notifiers/slack.go

@@ -58,7 +58,7 @@ func init() {
           data-placement="right">
           data-placement="right">
         </input>
         </input>
         <info-popover mode="right-absolute">
         <info-popover mode="right-absolute">
-          Provide a bot token to use the Slack file.upload API (starts with "xoxb")
+          Provide a bot token to use the Slack file.upload API (starts with "xoxb"). Specify #channel-name or @username in Recipient for this to work 
         </info-popover>
         </info-popover>
       </div>
       </div>
     `,
     `,

+ 3 - 3
pkg/services/provisioning/datasources/config_reader.go

@@ -83,7 +83,7 @@ func (cr *configReader) parseDatasourceConfig(path string, file os.FileInfo) (*D
 }
 }
 
 
 func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error {
 func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error {
-	defaultCount := 0
+	defaultCount := map[int64]int{}
 	for i := range datasources {
 	for i := range datasources {
 		if datasources[i].Datasources == nil {
 		if datasources[i].Datasources == nil {
 			continue
 			continue
@@ -95,8 +95,8 @@ func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error {
 			}
 			}
 
 
 			if ds.IsDefault {
 			if ds.IsDefault {
-				defaultCount++
-				if defaultCount > 1 {
+				defaultCount[ds.OrgId] = defaultCount[ds.OrgId] + 1
+				if defaultCount[ds.OrgId] > 1 {
 					return ErrInvalidConfigToManyDefault
 					return ErrInvalidConfigToManyDefault
 				}
 				}
 			}
 			}

+ 14 - 0
pkg/services/provisioning/datasources/config_reader_test.go

@@ -19,6 +19,7 @@ var (
 	allProperties                   = "testdata/all-properties"
 	allProperties                   = "testdata/all-properties"
 	versionZero                     = "testdata/version-0"
 	versionZero                     = "testdata/version-0"
 	brokenYaml                      = "testdata/broken-yaml"
 	brokenYaml                      = "testdata/broken-yaml"
+	multipleOrgsWithDefault         = "testdata/multiple-org-default"
 
 
 	fakeRepo *fakeRepository
 	fakeRepo *fakeRepository
 )
 )
@@ -73,6 +74,19 @@ func TestDatasourceAsConfig(t *testing.T) {
 			})
 			})
 		})
 		})
 
 
+		Convey("Multiple datasources in different organizations with isDefault in each organization", func() {
+			dc := newDatasourceProvisioner(logger)
+			err := dc.applyChanges(multipleOrgsWithDefault)
+			Convey("should not raise error", func() {
+				So(err, ShouldBeNil)
+				So(len(fakeRepo.inserted), ShouldEqual, 4)
+				So(fakeRepo.inserted[0].IsDefault, ShouldBeTrue)
+				So(fakeRepo.inserted[0].OrgId, ShouldEqual, 1)
+				So(fakeRepo.inserted[2].IsDefault, ShouldBeTrue)
+				So(fakeRepo.inserted[2].OrgId, ShouldEqual, 2)
+			})
+		})
+
 		Convey("Two configured datasource and purge others ", func() {
 		Convey("Two configured datasource and purge others ", func() {
 			Convey("two other datasources in database", func() {
 			Convey("two other datasources in database", func() {
 				fakeRepo.loadAll = []*models.DataSource{
 				fakeRepo.loadAll = []*models.DataSource{

+ 1 - 1
pkg/services/provisioning/datasources/datasources.go

@@ -11,7 +11,7 @@ import (
 )
 )
 
 
 var (
 var (
-	ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource can be marked as default")
+	ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource per organization can be marked as default")
 )
 )
 
 
 func Provision(configDirectory string) error {
 func Provision(configDirectory string) error {

+ 25 - 0
pkg/services/provisioning/datasources/testdata/multiple-org-default/config.yaml

@@ -0,0 +1,25 @@
+apiVersion: 1
+
+datasources:
+  - orgId: 1
+    name: prometheus
+    type: prometheus
+    isDefault: True
+    access: proxy
+    url: http://prometheus.example.com:9090
+  - name: Graphite
+    type: graphite
+    access: proxy
+    url: http://localhost:8080
+  - orgId: 2
+    name: prometheus
+    type: prometheus
+    isDefault: True
+    access: proxy
+    url: http://prometheus.example.com:9090
+  - orgId: 2
+    name: Graphite
+    type: graphite
+    access: proxy
+    url: http://localhost:8080
+

+ 132 - 0
pkg/social/gitlab_oauth.go

@@ -0,0 +1,132 @@
+package social
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"regexp"
+
+	"github.com/grafana/grafana/pkg/models"
+
+	"golang.org/x/oauth2"
+)
+
+type SocialGitlab struct {
+	*SocialBase
+	allowedDomains []string
+	allowedGroups  []string
+	apiUrl         string
+	allowSignup    bool
+}
+
+var (
+	ErrMissingGroupMembership = &Error{"User not a member of one of the required groups"}
+)
+
+func (s *SocialGitlab) Type() int {
+	return int(models.GITLAB)
+}
+
+func (s *SocialGitlab) IsEmailAllowed(email string) bool {
+	return isEmailAllowed(email, s.allowedDomains)
+}
+
+func (s *SocialGitlab) IsSignupAllowed() bool {
+	return s.allowSignup
+}
+
+func (s *SocialGitlab) IsGroupMember(client *http.Client) bool {
+	if len(s.allowedGroups) == 0 {
+		return true
+	}
+
+	for groups, url := s.GetGroups(client, s.apiUrl+"/groups"); groups != nil; groups, url = s.GetGroups(client, url) {
+		for _, allowedGroup := range s.allowedGroups {
+			for _, group := range groups {
+				if group == allowedGroup {
+					return true
+				}
+			}
+		}
+	}
+
+	return false
+}
+
+func (s *SocialGitlab) GetGroups(client *http.Client, url string) ([]string, string) {
+	type Group struct {
+		FullPath string `json:"full_path"`
+	}
+
+	var (
+		groups []Group
+		next   string
+	)
+
+	if url == "" {
+		return nil, next
+	}
+
+	response, err := HttpGet(client, url)
+	if err != nil {
+		s.log.Error("Error getting groups from GitLab API", "err", err)
+		return nil, next
+	}
+
+	if err := json.Unmarshal(response.Body, &groups); err != nil {
+		s.log.Error("Error parsing JSON from GitLab API", "err", err)
+		return nil, next
+	}
+
+	fullPaths := make([]string, len(groups))
+	for i, group := range groups {
+		fullPaths[i] = group.FullPath
+	}
+
+	if link, ok := response.Headers["Link"]; ok {
+		pattern := regexp.MustCompile(`<([^>]+)>; rel="next"`)
+		if matches := pattern.FindStringSubmatch(link[0]); matches != nil {
+			next = matches[1]
+		}
+	}
+
+	return fullPaths, next
+}
+
+func (s *SocialGitlab) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) {
+
+	var data struct {
+		Id       int
+		Username string
+		Email    string
+		Name     string
+		State    string
+	}
+
+	response, err := HttpGet(client, s.apiUrl+"/user")
+	if err != nil {
+		return nil, fmt.Errorf("Error getting user info: %s", err)
+	}
+
+	err = json.Unmarshal(response.Body, &data)
+	if err != nil {
+		return nil, fmt.Errorf("Error getting user info: %s", err)
+	}
+
+	if data.State != "active" {
+		return nil, fmt.Errorf("User %s is inactive", data.Username)
+	}
+
+	userInfo := &BasicUserInfo{
+		Id:    fmt.Sprintf("%d", data.Id),
+		Name:  data.Name,
+		Login: data.Username,
+		Email: data.Email,
+	}
+
+	if !s.IsGroupMember(client) {
+		return nil, ErrMissingGroupMembership
+	}
+
+	return userInfo, nil
+}

+ 15 - 1
pkg/social/social.go

@@ -55,7 +55,7 @@ func NewOAuthService() {
 	setting.OAuthService = &setting.OAuther{}
 	setting.OAuthService = &setting.OAuther{}
 	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
 	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
 
 
-	allOauthes := []string{"github", "google", "generic_oauth", "grafananet", "grafana_com"}
+	allOauthes := []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"}
 
 
 	for _, name := range allOauthes {
 	for _, name := range allOauthes {
 		sec := setting.Raw.Section("auth." + name)
 		sec := setting.Raw.Section("auth." + name)
@@ -115,6 +115,20 @@ func NewOAuthService() {
 			}
 			}
 		}
 		}
 
 
+		// GitLab.
+		if name == "gitlab" {
+			SocialMap["gitlab"] = &SocialGitlab{
+				SocialBase: &SocialBase{
+					Config: &config,
+					log:    logger,
+				},
+				allowedDomains: info.AllowedDomains,
+				apiUrl:         info.ApiUrl,
+				allowSignup:    info.AllowSignup,
+				allowedGroups:  util.SplitString(sec.Key("allowed_groups").String()),
+			}
+		}
+
 		// Google.
 		// Google.
 		if name == "google" {
 		if name == "google" {
 			SocialMap["google"] = &SocialGoogle{
 			SocialMap["google"] = &SocialGoogle{

+ 24 - 16
pkg/tsdb/mssql/macros.go

@@ -6,8 +6,6 @@ import (
 	"strings"
 	"strings"
 	"time"
 	"time"
 
 
-	"strconv"
-
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 )
 
 
@@ -97,20 +95,9 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		}
 		if len(args) == 3 {
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			switch args[2] {
-			case "NULL":
-				m.query.Model.Set("fillMode", "null")
-			case "previous":
-				m.query.Model.Set("fillMode", "previous")
-			default:
-				m.query.Model.Set("fillMode", "value")
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 			}
 		}
 		}
 		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
 		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -129,6 +116,27 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
 	case "__unixEpochTo":
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__unixEpochGroup":
+		if len(args) < 2 {
+			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
+		}
+		interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
+		if err != nil {
+			return "", fmt.Errorf("error parsing interval %v", args[1])
+		}
+		if len(args) == 3 {
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
+			}
+		}
+		return fmt.Sprintf("FLOOR(%s/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
+	case "__unixEpochGroupAlias":
+		tg, err := m.evaluateMacro("__unixEpochGroup", args)
+		if err == nil {
+			return tg + " AS [time]", err
+		}
+		return "", err
 	default:
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}
 	}

+ 12 - 0
pkg/tsdb/mssql/macros_test.go

@@ -145,6 +145,18 @@ func TestMacroEngine(t *testing.T) {
 
 
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 			})
 			})
+
+			Convey("interpolate __unixEpochGroup function", func() {
+
+				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "SELECT FLOOR(time_column/300)*300")
+				So(sql2, ShouldEqual, sql+" AS [time]")
+			})
+
 		})
 		})
 
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

+ 24 - 15
pkg/tsdb/mysql/macros.go

@@ -3,7 +3,6 @@ package mysql
 import (
 import (
 	"fmt"
 	"fmt"
 	"regexp"
 	"regexp"
-	"strconv"
 	"strings"
 	"strings"
 	"time"
 	"time"
 
 
@@ -92,20 +91,9 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		}
 		if len(args) == 3 {
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			switch args[2] {
-			case "NULL":
-				m.query.Model.Set("fillMode", "null")
-			case "previous":
-				m.query.Model.Set("fillMode", "previous")
-			default:
-				m.query.Model.Set("fillMode", "value")
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 			}
 		}
 		}
 		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
 		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -124,6 +112,27 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
 	case "__unixEpochTo":
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__unixEpochGroup":
+		if len(args) < 2 {
+			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
+		}
+		interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
+		if err != nil {
+			return "", fmt.Errorf("error parsing interval %v", args[1])
+		}
+		if len(args) == 3 {
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
+			}
+		}
+		return fmt.Sprintf("%s DIV %v * %v", args[0], interval.Seconds(), interval.Seconds()), nil
+	case "__unixEpochGroupAlias":
+		tg, err := m.evaluateMacro("__unixEpochGroup", args)
+		if err == nil {
+			return tg + " AS \"time\"", err
+		}
+		return "", err
 	default:
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}
 	}

+ 12 - 0
pkg/tsdb/mysql/macros_test.go

@@ -97,6 +97,18 @@ func TestMacroEngine(t *testing.T) {
 
 
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 			})
 			})
+
+			Convey("interpolate __unixEpochGroup function", func() {
+
+				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "SELECT time_column DIV 300 * 300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
+			})
+
 		})
 		})
 
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

+ 35 - 20
pkg/tsdb/postgres/macros.go

@@ -3,7 +3,6 @@ package postgres
 import (
 import (
 	"fmt"
 	"fmt"
 	"regexp"
 	"regexp"
-	"strconv"
 	"strings"
 	"strings"
 	"time"
 	"time"
 
 
@@ -15,12 +14,13 @@ const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
 
 type postgresMacroEngine struct {
 type postgresMacroEngine struct {
-	timeRange *tsdb.TimeRange
-	query     *tsdb.Query
+	timeRange   *tsdb.TimeRange
+	query       *tsdb.Query
+	timescaledb bool
 }
 }
 
 
-func newPostgresMacroEngine() tsdb.SqlMacroEngine {
-	return &postgresMacroEngine{}
+func newPostgresMacroEngine(timescaledb bool) tsdb.SqlMacroEngine {
+	return &postgresMacroEngine{timescaledb: timescaledb}
 }
 }
 
 
 func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
 func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@@ -114,23 +114,17 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		}
 		if len(args) == 3 {
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			switch args[2] {
-			case "NULL":
-				m.query.Model.Set("fillMode", "null")
-			case "previous":
-				m.query.Model.Set("fillMode", "previous")
-			default:
-				m.query.Model.Set("fillMode", "value")
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 			}
 		}
 		}
-		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
+
+		if m.timescaledb {
+			return fmt.Sprintf("time_bucket('%vs',%s)", interval.Seconds(), args[0]), nil
+		} else {
+			return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
+		}
 	case "__timeGroupAlias":
 	case "__timeGroupAlias":
 		tg, err := m.evaluateMacro("__timeGroup", args)
 		tg, err := m.evaluateMacro("__timeGroup", args)
 		if err == nil {
 		if err == nil {
@@ -146,6 +140,27 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
 	case "__unixEpochTo":
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__unixEpochGroup":
+		if len(args) < 2 {
+			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
+		}
+		interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
+		if err != nil {
+			return "", fmt.Errorf("error parsing interval %v", args[1])
+		}
+		if len(args) == 3 {
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
+			}
+		}
+		return fmt.Sprintf("floor(%s/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
+	case "__unixEpochGroupAlias":
+		tg, err := m.evaluateMacro("__unixEpochGroup", args)
+		if err == nil {
+			return tg + " AS \"time\"", err
+		}
+		return "", err
 	default:
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}
 	}

+ 32 - 1
pkg/tsdb/postgres/macros_test.go

@@ -12,7 +12,10 @@ import (
 
 
 func TestMacroEngine(t *testing.T) {
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
 	Convey("MacroEngine", t, func() {
-		engine := newPostgresMacroEngine()
+		timescaledbEnabled := false
+		engine := newPostgresMacroEngine(timescaledbEnabled)
+		timescaledbEnabled = true
+		engineTS := newPostgresMacroEngine(timescaledbEnabled)
 		query := &tsdb.Query{}
 		query := &tsdb.Query{}
 
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
@@ -83,6 +86,22 @@ func TestMacroEngine(t *testing.T) {
 				So(sql2, ShouldEqual, sql+" AS \"time\"")
 				So(sql2, ShouldEqual, sql+" AS \"time\"")
 			})
 			})
 
 
+			Convey("interpolate __timeGroup function with TimescaleDB enabled", func() {
+
+				sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
+			})
+
+			Convey("interpolate __timeGroup function with spaces between args and TimescaleDB enabled", func() {
+
+				sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
+			})
+
 			Convey("interpolate __timeTo function", func() {
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 				So(err, ShouldBeNil)
@@ -110,6 +129,18 @@ func TestMacroEngine(t *testing.T) {
 
 
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 				So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
 			})
 			})
+
+			Convey("interpolate __unixEpochGroup function", func() {
+
+				sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "SELECT floor(time_column/300)*300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
+			})
+
 		})
 		})
 
 
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {
 		Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

+ 3 - 1
pkg/tsdb/postgres/postgres.go

@@ -32,7 +32,9 @@ func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndp
 		log: logger,
 		log: logger,
 	}
 	}
 
 
-	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger)
+	timescaledb := datasource.JsonData.Get("timescaledb").MustBool(false)
+
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(timescaledb), logger)
 }
 }
 
 
 func generateConnectionString(datasource *models.DataSource) string {
 func generateConnectionString(datasource *models.DataSource) string {

+ 1 - 1
pkg/tsdb/postgres/postgres_test.go

@@ -27,7 +27,7 @@ import (
 // use to verify that the generated data are vizualized as expected, see
 // use to verify that the generated data are vizualized as expected, see
 // devenv/README.md for setup instructions.
 // devenv/README.md for setup instructions.
 func TestPostgres(t *testing.T) {
 func TestPostgres(t *testing.T) {
-	// change to true to run the MySQL tests
+	// change to true to run the PostgreSQL tests
 	runPostgresTests := false
 	runPostgresTests := false
 	// runPostgresTests := true
 	// runPostgresTests := true
 
 

+ 21 - 0
pkg/tsdb/sql_engine.go

@@ -6,6 +6,7 @@ import (
 	"database/sql"
 	"database/sql"
 	"fmt"
 	"fmt"
 	"math"
 	"math"
+	"strconv"
 	"strings"
 	"strings"
 	"sync"
 	"sync"
 	"time"
 	"time"
@@ -568,3 +569,23 @@ func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (n
 
 
 	return value, nil
 	return value, nil
 }
 }
+
+func SetupFillmode(query *Query, interval time.Duration, fillmode string) error {
+	query.Model.Set("fill", true)
+	query.Model.Set("fillInterval", interval.Seconds())
+	switch fillmode {
+	case "NULL":
+		query.Model.Set("fillMode", "null")
+	case "previous":
+		query.Model.Set("fillMode", "previous")
+	default:
+		query.Model.Set("fillMode", "value")
+		floatVal, err := strconv.ParseFloat(fillmode, 64)
+		if err != nil {
+			return fmt.Errorf("error parsing fill value %v", fillmode)
+		}
+		query.Model.Set("fillValue", floatVal)
+	}
+
+	return nil
+}

+ 0 - 0
public/app/containers/AlertRuleList/AlertRuleList.jest.tsx → public/app/containers/AlertRuleList/AlertRuleList.test.tsx


+ 0 - 0
public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap → public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap


+ 7 - 1
public/app/containers/Explore/Explore.tsx

@@ -207,6 +207,7 @@ export class Explore extends React.Component<any, IExploreState> {
       datasourceError: null,
       datasourceError: null,
       datasourceLoading: true,
       datasourceLoading: true,
       graphResult: null,
       graphResult: null,
+      latency: 0,
       logsResult: null,
       logsResult: null,
       queryErrors: [],
       queryErrors: [],
       queryHints: [],
       queryHints: [],
@@ -254,7 +255,10 @@ export class Explore extends React.Component<any, IExploreState> {
     this.setState({
     this.setState({
       graphResult: null,
       graphResult: null,
       logsResult: null,
       logsResult: null,
+      latency: 0,
       queries: ensureQueries(),
       queries: ensureQueries(),
+      queryErrors: [],
+      queryHints: [],
       tableResult: null,
       tableResult: null,
     });
     });
   };
   };
@@ -276,8 +280,10 @@ export class Explore extends React.Component<any, IExploreState> {
 
 
   onClickSplit = () => {
   onClickSplit = () => {
     const { onChangeSplit } = this.props;
     const { onChangeSplit } = this.props;
+    const state = { ...this.state };
+    state.queries = state.queries.map(({ edited, ...rest }) => rest);
     if (onChangeSplit) {
     if (onChangeSplit) {
-      onChangeSplit(true, this.state);
+      onChangeSplit(true, state);
     }
     }
   };
   };
 
 

+ 0 - 0
public/app/containers/Explore/PromQueryField.jest.tsx → public/app/containers/Explore/PromQueryField.test.tsx


+ 1 - 1
public/app/containers/Explore/QueryField.tsx

@@ -331,7 +331,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
         }
         }
         break;
         break;
       }
       }
-
+      case 'Enter':
       case 'Tab': {
       case 'Tab': {
         if (this.menuEl) {
         if (this.menuEl) {
           // Dont blur input
           // Dont blur input

+ 0 - 0
public/app/containers/Explore/TimePicker.jest.tsx → public/app/containers/Explore/TimePicker.test.tsx


+ 0 - 0
public/app/containers/Explore/slate-plugins/braces.jest.ts → public/app/containers/Explore/slate-plugins/braces.test.ts


+ 0 - 0
public/app/containers/Explore/slate-plugins/clear.jest.ts → public/app/containers/Explore/slate-plugins/clear.test.ts


+ 0 - 0
public/app/containers/Explore/utils/prometheus.jest.ts → public/app/containers/Explore/utils/prometheus.test.ts


+ 0 - 0
public/app/containers/ManageDashboards/FolderSettings.jest.tsx → public/app/containers/ManageDashboards/FolderSettings.test.tsx


+ 0 - 0
public/app/containers/ServerStats/ServerStats.jest.tsx → public/app/containers/ServerStats/ServerStats.test.tsx


+ 0 - 0
public/app/containers/ServerStats/__snapshots__/ServerStats.jest.tsx.snap → public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap


+ 3 - 2
public/app/containers/Teams/TeamMembers.tsx

@@ -69,8 +69,9 @@ export class TeamMembers extends React.Component<Props, State> {
 
 
  render() {
    const { newTeamMember, isAdding } = this.state;
-    const members = this.props.team.members.values();
+    const members = this.props.team.filteredMembers;
    const newTeamMemberValue = newTeamMember && newTeamMember.id.toString();
+    const { team } = this.props;

    return (
      <div>
@@ -81,7 +82,7 @@ export class TeamMembers extends React.Component<Props, State> {
                type="text"
                className="gf-form-input"
                placeholder="Search members"
-                value={''}
+                value={team.search}
                onChange={this.onSearchQueryChange}
              />
              <i className="gf-form-input-icon fa fa-search" />

+ 0 - 0
public/app/core/components/DeleteButton/DeleteButton.jest.tsx → public/app/core/components/DeleteButton/DeleteButton.test.tsx


+ 0 - 0
public/app/core/components/EmptyListCTA/EmptyListCTA.jest.tsx → public/app/core/components/EmptyListCTA/EmptyListCTA.test.tsx


+ 0 - 0
public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.jest.tsx.snap → public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.test.tsx.snap


+ 0 - 0
public/app/core/components/PageHeader/PageHeader.jest.tsx → public/app/core/components/PageHeader/PageHeader.test.tsx


+ 0 - 0
public/app/core/components/Permissions/AddPermissions.jest.tsx → public/app/core/components/Permissions/AddPermissions.test.tsx


+ 0 - 0
public/app/core/components/Picker/PickerOption.jest.tsx → public/app/core/components/Picker/PickerOption.test.tsx


+ 0 - 0
public/app/core/components/Picker/TeamPicker.jest.tsx → public/app/core/components/Picker/TeamPicker.test.tsx


+ 0 - 0
public/app/core/components/Picker/UserPicker.jest.tsx → public/app/core/components/Picker/UserPicker.test.tsx


+ 0 - 0
public/app/core/components/Picker/__snapshots__/PickerOption.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap


+ 0 - 0
public/app/core/components/Picker/__snapshots__/TeamPicker.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap


+ 0 - 0
public/app/core/components/Picker/__snapshots__/UserPicker.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap


+ 0 - 0
public/app/core/components/Tooltip/Popover.jest.tsx → public/app/core/components/Tooltip/Popover.test.tsx


+ 0 - 0
public/app/core/components/Tooltip/Tooltip.jest.tsx → public/app/core/components/Tooltip/Tooltip.test.tsx


+ 0 - 0
public/app/core/components/Tooltip/__snapshots__/Popover.jest.tsx.snap → public/app/core/components/Tooltip/__snapshots__/Popover.test.tsx.snap


+ 0 - 0
public/app/core/components/Tooltip/__snapshots__/Tooltip.jest.tsx.snap → public/app/core/components/Tooltip/__snapshots__/Tooltip.test.tsx.snap


+ 1 - 0
public/app/core/components/help/help.ts

@@ -25,6 +25,7 @@ export class HelpCtrl {
        { keys: ['d', 'k'], description: 'Toggle kiosk mode (hides top nav)' },
        { keys: ['d', 'E'], description: 'Expand all rows' },
        { keys: ['d', 'C'], description: 'Collapse all rows' },
+        { keys: ['d', 'a'], description: 'Toggle auto fit panels (experimental feature)' },
        { keys: ['mod+o'], description: 'Toggle shared graph crosshair' },
      ],
      'Focused Panel': [

+ 3 - 12
public/app/core/services/keybindingSrv.ts

@@ -15,14 +15,7 @@ export class KeybindingSrv {
  timepickerOpen = false;

  /** @ngInject */
-  constructor(
-    private $rootScope,
-    private $location,
-    private datasourceSrv,
-    private timeSrv,
-    private contextSrv,
-    private $route
-  ) {
+  constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) {
    // clear out all shortcuts on route change
    $rootScope.$on('$routeChangeSuccess', () => {
      Mousetrap.reset();
@@ -269,10 +262,8 @@

    //Autofit panels
    this.bind('d a', () => {
-      this.$location.search('autofitpanels', this.$location.search().autofitpanels ? null : true);
-      //Force reload
-
-      this.$route.reload();
+      // this has to be a full page reload
+      window.location.href = window.location.href + '&autofitpanels';
    });
  }
}

+ 0 - 0
public/app/core/specs/ColorPalette.jest.tsx → public/app/core/specs/ColorPalette.test.tsx


+ 0 - 0
public/app/core/specs/PasswordStrength.jest.tsx → public/app/core/specs/PasswordStrength.test.tsx


+ 0 - 0
public/app/core/specs/__snapshots__/ColorPalette.jest.tsx.snap → public/app/core/specs/__snapshots__/ColorPalette.test.tsx.snap


+ 0 - 0
public/app/core/specs/backend_srv.jest.ts → public/app/core/specs/backend_srv.test.ts


+ 0 - 0
public/app/core/specs/datemath.jest.ts → public/app/core/specs/datemath.test.ts


+ 0 - 0
public/app/core/specs/emitter.jest.ts → public/app/core/specs/emitter.test.ts


+ 0 - 0
public/app/core/specs/file_export.jest.ts → public/app/core/specs/file_export.test.ts


Một số tệp đã không được hiển thị bởi vì quá nhiều tập tin thay đổi trong này khác