Просмотр исходного кода

Merge remote-tracking branch 'grafana/master'

* grafana/master: (187 commits)
  build: duplicate docker run-script removed.
  Set User-Agent header in all proxied datasource requests
  docs: cloudwatch dimensions reference link.
  docs: remove message property in response from get alerts http api
  changelog: add notes about closing #5623
  build: cleanup
  build: fixes rpm verification.
  docs: add grafana version note for gitlab oauth
  docs: gitlab: add note about more restrictive API scope
  social: gitlab_oauth: set user ID in case email changes
  docs: document GitLab authentication backend
  social: add GitLab authentication backend
  build: verifies the rpm packages signatures.
  changelog: add notes about closing #12224
  docs: update
  feat: add auto fit panels to shortcut modal, closes #12768
  changelog: add notes about closing #12680
  docs: update postgres provisioning
  Remove dependencies
  Rename test files
  ...
ryan 7 лет назад
Родитель
Сommit
8c86a1c4a9
100 измененных файлов с 2715 добавлено и 628 удалено
  1. 1 1
      .bra.toml
  2. 125 18
      .circleci/config.yml
  3. 3 0
      .dockerignore
  4. 3 3
      .github/CONTRIBUTING.md
  5. 1 0
      .gitignore
  6. 0 13
      .jscs.json
  7. 0 37
      .jshintrc
  8. 38 3
      CHANGELOG.md
  9. 82 0
      Dockerfile
  10. 0 1
      Gruntfile.js
  11. 10 1
      Makefile
  12. 1 1
      NOTICE.md
  13. 31 19
      README.md
  14. 8 8
      ROADMAP.md
  15. 18 0
      conf/defaults.ini
  16. 3 0
      conf/ldap.toml
  17. 4 0
      conf/sample.ini
  18. 213 47
      devenv/dev-dashboards/datasource_tests_mssql_unittest.json
  19. 211 45
      devenv/dev-dashboards/datasource_tests_mysql_unittest.json
  20. 211 45
      devenv/dev-dashboards/datasource_tests_postgres_unittest.json
  21. 2 0
      docs/sources/features/datasources/cloudwatch.md
  22. 1 1
      docs/sources/features/datasources/elasticsearch.md
  23. 4 1
      docs/sources/features/datasources/mssql.md
  24. 4 1
      docs/sources/features/datasources/mysql.md
  25. 7 2
      docs/sources/features/datasources/postgres.md
  26. 4 4
      docs/sources/features/datasources/prometheus.md
  27. 0 1
      docs/sources/http_api/alerting.md
  28. 1 1
      docs/sources/http_api/dashboard.md
  29. 1 1
      docs/sources/http_api/folder.md
  30. 33 0
      docs/sources/http_api/user.md
  31. 109 7
      docs/sources/installation/configuration.md
  32. 4 0
      docs/sources/installation/ldap.md
  33. 8 11
      docs/sources/project/building_from_source.md
  34. 1 1
      docs/sources/reference/templating.md
  35. 1 1
      jest.config.js
  36. 0 40
      karma.conf.js
  37. 2 14
      package.json
  38. 52 0
      packaging/docker/Dockerfile
  39. 43 0
      packaging/docker/README.md
  40. 13 0
      packaging/docker/build-deploy.sh
  41. 25 0
      packaging/docker/build.sh
  42. 16 0
      packaging/docker/custom/Dockerfile
  43. 6 0
      packaging/docker/deploy_to_k8s.sh
  44. 24 0
      packaging/docker/push_to_docker_hub.sh
  45. 88 0
      packaging/docker/run.sh
  46. 1 0
      pkg/api/api.go
  47. 16 2
      pkg/api/datasources.go
  48. 1 0
      pkg/api/pluginproxy/ds_proxy.go
  49. 8 7
      pkg/api/pluginproxy/ds_proxy_test.go
  50. 15 0
      pkg/api/user.go
  51. 10 0
      pkg/login/ldap.go
  52. 2 0
      pkg/login/ldap_settings.go
  53. 1 0
      pkg/models/models.go
  54. 1 2
      pkg/services/alerting/notifier.go
  55. 132 0
      pkg/social/gitlab_oauth.go
  56. 15 1
      pkg/social/social.go
  57. 2 2
      pkg/tsdb/cloudwatch/metric_find_query.go
  58. 9 12
      pkg/tsdb/mssql/macros.go
  59. 21 2
      pkg/tsdb/mssql/macros_test.go
  60. 9 11
      pkg/tsdb/mysql/macros.go
  61. 6 0
      pkg/tsdb/mysql/macros_test.go
  62. 30 1
      pkg/tsdb/mysql/mysql_test.go
  63. 37 16
      pkg/tsdb/postgres/macros.go
  64. 43 5
      pkg/tsdb/postgres/macros_test.go
  65. 3 1
      pkg/tsdb/postgres/postgres.go
  66. 33 5
      pkg/tsdb/postgres/postgres_test.go
  67. 44 2
      pkg/tsdb/sql_engine.go
  68. 1 1
      pkg/util/url.go
  69. 27 0
      pkg/util/url_test.go
  70. 22 0
      pkg/util/validation_test.go
  71. 0 0
      public/app/containers/AlertRuleList/AlertRuleList.test.tsx
  72. 0 0
      public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap
  73. 183 60
      public/app/containers/Explore/Explore.tsx
  74. 10 2
      public/app/containers/Explore/Graph.tsx
  75. 1 0
      public/app/containers/Explore/Logs.tsx
  76. 113 19
      public/app/containers/Explore/PromQueryField.test.tsx
  77. 255 85
      public/app/containers/Explore/PromQueryField.tsx
  78. 11 3
      public/app/containers/Explore/QueryField.tsx
  79. 50 33
      public/app/containers/Explore/QueryRows.tsx
  80. 69 9
      public/app/containers/Explore/Table.tsx
  81. 0 0
      public/app/containers/Explore/TimePicker.test.tsx
  82. 9 0
      public/app/containers/Explore/slate-plugins/braces.test.ts
  83. 4 2
      public/app/containers/Explore/slate-plugins/braces.ts
  84. 33 0
      public/app/containers/Explore/utils/prometheus.test.ts
  85. 69 1
      public/app/containers/Explore/utils/prometheus.ts
  86. 0 0
      public/app/containers/ManageDashboards/FolderSettings.test.tsx
  87. 0 0
      public/app/containers/ServerStats/ServerStats.test.tsx
  88. 0 0
      public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap
  89. 7 16
      public/app/containers/Teams/TeamMembers.tsx
  90. 0 0
      public/app/core/components/DeleteButton/DeleteButton.test.tsx
  91. 0 0
      public/app/core/components/EmptyListCTA/EmptyListCTA.test.tsx
  92. 0 0
      public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.test.tsx.snap
  93. 0 0
      public/app/core/components/PageHeader/PageHeader.test.tsx
  94. 0 0
      public/app/core/components/Permissions/AddPermissions.test.tsx
  95. 0 0
      public/app/core/components/Picker/PickerOption.test.tsx
  96. 0 0
      public/app/core/components/Picker/TeamPicker.test.tsx
  97. 0 0
      public/app/core/components/Picker/UserPicker.test.tsx
  98. 0 0
      public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
  99. 0 0
      public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
  100. 0 0
      public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap

+ 1 - 1
.bra.toml

@@ -9,7 +9,7 @@ watch_dirs = [
 	"$WORKDIR/public/views",
 	"$WORKDIR/conf",
 ]
-watch_exts = [".go", ".ini", ".toml"]
+watch_exts = [".go", ".ini", ".toml", ".template.html"]
 build_delay = 1500
 cmds = [
   ["go", "run", "build.go", "-dev", "build-server"],

+ 125 - 18
.circleci/config.yml

@@ -5,9 +5,11 @@ aliases:
       ignore: /.*/
     tags:
       only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
-  - &filter-not-release
+  - &filter-not-release-or-master
     tags:
       ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
+    branches:
+      ignore: master
   - &filter-only-master
     branches:
       only: master
@@ -89,7 +91,7 @@ jobs:
           name: run linters
           command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
       - run:
-          name: run go vet 
+          name: run go vet
           command: 'go vet ./pkg/...'
 
   test-frontend:
@@ -102,6 +104,7 @@ jobs:
       - run:
           name: yarn install
           command: 'yarn install --pure-lockfile --no-progress'
+          no_output_timeout: 15m
       - save_cache:
           key: dependency-cache-{{ checksum "yarn.lock" }}
           paths:
@@ -144,6 +147,12 @@ jobs:
       - run:
           name: sign packages
           command: './scripts/build/sign_packages.sh'
+      - run:
+          name: verify signed packages
+          command: |
+            mkdir -p ~/.rpmdb/pubkeys
+            curl -s https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana > ~/.rpmdb/pubkeys/grafana.key
+            ./scripts/build/verify_signed_packages.sh dist/*.rpm
       - run:
           name: sha-sum packages
           command: 'go run build.go sha-dist'
@@ -156,8 +165,65 @@ jobs:
             - dist/grafana*
             - scripts/*.sh
             - scripts/publish
-      - store_artifacts:
-          path: dist
+
+  build:
+    docker:
+     - image: grafana/build-container:1.0.0
+    working_directory: /go/src/github.com/grafana/grafana
+    steps:
+      - checkout
+      - run:
+          name: prepare build tools
+          command: '/tmp/bootstrap.sh'
+      - run:
+          name: build and package grafana
+          command: './scripts/build/build.sh'
+      - run:
+          name: sign packages
+          command: './scripts/build/sign_packages.sh'
+      - run:
+          name: sha-sum packages
+          command: 'go run build.go sha-dist'
+      - persist_to_workspace:
+          root: .
+          paths:
+            - dist/grafana*
+
+  grafana-docker-master:
+    docker:
+      - image: docker:stable-git
+    steps:
+      - checkout
+      - attach_workspace:
+          at: .
+      - setup_remote_docker
+      - run: docker info
+      - run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
+      - run: cd packaging/docker && ./build-deploy.sh "master-${CIRCLE_SHA1}"
+
+  grafana-docker-pr:
+    docker:
+      - image: docker:stable-git
+    steps:
+      - checkout
+      - attach_workspace:
+          at: .
+      - setup_remote_docker
+      - run: docker info
+      - run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
+      - run: cd packaging/docker && ./build.sh "${CIRCLE_SHA1}"
+
+  grafana-docker-release:
+      docker:
+        - image: docker:stable-git
+      steps:
+        - checkout
+        - attach_workspace:
+            at: .
+        - setup_remote_docker
+        - run: docker info
+        - run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
+        - run: cd packaging/docker && ./build-deploy.sh "${CIRCLE_TAG}"
 
   build-enterprise:
     docker:
@@ -213,9 +279,6 @@ jobs:
       - run:
           name: Trigger Windows build
           command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master'
-      - run:
-          name: Trigger Docker build
-          command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} master-$(echo "${CIRCLE_SHA1}" | cut -b1-7)'
       - run:
           name: Publish to Grafana.com
           command: |
@@ -237,30 +300,27 @@ jobs:
       - run:
           name: Trigger Windows build
           command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release'
-      - run:
-          name: Trigger Docker build
-          command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} ${CIRCLE_TAG}'
 
 workflows:
   version: 2
-  test-and-build:
+  build-master:
     jobs:
       - build-all:
           filters: *filter-only-master
       - build-enterprise:
           filters: *filter-only-master
       - codespell:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - gometalinter:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - test-frontend:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - test-backend:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - mysql-integration-test:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - postgres-integration-test:
-          filters: *filter-not-release
+          filters: *filter-only-master
       - deploy-master:
           requires:
             - build-all
@@ -270,7 +330,17 @@ workflows:
             - gometalinter
             - mysql-integration-test
             - postgres-integration-test
-          filters: *filter-only-master           
+          filters: *filter-only-master
+      - grafana-docker-master:
+          requires:
+            - build-all
+            - test-backend
+            - test-frontend
+            - codespell
+            - gometalinter
+            - mysql-integration-test
+            - postgres-integration-test
+          filters: *filter-only-master
       - deploy-enterprise-master:
           requires:
             - build-all
@@ -309,3 +379,40 @@ workflows:
             - mysql-integration-test
             - postgres-integration-test
           filters: *filter-only-release
+      - grafana-docker-release:
+          requires:
+            - build-all
+            - test-backend
+            - test-frontend
+            - codespell
+            - gometalinter
+            - mysql-integration-test
+            - postgres-integration-test
+          filters: *filter-only-release
+
+  build-branches-and-prs:
+      jobs:
+        - build:
+            filters: *filter-not-release-or-master
+        - codespell:
+            filters: *filter-not-release-or-master
+        - gometalinter:
+            filters: *filter-not-release-or-master
+        - test-frontend:
+            filters: *filter-not-release-or-master
+        - test-backend:
+            filters: *filter-not-release-or-master
+        - mysql-integration-test:
+            filters: *filter-not-release-or-master
+        - postgres-integration-test:
+            filters: *filter-not-release-or-master
+        - grafana-docker-pr:
+            requires:
+              - build
+              - test-backend
+              - test-frontend
+              - codespell
+              - gometalinter
+              - mysql-integration-test
+              - postgres-integration-test
+            filters: *filter-not-release-or-master

+ 3 - 0
.dockerignore

@@ -3,9 +3,12 @@
 .git
 .gitignore
 .github
+.vscode
+bin
 data*
 dist
 docker
+Dockerfile
 docs
 dump.rdb
 node_modules

+ 3 - 3
.github/CONTRIBUTING.md

@@ -2,12 +2,12 @@ Follow the setup guide in README.md
 
 ### Rebuild frontend assets on source change
 ```
-grunt && grunt watch
+yarn watch
 ```
 
 ### Rerun tests on source change
 ```
-grunt karma:dev
+yarn jest
 ```
 
 ### Run tests for backend assets before commit
@@ -17,6 +17,6 @@ test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)'
 
 ### Run tests for frontend assets before commit
 ```
-npm test
+yarn test
 go test -v ./pkg/...
 ```

+ 1 - 0
.gitignore

@@ -58,6 +58,7 @@ debug.test
 /examples/*/dist
 /packaging/**/*.rpm
 /packaging/**/*.deb
+/packaging/**/*.tar.gz
 
 # Ignore OSX indexing
 .DS_Store

+ 0 - 13
.jscs.json

@@ -1,13 +0,0 @@
-{
-    "disallowImplicitTypeConversion": ["string"],
-    "disallowKeywords": ["with"],
-    "disallowMultipleLineBreaks": true,
-    "disallowMixedSpacesAndTabs": true,
-    "disallowTrailingWhitespace": true,
-    "requireSpacesInFunctionExpression": {
-        "beforeOpeningCurlyBrace": true
-    },
-    "disallowSpacesInsideArrayBrackets": true,
-    "disallowSpacesInsideParentheses": true,
-    "validateIndentation": 2
-}

+ 0 - 37
.jshintrc

@@ -1,37 +0,0 @@
-{
-  "browser": true,
-  "esversion": 6,
-  "bitwise":false,
-  "curly": true,
-  "eqnull": true,
-  "strict": false,
-  "devel": true,
-  "eqeqeq": true,
-  "forin": false,
-  "immed": true,
-  "supernew": true,
-  "expr": true,
-  "indent": 2,
-  "latedef": false,
-  "newcap": true,
-  "noarg": true,
-  "noempty": true,
-  "undef": true,
-  "boss": true,
-  "trailing": true,
-  "laxbreak": true,
-  "laxcomma": true,
-  "sub": true,
-  "unused": true,
-  "maxdepth": 6,
-  "maxlen": 140,
-
-  "globals": {
-    "System": true,
-    "Promise": true,
-    "define": true,
-    "require": true,
-    "Chromath": false,
-    "setImmediate": true
-  }
-}

+ 38 - 3
CHANGELOG.md

@@ -1,35 +1,70 @@
 # 5.3.0 (unreleased)
 
+* **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht)
 * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
 * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
 * **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622)
 * **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
+* **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
+* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
+* **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi)
+* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
 
 ### Minor
 
 * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
 * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
+* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705)
 * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
-* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
 * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
-* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
+* **Prometheus**: Add $__interval, $__interval_ms, $__range, $__range_s & $__range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
 * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
+* **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
 * **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
+* **Postgres/MySQL/MSSQL**: New $__timeGroupAlias macro. Postgres $__timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm)
+* **Postgres/MySQL/MSSQL**: Escape single quotes in variables [#12785](https://github.com/grafana/grafana/issues/12785), thx [@eMerzh](https://github.com/eMerzh)
 * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
 * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
-* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
+* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
 * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
 * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
 * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
 * **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow)
+* **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z)
 * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
+* **Table**: Fix link color when using light theme and thresholds in use [#12766](https://github.com/grafana/grafana/issues/12766)
+om/grafana/grafana/issues/12668)
+* **Table**: Fix for useless horizontal scrollbar for table panel [#9964](https://github.com/grafana/grafana/issues/9964)
+* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
 * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
+* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
+* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
+* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
+* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
+* **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
+* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
+* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
+* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
+
+### Breaking changes
+
+* Postgres datasource no longer automatically adds time column alias when using the $__timeGroup alias. However, there's code in place which should make this change backward compatible and shouldn't create any issues.
+
+### New experimental features
+
+These are new features that's still being worked on and are in an experimental phase. We incourage users to try these out and provide any feedback in related issue.
+
+* **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)
+
+### Tech
+
+* **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224)
 
 # 5.2.2 (2018-07-25)
 

+ 82 - 0
Dockerfile

@@ -0,0 +1,82 @@
+# Golang build container
+FROM golang:1.10
+
+WORKDIR $GOPATH/src/github.com/grafana/grafana
+
+COPY Gopkg.toml Gopkg.lock ./
+COPY vendor vendor
+
+ARG DEP_ENSURE=""
+RUN if [ ! -z "${DEP_ENSURE}" ]; then \
+      go get -u github.com/golang/dep/cmd/dep && \
+      dep ensure --vendor-only; \
+    fi
+
+COPY pkg pkg
+COPY build.go build.go
+COPY package.json package.json
+
+RUN go run build.go build
+
+# Node build container
+FROM node:8
+
+WORKDIR /usr/src/app/
+
+COPY package.json yarn.lock ./
+RUN yarn install --pure-lockfile --no-progress
+
+COPY Gruntfile.js tsconfig.json tslint.json ./
+COPY public public
+COPY scripts scripts
+COPY emails emails
+
+ENV NODE_ENV production
+RUN ./node_modules/.bin/grunt build
+
+# Final container
+FROM debian:stretch-slim
+
+ARG GF_UID="472"
+ARG GF_GID="472"
+
+ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \
+    GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
+    GF_PATHS_DATA="/var/lib/grafana" \
+    GF_PATHS_HOME="/usr/share/grafana" \
+    GF_PATHS_LOGS="/var/log/grafana" \
+    GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
+    GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
+
+WORKDIR $GF_PATHS_HOME
+
+RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
+    apt-get autoremove -y && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY conf ./conf
+
+RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
+    groupadd -r -g $GF_GID grafana && \
+    useradd -r -u $GF_UID -g grafana grafana && \
+    mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
+             "$GF_PATHS_PROVISIONING/dashboards" \
+             "$GF_PATHS_LOGS" \
+             "$GF_PATHS_PLUGINS" \
+             "$GF_PATHS_DATA" && \
+    cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
+    cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
+    chown -R grafana:grafana "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" && \
+    chmod 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS"
+
+COPY --from=0 /go/src/github.com/grafana/grafana/bin/linux-amd64/grafana-server /go/src/github.com/grafana/grafana/bin/linux-amd64/grafana-cli ./bin/
+COPY --from=1 /usr/src/app/public ./public
+COPY --from=1 /usr/src/app/tools ./tools
+COPY tools/phantomjs/render.js ./tools/phantomjs/render.js
+
+EXPOSE 3000
+
+COPY ./packaging/docker/run.sh /run.sh
+
+USER grafana
+ENTRYPOINT [ "/run.sh" ]

+ 0 - 1
Gruntfile.js

@@ -1,4 +1,3 @@
-/* jshint node:true */
 'use strict';
 module.exports = function (grunt) {
   var os = require('os');

+ 10 - 1
Makefile

@@ -24,6 +24,15 @@ build-js:
 
 build: build-go build-js
 
+build-docker-dev:
+	@echo "\033[92mInfo:\033[0m the frontend code is expected to be built already."
+	go run build.go -goos linux -pkg-arch amd64 ${OPT} build package-only latest
+	cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
+	cd packaging/docker && docker build --tag grafana/grafana:dev .
+
+build-docker-full:
+	docker build --tag grafana/grafana:dev .
+
 test-go:
 	go test -v ./pkg/...
 
@@ -36,4 +45,4 @@ run:
 	./bin/grafana-server
 
 protoc:
-	protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.
+	protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.

+ 1 - 1
NOTICE.md

@@ -1,5 +1,5 @@
 
-Copyright 2014-2017 Grafana Labs
+Copyright 2014-2018 Grafana Labs
 
 This software is based on Kibana: 
 Copyright 2012-2013 Elasticsearch BV

+ 31 - 19
README.md

@@ -43,7 +43,7 @@ To build the assets, rebuild on file change, and serve them by Grafana's webserv
 ```bash
 npm install -g yarn
 yarn install --pure-lockfile
-npm run watch
+yarn watch
 ```
 
 Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them by webpack-dev-server (http://localhost:3333):
@@ -54,14 +54,9 @@ env GRAFANA_THEME=light yarn start
 ```
 Note: HMR for Angular is not supported. If you edit files in the Angular part of the app, the whole page will reload.
 
-Run tests 
+Run tests
 ```bash
-npm run jest
-```
-
-Run karma tests
-```bash
-npm run karma
+yarn jest
 ```
 
 ### Recompile backend on source change
@@ -74,6 +69,15 @@ bra run
 
 Open grafana in your browser (default: `http://localhost:3000`) and login with admin user (default: `user/pass = admin/admin`).
 
+### Building a docker image (on linux/amd64)
+
+This builds a docker image from your local sources:
+
+1. Build the frontend `go run build.go build-frontend`
+2. Build the docker image `make build-docker-dev`
+
+The resulting image will be tagged as `grafana/grafana:dev`
+
 ### Dev config
 
 Create a custom.ini in the conf directory to override default configuration options.
@@ -89,30 +93,38 @@ In your custom.ini uncomment (remove the leading `;`) sign. And set `app_mode =
 #### Frontend
 Execute all frontend tests
 ```bash
-npm run test
+yarn test
 ```
 
-Writing & watching frontend tests (we have two test runners)
+Writing & watching frontend tests
 
-- jest for all new tests that do not require browser context (React+more)
-   - Start watcher: `npm run jest`
-   - Jest will run all test files that end with the name ".jest.ts"
-- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
-  - Start watcher: `npm run karma`
-  - Karma+Mocha runs all files that end with the name "_specs.ts".
+- Start watcher: `yarn jest`
+- Jest will run all test files that end with the name ".test.ts"
 
 #### Backend
 ```bash
 # Run Golang tests using sqlite3 as database (default)
-go test ./pkg/... 
+go test ./pkg/...
 
 # Run Golang tests using mysql as database - convenient to use /docker/blocks/mysql_tests
-GRAFANA_TEST_DB=mysql go test ./pkg/... 
+GRAFANA_TEST_DB=mysql go test ./pkg/...
 
 # Run Golang tests using postgres as database - convenient to use /docker/blocks/postgres_tests
-GRAFANA_TEST_DB=postgres go test ./pkg/... 
+GRAFANA_TEST_DB=postgres go test ./pkg/...
 ```
 
+## Building custom docker image
+
+You can build a custom image using Docker, which doesn't require installing any dependencies besides docker itself.
+```bash
+git clone https://github.com/grafana/grafana
+cd grafana
+docker build -t grafana:dev .
+docker run -d --name=grafana -p 3000:3000 grafana:dev
+```
+
+Open grafana in your browser (default: `http://localhost:3000`) and login with admin user (default: `user/pass = admin/admin`).
+
 ## Contribute
 
 If you have any idea for an improvement or found a bug, do not hesitate to open an issue.

+ 8 - 8
ROADMAP.md

@@ -1,9 +1,10 @@
-# Roadmap (2018-06-26)
+# Roadmap (2018-08-07)
 
 This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change. 
 But it will give you an idea of our current vision and plan. 
   
 ### Short term (1-2 months)
+  - PRs & Bugs
   - Multi-Stat panel
   - Metrics & Log Explore UI 
  
@@ -11,17 +12,16 @@ But it will give you an idea of our current vision and plan.
   - React Panels 
   - Change visualization (panel type) on the fly. 
   - Templating Query Editor UI Plugin hook
+  - Backend plugins
   
 ### Long term (4 - 8 months)
-
-- Alerting improvements (silence, per series tracking, etc)
-- Progress on React migration
+ - Alerting improvements (silence, per series tracking, etc)
+ - Progress on React migration
 
 ### In a distant future far far away
-
-- Meta queries 
-- Integrated light weight TSDB
-- Web socket & live data sources
+ - Meta queries 
+ - Integrated light weight TSDB
+ - Web socket & live data sources
 
 ### Outside contributions
 We know this is being worked on right now by contributors (and we hope to merge it when it's ready). 

+ 18 - 0
conf/defaults.ini

@@ -213,6 +213,9 @@ allow_org_create = false
 # Set to true to automatically assign new users to the default organization (id 1)
 auto_assign_org = true
 
+# Set this value to automatically add new users to the provided organization (if auto_assign_org above is set to true)
+auto_assign_org_id = 1
+
 # Default role new users will be automatically assigned (if auto_assign_org above is set to true)
 auto_assign_org_role = Viewer
 
@@ -267,6 +270,18 @@ api_url = https://api.github.com/user
 team_ids =
 allowed_organizations =
 
+#################################### GitLab Auth #########################
+[auth.gitlab]
+enabled = false
+allow_sign_up = true
+client_id = some_id
+client_secret = some_secret
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups =
+
 #################################### Google Auth #########################
 [auth.google]
 enabled = false
@@ -312,6 +327,9 @@ api_url =
 team_ids =
 allowed_organizations =
 tls_skip_verify_insecure = false
+tls_client_cert =
+tls_client_key =
+tls_client_ca =
 
 #################################### Basic Auth ##########################
 [auth.basic]

+ 3 - 0
conf/ldap.toml

@@ -15,6 +15,9 @@ start_tls = false
 ssl_skip_verify = false
 # set to the path to your root CA certificate or leave unset to use system defaults
 # root_ca_cert = "/path/to/certificate.crt"
+# Authentication against LDAP servers requiring client certificates
+# client_cert = "/path/to/client.crt"
+# client_key = "/path/to/client.key"
 
 # Search user bind dn
 bind_dn = "cn=admin,dc=grafana,dc=org"

+ 4 - 0
conf/sample.ini

@@ -272,6 +272,10 @@ log_queries =
 ;api_url = https://foo.bar/user
 ;team_ids =
 ;allowed_organizations =
+;tls_skip_verify_insecure = false
+;tls_client_cert =
+;tls_client_key =
+;tls_client_ca =
 
 #################################### Grafana.com Auth ####################
 [auth.grafana_com]

+ 213 - 47
devenv/dev-dashboards/datasource_tests_mssql_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "iteration": 1532949769359,
+  "iteration": 1533713720618,
   "links": [],
   "panels": [
     {
@@ -338,8 +338,8 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
         "y": 7
       },
@@ -369,7 +369,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -421,9 +421,9 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
+        "h": 6,
+        "w": 6,
+        "x": 6,
         "y": 7
       },
       "id": 9,
@@ -452,7 +452,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', NULL), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -504,9 +504,9 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
+        "h": 6,
+        "w": 6,
+        "x": 12,
         "y": 7
       },
       "id": 10,
@@ -535,7 +535,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', 10.0) AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', 10.0), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -579,6 +579,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mssql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 7
+      },
+      "id": 36,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null as zero",
+      "percentage": false,
+      "pointradius": 3,
+      "points": true,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "timeGroup macro 5m with fill(previous) and null as zero",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": true,
@@ -587,10 +670,10 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
-        "y": 16
+        "y": 13
       },
       "id": 16,
       "legend": {
@@ -618,7 +701,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize') AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -670,10 +753,10 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 6,
+        "y": 13
       },
       "id": 12,
       "legend": {
@@ -701,7 +784,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', NULL) AS time, sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', NULL), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -753,10 +836,10 @@
       "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 12,
+        "y": 13
       },
       "id": 13,
       "legend": {
@@ -784,7 +867,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', 100.0) AS time, sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', 100.0), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -828,6 +911,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": true,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mssql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 13
+      },
+      "id": 37,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": false,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Metrics - timeGroup macro $summarize with fill(previous)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": false,
@@ -839,7 +1005,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 25
+        "y": 19
       },
       "id": 27,
       "legend": {
@@ -871,7 +1037,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
           "refId": "A"
         }
       ],
@@ -926,7 +1092,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 25
+        "y": 19
       },
       "id": 5,
       "legend": {
@@ -968,7 +1134,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  avg(valueOne) as valueOne, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values \nWHERE \n  $__timeFilter(time) AND \n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize')\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  avg(valueOne) as valueOne, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values \nWHERE \n  $__timeFilter(time) AND \n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize')\nORDER BY 1",
           "refId": "A"
         },
         {
@@ -1029,7 +1195,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 33
+        "y": 27
       },
       "id": 4,
       "legend": {
@@ -1116,7 +1282,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 33
+        "y": 27
       },
       "id": 28,
       "legend": {
@@ -1201,7 +1367,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 41
+        "y": 35
       },
       "id": 19,
       "legend": {
@@ -1288,7 +1454,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 41
+        "y": 35
       },
       "id": 18,
       "legend": {
@@ -1373,7 +1539,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 49
+        "y": 43
       },
       "id": 17,
       "legend": {
@@ -1460,7 +1626,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 49
+        "y": 43
       },
       "id": 20,
       "legend": {
@@ -1545,7 +1711,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 57
+        "y": 51
       },
       "id": 29,
       "legend": {
@@ -1632,7 +1798,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 57
+        "y": 51
       },
       "id": 30,
       "legend": {
@@ -1719,7 +1885,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 65
+        "y": 59
       },
       "id": 14,
       "legend": {
@@ -1807,7 +1973,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 65
+        "y": 59
       },
       "id": 15,
       "legend": {
@@ -1894,7 +2060,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 73
+        "y": 67
       },
       "id": 25,
       "legend": {
@@ -1982,7 +2148,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 73
+        "y": 67
       },
       "id": 22,
       "legend": {
@@ -2069,7 +2235,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 81
+        "y": 75
       },
       "id": 21,
       "legend": {
@@ -2157,7 +2323,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 81
+        "y": 75
       },
       "id": 26,
       "legend": {
@@ -2244,7 +2410,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 89
+        "y": 83
       },
       "id": 23,
       "legend": {
@@ -2332,7 +2498,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 89
+        "y": 83
       },
       "id": 24,
       "legend": {
@@ -2542,5 +2708,5 @@
   "timezone": "",
   "title": "Datasource tests - MSSQL (unit test)",
   "uid": "GlAqcPgmz",
-  "version": 3
+  "version": 10
 }

+ 211 - 45
devenv/dev-dashboards/datasource_tests_mysql_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "iteration": 1532949531280,
+  "iteration": 1533714324007,
   "links": [],
   "panels": [
     {
@@ -338,8 +338,8 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
         "y": 7
       },
@@ -369,7 +369,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -421,9 +421,9 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
+        "h": 6,
+        "w": 6,
+        "x": 6,
         "y": 7
       },
       "id": 9,
@@ -452,7 +452,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', NULL), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -504,9 +504,9 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
+        "h": 6,
+        "w": 6,
+        "x": 12,
         "y": 7
       },
       "id": 10,
@@ -535,7 +535,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', 10.0) AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', 10.0), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -579,6 +579,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mysql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 7
+      },
+      "id": 36,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": true,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "timeGroup macro 5m with fill(previous)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": true,
@@ -587,10 +670,10 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
-        "y": 16
+        "y": 13
       },
       "id": 16,
       "legend": {
@@ -618,7 +701,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize') AS time, avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -670,10 +753,10 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 6,
+        "y": 13
       },
       "id": 12,
       "legend": {
@@ -701,7 +784,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', NULL) AS time, sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', NULL), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -753,10 +836,10 @@
       "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 12,
+        "y": 13
       },
       "id": 13,
       "legend": {
@@ -784,7 +867,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', 100.0) AS time, sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', 100.0), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -828,6 +911,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": true,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-mysql-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 13
+      },
+      "id": 37,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": false,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Metrics - timeGroup macro $summarize with fill(previous)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": false,
@@ -839,7 +1005,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 25
+        "y": 19
       },
       "id": 27,
       "legend": {
@@ -871,7 +1037,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
           "refId": "A"
         }
       ],
@@ -926,7 +1092,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 25
+        "y": 19
       },
       "id": 5,
       "legend": {
@@ -968,7 +1134,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  avg(valueOne) as valueOne, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values \nWHERE \n  $__timeFilter(time) AND \n  measurement IN($metric)\nGROUP BY 1\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  avg(valueOne) as valueOne, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values \nWHERE \n  $__timeFilter(time) AND \n  measurement IN($metric)\nGROUP BY 1\nORDER BY 1",
           "refId": "A"
         }
       ],
@@ -1023,7 +1189,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 33
+        "y": 27
       },
       "id": 4,
       "legend": {
@@ -1110,7 +1276,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 33
+        "y": 27
       },
       "id": 28,
       "legend": {
@@ -1195,7 +1361,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 41
+        "y": 35
       },
       "id": 19,
       "legend": {
@@ -1282,7 +1448,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 41
+        "y": 35
       },
       "id": 18,
       "legend": {
@@ -1367,7 +1533,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 49
+        "y": 43
       },
       "id": 17,
       "legend": {
@@ -1454,7 +1620,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 49
+        "y": 43
       },
       "id": 20,
       "legend": {
@@ -1539,7 +1705,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 57
+        "y": 51
       },
       "id": 14,
       "legend": {
@@ -1627,7 +1793,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 57
+        "y": 51
       },
       "id": 15,
       "legend": {
@@ -1714,7 +1880,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 65
+        "y": 59
       },
       "id": 25,
       "legend": {
@@ -1802,7 +1968,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 65
+        "y": 59
       },
       "id": 22,
       "legend": {
@@ -1889,7 +2055,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 73
+        "y": 67
       },
       "id": 21,
       "legend": {
@@ -1977,7 +2143,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 73
+        "y": 67
       },
       "id": 26,
       "legend": {
@@ -2064,7 +2230,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 81
+        "y": 75
       },
       "id": 23,
       "legend": {
@@ -2152,7 +2318,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 81
+        "y": 75
       },
       "id": 24,
       "legend": {
@@ -2360,5 +2526,5 @@
   "timezone": "",
   "title": "Datasource tests - MySQL (unittest)",
   "uid": "Hmf8FDkmz",
-  "version": 1
+  "version": 9
 }

+ 211 - 45
devenv/dev-dashboards/datasource_tests_postgres_unittest.json

@@ -64,7 +64,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "iteration": 1532951521836,
+  "iteration": 1533714184500,
   "links": [],
   "panels": [
     {
@@ -338,8 +338,8 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
         "y": 7
       },
@@ -369,7 +369,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -421,9 +421,9 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
+        "h": 6,
+        "w": 6,
+        "x": 6,
         "y": 7
       },
       "id": 9,
@@ -452,7 +452,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', NULL), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', NULL), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -504,9 +504,9 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
+        "h": 6,
+        "w": 6,
+        "x": 12,
         "y": 7
       },
       "id": 10,
@@ -535,7 +535,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '5m', 10.0), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', 10.0), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -579,6 +579,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-postgres-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 7
+      },
+      "id": 36,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": true,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": true,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "timeGroup macro 5m with fill(previous)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": true,
@@ -587,10 +670,10 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
+        "h": 6,
+        "w": 6,
         "x": 0,
-        "y": 16
+        "y": 13
       },
       "id": 16,
       "legend": {
@@ -618,7 +701,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize'), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -670,10 +753,10 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 8,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 6,
+        "y": 13
       },
       "id": 12,
       "legend": {
@@ -701,7 +784,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', NULL), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', NULL), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -753,10 +836,10 @@
       "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
-        "h": 9,
-        "w": 8,
-        "x": 16,
-        "y": 16
+        "h": 6,
+        "w": 6,
+        "x": 12,
+        "y": 13
       },
       "id": 13,
       "legend": {
@@ -784,7 +867,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeGroup(time, '$summarize', 100.0), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', 100.0), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
           "refId": "A"
         }
       ],
@@ -828,6 +911,89 @@
         "alignLevel": null
       }
     },
+    {
+      "aliasColors": {},
+      "bars": true,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": "gdev-postgres-ds-tests",
+      "fill": 2,
+      "gridPos": {
+        "h": 6,
+        "w": 6,
+        "x": 18,
+        "y": 13
+      },
+      "id": 37,
+      "legend": {
+        "avg": false,
+        "current": false,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": false,
+      "linewidth": 2,
+      "links": [],
+      "nullPointMode": "null",
+      "percentage": false,
+      "pointradius": 3,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": true,
+      "targets": [
+        {
+          "alias": "",
+          "format": "time_series",
+          "rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
+          "refId": "A"
+        }
+      ],
+      "thresholds": [],
+      "timeFrom": null,
+      "timeShift": null,
+      "title": "Metrics - timeGroup macro $summarize with fill(previous)",
+      "tooltip": {
+        "shared": true,
+        "sort": 0,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "buckets": null,
+        "mode": "time",
+        "name": null,
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        },
+        {
+          "format": "short",
+          "label": null,
+          "logBase": 1,
+          "max": null,
+          "min": null,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
+    },
     {
       "aliasColors": {},
       "bars": false,
@@ -839,7 +1005,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 25
+        "y": 19
       },
       "id": 27,
       "legend": {
@@ -871,7 +1037,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
           "refId": "A"
         }
       ],
@@ -926,7 +1092,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 25
+        "y": 19
       },
       "id": 5,
       "legend": {
@@ -956,7 +1122,7 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  avg(\"valueOne\") as \"valueOne\", \n  avg(\"valueTwo\") as \"valueTwo\" \nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroupAlias(time, '$summarize'), \n  avg(\"valueOne\") as \"valueOne\", \n  avg(\"valueTwo\") as \"valueTwo\" \nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1\nORDER BY 1",
           "refId": "A"
         }
       ],
@@ -1011,7 +1177,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 33
+        "y": 27
       },
       "id": 4,
       "legend": {
@@ -1098,7 +1264,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 33
+        "y": 27
       },
       "id": 28,
       "legend": {
@@ -1183,7 +1349,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 41
+        "y": 35
       },
       "id": 19,
       "legend": {
@@ -1270,7 +1436,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 41
+        "y": 35
       },
       "id": 18,
       "legend": {
@@ -1355,7 +1521,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 49
+        "y": 43
       },
       "id": 17,
       "legend": {
@@ -1442,7 +1608,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 49
+        "y": 43
       },
       "id": 20,
       "legend": {
@@ -1527,7 +1693,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 57
+        "y": 51
       },
       "id": 14,
       "legend": {
@@ -1615,7 +1781,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 57
+        "y": 51
       },
       "id": 15,
       "legend": {
@@ -1702,7 +1868,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 65
+        "y": 59
       },
       "id": 25,
       "legend": {
@@ -1790,7 +1956,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 65
+        "y": 59
       },
       "id": 22,
       "legend": {
@@ -1877,7 +2043,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 73
+        "y": 67
       },
       "id": 21,
       "legend": {
@@ -1965,7 +2131,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 73
+        "y": 67
       },
       "id": 26,
       "legend": {
@@ -2052,7 +2218,7 @@
         "h": 8,
         "w": 12,
         "x": 0,
-        "y": 81
+        "y": 75
       },
       "id": 23,
       "legend": {
@@ -2140,7 +2306,7 @@
         "h": 8,
         "w": 12,
         "x": 12,
-        "y": 81
+        "y": 75
       },
       "id": 24,
       "legend": {
@@ -2352,5 +2518,5 @@
   "timezone": "",
   "title": "Datasource tests - Postgres (unittest)",
   "uid": "vHQdlVziz",
-  "version": 1
+  "version": 9
 }

+ 2 - 0
docs/sources/features/datasources/cloudwatch.md

@@ -115,6 +115,8 @@ and `dimension keys/values`.
 In place of `region` you can specify `default` to use the default region configured in the datasource for the query,
 e.g. `metrics(AWS/DynamoDB, default)` or `dimension_values(default, ..., ..., ...)`.
 
+Read more about the available dimensions in the [CloudWatch Metrics and Dimensions Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CW_Support_For_AWS.html).
+
 Name | Description
 ------- | --------
 *regions()* | Returns a list of regions AWS provides their service.

+ 1 - 1
docs/sources/features/datasources/elasticsearch.md

@@ -115,7 +115,7 @@ The Elasticsearch data source supports two types of queries you can use in the *
 
 Query | Description
 ------------ | -------------
-*{"find": "fields", "type": "keyword"} | Returns a list of field names with the index type `keyword`.
+*{"find": "fields", "type": "keyword"}* | Returns a list of field names with the index type `keyword`.
 *{"find": "terms", "field": "@hostname", "size": 1000}* |  Returns a list of values for a field using term aggregation. Query will user current dashboard time range as time range for query.
 *{"find": "terms", "field": "@hostname", "query": '<lucene query>'}* | Returns a list of values for a field using term aggregation & and a specified lucene query filter. Query will use current dashboard time range as time range for query.
 

+ 4 - 1
docs/sources/features/datasources/mssql.md

@@ -81,7 +81,10 @@ Macro example | Description
 *$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
 *$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
 *$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in GROUP BY clause. Providing a *fillValue* of *NULL* or *floating value* will automatically fill empty series in timerange with that value. <br/>For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
-*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
+*$__timeGroup(dateColumn,'5m', 0)* | Same as above, but with a fill parameter so missing points in that series will be added by Grafana and 0 will be used as the value.
+*$__timeGroup(dateColumn,'5m', NULL)* | Same as above, but NULL will be used as the value for missing points.
+*$__timeGroup(dateColumn,'5m', previous)* | Same as above, but the previous value in that series will be used as the fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
+*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup, but with an added column alias (only available in Grafana 5.3+).
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*

+ 4 - 1
docs/sources/features/datasources/mysql.md

@@ -64,7 +64,10 @@ Macro example | Description
 *$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
 *$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
-*$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
+*$__timeGroup(dateColumn,'5m', 0)* | Same as above, but with a fill parameter so missing points in that series will be added by Grafana and 0 will be used as the value.
+*$__timeGroup(dateColumn,'5m', NULL)* | Same as above, but NULL will be used as the value for missing points.
+*$__timeGroup(dateColumn,'5m', previous)* | Same as above, but the previous value in that series will be used as the fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
+*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup, but with an added column alias (only available in Grafana 5.3+).
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*

+ 7 - 2
docs/sources/features/datasources/postgres.md

@@ -31,6 +31,7 @@ Name | Description
 *User* | Database user's login/username
 *Password* | Database user's password
 *SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
+*TimescaleDB* | With this option enabled, Grafana will use TimescaleDB features, e.g. use ```time_bucket``` for grouping by time (only available in Grafana 5.3+).
 
 ### Database User Permissions (Important!)
 
@@ -60,8 +61,11 @@ Macro example | Description
 *$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
 *$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
 *$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
-*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300 AS time*
-*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
+*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300*
+*$__timeGroup(dateColumn,'5m', 0)* | Same as above, but with a fill parameter so missing points in that series will be added by Grafana and 0 will be used as the value.
+*$__timeGroup(dateColumn,'5m', NULL)* | Same as above, but NULL will be used as the value for missing points.
+*$__timeGroup(dateColumn,'5m', previous)* | Same as above, but the previous value in that series will be used as the fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
+*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup, but with an added column alias (only available in Grafana 5.3+).
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
 *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
 *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
@@ -286,4 +290,5 @@ datasources:
       password: "Password!"
     jsonData:
       sslmode: "disable" # disable/require/verify-ca/verify-full
+      timescaledb: false
 ```

+ 4 - 4
docs/sources/features/datasources/prometheus.md

@@ -78,9 +78,9 @@ For details of *metric names*, *label names* and *label values* are please refer
 
 #### Using interval and range variables
 
-> Support for `$__range` and `$__range_ms` only available from Grafana v5.3
+> Support for `$__range`, `$__range_s` and `$__range_ms` only available from Grafana v5.3
 
-It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
+It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range`, `$__range_s` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
 `label_values` function doesn't support queries.
 
 Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
@@ -94,10 +94,10 @@ Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instanc
 Regex: /"([^"]+)"/
 ```
 
-Populate a variable with the instances having a certain state over the time range shown in the dashboard:
+Populate a variable with the instances having a certain state over the time range shown in the dashboard, using the more precise `$__range_s`:
 
 ```
-Query: query_result(max_over_time(<metric>[$__range]) != <state>)
+Query: query_result(max_over_time(<metric>[${__range_s}s]) != <state>)
 Regex:
 ```
 

+ 0 - 1
docs/sources/http_api/alerting.md

@@ -59,7 +59,6 @@ Content-Type: application/json
     "panelId": 1,
     "name": "fire place sensor",
     "state": "alerting",
-    "message": "Someone is trying to break in through the fire place",
     "newStateDate": "2018-05-14T05:55:20+02:00",
     "evalDate": "0001-01-01T00:00:00Z",
     "evalData": null,

+ 1 - 1
docs/sources/http_api/dashboard.md

@@ -85,7 +85,7 @@ Status Codes:
 - **403** – Access denied
 - **412** – Precondition failed
 
-The **412** status code is used for explaing that you cannot create the dashboard and why.
+The **412** status code is used for explaining that you cannot create the dashboard and why.
 There can be different reasons for this:
 
 - The dashboard has been changed by someone else, `status=version-mismatch`

+ 1 - 1
docs/sources/http_api/folder.md

@@ -223,7 +223,7 @@ Status Codes:
 - **404** – Folder not found
 - **412** – Precondition failed
 
-The **412** status code is used for explaing that you cannot update the folder and why.
+The **412** status code is used for explaining that you cannot update the folder and why.
 There can be different reasons for this:
 
 - The folder has been changed by someone else, `status=version-mismatch`

+ 33 - 0
docs/sources/http_api/user.md

@@ -363,6 +363,39 @@ Content-Type: application/json
 ]
 ```
 
+## Teams that the current user is a member of
+
+`GET /api/user/teams`
+
+Return a list of all teams that the current user is a member of.
+
+**Example Request**:
+
+```http
+GET /api/user/teams HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+[
+  {
+    "id": 1,
+    "orgId": 1,
+    "name": "MyTestTeam",
+    "email": "",
+    "avatarUrl": "\/avatar\/3f49c15916554246daa714b9bd0ee398",
+    "memberCount": 1
+  }
+]
+```
+
 ## Star a dashboard
 
 `POST /api/user/stars/dashboard/:dashboardId`

+ 109 - 7
docs/sources/installation/configuration.md

@@ -84,7 +84,7 @@ command line in the init.d script or the systemd service file.
 
 ### temp_data_lifetime
 
-How long temporary images in `data` directory should be kept. Defaults to: `24h`. Supported modifiers: `h` (hours), 
+How long temporary images in `data` directory should be kept. Defaults to: `24h`. Supported modifiers: `h` (hours),
 `m` (minutes), for example: `168h`, `30m`, `10h30m`. Use `0` to never clean up temporary files.
 
 ### logs
@@ -181,7 +181,7 @@ embedded database (included in the main Grafana binary).
 
 ### url
 
-Use either URL or or the other fields below to configure the database
+Use either URL or the other fields below to configure the database
 Example: `mysql://user:secret@host:port/database`
 
 ### type
@@ -195,9 +195,9 @@ will be stored.
 
 ### host
 
-Only applicable to MySQL or Postgres. Includes IP or hostname and port.
+Only applicable to MySQL or Postgres. Includes IP or hostname and port, or in case of Unix sockets, the path to the socket.
 For example, for MySQL running on the same host as Grafana: `host =
-127.0.0.1:3306`
+127.0.0.1:3306` or with unix sockets: `host = /var/run/mysqld/mysqld.sock`
 
 ### name
 
@@ -430,6 +430,108 @@ allowed_organizations = github google
 
 <hr>
 
+## [auth.gitlab]
+
+> Only available in Grafana v5.3+.
+
+You need to [create a GitLab OAuth
+application](https://docs.gitlab.com/ce/integration/oauth_provider.html).
+Choose a descriptive *Name*, and use the following *Redirect URI*:
+
+```
+https://grafana.example.com/login/gitlab
+```
+
+where `https://grafana.example.com` is the URL you use to connect to Grafana.
+Adjust it as needed if you don't use HTTPS or if you use a different port; for
+instance, if you access Grafana at `http://203.0.113.31:3000`, you should use
+
+```
+http://203.0.113.31:3000/login/gitlab
+```
+
+Finally, select *api* as the *Scope* and submit the form. Note that if you're
+not going to use GitLab groups for authorization (i.e. not setting
+`allowed_groups`, see below), you can select *read_user* instead of *api* as
+the *Scope*, thus giving a more restricted access to your GitLab API.
+
+You'll get an *Application Id* and a *Secret* in return; we'll call them
+`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this
+section.
+
+Add the following to your Grafana configuration file to enable GitLab
+authentication:
+
+```ini
+[auth.gitlab]
+enabled = false
+allow_sign_up = false
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups =
+```
+
+Restart the Grafana backend for your changes to take effect.
+
+If you use your own instance of GitLab instead of `gitlab.com`, adjust
+`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com`
+hostname with your own.
+
+With `allow_sign_up` set to `false`, only existing users will be able to login
+using their GitLab account, but with `allow_sign_up` set to `true`, *any* user
+who can authenticate on GitLab will be able to login on your Grafana instance;
+if you use the public `gitlab.com`, it means anyone in the world would be able
+to login on your Grafana instance.
+
+You can however limit access to only members of a given group or list of
+groups by setting the `allowed_groups` option.
+
+### allowed_groups
+
+To limit access to authenticated users that are members of one or more [GitLab
+groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups`
+to a comma- or space-separated list of groups. For instance, if you want to
+only give access to members of the `example` group, set
+
+
+```ini
+allowed_groups = example
+```
+
+If you want to also give access to members of the subgroup `bar`, which is in
+the group `foo`, set
+
+```ini
+allowed_groups = example, foo/bar
+```
+
+Note that in GitLab, the group or subgroup name doesn't always match its
+display name, especially if the display name contains spaces or special
+characters. Make sure you always use the group or subgroup name as it appears
+in the URL of the group or subgroup.
+
+Here's a complete example with `allow_sign_up` enabled, and access limited to
+the `example` and `foo/bar` groups:
+
+```ini
+[auth.gitlab]
+enabled = false
+allow_sign_up = true
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups = example, foo/bar
+```
+
+<hr>
+
 ## [auth.google]
 
 First, you need to create a Google OAuth Client:
@@ -697,9 +799,9 @@ session provider you have configured.
 
 - **file:** session file path, e.g. `data/sessions`
 - **mysql:** go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1:3306)/database_name`
-- **postgres:** ex:  user=a password=b host=localhost port=5432 dbname=c sslmode=verify-full
-- **memcache:** ex:  127.0.0.1:11211
-- **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana`
+- **postgres:** ex:  `user=a password=b host=localhost port=5432 dbname=c sslmode=verify-full`
+- **memcache:** ex:  `127.0.0.1:11211`
+- **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana`. For unix socket, use for example: `network=unix,addr=/var/run/redis/redis.sock,pool_size=100,db=grafana`
 
 Postgres valid `sslmode` are `disable`, `require`, `verify-ca`, and `verify-full` (default).
 

+ 4 - 0
docs/sources/installation/ldap.md

@@ -40,6 +40,9 @@ start_tls = false
 ssl_skip_verify = false
 # set to the path to your root CA certificate or leave unset to use system defaults
 # root_ca_cert = "/path/to/certificate.crt"
+# Authentication against LDAP servers requiring client certificates
+# client_cert = "/path/to/client.crt"
+# client_key = "/path/to/client.key"
 
 # Search user bind dn
 bind_dn = "cn=admin,dc=grafana,dc=org"
@@ -48,6 +51,7 @@ bind_dn = "cn=admin,dc=grafana,dc=org"
 bind_password = 'grafana'
 
 # User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)"
+# Allow login from email or username, example "(|(sAMAccountName=%s)(userPrincipalName=%s))"
 search_filter = "(cn=%s)"
 
 # An array of base dns to search through

+ 8 - 11
docs/sources/project/building_from_source.md

@@ -57,7 +57,7 @@ For this you need nodejs (v.6+).
 ```bash
 npm install -g yarn
 yarn install --pure-lockfile
-npm run watch
+yarn watch
 ```
 
 ## Running Grafana Locally
@@ -83,21 +83,18 @@ go get github.com/Unknwon/bra
 bra run
 ```
 
-You'll also need to run `npm run watch` to watch for changes to the front-end (typescript, html, sass)
+You'll also need to run `yarn watch` to watch for changes to the front-end (typescript, html, sass)
 
 ### Running tests
 
-- You can run backend Golang tests using "go test ./pkg/...".
-- Execute all frontend tests with "npm run test"
+- You can run backend Golang tests using `go test ./pkg/...`.
+- Execute all frontend tests with `yarn test`
 
-Writing & watching frontend tests (we have two test runners)
+Writing & watching frontend tests
+
+- Start watcher: `yarn jest`
+- Jest will run all test files that end with the name ".test.ts"
 
-- jest for all new tests that do not require browser context (React+more)
-   - Start watcher: `npm run jest`
-   - Jest will run all test files that end with the name ".jest.ts"
-- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
-  - Start watcher: `npm run karma`
-  - Karma+Mocha runs all files that end with the name "_specs.ts".
 
 ## Creating optimized release packages
 

+ 1 - 1
docs/sources/reference/templating.md

@@ -277,7 +277,7 @@ This variable is only available in the Singlestat panel and can be used in the p
 
 > Only available in Grafana v5.3+
 
-Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`.
+Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond and a second representation called `$__range_ms` and `$__range_s`.
 
 ## Repeating Panels
 

+ 1 - 1
jest.config.js

@@ -13,7 +13,7 @@ module.exports = {
   "roots": [
     "<rootDir>/public"
   ],
-  "testRegex": "(\\.|/)(jest)\\.(jsx?|tsx?)$",
+  "testRegex": "(\\.|/)(test)\\.(jsx?|tsx?)$",
   "moduleFileExtensions": [
     "ts",
     "tsx",

+ 0 - 40
karma.conf.js

@@ -1,40 +0,0 @@
-var webpack = require('webpack');
-var path = require('path');
-var webpackTestConfig = require('./scripts/webpack/webpack.test.js');
-
-module.exports = function(config) {
-
-  'use strict';
-
-  config.set({
-    frameworks: ['mocha', 'expect', 'sinon'],
-
-    // list of files / patterns to load in the browser
-    files: [
-      { pattern: 'public/test/index.ts', watched: false }
-    ],
-
-    preprocessors: {
-      'public/test/index.ts': ['webpack', 'sourcemap'],
-    },
-
-    webpack: webpackTestConfig,
-    webpackMiddleware: {
-      stats: 'minimal',
-    },
-
-    // list of files to exclude
-    exclude: [],
-    reporters: ['dots'],
-    port: 9876,
-    colors: true,
-    logLevel: config.LOG_INFO,
-    autoWatch: true,
-    browsers: ['PhantomJS'],
-    captureTimeout: 20000,
-    singleRun: true,
-    // autoWatchBatchDelay: 1000,
-    // browserNoActivityTimeout: 60000,
-  });
-
-};

+ 2 - 14
package.json

@@ -45,10 +45,7 @@
     "grunt-contrib-concat": "^1.0.1",
     "grunt-contrib-copy": "~1.0.0",
     "grunt-contrib-cssmin": "~1.0.2",
-    "grunt-contrib-jshint": "~1.1.0",
     "grunt-exec": "^1.0.1",
-    "grunt-jscs": "3.0.1",
-    "grunt-karma": "~2.0.0",
     "grunt-notify": "^0.4.5",
     "grunt-postcss": "^0.8.0",
     "grunt-sass": "^2.0.0",
@@ -60,15 +57,6 @@
     "html-webpack-plugin": "^3.2.0",
     "husky": "^0.14.3",
     "jest": "^22.0.4",
-    "jshint-stylish": "~2.2.1",
-    "karma": "1.7.0",
-    "karma-chrome-launcher": "~2.2.0",
-    "karma-expect": "~1.1.3",
-    "karma-mocha": "~1.3.0",
-    "karma-phantomjs-launcher": "1.0.4",
-    "karma-sinon": "^1.0.5",
-    "karma-sourcemap-loader": "^0.3.7",
-    "karma-webpack": "^3.0.0",
     "lint-staged": "^6.0.0",
     "load-grunt-tasks": "3.5.2",
     "mini-css-extract-plugin": "^0.4.0",
@@ -115,7 +103,6 @@
     "test": "grunt test",
     "test:coverage": "grunt test --coverage=true",
     "lint": "tslint -c tslint.json --project tsconfig.json --type-check",
-    "karma": "grunt karma:dev",
     "jest": "jest --notify --watch",
     "api-tests": "jest --notify --watch --config=tests/api/jest.js",
     "precommit": "lint-staged && grunt precommit"
@@ -166,6 +153,7 @@
     "mousetrap-global-bind": "^1.1.0",
     "prismjs": "^1.6.0",
     "prop-types": "^15.6.0",
+    "rc-cascader": "^0.14.0",
     "react": "^16.2.0",
     "react-dom": "^16.2.0",
     "react-grid-layout": "0.16.6",
@@ -187,4 +175,4 @@
   "resolutions": {
     "caniuse-db": "1.0.30000772"
   }
-}
+}

+ 52 - 0
packaging/docker/Dockerfile

@@ -0,0 +1,52 @@
+FROM debian:stretch-slim
+
+ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
+
+RUN apt-get update && apt-get install -qq -y tar && \
+    apt-get autoremove -y && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
+
+RUN mkdir /tmp/grafana && tar xfvz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
+
+FROM debian:stretch-slim
+
+ARG GF_UID="472"
+ARG GF_GID="472"
+
+ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \
+    GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
+    GF_PATHS_DATA="/var/lib/grafana" \
+    GF_PATHS_HOME="/usr/share/grafana" \
+    GF_PATHS_LOGS="/var/log/grafana" \
+    GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
+    GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
+
+WORKDIR $GF_PATHS_HOME
+
+RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
+    apt-get autoremove -y && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY --from=0 /tmp/grafana "$GF_PATHS_HOME"
+
+RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
+    groupadd -r -g $GF_GID grafana && \
+    useradd -r -u $GF_UID -g grafana grafana && \
+    mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
+             "$GF_PATHS_PROVISIONING/dashboards" \
+             "$GF_PATHS_LOGS" \
+             "$GF_PATHS_PLUGINS" \
+             "$GF_PATHS_DATA" && \
+    cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
+    cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
+    chown -R grafana:grafana "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" && \
+    chmod 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS"
+
+EXPOSE 3000
+
+COPY ./run.sh /run.sh
+
+USER grafana
+ENTRYPOINT [ "/run.sh" ]

+ 43 - 0
packaging/docker/README.md

@@ -0,0 +1,43 @@
+# Grafana Docker image
+
+## Running your Grafana container
+
+Start your container binding the external port `3000`.
+
+```bash
+docker run -d --name=grafana -p 3000:3000 grafana/grafana
+```
+
+Try it out, default admin user is admin/admin.
+
+## How to use the container
+
+Further documentation can be found at http://docs.grafana.org/installation/docker/
+
+## Changelog
+
+### v5.1.5, v5.2.0-beta2
+* Fix: config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170)
+
+### v5.2.0-beta1
+* Support for Docker Secrets
+
+### v5.1.0
+* Major restructuring of the container
+* Usage of `chown` removed
+* File permissions incompatibility with previous versions
+  * user id changed from 104 to 472
+  * group id changed from 107 to 472
+* Runs as the grafana user by default (instead of root)
+* All default volumes removed
+
+### v4.2.0
+* Plugins are now installed into ${GF_PATHS_PLUGINS}
+* Building the container now requires a full url to the deb package instead of just version
+* Fixes bug caused by installing multiple plugins
+
+### v4.0.0-beta2
+* Plugins dir (`/var/lib/grafana/plugins`) is no longer a separate volume
+
+### v3.1.1
+* Make it possible to install specific plugin version https://github.com/grafana/grafana-docker/issues/59#issuecomment-260584026

+ 13 - 0
packaging/docker/build-deploy.sh

@@ -0,0 +1,13 @@
+#!/bin/sh
+set -e
+
+_grafana_version=$1
+./build.sh "$_grafana_version"
+docker login -u "$DOCKER_USER" -p "$DOCKER_PASS"
+
+./push_to_docker_hub.sh "$_grafana_version"
+
+if echo "$_grafana_version" | grep -q "^master-"; then
+  apk add --no-cache curl
+  ./deploy_to_k8s.sh "grafana/grafana-dev:$_grafana_version"
+fi

+ 25 - 0
packaging/docker/build.sh

@@ -0,0 +1,25 @@
+#!/bin/sh
+
+_grafana_tag=$1
+
+# If the tag starts with v, treat this as a official release
+if echo "$_grafana_tag" | grep -q "^v"; then
+	_grafana_version=$(echo "${_grafana_tag}" | cut -d "v" -f 2)
+	_docker_repo=${2:-grafana/grafana}
+else
+	_grafana_version=$_grafana_tag
+	_docker_repo=${2:-grafana/grafana-dev}
+fi
+
+echo "Building ${_docker_repo}:${_grafana_version}"
+
+docker build \
+	--tag "${_docker_repo}:${_grafana_version}" \
+	--no-cache=true .
+
+# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
+if echo "$_grafana_tag" | grep -q "^v"; then
+	docker tag "${_docker_repo}:${_grafana_version}" "${_docker_repo}:latest"
+else
+	docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana:master"
+fi

+ 16 - 0
packaging/docker/custom/Dockerfile

@@ -0,0 +1,16 @@
+ARG GRAFANA_VERSION="latest"
+
+FROM grafana/grafana:${GRAFANA_VERSION}
+
+USER grafana
+
+ARG GF_INSTALL_PLUGINS=""
+
+RUN if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then \
+    OLDIFS=$IFS; \
+        IFS=','; \
+    for plugin in ${GF_INSTALL_PLUGINS}; do \
+        IFS=$OLDIFS; \
+        grafana-cli --pluginsDir "$GF_PATHS_PLUGINS" plugins install ${plugin}; \
+    done; \
+fi

+ 6 - 0
packaging/docker/deploy_to_k8s.sh

@@ -0,0 +1,6 @@
+#!/bin/sh
+
+curl -s --header "Content-Type: application/json" \
+     --data "{\"build_parameters\": {\"CIRCLE_JOB\": \"deploy\", \"IMAGE_NAMES\": \"$1\"}}" \
+     --request POST \
+     https://circleci.com/api/v1.1/project/github/raintank/deployment_tools/tree/master?circle-token=$CIRCLE_TOKEN

+ 24 - 0
packaging/docker/push_to_docker_hub.sh

@@ -0,0 +1,24 @@
+#!/bin/sh
+set -e
+
+_grafana_tag=$1
+
+# If the tag starts with v, treat this as a official release
+if echo "$_grafana_tag" | grep -q "^v"; then
+	_grafana_version=$(echo "${_grafana_tag}" | cut -d "v" -f 2)
+	_docker_repo=${2:-grafana/grafana}
+else
+	_grafana_version=$_grafana_tag
+	_docker_repo=${2:-grafana/grafana-dev}
+fi
+
+echo "pushing ${_docker_repo}:${_grafana_version}"
+docker push "${_docker_repo}:${_grafana_version}"
+
+if echo "$_grafana_tag" | grep -q "^v"; then
+	echo "pushing ${_docker_repo}:latest"
+	docker push "${_docker_repo}:latest"
+else
+	echo "pushing grafana/grafana:master"
+	docker push grafana/grafana:master
+fi

+ 88 - 0
packaging/docker/run.sh

@@ -0,0 +1,88 @@
+#!/bin/bash -e
+
+PERMISSIONS_OK=0
+
+if [ ! -r "$GF_PATHS_CONFIG" ]; then
+    echo "GF_PATHS_CONFIG='$GF_PATHS_CONFIG' is not readable."
+    PERMISSIONS_OK=1
+fi
+
+if [ ! -w "$GF_PATHS_DATA" ]; then
+    echo "GF_PATHS_DATA='$GF_PATHS_DATA' is not writable."
+    PERMISSIONS_OK=1
+fi
+
+if [ ! -r "$GF_PATHS_HOME" ]; then
+    echo "GF_PATHS_HOME='$GF_PATHS_HOME' is not readable."
+    PERMISSIONS_OK=1
+fi
+
+if [ $PERMISSIONS_OK -eq 1 ]; then
+    echo "You may have issues with file permissions, more information here: http://docs.grafana.org/installation/docker/#migration-from-a-previous-version-of-the-docker-container-to-5-1-or-later"
+fi
+
+if [ ! -d "$GF_PATHS_PLUGINS" ]; then
+    mkdir "$GF_PATHS_PLUGINS"
+fi
+
+if [ ! -z ${GF_AWS_PROFILES+x} ]; then
+    > "$GF_PATHS_HOME/.aws/credentials"
+
+    for profile in ${GF_AWS_PROFILES}; do
+        access_key_varname="GF_AWS_${profile}_ACCESS_KEY_ID"
+        secret_key_varname="GF_AWS_${profile}_SECRET_ACCESS_KEY"
+        region_varname="GF_AWS_${profile}_REGION"
+
+        if [ ! -z "${!access_key_varname}" -a ! -z "${!secret_key_varname}" ]; then
+            echo "[${profile}]" >> "$GF_PATHS_HOME/.aws/credentials"
+            echo "aws_access_key_id = ${!access_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
+            echo "aws_secret_access_key = ${!secret_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
+            if [ ! -z "${!region_varname}" ]; then
+                echo "region = ${!region_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
+            fi
+        fi
+    done
+
+    chmod 600 "$GF_PATHS_HOME/.aws/credentials"
+fi
+
+# Convert all environment variables with names ending in __FILE into the content of
+# the file that they point at and use the name without the trailing __FILE.
+# This can be used to carry in Docker secrets.
+for VAR_NAME in $(env | grep '^GF_[^=]\+__FILE=.\+' | sed -r "s/([^=]*)__FILE=.*/\1/g"); do
+    VAR_NAME_FILE="$VAR_NAME"__FILE
+    if [ "${!VAR_NAME}" ]; then
+        echo >&2 "ERROR: Both $VAR_NAME and $VAR_NAME_FILE are set (but are exclusive)"
+        exit 1
+    fi
+    echo "Getting secret $VAR_NAME from ${!VAR_NAME_FILE}"
+    export "$VAR_NAME"="$(< "${!VAR_NAME_FILE}")"
+    unset "$VAR_NAME_FILE"
+done
+
+export HOME="$GF_PATHS_HOME"
+
+if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then
+  OLDIFS=$IFS
+  IFS=','
+  for plugin in ${GF_INSTALL_PLUGINS}; do
+    IFS=$OLDIFS
+    if [[ $plugin =~ .*\;.* ]]; then
+        pluginUrl=$(echo "$plugin" | cut -d';' -f 1)
+        pluginWithoutUrl=$(echo "$plugin" | cut -d';' -f 2)
+        grafana-cli --pluginUrl "${pluginUrl}" --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${pluginWithoutUrl}
+    else
+        grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
+    fi
+  done
+fi
+
+exec grafana-server                                         \
+  --homepath="$GF_PATHS_HOME"                               \
+  --config="$GF_PATHS_CONFIG"                               \
+  "$@"                                                      \
+  cfg:default.log.mode="console"                            \
+  cfg:default.paths.data="$GF_PATHS_DATA"                   \
+  cfg:default.paths.logs="$GF_PATHS_LOGS"                   \
+  cfg:default.paths.plugins="$GF_PATHS_PLUGINS"             \
+  cfg:default.paths.provisioning="$GF_PATHS_PROVISIONING"

+ 1 - 0
pkg/api/api.go

@@ -120,6 +120,7 @@ func (hs *HTTPServer) registerRoutes() {
 			userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
 			userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
 			userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
+			userRoute.Get("/teams", Wrap(GetSignedInUserTeamList))
 
 			userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
 			userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))

+ 16 - 2
pkg/api/datasources.go

@@ -158,12 +158,26 @@ func UpdateDataSource(c *m.ReqContext, cmd m.UpdateDataSourceCommand) Response {
 		}
 		return Error(500, "Failed to update datasource", err)
 	}
-	ds := convertModelToDtos(cmd.Result)
+
+	query := m.GetDataSourceByIdQuery{
+		Id:    cmd.Id,
+		OrgId: c.OrgId,
+	}
+
+	if err := bus.Dispatch(&query); err != nil {
+		if err == m.ErrDataSourceNotFound {
+			return Error(404, "Data source not found", nil)
+		}
+		return Error(500, "Failed to query datasources", err)
+	}
+
+	dtos := convertModelToDtos(query.Result)
+
 	return JSON(200, util.DynMap{
 		"message":    "Datasource updated",
 		"id":         cmd.Id,
 		"name":       cmd.Name,
-		"datasource": ds,
+		"datasource": dtos,
 	})
 }
 

+ 1 - 0
pkg/api/pluginproxy/ds_proxy.go

@@ -203,6 +203,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
 		req.Header.Del("X-Forwarded-Host")
 		req.Header.Del("X-Forwarded-Port")
 		req.Header.Del("X-Forwarded-Proto")
+		req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
 
 		// set X-Forwarded-For header
 		if req.RemoteAddr != "" {

+ 8 - 7
pkg/api/pluginproxy/ds_proxy_test.go

@@ -212,20 +212,21 @@ func TestDSRouteRule(t *testing.T) {
 		})
 
 		Convey("When proxying graphite", func() {
+			setting.BuildVersion = "5.3.0"
 			plugin := &plugins.DataSourcePlugin{}
 			ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
 			ctx := &m.ReqContext{}
 
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "/render")
+			req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+			So(err, ShouldBeNil)
 
-			requestURL, _ := url.Parse("http://grafana.com/sub")
-			req := http.Request{URL: requestURL}
-
-			proxy.getDirector()(&req)
+			proxy.getDirector()(req)
 
 			Convey("Can translate request url and path", func() {
 				So(req.URL.Host, ShouldEqual, "graphite:8080")
 				So(req.URL.Path, ShouldEqual, "/render")
+				So(req.Header.Get("User-Agent"), ShouldEqual, "Grafana/5.3.0")
 			})
 		})
 
@@ -243,10 +244,10 @@ func TestDSRouteRule(t *testing.T) {
 			ctx := &m.ReqContext{}
 			proxy := NewDataSourceProxy(ds, plugin, ctx, "")
 
-			requestURL, _ := url.Parse("http://grafana.com/sub")
-			req := http.Request{URL: requestURL}
+			req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+			So(err, ShouldBeNil)
 
-			proxy.getDirector()(&req)
+			proxy.getDirector()(req)
 
 			Convey("Should add db to url", func() {
 				So(req.URL.Path, ShouldEqual, "/db/site/")

+ 15 - 0
pkg/api/user.go

@@ -111,6 +111,21 @@ func GetSignedInUserOrgList(c *m.ReqContext) Response {
 	return getUserOrgList(c.UserId)
 }
 
+// GET /api/user/teams
+func GetSignedInUserTeamList(c *m.ReqContext) Response {
+	query := m.GetTeamsByUserQuery{OrgId: c.OrgId, UserId: c.UserId}
+
+	if err := bus.Dispatch(&query); err != nil {
+		return Error(500, "Failed to get user teams", err)
+	}
+
+	for _, team := range query.Result {
+		team.AvatarUrl = dtos.GetGravatarUrlWithDefault(team.Email, team.Name)
+	}
+
+	return JSON(200, query.Result)
+}
+
 // GET /api/user/:id/orgs
 func GetUserOrgList(c *m.ReqContext) Response {
 	return getUserOrgList(c.ParamsInt64(":id"))

+ 10 - 0
pkg/login/ldap.go

@@ -59,6 +59,13 @@ func (a *ldapAuther) Dial() error {
 			}
 		}
 	}
+	var clientCert tls.Certificate
+	if a.server.ClientCert != "" && a.server.ClientKey != "" {
+		clientCert, err = tls.LoadX509KeyPair(a.server.ClientCert, a.server.ClientKey)
+		if err != nil {
+			return err
+		}
+	}
 	for _, host := range strings.Split(a.server.Host, " ") {
 		address := fmt.Sprintf("%s:%d", host, a.server.Port)
 		if a.server.UseSSL {
@@ -67,6 +74,9 @@ func (a *ldapAuther) Dial() error {
 				ServerName:         host,
 				RootCAs:            certPool,
 			}
+			if len(clientCert.Certificate) > 0 {
+				tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert)
+			}
 			if a.server.StartTLS {
 				a.conn, err = ldap.Dial("tcp", address)
 				if err == nil {

+ 2 - 0
pkg/login/ldap_settings.go

@@ -21,6 +21,8 @@ type LdapServerConf struct {
 	StartTLS      bool             `toml:"start_tls"`
 	SkipVerifySSL bool             `toml:"ssl_skip_verify"`
 	RootCACert    string           `toml:"root_ca_cert"`
+	ClientCert    string           `toml:"client_cert"`
+	ClientKey     string           `toml:"client_key"`
 	BindDN        string           `toml:"bind_dn"`
 	BindPassword  string           `toml:"bind_password"`
 	Attr          LdapAttributeMap `toml:"attributes"`

+ 1 - 0
pkg/models/models.go

@@ -8,4 +8,5 @@ const (
 	TWITTER
 	GENERIC
 	GRAFANA_COM
+	GITLAB
 )

+ 1 - 2
pkg/services/alerting/notifier.go

@@ -3,7 +3,6 @@ package alerting
 import (
 	"errors"
 	"fmt"
-	"time"
 
 	"golang.org/x/sync/errgroup"
 
@@ -81,7 +80,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
 	renderOpts := rendering.Opts{
 		Width:   1000,
 		Height:  500,
-		Timeout: time.Second * 30,
+		Timeout: alertTimeout / 2,
 		OrgId:   context.Rule.OrgId,
 		OrgRole: m.ROLE_ADMIN,
 	}

+ 132 - 0
pkg/social/gitlab_oauth.go

@@ -0,0 +1,132 @@
+package social
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"regexp"
+
+	"github.com/grafana/grafana/pkg/models"
+
+	"golang.org/x/oauth2"
+)
+
+type SocialGitlab struct {
+	*SocialBase
+	allowedDomains []string
+	allowedGroups  []string
+	apiUrl         string
+	allowSignup    bool
+}
+
+var (
+	ErrMissingGroupMembership = &Error{"User not a member of one of the required groups"}
+)
+
+func (s *SocialGitlab) Type() int {
+	return int(models.GITLAB)
+}
+
+func (s *SocialGitlab) IsEmailAllowed(email string) bool {
+	return isEmailAllowed(email, s.allowedDomains)
+}
+
+func (s *SocialGitlab) IsSignupAllowed() bool {
+	return s.allowSignup
+}
+
+func (s *SocialGitlab) IsGroupMember(client *http.Client) bool {
+	if len(s.allowedGroups) == 0 {
+		return true
+	}
+
+	for groups, url := s.GetGroups(client, s.apiUrl+"/groups"); groups != nil; groups, url = s.GetGroups(client, url) {
+		for _, allowedGroup := range s.allowedGroups {
+			for _, group := range groups {
+				if group == allowedGroup {
+					return true
+				}
+			}
+		}
+	}
+
+	return false
+}
+
+func (s *SocialGitlab) GetGroups(client *http.Client, url string) ([]string, string) {
+	type Group struct {
+		FullPath string `json:"full_path"`
+	}
+
+	var (
+		groups []Group
+		next   string
+	)
+
+	if url == "" {
+		return nil, next
+	}
+
+	response, err := HttpGet(client, url)
+	if err != nil {
+		s.log.Error("Error getting groups from GitLab API", "err", err)
+		return nil, next
+	}
+
+	if err := json.Unmarshal(response.Body, &groups); err != nil {
+		s.log.Error("Error parsing JSON from GitLab API", "err", err)
+		return nil, next
+	}
+
+	fullPaths := make([]string, len(groups))
+	for i, group := range groups {
+		fullPaths[i] = group.FullPath
+	}
+
+	if link, ok := response.Headers["Link"]; ok {
+		pattern := regexp.MustCompile(`<([^>]+)>; rel="next"`)
+		if matches := pattern.FindStringSubmatch(link[0]); matches != nil {
+			next = matches[1]
+		}
+	}
+
+	return fullPaths, next
+}
+
+func (s *SocialGitlab) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) {
+
+	var data struct {
+		Id       int
+		Username string
+		Email    string
+		Name     string
+		State    string
+	}
+
+	response, err := HttpGet(client, s.apiUrl+"/user")
+	if err != nil {
+		return nil, fmt.Errorf("Error getting user info: %s", err)
+	}
+
+	err = json.Unmarshal(response.Body, &data)
+	if err != nil {
+		return nil, fmt.Errorf("Error getting user info: %s", err)
+	}
+
+	if data.State != "active" {
+		return nil, fmt.Errorf("User %s is inactive", data.Username)
+	}
+
+	userInfo := &BasicUserInfo{
+		Id:    fmt.Sprintf("%d", data.Id),
+		Name:  data.Name,
+		Login: data.Username,
+		Email: data.Email,
+	}
+
+	if !s.IsGroupMember(client) {
+		return nil, ErrMissingGroupMembership
+	}
+
+	return userInfo, nil
+}

+ 15 - 1
pkg/social/social.go

@@ -55,7 +55,7 @@ func NewOAuthService() {
 	setting.OAuthService = &setting.OAuther{}
 	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
 
-	allOauthes := []string{"github", "google", "generic_oauth", "grafananet", "grafana_com"}
+	allOauthes := []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"}
 
 	for _, name := range allOauthes {
 		sec := setting.Raw.Section("auth." + name)
@@ -115,6 +115,20 @@ func NewOAuthService() {
 			}
 		}
 
+		// GitLab.
+		if name == "gitlab" {
+			SocialMap["gitlab"] = &SocialGitlab{
+				SocialBase: &SocialBase{
+					Config: &config,
+					log:    logger,
+				},
+				allowedDomains: info.AllowedDomains,
+				apiUrl:         info.ApiUrl,
+				allowSignup:    info.AllowSignup,
+				allowedGroups:  util.SplitString(sec.Key("allowed_groups").String()),
+			}
+		}
+
 		// Google.
 		if name == "google" {
 			SocialMap["google"] = &SocialGoogle{

+ 2 - 2
pkg/tsdb/cloudwatch/metric_find_query.go

@@ -93,7 +93,7 @@ func init() {
 		"AWS/NATGateway":       {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"},
 		"AWS/NetworkELB":       {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"},
 		"AWS/OpsWorks":         {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"},
-		"AWS/Redshift":         {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"},
+		"AWS/Redshift":         {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "QueriesCompletedPerSecond", "QueryDuration", "QueryRuntimeBreakdown", "ReadIOPS", "ReadLatency", "ReadThroughput", "WLMQueriesCompletedPerSecond", "WLMQueryDuration", "WLMQueueLength", "WriteIOPS", "WriteLatency", "WriteThroughput"},
 		"AWS/RDS":              {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"},
 		"AWS/Route53":          {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"},
 		"AWS/S3":               {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", "PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"},
@@ -144,7 +144,7 @@ func init() {
 		"AWS/NATGateway":       {"NatGatewayId"},
 		"AWS/NetworkELB":       {"LoadBalancer", "TargetGroup", "AvailabilityZone"},
 		"AWS/OpsWorks":         {"StackId", "LayerId", "InstanceId"},
-		"AWS/Redshift":         {"NodeID", "ClusterIdentifier"},
+		"AWS/Redshift":         {"NodeID", "ClusterIdentifier", "latency", "service class", "wmlid"},
 		"AWS/RDS":              {"DBInstanceIdentifier", "DBClusterIdentifier", "DbClusterIdentifier", "DatabaseClass", "EngineName", "Role"},
 		"AWS/Route53":          {"HealthCheckId", "Region"},
 		"AWS/S3":               {"BucketName", "StorageType", "FilterId"},

+ 9 - 12
pkg/tsdb/mssql/macros.go

@@ -6,8 +6,6 @@ import (
 	"strings"
 	"time"
 
-	"strconv"
-
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
@@ -97,19 +95,18 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			if args[2] == "NULL" {
-				m.query.Model.Set("fillNull", true)
-			} else {
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 		}
 		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
+	case "__timeGroupAlias":
+		tg, err := m.evaluateMacro("__timeGroup", args)
+		if err == nil {
+			return tg + " AS [time]", err
+		}
+		return "", err
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 21 - 2
pkg/tsdb/mssql/macros_test.go

@@ -55,27 +55,46 @@ func TestMacroEngine(t *testing.T) {
 			Convey("interpolate __timeGroup function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroupAlias(time_column,'5m')")
+				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
+				So(sql2, ShouldEqual, sql+" AS [time]")
 			})
 
 			Convey("interpolate __timeGroup function with spaces around arguments", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroupAlias(time_column , '5m')")
+				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
+				So(sql2, ShouldEqual, sql+" AS [time]")
 			})
 
 			Convey("interpolate __timeGroup function with fill (value = NULL)", func() {
 				_, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', NULL)")
 
 				fill := query.Model.Get("fill").MustBool()
-				fillNull := query.Model.Get("fillNull").MustBool()
+				fillMode := query.Model.Get("fillMode").MustString()
+				fillInterval := query.Model.Get("fillInterval").MustInt()
+
+				So(err, ShouldBeNil)
+				So(fill, ShouldBeTrue)
+				So(fillMode, ShouldEqual, "null")
+				So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
+			})
+
+			Convey("interpolate __timeGroup function with fill (value = previous)", func() {
+				_, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', previous)")
+
+				fill := query.Model.Get("fill").MustBool()
+				fillMode := query.Model.Get("fillMode").MustString()
 				fillInterval := query.Model.Get("fillInterval").MustInt()
 
 				So(err, ShouldBeNil)
 				So(fill, ShouldBeTrue)
-				So(fillNull, ShouldBeTrue)
+				So(fillMode, ShouldEqual, "previous")
 				So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
 			})
 

+ 9 - 11
pkg/tsdb/mysql/macros.go

@@ -3,7 +3,6 @@ package mysql
 import (
 	"fmt"
 	"regexp"
-	"strconv"
 	"strings"
 	"time"
 
@@ -92,19 +91,18 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			if args[2] == "NULL" {
-				m.query.Model.Set("fillNull", true)
-			} else {
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 		}
 		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
+	case "__timeGroupAlias":
+		tg, err := m.evaluateMacro("__timeGroup", args)
+		if err == nil {
+			return tg + " AS \"time\"", err
+		}
+		return "", err
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 6 - 0
pkg/tsdb/mysql/macros_test.go

@@ -38,16 +38,22 @@ func TestMacroEngine(t *testing.T) {
 
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroupAlias(time_column,'5m')")
+				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
 			})
 
 			Convey("interpolate __timeGroup function with spaces around arguments", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroupAlias(time_column , '5m')")
+				So(err, ShouldBeNil)
 
 				So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
 			})
 
 			Convey("interpolate __timeFilter function", func() {

+ 30 - 1
pkg/tsdb/mysql/mysql_test.go

@@ -295,7 +295,7 @@ func TestMySQL(t *testing.T) {
 
 			})
 
-			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
+			Convey("When doing a metric query using timeGroup with value fill enabled", func() {
 				query := &tsdb.TsdbQuery{
 					Queries: []*tsdb.Query{
 						{
@@ -320,6 +320,35 @@ func TestMySQL(t *testing.T) {
 				points := queryResult.Series[0].Points
 				So(points[3][0].Float64, ShouldEqual, 1.5)
 			})
+
+			Convey("When doing a metric query using timeGroup with previous fill enabled", func() {
+				query := &tsdb.TsdbQuery{
+					Queries: []*tsdb.Query{
+						{
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": "SELECT $__timeGroup(time, '5m', previous) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+					TimeRange: &tsdb.TimeRange{
+						From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+						To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+
+				points := queryResult.Series[0].Points
+				So(points[2][0].Float64, ShouldEqual, 15.0)
+				So(points[3][0].Float64, ShouldEqual, 15.0)
+				So(points[6][0].Float64, ShouldEqual, 20.0)
+			})
+
 		})
 
 		Convey("Given a table with metrics having multiple values and measurements", func() {

+ 37 - 16
pkg/tsdb/postgres/macros.go

@@ -3,7 +3,6 @@ package postgres
 import (
 	"fmt"
 	"regexp"
-	"strconv"
 	"strings"
 	"time"
 
@@ -15,12 +14,13 @@ const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
 type postgresMacroEngine struct {
-	timeRange *tsdb.TimeRange
-	query     *tsdb.Query
+	timeRange   *tsdb.TimeRange
+	query       *tsdb.Query
+	timescaledb bool
 }
 
-func newPostgresMacroEngine() tsdb.SqlMacroEngine {
-	return &postgresMacroEngine{}
+func newPostgresMacroEngine(timescaledb bool) tsdb.SqlMacroEngine {
+	return &postgresMacroEngine{timescaledb: timescaledb}
 }
 
 func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@@ -30,6 +30,23 @@ func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.Tim
 	var macroError error
 
 	sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
+
+		// detect if $__timeGroup is supposed to add AS time for pre 5.3 compatibility
+		// if there is a ',' directly after the macro call $__timeGroup is probably used
+		// in the old way. Inside window function ORDER BY $__timeGroup will be followed
+		// by ')'
+		if groups[1] == "__timeGroup" {
+			if index := strings.Index(sql, groups[0]); index >= 0 {
+				index += len(groups[0])
+				if len(sql) > index {
+					// check for character after macro expression
+					if sql[index] == ',' {
+						groups[1] = "__timeGroupAlias"
+					}
+				}
+			}
+		}
+
 		args := strings.Split(groups[2], ",")
 		for i, arg := range args {
 			args[i] = strings.Trim(arg, " ")
@@ -97,19 +114,23 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.query.Model.Set("fill", true)
-			m.query.Model.Set("fillInterval", interval.Seconds())
-			if args[2] == "NULL" {
-				m.query.Model.Set("fillNull", true)
-			} else {
-				floatVal, err := strconv.ParseFloat(args[2], 64)
-				if err != nil {
-					return "", fmt.Errorf("error parsing fill value %v", args[2])
-				}
-				m.query.Model.Set("fillValue", floatVal)
+			err := tsdb.SetupFillmode(m.query, interval, args[2])
+			if err != nil {
+				return "", err
 			}
 		}
-		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
+
+		if m.timescaledb {
+			return fmt.Sprintf("time_bucket('%vs',%s)", interval.Seconds(), args[0]), nil
+		} else {
+			return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
+		}
+	case "__timeGroupAlias":
+		tg, err := m.evaluateMacro("__timeGroup", args)
+		if err == nil {
+			return tg + " AS \"time\"", err
+		}
+		return "", err
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 43 - 5
pkg/tsdb/postgres/macros_test.go

@@ -12,7 +12,10 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := newPostgresMacroEngine()
+		timescaledbEnabled := false
+		engine := newPostgresMacroEngine(timescaledbEnabled)
+		timescaledbEnabled = true
+		engineTS := newPostgresMacroEngine(timescaledbEnabled)
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
@@ -48,20 +51,55 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
+			Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
+
+				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "SELECT floor(extract(epoch from time_column)/300)*300 AS \"time\", value")
+
+				sql, err = engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m') as time, value")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "SELECT floor(extract(epoch from time_column)/300)*300 as time, value")
+			})
+
 			Convey("interpolate __timeGroup function", func() {
 
-				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
+				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroupAlias(time_column,'5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
+				So(sql, ShouldEqual, "SELECT floor(extract(epoch from time_column)/300)*300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
 			})
 
 			Convey("interpolate __timeGroup function with spaces between args", func() {
 
-				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
+				sql, err := engine.Interpolate(query, timeRange, "$__timeGroup(time_column , '5m')")
+				So(err, ShouldBeNil)
+				sql2, err := engine.Interpolate(query, timeRange, "$__timeGroupAlias(time_column , '5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "floor(extract(epoch from time_column)/300)*300")
+				So(sql2, ShouldEqual, sql+" AS \"time\"")
+			})
+
+			Convey("interpolate __timeGroup function with TimescaleDB enabled", func() {
+
+				sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
+			})
+
+			Convey("interpolate __timeGroup function with spaces between args and TimescaleDB enabled", func() {
+
+				sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
+				So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
 			})
 
 			Convey("interpolate __timeTo function", func() {

+ 3 - 1
pkg/tsdb/postgres/postgres.go

@@ -32,7 +32,9 @@ func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndp
 		log: logger,
 	}
 
-	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger)
+	timescaledb := datasource.JsonData.Get("timescaledb").MustBool(false)
+
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(timescaledb), logger)
 }
 
 func generateConnectionString(datasource *models.DataSource) string {

+ 33 - 5
pkg/tsdb/postgres/postgres_test.go

@@ -27,7 +27,7 @@ import (
 // use to verify that the generated data are vizualized as expected, see
 // devenv/README.md for setup instructions.
 func TestPostgres(t *testing.T) {
-	// change to true to run the MySQL tests
+	// change to true to run the PostgreSQL tests
 	runPostgresTests := false
 	// runPostgresTests := true
 
@@ -183,7 +183,7 @@ func TestPostgres(t *testing.T) {
 					Queries: []*tsdb.Query{
 						{
 							Model: simplejson.NewFromAny(map[string]interface{}{
-								"rawSql": "SELECT $__timeGroup(time, '5m'), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+								"rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
 								"format": "time_series",
 							}),
 							RefId: "A",
@@ -227,7 +227,7 @@ func TestPostgres(t *testing.T) {
 					Queries: []*tsdb.Query{
 						{
 							Model: simplejson.NewFromAny(map[string]interface{}{
-								"rawSql": "SELECT $__timeGroup(time, '5m', NULL), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+								"rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
 								"format": "time_series",
 							}),
 							RefId: "A",
@@ -276,12 +276,12 @@ func TestPostgres(t *testing.T) {
 
 			})
 
-			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
+			Convey("When doing a metric query using timeGroup with value fill enabled", func() {
 				query := &tsdb.TsdbQuery{
 					Queries: []*tsdb.Query{
 						{
 							Model: simplejson.NewFromAny(map[string]interface{}{
-								"rawSql": "SELECT $__timeGroup(time, '5m', 1.5), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+								"rawSql": "SELECT $__timeGroup(time, '5m', 1.5) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
 								"format": "time_series",
 							}),
 							RefId: "A",
@@ -303,6 +303,34 @@ func TestPostgres(t *testing.T) {
 			})
 		})
 
+		Convey("When doing a metric query using timeGroup with previous fill enabled", func() {
+			query := &tsdb.TsdbQuery{
+				Queries: []*tsdb.Query{
+					{
+						Model: simplejson.NewFromAny(map[string]interface{}{
+							"rawSql": "SELECT $__timeGroup(time, '5m', previous), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+							"format": "time_series",
+						}),
+						RefId: "A",
+					},
+				},
+				TimeRange: &tsdb.TimeRange{
+					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
+				},
+			}
+
+			resp, err := endpoint.Query(nil, nil, query)
+			So(err, ShouldBeNil)
+			queryResult := resp.Results["A"]
+			So(queryResult.Error, ShouldBeNil)
+
+			points := queryResult.Series[0].Points
+			So(points[2][0].Float64, ShouldEqual, 15.0)
+			So(points[3][0].Float64, ShouldEqual, 15.0)
+			So(points[6][0].Float64, ShouldEqual, 20.0)
+		})
+
 		Convey("Given a table with metrics having multiple values and measurements", func() {
 			type metric_values struct {
 				Time                time.Time

+ 44 - 2
pkg/tsdb/sql_engine.go

@@ -6,6 +6,7 @@ import (
 	"database/sql"
 	"fmt"
 	"math"
+	"strconv"
 	"strings"
 	"sync"
 	"time"
@@ -253,7 +254,6 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
 				columnType := columnTypes[i].DatabaseTypeName()
 
 				for _, mct := range e.metricColumnTypes {
-					e.log.Info(mct)
 					if columnType == mct {
 						metricIndex = i
 						continue
@@ -275,9 +275,15 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
 	fillMissing := query.Model.Get("fill").MustBool(false)
 	var fillInterval float64
 	fillValue := null.Float{}
+	fillPrevious := false
+
 	if fillMissing {
 		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
+		switch query.Model.Get("fillMode").MustString() {
+		case "null":
+		case "previous":
+			fillPrevious = true
+		case "value":
 			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
 			fillValue.Valid = true
 		}
@@ -353,6 +359,14 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
 					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
 				}
 
+				if fillPrevious {
+					if len(series.Points) > 0 {
+						fillValue = series.Points[len(series.Points)-1][0]
+					} else {
+						fillValue.Valid = false
+					}
+				}
+
 				// align interval start
 				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
 
@@ -378,6 +392,14 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
 			intervalStart := series.Points[len(series.Points)-1][1].Float64
 			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
 
+			if fillPrevious {
+				if len(series.Points) > 0 {
+					fillValue = series.Points[len(series.Points)-1][0]
+				} else {
+					fillValue.Valid = false
+				}
+			}
+
 			// align interval start
 			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
 			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
@@ -547,3 +569,23 @@ func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (n
 
 	return value, nil
 }
+
+func SetupFillmode(query *Query, interval time.Duration, fillmode string) error {
+	query.Model.Set("fill", true)
+	query.Model.Set("fillInterval", interval.Seconds())
+	switch fillmode {
+	case "NULL":
+		query.Model.Set("fillMode", "null")
+	case "previous":
+		query.Model.Set("fillMode", "previous")
+	default:
+		query.Model.Set("fillMode", "value")
+		floatVal, err := strconv.ParseFloat(fillmode, 64)
+		if err != nil {
+			return fmt.Errorf("error parsing fill value %v", fillmode)
+		}
+		query.Model.Set("fillValue", floatVal)
+	}
+
+	return nil
+}

+ 1 - 1
pkg/util/url.go

@@ -10,7 +10,7 @@ type UrlQueryReader struct {
 }
 
 func NewUrlQueryReader(urlInfo *url.URL) (*UrlQueryReader, error) {
-	u, err := url.ParseQuery(urlInfo.String())
+	u, err := url.ParseQuery(urlInfo.RawQuery)
 	if err != nil {
 		return nil, err
 	}

+ 27 - 0
pkg/util/url_test.go

@@ -4,6 +4,7 @@ import (
 	"testing"
 
 	. "github.com/smartystreets/goconvey/convey"
+	"net/url"
 )
 
 func TestUrl(t *testing.T) {
@@ -43,4 +44,30 @@ func TestUrl(t *testing.T) {
 
 		So(result, ShouldEqual, "http://localhost:8080/api/")
 	})
+
+	Convey("When joining two urls where lefthand side has a trailing slash and righthand side has preceding slash", t, func() {
+		result := JoinUrlFragments("http://localhost:8080/", "/api/")
+
+		So(result, ShouldEqual, "http://localhost:8080/api/")
+	})
+}
+
+func TestNewUrlQueryReader(t *testing.T) {
+	u, _ := url.Parse("http://www.abc.com/foo?bar=baz&bar2=baz2")
+	uqr, _ := NewUrlQueryReader(u)
+
+	Convey("when trying to retrieve the first query value", t, func() {
+		result := uqr.Get("bar", "foodef")
+		So(result, ShouldEqual, "baz")
+	})
+
+	Convey("when trying to retrieve the second query value", t, func() {
+		result := uqr.Get("bar2", "foodef")
+		So(result, ShouldEqual, "baz2")
+	})
+
+	Convey("when trying to retrieve from a non-existent key, the default value is returned", t, func() {
+		result := uqr.Get("bar3", "foodef")
+		So(result, ShouldEqual, "foodef")
+	})
 }

+ 22 - 0
pkg/util/validation_test.go

@@ -0,0 +1,22 @@
+package util
+
+import (
+	"testing"
+
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestIsEmail(t *testing.T) {
+
+	Convey("When validating a string that is a valid email", t, func() {
+		result := IsEmail("abc@def.com")
+
+		So(result, ShouldEqual, true)
+	})
+
+	Convey("When validating a string that is not a valid email", t, func() {
+		result := IsEmail("abcdef.com")
+
+		So(result, ShouldEqual, false)
+	})
+}

+ 0 - 0
public/app/containers/AlertRuleList/AlertRuleList.jest.tsx → public/app/containers/AlertRuleList/AlertRuleList.test.tsx


+ 0 - 0
public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap → public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap


+ 183 - 60
public/app/containers/Explore/Explore.tsx

@@ -4,6 +4,7 @@ import Select from 'react-select';
 
 import kbn from 'app/core/utils/kbn';
 import colors from 'app/core/utils/colors';
+import store from 'app/core/store';
 import TimeSeries from 'app/core/time_series2';
 import { decodePathComponent } from 'app/core/utils/location_util';
 import { parse as parseDate } from 'app/core/utils/datemath';
@@ -16,6 +17,18 @@ import Table from './Table';
 import TimePicker, { DEFAULT_RANGE } from './TimePicker';
 import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';
 
+const MAX_HISTORY_ITEMS = 100;
+
+function makeHints(hints) {
+  const hintsByIndex = [];
+  hints.forEach(hint => {
+    if (hint) {
+      hintsByIndex[hint.index] = hint;
+    }
+  });
+  return hintsByIndex;
+}
+
 function makeTimeSeriesList(dataList, options) {
   return dataList.map((seriesData, index) => {
     const datapoints = seriesData.datapoints || [];
@@ -34,7 +47,7 @@ function makeTimeSeriesList(dataList, options) {
   });
 }
 
-function parseInitialState(initial: string | undefined) {
+function parseUrlState(initial: string | undefined) {
   if (initial) {
     try {
       const parsed = JSON.parse(decodePathComponent(initial));
@@ -56,12 +69,14 @@ interface IExploreState {
   datasourceLoading: boolean | null;
   datasourceMissing: boolean;
   graphResult: any;
+  history: any[];
   initialDatasource?: string;
   latency: number;
   loading: any;
   logsResult: any;
-  queries: any;
-  queryError: any;
+  queries: any[];
+  queryErrors: any[];
+  queryHints: any[];
   range: any;
   requestOptions: any;
   showingGraph: boolean;
@@ -78,7 +93,8 @@ export class Explore extends React.Component<any, IExploreState> {
 
   constructor(props) {
     super(props);
-    const { datasource, queries, range } = parseInitialState(props.routeParams.state);
+    const initialState: IExploreState = props.initialState;
+    const { datasource, queries, range } = parseUrlState(props.routeParams.state);
     this.state = {
       datasource: null,
       datasourceError: null,
@@ -86,11 +102,13 @@ export class Explore extends React.Component<any, IExploreState> {
       datasourceMissing: false,
       graphResult: null,
       initialDatasource: datasource,
+      history: [],
       latency: 0,
       loading: false,
       logsResult: null,
       queries: ensureQueries(queries),
-      queryError: null,
+      queryErrors: [],
+      queryHints: [],
       range: range || { ...DEFAULT_RANGE },
       requestOptions: null,
       showingGraph: true,
@@ -100,7 +118,7 @@ export class Explore extends React.Component<any, IExploreState> {
       supportsLogs: null,
       supportsTable: null,
       tableResult: null,
-      ...props.initialState,
+      ...initialState,
     };
   }
 
@@ -138,6 +156,7 @@ export class Explore extends React.Component<any, IExploreState> {
     const supportsGraph = datasource.meta.metrics;
     const supportsLogs = datasource.meta.logs;
     const supportsTable = datasource.meta.metrics;
+    const datasourceId = datasource.meta.id;
     let datasourceError = null;
 
     try {
@@ -147,16 +166,24 @@ export class Explore extends React.Component<any, IExploreState> {
       datasourceError = (error && error.statusText) || error;
     }
 
+    const historyKey = `grafana.explore.history.${datasourceId}`;
+    const history = store.getObject(historyKey, []);
+
+    if (datasource.init) {
+      datasource.init();
+    }
+
     this.setState(
       {
         datasource,
         datasourceError,
+        history,
         supportsGraph,
         supportsLogs,
         supportsTable,
         datasourceLoading: false,
       },
-      () => datasourceError === null && this.handleSubmit()
+      () => datasourceError === null && this.onSubmit()
     );
   }
 
@@ -164,7 +191,7 @@ export class Explore extends React.Component<any, IExploreState> {
     this.el = el;
   };
 
-  handleAddQueryRow = index => {
+  onAddQueryRow = index => {
     const { queries } = this.state;
     const nextQueries = [
       ...queries.slice(0, index + 1),
@@ -174,74 +201,131 @@ export class Explore extends React.Component<any, IExploreState> {
     this.setState({ queries: nextQueries });
   };
 
-  handleChangeDatasource = async option => {
+  onChangeDatasource = async option => {
     this.setState({
       datasource: null,
       datasourceError: null,
       datasourceLoading: true,
       graphResult: null,
       logsResult: null,
+      queryErrors: [],
+      queryHints: [],
       tableResult: null,
     });
     const datasource = await this.props.datasourceSrv.get(option.value);
     this.setDatasource(datasource);
   };
 
-  handleChangeQuery = (query, index) => {
+  onChangeQuery = (value: string, index: number, override?: boolean) => {
     const { queries } = this.state;
+    let { queryErrors, queryHints } = this.state;
+    const prevQuery = queries[index];
+    const edited = override ? false : prevQuery.query !== value;
     const nextQuery = {
       ...queries[index],
-      query,
+      edited,
+      query: value,
     };
     const nextQueries = [...queries];
     nextQueries[index] = nextQuery;
-    this.setState({ queries: nextQueries });
+    if (override) {
+      queryErrors = [];
+      queryHints = [];
+    }
+    this.setState(
+      {
+        queryErrors,
+        queryHints,
+        queries: nextQueries,
+      },
+      override ? () => this.onSubmit() : undefined
+    );
   };
 
-  handleChangeTime = nextRange => {
+  onChangeTime = nextRange => {
     const range = {
       from: nextRange.from,
       to: nextRange.to,
     };
-    this.setState({ range }, () => this.handleSubmit());
+    this.setState({ range }, () => this.onSubmit());
+  };
+
+  onClickClear = () => {
+    this.setState({
+      graphResult: null,
+      logsResult: null,
+      queries: ensureQueries(),
+      tableResult: null,
+    });
   };
 
-  handleClickCloseSplit = () => {
+  onClickCloseSplit = () => {
     const { onChangeSplit } = this.props;
     if (onChangeSplit) {
       onChangeSplit(false);
     }
   };
 
-  handleClickGraphButton = () => {
+  onClickGraphButton = () => {
     this.setState(state => ({ showingGraph: !state.showingGraph }));
   };
 
-  handleClickLogsButton = () => {
+  onClickLogsButton = () => {
     this.setState(state => ({ showingLogs: !state.showingLogs }));
   };
 
-  handleClickSplit = () => {
+  onClickSplit = () => {
     const { onChangeSplit } = this.props;
     if (onChangeSplit) {
       onChangeSplit(true, this.state);
     }
   };
 
-  handleClickTableButton = () => {
+  onClickTableButton = () => {
     this.setState(state => ({ showingTable: !state.showingTable }));
   };
 
-  handleRemoveQueryRow = index => {
+  onClickTableCell = (columnKey: string, rowValue: string) => {
+    this.onModifyQueries({ type: 'ADD_FILTER', key: columnKey, value: rowValue });
+  };
+
+  onModifyQueries = (action: object, index?: number) => {
+    const { datasource, queries } = this.state;
+    if (datasource && datasource.modifyQuery) {
+      let nextQueries;
+      if (index === undefined) {
+        // Modify all queries
+        nextQueries = queries.map(q => ({
+          ...q,
+          edited: false,
+          query: datasource.modifyQuery(q.query, action),
+        }));
+      } else {
+        // Modify query only at index
+        nextQueries = [
+          ...queries.slice(0, index),
+          {
+            ...queries[index],
+            edited: false,
+            query: datasource.modifyQuery(queries[index].query, action),
+          },
+          ...queries.slice(index + 1),
+        ];
+      }
+      this.setState({ queries: nextQueries }, () => this.onSubmit());
+    }
+  };
+
+  onRemoveQueryRow = index => {
     const { queries } = this.state;
     if (queries.length <= 1) {
       return;
     }
     const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)];
-    this.setState({ queries: nextQueries }, () => this.handleSubmit());
+    this.setState({ queries: nextQueries }, () => this.onSubmit());
   };
 
-  handleSubmit = () => {
+  onSubmit = () => {
     const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state;
     if (showingTable && supportsTable) {
       this.runTableQuery();
@@ -254,7 +338,28 @@ export class Explore extends React.Component<any, IExploreState> {
     }
   };
 
-  buildQueryOptions(targetOptions: { format: string; instant?: boolean }) {
+  onQuerySuccess(datasourceId: string, queries: any[]): void {
+    // save queries to history
+    let { datasource, history } = this.state;
+    if (datasource.meta.id !== datasourceId) {
+      // Navigated away, queries did not matter
+      return;
+    }
+    const ts = Date.now();
+    queries.forEach(q => {
+      const { query } = q;
+      history = [{ query, ts }, ...history];
+    });
+    if (history.length > MAX_HISTORY_ITEMS) {
+      history = history.slice(0, MAX_HISTORY_ITEMS);
+    }
+    // Combine all queries of a datasource type into one history
+    const historyKey = `grafana.explore.history.${datasourceId}`;
+    store.setObject(historyKey, history);
+    this.setState({ history });
+  }
+
+  buildQueryOptions(targetOptions: { format: string; hinting?: boolean; instant?: boolean }) {
     const { datasource, queries, range } = this.state;
     const resolution = this.el.offsetWidth;
     const absoluteRange = {
@@ -278,18 +383,20 @@ export class Explore extends React.Component<any, IExploreState> {
     if (!hasQuery(queries)) {
       return;
     }
-    this.setState({ latency: 0, loading: true, graphResult: null, queryError: null });
+    this.setState({ latency: 0, loading: true, graphResult: null, queryErrors: [], queryHints: [] });
     const now = Date.now();
-    const options = this.buildQueryOptions({ format: 'time_series', instant: false });
+    const options = this.buildQueryOptions({ format: 'time_series', instant: false, hinting: true });
     try {
       const res = await datasource.query(options);
       const result = makeTimeSeriesList(res.data, options);
+      const queryHints = res.hints ? makeHints(res.hints) : [];
       const latency = Date.now() - now;
-      this.setState({ latency, loading: false, graphResult: result, requestOptions: options });
+      this.setState({ latency, loading: false, graphResult: result, queryHints, requestOptions: options });
+      this.onQuerySuccess(datasource.meta.id, queries);
     } catch (response) {
       console.error(response);
       const queryError = response.data ? response.data.error : response;
-      this.setState({ loading: false, queryError });
+      this.setState({ loading: false, queryErrors: [queryError] });
     }
   }
 
@@ -298,7 +405,7 @@ export class Explore extends React.Component<any, IExploreState> {
     if (!hasQuery(queries)) {
       return;
     }
-    this.setState({ latency: 0, loading: true, queryError: null, tableResult: null });
+    this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], tableResult: null });
     const now = Date.now();
     const options = this.buildQueryOptions({
       format: 'table',
@@ -309,10 +416,11 @@ export class Explore extends React.Component<any, IExploreState> {
       const tableModel = res.data[0];
       const latency = Date.now() - now;
       this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options });
+      this.onQuerySuccess(datasource.meta.id, queries);
     } catch (response) {
       console.error(response);
       const queryError = response.data ? response.data.error : response;
-      this.setState({ loading: false, queryError });
+      this.setState({ loading: false, queryErrors: [queryError] });
     }
   }
 
@@ -321,7 +429,7 @@ export class Explore extends React.Component<any, IExploreState> {
     if (!hasQuery(queries)) {
       return;
     }
-    this.setState({ latency: 0, loading: true, queryError: null, logsResult: null });
+    this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], logsResult: null });
     const now = Date.now();
     const options = this.buildQueryOptions({
       format: 'logs',
@@ -332,10 +440,11 @@ export class Explore extends React.Component<any, IExploreState> {
       const logsData = res.data;
       const latency = Date.now() - now;
       this.setState({ latency, loading: false, logsResult: logsData, requestOptions: options });
+      this.onQuerySuccess(datasource.meta.id, queries);
     } catch (response) {
       console.error(response);
       const queryError = response.data ? response.data.error : response;
-      this.setState({ loading: false, queryError });
+      this.setState({ loading: false, queryErrors: [queryError] });
     }
   }
 
@@ -352,11 +461,13 @@ export class Explore extends React.Component<any, IExploreState> {
       datasourceLoading,
       datasourceMissing,
       graphResult,
+      history,
       latency,
       loading,
       logsResult,
       queries,
-      queryError,
+      queryErrors,
+      queryHints,
       range,
       requestOptions,
       showingGraph,
@@ -391,7 +502,7 @@ export class Explore extends React.Component<any, IExploreState> {
             </div>
           ) : (
             <div className="navbar-buttons explore-first-button">
-              <button className="btn navbar-button" onClick={this.handleClickCloseSplit}>
+              <button className="btn navbar-button" onClick={this.onClickCloseSplit}>
                 Close Split
               </button>
             </div>
@@ -401,7 +512,7 @@ export class Explore extends React.Component<any, IExploreState> {
               <Select
                 className="datasource-picker"
                 clearable={false}
-                onChange={this.handleChangeDatasource}
+                onChange={this.onChangeDatasource}
                 options={datasources}
                 placeholder="Loading datasources..."
                 value={selectedDatasource}
@@ -411,31 +522,19 @@ export class Explore extends React.Component<any, IExploreState> {
           <div className="navbar__spacer" />
           {position === 'left' && !split ? (
             <div className="navbar-buttons">
-              <button className="btn navbar-button" onClick={this.handleClickSplit}>
+              <button className="btn navbar-button" onClick={this.onClickSplit}>
                 Split
               </button>
             </div>
           ) : null}
+          <TimePicker range={range} onChangeTime={this.onChangeTime} />
           <div className="navbar-buttons">
-            {supportsGraph ? (
-              <button className={`btn navbar-button ${graphButtonActive}`} onClick={this.handleClickGraphButton}>
-                Graph
-              </button>
-            ) : null}
-            {supportsTable ? (
-              <button className={`btn navbar-button ${tableButtonActive}`} onClick={this.handleClickTableButton}>
-                Table
-              </button>
-            ) : null}
-            {supportsLogs ? (
-              <button className={`btn navbar-button ${logsButtonActive}`} onClick={this.handleClickLogsButton}>
-                Logs
-              </button>
-            ) : null}
+            <button className="btn navbar-button navbar-button--no-icon" onClick={this.onClickClear}>
+              Clear All
+            </button>
           </div>
-          <TimePicker range={range} onChangeTime={this.handleChangeTime} />
           <div className="navbar-buttons relative">
-            <button className="btn navbar-button--primary" onClick={this.handleSubmit}>
+            <button className="btn navbar-button--primary" onClick={this.onSubmit}>
               Run Query <i className="fa fa-level-down run-icon" />
             </button>
             {loading || latency ? <ElapsedTime time={latency} className="text-info" /> : null}
@@ -455,26 +554,50 @@ export class Explore extends React.Component<any, IExploreState> {
         {datasource && !datasourceError ? (
           <div className="explore-container">
             <QueryRows
+              history={history}
               queries={queries}
+              queryErrors={queryErrors}
+              queryHints={queryHints}
               request={this.request}
-              onAddQueryRow={this.handleAddQueryRow}
-              onChangeQuery={this.handleChangeQuery}
-              onExecuteQuery={this.handleSubmit}
-              onRemoveQueryRow={this.handleRemoveQueryRow}
+              onAddQueryRow={this.onAddQueryRow}
+              onChangeQuery={this.onChangeQuery}
+              onClickHintFix={this.onModifyQueries}
+              onExecuteQuery={this.onSubmit}
+              onRemoveQueryRow={this.onRemoveQueryRow}
             />
-            {queryError ? <div className="text-warning m-a-2">{queryError}</div> : null}
+            <div className="result-options">
+              {supportsGraph ? (
+                <button className={`btn navbar-button ${graphButtonActive}`} onClick={this.onClickGraphButton}>
+                  Graph
+                </button>
+              ) : null}
+              {supportsTable ? (
+                <button className={`btn navbar-button ${tableButtonActive}`} onClick={this.onClickTableButton}>
+                  Table
+                </button>
+              ) : null}
+              {supportsLogs ? (
+                <button className={`btn navbar-button ${logsButtonActive}`} onClick={this.onClickLogsButton}>
+                  Logs
+                </button>
+              ) : null}
+            </div>
+
             <main className="m-t-2">
               {supportsGraph && showingGraph ? (
                 <Graph
                   data={graphResult}
+                  height={graphHeight}
+                  loading={loading}
                   id={`explore-graph-${position}`}
                   options={requestOptions}
-                  height={graphHeight}
                   split={split}
                 />
               ) : null}
-              {supportsTable && showingTable ? <Table data={tableResult} className="m-t-3" /> : null}
-              {supportsLogs && showingLogs ? <Logs data={logsResult} /> : null}
+              {supportsTable && showingTable ? (
+                <Table className="m-t-3" data={tableResult} loading={loading} onClickCell={this.onClickTableCell} />
+              ) : null}
+              {supportsLogs && showingLogs ? <Logs data={logsResult} loading={loading} /> : null}
             </main>
           </div>
         ) : null}

+ 10 - 2
public/app/containers/Explore/Graph.tsx

@@ -84,7 +84,9 @@ class Graph extends Component<any, any> {
 
   draw() {
     const { data, options: userOptions } = this.props;
+    const $el = $(`#${this.props.id}`);
     if (!data) {
+      $el.empty();
       return;
     }
     const series = data.map((ts: TimeSeries) => ({
@@ -93,7 +95,6 @@ class Graph extends Component<any, any> {
       data: ts.getFlotPairs('null'),
     }));
 
-    const $el = $(`#${this.props.id}`);
     const ticks = $el.width() / 100;
     let { from, to } = userOptions.range;
     if (!moment.isMoment(from)) {
@@ -123,7 +124,14 @@ class Graph extends Component<any, any> {
   }
 
   render() {
-    const { data, height } = this.props;
+    const { data, height, loading } = this.props;
+    if (!loading && data && data.length === 0) {
+      return (
+        <div className="panel-container">
+          <div className="muted m-a-1">The queries returned no time series to graph.</div>
+        </div>
+      );
+    }
     return (
       <div className="panel-container">
         <div id={this.props.id} className="explore-graph" style={{ height }} />

+ 1 - 0
public/app/containers/Explore/Logs.tsx

@@ -5,6 +5,7 @@ import { LogsModel, LogRow } from 'app/core/logs_model';
 interface LogsProps {
   className?: string;
   data: LogsModel;
+  loading: boolean;
 }
 
 const EXAMPLE_QUERY = '{job="default/prometheus"}';

+ 113 - 19
public/app/containers/Explore/PromQueryField.jest.tsx → public/app/containers/Explore/PromQueryField.test.tsx

@@ -1,10 +1,11 @@
 import React from 'react';
 import Enzyme, { shallow } from 'enzyme';
 import Adapter from 'enzyme-adapter-react-16';
+import Plain from 'slate-plain-serializer';
 
-Enzyme.configure({ adapter: new Adapter() });
+import PromQueryField, { groupMetricsByPrefix, RECORDING_RULES_GROUP } from './PromQueryField';
 
-import PromQueryField from './PromQueryField';
+Enzyme.configure({ adapter: new Adapter() });
 
 describe('PromQueryField typeahead handling', () => {
   const defaultProps = {
@@ -59,20 +60,35 @@ describe('PromQueryField typeahead handling', () => {
   describe('label suggestions', () => {
     it('returns default label suggestions on label context and no metric', () => {
       const instance = shallow(<PromQueryField {...defaultProps} />).instance() as PromQueryField;
-      const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] });
+      const value = Plain.deserialize('{}');
+      const range = value.selection.merge({
+        anchorOffset: 1,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const result = instance.getTypeahead({
+        text: '',
+        prefix: '',
+        wrapperClasses: ['context-labels'],
+        value: valueWithSelection,
+      });
       expect(result.context).toBe('context-labels');
       expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]);
     });
 
     it('returns label suggestions on label context and metric', () => {
       const instance = shallow(
-        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric"}': ['bar'] }} />
       ).instance() as PromQueryField;
+      const value = Plain.deserialize('metric{}');
+      const range = value.selection.merge({
+        anchorOffset: 7,
+      });
+      const valueWithSelection = value.change().select(range).value;
       const result = instance.getTypeahead({
-        text: 'job',
-        prefix: 'job',
+        text: '',
+        prefix: '',
         wrapperClasses: ['context-labels'],
-        metric: 'foo',
+        value: valueWithSelection,
       });
       expect(result.context).toBe('context-labels');
       expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
@@ -80,13 +96,18 @@ describe('PromQueryField typeahead handling', () => {
 
     it('returns a refresher on label context and unavailable metric', () => {
       const instance = shallow(
-        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="foo"}': ['bar'] }} />
       ).instance() as PromQueryField;
+      const value = Plain.deserialize('metric{}');
+      const range = value.selection.merge({
+        anchorOffset: 7,
+      });
+      const valueWithSelection = value.change().select(range).value;
       const result = instance.getTypeahead({
-        text: 'job',
-        prefix: 'job',
+        text: '',
+        prefix: '',
         wrapperClasses: ['context-labels'],
-        metric: 'xxx',
+        value: valueWithSelection,
       });
       expect(result.context).toBeUndefined();
       expect(result.refresher).toBeInstanceOf(Promise);
@@ -95,31 +116,104 @@ describe('PromQueryField typeahead handling', () => {
 
     it('returns label values on label context when given a metric and a label key', () => {
       const instance = shallow(
-        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} labelValues={{ foo: { bar: ['baz'] } }} />
+        <PromQueryField
+          {...defaultProps}
+          labelKeys={{ '{__name__="metric"}': ['bar'] }}
+          labelValues={{ '{__name__="metric"}': { bar: ['baz'] } }}
+        />
       ).instance() as PromQueryField;
+      const value = Plain.deserialize('metric{bar=ba}');
+      const range = value.selection.merge({
+        anchorOffset: 13,
+      });
+      const valueWithSelection = value.change().select(range).value;
       const result = instance.getTypeahead({
         text: '=ba',
         prefix: 'ba',
         wrapperClasses: ['context-labels'],
-        metric: 'foo',
         labelKey: 'bar',
+        value: valueWithSelection,
       });
       expect(result.context).toBe('context-label-values');
-      expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]);
+      expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values for "bar"' }]);
     });
 
-    it('returns label suggestions on aggregation context and metric', () => {
+    it('returns label suggestions on aggregation context and metric w/ selector', () => {
       const instance = shallow(
-        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric",foo="xx"}': ['bar'] }} />
       ).instance() as PromQueryField;
+      const value = Plain.deserialize('sum(metric{foo="xx"}) by ()');
+      const range = value.selection.merge({
+        anchorOffset: 26,
+      });
+      const valueWithSelection = value.change().select(range).value;
       const result = instance.getTypeahead({
-        text: 'job',
-        prefix: 'job',
+        text: '',
+        prefix: '',
         wrapperClasses: ['context-aggregation'],
-        metric: 'foo',
+        value: valueWithSelection,
       });
       expect(result.context).toBe('context-aggregation');
       expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
     });
+
+    it('returns label suggestions on aggregation context and metric w/o selector', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric"}': ['bar'] }} />
+      ).instance() as PromQueryField;
+      const value = Plain.deserialize('sum(metric) by ()');
+      const range = value.selection.merge({
+        anchorOffset: 16,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const result = instance.getTypeahead({
+        text: '',
+        prefix: '',
+        wrapperClasses: ['context-aggregation'],
+        value: valueWithSelection,
+      });
+      expect(result.context).toBe('context-aggregation');
+      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
+    });
+  });
+});
+
+describe('groupMetricsByPrefix()', () => {
+  it('returns an empty group for no metrics', () => {
+    expect(groupMetricsByPrefix([])).toEqual([]);
+  });
+
+  it('returns options grouped by prefix', () => {
+    expect(groupMetricsByPrefix(['foo_metric'])).toMatchObject([
+      {
+        value: 'foo',
+        children: [
+          {
+            value: 'foo_metric',
+          },
+        ],
+      },
+    ]);
+  });
+
+  it('returns options without prefix as toplevel option', () => {
+    expect(groupMetricsByPrefix(['metric'])).toMatchObject([
+      {
+        value: 'metric',
+      },
+    ]);
+  });
+
+  it('returns recording rules grouped separately', () => {
+    expect(groupMetricsByPrefix([':foo_metric:'])).toMatchObject([
+      {
+        value: RECORDING_RULES_GROUP,
+        children: [
+          {
+            value: ':foo_metric:',
+          },
+        ],
+      },
+    ]);
   });
 });

+ 255 - 85
public/app/containers/Explore/PromQueryField.tsx

@@ -1,12 +1,16 @@
 import _ from 'lodash';
+import moment from 'moment';
 import React from 'react';
+import { Value } from 'slate';
+import Cascader from 'rc-cascader';
 
 // dom also includes Element polyfills
 import { getNextCharacter, getPreviousCousin } from './utils/dom';
 import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index';
 import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql';
+import BracesPlugin from './slate-plugins/braces';
 import RunnerPlugin from './slate-plugins/runner';
-import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus';
+import { processLabels, RATE_RANGES, cleanText, getCleanSelector } from './utils/prometheus';
 
 import TypeaheadField, {
   Suggestion,
@@ -16,16 +20,71 @@ import TypeaheadField, {
   TypeaheadOutput,
 } from './QueryField';
 
-const EMPTY_METRIC = '';
+const DEFAULT_KEYS = ['job', 'instance'];
+const EMPTY_SELECTOR = '{}';
+const HISTOGRAM_GROUP = '__histograms__';
+const HISTOGRAM_SELECTOR = '{le!=""}'; // Returns all timeseries for histograms
+const HISTORY_ITEM_COUNT = 5;
+const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
 const METRIC_MARK = 'metric';
 const PRISM_LANGUAGE = 'promql';
+export const RECORDING_RULES_GROUP = '__recording_rules__';
 
-export const wrapLabel = label => ({ label });
+export const wrapLabel = (label: string) => ({ label });
 export const setFunctionMove = (suggestion: Suggestion): Suggestion => {
   suggestion.move = -1;
   return suggestion;
 };
 
+export function addHistoryMetadata(item: Suggestion, history: any[]): Suggestion {
+  const cutoffTs = Date.now() - HISTORY_COUNT_CUTOFF;
+  const historyForItem = history.filter(h => h.ts > cutoffTs && h.query === item.label);
+  const count = historyForItem.length;
+  const recent = historyForItem[0];
+  let hint = `Queried ${count} times in the last 24h.`;
+  if (recent) {
+    const lastQueried = moment(recent.ts).fromNow();
+    hint = `${hint} Last queried ${lastQueried}.`;
+  }
+  return {
+    ...item,
+    documentation: hint,
+  };
+}
+
+export function groupMetricsByPrefix(metrics: string[], delimiter = '_'): CascaderOption[] {
+  // Filter out recording rules and insert as first option
+  const ruleRegex = /:\w+:/;
+  const ruleNames = metrics.filter(metric => ruleRegex.test(metric));
+  const rulesOption = {
+    label: 'Recording rules',
+    value: RECORDING_RULES_GROUP,
+    children: ruleNames
+      .slice()
+      .sort()
+      .map(name => ({ label: name, value: name })),
+  };
+
+  const options = ruleNames.length > 0 ? [rulesOption] : [];
+
+  const metricsOptions = _.chain(metrics)
+    .filter(metric => !ruleRegex.test(metric))
+    .groupBy(metric => metric.split(delimiter)[0])
+    .map((metricsForPrefix: string[], prefix: string): CascaderOption => {
+      const prefixIsMetric = metricsForPrefix.length === 1 && metricsForPrefix[0] === prefix;
+      const children = prefixIsMetric ? [] : metricsForPrefix.sort().map(m => ({ label: m, value: m }));
+      return {
+        children,
+        label: prefix,
+        value: prefix,
+      };
+    })
+    .sortBy('label')
+    .value();
+
+  return [...options, ...metricsOptions];
+}
+
 export function willApplySuggestion(
   suggestion: string,
   { typeaheadContext, typeaheadText }: TypeaheadFieldState
@@ -56,58 +115,105 @@ export function willApplySuggestion(
   return suggestion;
 }
 
+interface CascaderOption {
+  label: string;
+  value: string;
+  children?: CascaderOption[];
+  disabled?: boolean;
+}
+
 interface PromQueryFieldProps {
+  error?: string;
+  hint?: any;
+  histogramMetrics?: string[];
+  history?: any[];
   initialQuery?: string | null;
   labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...]
   labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
   metrics?: string[];
+  metricsByPrefix?: CascaderOption[];
+  onClickHintFix?: (action: any) => void;
   onPressEnter?: () => void;
-  onQueryChange?: (value: string) => void;
+  onQueryChange?: (value: string, override?: boolean) => void;
   portalPrefix?: string;
   request?: (url: string) => any;
 }
 
 interface PromQueryFieldState {
+  histogramMetrics: string[];
   labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...]
   labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
   metrics: string[];
+  metricsByPrefix: CascaderOption[];
 }
 
 interface PromTypeaheadInput {
   text: string;
   prefix: string;
   wrapperClasses: string[];
-  metric?: string;
   labelKey?: string;
+  value?: Value;
 }
 
 class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryFieldState> {
   plugins: any[];
 
-  constructor(props, context) {
+  constructor(props: PromQueryFieldProps, context) {
     super(props, context);
 
     this.plugins = [
+      BracesPlugin(),
       RunnerPlugin({ handler: props.onPressEnter }),
       PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }),
     ];
 
     this.state = {
+      histogramMetrics: props.histogramMetrics || [],
       labelKeys: props.labelKeys || {},
       labelValues: props.labelValues || {},
       metrics: props.metrics || [],
+      metricsByPrefix: props.metricsByPrefix || [],
     };
   }
 
   componentDidMount() {
     this.fetchMetricNames();
+    this.fetchHistogramMetrics();
   }
 
-  onChangeQuery = value => {
+  onChangeMetrics = (values: string[], selectedOptions: CascaderOption[]) => {
+    let query;
+    if (selectedOptions.length === 1) {
+      if (selectedOptions[0].children.length === 0) {
+        query = selectedOptions[0].value;
+      } else {
+        // Ignore click on group
+        return;
+      }
+    } else {
+      const prefix = selectedOptions[0].value;
+      const metric = selectedOptions[1].value;
+      if (prefix === HISTOGRAM_GROUP) {
+        query = `histogram_quantile(0.95, sum(rate(${metric}[5m])) by (le))`;
+      } else {
+        query = metric;
+      }
+    }
+    this.onChangeQuery(query, true);
+  };
+
+  onChangeQuery = (value: string, override?: boolean) => {
     // Send text change to parent
     const { onQueryChange } = this.props;
     if (onQueryChange) {
-      onQueryChange(value);
+      onQueryChange(value, override);
+    }
+  };
+
+  onClickHintFix = () => {
+    const { hint, onClickHintFix } = this.props;
+    if (onClickHintFix && hint && hint.fix) {
+      onClickHintFix(hint.fix.action);
     }
   };
 
@@ -119,25 +225,23 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
   };
 
   onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => {
-    const { editorNode, prefix, text, wrapperNode } = typeahead;
+    const { prefix, text, value, wrapperNode } = typeahead;
 
     // Get DOM-dependent context
     const wrapperClasses = Array.from(wrapperNode.classList);
-    // Take first metric as lucky guess
-    const metricNode = editorNode.querySelector(`.${METRIC_MARK}`);
-    const metric = metricNode && metricNode.textContent;
     const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name');
     const labelKey = labelKeyNode && labelKeyNode.textContent;
+    const nextChar = getNextCharacter();
 
-    const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey });
+    const result = this.getTypeahead({ text, value, prefix, wrapperClasses, labelKey });
 
-    console.log('handleTypeahead', wrapperClasses, text, prefix, result.context);
+    console.log('handleTypeahead', wrapperClasses, text, prefix, nextChar, labelKey, result.context);
 
     return result;
   };
 
   // Keep this DOM-free for testing
-  getTypeahead({ prefix, wrapperClasses, metric, text }: PromTypeaheadInput): TypeaheadOutput {
+  getTypeahead({ prefix, wrapperClasses, text }: PromTypeaheadInput): TypeaheadOutput {
     // Determine candidates by CSS context
     if (_.includes(wrapperClasses, 'context-range')) {
       // Suggestions for metric[|]
@@ -145,12 +249,11 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     } else if (_.includes(wrapperClasses, 'context-labels')) {
       // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|}
       return this.getLabelTypeahead.apply(this, arguments);
-    } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) {
+    } else if (_.includes(wrapperClasses, 'context-aggregation')) {
       return this.getAggregationTypeahead.apply(this, arguments);
     } else if (
-      // Non-empty but not inside known token unless it's a metric
+      // Non-empty but not inside known token
       (prefix && !_.includes(wrapperClasses, 'token')) ||
-      prefix === metric ||
       (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')'
       text.match(/[+\-*/^%]/) // After binary operator
     ) {
@@ -163,17 +266,37 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
   }
 
   getEmptyTypeahead(): TypeaheadOutput {
+    const { history } = this.props;
+    const { metrics } = this.state;
     const suggestions: SuggestionGroup[] = [];
+
+    if (history && history.length > 0) {
+      const historyItems = _.chain(history)
+        .uniqBy('query')
+        .take(HISTORY_ITEM_COUNT)
+        .map(h => h.query)
+        .map(wrapLabel)
+        .map(item => addHistoryMetadata(item, history))
+        .value();
+
+      suggestions.push({
+        prefixMatch: true,
+        skipSort: true,
+        label: 'History',
+        items: historyItems,
+      });
+    }
+
     suggestions.push({
       prefixMatch: true,
       label: 'Functions',
       items: FUNCTIONS.map(setFunctionMove),
     });
 
-    if (this.state.metrics) {
+    if (metrics) {
       suggestions.push({
         label: 'Metrics',
-        items: this.state.metrics.map(wrapLabel),
+        items: metrics.map(wrapLabel),
       });
     }
     return { suggestions };
@@ -191,14 +314,27 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     };
   }
 
-  getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput {
+  getAggregationTypeahead({ value }: PromTypeaheadInput): TypeaheadOutput {
     let refresher: Promise<any> = null;
     const suggestions: SuggestionGroup[] = [];
-    const labelKeys = this.state.labelKeys[metric];
+
+    // sum(foo{bar="1"}) by (|)
+    const line = value.anchorBlock.getText();
+    const cursorOffset: number = value.anchorOffset;
+    // sum(foo{bar="1"}) by (
+    const leftSide = line.slice(0, cursorOffset);
+    const openParensAggregationIndex = leftSide.lastIndexOf('(');
+    const openParensSelectorIndex = leftSide.slice(0, openParensAggregationIndex).lastIndexOf('(');
+    const closeParensSelectorIndex = leftSide.slice(openParensSelectorIndex).indexOf(')') + openParensSelectorIndex;
+    // foo{bar="1"}
+    const selectorString = leftSide.slice(openParensSelectorIndex + 1, closeParensSelectorIndex);
+    const selector = getCleanSelector(selectorString, selectorString.length - 2);
+
+    const labelKeys = this.state.labelKeys[selector];
     if (labelKeys) {
       suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
     } else {
-      refresher = this.fetchMetricLabels(metric);
+      refresher = this.fetchSeriesLabels(selector);
     }
 
     return {
@@ -208,59 +344,51 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     };
   }
 
-  getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput {
+  getLabelTypeahead({ text, wrapperClasses, labelKey, value }: PromTypeaheadInput): TypeaheadOutput {
     let context: string;
     let refresher: Promise<any> = null;
     const suggestions: SuggestionGroup[] = [];
-    if (metric) {
-      const labelKeys = this.state.labelKeys[metric];
-      if (labelKeys) {
-        if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
-          // Label values
-          if (labelKey) {
-            const labelValues = this.state.labelValues[metric][labelKey];
-            context = 'context-label-values';
-            suggestions.push({
-              label: 'Label values',
-              items: labelValues.map(wrapLabel),
-            });
-          }
-        } else {
-          // Label keys
-          context = 'context-labels';
-          suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
-        }
-      } else {
-        refresher = this.fetchMetricLabels(metric);
+    const line = value.anchorBlock.getText();
+    const cursorOffset: number = value.anchorOffset;
+
+    // Get normalized selector
+    let selector;
+    try {
+      selector = getCleanSelector(line, cursorOffset);
+    } catch {
+      selector = EMPTY_SELECTOR;
+    }
+    const containsMetric = selector.indexOf('__name__=') > -1;
+
+    if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
+      // Label values
+      if (labelKey && this.state.labelValues[selector] && this.state.labelValues[selector][labelKey]) {
+        const labelValues = this.state.labelValues[selector][labelKey];
+        context = 'context-label-values';
+        suggestions.push({
+          label: `Label values for "${labelKey}"`,
+          items: labelValues.map(wrapLabel),
+        });
       }
     } else {
-      // Metric-independent label queries
-      const defaultKeys = ['job', 'instance'];
-      // Munge all keys that we have seen together
-      const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => {
-        return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1));
-      }, defaultKeys);
-      if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
-        // Label values
-        if (labelKey) {
-          if (this.state.labelValues[EMPTY_METRIC]) {
-            const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey];
-            context = 'context-label-values';
-            suggestions.push({
-              label: 'Label values',
-              items: labelValues.map(wrapLabel),
-            });
-          } else {
-            // Can only query label values for now (API to query keys is under development)
-            refresher = this.fetchLabelValues(labelKey);
-          }
-        }
-      } else {
-        // Label keys
+      // Label keys
+      const labelKeys = this.state.labelKeys[selector] || (containsMetric ? null : DEFAULT_KEYS);
+      if (labelKeys) {
         context = 'context-labels';
-        suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
+        suggestions.push({ label: `Labels`, items: labelKeys.map(wrapLabel) });
+      }
+    }
+
+    // Query labels for selector
+    if (selector && !this.state.labelValues[selector]) {
+      if (selector === EMPTY_SELECTOR) {
+        // Query label values for default labels
+        refresher = Promise.all(DEFAULT_KEYS.map(key => this.fetchLabelValues(key)));
+      } else {
+        refresher = this.fetchSeriesLabels(selector, !containsMetric);
       }
     }
+
     return { context, refresher, suggestions };
   }
 
@@ -271,19 +399,29 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     return fetch(url);
   };
 
-  async fetchLabelValues(key) {
+  fetchHistogramMetrics() {
+    this.fetchSeriesLabels(HISTOGRAM_SELECTOR, true, () => {
+      const histogramSeries = this.state.labelValues[HISTOGRAM_SELECTOR];
+      if (histogramSeries && histogramSeries['__name__']) {
+        const histogramMetrics = histogramSeries['__name__'].slice().sort();
+        this.setState({ histogramMetrics });
+      }
+    });
+  }
+
+  async fetchLabelValues(key: string) {
     const url = `/api/v1/label/${key}/values`;
     try {
       const res = await this.request(url);
       const body = await (res.data || res.json());
-      const pairs = this.state.labelValues[EMPTY_METRIC];
+      const exisingValues = this.state.labelValues[EMPTY_SELECTOR];
       const values = {
-        ...pairs,
+        ...exisingValues,
         [key]: body.data,
       };
       const labelValues = {
         ...this.state.labelValues,
-        [EMPTY_METRIC]: values,
+        [EMPTY_SELECTOR]: values,
       };
       this.setState({ labelValues });
     } catch (e) {
@@ -291,12 +429,12 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     }
   }
 
-  async fetchMetricLabels(name) {
+  async fetchSeriesLabels(name: string, withName?: boolean, callback?: () => void) {
     const url = `/api/v1/series?match[]=${name}`;
     try {
       const res = await this.request(url);
       const body = await (res.data || res.json());
-      const { keys, values } = processLabels(body.data);
+      const { keys, values } = processLabels(body.data, withName);
       const labelKeys = {
         ...this.state.labelKeys,
         [name]: keys,
@@ -305,7 +443,7 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
         ...this.state.labelValues,
         [name]: values,
       };
-      this.setState({ labelKeys, labelValues });
+      this.setState({ labelKeys, labelValues }, callback);
     } catch (e) {
       console.error(e);
     }
@@ -316,23 +454,55 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
     try {
       const res = await this.request(url);
       const body = await (res.data || res.json());
-      this.setState({ metrics: body.data }, this.onReceiveMetrics);
+      const metrics = body.data;
+      const metricsByPrefix = groupMetricsByPrefix(metrics);
+      this.setState({ metrics, metricsByPrefix }, this.onReceiveMetrics);
     } catch (error) {
       console.error(error);
     }
   }
 
   render() {
+    const { error, hint } = this.props;
+    const { histogramMetrics, metricsByPrefix } = this.state;
+    const histogramOptions = histogramMetrics.map(hm => ({ label: hm, value: hm }));
+    const metricsOptions = [
+      { label: 'Histograms', value: HISTOGRAM_GROUP, children: histogramOptions },
+      ...metricsByPrefix,
+    ];
+
     return (
-      <TypeaheadField
-        additionalPlugins={this.plugins}
-        cleanText={cleanText}
-        initialValue={this.props.initialQuery}
-        onTypeahead={this.onTypeahead}
-        onWillApplySuggestion={willApplySuggestion}
-        onValueChanged={this.onChangeQuery}
-        placeholder="Enter a PromQL query"
-      />
+      <div className="prom-query-field">
+        <div className="prom-query-field-tools">
+          <Cascader options={metricsOptions} onChange={this.onChangeMetrics}>
+            <button className="btn navbar-button navbar-button--tight">Metrics</button>
+          </Cascader>
+        </div>
+        <div className="prom-query-field-wrapper">
+          <div className="slate-query-field-wrapper">
+            <TypeaheadField
+              additionalPlugins={this.plugins}
+              cleanText={cleanText}
+              initialValue={this.props.initialQuery}
+              onTypeahead={this.onTypeahead}
+              onWillApplySuggestion={willApplySuggestion}
+              onValueChanged={this.onChangeQuery}
+              placeholder="Enter a PromQL query"
+            />
+          </div>
+          {error ? <div className="prom-query-field-info text-error">{error}</div> : null}
+          {hint ? (
+            <div className="prom-query-field-info text-warning">
+              {hint.label}{' '}
+              {hint.fix ? (
+                <a className="text-link muted" onClick={this.onClickHintFix}>
+                  {hint.fix.label}
+                </a>
+              ) : null}
+            </div>
+          ) : null}
+        </div>
+      </div>
     );
   }
 }

+ 11 - 3
public/app/containers/Explore/QueryField.tsx

@@ -5,7 +5,6 @@ import { Block, Change, Document, Text, Value } from 'slate';
 import { Editor } from 'slate-react';
 import Plain from 'slate-plain-serializer';
 
-import BracesPlugin from './slate-plugins/braces';
 import ClearPlugin from './slate-plugins/clear';
 import NewlinePlugin from './slate-plugins/newline';
 
@@ -97,6 +96,10 @@ export interface SuggestionGroup {
    * If true, do not filter items in this group based on the search.
    */
   skipFilter?: boolean;
+  /**
+   * If true, do not sort items.
+   */
+  skipSort?: boolean;
 }
 
 interface TypeaheadFieldProps {
@@ -126,6 +129,7 @@ export interface TypeaheadInput {
   prefix: string;
   selection?: Selection;
   text: string;
+  value: Value;
   wrapperNode: Element;
 }
 
@@ -144,7 +148,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
     super(props, context);
 
     // Base plugins
-    this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins];
+    this.plugins = [ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins];
 
     this.state = {
       suggestions: [],
@@ -199,6 +203,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
   handleTypeahead = _.debounce(async () => {
     const selection = window.getSelection();
     const { cleanText, onTypeahead } = this.props;
+    const { value } = this.state;
 
     if (onTypeahead && selection.anchorNode) {
       const wrapperNode = selection.anchorNode.parentElement;
@@ -221,6 +226,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
         prefix,
         selection,
         text,
+        value,
         wrapperNode,
       });
 
@@ -241,7 +247,9 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
               group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix);
             }
 
-            group.items = _.sortBy(group.items, item => item.sortText || item.label);
+            if (!group.skipSort) {
+              group.items = _.sortBy(group.items, item => item.sortText || item.label);
+            }
           }
           return group;
         })

+ 50 - 33
public/app/containers/Explore/QueryRows.tsx

@@ -1,41 +1,42 @@
 import React, { PureComponent } from 'react';
 
+// TODO make this datasource-plugin-dependent
 import QueryField from './PromQueryField';
 
-class QueryRow extends PureComponent<any, any> {
-  constructor(props) {
-    super(props);
-    this.state = {
-      edited: false,
-      query: props.query || '',
-    };
-  }
-
-  handleChangeQuery = value => {
+class QueryRow extends PureComponent<any, {}> {
+  onChangeQuery = (value, override?: boolean) => {
     const { index, onChangeQuery } = this.props;
-    const { query } = this.state;
-    const edited = query !== value;
-    this.setState({ edited, query: value });
     if (onChangeQuery) {
-      onChangeQuery(value, index);
+      onChangeQuery(value, index, override);
     }
   };
 
-  handleClickAddButton = () => {
+  onClickAddButton = () => {
     const { index, onAddQueryRow } = this.props;
     if (onAddQueryRow) {
       onAddQueryRow(index);
     }
   };
 
-  handleClickRemoveButton = () => {
+  onClickClearButton = () => {
+    this.onChangeQuery('', true);
+  };
+
+  onClickHintFix = action => {
+    const { index, onClickHintFix } = this.props;
+    if (onClickHintFix) {
+      onClickHintFix(action, index);
+    }
+  };
+
+  onClickRemoveButton = () => {
     const { index, onRemoveQueryRow } = this.props;
     if (onRemoveQueryRow) {
       onRemoveQueryRow(index);
     }
   };
 
-  handlePressEnter = () => {
+  onPressEnter = () => {
     const { onExecuteQuery } = this.props;
     if (onExecuteQuery) {
       onExecuteQuery();
@@ -43,38 +44,54 @@ class QueryRow extends PureComponent<any, any> {
   };
 
   render() {
-    const { request } = this.props;
-    const { edited, query } = this.state;
+    const { edited, history, query, queryError, queryHint, request } = this.props;
     return (
       <div className="query-row">
-        <div className="query-row-tools">
-          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickAddButton}>
-            <i className="fa fa-plus" />
-          </button>
-          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickRemoveButton}>
-            <i className="fa fa-minus" />
-          </button>
-        </div>
-        <div className="slate-query-field-wrapper">
+        <div className="query-row-field">
           <QueryField
+            error={queryError}
+            hint={queryHint}
             initialQuery={edited ? null : query}
+            history={history}
             portalPrefix="explore"
-            onPressEnter={this.handlePressEnter}
-            onQueryChange={this.handleChangeQuery}
+            onClickHintFix={this.onClickHintFix}
+            onPressEnter={this.onPressEnter}
+            onQueryChange={this.onChangeQuery}
             request={request}
           />
         </div>
+        <div className="query-row-tools">
+          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickClearButton}>
+            <i className="fa fa-times" />
+          </button>
+          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickAddButton}>
+            <i className="fa fa-plus" />
+          </button>
+          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickRemoveButton}>
+            <i className="fa fa-minus" />
+          </button>
+        </div>
       </div>
     );
   }
 }
 
-export default class QueryRows extends PureComponent<any, any> {
+export default class QueryRows extends PureComponent<any, {}> {
   render() {
-    const { className = '', queries, ...handlers } = this.props;
+    const { className = '', queries, queryErrors = [], queryHints = [], ...handlers } = this.props;
     return (
       <div className={className}>
-        {queries.map((q, index) => <QueryRow key={q.key} index={index} query={q.query} {...handlers} />)}
+        {queries.map((q, index) => (
+          <QueryRow
+            key={q.key}
+            index={index}
+            query={q.query}
+            queryError={queryErrors[index]}
+            queryHint={queryHints[index]}
+            edited={q.edited}
+            {...handlers}
+          />
+        ))}
       </div>
     );
   }

+ 69 - 9
public/app/containers/Explore/Table.tsx

@@ -1,22 +1,82 @@
 import React, { PureComponent } from 'react';
-// import TableModel from 'app/core/table_model';
+import TableModel from 'app/core/table_model';
 
-const EMPTY_TABLE = {
-  columns: [],
-  rows: [],
-};
+const EMPTY_TABLE = new TableModel();
 
-export default class Table extends PureComponent<any, any> {
+interface TableProps {
+  className?: string;
+  data: TableModel;
+  loading: boolean;
+  onClickCell?: (columnKey: string, rowValue: string) => void;
+}
+
+interface SFCCellProps {
+  columnIndex: number;
+  onClickCell?: (columnKey: string, rowValue: string, columnIndex: number, rowIndex: number, table: TableModel) => void;
+  rowIndex: number;
+  table: TableModel;
+  value: string;
+}
+
+function Cell(props: SFCCellProps) {
+  const { columnIndex, rowIndex, table, value, onClickCell } = props;
+  const column = table.columns[columnIndex];
+  if (column && column.filterable && onClickCell) {
+    const onClick = event => {
+      event.preventDefault();
+      onClickCell(column.text, value, columnIndex, rowIndex, table);
+    };
+    return (
+      <td>
+        <a className="link" onClick={onClick}>
+          {value}
+        </a>
+      </td>
+    );
+  }
+  return <td>{value}</td>;
+}
+
+export default class Table extends PureComponent<TableProps, {}> {
   render() {
-    const { className = '', data } = this.props;
-    const tableModel = data || EMPTY_TABLE;
+    const { className = '', data, loading, onClickCell } = this.props;
+    let tableModel = data || EMPTY_TABLE;
+    if (!loading && data && data.rows.length === 0) {
+      return (
+        <table className={`${className} filter-table`}>
+          <thead>
+            <tr>
+              <th>Table</th>
+            </tr>
+          </thead>
+          <tbody>
+            <tr>
+              <td className="muted">The queries returned no data for a table.</td>
+            </tr>
+          </tbody>
+        </table>
+      );
+    }
     return (
       <table className={`${className} filter-table`}>
         <thead>
           <tr>{tableModel.columns.map(col => <th key={col.text}>{col.text}</th>)}</tr>
         </thead>
         <tbody>
-          {tableModel.rows.map((row, i) => <tr key={i}>{row.map((content, j) => <td key={j}>{content}</td>)}</tr>)}
+          {tableModel.rows.map((row, i) => (
+            <tr key={i}>
+              {row.map((value, j) => (
+                <Cell
+                  key={j}
+                  columnIndex={j}
+                  rowIndex={i}
+                  value={String(value)}
+                  table={data}
+                  onClickCell={onClickCell}
+                />
+              ))}
+            </tr>
+          ))}
         </tbody>
       </table>
     );

+ 0 - 0
public/app/containers/Explore/TimePicker.jest.tsx → public/app/containers/Explore/TimePicker.test.tsx


+ 9 - 0
public/app/containers/Explore/slate-plugins/braces.test.ts

@@ -44,4 +44,13 @@ describe('braces', () => {
     handler(event, change);
     expect(Plain.serialize(change.value)).toEqual('(foo) (bar)() ugh');
   });
+
+  it('adds closing braces outside a selector', () => {
+    const change = Plain.deserialize('sumrate(metric{namespace="dev", cluster="c1"}[2m])').change();
+    let event;
+    change.move(3);
+    event = new window.KeyboardEvent('keydown', { key: '(' });
+    handler(event, change);
+    expect(Plain.serialize(change.value)).toEqual('sum(rate(metric{namespace="dev", cluster="c1"}[2m]))');
+  });
 });

+ 4 - 2
public/app/containers/Explore/slate-plugins/braces.ts

@@ -4,6 +4,8 @@ const BRACES = {
   '(': ')',
 };
 
+const NON_SELECTOR_SPACE_REGEXP = / (?![^}]+})/;
+
 export default function BracesPlugin() {
   return {
     onKeyDown(event, change) {
@@ -28,8 +30,8 @@ export default function BracesPlugin() {
           event.preventDefault();
           const text = value.anchorText.text;
           const offset = value.anchorOffset;
-          const space = text.indexOf(' ', offset);
-          const length = space > 0 ? space : text.length;
+          const delimiterIndex = text.slice(offset).search(NON_SELECTOR_SPACE_REGEXP);
+          const length = delimiterIndex > -1 ? delimiterIndex + offset : text.length;
           const forward = length - offset;
           // Insert matching braces
           change

+ 33 - 0
public/app/containers/Explore/utils/prometheus.test.ts

@@ -0,0 +1,33 @@
+import { getCleanSelector } from './prometheus';
+
+describe('getCleanSelector()', () => {
+  it('returns a clean selector from an empty selector', () => {
+    expect(getCleanSelector('{}', 1)).toBe('{}');
+  });
+  it('throws if selector is broken', () => {
+    expect(() => getCleanSelector('{foo')).toThrow();
+  });
+  it('returns the selector sorted by label key', () => {
+    expect(getCleanSelector('{foo="bar"}')).toBe('{foo="bar"}');
+    expect(getCleanSelector('{foo="bar",baz="xx"}')).toBe('{baz="xx",foo="bar"}');
+  });
+  it('returns a clean selector from an incomplete one', () => {
+    expect(getCleanSelector('{foo}')).toBe('{}');
+    expect(getCleanSelector('{foo="bar",baz}')).toBe('{foo="bar"}');
+    expect(getCleanSelector('{foo="bar",baz="}')).toBe('{foo="bar"}');
+  });
+  it('throws if not inside a selector', () => {
+    expect(() => getCleanSelector('foo{}', 0)).toThrow();
+    expect(() => getCleanSelector('foo{} + bar{}', 5)).toThrow();
+  });
+  it('returns the selector nearest to the cursor offset', () => {
+    expect(() => getCleanSelector('{foo="bar"} + {foo="bar"}', 0)).toThrow();
+    expect(getCleanSelector('{foo="bar"} + {foo="bar"}', 1)).toBe('{foo="bar"}');
+    expect(getCleanSelector('{foo="bar"} + {baz="xx"}', 1)).toBe('{foo="bar"}');
+    expect(getCleanSelector('{baz="xx"} + {foo="bar"}', 16)).toBe('{foo="bar"}');
+  });
+  it('returns a selector with metric if metric is given', () => {
+    expect(getCleanSelector('bar{foo}', 4)).toBe('{__name__="bar"}');
+    expect(getCleanSelector('baz{foo="bar"}', 12)).toBe('{__name__="baz",foo="bar"}');
+  });
+});

+ 69 - 1
public/app/containers/Explore/utils/prometheus.ts

@@ -1,9 +1,16 @@
 export const RATE_RANGES = ['1m', '5m', '10m', '30m', '1h'];
 
-export function processLabels(labels) {
+export function processLabels(labels, withName = false) {
   const values = {};
   labels.forEach(l => {
     const { __name__, ...rest } = l;
+    if (withName) {
+      values['__name__'] = values['__name__'] || [];
+      if (values['__name__'].indexOf(__name__) === -1) {
+        values['__name__'].push(__name__);
+      }
+    }
+
     Object.keys(rest).forEach(key => {
       if (!values[key]) {
         values[key] = [];
@@ -18,3 +25,64 @@ export function processLabels(labels) {
 
 // Strip syntax chars
 export const cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
+
+// const cleanSelectorRegexp = /\{(\w+="[^"\n]*?")(,\w+="[^"\n]*?")*\}/;
+const selectorRegexp = /\{[^}]*?\}/;
+const labelRegexp = /\b\w+="[^"\n]*?"/g;
+export function getCleanSelector(query: string, cursorOffset = 1): string {
+  if (!query.match(selectorRegexp)) {
+    // Special matcher for metrics
+    if (query.match(/^\w+$/)) {
+      return `{__name__="${query}"}`;
+    }
+    throw new Error('Query must contain a selector: ' + query);
+  }
+
+  // Check if inside a selector
+  const prefix = query.slice(0, cursorOffset);
+  const prefixOpen = prefix.lastIndexOf('{');
+  const prefixClose = prefix.lastIndexOf('}');
+  if (prefixOpen === -1) {
+    throw new Error('Not inside selector, missing open brace: ' + prefix);
+  }
+  if (prefixClose > -1 && prefixClose > prefixOpen) {
+    throw new Error('Not inside selector, previous selector already closed: ' + prefix);
+  }
+  const suffix = query.slice(cursorOffset);
+  const suffixCloseIndex = suffix.indexOf('}');
+  const suffixClose = suffixCloseIndex + cursorOffset;
+  const suffixOpenIndex = suffix.indexOf('{');
+  const suffixOpen = suffixOpenIndex + cursorOffset;
+  if (suffixClose === -1) {
+    throw new Error('Not inside selector, missing closing brace in suffix: ' + suffix);
+  }
+  if (suffixOpenIndex > -1 && suffixOpen < suffixClose) {
+    throw new Error('Not inside selector, next selector opens before this one closed: ' + suffix);
+  }
+
+  // Extract clean labels to form clean selector, incomplete labels are dropped
+  const selector = query.slice(prefixOpen, suffixClose);
+  let labels = {};
+  selector.replace(labelRegexp, match => {
+    const delimiterIndex = match.indexOf('=');
+    const key = match.slice(0, delimiterIndex);
+    const value = match.slice(delimiterIndex + 1, match.length);
+    labels[key] = value;
+    return '';
+  });
+
+  // Add metric if there is one before the selector
+  const metricPrefix = query.slice(0, prefixOpen);
+  const metricMatch = metricPrefix.match(/\w+$/);
+  if (metricMatch) {
+    labels['__name__'] = `"${metricMatch[0]}"`;
+  }
+
+  // Build sorted selector
+  const cleanSelector = Object.keys(labels)
+    .sort()
+    .map(key => `${key}=${labels[key]}`)
+    .join(',');
+
+  return ['{', cleanSelector, '}'].join('');
+}

+ 0 - 0
public/app/containers/ManageDashboards/FolderSettings.jest.tsx → public/app/containers/ManageDashboards/FolderSettings.test.tsx


+ 0 - 0
public/app/containers/ServerStats/ServerStats.jest.tsx → public/app/containers/ServerStats/ServerStats.test.tsx


+ 0 - 0
public/app/containers/ServerStats/__snapshots__/ServerStats.jest.tsx.snap → public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap


+ 7 - 16
public/app/containers/Teams/TeamMembers.tsx

@@ -2,9 +2,9 @@ import React from 'react';
 import { hot } from 'react-hot-loader';
 import { observer } from 'mobx-react';
 import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore';
-import appEvents from 'app/core/app_events';
 import SlideDown from 'app/core/components/Animations/SlideDown';
 import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
+import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
 
 interface Props {
   team: ITeam;
@@ -31,15 +31,7 @@ export class TeamMembers extends React.Component<Props, State> {
   };
 
   removeMember(member: ITeamMember) {
-    appEvents.emit('confirm-modal', {
-      title: 'Remove Member',
-      text: 'Are you sure you want to remove ' + member.login + ' from this group?',
-      yesText: 'Remove',
-      icon: 'fa-warning',
-      onConfirm: () => {
-        this.removeMemberConfirmed(member);
-      },
-    });
+    this.props.team.removeMember(member);
   }
 
   removeMemberConfirmed(member: ITeamMember) {
@@ -54,10 +46,8 @@ export class TeamMembers extends React.Component<Props, State> {
         </td>
         <td>{member.login}</td>
         <td>{member.email}</td>
-        <td style={{ width: '1%' }}>
-          <a onClick={() => this.removeMember(member)} className="btn btn-danger btn-mini">
-            <i className="fa fa-remove" />
-          </a>
+        <td className="text-right">
+          <DeleteButton onConfirmDelete={() => this.removeMember(member)} />
         </td>
       </tr>
     );
@@ -79,8 +69,9 @@ export class TeamMembers extends React.Component<Props, State> {
 
   render() {
     const { newTeamMember, isAdding } = this.state;
-    const members = this.props.team.members.values();
+    const members = this.props.team.filteredMembers;
     const newTeamMemberValue = newTeamMember && newTeamMember.id.toString();
+    const { team } = this.props;
 
     return (
       <div>
@@ -91,7 +82,7 @@ export class TeamMembers extends React.Component<Props, State> {
                 type="text"
                 className="gf-form-input"
                 placeholder="Search members"
-                value={''}
+                value={team.search}
                 onChange={this.onSearchQueryChange}
               />
               <i className="gf-form-input-icon fa fa-search" />

+ 0 - 0
public/app/core/components/DeleteButton/DeleteButton.jest.tsx → public/app/core/components/DeleteButton/DeleteButton.test.tsx


+ 0 - 0
public/app/core/components/EmptyListCTA/EmptyListCTA.jest.tsx → public/app/core/components/EmptyListCTA/EmptyListCTA.test.tsx


+ 0 - 0
public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.jest.tsx.snap → public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.test.tsx.snap


+ 0 - 0
public/app/core/components/PageHeader/PageHeader.jest.tsx → public/app/core/components/PageHeader/PageHeader.test.tsx


+ 0 - 0
public/app/core/components/Permissions/AddPermissions.jest.tsx → public/app/core/components/Permissions/AddPermissions.test.tsx


+ 0 - 0
public/app/core/components/Picker/PickerOption.jest.tsx → public/app/core/components/Picker/PickerOption.test.tsx


+ 0 - 0
public/app/core/components/Picker/TeamPicker.jest.tsx → public/app/core/components/Picker/TeamPicker.test.tsx


+ 0 - 0
public/app/core/components/Picker/UserPicker.jest.tsx → public/app/core/components/Picker/UserPicker.test.tsx


+ 0 - 0
public/app/core/components/Picker/__snapshots__/PickerOption.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap


+ 0 - 0
public/app/core/components/Picker/__snapshots__/TeamPicker.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap


+ 0 - 0
public/app/core/components/Picker/__snapshots__/UserPicker.jest.tsx.snap → public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap


Некоторые файлы не были показаны из-за большого количества измененных файлов