Browse Source

Merge remote-tracking branch 'grafana/master'

* grafana/master: (104 commits)
  Change to arrow functions
  Add all tests to one file
  changelog: add notes about closing #12561
  Remove angularMocks
  All tests passing
  changelog: add notes about closing #12762
  fix: team email tooltip was not showing
  fix: test data api route used old name for test data datasource, fixes #12773
  changelog: add notes about closing #12300
  Weird execution order for the tests...
  changelog: add notes about closing #12744
  changelog: add notes about closing #12727
  add aws_dx to cloudwatch datasource
  also fixed "Watt per square metre"
  fixed that missing one
  add version note to metric prefix and fix typo
  devenv: update sql dashboards
  mssql: update tests
  fix usage of metric column types so that you don't need to specify metric alias
  Begin conversion
  ...
ryan 7 years ago
Parent
Commit
27e96179c1
76 changed files with 3986 additions and 3436 deletions
  1. CHANGELOG.md (+15 -1)
  2. conf/defaults.ini (+1 -0)
  3. devenv/datasources.yaml (+27 -1)
  4. devenv/dev-dashboards/datasource_tests_mssql_fakedata.json (+28 -50)
  5. devenv/dev-dashboards/datasource_tests_mssql_unittest.json (+76 -134)
  6. devenv/dev-dashboards/datasource_tests_mysql_fakedata.json (+20 -47)
  7. devenv/dev-dashboards/datasource_tests_mysql_unittest.json (+72 -132)
  8. devenv/dev-dashboards/datasource_tests_postgres_fakedata.json (+36 -51)
  9. devenv/dev-dashboards/datasource_tests_postgres_unittest.json (+76 -132)
  10. docs/sources/features/datasources/mssql.md (+2 -1)
  11. docs/sources/features/datasources/mysql.md (+1 -0)
  12. docs/sources/features/datasources/postgres.md (+1 -0)
  13. docs/sources/features/datasources/prometheus.md (+26 -0)
  14. docs/sources/installation/configuration.md (+2 -0)
  15. docs/sources/reference/templating.md (+6 -0)
  16. package.json (+8 -8)
  17. pkg/api/metrics.go (+2 -2)
  18. pkg/services/sqlstore/dashboard_test.go (+1 -1)
  19. pkg/tsdb/cloudwatch/cloudwatch.go (+19 -5)
  20. pkg/tsdb/cloudwatch/metric_find_query.go (+5 -1)
  21. pkg/tsdb/elasticsearch/client/index_pattern.go (+27 -8)
  22. pkg/tsdb/elasticsearch/client/index_pattern_test.go (+26 -1)
  23. pkg/tsdb/mssql/macros.go (+19 -19)
  24. pkg/tsdb/mssql/macros_test.go (+1 -1)
  25. pkg/tsdb/mssql/mssql.go (+25 -243)
  26. pkg/tsdb/mssql/mssql_test.go (+58 -49)
  27. pkg/tsdb/mysql/macros.go (+19 -19)
  28. pkg/tsdb/mysql/macros_test.go (+1 -1)
  29. pkg/tsdb/mysql/mysql.go (+22 -243)
  30. pkg/tsdb/mysql/mysql_test.go (+44 -11)
  31. pkg/tsdb/postgres/macros.go (+19 -19)
  32. pkg/tsdb/postgres/macros_test.go (+1 -1)
  33. pkg/tsdb/postgres/postgres.go (+32 -245)
  34. pkg/tsdb/postgres/postgres_test.go (+44 -11)
  35. pkg/tsdb/sql_engine.go (+297 -45)
  36. public/app/containers/Explore/PromQueryField.jest.tsx (+125 -0)
  37. public/app/containers/Explore/PromQueryField.tsx (+340 -0)
  38. public/app/containers/Explore/QueryField.tsx (+231 -314)
  39. public/app/containers/Explore/QueryRows.tsx (+1 -5)
  40. public/app/containers/Explore/Typeahead.tsx (+39 -22)
  41. public/app/containers/Explore/slate-plugins/prism/promql.ts (+359 -58)
  42. public/app/containers/Teams/TeamList.tsx (+2 -16)
  43. public/app/core/components/DeleteButton/DeleteButton.jest.tsx (+44 -0)
  44. public/app/core/components/DeleteButton/DeleteButton.tsx (+66 -0)
  45. public/app/core/components/Forms/Forms.tsx (+1 -1)
  46. public/app/core/specs/backend_srv.jest.ts (+25 -0)
  47. public/app/core/specs/backend_srv_specs.ts (+0 -31)
  48. public/app/core/specs/kbn.jest.ts (+1 -1)
  49. public/app/core/utils/kbn.ts (+21 -21)
  50. public/app/features/templating/specs/variable_srv_init.jest.ts (+83 -61)
  51. public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts (+324 -0)
  52. public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts (+0 -338)
  53. public/app/plugins/datasource/influxdb/query_ctrl.ts (+0 -1)
  54. public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts (+178 -0)
  55. public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts (+0 -193)
  56. public/app/plugins/datasource/mssql/partials/query.editor.html (+4 -2)
  57. public/app/plugins/datasource/mysql/partials/query.editor.html (+4 -1)
  58. public/app/plugins/datasource/postgres/partials/query.editor.html (+4 -1)
  59. public/app/plugins/datasource/prometheus/datasource.ts (+0 -1)
  60. public/app/plugins/datasource/prometheus/specs/completer.jest.ts (+33 -40)
  61. public/app/plugins/datasource/prometheus/specs/datasource.jest.ts (+808 -14)
  62. public/app/plugins/datasource/prometheus/specs/datasource_specs.ts (+0 -683)
  63. public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts (+3 -0)
  64. public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts (+19 -18)
  65. public/app/plugins/panel/singlestat/specs/singlestat.jest.ts (+80 -79)
  66. public/sass/_grafana.scss (+1 -0)
  67. public/sass/_variables.dark.scss (+3 -1)
  68. public/sass/_variables.light.scss (+14 -12)
  69. public/sass/components/_delete_button.scss (+50 -0)
  70. public/sass/components/_query_editor.scss (+3 -3)
  71. public/sass/components/_slate_editor.scss (+2 -1)
  72. public/sass/components/_submenu.scss (+1 -1)
  73. public/sass/components/_timepicker.scss (+1 -1)
  74. scripts/webpack/webpack.hot.js (+9 -1)
  75. tsconfig.json (+42 -31)
  76. yarn.lock (+6 -2)

+ 15 - 1
CHANGELOG.md

@@ -16,17 +16,31 @@
 * **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
 * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
 * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
+* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
 * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
+* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
 * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
+* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
+* **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
+* **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
+* **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow)
+* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
+* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
+* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
+* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
 
-# 5.2.2 (unreleased)
+# 5.2.2 (2018-07-25)
 
 ### Minor
 
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
 * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506)
+* **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827)
+* **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551)
+* **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589)
+* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533), thx [@mtanda](https://github.com/mtanda)
 
 # 5.2.1 (2018-06-29)
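The #12727 entry above is the reason the dashboard diffs further down collapse their two-query panels into one. A minimal SQL sketch of the new behavior (MySQL/Postgres flavor; the table and column names are taken from the test dashboards below, and the exact series-name format is an inference from the changelog wording):

```sql
-- With a "metric" column plus several value columns, each value column
-- becomes its own series, prefixed by the metric value -- roughly
-- "Metric A valueOne", "Metric A valueTwo", and so on.
SELECT
  $__timeGroup(time, '5m') as time,
  measurement as metric,      -- series-name prefix
  avg(valueOne) as valueOne,  -- series "<measurement> valueOne"
  avg(valueTwo) as valueTwo   -- series "<measurement> valueTwo"
FROM metric_values
WHERE $__timeFilter(time)
GROUP BY 1, 2
ORDER BY 1
```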
 

+ 1 - 0
conf/defaults.ini

@@ -311,6 +311,7 @@ token_url =
 api_url =
 team_ids =
 allowed_organizations =
+tls_skip_verify_insecure = false
 
 #################################### Basic Auth ##########################
 [auth.basic]

+ 27 - 1
devenv/datasources.yaml

@@ -51,12 +51,28 @@ datasources:
     user: grafana
     password: password
 
+  - name: gdev-mysql-ds-tests
+    type: mysql
+    url: localhost:3306
+    database: grafana_ds_tests
+    user: grafana
+    password: password
+
   - name: gdev-mssql
     type: mssql
     url: localhost:1433
     database: grafana
     user: grafana
-    password: "Password!"
+    secureJsonData:
+      password: Password!
+
+  - name: gdev-mssql-ds-tests
+    type: mssql
+    url: localhost:1433
+    database: grafanatest
+    user: grafana
+    secureJsonData:
+      password: Password!
 
   - name: gdev-postgres
     type: postgres
@@ -68,6 +84,16 @@ datasources:
     jsonData:
       sslmode: "disable"
 
+  - name: gdev-postgres-ds-tests
+    type: postgres
+    url: localhost:5432
+    database: grafanadstest
+    user: grafanatest
+    secureJsonData:
+      password: grafanatest
+    jsonData:
+      sslmode: "disable"
+
   - name: gdev-cloudwatch
     type: cloudwatch
     editable: true

+ 28 - 50
docker/blocks/mssql/dashboard.json → devenv/dev-dashboards/datasource_tests_mssql_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MSSQL",
-      "label": "MSSQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mssql",
-      "pluginName": "MSSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mssql",
-      "name": "MSSQL",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1520976748896,
+  "iteration": 1532618661457,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -149,14 +112,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -234,14 +201,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -313,11 +284,15 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fontSize": "100%",
       "gridPos": {
         "h": 10,
@@ -371,13 +346,13 @@
   ],
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": ["gdev", "mssql", "fake-data-gen"],
   "templating": {
     "list": [
       {
         "allValue": null,
         "current": {},
-        "datasource": "${DS_MSSQL}",
+        "datasource": "gdev-mssql",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -387,6 +362,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -397,7 +373,7 @@
       {
         "allValue": null,
         "current": {},
-        "datasource": "${DS_MSSQL}",
+        "datasource": "gdev-mssql",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -407,6 +383,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -499,6 +476,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -533,7 +511,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - MSSQL",
+  "title": "Datasource tests - MSSQL",
   "uid": "86Js1xRmk",
-  "version": 11
+  "version": 1
 }

+ 76 - 134
docker/blocks/mssql_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_mssql_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MSSQL_TEST",
-      "label": "MSSQL Test",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mssql",
-      "pluginName": "Microsoft SQL Server"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mssql",
-      "name": "Microsoft SQL Server",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the mssql unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320861623,
+  "iteration": 1532949769359,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -907,14 +871,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value one' as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value two' as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -962,7 +920,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1065,7 +1023,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1103,14 +1061,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1158,7 +1110,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1243,7 +1195,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1281,14 +1233,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1336,7 +1282,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1421,7 +1367,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1459,14 +1405,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1514,7 +1454,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1599,7 +1539,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1686,7 +1626,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1773,7 +1713,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1809,14 +1749,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1867,7 +1801,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1954,7 +1888,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1990,14 +1924,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2048,7 +1976,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2135,7 +2063,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2171,14 +2099,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2229,7 +2151,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2316,7 +2238,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2352,14 +2274,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2410,7 +2326,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2496,22 +2412,47 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": [
+    "gdev",
+    "mssql"
+  ],
   "templating": {
     "list": [
       {
         "allValue": "'ALL'",
-        "current": {},
-        "datasource": "${DS_MSSQL_TEST}",
+        "current": {
+          "selected": true,
+          "tags": [],
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mssql-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": false,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 0,
         "tagValuesQuery": "",
         "tags": [],
@@ -2564,6 +2505,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2598,7 +2540,7 @@
     ]
   },
   "timezone": "",
-  "title": "Microsoft SQL Server Data Source Test",
+  "title": "Datasource tests - MSSQL (unit test)",
   "uid": "GlAqcPgmz",
-  "version": 58
+  "version": 3
 }
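For reference, the panel-query consolidation repeated throughout the dashboard diff above, shown once in its MSSQL form (queries copied from the diff):

```sql
-- Before: one query per value column, each building its series name by hand:
--   SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne
--   FROM metric_values WHERE $__timeFilter(time) ORDER BY 1   -- refId A
--   SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo
--   FROM metric_values WHERE $__timeFilter(time) ORDER BY 1   -- refId B
-- After (#12727): a single query; the metric column prefixes each value column.
SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo
FROM metric_values
WHERE $__timeFilter(time)
ORDER BY 1
```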

+ 20 - 47
docker/blocks/mysql/dashboard.json → devenv/dev-dashboards/datasource_tests_mysql_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MYSQL",
-      "label": "MySQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mysql",
-      "pluginName": "MySQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mysql",
-      "name": "MySQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523372133566,
+  "iteration": 1532620738041,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -161,7 +124,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -251,7 +214,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -332,7 +295,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fontSize": "100%",
       "gridPos": {
         "h": 9,
@@ -390,6 +353,7 @@
   "schemaVersion": 16,
   "style": "dark",
   "tags": [
+    "gdev",
     "fake-data-gen",
     "mysql"
   ],
@@ -397,8 +361,11 @@
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_MYSQL}",
+        "current": {
+          "text": "America",
+          "value": "America"
+        },
+        "datasource": "gdev-mysql",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -408,6 +375,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -417,8 +385,11 @@
       },
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_MYSQL}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mysql",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -428,6 +399,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -520,6 +492,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -554,7 +527,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - MySQL",
+  "title": "Datasource tests - MySQL",
   "uid": "DGsCac3kz",
   "version": 8
 }

+ 72 - 132
docker/blocks/mysql_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_mysql_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MYSQL_TEST",
-      "label": "MySQL TEST",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mysql",
-      "pluginName": "MySQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mysql",
-      "name": "MySQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the mysql unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320712115,
+  "iteration": 1532949531280,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -907,14 +871,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value one') as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value two') as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1,2\nORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -962,7 +920,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1059,7 +1017,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1097,14 +1055,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1152,7 +1104,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1237,7 +1189,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1275,14 +1227,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1330,7 +1276,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1415,7 +1361,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1453,14 +1399,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1508,7 +1448,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1593,7 +1533,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1629,14 +1569,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1687,7 +1621,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1774,7 +1708,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1810,14 +1744,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1868,7 +1796,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1955,7 +1883,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1991,14 +1919,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2049,7 +1971,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2136,7 +2058,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2172,14 +2094,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
+          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2230,7 +2146,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2316,22 +2232,45 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": [
+    "gdev",
+    "mysql"
+  ],
   "templating": {
     "list": [
       {
         "allValue": "",
-        "current": {},
-        "datasource": "${DS_MYSQL_TEST}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mysql-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": true,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 0,
         "tagValuesQuery": "",
         "tags": [],
@@ -2384,6 +2323,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2418,7 +2358,7 @@
     ]
   },
   "timezone": "",
-  "title": "MySQL Data Source Test",
+  "title": "Datasource tests - MySQL (unittest)",
   "uid": "Hmf8FDkmz",
-  "version": 12
+  "version": 1
 }
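The MySQL queries above also lean on the macro change called out in the changelog (#11618). A hedged sketch of what $__timeFilter(time) now produces; the authoritative expansion lives in pkg/tsdb/mysql/macros.go, which this commit touches:

```sql
-- Assumed expansion after #11618: a datetime comparison instead of epoch
-- arithmetic. The epoch values below are placeholders for the dashboard's
-- current time range, not real output.
SELECT $__time(time), measurement as metric, valueOne
FROM metric_values
WHERE $__timeFilter(time)  -- expands to roughly:
-- time BETWEEN FROM_UNIXTIME(1532618661) AND FROM_UNIXTIME(1532620738)
ORDER BY 1
```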

+ 36 - 51
docker/blocks/postgres/dashboard.json → devenv/dev-dashboards/datasource_tests_postgres_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_POSTGRESQL",
-      "label": "PostgreSQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "postgres",
-      "pluginName": "PostgreSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": ""
-    },
-    {
-      "type": "datasource",
-      "id": "postgres",
-      "name": "PostgreSQL",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": ""
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1518601837383,
+  "iteration": 1532620601931,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -150,14 +113,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -236,14 +203,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -316,11 +287,15 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fontSize": "100%",
       "gridPos": {
         "h": 9,
@@ -377,6 +352,7 @@
   "schemaVersion": 16,
   "style": "dark",
   "tags": [
+    "gdev",
     "fake-data-gen",
     "postgres"
   ],
@@ -384,8 +360,11 @@
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRESQL}",
+        "current": {
+          "text": "America",
+          "value": "America"
+        },
+        "datasource": "gdev-postgres",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -395,6 +374,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -404,8 +384,11 @@
       },
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRESQL}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-postgres",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -415,6 +398,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -507,6 +491,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -541,7 +526,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - PostgreSQL",
+  "title": "Datasource tests - Postgres",
   "uid": "JYola5qzz",
-  "version": 1
+  "version": 4
 }

+ 76 - 132
docker/blocks/postgres_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_postgres_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_POSTGRES_TEST",
-      "label": "Postgres TEST",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "postgres",
-      "pluginName": "PostgreSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "postgres",
-      "name": "PostgreSQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the postgres unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320929325,
+  "iteration": 1532951521836,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -907,14 +871,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value one' as metric, \n  avg(\"valueOne\") as \"valueOne\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
+          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value two' as metric, \n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -962,7 +920,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1047,7 +1005,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1085,14 +1043,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1140,7 +1092,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1225,7 +1177,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1263,14 +1215,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1318,7 +1264,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1403,7 +1349,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1441,14 +1387,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1496,7 +1436,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1581,7 +1521,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1617,14 +1557,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1675,7 +1609,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1762,7 +1696,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1798,14 +1732,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -1856,7 +1784,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1943,7 +1871,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1979,14 +1907,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2037,7 +1959,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2124,7 +2046,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2160,14 +2082,8 @@
         {
           "alias": "",
           "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
+          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
           "refId": "A"
-        },
-        {
-          "alias": "",
-          "format": "time_series",
-          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
-          "refId": "B"
         }
       ],
       "thresholds": [],
@@ -2218,7 +2134,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2304,22 +2220,49 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": [
+    "gdev",
+    "postgres"
+  ],
   "templating": {
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRES_TEST}",
+        "current": {
+          "selected": true,
+          "tags": [],
+          "text": "All",
+          "value": [
+            "$__all"
+          ]
+        },
+        "datasource": "gdev-postgres-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": true,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -2372,6 +2315,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2406,7 +2350,7 @@
     ]
   },
   "timezone": "",
-  "title": "Postgres Data Source Test",
+  "title": "Datasource tests - Postgres (unittest)",
   "uid": "vHQdlVziz",
-  "version": 14
+  "version": 1
 }

+ 2 - 1
docs/sources/features/datasources/mssql.md

@@ -148,7 +148,8 @@ The resulting table panel:
 
 ## Time series queries
 
-If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as the metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns; each will use its name as the metric name.
+If you return multiple value columns and a column named `metric`, this column is used as a prefix for the series name (only available in Grafana 5.3+).
 
 **Example database table:**
 

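The prefix rule documented in the hunk above (and asserted by the mssql/mysql/postgres unit tests later in this diff) can be modeled with a short sketch. This is a hypothetical illustration of the naming behavior, not Grafana's actual implementation; the function name and signature are invented for the example.

```go
package main

import "fmt"

// seriesName models the Grafana 5.3+ rule described above: with a single
// value column the metric column is the series name; with several value
// columns it becomes a prefix for each column's series.
func seriesName(metric, valueColumn string, valueColumnCount int) string {
	if metric == "" {
		return valueColumn // no metric column: the value column name is used
	}
	if valueColumnCount > 1 {
		return metric + " " + valueColumn // metric column used as prefix
	}
	return metric
}

func main() {
	for _, col := range []string{"valueOne", "valueTwo"} {
		fmt.Println(seriesName("Metric A", col, 2))
	}
	// Output:
	// Metric A valueOne
	// Metric A valueTwo
}
```

Running it prints `Metric A valueOne` and `Metric A valueTwo`, which are exactly the series names the updated unit tests in this commit expect.
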
+ 1 - 0
docs/sources/features/datasources/mysql.md

@@ -104,6 +104,7 @@ The resulting table panel:
 If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
 Any column except `time` and `metric` is treated as a value column.
 You may return a column named `metric` that is used as metric name for the value column.
+If you return multiple value columns and a column named `metric`, this column is used as a prefix for the series name (only available in Grafana 5.3+).
 
 **Example with `metric` column:**
 

+ 1 - 0
docs/sources/features/datasources/postgres.md

@@ -102,6 +102,7 @@ The resulting table panel:
 If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
 Any column except `time` and `metric` is treated as a value column.
 You may return a column named `metric` that is used as metric name for the value column.
+If you return multiple value columns and a column named `metric`, this column is used as a prefix for the series name (only available in Grafana 5.3+).
 
 **Example with `metric` column:**
 

+ 26 - 0
docs/sources/features/datasources/prometheus.md

@@ -75,6 +75,32 @@ Name | Description
 
 For details of *metric names*, *label names* and *label values*, please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels).
 
+
+#### Using interval and range variables
+
+> Support for `$__range` and `$__range_ms` is only available from Grafana v5.3
+
+It's possible to use some global built-in variables in query variables: `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`; see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries, since the
+`label_values` function doesn't support queries.
+
+Make sure to set the variable's `refresh` trigger to `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
+
+**Example usage:**
+
+Populate a variable with the busiest 5 request instances based on average QPS over the time range shown in the dashboard:
+
+```
+Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance)))
+Regex: /"([^"]+)"/
+```
+
+Populate a variable with the instances having a certain state over the time range shown in the dashboard:
+
+```
+Query: query_result(max_over_time(<metric>[$__range]) != <state>)
+Regex:
+```
+
 ### Using variables in queries
 
 There are two syntaxes:

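To make the `Regex` field in the examples above concrete, here is a small sketch that applies the same capture group to `query_result()`-style output. The sample lines only approximate the real response format; treat them as illustrative.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Illustrative query_result() lines; real output has this general shape.
	results := []string{
		`{instance="10.1.2.3:9090"} 421 1532951521`,
		`{instance="10.1.2.4:9090"} 377 1532951521`,
	}
	// Same capture group as the Regex field above: grab the quoted value.
	re := regexp.MustCompile(`"([^"]+)"`)
	for _, line := range results {
		if m := re.FindStringSubmatch(line); m != nil {
			fmt.Println(m[1]) // 10.1.2.3:9090, then 10.1.2.4:9090
		}
	}
}
```
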
+ 2 - 0
docs/sources/installation/configuration.md

@@ -15,6 +15,8 @@ weight = 1
 The Grafana back-end has a number of configuration options that can be
 specified in a `.ini` configuration file or specified using environment variables.
 
+> **Note.** Grafana needs to be restarted for any configuration changes to take effect.
+
 ## Comments In .ini Files
 
 Semicolons (the `;` char) are the standard way to comment out lines in a `.ini` file.

+ 6 - 0
docs/sources/reference/templating.md

@@ -273,6 +273,12 @@ The `$__timeFilter` is used in the MySQL data source.
 
 This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias.
 
+### The $__range Variable
+
+> Only available in Grafana v5.3+
+
+Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated as `to - from`. It has a millisecond representation called `$__range_ms`.
+
 ## Repeating Panels
 
 Template variables can be very useful to dynamically change your queries across a whole dashboard. If you want

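Since the doc above defines `$__range` as `to - from`, a minimal sketch of that calculation looks like this; the helper name and string formatting are invented for the example and are not Grafana's implementation.

```go
package main

import (
	"fmt"
	"time"
)

// rangeVariables derives the two values described above: $__range as the
// dashboard range (to - from) and $__range_ms as its millisecond form.
func rangeVariables(from, to time.Time) (rangeSeconds string, rangeMs int64) {
	d := to.Sub(from)
	return fmt.Sprintf("%ds", int64(d.Seconds())), int64(d / time.Millisecond)
}

func main() {
	to := time.Date(2018, 7, 30, 18, 0, 0, 0, time.UTC)
	from := to.Add(-6 * time.Hour)
	s, ms := rangeVariables(from, to)
	fmt.Println(s, ms) // 21600s 21600000
}
```
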
+ 8 - 8
package.json

@@ -34,7 +34,7 @@
     "expose-loader": "^0.7.3",
     "extract-text-webpack-plugin": "^4.0.0-beta.0",
     "file-loader": "^1.1.11",
-    "fork-ts-checker-webpack-plugin": "^0.4.1",
+    "fork-ts-checker-webpack-plugin": "^0.4.2",
     "gaze": "^1.1.2",
     "glob": "~7.0.0",
     "grunt": "1.0.1",
@@ -71,12 +71,14 @@
     "karma-webpack": "^3.0.0",
     "lint-staged": "^6.0.0",
     "load-grunt-tasks": "3.5.2",
+    "mini-css-extract-plugin": "^0.4.0",
     "mobx-react-devtools": "^4.2.15",
     "mocha": "^4.0.1",
     "ng-annotate-loader": "^0.6.1",
     "ng-annotate-webpack-plugin": "^0.2.1-pre",
     "ngtemplate-loader": "^2.0.1",
     "npm": "^5.4.2",
+    "optimize-css-assets-webpack-plugin": "^4.0.2",
     "phantomjs-prebuilt": "^2.1.15",
     "postcss-browser-reporter": "^0.5.0",
     "postcss-loader": "^2.0.6",
@@ -90,15 +92,16 @@
     "style-loader": "^0.21.0",
     "systemjs": "0.20.19",
     "systemjs-plugin-css": "^0.1.36",
-    "ts-loader": "^4.3.0",
     "ts-jest": "^22.4.6",
+    "ts-loader": "^4.3.0",
+    "tslib": "^1.9.3",
     "tslint": "^5.8.0",
     "tslint-loader": "^3.5.3",
     "typescript": "^2.6.2",
+    "uglifyjs-webpack-plugin": "^1.2.7",
     "webpack": "^4.8.0",
     "webpack-bundle-analyzer": "^2.9.0",
     "webpack-cleanup-plugin": "^0.5.1",
-    "fork-ts-checker-webpack-plugin": "^0.4.2",
     "webpack-cli": "^2.1.4",
     "webpack-dev-server": "^3.1.0",
     "webpack-merge": "^4.1.0",
@@ -155,14 +158,12 @@
     "immutable": "^3.8.2",
     "jquery": "^3.2.1",
     "lodash": "^4.17.10",
-    "mini-css-extract-plugin": "^0.4.0",
     "mobx": "^3.4.1",
     "mobx-react": "^4.3.5",
     "mobx-state-tree": "^1.3.1",
     "moment": "^2.22.2",
     "mousetrap": "^1.6.0",
     "mousetrap-global-bind": "^1.1.0",
-    "optimize-css-assets-webpack-plugin": "^4.0.2",
     "prismjs": "^1.6.0",
     "prop-types": "^15.6.0",
     "react": "^16.2.0",
@@ -181,10 +182,9 @@
     "slate-react": "^0.12.4",
     "tether": "^1.4.0",
     "tether-drop": "https://github.com/torkelo/drop/tarball/master",
-    "tinycolor2": "^1.4.1",
-    "uglifyjs-webpack-plugin": "^1.2.7"
+    "tinycolor2": "^1.4.1"
   },
   "resolutions": {
     "caniuse-db": "1.0.30000772"
   }
-}
+}

+ 2 - 2
pkg/api/metrics.go

@@ -52,7 +52,7 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response {
 		if res.Error != nil {
 			res.ErrorString = res.Error.Error()
 			resp.Message = res.ErrorString
-			statusCode = 500
+			statusCode = 400
 		}
 	}
 
@@ -99,7 +99,7 @@ func GetTestDataRandomWalk(c *m.ReqContext) Response {
 	timeRange := tsdb.NewTimeRange(from, to)
 	request := &tsdb.TsdbQuery{TimeRange: timeRange}
 
-	dsInfo := &m.DataSource{Type: "grafana-testdata-datasource"}
+	dsInfo := &m.DataSource{Type: "testdata"}
 	request.Queries = append(request.Queries, &tsdb.Query{
 		RefId:      "A",
 		IntervalMs: intervalMs,

+ 1 - 1
pkg/services/sqlstore/dashboard_test.go

@@ -181,7 +181,7 @@ func TestDashboardDataAccess(t *testing.T) {
 				So(err, ShouldBeNil)
 				So(query.Result.FolderId, ShouldEqual, 0)
 				So(query.Result.CreatedBy, ShouldEqual, savedDash.CreatedBy)
-				So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second))
+				So(query.Result.Created, ShouldHappenWithin, 3*time.Second, savedDash.Created)
 				So(query.Result.UpdatedBy, ShouldEqual, 100)
 				So(query.Result.Updated.IsZero(), ShouldBeFalse)
 			})

+ 19 - 5
pkg/tsdb/cloudwatch/cloudwatch.go

@@ -17,6 +17,7 @@ import (
 	"golang.org/x/sync/errgroup"
 
 	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/awserr"
 	"github.com/aws/aws-sdk-go/aws/request"
 	"github.com/aws/aws-sdk-go/service/cloudwatch"
 	"github.com/aws/aws-sdk-go/service/ec2/ec2iface"
@@ -98,11 +99,15 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 			continue
 		}
 
+		RefId := queryContext.Queries[i].RefId
 		query, err := parseQuery(queryContext.Queries[i].Model)
 		if err != nil {
-			return nil, err
+			result.Results[RefId] = &tsdb.QueryResult{
+				Error: err,
+			}
+			return result, nil
 		}
-		query.RefId = queryContext.Queries[i].RefId
+		query.RefId = RefId
 
 		if query.Id != "" {
 			if _, ok := getMetricDataQueries[query.Region]; !ok {
@@ -113,15 +118,21 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 		}
 
 		if query.Id == "" && query.Expression != "" {
-			return nil, fmt.Errorf("Invalid query: id should be set if using expression")
+			result.Results[query.RefId] = &tsdb.QueryResult{
+				Error: fmt.Errorf("Invalid query: id should be set if using expression"),
+			}
+			return result, nil
 		}
 
 		eg.Go(func() error {
 			queryRes, err := e.executeQuery(ectx, query, queryContext)
-			if err != nil {
+			if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
 				return err
 			}
 			result.Results[queryRes.RefId] = queryRes
+			if err != nil {
+				result.Results[queryRes.RefId].Error = err
+			}
 			return nil
 		})
 	}
@@ -131,11 +142,14 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 			q := getMetricDataQuery
 			eg.Go(func() error {
 				queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext)
-				if err != nil {
+				if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
 					return err
 				}
 				for _, queryRes := range queryResponses {
 					result.Results[queryRes.RefId] = queryRes
+					if err != nil {
+						result.Results[queryRes.RefId].Error = err
+					}
 				}
 				return nil
 			})

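The cloudwatch.go hunk above changes error handling so that most query failures are attached to the individual query result, and only AWS errors with code 500 still abort the whole errgroup. Below is a self-contained sketch of that pattern, using stand-in types instead of Grafana's, and a mutex around the shared results map.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"sync"

	"golang.org/x/sync/errgroup"
)

type queryResult struct{ err error }

// isServerError stands in for the awserr.Error Code() == "500" check.
func isServerError(err error) bool { return false }

// executeQuery is hypothetical; "B" fails the way an invalid query would.
func executeQuery(refID string) error {
	if refID == "B" {
		return errors.New("invalid query")
	}
	return nil
}

func runQueries(ctx context.Context, refIDs []string) (map[string]*queryResult, error) {
	results := make(map[string]*queryResult)
	var mu sync.Mutex
	eg, _ := errgroup.WithContext(ctx)
	for _, refID := range refIDs {
		refID := refID // capture loop variable for the closure
		eg.Go(func() error {
			err := executeQuery(refID)
			if isServerError(err) {
				return err // only server-side errors abort all queries
			}
			mu.Lock()
			results[refID] = &queryResult{err: err} // per-query errors stay local
			mu.Unlock()
			return nil
		})
	}
	if err := eg.Wait(); err != nil {
		return nil, err
	}
	return results, nil
}

func main() {
	results, err := runQueries(context.Background(), []string{"A", "B"})
	fmt.Println(err, results["A"].err, results["B"].err)
	// <nil> <nil> invalid query
}
```

The design choice here is that one bad query (for example, an expression without an id) no longer hides the results of its sibling queries.
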
+ 5 - 1
pkg/tsdb/cloudwatch/metric_find_query.go

@@ -46,6 +46,7 @@ func init() {
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
 		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
+		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
 		"AWS/DynamoDB":       {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "TimeToLiveDeletedItemCount", "ThrottledRequests", "UserErrors", "WriteThrottleEvents"},
 		"AWS/EBS":            {"VolumeReadBytes", "VolumeWriteBytes", "VolumeReadOps", "VolumeWriteOps", "VolumeTotalReadTime", "VolumeTotalWriteTime", "VolumeIdleTime", "VolumeQueueLength", "VolumeThroughputPercentage", "VolumeConsumedReadWriteOps", "BurstBalance"},
 		"AWS/EC2":            {"CPUCreditUsage", "CPUCreditBalance", "CPUUtilization", "DiskReadOps", "DiskWriteOps", "DiskReadBytes", "DiskWriteBytes", "NetworkIn", "NetworkOut", "NetworkPacketsIn", "NetworkPacketsOut", "StatusCheckFailed", "StatusCheckFailed_Instance", "StatusCheckFailed_System"},
@@ -86,13 +87,14 @@ func init() {
 		"AWS/Kinesis":          {"GetRecords.Bytes", "GetRecords.IteratorAge", "GetRecords.IteratorAgeMilliseconds", "GetRecords.Latency", "GetRecords.Records", "GetRecords.Success", "IncomingBytes", "IncomingRecords", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Success", "PutRecords.Bytes", "PutRecords.Latency", "PutRecords.Records", "PutRecords.Success", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "IteratorAgeMilliseconds", "OutgoingBytes", "OutgoingRecords"},
 		"AWS/KinesisAnalytics": {"Bytes", "MillisBehindLatest", "Records", "Success"},
 		"AWS/Lambda":           {"Invocations", "Errors", "Duration", "Throttles", "IteratorAge"},
+		"AWS/AppSync":          {"Latency", "4XXError", "5XXError"},
 		"AWS/Logs":             {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"},
 		"AWS/ML":               {"PredictCount", "PredictFailureCount"},
 		"AWS/NATGateway":       {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"},
 		"AWS/NetworkELB":       {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"},
 		"AWS/OpsWorks":         {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"},
 		"AWS/Redshift":         {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"},
-		"AWS/RDS":              {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"},
+		"AWS/RDS":              {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"},
 		"AWS/Route53":          {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"},
 		"AWS/S3":               {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", "PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"},
 		"AWS/SES":              {"Bounce", "Complaint", "Delivery", "Reject", "Send"},
@@ -118,6 +120,7 @@ func init() {
 		"AWS/CloudFront":       {"DistributionId", "Region"},
 		"AWS/CloudSearch":      {},
 		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
+		"AWS/DX":               {"ConnectionId"},
 		"AWS/DynamoDB":         {"TableName", "GlobalSecondaryIndexName", "Operation", "StreamLabel"},
 		"AWS/EBS":              {"VolumeId"},
 		"AWS/EC2":              {"AutoScalingGroupName", "ImageId", "InstanceId", "InstanceType"},
@@ -135,6 +138,7 @@ func init() {
 		"AWS/Kinesis":          {"StreamName", "ShardId"},
 		"AWS/KinesisAnalytics": {"Flow", "Id", "Application"},
 		"AWS/Lambda":           {"FunctionName", "Resource", "Version", "Alias"},
+		"AWS/AppSync":          {"GraphQLAPIId"},
 		"AWS/Logs":             {"LogGroupName", "DestinationType", "FilterName"},
 		"AWS/ML":               {"MLModelId", "RequestMode"},
 		"AWS/NATGateway":       {"NatGatewayId"},

+ 27 - 8
pkg/tsdb/elasticsearch/client/index_pattern.go

@@ -248,13 +248,28 @@ var datePatternReplacements = map[string]string{
 
 func formatDate(t time.Time, pattern string) string {
 	var datePattern string
-	parts := strings.Split(strings.TrimLeft(pattern, "["), "]")
-	base := parts[0]
-	if len(parts) == 2 {
-		datePattern = parts[1]
-	} else {
-		datePattern = base
-		base = ""
+	base := ""
+	ltr := false
+
+	if strings.HasPrefix(pattern, "[") {
+		parts := strings.Split(strings.TrimLeft(pattern, "["), "]")
+		base = parts[0]
+		if len(parts) == 2 {
+			datePattern = parts[1]
+		} else {
+			datePattern = base
+			base = ""
+		}
+		ltr = true
+	} else if strings.HasSuffix(pattern, "]") {
+		parts := strings.Split(strings.TrimRight(pattern, "]"), "[")
+		datePattern = parts[0]
+		if len(parts) == 2 {
+			base = parts[1]
+		} else {
+			base = ""
+		}
+		ltr = false
 	}
 
 	formatted := t.Format(patternToLayout(datePattern))
@@ -293,7 +308,11 @@ func formatDate(t time.Time, pattern string) string {
 		formatted = strings.Replace(formatted, "<stdHourNoZero>", fmt.Sprintf("%d", t.Hour()), -1)
 	}
 
-	return base + formatted
+	if ltr {
+		return base + formatted
+	}
+
+	return formatted + base
 }
 
 func patternToLayout(pattern string) string {

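The index_pattern.go change above teaches `formatDate` to accept the `[literal]` part on either side of the date pattern. A compact, hypothetical sketch of just that parsing step, with an invented function name:

```go
package main

import (
	"fmt"
	"strings"
)

// splitPattern sketches the bracket handling added above: a [literal]
// part may now sit before or after the date pattern, e.g.
// "[data-]YYYY.MM.DD" or "YYYY.MM.DD[-data]".
func splitPattern(pattern string) (base, datePattern string, prefix bool) {
	if strings.HasPrefix(pattern, "[") {
		parts := strings.SplitN(strings.TrimPrefix(pattern, "["), "]", 2)
		if len(parts) == 2 {
			return parts[0], parts[1], true
		}
		return "", parts[0], true
	}
	if strings.HasSuffix(pattern, "]") {
		parts := strings.SplitN(strings.TrimSuffix(pattern, "]"), "[", 2)
		if len(parts) == 2 {
			return parts[1], parts[0], false
		}
		return "", parts[0], false
	}
	return "", pattern, true
}

func main() {
	for _, p := range []string{"[data-]YYYY.MM.DD", "YYYY.MM.DD[-data]"} {
		base, date, prefix := splitPattern(p)
		fmt.Printf("base=%q date=%q prefix=%v\n", base, date, prefix)
	}
}
```
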
+ 26 - 1
pkg/tsdb/elasticsearch/client/index_pattern_test.go

@@ -28,29 +28,54 @@ func TestIndexPattern(t *testing.T) {
 		to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
 
 		indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) {
-			//So(indices, ShouldHaveLength, 1)
+			So(indices, ShouldHaveLength, 1)
 			So(indices[0], ShouldEqual, "data-2018.05.15.17")
 		})
 
+		indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) {
+			So(indices, ShouldHaveLength, 1)
+			So(indices[0], ShouldEqual, "2018.05.15.17-data")
+		})
+
 		indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) {
 			So(indices, ShouldHaveLength, 1)
 			So(indices[0], ShouldEqual, "data-2018.05.15")
 		})
 
+		indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) {
+			So(indices, ShouldHaveLength, 1)
+			So(indices[0], ShouldEqual, "2018.05.15-data")
+		})
+
 		indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) {
 			So(indices, ShouldHaveLength, 1)
 			So(indices[0], ShouldEqual, "data-2018.20")
 		})
 
+		indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, to), func(indices []string) {
+			So(indices, ShouldHaveLength, 1)
+			So(indices[0], ShouldEqual, "2018.20-data")
+		})
+
 		indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) {
 			So(indices, ShouldHaveLength, 1)
 			So(indices[0], ShouldEqual, "data-2018.05")
 		})
 
+		indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) {
+			So(indices, ShouldHaveLength, 1)
+			So(indices[0], ShouldEqual, "2018.05-data")
+		})
+
 		indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) {
 			So(indices, ShouldHaveLength, 1)
 			So(indices[0], ShouldEqual, "data-2018")
 		})
+
+		indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) {
+			So(indices, ShouldHaveLength, 1)
+			So(indices[0], ShouldEqual, "2018-data")
+		})
 	})
 
 	Convey("Hourly interval", t, func() {

+ 19 - 19
pkg/tsdb/mssql/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type MsSqlMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type msSqlMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewMssqlMacroEngine() tsdb.SqlMacroEngine {
-	return &MsSqlMacroEngine{}
+func newMssqlMacroEngine() tsdb.SqlMacroEngine {
+	return &msSqlMacroEngine{}
 }
 
-func (m *MsSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__time":
 		if len(args) == 0 {
@@ -83,11 +83,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -97,16 +97,16 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -114,11 +114,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}

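The `$__timeGroup` macro above expands to SQL of the form `FLOOR(DATEDIFF(second, '1970-01-01', <col>)/<interval>)*<interval>`, which is plain epoch bucketing. The arithmetic, sketched in Go for non-negative timestamps (Go's integer division truncates, which only matches FLOOR for non-negative values):

```go
package main

import (
	"fmt"
	"time"
)

// timeGroup truncates a timestamp down to the start of its
// interval-sized bucket, mirroring the generated SQL above.
func timeGroup(t time.Time, interval time.Duration) int64 {
	sec := int64(interval.Seconds())
	return t.Unix() / sec * sec
}

func main() {
	t := time.Date(2018, 3, 15, 13, 2, 37, 0, time.UTC)
	fmt.Println(timeGroup(t, 5*time.Minute)) // 1521118800, i.e. the 13:00:00 bucket
}
```
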
+ 1 - 1
pkg/tsdb/mssql/macros_test.go

@@ -14,7 +14,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := &MsSqlMacroEngine{}
+		engine := &msSqlMacroEngine{}
 		query := &tsdb.Query{
 			Model: simplejson.New(),
 		}

+ 25 - 243
pkg/tsdb/mssql/mssql.go

@@ -1,49 +1,40 @@
 package mssql
 
 import (
-	"container/list"
-	"context"
 	"database/sql"
 	"fmt"
 	"strconv"
 	"strings"
 
-	"math"
-
 	_ "github.com/denisenkom/go-mssqldb"
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type MssqlQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("mssql", NewMssqlQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("mssql", newMssqlQueryEndpoint)
 }
 
-func NewMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &MssqlQueryEndpoint{
-		log: log.New("tsdb.mssql"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewMssqlMacroEngine(),
-	}
+func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.mssql")
 
 	cnnstr := generateConnectionString(datasource)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("mssql", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "mssql",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		MetricColumnTypes: []string{"VARCHAR", "CHAR", "NVARCHAR", "NCHAR"},
+	}
+
+	rowTransformer := mssqlRowTransformer{
+		log: logger,
 	}
 
-	return endpoint, nil
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMssqlMacroEngine(), logger)
 }
 
 func generateConnectionString(datasource *models.DataSource) string {
@@ -70,71 +61,16 @@ func generateConnectionString(datasource *models.DataSource) string {
 	)
 }
 
-// Query is the main function for the MssqlQueryEndpoint
-func (e *MssqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
-}
-
-func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	columnCount := len(columnNames)
-
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, columnCount),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-
-		// check if there is a column named time
-		switch name {
-		case "time":
-			timeIndex = i
-		}
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("MsSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(columnTypes, rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds
-		// to make native mssql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-		table.Rows = append(table.Rows, values)
-	}
-
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
+type mssqlRowTransformer struct {
+	log log.Logger
 }
 
-func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
-	values := make([]interface{}, len(types))
-	valuePtrs := make([]interface{}, len(types))
+func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
+	valuePtrs := make([]interface{}, len(columnTypes))
 
-	for i, stype := range types {
-		e.log.Debug("type", "type", stype)
+	for i, stype := range columnTypes {
+		t.log.Debug("type", "type", stype)
 		valuePtrs[i] = &values[i]
 	}
 
@@ -144,17 +80,17 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.
 
 	// convert types not handled by denisenkom/go-mssqldb
 	// unhandled types are returned as []byte
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		if value, ok := values[i].([]byte); ok {
-			switch types[i].DatabaseTypeName() {
+			switch columnTypes[i].DatabaseTypeName() {
 			case "MONEY", "SMALLMONEY", "DECIMAL":
 				if v, err := strconv.ParseFloat(string(value), 64); err == nil {
 					values[i] = v
 				} else {
-					e.log.Debug("Rows", "Error converting numeric to float", value)
+					t.log.Debug("Rows", "Error converting numeric to float", value)
 				}
 			default:
-				e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value)
+				t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
 				values[i] = string(value)
 			}
 		}
@@ -162,157 +98,3 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.
 
 	return values, nil
 }
-
-func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "VARCHAR", "CHAR", "NVARCHAR", "NCHAR":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("MSSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(columnTypes, rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type CHAR, VARCHAR, NCHAR or NVARCHAR. metric column name: %s type: %s but datatype is %T", columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}

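The mssql.go rewrite above, mirrored for mysql and postgres further down, moves the shared query loop into `pkg/tsdb/sql_engine.go` and leaves each driver with little more than a row transformer. A minimal sketch of that shape, with stand-in interfaces rather than Grafana's actual types:

```go
package main

import (
	"fmt"
	"strconv"
)

// rowTransformer stands in for the driver-specific hook the refactor
// leaves behind: the shared engine walks the rows, and the driver only
// converts values its database driver hands back as []byte.
type rowTransformer interface {
	Transform(columnTypes []string, row []interface{}) ([]interface{}, error)
}

type mssqlTransformer struct{}

func (mssqlTransformer) Transform(columnTypes []string, row []interface{}) ([]interface{}, error) {
	for i, v := range row {
		b, ok := v.([]byte)
		if !ok {
			continue
		}
		switch columnTypes[i] {
		case "MONEY", "SMALLMONEY", "DECIMAL":
			if f, err := strconv.ParseFloat(string(b), 64); err == nil {
				row[i] = f // numeric types arrive as []byte and are parsed
			}
		default:
			row[i] = string(b) // unknown types fall back to strings
		}
	}
	return row, nil
}

func main() {
	var t rowTransformer = mssqlTransformer{}
	out, _ := t.Transform([]string{"DECIMAL"}, []interface{}{[]byte("3.14")})
	fmt.Printf("%T %v\n", out[0], out[0]) // float64 3.14
}
```
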
+ 58 - 49
pkg/tsdb/mssql/mssql_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
 	. "github.com/smartystreets/goconvey/convey"
@@ -19,8 +20,9 @@ import (
 // The tests require a MSSQL db named grafanatest and a user/password grafana/Password!
 // Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a
 // preconfigured MSSQL server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are visualized as expected, see
+// devenv/README.md for setup instructions.
 // If needed, change the variable below to the IP address of the database.
 var serverIP = "localhost"
 
@@ -28,19 +30,25 @@ func TestMSSQL(t *testing.T) {
 	SkipConvey("MSSQL", t, func() {
 		x := InitMSSQLTestDB(t)
 
-		endpoint := &MssqlQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewMssqlMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.mssql"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newMssqlQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			sql := `
 					IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL
@@ -602,6 +610,31 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
 			})
 
+			Convey("When doing a metric query with metric column and multiple value columns", func() {
+				query := &tsdb.TsdbQuery{
+					Queries: []*tsdb.Query{
+						{
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": "SELECT $__timeEpoch(time), measurement, valueOne, valueTwo FROM metric_values ORDER BY 1",
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+
+				So(len(queryResult.Series), ShouldEqual, 4)
+				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
+				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
+				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
+				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
+			})
+
 			Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
 				sql := `
 						IF object_id('sp_test_epoch') IS NOT NULL
@@ -627,21 +660,9 @@ func TestMSSQL(t *testing.T) {
 
 							SELECT
 								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
-								measurement + ' - value one' as metric,
-								avg(valueOne) as value
-							FROM
-								metric_values
-							WHERE
-								time BETWEEN DATEADD(s, @from, '1970-01-01') AND DATEADD(s, @to, '1970-01-01') AND
-								(@metric = 'ALL' OR measurement = @metric)
-							GROUP BY
-								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
-								measurement
-							UNION ALL
-							SELECT
-								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
-								measurement + ' - value two' as metric,
-								avg(valueTwo) as value
+								measurement as metric,
+								avg(valueOne) as valueOne,
+								avg(valueTwo) as valueTwo
 							FROM
 								metric_values
 							WHERE
@@ -684,10 +705,10 @@ func TestMSSQL(t *testing.T) {
 					So(queryResult.Error, ShouldBeNil)
 
 					So(len(queryResult.Series), ShouldEqual, 4)
-					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
-					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
-					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
-					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
+					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
+					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
+					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
+					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
 				})
 			})
 
@@ -716,21 +737,9 @@ func TestMSSQL(t *testing.T) {
 
 							SELECT
 								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
-								measurement + ' - value one' as metric,
-								avg(valueOne) as value
-							FROM
-								metric_values
-							WHERE
-								time BETWEEN @from AND @to AND
-								(@metric = 'ALL' OR measurement = @metric)
-							GROUP BY
-								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
-								measurement
-							UNION ALL
-							SELECT
-								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
-								measurement + ' - value two' as metric,
-								avg(valueTwo) as value
+								measurement as metric,
+								avg(valueOne) as valueOne,
+								avg(valueTwo) as valueTwo
 							FROM
 								metric_values
 							WHERE
@@ -773,10 +782,10 @@ func TestMSSQL(t *testing.T) {
 					So(queryResult.Error, ShouldBeNil)
 
 					So(len(queryResult.Series), ShouldEqual, 4)
-					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
-					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
-					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
-					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
+					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
+					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
+					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
+					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
 				})
 			})
 		})
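
The series names asserted above ("Metric A valueOne", "Metric A valueTwo", …) follow the naming rule this commit introduces in pkg/tsdb/sql_engine.go further down: when a result set contains a metric column plus more than one value column, the metric value becomes a prefix for each value column name. A minimal sketch of that rule (illustrative only, not code from the diff):

```go
package main

import "fmt"

// seriesName mirrors the rule in the shared transformToTimeSeries: with a
// metric column and multiple value columns, series are named
// "<metric value> <value column>"; otherwise the metric value stands alone.
func seriesName(metricPrefix bool, metricValue, valueColumn string) string {
	if metricPrefix {
		return metricValue + " " + valueColumn
	}
	return metricValue
}

func main() {
	fmt.Println(seriesName(true, "Metric A", "valueOne")) // Metric A valueOne
	fmt.Println(seriesName(true, "Metric B", "valueTwo")) // Metric B valueTwo
}
```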

+ 19 - 19
pkg/tsdb/mysql/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type MySqlMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type mySqlMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewMysqlMacroEngine() tsdb.SqlMacroEngine {
-	return &MySqlMacroEngine{}
+func newMysqlMacroEngine() tsdb.SqlMacroEngine {
+	return &mySqlMacroEngine{}
 }
 
-func (m *MySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__timeEpoch", "__time":
 		if len(args) == 0 {
@@ -78,11 +78,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -92,16 +92,16 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -109,11 +109,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}
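
The rename to an unexported mySqlMacroEngine changes no expansions. For orientation, a hedged snippet of what Interpolate produces for the grouping macro; the expected string reads straight off evaluateMacro above, and the tsdb.NewTimeRange usage is assumed from the existing macro tests:

```go
// Inside package mysql; a sketch, not part of the diff.
func ExampleTimeGroupExpansion() {
	engine := newMysqlMacroEngine()
	query := &tsdb.Query{Model: simplejson.New()}
	timeRange := tsdb.NewTimeRange("5m", "now")

	// '5m' parses to 300s, so the macro buckets by integer division.
	sql, _ := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time, '5m')")
	fmt.Println(sql)
	// Output: GROUP BY UNIX_TIMESTAMP(time) DIV 300 * 300
}
```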

+ 1 - 1
pkg/tsdb/mysql/macros_test.go

@@ -12,7 +12,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := &MySqlMacroEngine{}
+		engine := &mySqlMacroEngine{}
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {

+ 22 - 243
pkg/tsdb/mysql/mysql.go

@@ -1,39 +1,24 @@
 package mysql
 
 import (
-	"container/list"
-	"context"
 	"database/sql"
 	"fmt"
-	"math"
 	"reflect"
 	"strconv"
 
 	"github.com/go-sql-driver/mysql"
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type MysqlQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("mysql", newMysqlQueryEndpoint)
 }
 
-func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &MysqlQueryEndpoint{
-		log: log.New("tsdb.mysql"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewMysqlMacroEngine(),
-	}
+func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.mysql")
 
 	cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true",
 		datasource.User,
@@ -42,85 +27,35 @@ func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin
 		datasource.Url,
 		datasource.Database,
 	)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("mysql", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "mysql",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		TimeColumnNames:   []string{"time", "time_sec"},
+		MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"},
 	}
 
-	return endpoint, nil
-}
-
-// Query is the main function for the MysqlExecutor
-func (e *MysqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
-}
-
-func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	columnCount := len(columnNames)
-
-	if err != nil {
-		return err
+	rowTransformer := mysqlRowTransformer{
+		log: logger,
 	}
 
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, columnCount),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	// check if there is a column named time
-	for i, col := range columnNames {
-		switch col {
-		case "time", "time_sec":
-			timeIndex = i
-		}
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		table.Rows = append(table.Rows, values)
-	}
-
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(), logger)
 }
 
-func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
-	types, err := rows.ColumnTypes()
-	if err != nil {
-		return nil, err
-	}
+type mysqlRowTransformer struct {
+	log log.Logger
+}
 
-	values := make([]interface{}, len(types))
+func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
 
 	for i := range values {
-		scanType := types[i].ScanType()
+		scanType := columnTypes[i].ScanType()
 		values[i] = reflect.New(scanType).Interface()
 
-		if types[i].DatabaseTypeName() == "BIT" {
+		if columnTypes[i].DatabaseTypeName() == "BIT" {
 			values[i] = new([]byte)
 		}
 	}
@@ -129,7 +64,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 		return nil, err
 	}
 
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		typeName := reflect.ValueOf(values[i]).Type().String()
 
 		switch typeName {
@@ -158,7 +93,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 			}
 		}
 
-		if types[i].DatabaseTypeName() == "DECIMAL" {
+		if columnTypes[i].DatabaseTypeName() == "DECIMAL" {
 			f, err := strconv.ParseFloat(values[i].(string), 64)
 
 			if err == nil {
@@ -171,159 +106,3 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 
 	return values, nil
 }
-
-func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time", "time_sec":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time or time_sec")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time/time_sec, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-			rowCount++
-
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}
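
Most of the deleted code above is not gone, only centralized: transformToTimeSeries and transformToTable now live once in pkg/tsdb/sql_engine.go (further down in this diff), and each driver contributes only type conversion via the new SqlTableRowTransformer interface. A skeletal transformer, to show the shape of the contract (hypothetical, simplified from the postgres one below):

```go
type exampleRowTransformer struct {
	log log.Logger
}

// Transform scans the current row into a generic value slice; time
// conversion, series grouping and gap filling all happen in the shared
// engine, not here.
func (t *exampleRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
	values := make([]interface{}, len(columnTypes))
	valuePtrs := make([]interface{}, len(columnTypes))
	for i := range values {
		valuePtrs[i] = &values[i]
	}
	if err := rows.Scan(valuePtrs...); err != nil {
		return nil, err
	}
	return values, nil
}
```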

+ 44 - 11
pkg/tsdb/mysql/mysql_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
@@ -21,8 +22,9 @@ import (
 // The tests require a MySQL db named grafana_ds_tests and a user/password grafana/password
 // Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a
 // preconfigured MySQL server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are visualized as expected; see
+// devenv/README.md for setup instructions.
 func TestMySQL(t *testing.T) {
 	// change to true to run the MySQL tests
 	runMySqlTests := false
@@ -35,19 +37,25 @@ func TestMySQL(t *testing.T) {
 	Convey("MySQL", t, func() {
 		x := InitMySQLTestDB(t)
 
-		endpoint := &MysqlQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewMysqlMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.mysql"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newMysqlQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists {
 				So(err, ShouldBeNil)
@@ -626,6 +634,31 @@ func TestMySQL(t *testing.T) {
 				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
 			})
 
+			Convey("When doing a metric query with metric column and multiple value columns", func() {
+				query := &tsdb.TsdbQuery{
+					Queries: []*tsdb.Query{
+						{
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": `SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values ORDER BY 1,2`,
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+
+				So(len(queryResult.Series), ShouldEqual, 4)
+				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
+				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
+				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
+				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
+			})
+
 			Convey("When doing a metric query grouping by time should return correct series", func() {
 				query := &tsdb.TsdbQuery{
 					Queries: []*tsdb.Query{

+ 19 - 19
pkg/tsdb/postgres/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type PostgresMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type postgresMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewPostgresMacroEngine() tsdb.SqlMacroEngine {
-	return &PostgresMacroEngine{}
+func newPostgresMacroEngine() tsdb.SqlMacroEngine {
+	return &postgresMacroEngine{}
 }
 
-func (m *PostgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__time":
 		if len(args) == 0 {
@@ -83,11 +83,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
@@ -97,16 +97,16 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -114,11 +114,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}

+ 1 - 1
pkg/tsdb/postgres/macros_test.go

@@ -12,7 +12,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := NewPostgresMacroEngine()
+		engine := newPostgresMacroEngine()
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {

+ 32 - 245
pkg/tsdb/postgres/postgres.go

@@ -1,46 +1,38 @@
 package postgres
 
 import (
-	"container/list"
-	"context"
-	"fmt"
-	"math"
+	"database/sql"
 	"net/url"
 	"strconv"
 
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type PostgresQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("postgres", NewPostgresQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("postgres", newPostgresQueryEndpoint)
 }
 
-func NewPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &PostgresQueryEndpoint{
-		log: log.New("tsdb.postgres"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewPostgresMacroEngine(),
-	}
+func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.postgres")
 
 	cnnstr := generateConnectionString(datasource)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("postgres", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "postgres",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		MetricColumnTypes: []string{"UNKNOWN", "TEXT", "VARCHAR", "CHAR"},
 	}
 
-	return endpoint, nil
+	rowTransformer := postgresRowTransformer{
+		log: logger,
+	}
+
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger)
 }
 
 func generateConnectionString(datasource *models.DataSource) string {
@@ -53,74 +45,25 @@ func generateConnectionString(datasource *models.DataSource) string {
 	}
 
 	sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full")
-	u := &url.URL{Scheme: "postgres", User: url.UserPassword(datasource.User, password), Host: datasource.Url, Path: datasource.Database, RawQuery: "sslmode=" + sslmode}
-	return u.String()
-}
-
-func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
-}
-
-func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, len(columnNames)),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	// check if there is a column named time
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		}
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native postgres datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		table.Rows = append(table.Rows, values)
+	u := &url.URL{
+		Scheme:   "postgres",
+		User:     url.UserPassword(datasource.User, password),
+		Host:     datasource.Url,
+		Path:     datasource.Database,
+		RawQuery: "sslmode=" + url.QueryEscape(sslmode),
+	}
 
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
+	return u.String()
 }
 
-func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
-	types, err := rows.ColumnTypes()
-	if err != nil {
-		return nil, err
-	}
+type postgresRowTransformer struct {
+	log log.Logger
+}
 
-	values := make([]interface{}, len(types))
-	valuePtrs := make([]interface{}, len(types))
+func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
+	valuePtrs := make([]interface{}, len(columnTypes))
 
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		valuePtrs[i] = &values[i]
 	}
 
@@ -130,20 +73,20 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues,
 
 	// convert types not handled by lib/pq
 	// unhandled types are returned as []byte
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		if value, ok := values[i].([]byte); ok {
-			switch types[i].DatabaseTypeName() {
+			switch columnTypes[i].DatabaseTypeName() {
 			case "NUMERIC":
 				if v, err := strconv.ParseFloat(string(value), 64); err == nil {
 					values[i] = v
 				} else {
-					e.log.Debug("Rows", "Error converting numeric to float", value)
+					t.log.Debug("Rows", "Error converting numeric to float", value)
 				}
 			case "UNKNOWN", "CIDR", "INET", "MACADDR":
 				// char literals have type UNKNOWN
 				values[i] = string(value)
 			default:
-				e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value)
+				t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
 				values[i] = string(value)
 			}
 		}
@@ -151,159 +94,3 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues,
 
 	return values, nil
 }
-
-func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "UNKNOWN", "TEXT", "VARCHAR", "CHAR":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-			rowCount++
-
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}
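
Besides the move to the shared engine, generateConnectionString now escapes the sslmode value. A standalone illustration (invented values) of what url.QueryEscape buys here:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Unescaped, a malformed or hostile sslmode value could smuggle extra
	// parameters into the DSN; escaped, it stays a single opaque value.
	sslmode := "disable&connect_timeout=1"
	u := &url.URL{
		Scheme:   "postgres",
		User:     url.UserPassword("grafana", "secret"),
		Host:     "localhost:5432",
		Path:     "grafana",
		RawQuery: "sslmode=" + url.QueryEscape(sslmode),
	}
	fmt.Println(u.String())
	// postgres://grafana:secret@localhost:5432/grafana?sslmode=disable%26connect_timeout%3D1
}
```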

+ 44 - 11
pkg/tsdb/postgres/postgres_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
@@ -22,8 +23,9 @@ import (
 // The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest!
 // Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
 // preconfigured Postgres server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are visualized as expected; see
+// devenv/README.md for setup instructions.
 func TestPostgres(t *testing.T) {
 	// change to true to run the Postgres tests
 	runPostgresTests := false
@@ -36,19 +38,25 @@ func TestPostgres(t *testing.T) {
 	Convey("PostgreSQL", t, func() {
 		x := InitPostgresTestDB(t)
 
-		endpoint := &PostgresQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewPostgresMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.postgres"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newPostgresQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			sql := `
 				DROP TABLE IF EXISTS postgres_types;
@@ -560,6 +568,31 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
 			})
 
+			Convey("When doing a metric query with metric column and multiple value columns", func() {
+				query := &tsdb.TsdbQuery{
+					Queries: []*tsdb.Query{
+						{
+							Model: simplejson.NewFromAny(map[string]interface{}{
+								"rawSql": `SELECT $__timeEpoch(time), measurement as metric, "valueOne", "valueTwo" FROM metric_values ORDER BY 1`,
+								"format": "time_series",
+							}),
+							RefId: "A",
+						},
+					},
+				}
+
+				resp, err := endpoint.Query(nil, nil, query)
+				So(err, ShouldBeNil)
+				queryResult := resp.Results["A"]
+				So(queryResult.Error, ShouldBeNil)
+
+				So(len(queryResult.Series), ShouldEqual, 4)
+				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
+				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
+				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
+				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
+			})
+
 			Convey("When doing a metric query grouping by time should return correct series", func() {
 				query := &tsdb.TsdbQuery{
 					Queries: []*tsdb.Query{

+ 297 - 45
pkg/tsdb/sql_engine.go

@@ -1,11 +1,17 @@
 package tsdb
 
 import (
+	"container/list"
 	"context"
+	"database/sql"
 	"fmt"
+	"math"
+	"strings"
 	"sync"
 	"time"
 
+	"github.com/grafana/grafana/pkg/log"
+
 	"github.com/grafana/grafana/pkg/components/null"
 
 	"github.com/go-xorm/core"
@@ -14,27 +20,15 @@ import (
 	"github.com/grafana/grafana/pkg/models"
 )
 
-// SqlEngine is a wrapper class around xorm for relational database data sources.
-type SqlEngine interface {
-	InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error
-	Query(
-		ctx context.Context,
-		ds *models.DataSource,
-		query *TsdbQuery,
-		transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-		transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-	) (*Response, error)
-}
-
 // SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and
 // timeRange to be able to generate queries that use from and to.
 type SqlMacroEngine interface {
 	Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error)
 }
 
-type DefaultSqlEngine struct {
-	MacroEngine SqlMacroEngine
-	XormEngine  *xorm.Engine
+// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
+type SqlTableRowTransformer interface {
+	Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error)
 }
 
 type engineCacheType struct {
@@ -48,69 +42,98 @@ var engineCache = engineCacheType{
 	versions: make(map[int64]int),
 }
 
-// InitEngine creates the db connection and inits the xorm engine or loads it from the engine cache
-func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error {
+var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
+	return xorm.NewEngine(driverName, connectionString)
+}
+
+type sqlQueryEndpoint struct {
+	macroEngine       SqlMacroEngine
+	rowTransformer    SqlTableRowTransformer
+	engine            *xorm.Engine
+	timeColumnNames   []string
+	metricColumnTypes []string
+	log               log.Logger
+}
+
+type SqlQueryEndpointConfiguration struct {
+	DriverName        string
+	Datasource        *models.DataSource
+	ConnectionString  string
+	TimeColumnNames   []string
+	MetricColumnTypes []string
+}
+
+var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) {
+	queryEndpoint := sqlQueryEndpoint{
+		rowTransformer:  rowTransformer,
+		macroEngine:     macroEngine,
+		timeColumnNames: []string{"time"},
+		log:             log,
+	}
+
+	if len(config.TimeColumnNames) > 0 {
+		queryEndpoint.timeColumnNames = config.TimeColumnNames
+	}
+
+	if len(config.MetricColumnTypes) > 0 {
+		queryEndpoint.metricColumnTypes = config.MetricColumnTypes
+	}
+
 	engineCache.Lock()
 	defer engineCache.Unlock()
 
-	if engine, present := engineCache.cache[dsInfo.Id]; present {
-		if version := engineCache.versions[dsInfo.Id]; version == dsInfo.Version {
-			e.XormEngine = engine
-			return nil
+	if engine, present := engineCache.cache[config.Datasource.Id]; present {
+		if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version {
+			queryEndpoint.engine = engine
+			return &queryEndpoint, nil
 		}
 	}
 
-	engine, err := xorm.NewEngine(driverName, cnnstr)
+	engine, err := NewXormEngine(config.DriverName, config.ConnectionString)
 	if err != nil {
-		return err
+		return nil, err
 	}
 
 	engine.SetMaxOpenConns(10)
 	engine.SetMaxIdleConns(10)
 
-	engineCache.versions[dsInfo.Id] = dsInfo.Version
-	engineCache.cache[dsInfo.Id] = engine
-	e.XormEngine = engine
+	engineCache.versions[config.Datasource.Id] = config.Datasource.Version
+	engineCache.cache[config.Datasource.Id] = engine
+	queryEndpoint.engine = engine
 
-	return nil
+	return &queryEndpoint, nil
 }
 
-// Query is a default implementation of the Query method for an SQL data source.
-// The caller of this function must implement transformToTimeSeries and transformToTable and
-// pass them in as parameters.
-func (e *DefaultSqlEngine) Query(
-	ctx context.Context,
-	dsInfo *models.DataSource,
-	tsdbQuery *TsdbQuery,
-	transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-	transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-) (*Response, error) {
+const rowLimit = 1000000
+
+// Query is the main function for the SqlQueryEndpoint
+func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) {
 	result := &Response{
 		Results: make(map[string]*QueryResult),
 	}
 
-	session := e.XormEngine.NewSession()
+	session := e.engine.NewSession()
 	defer session.Close()
 	db := session.DB()
 
 	for _, query := range tsdbQuery.Queries {
-		rawSql := query.Model.Get("rawSql").MustString()
-		if rawSql == "" {
+		rawSQL := query.Model.Get("rawSql").MustString()
+		if rawSQL == "" {
 			continue
 		}
 
 		queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
 		result.Results[query.RefId] = queryResult
 
-		rawSql, err := e.MacroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSql)
+		rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
 		if err != nil {
 			queryResult.Error = err
 			continue
 		}
 
-		queryResult.Meta.Set("sql", rawSql)
+		queryResult.Meta.Set("sql", rawSQL)
 
-		rows, err := db.Query(rawSql)
+		rows, err := db.Query(rawSQL)
 		if err != nil {
 			queryResult.Error = err
 			continue
@@ -122,13 +145,13 @@ func (e *DefaultSqlEngine) Query(
 
 		switch format {
 		case "time_series":
-			err := transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+			err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
 			if err != nil {
 				queryResult.Error = err
 				continue
 			}
 		case "table":
-			err := transformToTable(query, rows, queryResult, tsdbQuery)
+			err := e.transformToTable(query, rows, queryResult, tsdbQuery)
 			if err != nil {
 				queryResult.Error = err
 				continue
@@ -139,6 +162,235 @@ func (e *DefaultSqlEngine) Query(
 	return result, nil
 }
 
+func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
+	columnNames, err := rows.Columns()
+	columnCount := len(columnNames)
+
+	if err != nil {
+		return err
+	}
+
+	rowCount := 0
+	timeIndex := -1
+
+	table := &Table{
+		Columns: make([]TableColumn, columnCount),
+		Rows:    make([]RowValues, 0),
+	}
+
+	for i, name := range columnNames {
+		table.Columns[i].Text = name
+
+		for _, tc := range e.timeColumnNames {
+			if name == tc {
+				timeIndex = i
+				break
+			}
+		}
+	}
+
+	columnTypes, err := rows.ColumnTypes()
+	if err != nil {
+		return err
+	}
+
+	for ; rows.Next(); rowCount++ {
+		if rowCount > rowLimit {
+			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
+		}
+
+		values, err := e.rowTransformer.Transform(columnTypes, rows)
+		if err != nil {
+			return err
+		}
+
+		// converts column named time to unix timestamp in milliseconds
+		// to make native mssql datetime types and epoch dates work in
+		// annotation and table queries.
+		ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+		table.Rows = append(table.Rows, values)
+	}
+
+	result.Tables = append(result.Tables, table)
+	result.Meta.Set("rowCount", rowCount)
+	return nil
+}
+
+func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
+	pointsBySeries := make(map[string]*TimeSeries)
+	seriesByQueryOrder := list.New()
+
+	columnNames, err := rows.Columns()
+	if err != nil {
+		return err
+	}
+
+	columnTypes, err := rows.ColumnTypes()
+	if err != nil {
+		return err
+	}
+
+	rowCount := 0
+	timeIndex := -1
+	metricIndex := -1
+	metricPrefix := false
+	var metricPrefixValue string
+
+	// check columns of resultset: a column named time is mandatory
+	// the first text column is treated as metric name unless a column named metric is present
+	for i, col := range columnNames {
+		for _, tc := range e.timeColumnNames {
+			if col == tc {
+				timeIndex = i
+				continue
+			}
+		}
+		switch col {
+		case "metric":
+			metricIndex = i
+		default:
+			if metricIndex == -1 {
+				columnType := columnTypes[i].DatabaseTypeName()
+
+				for _, mct := range e.metricColumnTypes {
+					e.log.Info(mct)
+					if columnType == mct {
+						metricIndex = i
+						continue
+					}
+				}
+			}
+		}
+	}
+
+	// use metric column as prefix with multiple value columns
+	if metricIndex != -1 && len(columnNames) > 3 {
+		metricPrefix = true
+	}
+
+	if timeIndex == -1 {
+		return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or "))
+	}
+
+	fillMissing := query.Model.Get("fill").MustBool(false)
+	var fillInterval float64
+	fillValue := null.Float{}
+	if fillMissing {
+		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
+		if !query.Model.Get("fillNull").MustBool(false) {
+			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
+			fillValue.Valid = true
+		}
+	}
+
+	for rows.Next() {
+		var timestamp float64
+		var value null.Float
+		var metric string
+
+		if rowCount > rowLimit {
+			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
+		}
+
+		values, err := e.rowTransformer.Transform(columnTypes, rows)
+		if err != nil {
+			return err
+		}
+
+		// converts column named time to unix timestamp in milliseconds to make
+		// native mysql datetime types and epoch dates work in
+		// annotation and table queries.
+		ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+
+		switch columnValue := values[timeIndex].(type) {
+		case int64:
+			timestamp = float64(columnValue)
+		case float64:
+			timestamp = columnValue
+		default:
+			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
+		}
+
+		if metricIndex >= 0 {
+			if columnValue, ok := values[metricIndex].(string); ok {
+				if metricPrefix {
+					metricPrefixValue = columnValue
+				} else {
+					metric = columnValue
+				}
+			} else {
+				return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
+			}
+		}
+
+		for i, col := range columnNames {
+			if i == timeIndex || i == metricIndex {
+				continue
+			}
+
+			if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+				return err
+			}
+
+			if metricIndex == -1 {
+				metric = col
+			} else if metricPrefix {
+				metric = metricPrefixValue + " " + col
+			}
+
+			series, exist := pointsBySeries[metric]
+			if !exist {
+				series = &TimeSeries{Name: metric}
+				pointsBySeries[metric] = series
+				seriesByQueryOrder.PushBack(metric)
+			}
+
+			if fillMissing {
+				var intervalStart float64
+				if !exist {
+					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
+				} else {
+					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
+				}
+
+				// align interval start
+				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
+
+				for i := intervalStart; i < timestamp; i += fillInterval {
+					series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
+					rowCount++
+				}
+			}
+
+			series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)})
+
+			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
+		}
+	}
+
+	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
+		key := elem.Value.(string)
+		result.Series = append(result.Series, pointsBySeries[key])
+
+		if fillMissing {
+			series := pointsBySeries[key]
+			// fill in values from last fetched value till interval end
+			intervalStart := series.Points[len(series.Points)-1][1].Float64
+			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
+
+			// align interval start
+			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
+			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
+				series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
+				rowCount++
+			}
+		}
+	}
+
+	result.Meta.Set("rowCount", rowCount)
+	return nil
+}
+
 // ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds
 // to make native datetime types and epoch dates work in annotation and table queries.
 func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
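
The consolidated transformToTimeSeries keeps the old per-driver fill semantics; the one subtle step is the alignment, which snaps each gap's start down to a multiple of fillInterval before synthesizing points. A worked example with invented timestamps:

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	fillInterval := 60000.0        // fillInterval in ms, e.g. $__timeGroup(time, '1m')
	lastPointTs := 1521032405000.0 // last fetched point
	nextRowTs := 1521032700000.0   // next row in the result set

	// Start one interval after the last point, snapped down to the grid.
	intervalStart := math.Floor((lastPointTs+fillInterval)/fillInterval) * fillInterval

	for ts := intervalStart; ts < nextRowTs; ts += fillInterval {
		fmt.Println("fill point at", int64(ts)) // one synthetic point per missing bucket
	}
	// Prints four aligned fill points: …460000, …520000, …580000, …640000.
}
```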

+ 125 - 0
public/app/containers/Explore/PromQueryField.jest.tsx

@@ -0,0 +1,125 @@
+import React from 'react';
+import Enzyme, { shallow } from 'enzyme';
+import Adapter from 'enzyme-adapter-react-16';
+
+Enzyme.configure({ adapter: new Adapter() });
+
+import PromQueryField from './PromQueryField';
+
+describe('PromQueryField typeahead handling', () => {
+  const defaultProps = {
+    request: () => ({ data: { data: [] } }),
+  };
+
+  it('returns default suggestions on empty context', () => {
+    const instance = shallow(<PromQueryField {...defaultProps} />).instance() as PromQueryField;
+    const result = instance.getTypeahead({ text: '', prefix: '', wrapperClasses: [] });
+    expect(result.context).toBeUndefined();
+    expect(result.refresher).toBeUndefined();
+    expect(result.suggestions.length).toEqual(2);
+  });
+
+  describe('range suggestions', () => {
+    it('returns range suggestions in range context', () => {
+      const instance = shallow(<PromQueryField {...defaultProps} />).instance() as PromQueryField;
+      const result = instance.getTypeahead({ text: '1', prefix: '1', wrapperClasses: ['context-range'] });
+      expect(result.context).toBe('context-range');
+      expect(result.refresher).toBeUndefined();
+      expect(result.suggestions).toEqual([
+        {
+          items: [{ label: '1m' }, { label: '5m' }, { label: '10m' }, { label: '30m' }, { label: '1h' }],
+          label: 'Range vector',
+        },
+      ]);
+    });
+  });
+
+  describe('metric suggestions', () => {
+    it('returns metrics suggestions by default', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} metrics={['foo', 'bar']} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({ text: 'a', prefix: 'a', wrapperClasses: [] });
+      expect(result.context).toBeUndefined();
+      expect(result.refresher).toBeUndefined();
+      expect(result.suggestions.length).toEqual(2);
+    });
+
+    it('returns default suggestions after a binary operator', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} metrics={['foo', 'bar']} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({ text: '*', prefix: '', wrapperClasses: [] });
+      expect(result.context).toBeUndefined();
+      expect(result.refresher).toBeUndefined();
+      expect(result.suggestions.length).toEqual(2);
+    });
+  });
+
+  describe('label suggestions', () => {
+    it('returns default label suggestions on label context and no metric', () => {
+      const instance = shallow(<PromQueryField {...defaultProps} />).instance() as PromQueryField;
+      const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] });
+      expect(result.context).toBe('context-labels');
+      expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]);
+    });
+
+    it('returns label suggestions on label context and metric', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({
+        text: 'job',
+        prefix: 'job',
+        wrapperClasses: ['context-labels'],
+        metric: 'foo',
+      });
+      expect(result.context).toBe('context-labels');
+      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
+    });
+
+    it('returns a refresher on label context and unavailable metric', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({
+        text: 'job',
+        prefix: 'job',
+        wrapperClasses: ['context-labels'],
+        metric: 'xxx',
+      });
+      expect(result.context).toBeUndefined();
+      expect(result.refresher).toBeInstanceOf(Promise);
+      expect(result.suggestions).toEqual([]);
+    });
+
+    it('returns label values on label context when given a metric and a label key', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} labelValues={{ foo: { bar: ['baz'] } }} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({
+        text: '=ba',
+        prefix: 'ba',
+        wrapperClasses: ['context-labels'],
+        metric: 'foo',
+        labelKey: 'bar',
+      });
+      expect(result.context).toBe('context-label-values');
+      expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]);
+    });
+
+    it('returns label suggestions on aggregation context and metric', () => {
+      const instance = shallow(
+        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
+      ).instance() as PromQueryField;
+      const result = instance.getTypeahead({
+        text: 'job',
+        prefix: 'job',
+        wrapperClasses: ['context-aggregation'],
+        metric: 'foo',
+      });
+      expect(result.context).toBe('context-aggregation');
+      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
+    });
+  });
+});

+ 340 - 0
public/app/containers/Explore/PromQueryField.tsx

@@ -0,0 +1,340 @@
+import _ from 'lodash';
+import React from 'react';
+
+// dom also includes Element polyfills
+import { getNextCharacter, getPreviousCousin } from './utils/dom';
+import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index';
+import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql';
+import RunnerPlugin from './slate-plugins/runner';
+import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus';
+
+import TypeaheadField, {
+  Suggestion,
+  SuggestionGroup,
+  TypeaheadInput,
+  TypeaheadFieldState,
+  TypeaheadOutput,
+} from './QueryField';
+
+const EMPTY_METRIC = '';
+const METRIC_MARK = 'metric';
+const PRISM_LANGUAGE = 'promql';
+
+export const wrapLabel = (label: string): Suggestion => ({ label });
+export const setFunctionMove = (suggestion: Suggestion): Suggestion => {
+  suggestion.move = -1;
+  return suggestion;
+};
+
+export function willApplySuggestion(
+  suggestion: string,
+  { typeaheadContext, typeaheadText }: TypeaheadFieldState
+): string {
+  // Modify suggestion based on context
+  switch (typeaheadContext) {
+    case 'context-labels': {
+      const nextChar = getNextCharacter();
+      if (!nextChar || nextChar === '}' || nextChar === ',') {
+        suggestion += '=';
+      }
+      break;
+    }
+
+    case 'context-label-values': {
+      // Always add quotes and remove existing ones instead
+      if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) {
+        suggestion = `"${suggestion}`;
+      }
+      if (getNextCharacter() !== '"') {
+        suggestion = `${suggestion}"`;
+      }
+      break;
+    }
+
+    default:
+  }
+  return suggestion;
+}
+
+interface PromQueryFieldProps {
+  initialQuery?: string | null;
+  labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...]
+  labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
+  metrics?: string[];
+  onPressEnter?: () => void;
+  onQueryChange?: (value: string) => void;
+  portalPrefix?: string;
+  request?: (url: string) => any;
+}
+
+interface PromQueryFieldState {
+  labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...]
+  labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
+  metrics: string[];
+}
+
+interface PromTypeaheadInput {
+  text: string;
+  prefix: string;
+  wrapperClasses: string[];
+  metric?: string;
+  labelKey?: string;
+}
+
+class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryFieldState> {
+  plugins: any[];
+
+  constructor(props, context) {
+    super(props, context);
+
+    this.plugins = [
+      RunnerPlugin({ handler: props.onPressEnter }),
+      PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }),
+    ];
+
+    this.state = {
+      labelKeys: props.labelKeys || {},
+      labelValues: props.labelValues || {},
+      metrics: props.metrics || [],
+    };
+  }
+
+  componentDidMount() {
+    this.fetchMetricNames();
+  }
+
+  onChangeQuery = value => {
+    // Send text change to parent
+    const { onQueryChange } = this.props;
+    if (onQueryChange) {
+      onQueryChange(value);
+    }
+  };
+
+  onReceiveMetrics = () => {
+    if (!this.state.metrics) {
+      return;
+    }
+    setPrismTokens(PRISM_LANGUAGE, METRIC_MARK, this.state.metrics);
+  };
+
+  onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => {
+    const { editorNode, prefix, text, wrapperNode } = typeahead;
+
+    // Get DOM-dependent context
+    const wrapperClasses = Array.from(wrapperNode.classList);
+    // Take first metric as lucky guess
+    const metricNode = editorNode.querySelector(`.${METRIC_MARK}`);
+    const metric = metricNode && metricNode.textContent;
+    const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name');
+    const labelKey = labelKeyNode && labelKeyNode.textContent;
+
+    const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey });
+
+    console.log('handleTypeahead', wrapperClasses, text, prefix, result.context);
+
+    return result;
+  };
+
+  // Keep this DOM-free for testing
+  getTypeahead(typeahead: PromTypeaheadInput): TypeaheadOutput {
+    const { prefix, wrapperClasses, metric, text } = typeahead;
+    // Determine candidates by CSS context
+    if (_.includes(wrapperClasses, 'context-range')) {
+      // Suggestions for metric[|]
+      return this.getRangeTypeahead();
+    } else if (_.includes(wrapperClasses, 'context-labels')) {
+      // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|}
+      return this.getLabelTypeahead(typeahead);
+    } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) {
+      return this.getAggregationTypeahead(typeahead);
+    } else if (
+      // Non-empty but not inside known token unless it's a metric
+      (prefix && !_.includes(wrapperClasses, 'token')) ||
+      prefix === metric ||
+      (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')'
+      text.match(/[+\-*/^%]/) // After binary operator
+    ) {
+      return this.getEmptyTypeahead();
+    }
+
+    return {
+      suggestions: [],
+    };
+  }
+
+  getEmptyTypeahead(): TypeaheadOutput {
+    const suggestions: SuggestionGroup[] = [];
+    suggestions.push({
+      prefixMatch: true,
+      label: 'Functions',
+      items: FUNCTIONS.map(setFunctionMove),
+    });
+
+    if (this.state.metrics) {
+      suggestions.push({
+        label: 'Metrics',
+        items: this.state.metrics.map(wrapLabel),
+      });
+    }
+    return { suggestions };
+  }
+
+  getRangeTypeahead(): TypeaheadOutput {
+    return {
+      context: 'context-range',
+      suggestions: [
+        {
+          label: 'Range vector',
+          items: [...RATE_RANGES].map(wrapLabel),
+        },
+      ],
+    };
+  }
+
+  getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput {
+    let refresher: Promise<any> = null;
+    const suggestions: SuggestionGroup[] = [];
+    const labelKeys = this.state.labelKeys[metric];
+    if (labelKeys) {
+      suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
+    } else {
+      refresher = this.fetchMetricLabels(metric);
+    }
+
+    return {
+      refresher,
+      suggestions,
+      context: 'context-aggregation',
+    };
+  }
+
+  getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput {
+    let context: string;
+    let refresher: Promise<any> = null;
+    const suggestions: SuggestionGroup[] = [];
+    if (metric) {
+      const labelKeys = this.state.labelKeys[metric];
+      if (labelKeys) {
+        if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
+          // Label values
+          if (labelKey) {
+            const labelValues = this.state.labelValues[metric][labelKey];
+            context = 'context-label-values';
+            suggestions.push({
+              label: 'Label values',
+              items: labelValues.map(wrapLabel),
+            });
+          }
+        } else {
+          // Label keys
+          context = 'context-labels';
+          suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
+        }
+      } else {
+        refresher = this.fetchMetricLabels(metric);
+      }
+    } else {
+      // Metric-independent label queries
+      const defaultKeys = ['job', 'instance'];
+      // Munge all keys that we have seen together
+      const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => {
+        return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1));
+      }, defaultKeys);
+      if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
+        // Label values
+        if (labelKey) {
+          if (this.state.labelValues[EMPTY_METRIC]) {
+            const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey];
+            context = 'context-label-values';
+            suggestions.push({
+              label: 'Label values',
+              items: labelValues.map(wrapLabel),
+            });
+          } else {
+            // Can only query label values for now (API to query keys is under development)
+            refresher = this.fetchLabelValues(labelKey);
+          }
+        }
+      } else {
+        // Label keys
+        context = 'context-labels';
+        suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
+      }
+    }
+    return { context, refresher, suggestions };
+  }
+
+  request = url => {
+    if (this.props.request) {
+      return this.props.request(url);
+    }
+    return fetch(url);
+  };
+
+  async fetchLabelValues(key) {
+    const url = `/api/v1/label/${key}/values`;
+    try {
+      const res = await this.request(url);
+      const body = await (res.data || res.json());
+      const pairs = this.state.labelValues[EMPTY_METRIC];
+      const values = {
+        ...pairs,
+        [key]: body.data,
+      };
+      const labelValues = {
+        ...this.state.labelValues,
+        [EMPTY_METRIC]: values,
+      };
+      this.setState({ labelValues });
+    } catch (e) {
+      console.error(e);
+    }
+  }
+
+  async fetchMetricLabels(name) {
+    const url = `/api/v1/series?match[]=${name}`;
+    try {
+      const res = await this.request(url);
+      const body = await (res.data || res.json());
+      const { keys, values } = processLabels(body.data);
+      const labelKeys = {
+        ...this.state.labelKeys,
+        [name]: keys,
+      };
+      const labelValues = {
+        ...this.state.labelValues,
+        [name]: values,
+      };
+      this.setState({ labelKeys, labelValues });
+    } catch (e) {
+      console.error(e);
+    }
+  }
+
+  async fetchMetricNames() {
+    const url = '/api/v1/label/__name__/values';
+    try {
+      const res = await this.request(url);
+      const body = await (res.data || res.json());
+      this.setState({ metrics: body.data }, this.onReceiveMetrics);
+    } catch (error) {
+      console.error(error);
+    }
+  }
+
+  render() {
+    return (
+      <TypeaheadField
+        additionalPlugins={this.plugins}
+        cleanText={cleanText}
+        initialValue={this.props.initialQuery}
+        onTypeahead={this.onTypeahead}
+        onWillApplySuggestion={willApplySuggestion}
+        onValueChanged={this.onChangeQuery}
+        placeholder="Enter a PromQL query"
+      />
+    );
+  }
+}
+
+export default PromQueryField;

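For context, a minimal usage sketch of the new component, wired to a hypothetical datasource proxy (the proxy path and datasource id are assumptions for illustration, not part of this commit):

    // Sketch only: renders PromQueryField against an assumed proxy endpoint.
    import React from 'react';
    import PromQueryField from './PromQueryField';

    const ExampleQueryEditor = () => (
      <PromQueryField
        initialQuery='rate(http_requests_total{job="api"}[5m])'
        onPressEnter={() => console.log('run query')}
        onQueryChange={query => console.log('query changed to', query)}
        // Hypothetical proxy path; a real caller passes its datasource's request
        request={url => fetch(`/api/datasources/proxy/1${url}`)}
      />
    );

    export default ExampleQueryEditor;
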
+ 231 - 314
public/app/containers/Explore/QueryField.tsx

@@ -1,106 +1,163 @@
+import _ from 'lodash';
 import React from 'react';
 import ReactDOM from 'react-dom';
-import { Value } from 'slate';
+import { Block, Change, Document, Text, Value } from 'slate';
 import { Editor } from 'slate-react';
 import Plain from 'slate-plain-serializer';
 
-// dom also includes Element polyfills
-import { getNextCharacter, getPreviousCousin } from './utils/dom';
 import BracesPlugin from './slate-plugins/braces';
 import ClearPlugin from './slate-plugins/clear';
 import NewlinePlugin from './slate-plugins/newline';
-import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index';
-import RunnerPlugin from './slate-plugins/runner';
-import debounce from './utils/debounce';
-import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus';
 
 import Typeahead from './Typeahead';
 
-const EMPTY_METRIC = '';
-const METRIC_MARK = 'metric';
 export const TYPEAHEAD_DEBOUNCE = 300;
 
-function flattenSuggestions(s) {
+function flattenSuggestions(s: any[]): any[] {
   return s ? s.reduce((acc, g) => acc.concat(g.items), []) : [];
 }
 
-export const getInitialValue = query =>
-  Value.fromJSON({
-    document: {
-      nodes: [
-        {
-          object: 'block',
-          type: 'paragraph',
-          nodes: [
-            {
-              object: 'text',
-              leaves: [
-                {
-                  text: query,
-                },
-              ],
-            },
-          ],
-        },
-      ],
-    },
+export const makeFragment = (text: string): Document => {
+  const lines = text.split('\n').map(line =>
+    Block.create({
+      type: 'paragraph',
+      nodes: [Text.create(line)],
+    })
+  );
+
+  const fragment = Document.create({
+    nodes: lines,
   });
+  return fragment;
+};
+
+export const getInitialValue = (value: string): Value => Value.create({ document: makeFragment(value) });
+
+export interface Suggestion {
+  /**
+   * The label of this completion item. By default
+   * this is also the text that is inserted when selecting
+   * this completion.
+   */
+  label: string;
+  /**
+   * The kind of this completion item. Based on the kind
+   * an icon is chosen by the editor.
+   */
+  kind?: string;
+  /**
+   * A human-readable string with additional information
+   * about this item, like type or symbol information.
+   */
+  detail?: string;
+  /**
+   * A human-readable string, can be Markdown, that represents a doc-comment.
+   */
+  documentation?: string;
+  /**
+   * A string that should be used when comparing this item
+   * with other items. When `falsy` the `label` is used.
+   */
+  sortText?: string;
+  /**
+   * A string that should be used when filtering a set of
+   * completion items. When `falsy` the `label` is used.
+   */
+  filterText?: string;
+  /**
+   * A string or snippet that should be inserted in a document when selecting
+   * this completion. When `falsy` the `label` is used.
+   */
+  insertText?: string;
+  /**
+   * Delete number of characters before the caret position,
+   * by default the letters from the beginning of the word.
+   */
+  deleteBackwards?: number;
+  /**
+   * Number of steps to move after the insertion, can be negative.
+   */
+  move?: number;
+}
 
-class Portal extends React.Component<any, any> {
-  node: any;
+export interface SuggestionGroup {
+  /**
+   * Label that will be displayed for all entries of this group.
+   */
+  label: string;
+  /**
+   * List of suggestions of this group.
+   */
+  items: Suggestion[];
+  /**
+   * If true, match only by prefix (and not mid-word).
+   */
+  prefixMatch?: boolean;
+  /**
+   * If true, do not filter items in this group based on the search.
+   */
+  skipFilter?: boolean;
+}
 
-  constructor(props) {
-    super(props);
-    const { index = 0, prefix = 'query' } = props;
-    this.node = document.createElement('div');
-    this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`);
-    document.body.appendChild(this.node);
-  }
+interface TypeaheadFieldProps {
+  additionalPlugins?: any[];
+  cleanText?: (text: string) => string;
+  initialValue: string | null;
+  onBlur?: () => void;
+  onFocus?: () => void;
+  onTypeahead?: (typeahead: TypeaheadInput) => TypeaheadOutput;
+  onValueChanged?: (value: Value) => void;
+  onWillApplySuggestion?: (suggestion: string, state: TypeaheadFieldState) => string;
+  placeholder?: string;
+  portalPrefix?: string;
+}
 
-  componentWillUnmount() {
-    document.body.removeChild(this.node);
-  }
+export interface TypeaheadFieldState {
+  suggestions: SuggestionGroup[];
+  typeaheadContext: string | null;
+  typeaheadIndex: number;
+  typeaheadPrefix: string;
+  typeaheadText: string;
+  value: Value;
+}
 
-  render() {
-    return ReactDOM.createPortal(this.props.children, this.node);
-  }
+export interface TypeaheadInput {
+  editorNode: Element;
+  prefix: string;
+  selection?: Selection;
+  text: string;
+  wrapperNode: Element;
+}
+
+export interface TypeaheadOutput {
+  context?: string;
+  refresher?: Promise<{}>;
+  suggestions: SuggestionGroup[];
 }
 
-class QueryField extends React.Component<any, any> {
-  menuEl: any;
-  plugins: any;
+class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldState> {
+  menuEl: HTMLElement | null;
+  plugins: any[];
   resetTimer: any;
 
   constructor(props, context) {
     super(props, context);
 
-    const { prismDefinition = {}, prismLanguage = 'promql' } = props;
-
-    this.plugins = [
-      BracesPlugin(),
-      ClearPlugin(),
-      RunnerPlugin({ handler: props.onPressEnter }),
-      NewlinePlugin(),
-      PluginPrism({ definition: prismDefinition, language: prismLanguage }),
-    ];
+    // Base plugins
+    this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins];
 
     this.state = {
-      labelKeys: {},
-      labelValues: {},
-      metrics: props.metrics || [],
       suggestions: [],
+      typeaheadContext: null,
       typeaheadIndex: 0,
       typeaheadPrefix: '',
-      value: getInitialValue(props.initialQuery || ''),
+      typeaheadText: '',
+      value: getInitialValue(props.initialValue || ''),
     };
   }
 
   componentDidMount() {
     this.updateMenu();
-
-    if (this.props.metrics === undefined) {
-      this.fetchMetricNames();
-    }
   }
 
   componentWillUnmount() {
@@ -112,12 +169,9 @@ class QueryField extends React.Component<any, any> {
   }
 
   componentWillReceiveProps(nextProps) {
-    if (nextProps.metrics && nextProps.metrics !== this.props.metrics) {
-      this.setState({ metrics: nextProps.metrics }, this.onMetricsReceived);
-    }
-    // initialQuery is null in case the user typed
-    if (nextProps.initialQuery !== null && nextProps.initialQuery !== this.props.initialQuery) {
-      this.setState({ value: getInitialValue(nextProps.initialQuery) });
+    // initialValue is null in case the user typed
+    if (nextProps.initialValue !== null && nextProps.initialValue !== this.props.initialValue) {
+      this.setState({ value: getInitialValue(nextProps.initialValue) });
     }
   }
 
@@ -125,48 +179,28 @@ class QueryField extends React.Component<any, any> {
     const changed = value.document !== this.state.value.document;
     this.setState({ value }, () => {
       if (changed) {
-        this.handleChangeQuery();
+        this.handleChangeValue();
       }
     });
 
-    window.requestAnimationFrame(this.handleTypeahead);
-  };
-
-  onMetricsReceived = () => {
-    if (!this.state.metrics) {
-      return;
-    }
-    setPrismTokens(this.props.prismLanguage, METRIC_MARK, this.state.metrics);
-
-    // Trigger re-render
-    window.requestAnimationFrame(() => {
-      // Bogus edit to trigger highlighting
-      const change = this.state.value
-        .change()
-        .insertText(' ')
-        .deleteBackward(1);
-      this.onChange(change);
-    });
-  };
-
-  request = url => {
-    if (this.props.request) {
-      return this.props.request(url);
+    if (changed) {
+      window.requestAnimationFrame(this.handleTypeahead);
     }
-    return fetch(url);
   };
 
-  handleChangeQuery = () => {
+  handleChangeValue = () => {
     // Send text change to parent
-    const { onQueryChange } = this.props;
-    if (onQueryChange) {
-      onQueryChange(Plain.serialize(this.state.value));
+    const { onValueChanged } = this.props;
+    if (onValueChanged) {
+      onValueChanged(Plain.serialize(this.state.value));
     }
   };
 
-  handleTypeahead = debounce(() => {
+  handleTypeahead = _.debounce(async () => {
     const selection = window.getSelection();
-    if (selection.anchorNode) {
+    const { cleanText, onTypeahead } = this.props;
+
+    if (onTypeahead && selection.anchorNode) {
       const wrapperNode = selection.anchorNode.parentElement;
       const editorNode = wrapperNode.closest('.slate-query-field');
       if (!editorNode || this.state.value.isBlurred) {
@@ -175,164 +209,96 @@ class QueryField extends React.Component<any, any> {
       }
 
       const range = selection.getRangeAt(0);
-      const text = selection.anchorNode.textContent;
       const offset = range.startOffset;
-      const prefix = cleanText(text.substr(0, offset));
-
-      // Determine candidates by context
-      const suggestionGroups = [];
-      const wrapperClasses = wrapperNode.classList;
-      let typeaheadContext = null;
-
-      // Take first metric as lucky guess
-      const metricNode = editorNode.querySelector(`.${METRIC_MARK}`);
-
-      if (wrapperClasses.contains('context-range')) {
-        // Rate ranges
-        typeaheadContext = 'context-range';
-        suggestionGroups.push({
-          label: 'Range vector',
-          items: [...RATE_RANGES],
-        });
-      } else if (wrapperClasses.contains('context-labels') && metricNode) {
-        const metric = metricNode.textContent;
-        const labelKeys = this.state.labelKeys[metric];
-        if (labelKeys) {
-          if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) {
-            // Label values
-            const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name');
-            if (labelKeyNode) {
-              const labelKey = labelKeyNode.textContent;
-              const labelValues = this.state.labelValues[metric][labelKey];
-              typeaheadContext = 'context-label-values';
-              suggestionGroups.push({
-                label: 'Label values',
-                items: labelValues,
-              });
-            }
-          } else {
-            // Label keys
-            typeaheadContext = 'context-labels';
-            suggestionGroups.push({ label: 'Labels', items: labelKeys });
-          }
-        } else {
-          this.fetchMetricLabels(metric);
-        }
-      } else if (wrapperClasses.contains('context-labels') && !metricNode) {
-        // Empty name queries
-        const defaultKeys = ['job', 'instance'];
-        // Munge all keys that we have seen together
-        const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => {
-          return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1));
-        }, defaultKeys);
-        if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) {
-          // Label values
-          const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name');
-          if (labelKeyNode) {
-            const labelKey = labelKeyNode.textContent;
-            if (this.state.labelValues[EMPTY_METRIC]) {
-              const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey];
-              typeaheadContext = 'context-label-values';
-              suggestionGroups.push({
-                label: 'Label values',
-                items: labelValues,
-              });
-            } else {
-              // Can only query label values for now (API to query keys is under development)
-              this.fetchLabelValues(labelKey);
-            }
-          }
-        } else {
-          // Label keys
-          typeaheadContext = 'context-labels';
-          suggestionGroups.push({ label: 'Labels', items: labelKeys });
-        }
-      } else if (metricNode && wrapperClasses.contains('context-aggregation')) {
-        typeaheadContext = 'context-aggregation';
-        const metric = metricNode.textContent;
-        const labelKeys = this.state.labelKeys[metric];
-        if (labelKeys) {
-          suggestionGroups.push({ label: 'Labels', items: labelKeys });
-        } else {
-          this.fetchMetricLabels(metric);
-        }
-      } else if (
-        (this.state.metrics && ((prefix && !wrapperClasses.contains('token')) || text.match(/[+\-*/^%]/))) ||
-        wrapperClasses.contains('context-function')
-      ) {
-        // Need prefix for metrics
-        typeaheadContext = 'context-metrics';
-        suggestionGroups.push({
-          label: 'Metrics',
-          items: this.state.metrics,
-        });
+      const text = selection.anchorNode.textContent;
+      let prefix = text.substr(0, offset);
+      if (cleanText) {
+        prefix = cleanText(prefix);
       }
 
-      let results = 0;
-      const filteredSuggestions = suggestionGroups.map(group => {
-        if (group.items) {
-          group.items = group.items.filter(c => c.length !== prefix.length && c.indexOf(prefix) > -1);
-          results += group.items.length;
-        }
-        return group;
+      const { suggestions, context, refresher } = onTypeahead({
+        editorNode,
+        prefix,
+        selection,
+        text,
+        wrapperNode,
       });
 
-      console.log('handleTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext);
-
-      this.setState({
-        typeaheadPrefix: prefix,
-        typeaheadContext,
-        typeaheadText: text,
-        suggestions: results > 0 ? filteredSuggestions : [],
-      });
-    }
-  }, TYPEAHEAD_DEBOUNCE);
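+      // Filter each group against the current prefix, drop the already-typed value and empty groups, and sort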
+      const filteredSuggestions = suggestions
+        .map(group => {
+          if (group.items) {
+            if (prefix) {
+              // Filter groups based on prefix
+              if (!group.skipFilter) {
+                group.items = group.items.filter(c => (c.filterText || c.label).length >= prefix.length);
+                if (group.prefixMatch) {
+                  group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) === 0);
+                } else {
+                  group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) > -1);
+                }
+              }
+              // Filter out the already typed value (prefix) unless it inserts custom text
+              group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix);
+            }
 
-  applyTypeahead(change, suggestion) {
-    const { typeaheadPrefix, typeaheadContext, typeaheadText } = this.state;
+            group.items = _.sortBy(group.items, item => item.sortText || item.label);
+          }
+          return group;
+        })
+        .filter(group => group.items && group.items.length > 0); // Filter out empty groups
 
-    // Modify suggestion based on context
-    switch (typeaheadContext) {
-      case 'context-labels': {
-        const nextChar = getNextCharacter();
-        if (!nextChar || nextChar === '}' || nextChar === ',') {
-          suggestion += '=';
+      this.setState(
+        {
+          suggestions: filteredSuggestions,
+          typeaheadPrefix: prefix,
+          typeaheadContext: context,
+          typeaheadText: text,
+        },
+        () => {
+          if (refresher) {
+            refresher.then(this.handleTypeahead).catch(e => console.error(e));
+          }
         }
-        break;
-      }
+      );
+    }
+  }, TYPEAHEAD_DEBOUNCE);
 
-      case 'context-label-values': {
-        // Always add quotes and remove existing ones instead
-        if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) {
-          suggestion = `"${suggestion}`;
-        }
-        if (getNextCharacter() !== '"') {
-          suggestion = `${suggestion}"`;
-        }
-        break;
-      }
+  applyTypeahead(change: Change, suggestion: Suggestion): Change {
+    const { cleanText, onWillApplySuggestion } = this.props;
+    const { typeaheadPrefix, typeaheadText } = this.state;
+    let suggestionText = suggestion.insertText || suggestion.label;
+    const move = suggestion.move || 0;
 
-      default:
+    if (onWillApplySuggestion) {
+      suggestionText = onWillApplySuggestion(suggestionText, { ...this.state });
     }
 
     this.resetTypeahead();
 
     // Remove the current, incomplete text and replace it with the selected suggestion
-    let backward = typeaheadPrefix.length;
-    const text = cleanText(typeaheadText);
+    const backward = suggestion.deleteBackwards || typeaheadPrefix.length;
+    const text = cleanText ? cleanText(typeaheadText) : typeaheadText;
     const suffixLength = text.length - typeaheadPrefix.length;
     const offset = typeaheadText.indexOf(typeaheadPrefix);
-    const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestion === typeaheadText);
+    const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestionText === typeaheadText);
     const forward = midWord ? suffixLength + offset : 0;
 
-    return (
-      change
-        // TODO this line breaks if cursor was moved left and length is longer than whole prefix
+    // If the suggestion contains newlines, insert it as a block fragment
+    if (suggestionText.match(/\n/)) {
+      const fragment = makeFragment(suggestionText);
+      return change
         .deleteBackward(backward)
         .deleteForward(forward)
-        .insertText(suggestion)
-        .focus()
-    );
+        .insertFragment(fragment)
+        .focus();
+    }
+
+    return change
+      .deleteBackward(backward)
+      .deleteForward(forward)
+      .insertText(suggestionText)
+      .move(move)
+      .focus();
   }
 
   onKeyDown = (event, change) => {
@@ -413,74 +379,6 @@ class QueryField extends React.Component<any, any> {
     });
   };
 
-  async fetchLabelValues(key) {
-    const url = `/api/v1/label/${key}/values`;
-    try {
-      const res = await this.request(url);
-      console.log(res);
-      const body = await (res.data || res.json());
-      const pairs = this.state.labelValues[EMPTY_METRIC];
-      const values = {
-        ...pairs,
-        [key]: body.data,
-      };
-      // const labelKeys = {
-      //   ...this.state.labelKeys,
-      //   [EMPTY_METRIC]: keys,
-      // };
-      const labelValues = {
-        ...this.state.labelValues,
-        [EMPTY_METRIC]: values,
-      };
-      this.setState({ labelValues }, this.handleTypeahead);
-    } catch (e) {
-      if (this.props.onRequestError) {
-        this.props.onRequestError(e);
-      } else {
-        console.error(e);
-      }
-    }
-  }
-
-  async fetchMetricLabels(name) {
-    const url = `/api/v1/series?match[]=${name}`;
-    try {
-      const res = await this.request(url);
-      const body = await (res.data || res.json());
-      const { keys, values } = processLabels(body.data);
-      const labelKeys = {
-        ...this.state.labelKeys,
-        [name]: keys,
-      };
-      const labelValues = {
-        ...this.state.labelValues,
-        [name]: values,
-      };
-      this.setState({ labelKeys, labelValues }, this.handleTypeahead);
-    } catch (e) {
-      if (this.props.onRequestError) {
-        this.props.onRequestError(e);
-      } else {
-        console.error(e);
-      }
-    }
-  }
-
-  async fetchMetricNames() {
-    const url = '/api/v1/label/__name__/values';
-    try {
-      const res = await this.request(url);
-      const body = await (res.data || res.json());
-      this.setState({ metrics: body.data }, this.onMetricsReceived);
-    } catch (error) {
-      if (this.props.onRequestError) {
-        this.props.onRequestError(error);
-      } else {
-        console.error(error);
-      }
-    }
-  }
-
   handleBlur = () => {
     const { onBlur } = this.props;
     // If we dont wait here, menu clicks wont work because the menu
@@ -498,7 +396,7 @@ class QueryField extends React.Component<any, any> {
     }
   };
 
-  handleClickMenu = item => {
+  onClickMenu = (item: Suggestion) => {
     // Manually triggering change
     const change = this.applyTypeahead(this.state.value.change(), item);
     this.onChange(change);
@@ -531,7 +429,7 @@ class QueryField extends React.Component<any, any> {
 
       // Write DOM
       requestAnimationFrame(() => {
-        menu.style.opacity = 1;
+        menu.style.opacity = '1';
         menu.style.top = `${rect.top + scrollY + rect.height + 4}px`;
         menu.style.left = `${rect.left + scrollX - 2}px`;
       });
@@ -554,17 +452,16 @@ class QueryField extends React.Component<any, any> {
     let selectedIndex = Math.max(this.state.typeaheadIndex, 0);
     const flattenedSuggestions = flattenSuggestions(suggestions);
     selectedIndex = selectedIndex % flattenedSuggestions.length || 0;
-    const selectedKeys = (flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []).map(
-      i => (typeof i === 'object' ? i.text : i)
-    );
+    const selectedItem: Suggestion | null =
+      flattenedSuggestions.length > 0 ? flattenedSuggestions[selectedIndex] : null;
 
     // Create typeahead in DOM root so we can later position it absolutely
     return (
       <Portal prefix={portalPrefix}>
         <Typeahead
           menuRef={this.menuRef}
-          selectedItems={selectedKeys}
-          onClickItem={this.handleClickMenu}
+          selectedItem={selectedItem}
+          onClickItem={this.onClickMenu}
           groupedItems={suggestions}
         />
       </Portal>
@@ -591,4 +488,24 @@ class QueryField extends React.Component<any, any> {
   }
 }
 
+class Portal extends React.Component<{ index?: number; prefix: string }, {}> {
+  node: HTMLElement;
+
+  constructor(props) {
+    super(props);
+    const { index = 0, prefix = 'query' } = props;
+    this.node = document.createElement('div');
+    this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`);
+    document.body.appendChild(this.node);
+  }
+
+  componentWillUnmount() {
+    document.body.removeChild(this.node);
+  }
+
+  render() {
+    return ReactDOM.createPortal(this.props.children, this.node);
+  }
+}
+
 export default QueryField;

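With this refactor, QueryField becomes a generic typeahead editor; all language knowledge now arrives through the `onTypeahead` and `onWillApplySuggestion` callbacks. A sketch of a custom provider against the new contract, using an invented keyword list:

    import { TypeaheadInput, TypeaheadOutput } from './QueryField';

    // Invented keyword list; a real provider would derive suggestions from its query language.
    const KEYWORDS = ['select', 'where', 'limit'];

    function onTypeahead({ prefix, wrapperNode }: TypeaheadInput): TypeaheadOutput {
      // Offer keywords only when typing outside an existing token
      if (prefix && !wrapperNode.classList.contains('token')) {
        return {
          context: 'context-keywords',
          suggestions: [{ label: 'Keywords', items: KEYWORDS.map(label => ({ label })) }],
        };
      }
      return { suggestions: [] };
    }
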
+ 1 - 5
public/app/containers/Explore/QueryRows.tsx

@@ -1,7 +1,6 @@
 import React, { PureComponent } from 'react';
 
-import promql from './slate-plugins/prism/promql';
-import QueryField from './QueryField';
+import QueryField from './PromQueryField';
 
 class QueryRow extends PureComponent<any, any> {
   constructor(props) {
@@ -62,9 +61,6 @@ class QueryRow extends PureComponent<any, any> {
             portalPrefix="explore"
             onPressEnter={this.handlePressEnter}
             onQueryChange={this.handleChangeQuery}
-            placeholder="Enter a PromQL query"
-            prismLanguage="promql"
-            prismDefinition={promql}
             request={request}
           />
         </div>

+ 39 - 22
public/app/containers/Explore/Typeahead.tsx

@@ -1,17 +1,26 @@
 import React from 'react';
 
-function scrollIntoView(el) {
+import { Suggestion, SuggestionGroup } from './QueryField';
+
+function scrollIntoView(el: HTMLElement) {
   if (!el || !el.offsetParent) {
     return;
   }
-  const container = el.offsetParent;
+  const container = el.offsetParent as HTMLElement;
   if (el.offsetTop > container.scrollTop + container.offsetHeight || el.offsetTop < container.scrollTop) {
     container.scrollTop = el.offsetTop - container.offsetTop;
   }
 }
 
-class TypeaheadItem extends React.PureComponent<any, any> {
-  el: any;
+interface TypeaheadItemProps {
+  isSelected: boolean;
+  item: Suggestion;
+  onClickItem: (item: Suggestion) => void;
+}
+
+class TypeaheadItem extends React.PureComponent<TypeaheadItemProps, {}> {
+  el: HTMLElement;
+
   componentDidUpdate(prevProps) {
     if (this.props.isSelected && !prevProps.isSelected) {
       scrollIntoView(this.el);
@@ -22,20 +31,30 @@ class TypeaheadItem extends React.PureComponent<any, any> {
     this.el = el;
   };
 
+  onClick = () => {
+    this.props.onClickItem(this.props.item);
+  };
+
   render() {
-    const { hint, isSelected, label, onClickItem } = this.props;
+    const { isSelected, item } = this.props;
     const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item';
-    const onClick = () => onClickItem(label);
     return (
-      <li ref={this.getRef} className={className} onClick={onClick}>
-        {label}
-        {hint && isSelected ? <div className="typeahead-item-hint">{hint}</div> : null}
+      <li ref={this.getRef} className={className} onClick={this.onClick}>
+        {item.detail || item.label}
+        {item.documentation && isSelected ? <div className="typeahead-item-hint">{item.documentation}</div> : null}
       </li>
     );
   }
 }
 
-class TypeaheadGroup extends React.PureComponent<any, any> {
+interface TypeaheadGroupProps {
+  items: Suggestion[];
+  label: string;
+  onClickItem: (item: Suggestion) => void;
+  selected: Suggestion;
+}
+
+class TypeaheadGroup extends React.PureComponent<TypeaheadGroupProps, {}> {
   render() {
     const { items, label, selected, onClickItem } = this.props;
     return (
@@ -43,16 +62,8 @@ class TypeaheadGroup extends React.PureComponent<any, any> {
         <div className="typeahead-group__title">{label}</div>
         <ul className="typeahead-group__list">
           {items.map(item => {
-            const text = typeof item === 'object' ? item.text : item;
-            const label = typeof item === 'object' ? item.display || item.text : item;
             return (
-              <TypeaheadItem
-                key={text}
-                onClickItem={onClickItem}
-                isSelected={selected.indexOf(text) > -1}
-                hint={item.hint}
-                label={label}
-              />
+              <TypeaheadItem key={item.label} onClickItem={onClickItem} isSelected={selected === item} item={item} />
             );
           })}
         </ul>
@@ -61,13 +72,19 @@ class TypeaheadGroup extends React.PureComponent<any, any> {
   }
 }
 
-class Typeahead extends React.PureComponent<any, any> {
+interface TypeaheadProps {
+  groupedItems: SuggestionGroup[];
+  menuRef: any;
+  selectedItem: Suggestion | null;
+  onClickItem: (item: Suggestion) => void;
+}
+class Typeahead extends React.PureComponent<TypeaheadProps, {}> {
   render() {
-    const { groupedItems, menuRef, selectedItems, onClickItem } = this.props;
+    const { groupedItems, menuRef, selectedItem, onClickItem } = this.props;
     return (
       <ul className="typeahead" ref={menuRef}>
         {groupedItems.map(g => (
-          <TypeaheadGroup key={g.label} onClickItem={onClickItem} selected={selectedItems} {...g} />
+          <TypeaheadGroup key={g.label} onClickItem={onClickItem} selected={selectedItem} {...g} />
         ))}
       </ul>
     );

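Typeahead now renders `Suggestion` objects instead of plain strings: `detail`, when present, replaces the label in the list, and `documentation` is shown as the hint for the selected item. Purely illustrative example data:

    import { SuggestionGroup } from './QueryField';

    const groupedItems: SuggestionGroup[] = [
      {
        label: 'Functions',
        prefixMatch: true, // only match items from the start of the typed prefix
        items: [
          {
            label: 'rate',
            insertText: 'rate()',
            detail: 'rate(v range-vector)', // shown in place of the label
            documentation: 'Per-second average rate of increase.', // hint for the selected item
            move: -1, // step back inside the parentheses after insertion
          },
        ],
      },
    ];
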
+ 359 - 58
public/app/containers/Explore/slate-plugins/prism/promql.ts

@@ -1,67 +1,368 @@
+/* tslint:disable max-line-length */
+
 export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without'];
 
 const AGGREGATION_OPERATORS = [
-  'sum',
-  'min',
-  'max',
-  'avg',
-  'stddev',
-  'stdvar',
-  'count',
-  'count_values',
-  'bottomk',
-  'topk',
-  'quantile',
+  {
+    label: 'sum',
+    insertText: 'sum()',
+    documentation: 'Calculate sum over dimensions',
+  },
+  {
+    label: 'min',
+    insertText: 'min()',
+    documentation: 'Select minimum over dimensions',
+  },
+  {
+    label: 'max',
+    insertText: 'max()',
+    documentation: 'Select maximum over dimensions',
+  },
+  {
+    label: 'avg',
+    insertText: 'avg()',
+    documentation: 'Calculate the average over dimensions',
+  },
+  {
+    label: 'stddev',
+    insertText: 'stddev()',
+    documentation: 'Calculate population standard deviation over dimensions',
+  },
+  {
+    label: 'stdvar',
+    insertText: 'stdvar()',
+    documentation: 'Calculate population standard variance over dimensions',
+  },
+  {
+    label: 'count',
+    insertText: 'count()',
+    documentation: 'Count number of elements in the vector',
+  },
+  {
+    label: 'count_values',
+    insertText: 'count_values()',
+    documentation: 'Count number of elements with the same value',
+  },
+  {
+    label: 'bottomk',
+    insertText: 'bottomk()',
+    documentation: 'Smallest k elements by sample value',
+  },
+  {
+    label: 'topk',
+    insertText: 'topk()',
+    documentation: 'Largest k elements by sample value',
+  },
+  {
+    label: 'quantile',
+    insertText: 'quantile()',
+    documentation: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions',
+  },
 ];
 
 export const FUNCTIONS = [
   ...AGGREGATION_OPERATORS,
-  'abs',
-  'absent',
-  'ceil',
-  'changes',
-  'clamp_max',
-  'clamp_min',
-  'count_scalar',
-  'day_of_month',
-  'day_of_week',
-  'days_in_month',
-  'delta',
-  'deriv',
-  'drop_common_labels',
-  'exp',
-  'floor',
-  'histogram_quantile',
-  'holt_winters',
-  'hour',
-  'idelta',
-  'increase',
-  'irate',
-  'label_replace',
-  'ln',
-  'log2',
-  'log10',
-  'minute',
-  'month',
-  'predict_linear',
-  'rate',
-  'resets',
-  'round',
-  'scalar',
-  'sort',
-  'sort_desc',
-  'sqrt',
-  'time',
-  'vector',
-  'year',
-  'avg_over_time',
-  'min_over_time',
-  'max_over_time',
-  'sum_over_time',
-  'count_over_time',
-  'quantile_over_time',
-  'stddev_over_time',
-  'stdvar_over_time',
+  {
+    insertText: 'abs()',
+    label: 'abs',
+    detail: 'abs(v instant-vector)',
+    documentation: 'Returns the input vector with all sample values converted to their absolute value.',
+  },
+  {
+    insertText: 'absent()',
+    label: 'absent',
+    detail: 'absent(v instant-vector)',
+    documentation:
+      'Returns an empty vector if the vector passed to it has any elements and a 1-element vector with the value 1 if the vector passed to it has no elements. This is useful for alerting on when no time series exist for a given metric name and label combination.',
+  },
+  {
+    insertText: 'ceil()',
+    label: 'ceil',
+    detail: 'ceil(v instant-vector)',
+    documentation: 'Rounds the sample values of all elements in `v` up to the nearest integer.',
+  },
+  {
+    insertText: 'changes()',
+    label: 'changes',
+    detail: 'changes(v range-vector)',
+    documentation:
+      'For each input time series, `changes(v range-vector)` returns the number of times its value has changed within the provided time range as an instant vector.',
+  },
+  {
+    insertText: 'clamp_max()',
+    label: 'clamp_max',
+    detail: 'clamp_max(v instant-vector, max scalar)',
+    documentation: 'Clamps the sample values of all elements in `v` to have an upper limit of `max`.',
+  },
+  {
+    insertText: 'clamp_min()',
+    label: 'clamp_min',
+    detail: 'clamp_min(v instant-vector, min scalar)',
+    documentation: 'Clamps the sample values of all elements in `v` to have a lower limit of `min`.',
+  },
+  {
+    insertText: 'count_scalar()',
+    label: 'count_scalar',
+    detail: 'count_scalar(v instant-vector)',
+    documentation:
+      'Returns the number of elements in a time series vector as a scalar. This is in contrast to the `count()` aggregation operator, which always returns a vector (an empty one if the input vector is empty) and allows grouping by labels via a `by` clause.',
+  },
+  {
+    insertText: 'day_of_month()',
+    label: 'day_of_month',
+    detail: 'day_of_month(v=vector(time()) instant-vector)',
+    documentation: 'Returns the day of the month for each of the given times in UTC. Returned values are from 1 to 31.',
+  },
+  {
+    insertText: 'day_of_week()',
+    label: 'day_of_week',
+    detail: 'day_of_week(v=vector(time()) instant-vector)',
+    documentation:
+      'Returns the day of the week for each of the given times in UTC. Returned values are from 0 to 6, where 0 means Sunday etc.',
+  },
+  {
+    insertText: 'days_in_month()',
+    label: 'days_in_month',
+    detail: 'days_in_month(v=vector(time()) instant-vector)',
+    documentation:
+      'Returns number of days in the month for each of the given times in UTC. Returned values are from 28 to 31.',
+  },
+  {
+    insertText: 'delta()',
+    label: 'delta',
+    detail: 'delta(v range-vector)',
+    documentation:
+      'Calculates the difference between the first and last value of each time series element in a range vector `v`, returning an instant vector with the given deltas and equivalent labels. The delta is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if the sample values are all integers.',
+  },
+  {
+    insertText: 'deriv()',
+    label: 'deriv',
+    detail: 'deriv(v range-vector)',
+    documentation:
+      'Calculates the per-second derivative of the time series in a range vector `v`, using simple linear regression.',
+  },
+  {
+    insertText: 'drop_common_labels()',
+    label: 'drop_common_labels',
+    detail: 'drop_common_labels(instant-vector)',
+    documentation: 'Drops all labels that have the same name and value across all series in the input vector.',
+  },
+  {
+    insertText: 'exp()',
+    label: 'exp',
+    detail: 'exp(v instant-vector)',
+    documentation:
+      'Calculates the exponential function for all elements in `v`.\nSpecial cases are:\n* `Exp(+Inf) = +Inf` \n* `Exp(NaN) = NaN`',
+  },
+  {
+    insertText: 'floor()',
+    label: 'floor',
+    detail: 'floor(v instant-vector)',
+    documentation: 'Rounds the sample values of all elements in `v` down to the nearest integer.',
+  },
+  {
+    insertText: 'histogram_quantile()',
+    label: 'histogram_quantile',
+    detail: 'histogram_quantile(φ float, b instant-vector)',
+    documentation:
+      'Calculates the φ-quantile (0 ≤ φ ≤ 1) from the buckets `b` of a histogram. The samples in `b` are the counts of observations in each bucket. Each sample must have a label `le` where the label value denotes the inclusive upper bound of the bucket. (Samples without such a label are silently ignored.) The histogram metric type automatically provides time series with the `_bucket` suffix and the appropriate labels.',
+  },
+  {
+    insertText: 'holt_winters()',
+    label: 'holt_winters',
+    detail: 'holt_winters(v range-vector, sf scalar, tf scalar)',
+    documentation:
+      'Produces a smoothed value for time series based on the range in `v`. The lower the smoothing factor `sf`, the more importance is given to old data. The higher the trend factor `tf`, the more trends in the data are considered. Both `sf` and `tf` must be between 0 and 1.',
+  },
+  {
+    insertText: 'hour()',
+    label: 'hour',
+    detail: 'hour(v=vector(time()) instant-vector)',
+    documentation: 'Returns the hour of the day for each of the given times in UTC. Returned values are from 0 to 23.',
+  },
+  {
+    insertText: 'idelta()',
+    label: 'idelta',
+    detail: 'idelta(v range-vector)',
+    documentation:
+      'Calculates the difference between the last two samples in the range vector `v`, returning an instant vector with the given deltas and equivalent labels.',
+  },
+  {
+    insertText: 'increase()',
+    label: 'increase',
+    detail: 'increase(v range-vector)',
+    documentation:
+      'Calculates the increase in the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. The increase is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if a counter increases only by integer increments.',
+  },
+  {
+    insertText: 'irate()',
+    label: 'irate',
+    detail: 'irate(v range-vector)',
+    documentation:
+      'Calculates the per-second instant rate of increase of the time series in the range vector. This is based on the last two data points. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.',
+  },
+  {
+    insertText: 'label_replace()',
+    label: 'label_replace',
+    detail: 'label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)',
+    documentation:
+      "For each timeseries in `v`, `label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)`  matches the regular expression `regex` against the label `src_label`.  If it matches, then the timeseries is returned with the label `dst_label` replaced by the expansion of `replacement`. `$1` is replaced with the first matching subgroup, `$2` with the second etc. If the regular expression doesn't match then the timeseries is returned unchanged.",
+  },
+  {
+    insertText: 'ln()',
+    label: 'ln',
+    detail: 'ln(v instant-vector)',
+    documentation:
+      'Calculates the natural logarithm for all elements in `v`.\nSpecial cases are:\n * `ln(+Inf) = +Inf`\n * `ln(0) = -Inf`\n * `ln(x < 0) = NaN`\n * `ln(NaN) = NaN`',
+  },
+  {
+    insertText: 'log2()',
+    label: 'log2',
+    detail: 'log2(v instant-vector)',
+    documentation:
+      'Calculates the binary logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.',
+  },
+  {
+    insertText: 'log10()',
+    label: 'log10',
+    detail: 'log10(v instant-vector)',
+    documentation:
+      'Calculates the decimal logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.',
+  },
+  {
+    insertText: 'minute()',
+    label: 'minute',
+    detail: 'minute(v=vector(time()) instant-vector)',
+    documentation:
+      'Returns the minute of the hour for each of the given times in UTC. Returned values are from 0 to 59.',
+  },
+  {
+    insertText: 'month()',
+    label: 'month',
+    detail: 'month(v=vector(time()) instant-vector)',
+    documentation:
+      'Returns the month of the year for each of the given times in UTC. Returned values are from 1 to 12, where 1 means January etc.',
+  },
+  {
+    insertText: 'predict_linear()',
+    label: 'predict_linear',
+    detail: 'predict_linear(v range-vector, t scalar)',
+    documentation:
+      'Predicts the value of time series `t` seconds from now, based on the range vector `v`, using simple linear regression.',
+  },
+  {
+    insertText: 'rate()',
+    label: 'rate',
+    detail: 'rate(v range-vector)',
+    documentation:
+      "Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.",
+  },
+  {
+    insertText: 'resets()',
+    label: 'resets',
+    detail: 'resets(v range-vector)',
+    documentation:
+      'For each input time series, `resets(v range-vector)` returns the number of counter resets within the provided time range as an instant vector. Any decrease in the value between two consecutive samples is interpreted as a counter reset.',
+  },
+  {
+    insertText: 'round()',
+    label: 'round',
+    detail: 'round(v instant-vector, to_nearest=1 scalar)',
+    documentation:
+      'Rounds the sample values of all elements in `v` to the nearest integer. Ties are resolved by rounding up. The optional `to_nearest` argument allows specifying the nearest multiple to which the sample values should be rounded. This multiple may also be a fraction.',
+  },
+  {
+    insertText: 'scalar()',
+    label: 'scalar',
+    detail: 'scalar(v instant-vector)',
+    documentation:
+      'Given a single-element input vector, `scalar(v instant-vector)` returns the sample value of that single element as a scalar. If the input vector does not have exactly one element, `scalar` will return `NaN`.',
+  },
+  {
+    insertText: 'sort()',
+    label: 'sort',
+    detail: 'sort(v instant-vector)',
+    documentation: 'Returns vector elements sorted by their sample values, in ascending order.',
+  },
+  {
+    insertText: 'sort_desc()',
+    label: 'sort_desc',
+    detail: 'sort_desc(v instant-vector)',
+    documentation: 'Returns vector elements sorted by their sample values, in descending order.',
+  },
+  {
+    insertText: 'sqrt()',
+    label: 'sqrt',
+    detail: 'sqrt(v instant-vector)',
+    documentation: 'Calculates the square root of all elements in `v`.',
+  },
+  {
+    insertText: 'time()',
+    label: 'time',
+    detail: 'time()',
+    documentation:
+      'Returns the number of seconds since January 1, 1970 UTC. Note that this does not actually return the current time, but the time at which the expression is to be evaluated.',
+  },
+  {
+    insertText: 'vector()',
+    label: 'vector',
+    detail: 'vector(s scalar)',
+    documentation: 'Returns the scalar `s` as a vector with no labels.',
+  },
+  {
+    insertText: 'year()',
+    label: 'year',
+    detail: 'year(v=vector(time()) instant-vector)',
+    documentation: 'Returns the year for each of the given times in UTC.',
+  },
+  {
+    insertText: 'avg_over_time()',
+    label: 'avg_over_time',
+    detail: 'avg_over_time(range-vector)',
+    documentation: 'The average value of all points in the specified interval.',
+  },
+  {
+    insertText: 'min_over_time()',
+    label: 'min_over_time',
+    detail: 'min_over_time(range-vector)',
+    documentation: 'The minimum value of all points in the specified interval.',
+  },
+  {
+    insertText: 'max_over_time()',
+    label: 'max_over_time',
+    detail: 'max_over_time(range-vector)',
+    documentation: 'The maximum value of all points in the specified interval.',
+  },
+  {
+    insertText: 'sum_over_time()',
+    label: 'sum_over_time',
+    detail: 'sum_over_time(range-vector)',
+    documentation: 'The sum of all values in the specified interval.',
+  },
+  {
+    insertText: 'count_over_time()',
+    label: 'count_over_time',
+    detail: 'count_over_time(range-vector)',
+    documentation: 'The count of all values in the specified interval.',
+  },
+  {
+    insertText: 'quantile_over_time()',
+    label: 'quantile_over_time',
+    detail: 'quantile_over_time(scalar, range-vector)',
+    documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.',
+  },
+  {
+    insertText: 'stddev_over_time()',
+    label: 'stddev_over_time',
+    detail: 'stddev_over_time(range-vector)',
+    documentation: 'The population standard deviation of the values in the specified interval.',
+  },
+  {
+    insertText: 'stdvar_over_time()',
+    label: 'stdvar_over_time',
+    detail: 'stdvar_over_time(range-vector)',
+    documentation: 'The population standard variance of the values in the specified interval.',
+  },
 ];
 
 const tokenizer = {
@@ -93,7 +394,7 @@ const tokenizer = {
       },
     },
   },
-  function: new RegExp(`\\b(?:${FUNCTIONS.join('|')})(?=\\s*\\()`, 'i'),
+  function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'),
   'context-range': [
     {
       pattern: /\[[^\]]*(?=])/, // [1m]

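Every completion item now follows the `Suggestion` shape, and the tokenizer derives its function regex from `FUNCTIONS.map(f => f.label)` at module load, so entries must exist before the tokenizer is built. A hypothetical extra entry, for illustration only:

    // Hypothetical entry, not part of this commit; shown only to illustrate the shape.
    const LAST_OVER_TIME = {
      insertText: 'last_over_time()',
      label: 'last_over_time',
      detail: 'last_over_time(range-vector)',
      documentation: 'The most recent point value in the specified interval.',
    };
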
+ 2 - 16
public/app/containers/Teams/TeamList.tsx

@@ -5,7 +5,7 @@ import PageHeader from 'app/core/components/PageHeader/PageHeader';
 import { NavStore } from 'app/stores/NavStore/NavStore';
 import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore';
 import { BackendSrv } from 'app/core/services/backend_srv';
-import appEvents from 'app/core/app_events';
+import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
 
 interface Props {
   nav: typeof NavStore.Type;
@@ -28,18 +28,6 @@ export class TeamList extends React.Component<Props, any> {
   }
 
   deleteTeam(team: ITeam) {
-    appEvents.emit('confirm-modal', {
-      title: 'Delete',
-      text: 'Are you sure you want to delete Team ' + team.name + '?',
-      yesText: 'Delete',
-      icon: 'fa-warning',
-      onConfirm: () => {
-        this.deleteTeamConfirmed(team);
-      },
-    });
-  }
-
-  deleteTeamConfirmed(team) {
     this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this));
   }
 
@@ -67,9 +55,7 @@ export class TeamList extends React.Component<Props, any> {
           <a href={teamUrl}>{team.memberCount}</a>
         </td>
         <td className="text-right">
-          <a onClick={() => this.deleteTeam(team)} className="btn btn-danger btn-small">
-            <i className="fa fa-remove" />
-          </a>
+          <DeleteButton onConfirmDelete={() => this.deleteTeam(team)} />
         </td>
       </tr>
     );

+ 44 - 0
public/app/core/components/DeleteButton/DeleteButton.jest.tsx

@@ -0,0 +1,44 @@
+import React from 'react';
+import DeleteButton from './DeleteButton';
+import { shallow } from 'enzyme';
+
+describe('DeleteButton', () => {
+  let wrapper;
+  let deleted;
+
+  beforeAll(() => {
+    deleted = false;
+
+    function deleteItem() {
+      deleted = true;
+    }
+    wrapper = shallow(<DeleteButton onConfirmDelete={() => deleteItem()} />);
+  });
+
+  it('should show confirm delete when clicked', () => {
+    expect(wrapper.state().showConfirm).toBe(false);
+    wrapper.find('.delete-button').simulate('click');
+    expect(wrapper.state().showConfirm).toBe(true);
+  });
+
+  it('should hide confirm delete when clicked', () => {
+    wrapper.find('.delete-button').simulate('click');
+    expect(wrapper.state().showConfirm).toBe(true);
+    wrapper
+      .find('.confirm-delete')
+      .find('.btn')
+      .at(0)
+      .simulate('click');
+    expect(wrapper.state().showConfirm).toBe(false);
+  });
+
+  it('should delete item when confirm delete is clicked', () => {
+    expect(deleted).toBe(false);
+    wrapper
+      .find('.confirm-delete')
+      .find('.btn')
+      .at(1)
+      .simulate('click');
+    expect(deleted).toBe(true);
+  });
+});

+ 66 - 0
public/app/core/components/DeleteButton/DeleteButton.tsx

@@ -0,0 +1,66 @@
+import React, { PureComponent } from 'react';
+
+export interface DeleteButtonProps {
+  onConfirmDelete();
+}
+
+export interface DeleteButtonStates {
+  showConfirm: boolean;
+}
+
+export default class DeleteButton extends PureComponent<DeleteButtonProps, DeleteButtonStates> {
+  state: DeleteButtonStates = {
+    showConfirm: false,
+  };
+
+  onClickDelete = event => {
+    if (event) {
+      event.preventDefault();
+    }
+
+    this.setState({
+      showConfirm: true,
+    });
+  };
+
+  onClickCancel = event => {
+    if (event) {
+      event.preventDefault();
+    }
+    this.setState({
+      showConfirm: false,
+    });
+  };
+
+  render() {
+    const onClickConfirm = this.props.onConfirmDelete;
+    let showConfirm;
+    let showDeleteButton;
+
+    if (this.state.showConfirm) {
+      showConfirm = 'show';
+      showDeleteButton = 'hide';
+    } else {
+      showConfirm = 'hide';
+      showDeleteButton = 'show';
+    }
+
+    return (
+      <span className="delete-button-container">
+        <a className={'delete-button ' + showDeleteButton + ' btn btn-danger btn-small'} onClick={this.onClickDelete}>
+          <i className="fa fa-remove" />
+        </a>
+        <span className="confirm-delete-container">
+          <span className={'confirm-delete ' + showConfirm}>
+            <a className="btn btn-small" onClick={this.onClickCancel}>
+              Cancel
+            </a>
+            <a className="btn btn-danger btn-small" onClick={onClickConfirm}>
+              Confirm Delete
+            </a>
+          </span>
+        </span>
+      </span>
+    );
+  }
+}

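Editor's note: the component above is consumed the way TeamList now does, with the caller supplying only the destructive callback while the two-step Cancel / Confirm Delete flow is handled internally via the showConfirm state. A minimal usage sketch (the handler and item names are illustrative):

    import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';

    // Renders the trash-can button; onConfirmDelete fires only after
    // the user clicks "Confirm Delete" in the expanded confirm step.
    <DeleteButton onConfirmDelete={() => removeItem(item.id)} />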
+ 1 - 1
public/app/core/components/Forms/Forms.tsx

@@ -12,7 +12,7 @@ export const Label: SFC<Props> = props => {
     <span className="gf-form-label width-10">
       <span>{props.children}</span>
       {props.tooltip && (
-        <Tooltip className="gf-form-help-icon--right-normal" placement="auto" content="hello">
+        <Tooltip className="gf-form-help-icon--right-normal" placement="auto" content={props.tooltip}>
           <i className="gicon gicon-question gicon--has-hover" />
         </Tooltip>
       )}

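Editor's note: this one-line fix replaces a hardcoded placeholder, so previously every Label tooltip rendered the literal string 'hello'; now the caller's tooltip prop reaches the Tooltip content. Illustrative usage (the label text is hypothetical):

    // The tooltip icon now shows whatever the caller passes:
    <Label tooltip="Email address used for the team avatar">Email</Label>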
+ 25 - 0
public/app/core/specs/backend_srv.jest.ts

@@ -0,0 +1,25 @@
+import { BackendSrv } from 'app/core/services/backend_srv';
+jest.mock('app/core/store');
+
+describe('backend_srv', function() {
+  let _httpBackend = options => {
+    if (options.url === 'gateway-error') {
+      return Promise.reject({ status: 502 });
+    }
+    return Promise.resolve({});
+  };
+
+  let _backendSrv = new BackendSrv(_httpBackend, {}, {}, {}, {});
+
+  describe('when handling errors', () => {
+    it('should return the http status code', async () => {
+      try {
+        await _backendSrv.datasourceRequest({
+          url: 'gateway-error',
+        });
+      } catch (err) {
+        expect(err.status).toBe(502);
+      }
+    });
+  });
+});

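Editor's note: one caveat with the try/catch pattern above is that if datasourceRequest unexpectedly resolves, the catch block never runs and the test passes without asserting anything. A hedged variant using Jest's expect.assertions guards against that:

    it('should return the http status code', async () => {
      expect.assertions(1); // fail if the assertion below is never reached
      try {
        await _backendSrv.datasourceRequest({ url: 'gateway-error' });
      } catch (err) {
        expect(err.status).toBe(502);
      }
    });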
+ 0 - 31
public/app/core/specs/backend_srv_specs.ts

@@ -1,31 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import 'app/core/services/backend_srv';
-
-describe('backend_srv', function() {
-  var _backendSrv;
-  var _httpBackend;
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.inject(function($httpBackend, $http, backendSrv) {
-      _httpBackend = $httpBackend;
-      _backendSrv = backendSrv;
-    })
-  );
-
-  describe('when handling errors', function() {
-    it('should return the http status code', function(done) {
-      _httpBackend.whenGET('gateway-error').respond(502);
-      _backendSrv
-        .datasourceRequest({
-          url: 'gateway-error',
-        })
-        .catch(function(err) {
-          expect(err.status).to.be(502);
-          done();
-        });
-      _httpBackend.flush();
-    });
-  });
-});

+ 1 - 1
public/app/core/specs/kbn.jest.ts

@@ -402,7 +402,7 @@ describe('duration', function() {
 describe('volume', function() {
   it('1000m3', function() {
     var str = kbn.valueFormats['m3'](1000, 1, null);
-    expect(str).toBe('1000.0 m3');
+    expect(str).toBe('1000.0 m³');
   });
 });
 

+ 21 - 21
public/app/core/utils/kbn.ts

@@ -500,7 +500,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W');
 kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1);
 kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1);
 kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1);
-kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2');
+kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m²');
 kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA');
 kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1);
 kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var');
@@ -572,9 +572,9 @@ kbn.valueFormats.accG = kbn.formatBuilders.fixedUnit('g');
 // Volume
 kbn.valueFormats.litre = kbn.formatBuilders.decimalSIPrefix('L');
 kbn.valueFormats.mlitre = kbn.formatBuilders.decimalSIPrefix('L', -1);
-kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m3');
-kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm3');
-kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm3');
+kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m³');
+kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm³');
+kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm³');
 kbn.valueFormats.gallons = kbn.formatBuilders.fixedUnit('gal');
 
 // Flow
@@ -605,14 +605,14 @@ kbn.valueFormats.radsvh = kbn.formatBuilders.decimalSIPrefix('Sv/h');
 // Concentration
 kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm');
 kbn.valueFormats.conppb = kbn.formatBuilders.fixedUnit('ppb');
-kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m3');
-kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm3');
-kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m3');
-kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm3');
-kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m3');
-kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm3');
-kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m3');
-kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm3');
+kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m³');
+kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm³');
+kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m³');
+kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm³');
+kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m³');
+kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm³');
+kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m³');
+kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm³');
 
 // Time
 kbn.valueFormats.hertz = kbn.formatBuilders.decimalSIPrefix('Hz');
@@ -1021,7 +1021,7 @@ kbn.getUnitFormats = function() {
         { text: 'Watt (W)', value: 'watt' },
         { text: 'Kilowatt (kW)', value: 'kwatt' },
         { text: 'Milliwatt (mW)', value: 'mwatt' },
-        { text: 'Watt per square metre (W/m2)', value: 'Wm2' },
+        { text: 'Watt per square metre (W/m²)', value: 'Wm2' },
         { text: 'Volt-ampere (VA)', value: 'voltamp' },
         { text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' },
         { text: 'Volt-ampere reactive (var)', value: 'voltampreact' },
@@ -1118,14 +1118,14 @@ kbn.getUnitFormats = function() {
       submenu: [
         { text: 'parts-per-million (ppm)', value: 'ppm' },
         { text: 'parts-per-billion (ppb)', value: 'conppb' },
-        { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' },
-        { text: 'nanogram per normal cubic metre (ng/Nm3)', value: 'conngNm3' },
-        { text: 'microgram per cubic metre (μg/m3)', value: 'conμgm3' },
-        { text: 'microgram per normal cubic metre (μg/Nm3)', value: 'conμgNm3' },
-        { text: 'milligram per cubic metre (mg/m3)', value: 'conmgm3' },
-        { text: 'milligram per normal cubic metre (mg/Nm3)', value: 'conmgNm3' },
-        { text: 'gram per cubic metre (g/m3)', value: 'congm3' },
-        { text: 'gram per normal cubic metre (g/Nm3)', value: 'congNm3' },
+        { text: 'nanogram per cubic metre (ng/m³)', value: 'conngm3' },
+        { text: 'nanogram per normal cubic metre (ng/Nm³)', value: 'conngNm3' },
+        { text: 'microgram per cubic metre (μg/m³)', value: 'conμgm3' },
+        { text: 'microgram per normal cubic metre (μg/Nm³)', value: 'conμgNm3' },
+        { text: 'milligram per cubic metre (mg/m³)', value: 'conmgm3' },
+        { text: 'milligram per normal cubic metre (mg/Nm³)', value: 'conmgNm3' },
+        { text: 'gram per cubic metre (g/m³)', value: 'congm3' },
+        { text: 'gram per normal cubic metre (g/Nm³)', value: 'congNm3' },
       ],
     },
   ];

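Editor's note: the unit change is purely cosmetic; the format builders are untouched and only the display strings gain Unicode superscripts (m³, W/m², etc.), so existing dashboards referencing unit keys like 'm3' or 'Wm2' keep working. Assuming the (value, decimals, scaledDecimals) call signature exercised in kbn.jest.ts above:

    kbn.valueFormats['m3'](1000, 1, null);   // '1000.0 m³'
    kbn.valueFormats['Wm2'](42.5, 2, null);  // '42.50 W/m²' (illustrative input)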
+ 83 - 61
public/app/features/templating/specs/variable_srv_init_specs.ts → public/app/features/templating/specs/variable_srv_init.jest.ts

@@ -1,36 +1,31 @@
-import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
-
 import '../all';
 
 import _ from 'lodash';
-import helpers from 'test/specs/helpers';
-import { Emitter } from 'app/core/core';
+import { VariableSrv } from '../variable_srv';
+import $q from 'q';
 
 describe('VariableSrv init', function() {
-  var ctx = new helpers.ControllerTestContext();
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.controllers'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.module(function($compileProvider) {
-      $compileProvider.preAssignBindingsEnabled(true);
-    })
-  );
-
-  beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location']));
-  beforeEach(
-    angularMocks.inject(($rootScope, $q, $location, $injector) => {
-      ctx.$q = $q;
-      ctx.$rootScope = $rootScope;
-      ctx.$location = $location;
-      ctx.variableSrv = $injector.get('variableSrv');
-      ctx.$rootScope.$digest();
-    })
-  );
+  let templateSrv = {
+    init: vars => {
+      this.variables = vars;
+    },
+    variableInitialized: () => {},
+    updateTemplateData: () => {},
+    replace: str =>
+      str.replace(this.regex, match => {
+        return match;
+      }),
+  };
+
+  let $injector = <any>{};
+  let $rootscope = {
+    $on: () => {},
+  };
+
+  let ctx = <any>{};
 
   function describeInitScenario(desc, fn) {
-    describe(desc, function() {
+    describe(desc, () => {
       var scenario: any = {
         urlParams: {},
         setup: setupFn => {
@@ -38,22 +33,34 @@ describe('VariableSrv init', function() {
         },
       };
 
-      beforeEach(function() {
+      beforeEach(async () => {
         scenario.setupFn();
-        ctx.datasource = {};
-        ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when(scenario.queryResult));
+        ctx = {
+          datasource: {
+            metricFindQuery: jest.fn(() => Promise.resolve(scenario.queryResult)),
+          },
+          datasourceSrv: {
+            get: () => Promise.resolve(ctx.datasource),
+            getMetricSources: () => scenario.metricSources,
+          },
+          templateSrv,
+        };
 
-        ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ctx.datasource));
-        ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources);
+        ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv);
 
-        ctx.$location.search = sinon.stub().returns(scenario.urlParams);
-        ctx.dashboard = {
+        $injector.instantiate = (variable, model) => {
+          return getVarMockConstructor(variable, model, ctx);
+        };
+
+        ctx.variableSrv.datasource = ctx.datasource;
+        ctx.variableSrv.datasourceSrv = ctx.datasourceSrv;
+
+        ctx.variableSrv.$location.search = () => scenario.urlParams;
+        ctx.variableSrv.dashboard = {
           templating: { list: scenario.variables },
-          events: new Emitter(),
         };
 
-        ctx.variableSrv.init(ctx.dashboard);
-        ctx.$rootScope.$digest();
+        await ctx.variableSrv.init(ctx.variableSrv.dashboard);
 
         scenario.variables = ctx.variableSrv.variables;
       });
@@ -78,8 +85,8 @@ describe('VariableSrv init', function() {
       });
 
       it('should update current value', () => {
-        expect(scenario.variables[0].current.value).to.be('new');
-        expect(scenario.variables[0].current.text).to.be('new');
+        expect(scenario.variables[0].current.value).toBe('new');
+        expect(scenario.variables[0].current.text).toBe('new');
       });
     });
   });
@@ -111,12 +118,12 @@ describe('VariableSrv init', function() {
       });
 
       it('should update child variable', () => {
-        expect(scenario.variables[1].options.length).to.be(2);
-        expect(scenario.variables[1].current.text).to.be('google-server1');
+        expect(scenario.variables[1].options.length).toBe(2);
+        expect(scenario.variables[1].current.text).toBe('google-server1');
       });
 
       it('should only update it once', () => {
-        expect(ctx.datasource.metricFindQuery.callCount).to.be(1);
+        expect(ctx.variableSrv.datasource.metricFindQuery).toHaveBeenCalledTimes(1);
       });
     });
   });
@@ -140,9 +147,9 @@ describe('VariableSrv init', function() {
       ];
     });
 
-    it('should update current value', function() {
+    it('should update current value', () => {
       var variable = ctx.variableSrv.variables[0];
-      expect(variable.options.length).to.be(2);
+      expect(variable.options.length).toBe(2);
     });
   });
 
@@ -164,19 +171,19 @@ describe('VariableSrv init', function() {
       scenario.urlParams['var-apps'] = ['val2', 'val1'];
     });
 
-    it('should update current value', function() {
+    it('should update current value', () => {
       var variable = ctx.variableSrv.variables[0];
-      expect(variable.current.value.length).to.be(2);
-      expect(variable.current.value[0]).to.be('val2');
-      expect(variable.current.value[1]).to.be('val1');
-      expect(variable.current.text).to.be('val2 + val1');
-      expect(variable.options[0].selected).to.be(true);
-      expect(variable.options[1].selected).to.be(true);
+      expect(variable.current.value.length).toBe(2);
+      expect(variable.current.value[0]).toBe('val2');
+      expect(variable.current.value[1]).toBe('val1');
+      expect(variable.current.text).toBe('val2 + val1');
+      expect(variable.options[0].selected).toBe(true);
+      expect(variable.options[1].selected).toBe(true);
     });
 
-    it('should set options that are not in value to selected false', function() {
+    it('should set options that are not in value to selected false', () => {
       var variable = ctx.variableSrv.variables[0];
-      expect(variable.options[2].selected).to.be(false);
+      expect(variable.options[2].selected).toBe(false);
     });
   });
 
@@ -198,19 +205,34 @@ describe('VariableSrv init', function() {
       scenario.urlParams['var-apps'] = ['val2', 'val1'];
     });
 
-    it('should update current value', function() {
+    it('should update current value', () => {
       var variable = ctx.variableSrv.variables[0];
-      expect(variable.current.value.length).to.be(2);
-      expect(variable.current.value[0]).to.be('val2');
-      expect(variable.current.value[1]).to.be('val1');
-      expect(variable.current.text).to.be('Val2 + Val1');
-      expect(variable.options[0].selected).to.be(true);
-      expect(variable.options[1].selected).to.be(true);
+      expect(variable.current.value.length).toBe(2);
+      expect(variable.current.value[0]).toBe('val2');
+      expect(variable.current.value[1]).toBe('val1');
+      expect(variable.current.text).toBe('Val2 + Val1');
+      expect(variable.options[0].selected).toBe(true);
+      expect(variable.options[1].selected).toBe(true);
     });
 
-    it('should set options that are not in value to selected false', function() {
+    it('should set options that are not in value to selected false', () => {
       var variable = ctx.variableSrv.variables[0];
-      expect(variable.options[2].selected).to.be(false);
+      expect(variable.options[2].selected).toBe(false);
     });
   });
 });
+
+function getVarMockConstructor(variable, model, ctx) {
+  switch (model.model.type) {
+    case 'datasource':
+      return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv);
+    case 'query':
+      return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv);
+    case 'interval':
+      return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv);
+    case 'custom':
+      return new variable(model.model, ctx.variableSrv);
+    default:
+      return new variable(model.model);
+  }
+}

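Editor's note: the templateSrv stub above uses arrow functions inside an object literal, so `this.variables` and `this.regex` bind to the enclosing scope rather than to the stub; that happens to be harmless for these scenarios (replace still returns the string unchanged) but is worth flagging if the stub is reused. A self-contained variant that avoids the pitfall, behaviorally equivalent for these tests:

    const templateSrv = {
      variables: [] as any[],
      init(vars) { this.variables = vars; }, // method shorthand binds `this` to the object
      variableInitialized: () => {},
      updateTemplateData: () => {},
      replace: str => str,                   // identity, matching the stub's effective behavior
    };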
+ 324 - 0
public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts

@@ -0,0 +1,324 @@
+import { uiSegmentSrv } from 'app/core/services/segment_srv';
+import gfunc from '../gfunc';
+import { GraphiteQueryCtrl } from '../query_ctrl';
+
+describe('GraphiteQueryCtrl', () => {
+  let ctx = <any>{
+    datasource: {
+      metricFindQuery: jest.fn(() => Promise.resolve([])),
+      getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))),
+      getFuncDef: gfunc.getFuncDef,
+      waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)),
+      createFuncInstance: gfunc.createFuncInstance,
+    },
+    target: { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' },
+    panelCtrl: {
+      refresh: jest.fn(),
+    },
+  };
+
+  ctx.panelCtrl.panel = {
+    targets: [ctx.target],
+  };
+
+  beforeEach(() => {
+    GraphiteQueryCtrl.prototype.target = ctx.target;
+    GraphiteQueryCtrl.prototype.datasource = ctx.datasource;
+
+    GraphiteQueryCtrl.prototype.panelCtrl = ctx.panelCtrl;
+
+    ctx.ctrl = new GraphiteQueryCtrl(
+      {},
+      {},
+      new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }),
+      {},
+      {}
+    );
+  });
+
+  describe('init', () => {
+    it('should validate metric key exists', () => {
+      expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*');
+    });
+
+    it('should delete last segment if no metrics are found', () => {
+      expect(ctx.ctrl.segments[2].value).toBe('select metric');
+    });
+
+    it('should parse expression and build function model', () => {
+      expect(ctx.ctrl.queryModel.functions.length).toBe(2);
+    });
+  });
+
+  describe('when adding function', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'test.prod.*.count';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode'));
+    });
+
+    it('should add function with correct node number', () => {
+      expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(2);
+    });
+
+    it('should update target', () => {
+      expect(ctx.ctrl.target.target).toBe('aliasByNode(test.prod.*.count, 2)');
+    });
+
+    it('should call refresh', () => {
+      expect(ctx.panelCtrl.refresh).toHaveBeenCalled();
+    });
+  });
+
+  describe('when adding function before any metric segment', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = '';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: true }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent'));
+    });
+
+    it('should add function and remove select metric link', () => {
+      expect(ctx.ctrl.segments.length).toBe(0);
+    });
+  });
+
+  describe('when initializing target without metric expression and only function', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'asPercent(#A, #B)';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]);
+      ctx.ctrl.parseTarget();
+    });
+
+    it('should not add select metric segment', () => {
+      expect(ctx.ctrl.segments.length).toBe(1);
+    });
+
+    it('should add second series ref as param', () => {
+      expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1);
+    });
+  });
+
+  describe('when initializing a target with single param func using variable', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'movingAverage(prod.count, $var)';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]);
+      ctx.ctrl.parseTarget();
+    });
+
+    it('should add 2 segments', () => {
+      expect(ctx.ctrl.segments.length).toBe(2);
+    });
+
+    it('should add function param', () => {
+      expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1);
+    });
+  });
+
+  describe('when initializing target without metric expression and function with series-ref', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]);
+      ctx.ctrl.parseTarget();
+    });
+
+    it('should add segments', () => {
+      expect(ctx.ctrl.segments.length).toBe(3);
+    });
+
+    it('should have correct func params', () => {
+      expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1);
+    });
+  });
+
+  describe('when getting altSegments and metricFindQuery returns empty array', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'test.count';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.getAltSegments(1).then(function(results) {
+        ctx.altSegments = results;
+      });
+    });
+
+    it('should have no segments', () => {
+      expect(ctx.altSegments.length).toBe(0);
+    });
+  });
+
+  describe('targetChanged', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'aliasByNode(scaleToSeconds(test.prod.*, 1), 2)';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.target.target = '';
+      ctx.ctrl.targetChanged();
+    });
+
+    it('should rebuild target after expression model', () => {
+      expect(ctx.ctrl.target.target).toBe('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
+    });
+
+    it('should call panelCtrl.refresh', () => {
+      expect(ctx.panelCtrl.refresh).toHaveBeenCalled();
+    });
+  });
+
+  describe('when updating targets with nested query', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+    });
+
+    it('should add function params', () => {
+      expect(ctx.ctrl.queryModel.segments.length).toBe(1);
+      expect(ctx.ctrl.queryModel.segments[0].value).toBe('#A');
+
+      expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1);
+      expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(60);
+    });
+
+    it('target should remain the same', () => {
+      expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)');
+    });
+
+    it('targetFull should include nested queries', () => {
+      ctx.ctrl.panelCtrl.panel.targets = [
+        {
+          target: 'nested.query.count',
+          refId: 'A',
+        },
+      ];
+
+      ctx.ctrl.updateModelTarget();
+
+      expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)');
+
+      expect(ctx.ctrl.target.targetFull).toBe('scaleToSeconds(nested.query.count, 60)');
+    });
+  });
+
+  describe('when updating target used in other query', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'metrics.a.count';
+      ctx.ctrl.target.refId = 'A';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+
+      ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }];
+
+      ctx.ctrl.updateModelTarget();
+    });
+
+    it('targetFull of other query should update', () => {
+      expect(ctx.ctrl.panel.targets[1].targetFull).toBe('sumSeries(metrics.a.count)');
+    });
+  });
+
+  describe('when adding seriesByTag function', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = '';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag'));
+    });
+
+    it('should update functions', () => {
+      expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).toBe(0);
+    });
+
+    it('should update seriesByTagUsed flag', () => {
+      expect(ctx.ctrl.queryModel.seriesByTagUsed).toBe(true);
+    });
+
+    it('should update target', () => {
+      expect(ctx.ctrl.target.target).toBe('seriesByTag()');
+    });
+
+    it('should call refresh', () => {
+      expect(ctx.panelCtrl.refresh).toHaveBeenCalled();
+    });
+  });
+
+  describe('when parsing seriesByTag function', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+    });
+
+    it('should add tags', () => {
+      const expected = [
+        { key: 'tag1', operator: '=', value: 'value1' },
+        { key: 'tag2', operator: '!=~', value: 'value2' },
+      ];
+      expect(ctx.ctrl.queryModel.tags).toEqual(expected);
+    });
+
+    it('should add plus button', () => {
+      expect(ctx.ctrl.addTagSegments.length).toBe(1);
+    });
+  });
+
+  describe('when tag added', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = 'seriesByTag()';
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.addNewTag({ value: 'tag1' });
+    });
+
+    it('should update tags with default value', () => {
+      const expected = [{ key: 'tag1', operator: '=', value: '' }];
+      expect(ctx.ctrl.queryModel.tags).toEqual(expected);
+    });
+
+    it('should update target', () => {
+      const expected = "seriesByTag('tag1=')";
+      expect(ctx.ctrl.target.target).toEqual(expected);
+    });
+  });
+
+  describe('when tag changed', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0);
+    });
+
+    it('should update tags', () => {
+      const expected = [
+        { key: 'tag1', operator: '=', value: 'new_value' },
+        { key: 'tag2', operator: '!=~', value: 'value2' },
+      ];
+      expect(ctx.ctrl.queryModel.tags).toEqual(expected);
+    });
+
+    it('should update target', () => {
+      const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')";
+      expect(ctx.ctrl.target.target).toEqual(expected);
+    });
+  });
+
+  describe('when tag removed', () => {
+    beforeEach(() => {
+      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
+      ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]);
+      ctx.ctrl.parseTarget();
+      ctx.ctrl.removeTag(0);
+    });
+
+    it('should update tags', () => {
+      const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }];
+      expect(ctx.ctrl.queryModel.tags).toEqual(expected);
+    });
+
+    it('should update target', () => {
+      const expected = "seriesByTag('tag2!=~value2')";
+      expect(ctx.ctrl.target.target).toEqual(expected);
+    });
+  });
+});

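Editor's note: the migration pattern in this spec recurs throughout the commit; sinon stubs backed by $q become jest.fn mocks returning native promises, and call inspection moves to Jest's .mock API. Side by side (both lines appear verbatim in the specs above and below):

    // before (karma + sinon):
    ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
    expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');

    // after (jest):
    ctx.datasource.metricFindQuery = jest.fn(() => Promise.resolve([]));
    expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*');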
+ 0 - 338
public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts

@@ -1,338 +0,0 @@
-import 'app/core/services/segment_srv';
-import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
-
-import gfunc from '../gfunc';
-import helpers from 'test/specs/helpers';
-import { GraphiteQueryCtrl } from '../query_ctrl';
-
-describe('GraphiteQueryCtrl', function() {
-  var ctx = new helpers.ControllerTestContext();
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.controllers'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.module(function($compileProvider) {
-      $compileProvider.preAssignBindingsEnabled(true);
-    })
-  );
-
-  beforeEach(ctx.providePhase());
-  beforeEach(
-    angularMocks.inject(($rootScope, $controller, $q) => {
-      ctx.$q = $q;
-      ctx.scope = $rootScope.$new();
-      ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' };
-      ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
-      ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0')));
-      ctx.datasource.getFuncDef = gfunc.getFuncDef;
-      ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null));
-      ctx.datasource.createFuncInstance = gfunc.createFuncInstance;
-      ctx.panelCtrl = { panel: {} };
-      ctx.panelCtrl = {
-        panel: {
-          targets: [ctx.target],
-        },
-      };
-      ctx.panelCtrl.refresh = sinon.spy();
-
-      ctx.ctrl = $controller(
-        GraphiteQueryCtrl,
-        { $scope: ctx.scope },
-        {
-          panelCtrl: ctx.panelCtrl,
-          datasource: ctx.datasource,
-          target: ctx.target,
-        }
-      );
-      ctx.scope.$digest();
-    })
-  );
-
-  describe('init', function() {
-    it('should validate metric key exists', function() {
-      expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');
-    });
-
-    it('should delete last segment if no metrics are found', function() {
-      expect(ctx.ctrl.segments[2].value).to.be('select metric');
-    });
-
-    it('should parse expression and build function model', function() {
-      expect(ctx.ctrl.queryModel.functions.length).to.be(2);
-    });
-  });
-
-  describe('when adding function', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'test.prod.*.count';
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode'));
-    });
-
-    it('should add function with correct node number', function() {
-      expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2);
-    });
-
-    it('should update target', function() {
-      expect(ctx.ctrl.target.target).to.be('aliasByNode(test.prod.*.count, 2)');
-    });
-
-    it('should call refresh', function() {
-      expect(ctx.panelCtrl.refresh.called).to.be(true);
-    });
-  });
-
-  describe('when adding function before any metric segment', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = '';
-      ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([{ expandable: true }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent'));
-    });
-
-    it('should add function and remove select metric link', function() {
-      expect(ctx.ctrl.segments.length).to.be(0);
-    });
-  });
-
-  describe('when initializing target without metric expression and only function', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'asPercent(#A, #B)';
-      ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
-      ctx.ctrl.parseTarget();
-      ctx.scope.$digest();
-    });
-
-    it('should not add select metric segment', function() {
-      expect(ctx.ctrl.segments.length).to.be(1);
-    });
-
-    it('should add second series ref as param', function() {
-      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
-    });
-  });
-
-  describe('when initializing a target with single param func using variable', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'movingAverage(prod.count, $var)';
-      ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
-      ctx.ctrl.parseTarget();
-    });
-
-    it('should add 2 segments', function() {
-      expect(ctx.ctrl.segments.length).to.be(2);
-    });
-
-    it('should add function param', function() {
-      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
-    });
-  });
-
-  describe('when initializing target without metric expression and function with series-ref', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)';
-      ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
-      ctx.ctrl.parseTarget();
-    });
-
-    it('should add segments', function() {
-      expect(ctx.ctrl.segments.length).to.be(3);
-    });
-
-    it('should have correct func params', function() {
-      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
-    });
-  });
-
-  describe('when getting altSegments and metricFindQuery returns empty array', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'test.count';
-      ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.getAltSegments(1).then(function(results) {
-        ctx.altSegments = results;
-      });
-      ctx.scope.$digest();
-    });
-
-    it('should have no segments', function() {
-      expect(ctx.altSegments.length).to.be(0);
-    });
-  });
-
-  describe('targetChanged', function() {
-    beforeEach(function() {
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.target.target = '';
-      ctx.ctrl.targetChanged();
-    });
-
-    it('should rebuld target after expression model', function() {
-      expect(ctx.ctrl.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
-    });
-
-    it('should call panelCtrl.refresh', function() {
-      expect(ctx.panelCtrl.refresh.called).to.be(true);
-    });
-  });
-
-  describe('when updating targets with nested query', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)';
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-    });
-
-    it('should add function params', function() {
-      expect(ctx.ctrl.queryModel.segments.length).to.be(1);
-      expect(ctx.ctrl.queryModel.segments[0].value).to.be('#A');
-
-      expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1);
-      expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(60);
-    });
-
-    it('target should remain the same', function() {
-      expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)');
-    });
-
-    it('targetFull should include nested queries', function() {
-      ctx.ctrl.panelCtrl.panel.targets = [
-        {
-          target: 'nested.query.count',
-          refId: 'A',
-        },
-      ];
-
-      ctx.ctrl.updateModelTarget();
-
-      expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)');
-
-      expect(ctx.ctrl.target.targetFull).to.be('scaleToSeconds(nested.query.count, 60)');
-    });
-  });
-
-  describe('when updating target used in other query', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'metrics.a.count';
-      ctx.ctrl.target.refId = 'A';
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-
-      ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }];
-
-      ctx.ctrl.updateModelTarget();
-    });
-
-    it('targetFull of other query should update', function() {
-      expect(ctx.ctrl.panel.targets[1].targetFull).to.be('sumSeries(metrics.a.count)');
-    });
-  });
-
-  describe('when adding seriesByTag function', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = '';
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag'));
-    });
-
-    it('should update functions', function() {
-      expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0);
-    });
-
-    it('should update seriesByTagUsed flag', function() {
-      expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true);
-    });
-
-    it('should update target', function() {
-      expect(ctx.ctrl.target.target).to.be('seriesByTag()');
-    });
-
-    it('should call refresh', function() {
-      expect(ctx.panelCtrl.refresh.called).to.be(true);
-    });
-  });
-
-  describe('when parsing seriesByTag function', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-    });
-
-    it('should add tags', function() {
-      const expected = [
-        { key: 'tag1', operator: '=', value: 'value1' },
-        { key: 'tag2', operator: '!=~', value: 'value2' },
-      ];
-      expect(ctx.ctrl.queryModel.tags).to.eql(expected);
-    });
-
-    it('should add plus button', function() {
-      expect(ctx.ctrl.addTagSegments.length).to.be(1);
-    });
-  });
-
-  describe('when tag added', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = 'seriesByTag()';
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.addNewTag({ value: 'tag1' });
-    });
-
-    it('should update tags with default value', function() {
-      const expected = [{ key: 'tag1', operator: '=', value: '' }];
-      expect(ctx.ctrl.queryModel.tags).to.eql(expected);
-    });
-
-    it('should update target', function() {
-      const expected = "seriesByTag('tag1=')";
-      expect(ctx.ctrl.target.target).to.eql(expected);
-    });
-  });
-
-  describe('when tag changed', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0);
-    });
-
-    it('should update tags', function() {
-      const expected = [
-        { key: 'tag1', operator: '=', value: 'new_value' },
-        { key: 'tag2', operator: '!=~', value: 'value2' },
-      ];
-      expect(ctx.ctrl.queryModel.tags).to.eql(expected);
-    });
-
-    it('should update target', function() {
-      const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')";
-      expect(ctx.ctrl.target.target).to.eql(expected);
-    });
-  });
-
-  describe('when tag removed', function() {
-    beforeEach(function() {
-      ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')";
-      ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }]));
-      ctx.ctrl.parseTarget();
-      ctx.ctrl.removeTag(0);
-    });
-
-    it('should update tags', function() {
-      const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }];
-      expect(ctx.ctrl.queryModel.tags).to.eql(expected);
-    });
-
-    it('should update target', function() {
-      const expected = "seriesByTag('tag2!=~value2')";
-      expect(ctx.ctrl.target.target).to.eql(expected);
-    });
-  });
-});

+ 0 - 1
public/app/plugins/datasource/influxdb/query_ctrl.ts

@@ -22,7 +22,6 @@ export class InfluxQueryCtrl extends QueryCtrl {
   /** @ngInject **/
   constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) {
     super($scope, $injector);
-
     this.target = this.target;
     this.queryModel = new InfluxQuery(this.target, templateSrv, this.panel.scopedVars);
     this.queryBuilder = new InfluxQueryBuilder(this.target, this.datasource.database);

+ 178 - 0
public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts

@@ -0,0 +1,178 @@
+import '../query_ctrl';
+import { uiSegmentSrv } from 'app/core/services/segment_srv';
+import { InfluxQueryCtrl } from '../query_ctrl';
+
+describe('InfluxDBQueryCtrl', () => {
+  let ctx = <any>{};
+
+  beforeEach(() => {
+    InfluxQueryCtrl.prototype.datasource = {
+      metricFindQuery: () => Promise.resolve([]),
+    };
+    InfluxQueryCtrl.prototype.target = { target: {} };
+    InfluxQueryCtrl.prototype.panelCtrl = {
+      panel: {
+        targets: [InfluxQueryCtrl.prototype.target],
+      },
+      refresh: () => {},
+    };
+
+    ctx.ctrl = new InfluxQueryCtrl(
+      {},
+      {},
+      {},
+      {},
+      new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} })
+    );
+  });
+
+  describe('init', () => {
+    it('should init tagSegments', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(1);
+    });
+
+    it('should init measurementSegment', () => {
+      expect(ctx.ctrl.measurementSegment.value).toBe('select measurement');
+    });
+  });
+
+  describe('when first tag segment is updated', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+    });
+
+    it('should update tag key', () => {
+      expect(ctx.ctrl.target.tags[0].key).toBe('asd');
+      expect(ctx.ctrl.tagSegments[0].type).toBe('key');
+    });
+
+    it('should add tagSegments', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(3);
+    });
+  });
+
+  describe('when last tag value segment is updated', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+    });
+
+    it('should update tag value', () => {
+      expect(ctx.ctrl.target.tags[0].value).toBe('server1');
+    });
+
+    it('should set tag operator', () => {
+      expect(ctx.ctrl.target.tags[0].operator).toBe('=');
+    });
+
+    it('should add plus button for another filter', () => {
+      expect(ctx.ctrl.tagSegments[3].fake).toBe(true);
+    });
+  });
+
+  describe('when last tag value segment is updated to regex', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2);
+    });
+
+    it('should update operator', () => {
+      expect(ctx.ctrl.tagSegments[1].value).toBe('=~');
+      expect(ctx.ctrl.target.tags[0].operator).toBe('=~');
+    });
+  });
+
+  describe('when second tag key is added', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
+    });
+
+    it('should update tag key', () => {
+      expect(ctx.ctrl.target.tags[1].key).toBe('key2');
+    });
+
+    it('should add AND segment', () => {
+      expect(ctx.ctrl.tagSegments[3].value).toBe('AND');
+    });
+  });
+
+  describe('when condition is changed', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
+      ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3);
+    });
+
+    it('should update tag condition', () => {
+      expect(ctx.ctrl.target.tags[1].condition).toBe('OR');
+    });
+
+    it('should update AND segment', () => {
+      expect(ctx.ctrl.tagSegments[3].value).toBe('OR');
+      expect(ctx.ctrl.tagSegments.length).toBe(7);
+    });
+  });
+
+  describe('when deleting first tag filter after value is selected', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0);
+    });
+
+    it('should remove tags', () => {
+      expect(ctx.ctrl.target.tags.length).toBe(0);
+    });
+
+    it('should remove all segment after 2 and replace with plus button', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(1);
+      expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button');
+    });
+  });
+
+  describe('when deleting second tag value before second tag value is complete', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
+      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
+    });
+
+    it('should remove all segment after 2 and replace with plus button', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(4);
+      expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button');
+    });
+  });
+
+  describe('when deleting second tag value before second tag value is complete', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
+      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
+    });
+
+    it('should remove all segment after 2 and replace with plus button', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(4);
+      expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button');
+    });
+  });
+
+  describe('when deleting second tag value after second tag filter is complete', () => {
+    beforeEach(() => {
+      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
+      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
+      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
+      ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6);
+      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
+    });
+
+    it('should remove all segment after 2 and replace with plus button', () => {
+      expect(ctx.ctrl.tagSegments.length).toBe(4);
+      expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button');
+    });
+  });
+});

+ 0 - 193
public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts

@@ -1,193 +0,0 @@
-import '../query_ctrl';
-import 'app/core/services/segment_srv';
-import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
-import helpers from 'test/specs/helpers';
-import { InfluxQueryCtrl } from '../query_ctrl';
-
-describe('InfluxDBQueryCtrl', function() {
-  var ctx = new helpers.ControllerTestContext();
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.controllers'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.module(function($compileProvider) {
-      $compileProvider.preAssignBindingsEnabled(true);
-    })
-  );
-  beforeEach(ctx.providePhase());
-
-  beforeEach(
-    angularMocks.inject(($rootScope, $controller, $q) => {
-      ctx.$q = $q;
-      ctx.scope = $rootScope.$new();
-      ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
-      ctx.target = { target: {} };
-      ctx.panelCtrl = {
-        panel: {
-          targets: [ctx.target],
-        },
-      };
-      ctx.panelCtrl.refresh = sinon.spy();
-      ctx.ctrl = $controller(
-        InfluxQueryCtrl,
-        { $scope: ctx.scope },
-        {
-          panelCtrl: ctx.panelCtrl,
-          target: ctx.target,
-          datasource: ctx.datasource,
-        }
-      );
-    })
-  );
-
-  describe('init', function() {
-    it('should init tagSegments', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(1);
-    });
-
-    it('should init measurementSegment', function() {
-      expect(ctx.ctrl.measurementSegment.value).to.be('select measurement');
-    });
-  });
-
-  describe('when first tag segment is updated', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-    });
-
-    it('should update tag key', function() {
-      expect(ctx.ctrl.target.tags[0].key).to.be('asd');
-      expect(ctx.ctrl.tagSegments[0].type).to.be('key');
-    });
-
-    it('should add tagSegments', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(3);
-    });
-  });
-
-  describe('when last tag value segment is updated', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-    });
-
-    it('should update tag value', function() {
-      expect(ctx.ctrl.target.tags[0].value).to.be('server1');
-    });
-
-    it('should set tag operator', function() {
-      expect(ctx.ctrl.target.tags[0].operator).to.be('=');
-    });
-
-    it('should add plus button for another filter', function() {
-      expect(ctx.ctrl.tagSegments[3].fake).to.be(true);
-    });
-  });
-
-  describe('when last tag value segment is updated to regex', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2);
-    });
-
-    it('should update operator', function() {
-      expect(ctx.ctrl.tagSegments[1].value).to.be('=~');
-      expect(ctx.ctrl.target.tags[0].operator).to.be('=~');
-    });
-  });
-
-  describe('when second tag key is added', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
-    });
-
-    it('should update tag key', function() {
-      expect(ctx.ctrl.target.tags[1].key).to.be('key2');
-    });
-
-    it('should add AND segment', function() {
-      expect(ctx.ctrl.tagSegments[3].value).to.be('AND');
-    });
-  });
-
-  describe('when condition is changed', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
-      ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3);
-    });
-
-    it('should update tag condition', function() {
-      expect(ctx.ctrl.target.tags[1].condition).to.be('OR');
-    });
-
-    it('should update AND segment', function() {
-      expect(ctx.ctrl.tagSegments[3].value).to.be('OR');
-      expect(ctx.ctrl.tagSegments.length).to.be(7);
-    });
-  });
-
-  describe('when deleting first tag filter after value is selected', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0);
-    });
-
-    it('should remove tags', function() {
-      expect(ctx.ctrl.target.tags.length).to.be(0);
-    });
-
-    it('should remove all segment after 2 and replace with plus button', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(1);
-      expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button');
-    });
-  });
-
-  describe('when deleting second tag value before second tag value is complete', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
-      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
-    });
-
-    it('should remove all segment after 2 and replace with plus button', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(4);
-      expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button');
-    });
-  });
-
-  describe('when deleting second tag value before second tag value is complete', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
-      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
-    });
-
-    it('should remove all segment after 2 and replace with plus button', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(4);
-      expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button');
-    });
-  });
-
-  describe('when deleting second tag value after second tag filter is complete', function() {
-    beforeEach(function() {
-      ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0);
-      ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2);
-      ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3);
-      ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6);
-      ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4);
-    });
-
-    it('should remove all segment after 2 and replace with plus button', function() {
-      expect(ctx.ctrl.tagSegments.length).to.be(4);
-      expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button');
-    });
-  });
-});

+ 4 - 2
public/app/plugins/datasource/mssql/partials/query.editor.html

@@ -39,9 +39,11 @@
 	<div class="gf-form"  ng-show="ctrl.showHelp">
 		<pre class="gf-form-pre alert alert-info">Time series:
 - return column named time (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
-- optional: return column named metric to represent the series names.
 - any other columns returned will be the time point values.
-- if multiple value columns are present and a metric column is provided. the series name will be the combination of "MetricName - ValueColumnName".
+Optional:
+  - return a column named <i>metric</i> to represent the series name.
+  - If multiple value columns are returned, the metric column is used as a prefix.
+  - If no column named metric is found, the name of the value column is used as the series name.
 
 Table:
 - return any set of columns

+ 4 - 1
public/app/plugins/datasource/mysql/partials/query.editor.html

@@ -40,7 +40,10 @@
 		<pre class="gf-form-pre alert alert-info">Time series:
 - return column named time or time_sec (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
 - return column(s) with numeric datatype as values
-- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
+Optional:
+  - return a column named <i>metric</i> to represent the series name.
+  - If multiple value columns are returned, the metric column is used as a prefix.
+  - If no column named metric is found, the name of the value column is used as the series name.
 
 Table:
 - return any set of columns

+ 4 - 1
public/app/plugins/datasource/postgres/partials/query.editor.html

@@ -40,7 +40,10 @@
 		<pre class="gf-form-pre alert alert-info">Time series:
 - return column named <i>time</i> (UTC in seconds or timestamp)
 - return column(s) with numeric datatype as values
-- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
+Optional:
+  - return a column named <i>metric</i> to represent the series name.
+  - If multiple value columns are returned, the metric column is used as a prefix.
+  - If no column named metric is found, the name of the value column is used as the series name.
 
 Table:
 - return any set of columns

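Editor's note: to make the shared help text in these three partials concrete, assume a query returning columns time, metric (with value 'web01'), avg_cpu and max_cpu; the metric value is used as a prefix, producing series like 'web01 avg_cpu' and 'web01 max_cpu', while with no metric column the series are named plain 'avg_cpu' and 'max_cpu'. The column names and the exact prefix separator here are illustrative assumptions based on the rules stated above, not taken from the implementation.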
+ 0 - 1
public/app/plugins/datasource/prometheus/datasource.ts

@@ -175,7 +175,6 @@ export class PrometheusDatasource {
           responseIndex: index,
           refId: activeTargets[index].refId,
         };
-
         this.resultTransformer.transform(result, response, transformerOptions);
       });
 

+ 33 - 40
public/app/plugins/datasource/prometheus/specs/completer_specs.ts → public/app/plugins/datasource/prometheus/specs/completer.jest.ts

@@ -1,47 +1,40 @@
-import { describe, it, sinon, expect } from 'test/lib/common';
-import helpers from 'test/specs/helpers';
-
 import { PromCompleter } from '../completer';
 import { PrometheusDatasource } from '../datasource';
+import { BackendSrv } from 'app/core/services/backend_srv';
+jest.mock('../datasource');
+jest.mock('app/core/services/backend_srv');
 
 describe('Prometheus editor completer', function() {
-  var ctx = new helpers.ServiceTestContext();
-  beforeEach(ctx.providePhase(['templateSrv']));
-
   function getSessionStub(data) {
     return {
-      getTokenAt: sinon.stub().returns(data.currentToken),
-      getTokens: sinon.stub().returns(data.tokens),
-      getLine: sinon.stub().returns(data.line),
+      getTokenAt: jest.fn(() => data.currentToken),
+      getTokens: jest.fn(() => data.tokens),
+      getLine: jest.fn(() => data.line),
     };
   }
 
   let editor = {};
-  let datasourceStub = <PrometheusDatasource>{
-    performInstantQuery: sinon
-      .stub()
-      .withArgs({ expr: '{__name__="node_cpu"' })
-      .returns(
-        Promise.resolve({
-          data: {
-            data: {
-              result: [
-                {
-                  metric: {
-                    job: 'node',
-                    instance: 'localhost:9100',
-                  },
-                },
-              ],
+
+  let backendSrv = <BackendSrv>{};
+  let datasourceStub = new PrometheusDatasource({}, {}, backendSrv, {}, {});
+
+  datasourceStub.performInstantQuery = jest.fn(() =>
+    Promise.resolve({
+      data: {
+        data: {
+          result: [
+            {
+              metric: {
+                job: 'node',
+                instance: 'localhost:9100',
+              },
             },
-          },
-        })
-      ),
-    performSuggestQuery: sinon
-      .stub()
-      .withArgs('node', true)
-      .returns(Promise.resolve(['node_cpu'])),
-  };
+          ],
+        },
+      },
+    })
+  );
+  datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu']));
 
   let templateSrv = {
     variables: [
@@ -62,9 +55,9 @@ describe('Prometheus editor completer', function() {
       });
 
       return completer.getCompletions(editor, session, { row: 0, column: 10 }, '[', (s, res) => {
-        expect(res[0].caption).to.eql('$__interval');
-        expect(res[0].value).to.eql('[$__interval');
-        expect(res[0].meta).to.eql('range vector');
+        expect(res[0].caption).toEqual('$__interval');
+        expect(res[0].value).toEqual('[$__interval');
+        expect(res[0].meta).toEqual('range vector');
       });
     });
   });
@@ -93,7 +86,7 @@ describe('Prometheus editor completer', function() {
       });
 
       return completer.getCompletions(editor, session, { row: 0, column: 10 }, 'j', (s, res) => {
-        expect(res[0].meta).to.eql('label name');
+        expect(res[0].meta).toEqual('label name');
       });
     });
   });
@@ -125,7 +118,7 @@ describe('Prometheus editor completer', function() {
       });
 
       return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'j', (s, res) => {
-        expect(res[0].meta).to.eql('label name');
+        expect(res[0].meta).toEqual('label name');
       });
     });
   });
@@ -156,7 +149,7 @@ describe('Prometheus editor completer', function() {
       });
 
       return completer.getCompletions(editor, session, { row: 0, column: 15 }, 'n', (s, res) => {
-        expect(res[0].meta).to.eql('label value');
+        expect(res[0].meta).toEqual('label value');
       });
     });
   });
@@ -192,7 +185,7 @@ describe('Prometheus editor completer', function() {
       });
 
       return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'm', (s, res) => {
-        expect(res[0].meta).to.eql('label name');
+        expect(res[0].meta).toEqual('label name');
       });
     });
   });
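The conversion above drops sinon stubs and the Angular ServiceTestContext in favor of Jest's module mocking. A minimal sketch of the pattern, using the same module path as the test above:

    // jest.mock is hoisted above the import by the Jest transform,
    // so the constructor already returns an auto-mocked instance
    // in which every class method is a jest.fn().
    import { PrometheusDatasource } from '../datasource';
    jest.mock('../datasource');

    const ds = new PrometheusDatasource({}, {}, <any>{}, {}, {});
    // Concrete behavior is then assigned per test:
    ds.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu']));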

+ 808 - 14
public/app/plugins/datasource/prometheus/specs/datasource.jest.ts

@@ -150,49 +150,49 @@ describe('PrometheusDatasource', () => {
     });
   });
 
-  describe('alignRange', function() {
-    it('does not modify already aligned intervals with perfect step', function() {
+  describe('alignRange', () => {
+    it('does not modify already aligned intervals with perfect step', () => {
       const range = alignRange(0, 3, 3);
       expect(range.start).toEqual(0);
       expect(range.end).toEqual(3);
     });
-    it('does modify end-aligned intervals to reflect number of steps possible', function() {
+    it('does modify end-aligned intervals to reflect number of steps possible', () => {
       const range = alignRange(1, 6, 3);
       expect(range.start).toEqual(0);
       expect(range.end).toEqual(6);
     });
-    it('does align intervals that are a multiple of steps', function() {
+    it('does align intervals that are a multiple of steps', () => {
       const range = alignRange(1, 4, 3);
       expect(range.start).toEqual(0);
       expect(range.end).toEqual(6);
     });
-    it('does align intervals that are not a multiple of steps', function() {
+    it('does align intervals that are not a multiple of steps', () => {
       const range = alignRange(1, 5, 3);
       expect(range.start).toEqual(0);
       expect(range.end).toEqual(6);
     });
   });
 
-  describe('Prometheus regular escaping', function() {
-    it('should not escape non-string', function() {
+  describe('Prometheus regular escaping', () => {
+    it('should not escape non-string', () => {
       expect(prometheusRegularEscape(12)).toEqual(12);
     });
-    it('should not escape simple string', function() {
+    it('should not escape simple string', () => {
       expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression');
     });
-    it("should escape '", function() {
+    it("should escape '", () => {
       expect(prometheusRegularEscape("looking'glass")).toEqual("looking\\\\'glass");
     });
-    it('should escape multiple characters', function() {
+    it('should escape multiple characters', () => {
       expect(prometheusRegularEscape("'looking'glass'")).toEqual("\\\\'looking\\\\'glass\\\\'");
     });
   });
 
-  describe('Prometheus regexes escaping', function() {
-    it('should not escape simple string', function() {
+  describe('Prometheus regexes escaping', () => {
+    it('should not escape simple string', () => {
       expect(prometheusSpecialRegexEscape('cryptodepression')).toEqual('cryptodepression');
     });
-    it('should escape $^*+?.()\\', function() {
+    it('should escape $^*+?.()\\', () => {
       expect(prometheusSpecialRegexEscape("looking'glass")).toEqual("looking\\\\'glass");
       expect(prometheusSpecialRegexEscape('looking{glass')).toEqual('looking\\\\{glass');
       expect(prometheusSpecialRegexEscape('looking}glass')).toEqual('looking\\\\}glass');
@@ -208,7 +208,7 @@ describe('PrometheusDatasource', () => {
       expect(prometheusSpecialRegexEscape('looking)glass')).toEqual('looking\\\\)glass');
       expect(prometheusSpecialRegexEscape('looking\\glass')).toEqual('looking\\\\\\\\glass');
     });
-    it('should escape multiple special characters', function() {
+    it('should escape multiple special characters', () => {
       expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?');
     });
   });
@@ -246,3 +246,797 @@ describe('PrometheusDatasource', () => {
     });
   });
 });
+
+const SECOND = 1000;
+const MINUTE = 60 * SECOND;
+const HOUR = 60 * MINUTE;
+
+const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
+
+let ctx = <any>{};
+let instanceSettings = {
+  url: 'proxied',
+  directUrl: 'direct',
+  user: 'test',
+  password: 'mupp',
+  jsonData: { httpMethod: 'GET' },
+};
+let backendSrv = <any>{
+  datasourceRequest: jest.fn(),
+};
+
+let templateSrv = {
+  replace: jest.fn(str => str),
+};
+
+let timeSrv = {
+  timeRange: () => {
+    return { to: { diff: () => 2000 }, from: '' };
+  },
+};
+
+describe('PrometheusDatasource', () => {
+  describe('When querying prometheus with one target using query editor target spec', () => {
+    var results;
+    var query = {
+      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
+      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
+      interval: '60s',
+    };
+    // Interval alignment with step
+    var urlExpected =
+      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
+
+    beforeEach(async () => {
+      let response = {
+        data: {
+          status: 'success',
+          data: {
+            resultType: 'matrix',
+            result: [
+              {
+                metric: { __name__: 'test', job: 'testjob' },
+                values: [[60, '3846']],
+              },
+            ],
+          },
+        },
+      };
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+
+      await ctx.ds.query(query).then(function(data) {
+        results = data;
+      });
+    });
+
+    it('should generate the correct query', () => {
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should return series list', async () => {
+      expect(results.data.length).toBe(1);
+      expect(results.data[0].target).toBe('test{job="testjob"}');
+    });
+  });
+  describe('When querying prometheus with one target which return multiple series', () => {
+    var results;
+    var start = 60;
+    var end = 360;
+    var step = 60;
+
+    var query = {
+      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
+      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
+      interval: '60s',
+    };
+
+    beforeEach(async () => {
+      let response = {
+        status: 'success',
+        data: {
+          data: {
+            resultType: 'matrix',
+            result: [
+              {
+                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
+                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
+              },
+              {
+                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
+                values: [[start + step * 2, '4846']],
+              },
+            ],
+          },
+        },
+      };
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+
+      await ctx.ds.query(query).then(function(data) {
+        results = data;
+      });
+    });
+
+    it('should be same length', () => {
+      expect(results.data.length).toBe(2);
+      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
+      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
+    });
+
+    it('should fill null until first datapoint in response', () => {
+      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
+      expect(results.data[0].datapoints[0][0]).toBe(null);
+      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
+      expect(results.data[0].datapoints[1][0]).toBe(3846);
+    });
+    it('should fill null after last datapoint in response', () => {
+      var length = (end - start) / step + 1;
+      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
+      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
+      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
+      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
+    });
+    it('should fill null at gap between series', () => {
+      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
+      expect(results.data[0].datapoints[2][0]).toBe(null);
+      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
+      expect(results.data[1].datapoints[1][0]).toBe(null);
+      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
+      expect(results.data[1].datapoints[3][0]).toBe(null);
+    });
+  });
+  describe('When querying prometheus with one target and instant = true', () => {
+    var results;
+    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
+    var query = {
+      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
+      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
+      interval: '60s',
+    };
+
+    beforeEach(async () => {
+      let response = {
+        status: 'success',
+        data: {
+          data: {
+            resultType: 'vector',
+            result: [
+              {
+                metric: { __name__: 'test', job: 'testjob' },
+                value: [123, '3846'],
+              },
+            ],
+          },
+        },
+      };
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+
+      await ctx.ds.query(query).then(function(data) {
+        results = data;
+      });
+    });
+    it('should generate the correct query', () => {
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should return series list', () => {
+      expect(results.data.length).toBe(1);
+      expect(results.data[0].target).toBe('test{job="testjob"}');
+    });
+  });
+  describe('When performing annotationQuery', () => {
+    var results;
+
+    var options = {
+      annotation: {
+        expr: 'ALERTS{alertstate="firing"}',
+        tagKeys: 'job',
+        titleFormat: '{{alertname}}',
+        textFormat: '{{instance}}',
+      },
+      range: {
+        from: time({ seconds: 63 }),
+        to: time({ seconds: 123 }),
+      },
+    };
+
+    beforeEach(async () => {
+      let response = {
+        status: 'success',
+        data: {
+          data: {
+            resultType: 'matrix',
+            result: [
+              {
+                metric: {
+                  __name__: 'ALERTS',
+                  alertname: 'InstanceDown',
+                  alertstate: 'firing',
+                  instance: 'testinstance',
+                  job: 'testjob',
+                },
+                values: [[123, '1']],
+              },
+            ],
+          },
+        },
+      };
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+
+      await ctx.ds.annotationQuery(options).then(function(data) {
+        results = data;
+      });
+    });
+    it('should return annotation list', () => {
+      expect(results.length).toBe(1);
+      expect(results[0].tags).toContain('testjob');
+      expect(results[0].title).toBe('InstanceDown');
+      expect(results[0].text).toBe('testinstance');
+      expect(results[0].time).toBe(123 * 1000);
+    });
+  });
+
+  describe('When resultFormat is table and instant = true', () => {
+    var results;
+    var query = {
+      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
+      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
+      interval: '60s',
+    };
+
+    beforeEach(async () => {
+      let response = {
+        status: 'success',
+        data: {
+          data: {
+            resultType: 'vector',
+            result: [
+              {
+                metric: { __name__: 'test', job: 'testjob' },
+                value: [123, '3846'],
+              },
+            ],
+          },
+        },
+      };
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query).then(function(data) {
+        results = data;
+      });
+    });
+
+    it('should return result', () => {
+      expect(results).not.toBe(null);
+    });
+  });
+
+  describe('The "step" query parameter', () => {
+    var response = {
+      status: 'success',
+      data: {
+        data: {
+          resultType: 'matrix',
+          result: [],
+        },
+      },
+    };
+
+    it('should be min interval when greater than auto interval', async () => {
+      let query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'test',
+            interval: '10s',
+          },
+        ],
+        interval: '5s',
+      };
+      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+
+    it('step should never go below 1', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [{ expr: 'test' }],
+        interval: '100ms',
+      };
+      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+
+    it('should be auto interval when greater than min interval', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'test',
+            interval: '5s',
+          },
+        ],
+        interval: '10s',
+      };
+      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should result in querying fewer than 11000 data points', async () => {
+      var query = {
+        // 6 hour range
+        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
+        targets: [{ expr: 'test' }],
+        interval: '1s',
+      };
+      var end = 7 * 60 * 60;
+      var start = 60 * 60;
+      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should not apply min interval when interval * intervalFactor greater', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'test',
+            interval: '10s',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '5s',
+      };
+      // times get rounded up to interval
+      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should apply min interval when interval * intervalFactor smaller', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'test',
+            interval: '15s',
+            intervalFactor: 2,
+          },
+        ],
+        interval: '5s',
+      };
+      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should apply intervalFactor to auto interval when greater', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'test',
+            interval: '5s',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '10s',
+      };
+      // times get aligned to interval
+      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should not be affected by the 11000 data points limit when large enough', async () => {
+      var query = {
+        // 1 week range
+        range: { from: time({}), to: time({ hours: 7 * 24 }) },
+        targets: [
+          {
+            expr: 'test',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '10s',
+      };
+      var end = 7 * 24 * 60 * 60;
+      var start = 0;
+      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+    it('should be determined by the 11000 data points limit when too small', async () => {
+      var query = {
+        // 1 week range
+        range: { from: time({}), to: time({ hours: 7 * 24 }) },
+        targets: [
+          {
+            expr: 'test',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '5s',
+      };
+      var end = 7 * 24 * 60 * 60;
+      var start = 0;
+      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+    });
+  });
+
+  describe('The __interval and __interval_ms template variables', () => {
+    var response = {
+      status: 'success',
+      data: {
+        data: {
+          resultType: 'matrix',
+          result: [],
+        },
+      },
+    };
+
+    it('should be unchanged when auto interval is greater than min interval', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            interval: '5s',
+          },
+        ],
+        interval: '10s',
+        scopedVars: {
+          __interval: { text: '10s', value: '10s' },
+          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
+        },
+      };
+
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=60&end=420&step=10';
+
+      templateSrv.replace = jest.fn(str => str);
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '10s',
+          value: '10s',
+        },
+        __interval_ms: {
+          text: 10000,
+          value: 10000,
+        },
+      });
+    });
+    it('should be min interval when it is greater than auto interval', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            interval: '10s',
+          },
+        ],
+        interval: '5s',
+        scopedVars: {
+          __interval: { text: '5s', value: '5s' },
+          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
+        },
+      };
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=60&end=420&step=10';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      templateSrv.replace = jest.fn(str => str);
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '5s',
+          value: '5s',
+        },
+        __interval_ms: {
+          text: 5000,
+          value: 5000,
+        },
+      });
+    });
+    it('should account for intervalFactor', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            interval: '5s',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '10s',
+        scopedVars: {
+          __interval: { text: '10s', value: '10s' },
+          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
+        },
+      };
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=0&end=500&step=100';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      templateSrv.replace = jest.fn(str => str);
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '10s',
+          value: '10s',
+        },
+        __interval_ms: {
+          text: 10000,
+          value: 10000,
+        },
+      });
+
+      expect(query.scopedVars.__interval.text).toBe('10s');
+      expect(query.scopedVars.__interval.value).toBe('10s');
+      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
+      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
+    });
+    it('should be interval * intervalFactor when greater than min interval', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            interval: '10s',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '5s',
+        scopedVars: {
+          __interval: { text: '5s', value: '5s' },
+          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
+        },
+      };
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=50&end=450&step=50';
+
+      templateSrv.replace = jest.fn(str => str);
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '5s',
+          value: '5s',
+        },
+        __interval_ms: {
+          text: 5000,
+          value: 5000,
+        },
+      });
+    });
+    it('should be min interval when greater than interval * intervalFactor', async () => {
+      var query = {
+        // 6 minute range
+        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            interval: '15s',
+            intervalFactor: 2,
+          },
+        ],
+        interval: '5s',
+        scopedVars: {
+          __interval: { text: '5s', value: '5s' },
+          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
+        },
+      };
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=60&end=420&step=15';
+
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '5s',
+          value: '5s',
+        },
+        __interval_ms: {
+          text: 5000,
+          value: 5000,
+        },
+      });
+    });
+    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
+      var query = {
+        // 1 week range
+        range: { from: time({}), to: time({ hours: 7 * 24 }) },
+        targets: [
+          {
+            expr: 'rate(test[$__interval])',
+            intervalFactor: 10,
+          },
+        ],
+        interval: '5s',
+        scopedVars: {
+          __interval: { text: '5s', value: '5s' },
+          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
+        },
+      };
+      var end = 7 * 24 * 60 * 60;
+      var start = 0;
+      var urlExpected =
+        'proxied/api/v1/query_range?query=' +
+        encodeURIComponent('rate(test[$__interval])') +
+        '&start=' +
+        start +
+        '&end=' +
+        end +
+        '&step=60';
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      templateSrv.replace = jest.fn(str => str);
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query);
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('GET');
+      expect(res.url).toBe(urlExpected);
+
+      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
+        __interval: {
+          text: '5s',
+          value: '5s',
+        },
+        __interval_ms: {
+          text: 5000,
+          value: 5000,
+        },
+      });
+    });
+  });
+});
+
+describe('PrometheusDatasource for POST', () => {
+  let instanceSettings = {
+    url: 'proxied',
+    directUrl: 'direct',
+    user: 'test',
+    password: 'mupp',
+    jsonData: { httpMethod: 'POST' },
+  };
+
+  describe('When querying prometheus with one target using query editor target spec', () => {
+    var results;
+    var urlExpected = 'proxied/api/v1/query_range';
+    var dataExpected = {
+      query: 'test{job="testjob"}',
+      start: 1 * 60,
+      end: 3 * 60,
+      step: 60,
+    };
+    var query = {
+      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
+      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
+      interval: '60s',
+    };
+
+    beforeEach(async () => {
+      let response = {
+        status: 'success',
+        data: {
+          data: {
+            resultType: 'matrix',
+            result: [
+              {
+                metric: { __name__: 'test', job: 'testjob' },
+                values: [[2 * 60, '3846']],
+              },
+            ],
+          },
+        },
+      };
+      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
+      ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
+      await ctx.ds.query(query).then(function(data) {
+        results = data;
+      });
+    });
+    it('should generate the correct query', () => {
+      let res = backendSrv.datasourceRequest.mock.calls[0][0];
+      expect(res.method).toBe('POST');
+      expect(res.url).toBe(urlExpected);
+      expect(res.data).toEqual(dataExpected);
+    });
+    it('should return series list', () => {
+      expect(results.data.length).toBe(1);
+      expect(results.data[0].target).toBe('test{job="testjob"}');
+    });
+  });
+});
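The rewritten datasource tests replace $httpBackend expectations with a promise-returning jest.fn() on backendSrv: instead of flush() and verifyNoOutstandingExpectation(), each test awaits the query and asserts on the recorded call. A condensed sketch of that pattern; the identifiers (response, query, urlExpected, instanceSettings, q, templateSrv, timeSrv) are the ones defined in the tests above:

    backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
    await ctx.ds.query(query);                                    // resolves without an $http flush
    let res = backendSrv.datasourceRequest.mock.calls[0][0];      // first call, first argument
    expect(res.method).toBe('GET');
    expect(res.url).toBe(urlExpected);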

+ 0 - 683
public/app/plugins/datasource/prometheus/specs/datasource_specs.ts

@@ -1,683 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import moment from 'moment';
-import $ from 'jquery';
-import helpers from 'test/specs/helpers';
-import { PrometheusDatasource } from '../datasource';
-
-const SECOND = 1000;
-const MINUTE = 60 * SECOND;
-const HOUR = 60 * MINUTE;
-
-const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
-
-describe('PrometheusDatasource', function() {
-  var ctx = new helpers.ServiceTestContext();
-  var instanceSettings = {
-    url: 'proxied',
-    directUrl: 'direct',
-    user: 'test',
-    password: 'mupp',
-    jsonData: { httpMethod: 'GET' },
-  };
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(ctx.providePhase(['timeSrv']));
-
-  beforeEach(
-    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
-      ctx.$q = $q;
-      ctx.$httpBackend = $httpBackend;
-      ctx.$rootScope = $rootScope;
-      ctx.ds = $injector.instantiate(PrometheusDatasource, {
-        instanceSettings: instanceSettings,
-      });
-      $httpBackend.when('GET', /\.html$/).respond('');
-    })
-  );
-  describe('When querying prometheus with one target using query editor target spec', function() {
-    var results;
-    var query = {
-      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
-      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
-      interval: '60s',
-    };
-    // Interval alignment with step
-    var urlExpected =
-      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [
-          {
-            metric: { __name__: 'test', job: 'testjob' },
-            values: [[60, '3846']],
-          },
-        ],
-      },
-    };
-    beforeEach(function() {
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-    it('should generate the correct query', function() {
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should return series list', function() {
-      expect(results.data.length).to.be(1);
-      expect(results.data[0].target).to.be('test{job="testjob"}');
-    });
-  });
-  describe('When querying prometheus with one target which return multiple series', function() {
-    var results;
-    var start = 60;
-    var end = 360;
-    var step = 60;
-    var urlExpected =
-      'proxied/api/v1/query_range?query=' +
-      encodeURIComponent('test{job="testjob"}') +
-      '&start=' +
-      start +
-      '&end=' +
-      end +
-      '&step=' +
-      step;
-    var query = {
-      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
-      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
-      interval: '60s',
-    };
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [
-          {
-            metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
-            values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
-          },
-          {
-            metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
-            values: [[start + step * 2, '4846']],
-          },
-        ],
-      },
-    };
-    beforeEach(function() {
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-    it('should be same length', function() {
-      expect(results.data.length).to.be(2);
-      expect(results.data[0].datapoints.length).to.be((end - start) / step + 1);
-      expect(results.data[1].datapoints.length).to.be((end - start) / step + 1);
-    });
-    it('should fill null until first datapoint in response', function() {
-      expect(results.data[0].datapoints[0][1]).to.be(start * 1000);
-      expect(results.data[0].datapoints[0][0]).to.be(null);
-      expect(results.data[0].datapoints[1][1]).to.be((start + step * 1) * 1000);
-      expect(results.data[0].datapoints[1][0]).to.be(3846);
-    });
-    it('should fill null after last datapoint in response', function() {
-      var length = (end - start) / step + 1;
-      expect(results.data[0].datapoints[length - 2][1]).to.be((end - step * 1) * 1000);
-      expect(results.data[0].datapoints[length - 2][0]).to.be(3848);
-      expect(results.data[0].datapoints[length - 1][1]).to.be(end * 1000);
-      expect(results.data[0].datapoints[length - 1][0]).to.be(null);
-    });
-    it('should fill null at gap between series', function() {
-      expect(results.data[0].datapoints[2][1]).to.be((start + step * 2) * 1000);
-      expect(results.data[0].datapoints[2][0]).to.be(null);
-      expect(results.data[1].datapoints[1][1]).to.be((start + step * 1) * 1000);
-      expect(results.data[1].datapoints[1][0]).to.be(null);
-      expect(results.data[1].datapoints[3][1]).to.be((start + step * 3) * 1000);
-      expect(results.data[1].datapoints[3][0]).to.be(null);
-    });
-  });
-  describe('When querying prometheus with one target and instant = true', function() {
-    var results;
-    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
-    var query = {
-      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
-      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
-      interval: '60s',
-    };
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'vector',
-        result: [
-          {
-            metric: { __name__: 'test', job: 'testjob' },
-            value: [123, '3846'],
-          },
-        ],
-      },
-    };
-    beforeEach(function() {
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-    it('should generate the correct query', function() {
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should return series list', function() {
-      expect(results.data.length).to.be(1);
-      expect(results.data[0].target).to.be('test{job="testjob"}');
-    });
-  });
-  describe('When performing annotationQuery', function() {
-    var results;
-    var urlExpected =
-      'proxied/api/v1/query_range?query=' +
-      encodeURIComponent('ALERTS{alertstate="firing"}') +
-      '&start=60&end=180&step=60';
-    var options = {
-      annotation: {
-        expr: 'ALERTS{alertstate="firing"}',
-        tagKeys: 'job',
-        titleFormat: '{{alertname}}',
-        textFormat: '{{instance}}',
-      },
-      range: {
-        from: time({ seconds: 63 }),
-        to: time({ seconds: 123 }),
-      },
-    };
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [
-          {
-            metric: {
-              __name__: 'ALERTS',
-              alertname: 'InstanceDown',
-              alertstate: 'firing',
-              instance: 'testinstance',
-              job: 'testjob',
-            },
-            values: [[123, '1']],
-          },
-        ],
-      },
-    };
-    beforeEach(function() {
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.annotationQuery(options).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-    it('should return annotation list', function() {
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(1);
-      expect(results[0].tags).to.contain('testjob');
-      expect(results[0].title).to.be('InstanceDown');
-      expect(results[0].text).to.be('testinstance');
-      expect(results[0].time).to.be(123 * 1000);
-    });
-  });
-
-  describe('When resultFormat is table and instant = true', function() {
-    var results;
-    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
-    var query = {
-      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
-      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
-      interval: '60s',
-    };
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'vector',
-        result: [
-          {
-            metric: { __name__: 'test', job: 'testjob' },
-            value: [123, '3846'],
-          },
-        ],
-      },
-    };
-
-    beforeEach(function() {
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-
-    it('should return result', () => {
-      expect(results).not.to.be(null);
-    });
-  });
-
-  describe('The "step" query parameter', function() {
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [],
-      },
-    };
-
-    it('should be min interval when greater than auto interval', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'test',
-            interval: '10s',
-          },
-        ],
-        interval: '5s',
-      };
-      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-
-    it('step should never go below 1', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [{ expr: 'test' }],
-        interval: '100ms',
-      };
-      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-
-    it('should be auto interval when greater than min interval', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'test',
-            interval: '5s',
-          },
-        ],
-        interval: '10s',
-      };
-      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should result in querying fewer than 11000 data points', function() {
-      var query = {
-        // 6 hour range
-        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
-        targets: [{ expr: 'test' }],
-        interval: '1s',
-      };
-      var end = 7 * 60 * 60;
-      var start = 60 * 60;
-      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should not apply min interval when interval * intervalFactor greater', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'test',
-            interval: '10s',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '5s',
-      };
-      // times get rounded up to interval
-      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should apply min interval when interval * intervalFactor smaller', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'test',
-            interval: '15s',
-            intervalFactor: 2,
-          },
-        ],
-        interval: '5s',
-      };
-      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should apply intervalFactor to auto interval when greater', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'test',
-            interval: '5s',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '10s',
-      };
-      // times get aligned to interval
-      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should not not be affected by the 11000 data points limit when large enough', function() {
-      var query = {
-        // 1 week range
-        range: { from: time({}), to: time({ hours: 7 * 24 }) },
-        targets: [
-          {
-            expr: 'test',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '10s',
-      };
-      var end = 7 * 24 * 60 * 60;
-      var start = 0;
-      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should be determined by the 11000 data points limit when too small', function() {
-      var query = {
-        // 1 week range
-        range: { from: time({}), to: time({ hours: 7 * 24 }) },
-        targets: [
-          {
-            expr: 'test',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '5s',
-      };
-      var end = 7 * 24 * 60 * 60;
-      var start = 0;
-      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-  });
-
-  describe('The __interval and __interval_ms template variables', function() {
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [],
-      },
-    };
-
-    it('should be unchanged when auto interval is greater than min interval', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            interval: '5s',
-          },
-        ],
-        interval: '10s',
-        scopedVars: {
-          __interval: { text: '10s', value: '10s' },
-          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
-        },
-      };
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('10s');
-      expect(query.scopedVars.__interval.value).to.be('10s');
-      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
-    });
-    it('should be min interval when it is greater than auto interval', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            interval: '10s',
-          },
-        ],
-        interval: '5s',
-        scopedVars: {
-          __interval: { text: '5s', value: '5s' },
-          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
-        },
-      };
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('5s');
-      expect(query.scopedVars.__interval.value).to.be('5s');
-      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
-    });
-    it('should account for intervalFactor', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            interval: '5s',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '10s',
-        scopedVars: {
-          __interval: { text: '10s', value: '10s' },
-          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
-        },
-      };
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('10s');
-      expect(query.scopedVars.__interval.value).to.be('10s');
-      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
-    });
-    it('should be interval * intervalFactor when greater than min interval', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            interval: '10s',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '5s',
-        scopedVars: {
-          __interval: { text: '5s', value: '5s' },
-          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
-        },
-      };
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('5s');
-      expect(query.scopedVars.__interval.value).to.be('5s');
-      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
-    });
-    it('should be min interval when greater than interval * intervalFactor', function() {
-      var query = {
-        // 6 minute range
-        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            interval: '15s',
-            intervalFactor: 2,
-          },
-        ],
-        interval: '5s',
-        scopedVars: {
-          __interval: { text: '5s', value: '5s' },
-          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
-        },
-      };
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('5s');
-      expect(query.scopedVars.__interval.value).to.be('5s');
-      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
-    });
-    it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() {
-      var query = {
-        // 1 week range
-        range: { from: time({}), to: time({ hours: 7 * 24 }) },
-        targets: [
-          {
-            expr: 'rate(test[$__interval])',
-            intervalFactor: 10,
-          },
-        ],
-        interval: '5s',
-        scopedVars: {
-          __interval: { text: '5s', value: '5s' },
-          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
-        },
-      };
-      var end = 7 * 24 * 60 * 60;
-      var start = 0;
-      var urlExpected =
-        'proxied/api/v1/query_range?query=' +
-        encodeURIComponent('rate(test[60s])') +
-        '&start=' +
-        start +
-        '&end=' +
-        end +
-        '&step=60';
-      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
-      ctx.ds.query(query);
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-
-      expect(query.scopedVars.__interval.text).to.be('5s');
-      expect(query.scopedVars.__interval.value).to.be('5s');
-      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
-      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
-    });
-  });
-});
-
-describe('PrometheusDatasource for POST', function() {
-  var ctx = new helpers.ServiceTestContext();
-  var instanceSettings = {
-    url: 'proxied',
-    directUrl: 'direct',
-    user: 'test',
-    password: 'mupp',
-    jsonData: { httpMethod: 'POST' },
-  };
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(ctx.providePhase(['timeSrv']));
-
-  beforeEach(
-    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
-      ctx.$q = $q;
-      ctx.$httpBackend = $httpBackend;
-      ctx.$rootScope = $rootScope;
-      ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
-      $httpBackend.when('GET', /\.html$/).respond('');
-    })
-  );
-
-  describe('When querying prometheus with one target using query editor target spec', function() {
-    var results;
-    var urlExpected = 'proxied/api/v1/query_range';
-    var dataExpected = $.param({
-      query: 'test{job="testjob"}',
-      start: 1 * 60,
-      end: 3 * 60,
-      step: 60,
-    });
-    var query = {
-      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
-      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
-      interval: '60s',
-    };
-    var response = {
-      status: 'success',
-      data: {
-        resultType: 'matrix',
-        result: [
-          {
-            metric: { __name__: 'test', job: 'testjob' },
-            values: [[2 * 60, '3846']],
-          },
-        ],
-      },
-    };
-    beforeEach(function() {
-      ctx.$httpBackend.expectPOST(urlExpected, dataExpected).respond(response);
-      ctx.ds.query(query).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-    });
-    it('should generate the correct query', function() {
-      ctx.$httpBackend.verifyNoOutstandingExpectation();
-    });
-    it('should return series list', function() {
-      expect(results.data.length).to.be(1);
-      expect(results.data[0].target).to.be('test{job="testjob"}');
-    });
-  });
-});
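
The deleted block above is the tail of the Karma-era Prometheus datasource spec, which drove $httpBackend to assert the computed step parameter. A minimal sketch of one of those assertions under Jest, assuming the datasource constructor accepts injected deps and issues requests through backendSrv.datasourceRequest (the import path and wiring here are assumptions, not the repo's actual converted spec):

    import moment from 'moment';
    import { PrometheusDatasource } from '../datasource';

    describe('Prometheus step calculation (sketch)', () => {
      const instanceSettings = { url: 'proxied', jsonData: {} };
      const backendSrv = {
        datasourceRequest: jest.fn(() =>
          Promise.resolve({ data: { data: { resultType: 'matrix', result: [] } } })
        ),
      };
      const templateSrv = { replace: (s: string) => s };

      it('uses interval * intervalFactor when greater than min interval', async () => {
        const ds = new PrometheusDatasource(instanceSettings, {}, backendSrv, templateSrv);
        await ds.query({
          // same 6 minute range as the removed test
          range: { from: moment(1 * 60 * 1000), to: moment(7 * 60 * 1000) },
          targets: [{ expr: 'rate(test[$__interval])', interval: '10s', intervalFactor: 10 }],
          interval: '5s',
          scopedVars: {},
        });
        // 5s * 10 = 50s, which clears the 10s min interval
        expect(backendSrv.datasourceRequest.mock.calls[0][0].url).toContain('step=50');
      });
    });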

+ 3 - 0
public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts

@@ -34,6 +34,9 @@ describe('GraphCtrl', () => {
 
   beforeEach(() => {
     ctx.ctrl = new GraphCtrl(scope, injector, {});
+    ctx.ctrl.events = {
+      emit: () => {},
+    };
     ctx.ctrl.annotationsPromise = Promise.resolve({});
     ctx.ctrl.updateTimeRange();
   });

+ 19 - 18
public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts → public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts

@@ -1,25 +1,26 @@
-import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common';
-
 import moment from 'moment';
 import { HeatmapCtrl } from '../heatmap_ctrl';
-import helpers from '../../../../../test/specs/helpers';
 
 describe('HeatmapCtrl', function() {
-  var ctx = new helpers.ControllerTestContext();
+  let ctx = <any>{};
+
+  let $injector = {
+    get: () => {},
+  };
+
+  let $scope = {
+    $on: () => {},
+  };
 
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(angularMocks.module('grafana.controllers'));
-  beforeEach(
-    angularMocks.module(function($compileProvider) {
-      $compileProvider.preAssignBindingsEnabled(true);
-    })
-  );
+  HeatmapCtrl.prototype.panel = {
+    events: {
+      on: () => {},
+      emit: () => {},
+    },
+  };
 
-  beforeEach(ctx.providePhase());
-  beforeEach(ctx.createPanelController(HeatmapCtrl));
   beforeEach(() => {
-    ctx.ctrl.annotationsPromise = Promise.resolve({});
-    ctx.ctrl.updateTimeRange();
+    ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
   });
 
   describe('when time series are outside range', function() {
@@ -36,7 +37,7 @@ describe('HeatmapCtrl', function() {
     });
 
     it('should set datapointsOutside', function() {
-      expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range');
+      expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range');
     });
   });
 
@@ -61,7 +62,7 @@ describe('HeatmapCtrl', function() {
     });
 
    it('should not set data warning', function() {
-      expect(ctx.ctrl.dataWarning).to.be(null);
+      expect(ctx.ctrl.dataWarning).toBe(null);
     });
   });
 
@@ -72,7 +73,7 @@ describe('HeatmapCtrl', function() {
     });
 
     it('should set datapointsCount warning', function() {
-      expect(ctx.ctrl.dataWarning.title).to.be('No data points');
+      expect(ctx.ctrl.dataWarning.title).toBe('No data points');
     });
   });
 });
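
The heatmap conversion above swaps ControllerTestContext and the angularMocks bootstrapping for plain object stubs. The same shape recurs in the singlestat spec below, so it could live in a tiny helper; a sketch (hypothetical helper, not part of this commit):

    // Stub the Angular surface a panel ctrl touches during construction.
    function stubPanelCtrl(CtrlClass: any) {
      // normally wired up by PanelCtrl; no-ops are enough for these specs
      CtrlClass.prototype.panel = { events: { on: () => {}, emit: () => {} } };
      const $scope = { $on: () => {} };     // ctrl registers $destroy handlers
      const $injector = { get: () => {} };  // ctrl pulls services lazily
      return { $scope, $injector };
    }

    // usage, mirroring the beforeEach above:
    //   const { $scope, $injector } = stubPanelCtrl(HeatmapCtrl);
    //   ctx.ctrl = new HeatmapCtrl($scope, $injector, {});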

+ 80 - 79
public/app/plugins/panel/singlestat/specs/singlestat_specs.ts → public/app/plugins/panel/singlestat/specs/singlestat.jest.ts

@@ -1,29 +1,37 @@
-import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common';
-
-import helpers from 'test/specs/helpers';
 import { SingleStatCtrl } from '../module';
 import moment from 'moment';
 
 describe('SingleStatCtrl', function() {
-  var ctx = new helpers.ControllerTestContext();
-  var epoch = 1505826363746;
-  var clock;
+  let ctx = <any>{};
+  let epoch = 1505826363746;
+  Date.now = () => epoch;
+
+  let $scope = {
+    $on: () => {},
+  };
+
+  let $injector = {
+    get: () => {},
+  };
+
+  SingleStatCtrl.prototype.panel = {
+    events: {
+      on: () => {},
+      emit: () => {},
+    },
+  };
+  SingleStatCtrl.prototype.dashboard = {
+    isTimezoneUtc: jest.fn(() => true),
+  };
+  SingleStatCtrl.prototype.events = {
+    on: () => {},
+  };
 
   function singleStatScenario(desc, func) {
     describe(desc, function() {
       ctx.setup = function(setupFunc) {
-        beforeEach(angularMocks.module('grafana.services'));
-        beforeEach(angularMocks.module('grafana.controllers'));
-        beforeEach(
-          angularMocks.module(function($compileProvider) {
-            $compileProvider.preAssignBindingsEnabled(true);
-          })
-        );
-
-        beforeEach(ctx.providePhase());
-        beforeEach(ctx.createPanelController(SingleStatCtrl));
-
         beforeEach(function() {
+          ctx.ctrl = new SingleStatCtrl($scope, $injector, {});
           setupFunc();
           ctx.ctrl.onDataReceived(ctx.data);
           ctx.data = ctx.ctrl.data;
@@ -40,12 +48,12 @@ describe('SingleStatCtrl', function() {
     });
 
     it('Should use series avg as default main value', function() {
-      expect(ctx.data.value).to.be(15);
-      expect(ctx.data.valueRounded).to.be(15);
+      expect(ctx.data.value).toBe(15);
+      expect(ctx.data.valueRounded).toBe(15);
     });
 
    it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be('15');
+      expect(ctx.data.valueFormatted).toBe('15');
     });
   });
 
@@ -56,12 +64,12 @@ describe('SingleStatCtrl', function() {
     });
 
     it('Should use series avg as default main value', function() {
-      expect(ctx.data.value).to.be(0);
-      expect(ctx.data.valueRounded).to.be(0);
+      expect(ctx.data.value).toBe(0);
+      expect(ctx.data.valueRounded).toBe(0);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be('test.cpu1');
+      expect(ctx.data.valueFormatted).toBe('test.cpu1');
     });
   });
 
@@ -70,28 +78,29 @@ describe('SingleStatCtrl', function() {
       ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
       ctx.ctrl.panel.valueName = 'last_time';
       ctx.ctrl.panel.format = 'dateTimeAsIso';
+      ctx.ctrl.dashboard.isTimezoneUtc = () => false;
     });
 
     it('Should use time instead of value', function() {
-      expect(ctx.data.value).to.be(1505634997920);
-      expect(ctx.data.valueRounded).to.be(1505634997920);
+      expect(ctx.data.value).toBe(1505634997920);
+      expect(ctx.data.valueRounded).toBe(1505634997920);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss'));
+      expect(moment(ctx.data.valueFormatted).valueOf()).toBe(1505634997000);
     });
   });
 
   singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) {
     ctx.setup(function() {
-      ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
+      ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }];
       ctx.ctrl.panel.valueName = 'last_time';
       ctx.ctrl.panel.format = 'dateTimeAsIso';
-      ctx.setIsUtc(true);
+      ctx.ctrl.dashboard.isTimezoneUtc = () => true;
     });
 
-    it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss'));
+    it('should set formatted value', function() {
+      expect(ctx.data.valueFormatted).toBe('1970-01-01 00:00:05');
     });
   });
 
@@ -100,36 +109,33 @@ describe('SingleStatCtrl', function() {
       ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
       ctx.ctrl.panel.valueName = 'last_time';
       ctx.ctrl.panel.format = 'dateTimeAsUS';
+      ctx.ctrl.dashboard.isTimezoneUtc = () => false;
     });
 
     it('Should use time instead of value', function() {
-      expect(ctx.data.value).to.be(1505634997920);
-      expect(ctx.data.valueRounded).to.be(1505634997920);
+      expect(ctx.data.value).toBe(1505634997920);
+      expect(ctx.data.valueRounded).toBe(1505634997920);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a'));
+      expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a'));
     });
   });
 
   singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) {
     ctx.setup(function() {
-      ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
+      ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }];
       ctx.ctrl.panel.valueName = 'last_time';
       ctx.ctrl.panel.format = 'dateTimeAsUS';
-      ctx.setIsUtc(true);
+      ctx.ctrl.dashboard.isTimezoneUtc = () => true;
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a'));
+      expect(ctx.data.valueFormatted).toBe('01/01/1970 12:00:05 am');
     });
   });
 
   singleStatScenario('showing last time from now instead of value', function(ctx) {
-    beforeEach(() => {
-      clock = sinon.useFakeTimers(epoch);
-    });
-
     ctx.setup(function() {
       ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
       ctx.ctrl.panel.valueName = 'last_time';
@@ -137,37 +143,24 @@ describe('SingleStatCtrl', function() {
     });
 
     it('Should use time instead of value', function() {
-      expect(ctx.data.value).to.be(1505634997920);
-      expect(ctx.data.valueRounded).to.be(1505634997920);
+      expect(ctx.data.value).toBe(1505634997920);
+      expect(ctx.data.valueRounded).toBe(1505634997920);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be('2 days ago');
-    });
-
-    afterEach(() => {
-      clock.restore();
+      expect(ctx.data.valueFormatted).toBe('2 days ago');
     });
   });
 
   singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) {
-    beforeEach(() => {
-      clock = sinon.useFakeTimers(epoch);
-    });
-
     ctx.setup(function() {
       ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }];
       ctx.ctrl.panel.valueName = 'last_time';
       ctx.ctrl.panel.format = 'dateTimeFromNow';
-      ctx.setIsUtc(true);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be('2 days ago');
-    });
-
-    afterEach(() => {
-      clock.restore();
+      expect(ctx.data.valueFormatted).toBe('2 days ago');
     });
   });
 
@@ -176,15 +169,17 @@ describe('SingleStatCtrl', function() {
   ) {
     ctx.setup(function() {
       ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }];
+      ctx.ctrl.panel.valueName = 'avg';
+      ctx.ctrl.panel.format = 'none';
     });
 
     it('Should be rounded', function() {
-      expect(ctx.data.value).to.be(99.999495);
-      expect(ctx.data.valueRounded).to.be(100);
+      expect(ctx.data.value).toBe(99.999495);
+      expect(ctx.data.valueRounded).toBe(100);
     });
 
     it('should set formatted value', function() {
-      expect(ctx.data.valueFormatted).to.be('100');
+      expect(ctx.data.valueFormatted).toBe('100');
     });
   });
 
@@ -195,15 +190,15 @@ describe('SingleStatCtrl', function() {
     });
 
     it('value should remain', function() {
-      expect(ctx.data.value).to.be(9.9);
+      expect(ctx.data.value).toBe(9.9);
     });
 
     it('round should be rounded up', function() {
-      expect(ctx.data.valueRounded).to.be(10);
+      expect(ctx.data.valueRounded).toBe(10);
     });
 
     it('Should replace value with text', function() {
-      expect(ctx.data.valueFormatted).to.be('OK');
+      expect(ctx.data.valueFormatted).toBe('OK');
     });
   });
 
@@ -215,7 +210,7 @@ describe('SingleStatCtrl', function() {
     });
 
     it('Should replace value with text OK', function() {
-      expect(ctx.data.valueFormatted).to.be('OK');
+      expect(ctx.data.valueFormatted).toBe('OK');
     });
   });
 
@@ -227,7 +222,7 @@ describe('SingleStatCtrl', function() {
     });
 
     it('Should replace value with text NOT OK', function() {
-      expect(ctx.data.valueFormatted).to.be('NOT OK');
+      expect(ctx.data.valueFormatted).toBe('NOT OK');
     });
   });
 
@@ -243,16 +238,20 @@ describe('SingleStatCtrl', function() {
     singleStatScenario('with default values', function(ctx) {
       ctx.setup(function() {
         ctx.data = tableData;
+        ctx.ctrl.panel = {
+          emit: () => {},
+        };
         ctx.ctrl.panel.tableColumn = 'mean';
+        ctx.ctrl.panel.format = 'none';
       });
 
       it('Should use first rows value as default main value', function() {
-        expect(ctx.data.value).to.be(15);
-        expect(ctx.data.valueRounded).to.be(15);
+        expect(ctx.data.value).toBe(15);
+        expect(ctx.data.valueRounded).toBe(15);
       });
 
       it('should set formatted value', function() {
-        expect(ctx.data.valueFormatted).to.be('15');
+        expect(ctx.data.valueFormatted).toBe('15');
       });
     });
 
@@ -263,7 +262,7 @@ describe('SingleStatCtrl', function() {
       });
 
       it('Should set column to first column that is not time', function() {
-        expect(ctx.ctrl.panel.tableColumn).to.be('test1');
+        expect(ctx.ctrl.panel.tableColumn).toBe('test1');
       });
     });
 
@@ -273,16 +272,17 @@ describe('SingleStatCtrl', function() {
       ctx.setup(function() {
         ctx.data = tableData;
         ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2'];
+        ctx.ctrl.panel.mappingType = 0;
         ctx.ctrl.panel.tableColumn = 'mean';
       });
 
       it('Should be rounded', function() {
-        expect(ctx.data.value).to.be(99.99999);
-        expect(ctx.data.valueRounded).to.be(100);
+        expect(ctx.data.value).toBe(99.99999);
+        expect(ctx.data.valueRounded).toBe(100);
       });
 
      it('should set formatted value', function() {
-        expect(ctx.data.valueFormatted).to.be('100');
+        expect(ctx.data.valueFormatted).toBe('100');
       });
     });
 
@@ -290,20 +290,21 @@ describe('SingleStatCtrl', function() {
       ctx.setup(function() {
         ctx.data = tableData;
         ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2'];
+        ctx.ctrl.panel.mappingType = 2;
         ctx.ctrl.panel.tableColumn = 'mean';
         ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }];
       });
 
       it('value should remain', function() {
-        expect(ctx.data.value).to.be(9.9);
+        expect(ctx.data.value).toBe(9.9);
       });
 
       it('round should be rounded up', function() {
-        expect(ctx.data.valueRounded).to.be(10);
+        expect(ctx.data.valueRounded).toBe(10);
       });
 
       it('Should replace value with text', function() {
-        expect(ctx.data.valueFormatted).to.be('OK');
+        expect(ctx.data.valueFormatted).toBe('OK');
       });
     });
 
@@ -317,7 +318,7 @@ describe('SingleStatCtrl', function() {
       });
 
       it('Should replace value with text OK', function() {
-        expect(ctx.data.valueFormatted).to.be('OK');
+        expect(ctx.data.valueFormatted).toBe('OK');
       });
     });
 
@@ -331,7 +332,7 @@ describe('SingleStatCtrl', function() {
       });
 
       it('Should replace value with text NOT OK', function() {
-        expect(ctx.data.valueFormatted).to.be('NOT OK');
+        expect(ctx.data.valueFormatted).toBe('NOT OK');
       });
     });
 
@@ -343,7 +344,7 @@ describe('SingleStatCtrl', function() {
       });
 
       it('Should replace value with text NOT OK', function() {
-        expect(ctx.data.valueFormatted).to.be('ignore1');
+        expect(ctx.data.valueFormatted).toBe('ignore1');
       });
     });
 
@@ -355,7 +356,7 @@ describe('SingleStatCtrl', function() {
       });
 
       it('Should return zero', function() {
-        expect(ctx.data.value).to.be(0);
+        expect(ctx.data.value).toBe(0);
       });
     });
   });
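
Two determinism tricks carry this conversion: Date.now is pinned so the relative dateTimeFromNow assertions ('2 days ago') stay stable, and dashboard.isTimezoneUtc is stubbed so absolute formats can be compared against literal strings. A sketch of the same setup that also restores the global clock afterwards, which the spec above skips:

    const epoch = 1505826363746;
    const realNow = Date.now;

    beforeAll(() => {
      Date.now = () => epoch; // freeze "now" for dateTimeFromNow
    });
    afterAll(() => {
      Date.now = realNow; // restore so later suites see real time
    });

    SingleStatCtrl.prototype.dashboard = {
      // force UTC so '1970-01-01 00:00:05' can be asserted exactly
      isTimezoneUtc: jest.fn(() => true),
    };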

+ 1 - 0
public/sass/_grafana.scss

@@ -93,6 +93,7 @@
 @import 'components/form_select_box';
 @import 'components/user-picker';
 @import 'components/description-picker';
+@import 'components/delete_button';
 
 // PAGES
 @import 'pages/login';

+ 3 - 1
public/sass/_variables.dark.scss

@@ -44,7 +44,6 @@ $brand-success: $green;
 $brand-warning: $brand-primary;
 $brand-danger: $red;
 
-$query-blue: $blue;
 $query-red: $red;
 $query-green: $green;
 $query-purple: $purple;
@@ -347,3 +346,6 @@ $diff-json-changed-fg: $gray-5;
 $diff-json-changed-num: $text-color;
 
 $diff-json-icon: $gray-7;
+
+// Submenu
+$variable-option-bg: $blue-dark;

+ 14 - 12
public/sass/_variables.light.scss

@@ -30,8 +30,8 @@ $white: #fff;
 
 // Accent colors
 // -------------------------
-$blue: #61c2f2;
-$blue-dark: #0083b3;
+$blue: #0083b3;
+$blue-light: #00a8e6;
 $green: #3aa655;
 $red: #d44939;
 $yellow: #ff851b;
@@ -45,7 +45,6 @@ $brand-success: $green;
 $brand-warning: $orange;
 $brand-danger: $red;
 
-$query-blue: $blue-dark;
 $query-red: $red;
 $query-green: $green;
 $query-purple: $purple;
@@ -82,7 +81,7 @@ $page-gradient: linear-gradient(-60deg, $gray-7, #f5f6f9 70%, $gray-7 98%);
 $link-color: $gray-1;
 $link-color-disabled: lighten($link-color, 30%);
 $link-hover-color: darken($link-color, 20%);
-$external-link-color: $blue;
+$external-link-color: $blue-light;
 
 // Typography
 // -------------------------
@@ -150,8 +149,8 @@ $scrollbarBorder: $gray-4;
 $btn-primary-bg: $brand-primary;
 $btn-primary-bg-hl: lighten($brand-primary, 8%);
 
-$btn-secondary-bg: $blue-dark;
-$btn-secondary-bg-hl: lighten($blue-dark, 4%);
+$btn-secondary-bg: $blue;
+$btn-secondary-bg-hl: lighten($blue, 4%);
 
 $btn-success-bg: lighten($green, 3%);
 $btn-success-bg-hl: darken($green, 3%);
@@ -168,7 +167,7 @@ $btn-inverse-text-color: $gray-1;
 $btn-inverse-text-shadow: 0 1px 0 rgba(255, 255, 255, 0.4);
 
 $btn-active-bg: $white;
-$btn-active-text-color: $blue-dark;
+$btn-active-text-color: $blue;
 
 $btn-link-color: $gray-1;
 
@@ -219,8 +218,8 @@ $search-filter-box-bg: $gray-7;
 
 // Typeahead
 $typeahead-shadow: 0 5px 10px 0 $gray-5;
-$typeahead-selected-bg: lighten($blue, 25%);
-$typeahead-selected-color: $blue-dark;
+$typeahead-selected-bg: lighten($blue, 57%);
+$typeahead-selected-color: $blue;
 
 // Dropdowns
 // -------------------------
@@ -285,7 +284,7 @@ $info-text-color: $blue;
 $alert-error-bg: linear-gradient(90deg, #d44939, #e04d3d);
 $alert-success-bg: linear-gradient(90deg, #3aa655, #47b274);
 $alert-warning-bg: linear-gradient(90deg, #d44939, #e04d3d);
-$alert-info-bg: $blue-dark;
+$alert-info-bg: $blue;
 
 // popover
 $popover-bg: $page-bg;
@@ -293,7 +292,7 @@ $popover-color: $text-color;
 $popover-border-color: $gray-5;
 $popover-shadow: 0 0 20px $white;
 
-$popover-help-bg: $blue-dark;
+$popover-help-bg: $blue;
 $popover-help-color: $gray-6;
 $popover-error-bg: $btn-danger-bg;
 
@@ -310,7 +309,7 @@ $graph-tooltip-bg: $gray-5;
 $checkboxImageUrl: '../img/checkbox_white.png';
 
 // info box
-$info-box-background: linear-gradient(100deg, $blue-dark, darken($blue-dark, 5%));
+$info-box-background: linear-gradient(100deg, $blue, darken($blue, 5%));
 $info-box-color: $gray-7;
 
 // footer
@@ -356,3 +355,6 @@ $diff-json-new: #664e33;
 $diff-json-changed-fg: $gray-6;
 $diff-json-changed-num: $gray-4;
 $diff-json-icon: $gray-4;
+
+// Submenu
+$variable-option-bg: $blue-light;

+ 50 - 0
public/sass/components/_delete_button.scss

@@ -0,0 +1,50 @@
+// sets a fixed width so that the rest of the table
+// isn't affected by the animation
+.delete-button-container {
+  width: 24px;
+  direction: rtl;
+  display: flex;
+  align-items: center;
+}
+
+// this container is used to make sure confirm-delete isn't
+// shown outside of the table
+.confirm-delete-container {
+  overflow: hidden;
+  width: 145px;
+  position: absolute;
+  z-index: 1;
+}
+
+.delete-button {
+  position: absolute;
+
+  &.show {
+    opacity: 1;
+    transition: opacity 0.1s ease;
+    z-index: 2;
+  }
+
+  &.hide {
+    opacity: 0;
+    transition: opacity 0.1s ease;
+    z-index: 0;
+  }
+}
+
+.confirm-delete {
+  display: flex;
+  align-items: flex-start;
+
+  &.show {
+    opacity: 1;
+    transition: opacity 0.08s ease-out, transform 0.1s ease-out;
+    transform: translateX(0);
+  }
+
+  &.hide {
+    opacity: 0;
+    transition: opacity 0.12s ease-in, transform 0.14s ease-in;
+    transform: translateX(100px);
+  }
+}
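
These show/hide pairs are meant to be driven by a component that keeps both elements mounted and only swaps the class, so opacity and the translateX slide can transition between states. A minimal React sketch of that wiring (hypothetical component; the actual DeleteButton may differ):

    import React, { PureComponent } from 'react';

    interface Props { onConfirm: () => void; }
    interface State { confirming: boolean; }

    export class DeleteButtonSketch extends PureComponent<Props, State> {
      state: State = { confirming: false };

      render() {
        const { confirming } = this.state;
        return (
          <span className="delete-button-container">
            <a className={'delete-button ' + (confirming ? 'hide' : 'show')}
               onClick={() => this.setState({ confirming: true })}>
              <i className="fa fa-trash" />
            </a>
            <span className="confirm-delete-container">
              <span className={'confirm-delete ' + (confirming ? 'show' : 'hide')}>
                <a onClick={() => this.setState({ confirming: false })}>Cancel</a>
                <a onClick={this.props.onConfirm}>Delete</a>
              </span>
            </span>
          </span>
        );
      }
    }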

+ 3 - 3
public/sass/components/_query_editor.scss

@@ -1,11 +1,11 @@
 .query-keyword {
   font-weight: $font-weight-semi-bold;
-  color: $query-blue;
+  color: $blue;
 }
 
 .gf-form-disabled {
   .query-keyword {
-    color: darken($query-blue, 20%);
+    color: darken($blue, 20%);
   }
 }
 
@@ -63,7 +63,7 @@
   }
   .gf-form-query-letter-cell-letter {
     font-weight: bold;
-    color: $query-blue;
+    color: $blue;
   }
   .gf-form-query-letter-cell-ds {
     color: $text-color-weak;

+ 2 - 1
public/sass/components/_slate_editor.scss

@@ -71,6 +71,7 @@
     .typeahead-item-hint {
       font-size: $font-size-xs;
       color: $text-color;
+      white-space: normal;
     }
   }
 }
@@ -122,7 +123,7 @@
   .token.attr-value,
   .token.keyword,
   .token.class-name {
-    color: $query-blue;
+    color: $blue;
   }
 
   .token.regex,

+ 1 - 1
public/sass/components/_submenu.scss

@@ -138,7 +138,7 @@
 .variable-option {
   &:hover,
   &.highlighted {
-    background-color: $blue-dark;
+    background-color: $variable-option-bg;
   }
 }
 

+ 1 - 1
public/sass/components/_timepicker.scss

@@ -77,7 +77,7 @@
     border: none;
     color: $text-color;
     &.active span {
-      color: $query-blue;
+      color: $blue;
       font-weight: bold;
     }
     .text-info {

+ 9 - 1
scripts/webpack/webpack.hot.js

@@ -20,6 +20,7 @@ module.exports = merge(common, {
     path: path.resolve(__dirname, '../../public/build'),
     filename: '[name].[hash].js',
     publicPath: "/public/build/",
+    pathinfo: false,
   },
 
   resolve: {
@@ -37,6 +38,12 @@ module.exports = merge(common, {
     }
   },
 
+  optimization: {
+    removeAvailableModules: false,
+    removeEmptyChunks: false,
+    splitChunks: false,
+  },
+
   module: {
     rules: [
       {
@@ -56,7 +63,8 @@ module.exports = merge(common, {
         {
           loader: 'ts-loader',
           options: {
-            transpileOnly: true
+            transpileOnly: true,
+            experimentalWatchApi: true
           },
         }],
       },
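
The hot-reload config trades production niceties for rebuild speed: pathinfo comments, module-graph pruning, empty-chunk cleanup and chunk splitting are all switched off, and ts-loader stops type checking (transpileOnly) while opting into TypeScript's incremental watch API. The same knobs with the rationale inline (webpack 4 semantics assumed):

    // dev-only speedups, merged into the common config via webpack-merge
    const devSpeedups = {
      output: {
        pathinfo: false, // skip emitting /* path */ comments into the bundle
      },
      optimization: {
        removeAvailableModules: false, // don't re-prune the module graph on each rebuild
        removeEmptyChunks: false,      // don't re-scan for empty chunks on each rebuild
        splitChunks: false,            // a single dev bundle needs no splitting
      },
    };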

+ 42 - 31
tsconfig.json

@@ -1,32 +1,43 @@
 {
-    "compilerOptions": {
-      "moduleResolution": "node",
-      "outDir": "public/dist",
-      "target": "es5",
-      "lib": ["es6", "dom"],
-      "rootDir": "public/",
-      "jsx": "react",
-      "module": "esnext",
-      "declaration": false,
-      "allowSyntheticDefaultImports": true,
-      "inlineSourceMap": false,
-      "sourceMap": true,
-      "noEmitOnError": false,
-      "emitDecoratorMetadata": false,
-      "experimentalDecorators": true,
-      "noImplicitReturns": true,
-      "noImplicitThis": false,
-      "noImplicitUseStrict":false,
-      "noImplicitAny": false,
-      "noUnusedLocals": true,
-      "baseUrl": "public",
-      "paths": {
-        "app": ["app"]
-      }
-    },
-    "include": [
-      "public/app/**/*.ts",
-      "public/app/**/*.tsx",
-      "public/test/**/*.ts"
-    ]
-}
+  "compilerOptions": {
+    "moduleResolution": "node",
+    "outDir": "public/dist",
+    "target": "es5",
+    "lib": [
+      "es6",
+      "dom"
+    ],
+    "rootDir": "public/",
+    "jsx": "react",
+    "module": "esnext",
+    "declaration": false,
+    "allowSyntheticDefaultImports": true,
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "importHelpers": true, // importing helper functions from tslib
+    "noEmitHelpers": true, // disable emitting inline helper functions
+    "removeComments": false, // comments are needed by angular injections
+    "inlineSourceMap": false,
+    "sourceMap": true,
+    "noEmitOnError": false,
+    "emitDecoratorMetadata": false,
+    "experimentalDecorators": true,
+    "noImplicitReturns": true,
+    "noImplicitThis": false,
+    "noImplicitUseStrict": false,
+    "noImplicitAny": false,
+    "noUnusedLocals": true,
+    "baseUrl": "public",
+    "pretty": true,
+    "paths": {
+      "app": [
+        "app"
+      ]
+    }
+  },
+  "include": [
+    "public/app/**/*.ts",
+    "public/app/**/*.tsx",
+    "public/test/**/*.ts"
+  ]
+}
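
The two new emit options work as a pair: importHelpers makes tsc import its downlevel helpers (__extends, __awaiter, ...) from tslib, and noEmitHelpers stops it from also inlining a copy into every output file. For a class hierarchy compiled to es5, the effect looks roughly like this:

    class Base {}
    class Child extends Base {}

    // Without importHelpers, each output file inlines its own __extends.
    // With "importHelpers": true and "noEmitHelpers": true the es5 output
    // instead begins with something like:
    //   var tslib_1 = require("tslib");
    //   ...
    //   tslib_1.__extends(Child, Base);
    // which is why tslib@^1.9.3 shows up in yarn.lock below.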

+ 6 - 2
yarn.lock

@@ -3101,7 +3101,7 @@ d3-request@1.0.6:
     d3-dsv "1"
     xmlhttprequest "1"
 
-d3-scale-chromatic@^1.1.1:
+d3-scale-chromatic@^1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz#7ee38ffcaa7ad55cfed83a6a668aac5570c653c4"
   dependencies:
@@ -7974,7 +7974,7 @@ mocha@^4.0.1:
     mkdirp "0.5.1"
     supports-color "4.4.0"
 
-moment@^2.18.1:
+moment@^2.22.2:
   version "2.22.2"
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66"
 
@@ -12029,6 +12029,10 @@ tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.0:
   version "1.9.2"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.2.tgz#8be0cc9a1f6dc7727c38deb16c2ebd1a2892988e"
 
+tslib@^1.9.3:
+  version "1.9.3"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286"
+
 tslint-loader@^3.5.3:
   version "3.6.0"
   resolved "https://registry.yarnpkg.com/tslint-loader/-/tslint-loader-3.6.0.tgz#12ed4d5ef57d68be25cd12692fb2108b66469d76"