Просмотр исходного кода

Merge pull request #12730 from marefr/sql_refactor

SQL datasources - backend refactor
Marcus Efraimsson 7 лет назад
Родитель
Commit
02ba467bf1

+ 27 - 1
devenv/datasources.yaml

@@ -51,12 +51,28 @@ datasources:
     user: grafana
     password: password
 
+  - name: gdev-mysql-ds-tests
+    type: mysql
+    url: localhost:3306
+    database: grafana_ds_tests
+    user: grafana
+    password: password
+
   - name: gdev-mssql
     type: mssql
     url: localhost:1433
     database: grafana
     user: grafana
-    password: "Password!"
+    secureJsonData:
+      password: Password!
+
+  - name: gdev-mssql-ds-tests
+    type: mssql
+    url: localhost:1433
+    database: grafanatest
+    user: grafana
+    secureJsonData:
+      password: Password!
 
   - name: gdev-postgres
     type: postgres
@@ -68,6 +84,16 @@ datasources:
     jsonData:
       sslmode: "disable"
 
+  - name: gdev-postgres-ds-tests
+    type: postgres
+    url: localhost:5432
+    database: grafanadstest
+    user: grafanatest
+    secureJsonData:
+      password: grafanatest
+    jsonData:
+      sslmode: "disable"
+
   - name: gdev-cloudwatch
     type: cloudwatch
     editable: true

+ 28 - 50
docker/blocks/mssql/dashboard.json → devenv/dev-dashboards/datasource_tests_mssql_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MSSQL",
-      "label": "MSSQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mssql",
-      "pluginName": "MSSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mssql",
-      "name": "MSSQL",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1520976748896,
+  "iteration": 1532618661457,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -149,14 +112,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -234,14 +201,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -313,11 +284,15 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL}",
+      "datasource": "gdev-mssql",
       "fontSize": "100%",
       "gridPos": {
         "h": 10,
@@ -371,13 +346,13 @@
   ],
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": ["gdev", "mssql", "fake-data-gen"],
   "templating": {
     "list": [
       {
         "allValue": null,
         "current": {},
-        "datasource": "${DS_MSSQL}",
+        "datasource": "gdev-mssql",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -387,6 +362,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -397,7 +373,7 @@
       {
         "allValue": null,
         "current": {},
-        "datasource": "${DS_MSSQL}",
+        "datasource": "gdev-mssql",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -407,6 +383,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -499,6 +476,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -533,7 +511,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - MSSQL",
+  "title": "Datasource tests - MSSQL",
   "uid": "86Js1xRmk",
-  "version": 11
+  "version": 1
 }

+ 64 - 77
docker/blocks/mssql_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_mssql_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MSSQL_TEST",
-      "label": "MSSQL Test",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mssql",
-      "pluginName": "Microsoft SQL Server"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mssql",
-      "name": "Microsoft SQL Server",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MSSQL_TEST}",
+        "datasource": "gdev-mssql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the mssql unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320861623,
+  "iteration": 1532618879985,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -962,7 +926,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1065,7 +1029,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1158,7 +1122,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1243,7 +1207,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1336,7 +1300,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1421,7 +1385,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1514,7 +1478,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1599,7 +1563,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1686,7 +1650,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1773,7 +1737,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1867,7 +1831,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1954,7 +1918,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2048,7 +2012,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2135,7 +2099,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2229,7 +2193,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2316,7 +2280,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2410,7 +2374,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MSSQL_TEST}",
+      "datasource": "gdev-mssql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2496,22 +2460,44 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": ["gdev", "mssql"],
   "templating": {
     "list": [
       {
         "allValue": "'ALL'",
-        "current": {},
-        "datasource": "${DS_MSSQL_TEST}",
+        "current": {
+          "selected": true,
+          "tags": [],
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mssql-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": false,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 0,
         "tagValuesQuery": "",
         "tags": [],
@@ -2564,6 +2550,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2598,7 +2585,7 @@
     ]
   },
   "timezone": "",
-  "title": "Microsoft SQL Server Data Source Test",
+  "title": "Datasource tests - MSSQL (unit test)",
   "uid": "GlAqcPgmz",
   "version": 58
 }

+ 20 - 47
docker/blocks/mysql/dashboard.json → devenv/dev-dashboards/datasource_tests_mysql_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MYSQL",
-      "label": "MySQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mysql",
-      "pluginName": "MySQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mysql",
-      "name": "MySQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523372133566,
+  "iteration": 1532620738041,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -161,7 +124,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -251,7 +214,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -332,7 +295,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL}",
+      "datasource": "gdev-mysql",
       "fontSize": "100%",
       "gridPos": {
         "h": 9,
@@ -390,6 +353,7 @@
   "schemaVersion": 16,
   "style": "dark",
   "tags": [
+    "gdev",
     "fake-data-gen",
     "mysql"
   ],
@@ -397,8 +361,11 @@
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_MYSQL}",
+        "current": {
+          "text": "America",
+          "value": "America"
+        },
+        "datasource": "gdev-mysql",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -408,6 +375,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -417,8 +385,11 @@
       },
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_MYSQL}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mysql",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -428,6 +399,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -520,6 +492,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -554,7 +527,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - MySQL",
+  "title": "Datasource tests - MySQL",
   "uid": "DGsCac3kz",
   "version": 8
 }

+ 60 - 75
docker/blocks/mysql_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_mysql_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_MYSQL_TEST",
-      "label": "MySQL TEST",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "mysql",
-      "pluginName": "MySQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "mysql",
-      "name": "MySQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_MYSQL_TEST}",
+        "datasource": "gdev-mysql-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the mysql unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320712115,
+  "iteration": 1532620354037,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -962,7 +926,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1059,7 +1023,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1152,7 +1116,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1237,7 +1201,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1330,7 +1294,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1415,7 +1379,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1508,7 +1472,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1593,7 +1557,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1687,7 +1651,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1774,7 +1738,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1868,7 +1832,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1955,7 +1919,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2049,7 +2013,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2136,7 +2100,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2230,7 +2194,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_MYSQL_TEST}",
+      "datasource": "gdev-mysql-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2316,22 +2280,42 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": ["gdev", "mysql"],
   "templating": {
     "list": [
       {
         "allValue": "",
-        "current": {},
-        "datasource": "${DS_MYSQL_TEST}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-mysql-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": true,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 0,
         "tagValuesQuery": "",
         "tags": [],
@@ -2384,6 +2368,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2418,7 +2403,7 @@
     ]
   },
   "timezone": "",
-  "title": "MySQL Data Source Test",
+  "title": "Datasource tests - MySQL (unittest)",
   "uid": "Hmf8FDkmz",
   "version": 12
 }

+ 36 - 51
docker/blocks/postgres/dashboard.json → devenv/dev-dashboards/datasource_tests_postgres_fakedata.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_POSTGRESQL",
-      "label": "PostgreSQL",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "postgres",
-      "pluginName": "PostgreSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": ""
-    },
-    {
-      "type": "datasource",
-      "id": "postgres",
-      "name": "PostgreSQL",
-      "version": "1.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": ""
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -52,8 +16,7 @@
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1518601837383,
+  "iteration": 1532620601931,
   "links": [],
   "panels": [
     {
@@ -63,7 +26,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -150,14 +113,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 18,
@@ -236,14 +203,18 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "aliasColors": {},
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -316,11 +287,15 @@
           "min": null,
           "show": true
         }
-      ]
+      ],
+      "yaxis": {
+        "align": false,
+        "alignLevel": null
+      }
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRESQL}",
+      "datasource": "gdev-postgres",
       "fontSize": "100%",
       "gridPos": {
         "h": 9,
@@ -377,6 +352,7 @@
   "schemaVersion": 16,
   "style": "dark",
   "tags": [
+    "gdev",
     "fake-data-gen",
     "postgres"
   ],
@@ -384,8 +360,11 @@
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRESQL}",
+        "current": {
+          "text": "America",
+          "value": "America"
+        },
+        "datasource": "gdev-postgres",
         "hide": 0,
         "includeAll": false,
         "label": "Datacenter",
@@ -395,6 +374,7 @@
         "query": "SELECT DISTINCT datacenter FROM grafana_metric",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -404,8 +384,11 @@
       },
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRESQL}",
+        "current": {
+          "text": "All",
+          "value": "$__all"
+        },
+        "datasource": "gdev-postgres",
         "hide": 0,
         "includeAll": true,
         "label": "Hostname",
@@ -415,6 +398,7 @@
         "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'",
         "refresh": 1,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -507,6 +491,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -541,7 +526,7 @@
     ]
   },
   "timezone": "",
-  "title": "Grafana Fake Data Gen - PostgreSQL",
+  "title": "Datasource tests - Postgres",
   "uid": "JYola5qzz",
-  "version": 1
+  "version": 4
 }

+ 65 - 76
docker/blocks/postgres_tests/dashboard.json → devenv/dev-dashboards/datasource_tests_postgres_unittest.json

@@ -1,40 +1,4 @@
 {
-  "__inputs": [
-    {
-      "name": "DS_POSTGRES_TEST",
-      "label": "Postgres TEST",
-      "description": "",
-      "type": "datasource",
-      "pluginId": "postgres",
-      "pluginName": "PostgreSQL"
-    }
-  ],
-  "__requires": [
-    {
-      "type": "grafana",
-      "id": "grafana",
-      "name": "Grafana",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "graph",
-      "name": "Graph",
-      "version": "5.0.0"
-    },
-    {
-      "type": "datasource",
-      "id": "postgres",
-      "name": "PostgreSQL",
-      "version": "5.0.0"
-    },
-    {
-      "type": "panel",
-      "id": "table",
-      "name": "Table",
-      "version": "5.0.0"
-    }
-  ],
   "annotations": {
     "list": [
       {
@@ -47,7 +11,7 @@
         "type": "dashboard"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#6ed0e0",
@@ -59,7 +23,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "rgba(255, 96, 96, 1)",
@@ -71,7 +35,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#7eb26d",
@@ -83,7 +47,7 @@
         "type": "tags"
       },
       {
-        "datasource": "${DS_POSTGRES_TEST}",
+        "datasource": "gdev-postgres-ds-tests",
         "enable": false,
         "hide": false,
         "iconColor": "#1f78c1",
@@ -96,16 +60,16 @@
       }
     ]
   },
+  "description": "Run the postgres unit tests to generate the data backing this dashboard",
   "editable": true,
   "gnetId": null,
   "graphTooltip": 0,
-  "id": null,
-  "iteration": 1523320929325,
+  "iteration": 1532619575136,
   "links": [],
   "panels": [
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 4,
@@ -152,7 +116,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -206,7 +170,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -260,7 +224,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -314,7 +278,7 @@
     },
     {
       "columns": [],
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fontSize": "100%",
       "gridPos": {
         "h": 3,
@@ -371,7 +335,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -454,7 +418,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -537,7 +501,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -620,7 +584,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -703,7 +667,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -786,7 +750,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 9,
@@ -869,7 +833,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -962,7 +926,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1047,7 +1011,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1140,7 +1104,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1225,7 +1189,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1318,7 +1282,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1403,7 +1367,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1496,7 +1460,7 @@
       "bars": false,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 2,
       "gridPos": {
         "h": 8,
@@ -1581,7 +1545,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1675,7 +1639,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1762,7 +1726,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1856,7 +1820,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -1943,7 +1907,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2037,7 +2001,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2124,7 +2088,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2218,7 +2182,7 @@
       "bars": true,
       "dashLength": 10,
       "dashes": false,
-      "datasource": "${DS_POSTGRES_TEST}",
+      "datasource": "gdev-postgres-ds-tests",
       "fill": 1,
       "gridPos": {
         "h": 8,
@@ -2304,22 +2268,46 @@
   "refresh": false,
   "schemaVersion": 16,
   "style": "dark",
-  "tags": [],
+  "tags": ["gdev", "postgres"],
   "templating": {
     "list": [
       {
         "allValue": null,
-        "current": {},
-        "datasource": "${DS_POSTGRES_TEST}",
+        "current": {
+          "selected": true,
+          "tags": [],
+          "text": "All",
+          "value": [
+            "$__all"
+          ]
+        },
+        "datasource": "gdev-postgres-ds-tests",
         "hide": 0,
         "includeAll": true,
         "label": "Metric",
         "multi": true,
         "name": "metric",
-        "options": [],
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": "$__all"
+          },
+          {
+            "selected": false,
+            "text": "Metric A",
+            "value": "Metric A"
+          },
+          {
+            "selected": false,
+            "text": "Metric B",
+            "value": "Metric B"
+          }
+        ],
         "query": "SELECT DISTINCT measurement FROM metric_values",
-        "refresh": 1,
+        "refresh": 0,
         "regex": "",
+        "skipUrlSync": false,
         "sort": 1,
         "tagValuesQuery": "",
         "tags": [],
@@ -2372,6 +2360,7 @@
         ],
         "query": "1s,10s,30s,1m,5m,10m",
         "refresh": 2,
+        "skipUrlSync": false,
         "type": "interval"
       }
     ]
@@ -2406,7 +2395,7 @@
     ]
   },
   "timezone": "",
-  "title": "Postgres Data Source Test",
+  "title": "Datasource tests - Postgres (unittest)",
   "uid": "vHQdlVziz",
-  "version": 14
+  "version": 17
 }

+ 19 - 19
pkg/tsdb/mssql/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type MsSqlMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type msSqlMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewMssqlMacroEngine() tsdb.SqlMacroEngine {
-	return &MsSqlMacroEngine{}
+func newMssqlMacroEngine() tsdb.SqlMacroEngine {
+	return &msSqlMacroEngine{}
 }
 
-func (m *MsSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__time":
 		if len(args) == 0 {
@@ -83,11 +83,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -97,16 +97,16 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -114,11 +114,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}

+ 1 - 1
pkg/tsdb/mssql/macros_test.go

@@ -14,7 +14,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := &MsSqlMacroEngine{}
+		engine := &msSqlMacroEngine{}
 		query := &tsdb.Query{
 			Model: simplejson.New(),
 		}

+ 25 - 243
pkg/tsdb/mssql/mssql.go

@@ -1,49 +1,40 @@
 package mssql
 
 import (
-	"container/list"
-	"context"
 	"database/sql"
 	"fmt"
 	"strconv"
 	"strings"
 
-	"math"
-
 	_ "github.com/denisenkom/go-mssqldb"
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type MssqlQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("mssql", NewMssqlQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("mssql", newMssqlQueryEndpoint)
 }
 
-func NewMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &MssqlQueryEndpoint{
-		log: log.New("tsdb.mssql"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewMssqlMacroEngine(),
-	}
+func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.mssql")
 
 	cnnstr := generateConnectionString(datasource)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("mssql", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "mssql",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		MetricColumnTypes: []string{"VARCHAR", "CHAR", "NVARCHAR", "NCHAR"},
+	}
+
+	rowTransformer := mssqlRowTransformer{
+		log: logger,
 	}
 
-	return endpoint, nil
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMssqlMacroEngine(), logger)
 }
 
 func generateConnectionString(datasource *models.DataSource) string {
@@ -70,71 +61,16 @@ func generateConnectionString(datasource *models.DataSource) string {
 	)
 }
 
-// Query is the main function for the MssqlQueryEndpoint
-func (e *MssqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
-}
-
-func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	columnCount := len(columnNames)
-
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, columnCount),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-
-		// check if there is a column named time
-		switch name {
-		case "time":
-			timeIndex = i
-		}
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("MsSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(columnTypes, rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds
-		// to make native mssql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-		table.Rows = append(table.Rows, values)
-	}
-
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
+type mssqlRowTransformer struct {
+	log log.Logger
 }
 
-func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
-	values := make([]interface{}, len(types))
-	valuePtrs := make([]interface{}, len(types))
+func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
+	valuePtrs := make([]interface{}, len(columnTypes))
 
-	for i, stype := range types {
-		e.log.Debug("type", "type", stype)
+	for i, stype := range columnTypes {
+		t.log.Debug("type", "type", stype)
 		valuePtrs[i] = &values[i]
 	}
 
@@ -144,17 +80,17 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.
 
 	// convert types not handled by denisenkom/go-mssqldb
 	// unhandled types are returned as []byte
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		if value, ok := values[i].([]byte); ok {
-			switch types[i].DatabaseTypeName() {
+			switch columnTypes[i].DatabaseTypeName() {
 			case "MONEY", "SMALLMONEY", "DECIMAL":
 				if v, err := strconv.ParseFloat(string(value), 64); err == nil {
 					values[i] = v
 				} else {
-					e.log.Debug("Rows", "Error converting numeric to float", value)
+					t.log.Debug("Rows", "Error converting numeric to float", value)
 				}
 			default:
-				e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value)
+				t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
 				values[i] = string(value)
 			}
 		}
@@ -162,157 +98,3 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.
 
 	return values, nil
 }
-
-func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "VARCHAR", "CHAR", "NVARCHAR", "NCHAR":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("MSSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(columnTypes, rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type CHAR, VARCHAR, NCHAR or NVARCHAR. metric column name: %s type: %s but datatype is %T", columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}

+ 19 - 11
pkg/tsdb/mssql/mssql_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
 	. "github.com/smartystreets/goconvey/convey"
@@ -19,8 +20,9 @@ import (
 // The tests require a MSSQL db named grafanatest and a user/password grafana/Password!
 // Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a
 // preconfigured MSSQL server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are vizualized as expected, see
+// devenv/README.md for setup instructions.
 // If needed, change the variable below to the IP address of the database.
 var serverIP = "localhost"
 
@@ -28,19 +30,25 @@ func TestMSSQL(t *testing.T) {
 	SkipConvey("MSSQL", t, func() {
 		x := InitMSSQLTestDB(t)
 
-		endpoint := &MssqlQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewMssqlMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.mssql"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newMssqlQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			sql := `
 					IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL

+ 19 - 19
pkg/tsdb/mysql/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type MySqlMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type mySqlMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewMysqlMacroEngine() tsdb.SqlMacroEngine {
-	return &MySqlMacroEngine{}
+func newMysqlMacroEngine() tsdb.SqlMacroEngine {
+	return &mySqlMacroEngine{}
 }
 
-func (m *MySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__timeEpoch", "__time":
 		if len(args) == 0 {
@@ -78,11 +78,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -92,16 +92,16 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -109,11 +109,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}

+ 1 - 1
pkg/tsdb/mysql/macros_test.go

@@ -12,7 +12,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := &MySqlMacroEngine{}
+		engine := &mySqlMacroEngine{}
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {

+ 22 - 243
pkg/tsdb/mysql/mysql.go

@@ -1,39 +1,24 @@
 package mysql
 
 import (
-	"container/list"
-	"context"
 	"database/sql"
 	"fmt"
-	"math"
 	"reflect"
 	"strconv"
 
 	"github.com/go-sql-driver/mysql"
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type MysqlQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("mysql", newMysqlQueryEndpoint)
 }
 
-func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &MysqlQueryEndpoint{
-		log: log.New("tsdb.mysql"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewMysqlMacroEngine(),
-	}
+func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.mysql")
 
 	cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true",
 		datasource.User,
@@ -42,85 +27,35 @@ func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin
 		datasource.Url,
 		datasource.Database,
 	)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("mysql", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "mysql",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		TimeColumnNames:   []string{"time", "time_sec"},
+		MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"},
 	}
 
-	return endpoint, nil
-}
-
-// Query is the main function for the MysqlExecutor
-func (e *MysqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
-}
-
-func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	columnCount := len(columnNames)
-
-	if err != nil {
-		return err
+	rowTransformer := mysqlRowTransformer{
+		log: logger,
 	}
 
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, columnCount),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	// check if there is a column named time
-	for i, col := range columnNames {
-		switch col {
-		case "time", "time_sec":
-			timeIndex = i
-		}
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		table.Rows = append(table.Rows, values)
-	}
-
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(), logger)
 }
 
-func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
-	types, err := rows.ColumnTypes()
-	if err != nil {
-		return nil, err
-	}
+type mysqlRowTransformer struct {
+	log log.Logger
+}
 
-	values := make([]interface{}, len(types))
+func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
 
 	for i := range values {
-		scanType := types[i].ScanType()
+		scanType := columnTypes[i].ScanType()
 		values[i] = reflect.New(scanType).Interface()
 
-		if types[i].DatabaseTypeName() == "BIT" {
+		if columnTypes[i].DatabaseTypeName() == "BIT" {
 			values[i] = new([]byte)
 		}
 	}
@@ -129,7 +64,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 		return nil, err
 	}
 
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		typeName := reflect.ValueOf(values[i]).Type().String()
 
 		switch typeName {
@@ -158,7 +93,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 			}
 		}
 
-		if types[i].DatabaseTypeName() == "DECIMAL" {
+		if columnTypes[i].DatabaseTypeName() == "DECIMAL" {
 			f, err := strconv.ParseFloat(values[i].(string), 64)
 
 			if err == nil {
@@ -171,159 +106,3 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er
 
 	return values, nil
 }
-
-func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time", "time_sec":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time or time_sec")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time/time_sec, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-			rowCount++
-
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}

+ 19 - 11
pkg/tsdb/mysql/mysql_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
@@ -21,8 +22,9 @@ import (
 // The tests require a MySQL db named grafana_ds_tests and a user/password grafana/password
 // Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a
 // preconfigured MySQL server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are visualized as expected, see
+// devenv/README.md for setup instructions.
 func TestMySQL(t *testing.T) {
 	// change to true to run the MySQL tests
 	runMySqlTests := false
@@ -35,19 +37,25 @@ func TestMySQL(t *testing.T) {
 	Convey("MySQL", t, func() {
 		x := InitMySQLTestDB(t)
 
-		endpoint := &MysqlQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewMysqlMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.mysql"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newMysqlQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists {
 				So(err, ShouldBeNil)

+ 19 - 19
pkg/tsdb/postgres/macros.go

@@ -14,18 +14,18 @@ import (
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type PostgresMacroEngine struct {
-	TimeRange *tsdb.TimeRange
-	Query     *tsdb.Query
+type postgresMacroEngine struct {
+	timeRange *tsdb.TimeRange
+	query     *tsdb.Query
 }
 
-func NewPostgresMacroEngine() tsdb.SqlMacroEngine {
-	return &PostgresMacroEngine{}
+func newPostgresMacroEngine() tsdb.SqlMacroEngine {
+	return &postgresMacroEngine{}
 }
 
-func (m *PostgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
-	m.TimeRange = timeRange
-	m.Query = query
+func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+	m.timeRange = timeRange
+	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
 	return result + str[lastIndex:]
 }
 
-func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__time":
 		if len(args) == 0 {
@@ -83,11 +83,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
 
-		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
@@ -97,16 +97,16 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 			return "", fmt.Errorf("error parsing interval %v", args[1])
 		}
 		if len(args) == 3 {
-			m.Query.Model.Set("fill", true)
-			m.Query.Model.Set("fillInterval", interval.Seconds())
+			m.query.Model.Set("fill", true)
+			m.query.Model.Set("fillInterval", interval.Seconds())
 			if args[2] == "NULL" {
-				m.Query.Model.Set("fillNull", true)
+				m.query.Model.Set("fillNull", true)
 			} else {
 				floatVal, err := strconv.ParseFloat(args[2], 64)
 				if err != nil {
 					return "", fmt.Errorf("error parsing fill value %v", args[2])
 				}
-				m.Query.Model.Set("fillValue", floatVal)
+				m.query.Model.Set("fillValue", floatVal)
 			}
 		}
 		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
@@ -114,11 +114,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__unixEpochFrom":
-		return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
 	case "__unixEpochTo":
-		return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
 	default:
 		return "", fmt.Errorf("Unknown macro %v", name)
 	}

+ 1 - 1
pkg/tsdb/postgres/macros_test.go

@@ -12,7 +12,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := NewPostgresMacroEngine()
+		engine := newPostgresMacroEngine()
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {

+ 25 - 244
pkg/tsdb/postgres/postgres.go

@@ -1,46 +1,38 @@
 package postgres
 
 import (
-	"container/list"
-	"context"
-	"fmt"
-	"math"
+	"database/sql"
 	"net/url"
 	"strconv"
 
 	"github.com/go-xorm/core"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 
-type PostgresQueryEndpoint struct {
-	sqlEngine tsdb.SqlEngine
-	log       log.Logger
-}
-
 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("postgres", NewPostgresQueryEndpoint)
+	tsdb.RegisterTsdbQueryEndpoint("postgres", newPostgresQueryEndpoint)
 }
 
-func NewPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	endpoint := &PostgresQueryEndpoint{
-		log: log.New("tsdb.postgres"),
-	}
-
-	endpoint.sqlEngine = &tsdb.DefaultSqlEngine{
-		MacroEngine: NewPostgresMacroEngine(),
-	}
+func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	logger := log.New("tsdb.postgres")
 
 	cnnstr := generateConnectionString(datasource)
-	endpoint.log.Debug("getEngine", "connection", cnnstr)
+	logger.Debug("getEngine", "connection", cnnstr)
 
-	if err := endpoint.sqlEngine.InitEngine("postgres", datasource, cnnstr); err != nil {
-		return nil, err
+	config := tsdb.SqlQueryEndpointConfiguration{
+		DriverName:        "postgres",
+		ConnectionString:  cnnstr,
+		Datasource:        datasource,
+		MetricColumnTypes: []string{"UNKNOWN", "TEXT", "VARCHAR", "CHAR"},
 	}
 
-	return endpoint, nil
+	rowTransformer := postgresRowTransformer{
+		log: logger,
+	}
+
+	return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger)
 }
 
 func generateConnectionString(datasource *models.DataSource) string {
@@ -63,70 +55,15 @@ func generateConnectionString(datasource *models.DataSource) string {
 	return u.String()
 }
 
-func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable)
+type postgresRowTransformer struct {
+	log log.Logger
 }
 
-func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, len(columnNames)),
-		Rows:    make([]tsdb.RowValues, 0),
-	}
-
-	for i, name := range columnNames {
-		table.Columns[i].Text = name
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-
-	// check if there is a column named time
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		}
-	}
-
-	for ; rows.Next(); rowCount++ {
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native postgres datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
+	values := make([]interface{}, len(columnTypes))
+	valuePtrs := make([]interface{}, len(columnTypes))
 
-		table.Rows = append(table.Rows, values)
-	}
-
-	result.Tables = append(result.Tables, table)
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}
-
-func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
-	types, err := rows.ColumnTypes()
-	if err != nil {
-		return nil, err
-	}
-
-	values := make([]interface{}, len(types))
-	valuePtrs := make([]interface{}, len(types))
-
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		valuePtrs[i] = &values[i]
 	}
 
@@ -136,20 +73,20 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues,
 
 	// convert types not handled by lib/pq
 	// unhandled types are returned as []byte
-	for i := 0; i < len(types); i++ {
+	for i := 0; i < len(columnTypes); i++ {
 		if value, ok := values[i].([]byte); ok {
-			switch types[i].DatabaseTypeName() {
+			switch columnTypes[i].DatabaseTypeName() {
 			case "NUMERIC":
 				if v, err := strconv.ParseFloat(string(value), 64); err == nil {
 					values[i] = v
 				} else {
-					e.log.Debug("Rows", "Error converting numeric to float", value)
+					t.log.Debug("Rows", "Error converting numeric to float", value)
 				}
 			case "UNKNOWN", "CIDR", "INET", "MACADDR":
 				// char literals have type UNKNOWN
 				values[i] = string(value)
 			default:
-				e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value)
+				t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
 				values[i] = string(value)
 			}
 		}
@@ -157,159 +94,3 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues,
 
 	return values, nil
 }
-
-func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
-	pointsBySeries := make(map[string]*tsdb.TimeSeries)
-	seriesByQueryOrder := list.New()
-
-	columnNames, err := rows.Columns()
-	if err != nil {
-		return err
-	}
-
-	columnTypes, err := rows.ColumnTypes()
-	if err != nil {
-		return err
-	}
-
-	rowLimit := 1000000
-	rowCount := 0
-	timeIndex := -1
-	metricIndex := -1
-
-	// check columns of resultset: a column named time is mandatory
-	// the first text column is treated as metric name unless a column named metric is present
-	for i, col := range columnNames {
-		switch col {
-		case "time":
-			timeIndex = i
-		case "metric":
-			metricIndex = i
-		default:
-			if metricIndex == -1 {
-				switch columnTypes[i].DatabaseTypeName() {
-				case "UNKNOWN", "TEXT", "VARCHAR", "CHAR":
-					metricIndex = i
-				}
-			}
-		}
-	}
-
-	if timeIndex == -1 {
-		return fmt.Errorf("Found no column named time")
-	}
-
-	fillMissing := query.Model.Get("fill").MustBool(false)
-	var fillInterval float64
-	fillValue := null.Float{}
-	if fillMissing {
-		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
-		if !query.Model.Get("fillNull").MustBool(false) {
-			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
-			fillValue.Valid = true
-		}
-	}
-
-	for rows.Next() {
-		var timestamp float64
-		var value null.Float
-		var metric string
-
-		if rowCount > rowLimit {
-			return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
-		}
-
-		values, err := e.getTypedRowData(rows)
-		if err != nil {
-			return err
-		}
-
-		// converts column named time to unix timestamp in milliseconds to make
-		// native mysql datetime types and epoch dates work in
-		// annotation and table queries.
-		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
-
-		switch columnValue := values[timeIndex].(type) {
-		case int64:
-			timestamp = float64(columnValue)
-		case float64:
-			timestamp = columnValue
-		default:
-			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
-		}
-
-		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok {
-				metric = columnValue
-			} else {
-				return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
-			}
-		}
-
-		for i, col := range columnNames {
-			if i == timeIndex || i == metricIndex {
-				continue
-			}
-
-			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
-				return err
-			}
-
-			if metricIndex == -1 {
-				metric = col
-			}
-
-			series, exist := pointsBySeries[metric]
-			if !exist {
-				series = &tsdb.TimeSeries{Name: metric}
-				pointsBySeries[metric] = series
-				seriesByQueryOrder.PushBack(metric)
-			}
-
-			if fillMissing {
-				var intervalStart float64
-				if !exist {
-					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
-				} else {
-					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
-				}
-
-				// align interval start
-				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-
-				for i := intervalStart; i < timestamp; i += fillInterval {
-					series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-					rowCount++
-				}
-			}
-
-			series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
-
-			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
-			rowCount++
-
-		}
-	}
-
-	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
-		key := elem.Value.(string)
-		result.Series = append(result.Series, pointsBySeries[key])
-
-		if fillMissing {
-			series := pointsBySeries[key]
-			// fill in values from last fetched value till interval end
-			intervalStart := series.Points[len(series.Points)-1][1].Float64
-			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
-
-			// align interval start
-			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
-			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
-				series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
-				rowCount++
-			}
-		}
-	}
-
-	result.Meta.Set("rowCount", rowCount)
-	return nil
-}

+ 19 - 11
pkg/tsdb/postgres/postgres_test.go

@@ -8,8 +8,9 @@ import (
 	"time"
 
 	"github.com/go-xorm/xorm"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
 	"github.com/grafana/grafana/pkg/tsdb"
@@ -22,8 +23,9 @@ import (
 // The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest!
 // Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
 // preconfigured Postgres server suitable for running these tests.
-// There is also a dashboard.json in same directory that you can import to Grafana
-// once you've created a datasource for the test server/database.
+// There is also a datasource and dashboard provisioned by devenv scripts that you can
+// use to verify that the generated data are visualized as expected, see
+// devenv/README.md for setup instructions.
 func TestPostgres(t *testing.T) {
 	// change to true to run the MySQL tests
 	runPostgresTests := false
@@ -36,19 +38,25 @@ func TestPostgres(t *testing.T) {
 	Convey("PostgreSQL", t, func() {
 		x := InitPostgresTestDB(t)
 
-		endpoint := &PostgresQueryEndpoint{
-			sqlEngine: &tsdb.DefaultSqlEngine{
-				MacroEngine: NewPostgresMacroEngine(),
-				XormEngine:  x,
-			},
-			log: log.New("tsdb.postgres"),
+		origXormEngine := tsdb.NewXormEngine
+		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
+			return x, nil
 		}
 
-		sess := x.NewSession()
-		defer sess.Close()
+		endpoint, err := newPostgresQueryEndpoint(&models.DataSource{
+			JsonData:       simplejson.New(),
+			SecureJsonData: securejsondata.SecureJsonData{},
+		})
+		So(err, ShouldBeNil)
 
+		sess := x.NewSession()
 		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
 
+		Reset(func() {
+			sess.Close()
+			tsdb.NewXormEngine = origXormEngine
+		})
+
 		Convey("Given a table with different native data types", func() {
 			sql := `
 				DROP TABLE IF EXISTS postgres_types;

+ 279 - 45
pkg/tsdb/sql_engine.go

@@ -1,11 +1,17 @@
 package tsdb
 
 import (
+	"container/list"
 	"context"
+	"database/sql"
 	"fmt"
+	"math"
+	"strings"
 	"sync"
 	"time"
 
+	"github.com/grafana/grafana/pkg/log"
+
 	"github.com/grafana/grafana/pkg/components/null"
 
 	"github.com/go-xorm/core"
@@ -14,27 +20,15 @@ import (
 	"github.com/grafana/grafana/pkg/models"
 )
 
-// SqlEngine is a wrapper class around xorm for relational database data sources.
-type SqlEngine interface {
-	InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error
-	Query(
-		ctx context.Context,
-		ds *models.DataSource,
-		query *TsdbQuery,
-		transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-		transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-	) (*Response, error)
-}
-
 // SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and
 // timeRange to be able to generate queries that use from and to.
 type SqlMacroEngine interface {
 	Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error)
 }
 
-type DefaultSqlEngine struct {
-	MacroEngine SqlMacroEngine
-	XormEngine  *xorm.Engine
+// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
+type SqlTableRowTransformer interface {
+	Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error)
 }
 
 type engineCacheType struct {
@@ -48,69 +42,94 @@ var engineCache = engineCacheType{
 	versions: make(map[int64]int),
 }
 
-// InitEngine creates the db connection and inits the xorm engine or loads it from the engine cache
-func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error {
+var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
+	return xorm.NewEngine(driverName, connectionString)
+}
+
+type sqlQueryEndpoint struct {
+	macroEngine       SqlMacroEngine
+	rowTransformer    SqlTableRowTransformer
+	engine            *xorm.Engine
+	timeColumnNames   []string
+	metricColumnTypes []string
+	log               log.Logger
+}
+
+type SqlQueryEndpointConfiguration struct {
+	DriverName        string
+	Datasource        *models.DataSource
+	ConnectionString  string
+	TimeColumnNames   []string
+	MetricColumnTypes []string
+}
+
+var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) {
+	queryEndpoint := sqlQueryEndpoint{
+		rowTransformer:  rowTransformer,
+		macroEngine:     macroEngine,
+		timeColumnNames: []string{"time"},
+		log:             log,
+	}
+
+	if len(config.TimeColumnNames) > 0 {
+		queryEndpoint.timeColumnNames = config.TimeColumnNames
+	}
+
 	engineCache.Lock()
 	defer engineCache.Unlock()
 
-	if engine, present := engineCache.cache[dsInfo.Id]; present {
-		if version := engineCache.versions[dsInfo.Id]; version == dsInfo.Version {
-			e.XormEngine = engine
-			return nil
+	if engine, present := engineCache.cache[config.Datasource.Id]; present {
+		if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version {
+			queryEndpoint.engine = engine
+			return &queryEndpoint, nil
 		}
 	}
 
-	engine, err := xorm.NewEngine(driverName, cnnstr)
+	engine, err := NewXormEngine(config.DriverName, config.ConnectionString)
 	if err != nil {
-		return err
+		return nil, err
 	}
 
 	engine.SetMaxOpenConns(10)
 	engine.SetMaxIdleConns(10)
 
-	engineCache.versions[dsInfo.Id] = dsInfo.Version
-	engineCache.cache[dsInfo.Id] = engine
-	e.XormEngine = engine
+	engineCache.versions[config.Datasource.Id] = config.Datasource.Version
+	engineCache.cache[config.Datasource.Id] = engine
+	queryEndpoint.engine = engine
 
-	return nil
+	return &queryEndpoint, nil
 }
 
-// Query is a default implementation of the Query method for an SQL data source.
-// The caller of this function must implement transformToTimeSeries and transformToTable and
-// pass them in as parameters.
-func (e *DefaultSqlEngine) Query(
-	ctx context.Context,
-	dsInfo *models.DataSource,
-	tsdbQuery *TsdbQuery,
-	transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-	transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error,
-) (*Response, error) {
+const rowLimit = 1000000
+
+// Query is the main function for the SqlQueryEndpoint
+func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) {
 	result := &Response{
 		Results: make(map[string]*QueryResult),
 	}
 
-	session := e.XormEngine.NewSession()
+	session := e.engine.NewSession()
 	defer session.Close()
 	db := session.DB()
 
 	for _, query := range tsdbQuery.Queries {
-		rawSql := query.Model.Get("rawSql").MustString()
-		if rawSql == "" {
+		rawSQL := query.Model.Get("rawSql").MustString()
+		if rawSQL == "" {
 			continue
 		}
 
 		queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
 		result.Results[query.RefId] = queryResult
 
-		rawSql, err := e.MacroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSql)
+		rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
 		if err != nil {
 			queryResult.Error = err
 			continue
 		}
 
-		queryResult.Meta.Set("sql", rawSql)
+		queryResult.Meta.Set("sql", rawSQL)
 
-		rows, err := db.Query(rawSql)
+		rows, err := db.Query(rawSQL)
 		if err != nil {
 			queryResult.Error = err
 			continue
@@ -122,13 +141,13 @@ func (e *DefaultSqlEngine) Query(
 
 		switch format {
 		case "time_series":
-			err := transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+			err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
 			if err != nil {
 				queryResult.Error = err
 				continue
 			}
 		case "table":
-			err := transformToTable(query, rows, queryResult, tsdbQuery)
+			err := e.transformToTable(query, rows, queryResult, tsdbQuery)
 			if err != nil {
 				queryResult.Error = err
 				continue
@@ -139,6 +158,221 @@ func (e *DefaultSqlEngine) Query(
 	return result, nil
 }
 
+func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
+	columnNames, err := rows.Columns()
+	columnCount := len(columnNames)
+
+	if err != nil {
+		return err
+	}
+
+	rowCount := 0
+	timeIndex := -1
+
+	table := &Table{
+		Columns: make([]TableColumn, columnCount),
+		Rows:    make([]RowValues, 0),
+	}
+
+	for i, name := range columnNames {
+		table.Columns[i].Text = name
+
+		for _, tc := range e.timeColumnNames {
+			if name == tc {
+				timeIndex = i
+				break
+			}
+		}
+	}
+
+	columnTypes, err := rows.ColumnTypes()
+	if err != nil {
+		return err
+	}
+
+	for ; rows.Next(); rowCount++ {
+		if rowCount > rowLimit {
+			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
+		}
+
+		values, err := e.rowTransformer.Transform(columnTypes, rows)
+		if err != nil {
+			return err
+		}
+
+		// converts column named time to unix timestamp in milliseconds
+		// to make native mssql datetime types and epoch dates work in
+		// annotation and table queries.
+		ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+		table.Rows = append(table.Rows, values)
+	}
+
+	result.Tables = append(result.Tables, table)
+	result.Meta.Set("rowCount", rowCount)
+	return nil
+}
+
+func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
+	pointsBySeries := make(map[string]*TimeSeries)
+	seriesByQueryOrder := list.New()
+
+	columnNames, err := rows.Columns()
+	if err != nil {
+		return err
+	}
+
+	columnTypes, err := rows.ColumnTypes()
+	if err != nil {
+		return err
+	}
+
+	rowCount := 0
+	timeIndex := -1
+	metricIndex := -1
+
+	// check columns of resultset: a column named time is mandatory
+	// the first text column is treated as metric name unless a column named metric is present
+	for i, col := range columnNames {
+		for _, tc := range e.timeColumnNames {
+			if col == tc {
+				timeIndex = i
+				continue
+			}
+		}
+		switch col {
+		case "metric":
+			metricIndex = i
+		default:
+			if metricIndex == -1 {
+				columnType := columnTypes[i].DatabaseTypeName()
+
+				for _, mct := range e.metricColumnTypes {
+					if columnType == mct {
+						metricIndex = i
+						continue
+					}
+				}
+			}
+		}
+	}
+
+	if timeIndex == -1 {
+		return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or "))
+	}
+
+	fillMissing := query.Model.Get("fill").MustBool(false)
+	var fillInterval float64
+	fillValue := null.Float{}
+	if fillMissing {
+		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
+		if !query.Model.Get("fillNull").MustBool(false) {
+			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
+			fillValue.Valid = true
+		}
+	}
+
+	for rows.Next() {
+		var timestamp float64
+		var value null.Float
+		var metric string
+
+		if rowCount > rowLimit {
+			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
+		}
+
+		values, err := e.rowTransformer.Transform(columnTypes, rows)
+		if err != nil {
+			return err
+		}
+
+		// converts column named time to unix timestamp in milliseconds to make
+		// native mysql datetime types and epoch dates work in
+		// annotation and table queries.
+		ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+
+		switch columnValue := values[timeIndex].(type) {
+		case int64:
+			timestamp = float64(columnValue)
+		case float64:
+			timestamp = columnValue
+		default:
+			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
+		}
+
+		if metricIndex >= 0 {
+			if columnValue, ok := values[metricIndex].(string); ok {
+				metric = columnValue
+			} else {
+				return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
+			}
+		}
+
+		for i, col := range columnNames {
+			if i == timeIndex || i == metricIndex {
+				continue
+			}
+
+			if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+				return err
+			}
+
+			if metricIndex == -1 {
+				metric = col
+			}
+
+			series, exist := pointsBySeries[metric]
+			if !exist {
+				series = &TimeSeries{Name: metric}
+				pointsBySeries[metric] = series
+				seriesByQueryOrder.PushBack(metric)
+			}
+
+			if fillMissing {
+				var intervalStart float64
+				if !exist {
+					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
+				} else {
+					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
+				}
+
+				// align interval start
+				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
+
+				for i := intervalStart; i < timestamp; i += fillInterval {
+					series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
+					rowCount++
+				}
+			}
+
+			series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)})
+
+			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
+		}
+	}
+
+	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
+		key := elem.Value.(string)
+		result.Series = append(result.Series, pointsBySeries[key])
+
+		if fillMissing {
+			series := pointsBySeries[key]
+			// fill in values from last fetched value till interval end
+			intervalStart := series.Points[len(series.Points)-1][1].Float64
+			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)
+
+			// align interval start
+			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
+			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
+				series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
+				rowCount++
+			}
+		}
+	}
+
+	result.Meta.Set("rowCount", rowCount)
+	return nil
+}
+
 // ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds
 // to make native datetime types and epoch dates work in annotation and table queries.
 func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {