浏览代码

Merge branch 'master' into develop

Torkel Ödegaard 7 年之前
父节点
当前提交
180ca021b5
共有 100 个文件被更改,包括 1500 次插入和 534 次删除
  1. 11 0
      .babelrc
  2. 5 2
      .circleci/config.yml
  3. 25 10
      CHANGELOG.md
  4. 2 1
      Dockerfile
  5. 2 0
      README.md
  6. 2 2
      UPGRADING_DEPENDENCIES.md
  7. 3 4
      conf/defaults.ini
  8. 1 1
      conf/provisioning/datasources/sample.yaml
  9. 3 0
      conf/sample.ini
  10. 1 1
      docs/sources/administration/provisioning.md
  11. 5 1
      docs/sources/alerting/notifications.md
  12. 3 2
      docs/sources/auth/ldap.md
  13. 2 2
      docs/sources/contribute/cla.md
  14. 1 1
      docs/sources/enterprise/index.md
  15. 3 3
      docs/sources/features/datasources/mysql.md
  16. 1 1
      docs/sources/guides/whats-new-in-v4.md
  17. 65 0
      docs/sources/guides/whats-new-in-v5-4.md
  18. 4 1
      docs/sources/http_api/alerting.md
  19. 1 1
      docs/sources/http_api/index.md
  20. 39 39
      docs/sources/http_api/org.md
  21. 3 3
      docs/sources/http_api/user.md
  22. 3 3
      docs/sources/index.md
  23. 2 0
      docs/sources/reference/export_import.md
  24. 1 1
      docs/sources/reference/templating.md
  25. 2 1
      docs/versions.json
  26. 2 2
      latest.json
  27. 16 20
      package.json
  28. 2 1
      packaging/docker/Dockerfile
  29. 5 0
      packaging/docker/build-enterprise.sh
  30. 6 0
      pkg/api/admin_users.go
  31. 50 0
      pkg/api/admin_users_test.go
  32. 0 4
      pkg/api/dashboard.go
  33. 0 1
      pkg/api/dashboard_test.go
  34. 8 0
      pkg/api/password.go
  35. 3 3
      pkg/api/pluginproxy/ds_auth_provider.go
  36. 1 1
      pkg/api/pluginproxy/pluginproxy.go
  37. 0 1
      pkg/models/dashboards.go
  38. 2 1
      pkg/models/user.go
  39. 1 1
      pkg/services/dashboards/dashboard_service.go
  40. 25 1
      pkg/services/sqlstore/user.go
  41. 26 0
      pkg/services/sqlstore/user_test.go
  42. 2 0
      pkg/tsdb/cloudwatch/metric_find_query.go
  43. 1 1
      pkg/tsdb/elasticsearch/client/client.go
  44. 13 6
      pkg/tsdb/elasticsearch/client/client_test.go
  45. 1 0
      pkg/tsdb/elasticsearch/client/models.go
  46. 4 0
      pkg/tsdb/elasticsearch/time_series_query.go
  47. 4 0
      pkg/tsdb/mssql/macros.go
  48. 14 0
      pkg/tsdb/mssql/macros_test.go
  49. 4 0
      pkg/tsdb/mysql/macros.go
  50. 14 0
      pkg/tsdb/mysql/macros_test.go
  51. 2 2
      pkg/tsdb/mysql/mysql_test.go
  52. 4 0
      pkg/tsdb/postgres/macros.go
  53. 14 0
      pkg/tsdb/postgres/macros_test.go
  54. 0 2
      pkg/tsdb/sql_engine.go
  55. 0 14
      pkg/tsdb/sql_engine_test.go
  56. 1 1
      public/app/app.ts
  57. 1 1
      public/app/core/components/code_editor/theme-grafana-dark.js
  58. 4 0
      public/app/core/controllers/reset_password_ctrl.ts
  59. 63 13
      public/app/core/logs_model.ts
  60. 54 1
      public/app/core/specs/logs_model.test.ts
  61. 1 1
      public/app/core/utils/explore.test.ts
  62. 40 3
      public/app/core/utils/explore.ts
  63. 8 2
      public/app/core/utils/kbn.ts
  64. 16 5
      public/app/core/utils/text.test.ts
  65. 22 10
      public/app/core/utils/text.ts
  66. 154 65
      public/app/features/explore/Explore.tsx
  67. 19 15
      public/app/features/explore/Graph.tsx
  68. 141 0
      public/app/features/explore/LogLabels.tsx
  69. 242 39
      public/app/features/explore/Logs.tsx
  70. 10 7
      public/app/features/explore/TimePicker.tsx
  71. 1 1
      public/app/features/teams/CreateTeamCtrl.ts
  72. 1 1
      public/app/partials/login.html
  73. 8 1
      public/app/partials/reset_password.html
  74. 15 16
      public/app/plugins/datasource/elasticsearch/bucket_agg.ts
  75. 16 16
      public/app/plugins/datasource/elasticsearch/metric_agg.ts
  76. 10 0
      public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html
  77. 4 0
      public/app/plugins/datasource/elasticsearch/query_builder.ts
  78. 1 1
      public/app/plugins/datasource/logging/components/LoggingCheatSheet.tsx
  79. 6 50
      public/app/plugins/datasource/logging/components/LoggingStartPage.tsx
  80. 21 25
      public/app/plugins/datasource/logging/datasource.ts
  81. 29 2
      public/app/plugins/datasource/logging/language_provider.test.ts
  82. 7 12
      public/app/plugins/datasource/logging/language_provider.ts
  83. 19 1
      public/app/plugins/datasource/logging/query_utils.test.ts
  84. 17 0
      public/app/plugins/datasource/logging/query_utils.ts
  85. 25 19
      public/app/plugins/datasource/logging/result_transformer.test.ts
  86. 25 10
      public/app/plugins/datasource/logging/result_transformer.ts
  87. 29 0
      public/app/plugins/datasource/logging/syntax.ts
  88. 3 3
      public/app/plugins/datasource/mysql/partials/annotations.editor.html
  89. 3 3
      public/app/plugins/datasource/mysql/partials/query.editor.html
  90. 5 3
      public/app/plugins/datasource/prometheus/add_label_to_query.ts
  91. 1 1
      public/app/plugins/datasource/prometheus/components/PromCheatSheet.tsx
  92. 6 50
      public/app/plugins/datasource/prometheus/components/PromStart.tsx
  93. 1 1
      public/app/plugins/datasource/prometheus/datasource.ts
  94. 3 3
      public/app/plugins/datasource/prometheus/language_provider.ts
  95. 15 2
      public/app/plugins/datasource/prometheus/promql.ts
  96. 1 1
      public/app/plugins/datasource/prometheus/query_hints.ts
  97. 14 1
      public/app/plugins/datasource/prometheus/specs/add_label_to_query.test.ts
  98. 9 2
      public/app/plugins/datasource/prometheus/specs/language_provider.test.ts
  99. 10 1
      public/app/types/explore.ts
  100. 5 4
      public/sass/_variables.dark.scss

+ 11 - 0
.babelrc

@@ -0,0 +1,11 @@
+{
+  "presets": [
+    [
+      "@babel/preset-env",
+      {
+		  "targets": { "browsers": "last 3 versions" },
+		  "useBuiltIns": "entry"
+      }
+    ]
+  ]
+}

+ 5 - 2
.circleci/config.yml

@@ -162,8 +162,8 @@ jobs:
           name: Build Grafana.com master publisher
           name: Build Grafana.com master publisher
           command: 'go build -o scripts/publish scripts/build/publish.go'
           command: 'go build -o scripts/publish scripts/build/publish.go'
       - run:
       - run:
-          name: Build Grafana.com release publisher
-          command: 'cd scripts/build/release_publisher && go build -o release_publisher .'
+          name: Test and build Grafana.com release publisher
+          command: 'cd scripts/build/release_publisher && go test . && go build -o release_publisher .'
       - persist_to_workspace:
       - persist_to_workspace:
           root: .
           root: .
           paths:
           paths:
@@ -191,6 +191,9 @@ jobs:
       - run:
       - run:
           name: sha-sum packages
           name: sha-sum packages
           command: 'go run build.go sha-dist'
           command: 'go run build.go sha-dist'
+      - run:
+          name: Test Grafana.com release publisher
+          command: 'cd scripts/build/release_publisher && go test .'
       - persist_to_workspace:
       - persist_to_workspace:
           root: .
           root: .
           paths:
           paths:

+ 25 - 10
CHANGELOG.md

@@ -1,6 +1,21 @@
-# 5.4.0 (unreleased)
+# 5.5.0 (unreleased)
+
+### Minor
+
+* **Elasticsearch**: Add support for offset in date histogram aggregation [#12653](https://github.com/grafana/grafana/issues/12653), thx [@mattiarossi](https://github.com/mattiarossi)
+* **Auth**: Prevent password reset when login form is disabled or either LDAP or Auth Proxy is enabled [#14246](https://github.com/grafana/grafana/issues/14246), thx [@SilverFire](https://github.com/SilverFire)
+* **Dataproxy**: Override incoming Authorization header [#13815](https://github.com/grafana/grafana/issues/13815), thx [@kornholi](https://github.com/kornholi)
+* **Admin**: Fix prevent removing last grafana admin permissions [#11067](https://github.com/grafana/grafana/issues/11067), thx [@danielbh](https://github.com/danielbh)
+
+# 5.4.0 (2018-12-03)
 
 
 * **Cloudwatch**: Fix invalid time range causes segmentation fault [#14150](https://github.com/grafana/grafana/issues/14150)
 * **Cloudwatch**: Fix invalid time range causes segmentation fault [#14150](https://github.com/grafana/grafana/issues/14150)
+* **Cloudwatch**: AWS/CodeBuild metrics and dimensions [#14167](https://github.com/grafana/grafana/issues/14167), thx [@mmcoltman](https://github.com/mmcoltman)
+* **MySQL**: Fix `$__timeFrom()` and `$__timeTo()` should respect local time zone [#14228](https://github.com/grafana/grafana/issues/14228)
+
+### 5.4.0-beta1 fixes
+* **Graph**: Fix legend always visible even if configured to be hidden [#14144](https://github.com/grafana/grafana/issues/14144)
+* **Elasticsearch**: Fix regression when using datasource version 6.0+ and alerting [#14175](https://github.com/grafana/grafana/pull/14175)
 
 
 # 5.4.0-beta1 (2018-11-20)
 # 5.4.0-beta1 (2018-11-20)
 
 
@@ -156,7 +171,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-3-3-and-4-
 * **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
 * **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
 * **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
 * **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
 * **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079)
 * **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079)
-* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
+* **Provisioning**: Should allow one default datasource per organization [#12229](https://github.com/grafana/grafana/issues/12229)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
@@ -279,7 +294,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 * **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
 * **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
 * **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
 * **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
 * **Units**: W/m2 (energy), l/h (flow) and kPa (pressure) [#11233](https://github.com/grafana/grafana/pull/11233), thx [@flopp999](https://github.com/flopp999)
 * **Units**: W/m2 (energy), l/h (flow) and kPa (pressure) [#11233](https://github.com/grafana/grafana/pull/11233), thx [@flopp999](https://github.com/flopp999)
-* **Units**: Litre/min (flow) and milliLitre/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999)
+* **Units**: Liter/min (flow) and milliLiter/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999)
 * **Alerting**: Fix mobile notifications for Microsoft Teams alert notifier [#11484](https://github.com/grafana/grafana/pull/11484), thx [@manacker](https://github.com/manacker)
 * **Alerting**: Fix mobile notifications for Microsoft Teams alert notifier [#11484](https://github.com/grafana/grafana/pull/11484), thx [@manacker](https://github.com/manacker)
 * **Influxdb**: Add support for mode function [#12286](https://github.com/grafana/grafana/issues/12286)
 * **Influxdb**: Add support for mode function [#12286](https://github.com/grafana/grafana/issues/12286)
 * **Cloudwatch**: Fixes panic caused by bad timerange settings [#12199](https://github.com/grafana/grafana/issues/12199)
 * **Cloudwatch**: Fixes panic caused by bad timerange settings [#12199](https://github.com/grafana/grafana/issues/12199)
@@ -414,7 +429,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 * **Units**: Use B/s instead Bps for Bytes per second [#9342](https://github.com/grafana/grafana/pull/9342), thx [@mayli](https://github.com/mayli)
 * **Units**: Use B/s instead Bps for Bytes per second [#9342](https://github.com/grafana/grafana/pull/9342), thx [@mayli](https://github.com/mayli)
 * **Units**: Radiation units [#11001](https://github.com/grafana/grafana/issues/11001), thx [@victorclaessen](https://github.com/victorclaessen)
 * **Units**: Radiation units [#11001](https://github.com/grafana/grafana/issues/11001), thx [@victorclaessen](https://github.com/victorclaessen)
 * **Units**: Timeticks unit [#11183](https://github.com/grafana/grafana/pull/11183), thx [@jtyr](https://github.com/jtyr)
 * **Units**: Timeticks unit [#11183](https://github.com/grafana/grafana/pull/11183), thx [@jtyr](https://github.com/jtyr)
-* **Units**: Concentration units and "Normal cubic metre" [#11211](https://github.com/grafana/grafana/issues/11211), thx [@flopp999](https://github.com/flopp999)
+* **Units**: Concentration units and "Normal cubic meter" [#11211](https://github.com/grafana/grafana/issues/11211), thx [@flopp999](https://github.com/flopp999)
 * **Units**: New currency - Czech koruna [#11384](https://github.com/grafana/grafana/pull/11384), thx [@Rohlik](https://github.com/Rohlik)
 * **Units**: New currency - Czech koruna [#11384](https://github.com/grafana/grafana/pull/11384), thx [@Rohlik](https://github.com/Rohlik)
 * **Avatar**: Fix DISABLE_GRAVATAR option [#11095](https://github.com/grafana/grafana/issues/11095)
 * **Avatar**: Fix DISABLE_GRAVATAR option [#11095](https://github.com/grafana/grafana/issues/11095)
 * **Heatmap**: Disable log scale when using time time series buckets [#10792](https://github.com/grafana/grafana/issues/10792)
 * **Heatmap**: Disable log scale when using time time series buckets [#10792](https://github.com/grafana/grafana/issues/10792)
@@ -731,7 +746,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 ## Enhancements
 ## Enhancements
 
 
 * **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
 * **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
-* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
+* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
 * **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
 * **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
 - **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
 - **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
 
 
@@ -896,7 +911,7 @@ Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
 * **InfluxDB**: Influxb Datasource test passes even if the Database doesn't exist [#7864](https://github.com/grafana/grafana/issues/7864)
 * **InfluxDB**: Influxb Datasource test passes even if the Database doesn't exist [#7864](https://github.com/grafana/grafana/issues/7864)
 * **Prometheus**: Displaying Prometheus annotations is incredibly slow [#7750](https://github.com/grafana/grafana/issues/7750), thx [@mtanda](https://github.com/mtanda)
 * **Prometheus**: Displaying Prometheus annotations is incredibly slow [#7750](https://github.com/grafana/grafana/issues/7750), thx [@mtanda](https://github.com/mtanda)
 * **Graphite**: grafana generates empty find query to graphite -> 422 Unprocessable Entity [#7740](https://github.com/grafana/grafana/issues/7740)
 * **Graphite**: grafana generates empty find query to graphite -> 422 Unprocessable Entity [#7740](https://github.com/grafana/grafana/issues/7740)
-* **Admin**: make organisation filter case insensitive [#8194](https://github.com/grafana/grafana/issues/8194), thx [@Alexander-N](https://github.com/Alexander-N)
+* **Admin**: make organization filter case insensitive [#8194](https://github.com/grafana/grafana/issues/8194), thx [@Alexander-N](https://github.com/Alexander-N)
 
 
 ## Changes
 ## Changes
 * **Elasticsearch**: Changed elasticsearch Terms aggregation to default to Min Doc Count to 1, and sort order to Top [#8321](https://github.com/grafana/grafana/issues/8321)
 * **Elasticsearch**: Changed elasticsearch Terms aggregation to default to Min Doc Count to 1, and sort order to Top [#8321](https://github.com/grafana/grafana/issues/8321)
@@ -1024,7 +1039,7 @@ Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
 * **CLI**: Make it possible to reset the admin password using the grafana-cli. [#5479](https://github.com/grafana/grafana/issues/5479)
 * **CLI**: Make it possible to reset the admin password using the grafana-cli. [#5479](https://github.com/grafana/grafana/issues/5479)
 * **Influxdb**: Support multiple tags in InfluxDB annotations. [#4550](https://github.com/grafana/grafana/pull/4550), thx [@adrianlzt](https://github.com/adrianlzt)
 * **Influxdb**: Support multiple tags in InfluxDB annotations. [#4550](https://github.com/grafana/grafana/pull/4550), thx [@adrianlzt](https://github.com/adrianlzt)
 * **LDAP**:  Basic Auth now supports LDAP username and password, [#6940](https://github.com/grafana/grafana/pull/6940), thx [@utkarshcmu](https://github.com/utkarshcmu)
 * **LDAP**:  Basic Auth now supports LDAP username and password, [#6940](https://github.com/grafana/grafana/pull/6940), thx [@utkarshcmu](https://github.com/utkarshcmu)
-* **LDAP**: Now works with Auth Proxy, role and organisation mapping & sync will regularly be performed. [#6895](https://github.com/grafana/grafana/pull/6895), thx [@Seuf](https://github.com/seuf)
+* **LDAP**: Now works with Auth Proxy, role and organization mapping & sync will regularly be performed. [#6895](https://github.com/grafana/grafana/pull/6895), thx [@Seuf](https://github.com/seuf)
 * **Alerting**: Adds OK as no data option. [#6866](https://github.com/grafana/grafana/issues/6866)
 * **Alerting**: Adds OK as no data option. [#6866](https://github.com/grafana/grafana/issues/6866)
 * **Alert list**: Order alerts based on state. [#6676](https://github.com/grafana/grafana/issues/6676)
 * **Alert list**: Order alerts based on state. [#6676](https://github.com/grafana/grafana/issues/6676)
 * **Alerting**: Add api endpoint for pausing all alerts. [#6589](https://github.com/grafana/grafana/issues/6589)
 * **Alerting**: Add api endpoint for pausing all alerts. [#6589](https://github.com/grafana/grafana/issues/6589)
@@ -1163,7 +1178,7 @@ due to too many connections/file handles on the data source backend. This proble
 * **Scripts**: Use restart instead of start for deb package script, closes [#5282](https://github.com/grafana/grafana/pull/5282)
 * **Scripts**: Use restart instead of start for deb package script, closes [#5282](https://github.com/grafana/grafana/pull/5282)
 * **Logging**: Moved to structured logging lib, and moved to component specific level filters via config file, closes [#4590](https://github.com/grafana/grafana/issues/4590)
 * **Logging**: Moved to structured logging lib, and moved to component specific level filters via config file, closes [#4590](https://github.com/grafana/grafana/issues/4590)
 * **OpenTSDB**: Support nested template variables in tag_values function, closes [#4398](https://github.com/grafana/grafana/issues/4398)
 * **OpenTSDB**: Support nested template variables in tag_values function, closes [#4398](https://github.com/grafana/grafana/issues/4398)
-* **Datasource**: Pending data source requests are cancelled before new ones are issues (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321)
+* **Datasource**: Pending data source requests are canceled before new ones are issues (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321)
 
 
 ### Breaking changes
 ### Breaking changes
 * **Logging** : Changed default logging output format (now structured into message, and key value pairs, with logger key acting as component). You can also no change in config to json log output.
 * **Logging** : Changed default logging output format (now structured into message, and key value pairs, with logger key acting as component). You can also no change in config to json log output.
@@ -1867,7 +1882,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele
 
 
 #### Fixes
 #### Fixes
 - [Issue #126](https://github.com/grafana/grafana/issues/126). Graphite query lexer change, can now handle regex parameters for aliasSub function
 - [Issue #126](https://github.com/grafana/grafana/issues/126). Graphite query lexer change, can now handle regex parameters for aliasSub function
-- [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having muliple nested filters now works better. Options are now reloaded correctly and there are no multiple renders/refresh in between.
+- [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having multiple nested filters now works better. Options are now reloaded correctly and there are no multiple renders/refresh in between.
 - [Issue #412](https://github.com/grafana/grafana/issues/412). After a filter option is changed and a nested template param is reloaded, if the current value exists after the options are reloaded the current selected value is kept.
 - [Issue #412](https://github.com/grafana/grafana/issues/412). After a filter option is changed and a nested template param is reloaded, if the current value exists after the options are reloaded the current selected value is kept.
 - [Issue #460](https://github.com/grafana/grafana/issues/460). Legend Current value did not display when value was zero
 - [Issue #460](https://github.com/grafana/grafana/issues/460). Legend Current value did not display when value was zero
 - [Issue #328](https://github.com/grafana/grafana/issues/328). Fix to series toggling bug that caused annotations to be hidden when toggling/hiding series.
 - [Issue #328](https://github.com/grafana/grafana/issues/328). Fix to series toggling bug that caused annotations to be hidden when toggling/hiding series.
@@ -1902,7 +1917,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele
 - Graphite errors are now much easier to see and troubleshoot with the new inspector ([Issue #265](https://github.com/grafana/grafana/issues/265))
 - Graphite errors are now much easier to see and troubleshoot with the new inspector ([Issue #265](https://github.com/grafana/grafana/issues/265))
 - Use influxdb aliases to distinguish between multiple columns ([Issue #283](https://github.com/grafana/grafana/issues/283))
 - Use influxdb aliases to distinguish between multiple columns ([Issue #283](https://github.com/grafana/grafana/issues/283))
 - Correction to ms axis formater, now formats days correctly. ([Issue #189](https://github.com/grafana/grafana/issues/189))
 - Correction to ms axis formater, now formats days correctly. ([Issue #189](https://github.com/grafana/grafana/issues/189))
-- Css fix for Firefox and using top menu dropdowns in panel fullscren / edit mode ([Issue #106](https://github.com/grafana/grafana/issues/106))
+- Css fix for Firefox and using top menu dropdowns in panel fullscreen / edit mode ([Issue #106](https://github.com/grafana/grafana/issues/106))
 - Browser page title is now Grafana - {{dashboard title}} ([Issue #294](https://github.com/grafana/grafana/issues/294))
 - Browser page title is now Grafana - {{dashboard title}} ([Issue #294](https://github.com/grafana/grafana/issues/294))
 - Disable auto refresh zooming in (every time you change to an absolute time range), refresh will be restored when you change time range back to relative ([Issue #282](https://github.com/grafana/grafana/issues/282))
 - Disable auto refresh zooming in (every time you change to an absolute time range), refresh will be restored when you change time range back to relative ([Issue #282](https://github.com/grafana/grafana/issues/282))
 - More graphite functions
 - More graphite functions

+ 2 - 1
Dockerfile

@@ -50,7 +50,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 
 
 WORKDIR $GF_PATHS_HOME
 WORKDIR $GF_PATHS_HOME
 
 
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get install -qq -y libfontconfig ca-certificates && \
     apt-get autoremove -y && \
     apt-get autoremove -y && \
     rm -rf /var/lib/apt/lists/*
     rm -rf /var/lib/apt/lists/*
 
 

+ 2 - 0
README.md

@@ -90,6 +90,8 @@ Choose this option to build on platforms other than linux/amd64 and/or not have
 
 
 The resulting image will be tagged as `grafana/grafana:dev`
 The resulting image will be tagged as `grafana/grafana:dev`
 
 
+Notice: If you are using Docker for MacOS, be sure to let limit of Memory bigger than 2 GiB (at docker -> Perferences -> Advanced), otherwize you may faild at `grunt build`
+
 ### Dev config
 ### Dev config
 
 
 Create a custom.ini in the conf directory to override default configuration options.
 Create a custom.ini in the conf directory to override default configuration options.

+ 2 - 2
UPGRADING_DEPENDENCIES.md

@@ -47,7 +47,7 @@ Our builds run on CircleCI through our build script.
 
 
 ### grafana/build-container
 ### grafana/build-container
 
 
-The main build step (in CircleCI) is built using a custom build container that comes pre-baked with some of the neccesary dependencies.
+The main build step (in CircleCI) is built using a custom build container that comes pre-baked with some of the necessary dependencies.
 
 
 Link: [grafana-build-container](https://github.com/grafana/grafana-build-container)
 Link: [grafana-build-container](https://github.com/grafana/grafana-build-container)
 
 
@@ -86,4 +86,4 @@ There is a Docker build for Grafana in the root of the project that allows anyon
 
 
 ### Local developer environments
 ### Local developer environments
 
 
-Please send out a notice in the grafana-dev slack channel when updating Go or Node.js to make it easier for everyone to update their local developer environments.
+Please send out a notice in the grafana-dev slack channel when updating Go or Node.js to make it easier for everyone to update their local developer environments.

+ 3 - 4
conf/defaults.ini

@@ -34,7 +34,7 @@ protocol = http
 # The ip address to bind to, empty will bind to all interfaces
 # The ip address to bind to, empty will bind to all interfaces
 http_addr =
 http_addr =
 
 
-# The http port  to use
+# The http port to use
 http_port = 3000
 http_port = 3000
 
 
 # The public facing domain name used to access grafana from a browser
 # The public facing domain name used to access grafana from a browser
@@ -166,7 +166,7 @@ google_tag_manager_id =
 # default admin user, created on startup
 # default admin user, created on startup
 admin_user = admin
 admin_user = admin
 
 
-# default admin password, can be changed before first start of grafana,  or in profile settings
+# default admin password, can be changed before first start of grafana, or in profile settings
 admin_password = admin
 admin_password = admin
 
 
 # used for signing
 # used for signing
@@ -372,7 +372,7 @@ templates_pattern = emails/*.html
 
 
 #################################### Logging ##########################
 #################################### Logging ##########################
 [log]
 [log]
-# Either "console", "file", "syslog". Default is console and  file
+# Either "console", "file", "syslog". Default is console and file
 # Use space to separate multiple modes, e.g. "console file"
 # Use space to separate multiple modes, e.g. "console file"
 mode = console file
 mode = console file
 
 
@@ -565,4 +565,3 @@ enable_alpha = false
 
 
 [enterprise]
 [enterprise]
 license_path =
 license_path =
-

+ 1 - 1
conf/provisioning/datasources/sample.yaml

@@ -7,7 +7,7 @@ apiVersion: 1
 #     orgId: 1
 #     orgId: 1
 
 
 # # list of datasources to insert/update depending
 # # list of datasources to insert/update depending
-# # on what's available in the datbase
+# # on what's available in the database
 #datasources:
 #datasources:
 #   # <string, required> name of the datasource. Required
 #   # <string, required> name of the datasource. Required
 # - name: Graphite
 # - name: Graphite

+ 3 - 0
conf/sample.ini

@@ -145,6 +145,9 @@ log_queries =
 # Google Analytics universal tracking code, only enabled if you specify an id here
 # Google Analytics universal tracking code, only enabled if you specify an id here
 ;google_analytics_ua_id =
 ;google_analytics_ua_id =
 
 
+# Google Tag Manager ID, only enabled if you specify an id here
+;google_tag_manager_id =
+
 #################################### Security ####################################
 #################################### Security ####################################
 [security]
 [security]
 # default admin user, created on startup
 # default admin user, created on startup

+ 1 - 1
docs/sources/administration/provisioning.md

@@ -230,4 +230,4 @@ By default Grafana will delete dashboards in the database if the file is removed
 > **Note.** Provisioning allows you to overwrite existing dashboards
 > **Note.** Provisioning allows you to overwrite existing dashboards
 > which leads to problems if you re-use settings that are supposed to be unique.
 > which leads to problems if you re-use settings that are supposed to be unique.
 > Be careful not to re-use the same `title` multiple times within a folder
 > Be careful not to re-use the same `title` multiple times within a folder
-> or `uid` within the same installation as this will cause weird behaviours.
+> or `uid` within the same installation as this will cause weird behaviors.

+ 5 - 1
docs/sources/alerting/notifications.md

@@ -55,6 +55,10 @@ Alert rule evaluation interval | Send reminders every | Reminder sent every (aft
 
 
 <div class="clearfix"></div>
 <div class="clearfix"></div>
 
 
+### Disable resolve message
+
+When checked, this option will disable resolve message [OK] that is sent when alerting state returns to false.
+
 ## Supported Notification Types
 ## Supported Notification Types
 
 
 Grafana ships with the following set of notification types:
 Grafana ships with the following set of notification types:
@@ -132,7 +136,7 @@ In DingTalk PC Client:
 
 
 2. Click "Robot Manage" item in the pop menu, there will be a new panel call "Robot Manage".
 2. Click "Robot Manage" item in the pop menu, there will be a new panel call "Robot Manage".
 
 
-3. In the  "Robot Manage" panel, select "customised: customised robot with Webhook".
+3. In the  "Robot Manage" panel, select "customized: customized robot with Webhook".
 
 
 4. In the next new panel named "robot detail", click "Add" button.
 4. In the next new panel named "robot detail", click "Add" button.
 
 

+ 3 - 2
docs/sources/auth/ldap.md

@@ -3,6 +3,7 @@ title = "LDAP Authentication"
 description = "Grafana LDAP Authentication Guide "
 description = "Grafana LDAP Authentication Guide "
 keywords = ["grafana", "configuration", "documentation", "ldap", "active directory"]
 keywords = ["grafana", "configuration", "documentation", "ldap", "active directory"]
 type = "docs"
 type = "docs"
+aliases = ["/installation/ldap/"]
 [menu.docs]
 [menu.docs]
 name = "LDAP"
 name = "LDAP"
 identifier = "ldap"
 identifier = "ldap"
@@ -162,9 +163,9 @@ org_role = "Viewer"
 Setting | Required | Description | Default
 Setting | Required | Description | Default
 ------------ | ------------ | ------------- | -------------
 ------------ | ------------ | ------------- | -------------
 `group_dn` | Yes | LDAP distinguished name (DN) of LDAP group. If you want to match all (or no LDAP groups) then you can use wildcard (`"*"`) |
 `group_dn` | Yes | LDAP distinguished name (DN) of LDAP group. If you want to match all (or no LDAP groups) then you can use wildcard (`"*"`) |
-`org_role` | Yes | Assign users of `group_dn` the organisation role `"Admin"`, `"Editor"` or `"Viewer"` |
+`org_role` | Yes | Assign users of `group_dn` the organization role `"Admin"`, `"Editor"` or `"Viewer"` |
 `org_id` | No | The Grafana organization database id. Setting this allows for multiple group_dn's to be assigned to the same `org_role` provided the `org_id` differs | `1` (default org id)
 `org_id` | No | The Grafana organization database id. Setting this allows for multiple group_dn's to be assigned to the same `org_role` provided the `org_id` differs | `1` (default org id)
-`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organisations and users. Available in Grafana v5.3 and above | `false`
+`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organizations and users. Available in Grafana v5.3 and above | `false`
 
 
 ### Nested/recursive group membership
 ### Nested/recursive group membership
 
 

+ 2 - 2
docs/sources/contribute/cla.md

@@ -1,6 +1,6 @@
 +++
 +++
-title = "Contributor Licence Agreement (CLA)"
-description = "Contributor Licence Agreement (CLA)"
+title = "Contributor License Agreement (CLA)"
+description = "Contributor License Agreement (CLA)"
 type = "docs"
 type = "docs"
 aliases = ["/project/cla", "docs/contributing/cla.html"]
 aliases = ["/project/cla", "docs/contributing/cla.html"]
 [menu.docs]
 [menu.docs]

+ 1 - 1
docs/sources/enterprise/index.md

@@ -31,7 +31,7 @@ Datasource permissions allow you to restrict query access to only specific Teams
 
 
 ### Premium Plugins
 ### Premium Plugins
 
 
-With a Grafana Enterprise licence you will get access to premium plugins, including:
+With a Grafana Enterprise license you will get access to premium plugins, including:
 
 
 * [Splunk](https://grafana.com/plugins/grafana-splunk-datasource)
 * [Splunk](https://grafana.com/plugins/grafana-splunk-datasource)
 * [AppDynamics](https://grafana.com/plugins/dlopes7-appdynamics-datasource)
 * [AppDynamics](https://grafana.com/plugins/dlopes7-appdynamics-datasource)

+ 3 - 3
docs/sources/features/datasources/mysql.md

@@ -133,9 +133,9 @@ Macro example | Description
 ------------ | -------------
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN FROM_UNIXTIME(1494410783) AND FROM_UNIXTIME(1494410983)*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494410983)*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
 *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
 *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.

+ 1 - 1
docs/sources/guides/whats-new-in-v4.md

@@ -134,7 +134,7 @@ continue work on a `build mode` for a future release.
 The new row menu automatically slides out when you mouse over the edge of the row. You no longer need
 The new row menu automatically slides out when you mouse over the edge of the row. You no longer need
 to hover over the small green icon and then click it to expand the row menu.
 to hover over the small green icon and then click it to expand the row menu.
 
 
-There are some minor improvements to drag and drop behaviour. Now when dragging a panel from one row
+There are some minor improvements to drag and drop behavior. Now when dragging a panel from one row
 to another you will insert the panel and Grafana will automatically make room for it.
 to another you will insert the panel and Grafana will automatically make room for it.
 When you drag a panel within a row you will simply reorder the panels.
 When you drag a panel within a row you will simply reorder the panels.
 
 

+ 65 - 0
docs/sources/guides/whats-new-in-v5-4.md

@@ -12,6 +12,71 @@ weight = -10
 
 
 # What's New in Grafana v5.4
 # What's New in Grafana v5.4
 
 
+Grafana v5.4 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements.
+
+- [Alerting]({{< relref "#alerting" >}}) Limit false positives with the new `For` setting
+- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) Now with support for templating queries
+- [MySQL]({{< relref "#mysql-query-builder" >}}) gets a new query builder!
+- [Graph Panel]({{< relref "#graph-panel-enhancements" >}}) Highlight time regions and more
+- [Team Preferences]({{< relref "#team-preferences" >}}) Give your teams their own home dashboard
+
+## Alerting
+
+{{< docs-imagebox img="/img/docs/v54/alerting-for-dark-theme.png" max-width="600px" class="docs-image--right" >}}
+
+Grafana v5.4 ships with a new alert rule setting named `For` which is great for removing false positives. If an alert rule has a configured `For` and the query violates the configured threshold it will first go from `OK` to `Pending`. Going from `OK` to `Pending` Grafana will not send any notifications. Once the alert rule has been firing for more than `For` duration, it will change to `Alerting` and send alert notifications. Typically, it's always a good idea to use this setting since it's often worse to get false positive than wait a few minutes before the alert notification triggers.
+
+In the screenshot you can see an example timeline of an alert using the `For` setting. At ~16:04 the alert state changes to `Pending` and after 4 minutes it changes to `Alerting` which is when alert notifications are sent. Once the series falls back to normal the alert rule goes back to `OK`. [Learn more](/alerting/rules/#for).
+
+Additionally, there's now support for disable the sending of `OK` alert notifications. [Learn more](/alerting/notifications/#disable-resolve-message).
+
+<div class="clearfix"></div>
+
+## Google Stackdriver
+
+{{< docs-imagebox img="/img/docs/v54/stackdriver_template_query.png" max-width="600px" class="docs-image--right" >}}
+
+Grafana v5.3 included built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) which enables you to visualize your Stackdriver metrics in Grafana.
+One important feature missing was support for templating queries. This is now included together with a brand new templating query editor for Stackdriver.
+
+The Stackdriver templating query editor lets you choose from a set of different Query Types. This will in turn reveal additional drop downs to help you
+find, filter and select the templating values you're interested in, see screenshot for details. The templating query editor also supports chaining multiple variables
+making it easy to define variables that's dependent on other variables.
+
+Stackdriver is the first datasource which has support for a custom templating query editor. But starting from Grafana v5.4 it's now possible for all datasources, including plugin datasources, to
+create their very own templating query editor.
+
+Additionally, if Grafana is running on a Google Compute Engine (GCE) virtual machine, it is now possible for Grafana to automatically retrieve default credentials from the metadata server.
+This has the advantage of not needing to generate a private key file for the service account and also not having to upload the file to Grafana. [Learn more](/features/datasources/stackdriver/#using-gce-default-service-account).
+
+Please read [Using Google Stackdriver in Grafana](/features/datasources/stackdriver/) for more detailed information on how to get started and use it.
+
+<div class="clearfix"></div>
+
+## MySQL Query Builder
+
+Grafana v5.4 comes with a new graphical query builder for MySQL. This brings MySQL integration more in line with some of the other datasources and makes it easier for both advanced users and beginners to work with timeseries in MySQL. Learn more about it in the [documentation](/features/datasources/mysql/#query-editor).
+
+{{< docs-imagebox img="/img/docs/v54/mysql_query_still.png" animated-gif="/img/docs/v54/mysql_query.gif" >}}
+
+## Graph Panel Enhancements
+
+Grafana v5.4 adds support for highlighting weekdays and/or certain timespans in the graph panel. This should make it easier to compare for example weekends, business hours and/or off work hours.
+
+{{< docs-imagebox img="/img/docs/v54/graph_time_regions.png" max-width= "800px" >}}
+
+Additionally, when rendering series as lines in the graph panel, should there be only one data point available for one series so that a connecting line cannot be established, a point will
+automatically be rendered for that data point. This should make it easier to understand what's going on when only receiving a single data point.
+
+{{< docs-imagebox img="/img/docs/v54/graph_dot_single_point.png" max-width= "800px" >}}
+
+## Team Preferences
+
+Grafana v5.4 adds support for customizing home dashboard, timezone and theme for teams, in addition to the existing customization on Organization and user Profile level.
+
+1. Specifying a preference on User Profile level will override preference on Team and/or Organization level
+2. Specifying a preference on Team level will override preference on Organization level.
+
 ## Changelog
 ## Changelog
 
 
 Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
 Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list

+ 4 - 1
docs/sources/http_api/alerting.md

@@ -160,11 +160,14 @@ Content-Type: application/json
 
 
 `POST /api/admin/pause-all-alerts`
 `POST /api/admin/pause-all-alerts`
 
 
+Only works with Basic Authentication (username and password). See [introduction](http://docs.grafana.org/http_api/admin/#admin-api) for an explanation.
+
+**Example Request**:
+
 ```http
 ```http
 POST /api/admin/pause-all-alerts HTTP/1.1
 POST /api/admin/pause-all-alerts HTTP/1.1
 Accept: application/json
 Accept: application/json
 Content-Type: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 
 
 {
 {
   "paused": true
   "paused": true

+ 1 - 1
docs/sources/http_api/index.md

@@ -26,7 +26,7 @@ dashboards, creating users and updating data sources.
 * [Folder Permissions API]({{< relref "http_api/folder_permissions.md" >}})
 * [Folder Permissions API]({{< relref "http_api/folder_permissions.md" >}})
 * [Folder/dashboard search API]({{< relref "/http_api/folder_dashboard_search.md" >}})
 * [Folder/dashboard search API]({{< relref "/http_api/folder_dashboard_search.md" >}})
 * [Data Source API]({{< relref "http_api/data_source.md" >}})
 * [Data Source API]({{< relref "http_api/data_source.md" >}})
-* [Organisation API]({{< relref "http_api/org.md" >}})
+* [Organization API]({{< relref "http_api/org.md" >}})
 * [Snapshot API]({{< relref "http_api/snapshot.md" >}})
 * [Snapshot API]({{< relref "http_api/snapshot.md" >}})
 * [Annotations API]({{< relref "http_api/annotations.md" >}})
 * [Annotations API]({{< relref "http_api/annotations.md" >}})
 * [Alerting API]({{< relref "http_api/alerting.md" >}})
 * [Alerting API]({{< relref "http_api/alerting.md" >}})

+ 39 - 39
docs/sources/http_api/org.md

@@ -1,24 +1,24 @@
 +++
 +++
-title = "Organisation HTTP API "
-description = "Grafana Organisation HTTP API"
-keywords = ["grafana", "http", "documentation", "api", "organisation"]
-aliases = ["/http_api/organisation/"]
+title = "Organization HTTP API "
+description = "Grafana Organization HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "organization"]
+aliases = ["/http_api/organization/"]
 type = "docs"
 type = "docs"
 [menu.docs]
 [menu.docs]
-name = "Organisation"
+name = "Organization"
 parent = "http_api"
 parent = "http_api"
 +++
 +++
 
 
 
 
-# Organisation API
+# Organization API
 
 
-The Organisation HTTP API is divided in two resources, `/api/org` (current organisation)
-and `/api/orgs` (admin organisations). One big difference between these are that
-the admin of all organisations API only works with basic authentication, see [Admin Organisations API](#admin-organisations-api) for more information.
+The Organization HTTP API is divided in two resources, `/api/org` (current organization)
+and `/api/orgs` (admin organizations). One big difference between these are that
+the admin of all organizations API only works with basic authentication, see [Admin Organizations API](#admin-organizations-api) for more information.
 
 
-## Current Organisation API
+## Current Organization API
 
 
-### Get current Organisation
+### Get current Organization
 
 
 `GET /api/org/`
 `GET /api/org/`
 
 
@@ -43,7 +43,7 @@ Content-Type: application/json
 }
 }
 ```
 ```
 
 
-### Get all users within the current organisation
+### Get all users within the current organization
 
 
 `GET /api/org/users`
 `GET /api/org/users`
 
 
@@ -99,7 +99,7 @@ Content-Type: application/json
 {"message":"Organization user updated"}
 {"message":"Organization user updated"}
 ```
 ```
 
 
-### Delete user in current organisation
+### Delete user in current organization
 
 
 `DELETE /api/org/users/:userId`
 `DELETE /api/org/users/:userId`
 
 
@@ -121,7 +121,7 @@ Content-Type: application/json
 {"message":"User removed from organization"}
 {"message":"User removed from organization"}
 ```
 ```
 
 
-### Update current Organisation
+### Update current Organization
 
 
 `PUT /api/org`
 `PUT /api/org`
 
 
@@ -147,11 +147,11 @@ Content-Type: application/json
 {"message":"Organization updated"}
 {"message":"Organization updated"}
 ```
 ```
 
 
-### Add a new user to the current organisation
+### Add a new user to the current organization
 
 
 `POST /api/org/users`
 `POST /api/org/users`
 
 
-Adds a global user to the current organisation.
+Adds a global user to the current organization.
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -176,19 +176,19 @@ Content-Type: application/json
 {"message":"User added to organization"}
 {"message":"User added to organization"}
 ```
 ```
 
 
-## Admin Organisations API
+## Admin Organizations API
 
 
-The Admin Organisations HTTP API does not currently work with an API Token. API Tokens are currently
+The Admin Organizations HTTP API does not currently work with an API Token. API Tokens are currently
 only linked to an organization and an organization role. They cannot be given the permission of server
 only linked to an organization and an organization role. They cannot be given the permission of server
 admin, only users can be given that permission. So in order to use these API calls you will have to
 admin, only users can be given that permission. So in order to use these API calls you will have to
 use Basic Auth and the Grafana user must have the Grafana Admin permission (The default admin user
 use Basic Auth and the Grafana user must have the Grafana Admin permission (The default admin user
 is called `admin` and has permission to use this API).
 is called `admin` and has permission to use this API).
 
 
-### Get Organisation by Id
+### Get Organization by Id
 
 
 `GET /api/orgs/:orgId`
 `GET /api/orgs/:orgId`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -217,11 +217,11 @@ Content-Type: application/json
   }
   }
 }
 }
 ```
 ```
-### Get Organisation by Name
+### Get Organization by Name
 
 
 `GET /api/orgs/name/:orgName`
 `GET /api/orgs/name/:orgName`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -251,11 +251,11 @@ Content-Type: application/json
 }
 }
 ```
 ```
 
 
-### Create Organisation
+### Create Organization
 
 
 `POST /api/orgs`
 `POST /api/orgs`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -284,11 +284,11 @@ Content-Type: application/json
 }
 }
 ```
 ```
 
 
-### Search all Organisations
+### Search all Organizations
 
 
 `GET /api/orgs`
 `GET /api/orgs`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -314,12 +314,12 @@ Content-Type: application/json
 ]
 ]
 ```
 ```
 
 
-### Update Organisation
+### Update Organization
 
 
 `PUT /api/orgs/:orgId`
 `PUT /api/orgs/:orgId`
 
 
-Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Update Organization, fields *Address 1*, *Address 2*, *City* are not implemented yet.
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -342,11 +342,11 @@ Content-Type: application/json
 {"message":"Organization updated"}
 {"message":"Organization updated"}
 ```
 ```
 
 
-### Delete Organisation
+### Delete Organization
 
 
 `DELETE /api/orgs/:orgId`
 `DELETE /api/orgs/:orgId`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -364,11 +364,11 @@ Content-Type: application/json
 {"message":"Organization deleted"}
 {"message":"Organization deleted"}
 ```
 ```
 
 
-### Get Users in Organisation
+### Get Users in Organization
 
 
 `GET /api/orgs/:orgId/users`
 `GET /api/orgs/:orgId/users`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -397,11 +397,11 @@ Content-Type: application/json
 ]
 ]
 ```
 ```
 
 
-### Add User in Organisation
+### Add User in Organization
 
 
 `POST /api/orgs/:orgId/users`
 `POST /api/orgs/:orgId/users`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -425,11 +425,11 @@ Content-Type: application/json
 {"message":"User added to organization"}
 {"message":"User added to organization"}
 ```
 ```
 
 
-### Update Users in Organisation
+### Update Users in Organization
 
 
 `PATCH /api/orgs/:orgId/users/:userId`
 `PATCH /api/orgs/:orgId/users/:userId`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 
@@ -452,11 +452,11 @@ Content-Type: application/json
 {"message":"Organization user updated"}
 {"message":"Organization user updated"}
 ```
 ```
 
 
-### Delete User in Organisation
+### Delete User in Organization
 
 
 `DELETE /api/orgs/:orgId/users/:userId`
 `DELETE /api/orgs/:orgId/users/:userId`
 
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 
 **Example Request**:
 **Example Request**:
 
 

+ 3 - 3
docs/sources/http_api/user.md

@@ -196,7 +196,7 @@ Content-Type: application/json
 {"message":"User updated"}
 {"message":"User updated"}
 ```
 ```
 
 
-## Get Organisations for user
+## Get Organizations for user
 
 
 `GET /api/users/:id/orgs`
 `GET /api/users/:id/orgs`
 
 
@@ -367,11 +367,11 @@ Content-Type: application/json
 {"message":"Active organization changed"}
 {"message":"Active organization changed"}
 ```
 ```
 
 
-## Organisations of the actual User
+## Organizations of the actual User
 
 
 `GET /api/user/orgs`
 `GET /api/user/orgs`
 
 
-Return a list of all organisations of the current user.
+Return a list of all organizations of the current user.
 
 
 **Example Request**:
 **Example Request**:
 
 

+ 3 - 3
docs/sources/index.md

@@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]
         <h4>Provisioning</h4>
         <h4>Provisioning</h4>
         <p>A guide to help you automate your Grafana setup & configuration.</p>
         <p>A guide to help you automate your Grafana setup & configuration.</p>
     </a>
     </a>
-    <a href="{{< relref "guides/whats-new-in-v5-3.md" >}}" class="nav-cards__item nav-cards__item--guide">
-        <h4>What's new in v5.3</h4>
-        <p>Article on all the new cool features and enhancements in v5.3</p>
+    <a href="{{< relref "guides/whats-new-in-v5-4.md" >}}" class="nav-cards__item nav-cards__item--guide">
+        <h4>What's new in v5.4</h4>
+        <p>Article on all the new cool features and enhancements in v5.4</p>
     </a>
     </a>
     <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
     <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
         <h4>Screencasts</h4>
         <h4>Screencasts</h4>

+ 2 - 0
docs/sources/reference/export_import.md

@@ -107,3 +107,5 @@ it as usual and then update the data source option in the metrics tab so that th
 data source. Another alternative is to open the json file in a a text editor and update the data source properties
 data source. Another alternative is to open the json file in a a text editor and update the data source properties
 to value that matches a name of your data source.
 to value that matches a name of your data source.
 
 
+## Note
+In Grafana v5.3.4+ the export modal has new checkbox for sharing for external use (other instances). If the checkbox is not checked then the `__inputs` section will not be included in the exported JSON file.

+ 1 - 1
docs/sources/reference/templating.md

@@ -25,7 +25,7 @@ the value, using the dropdown at the top of the dashboard, your panel's metric q
 
 
 Panel titles and metric queries can refer to variables using two different syntaxes:
 Panel titles and metric queries can refer to variables using two different syntaxes:
 
 
-- `$<varname>`  Example: apps.frontend.$server.requests.count
+- `$varname`  Example: apps.frontend.$server.requests.count
 - `[[varname]]` Example: apps.frontend.[[server]].requests.count
 - `[[varname]]` Example: apps.frontend.[[server]].requests.count
 
 
 Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of word. Use
 Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of word. Use

+ 2 - 1
docs/versions.json

@@ -1,5 +1,6 @@
 [
 [
-  { "version": "v5.3", "path": "/", "archived": false, "current": true },
+  { "version": "v5.4", "path": "/", "archived": false, "current": true },
+  { "version": "v5.3", "path": "/v5.3", "archived": true },
   { "version": "v5.2", "path": "/v5.2", "archived": true },
   { "version": "v5.2", "path": "/v5.2", "archived": true },
   { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
 {
-  "stable": "5.3.4",
-  "testing": "5.3.4"
+  "stable": "5.4.0",
+  "testing": "5.4.0"
 }
 }

+ 16 - 20
package.json

@@ -4,12 +4,18 @@
     "company": "Grafana Labs"
     "company": "Grafana Labs"
   },
   },
   "name": "grafana",
   "name": "grafana",
-  "version": "5.4.0-pre1",
+  "version": "5.5.0-pre1",
   "repository": {
   "repository": {
     "type": "git",
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"
     "url": "http://github.com/grafana/grafana.git"
   },
   },
   "devDependencies": {
   "devDependencies": {
+    "@babel/core": "^7.1.2",
+    "@rtsao/plugin-proposal-class-properties": "^7.0.1-patch.1",
+    "@babel/plugin-syntax-dynamic-import": "^7.0.0",
+    "@babel/preset-env": "^7.1.0",
+    "@babel/preset-react": "^7.0.0",
+    "@babel/preset-typescript": "^7.1.0",
     "@types/d3": "^4.10.1",
     "@types/d3": "^4.10.1",
     "@types/enzyme": "^3.1.13",
     "@types/enzyme": "^3.1.13",
     "@types/jest": "^23.3.2",
     "@types/jest": "^23.3.2",
@@ -21,10 +27,10 @@
     "angular-mocks": "1.6.6",
     "angular-mocks": "1.6.6",
     "autoprefixer": "^6.4.0",
     "autoprefixer": "^6.4.0",
     "axios": "^0.17.1",
     "axios": "^0.17.1",
-    "babel-core": "^6.26.0",
-    "babel-loader": "^7.1.4",
-    "babel-plugin-syntax-dynamic-import": "^6.18.0",
-    "babel-preset-es2015": "^6.24.1",
+    "babel-core": "^7.0.0-bridge",
+    "babel-jest": "^23.6.0",
+    "babel-loader": "^8.0.4",
+    "babel-plugin-angularjs-annotate": "^0.9.0",
     "clean-webpack-plugin": "^0.1.19",
     "clean-webpack-plugin": "^0.1.19",
     "css-loader": "^0.28.7",
     "css-loader": "^0.28.7",
     "enzyme": "^3.6.0",
     "enzyme": "^3.6.0",
@@ -108,18 +114,9 @@
     "precommit": "lint-staged && grunt precommit"
     "precommit": "lint-staged && grunt precommit"
   },
   },
   "lint-staged": {
   "lint-staged": {
-    "*.{ts,tsx}": [
-      "prettier --write",
-      "git add"
-    ],
-    "*.scss": [
-      "prettier --write",
-      "git add"
-    ],
-    "*pkg/**/*.go": [
-      "gofmt -w -s",
-      "git add"
-    ]
+    "*.{ts,tsx}": ["prettier --write", "git add"],
+    "*.scss": ["prettier --write", "git add"],
+    "*pkg/**/*.go": ["gofmt -w -s", "git add"]
   },
   },
   "prettier": {
   "prettier": {
     "trailingComma": "es5",
     "trailingComma": "es5",
@@ -128,13 +125,12 @@
   },
   },
   "license": "Apache-2.0",
   "license": "Apache-2.0",
   "dependencies": {
   "dependencies": {
+    "@babel/polyfill": "^7.0.0",
     "angular": "1.6.6",
     "angular": "1.6.6",
     "angular-bindonce": "0.3.1",
     "angular-bindonce": "0.3.1",
     "angular-native-dragdrop": "1.2.2",
     "angular-native-dragdrop": "1.2.2",
     "angular-route": "1.6.6",
     "angular-route": "1.6.6",
     "angular-sanitize": "1.6.6",
     "angular-sanitize": "1.6.6",
-    "babel-jest": "^23.6.0",
-    "babel-polyfill": "^6.26.0",
     "baron": "^3.0.3",
     "baron": "^3.0.3",
     "brace": "^0.10.0",
     "brace": "^0.10.0",
     "classnames": "^2.2.5",
     "classnames": "^2.2.5",
@@ -156,8 +152,8 @@
     "react-custom-scrollbars": "^4.2.1",
     "react-custom-scrollbars": "^4.2.1",
     "react-dom": "^16.6.3",
     "react-dom": "^16.6.3",
     "react-grid-layout": "0.16.6",
     "react-grid-layout": "0.16.6",
-    "react-highlight-words": "^0.10.0",
     "react-popper": "^1.3.0",
     "react-popper": "^1.3.0",
+    "react-highlight-words": "0.11.0",
     "react-redux": "^5.0.7",
     "react-redux": "^5.0.7",
     "react-select": "2.1.0",
     "react-select": "2.1.0",
     "react-sizeme": "^2.3.6",
     "react-sizeme": "^2.3.6",

+ 2 - 1
packaging/docker/Dockerfile

@@ -25,7 +25,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 
 
 WORKDIR $GF_PATHS_HOME
 WORKDIR $GF_PATHS_HOME
 
 
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates curl && \
+RUN apt-get update && apt-get -y upgrade && \
+    apt-get install -qq -y libfontconfig ca-certificates curl && \
     apt-get autoremove -y && \
     apt-get autoremove -y && \
     rm -rf /var/lib/apt/lists/*
     rm -rf /var/lib/apt/lists/*
 
 

+ 5 - 0
packaging/docker/build-enterprise.sh

@@ -18,3 +18,8 @@ docker build \
   .
   .
 
 
 docker push "${_docker_repo}:${_grafana_tag}"
 docker push "${_docker_repo}:${_grafana_tag}"
+
+if echo "$_raw_grafana_tag" | grep -q "^v" && echo "$_raw_grafana_tag" | grep -qv "beta"; then
+  docker tag "${_docker_repo}:${_grafana_tag}" "${_docker_repo}:latest"
+  docker push "${_docker_repo}:latest"
+fi

+ 6 - 0
pkg/api/admin_users.go

@@ -76,6 +76,7 @@ func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordF
 	c.JsonOK("User password updated")
 	c.JsonOK("User password updated")
 }
 }
 
 
+// PUT /api/admin/users/:id/permissions
 func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermissionsForm) {
 func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermissionsForm) {
 	userID := c.ParamsInt64(":id")
 	userID := c.ParamsInt64(":id")
 
 
@@ -85,6 +86,11 @@ func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermis
 	}
 	}
 
 
 	if err := bus.Dispatch(&cmd); err != nil {
 	if err := bus.Dispatch(&cmd); err != nil {
+		if err == m.ErrLastGrafanaAdmin {
+			c.JsonApiErr(400, m.ErrLastGrafanaAdmin.Error(), nil)
+			return
+		}
+
 		c.JsonApiErr(500, "Failed to update user permissions", err)
 		c.JsonApiErr(500, "Failed to update user permissions", err)
 		return
 		return
 	}
 	}

+ 50 - 0
pkg/api/admin_users_test.go

@@ -0,0 +1,50 @@
+package api
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/api/dtos"
+	"github.com/grafana/grafana/pkg/bus"
+	m "github.com/grafana/grafana/pkg/models"
+
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestAdminApiEndpoint(t *testing.T) {
+	role := m.ROLE_ADMIN
+	Convey("Given a server admin attempts to remove themself as an admin", t, func() {
+
+		updateCmd := dtos.AdminUpdateUserPermissionsForm{
+			IsGrafanaAdmin: false,
+		}
+
+		bus.AddHandler("test", func(cmd *m.UpdateUserPermissionsCommand) error {
+			return m.ErrLastGrafanaAdmin
+		})
+
+		putAdminScenario("When calling PUT on", "/api/admin/users/1/permissions", "/api/admin/users/:id/permissions", role, updateCmd, func(sc *scenarioContext) {
+			sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
+			So(sc.resp.Code, ShouldEqual, 400)
+		})
+	})
+}
+
+func putAdminScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.AdminUpdateUserPermissionsForm, fn scenarioFunc) {
+	Convey(desc+" "+url, func() {
+		defer bus.ClearBusHandlers()
+
+		sc := setupScenarioContext(url)
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) {
+			sc.context = c
+			sc.context.UserId = TestUserID
+			sc.context.OrgId = TestOrgID
+			sc.context.OrgRole = role
+
+			AdminUpdateUserPermissions(c, cmd)
+		})
+
+		sc.m.Put(routePattern, sc.defaultHandler)
+
+		fn(sc)
+	})
+}

+ 0 - 4
pkg/api/dashboard.go

@@ -277,10 +277,6 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response {
 		return Error(500, "Failed to save dashboard", err)
 		return Error(500, "Failed to save dashboard", err)
 	}
 	}
 
 
-	if err == m.ErrDashboardFailedToUpdateAlertData {
-		return Error(500, "Invalid alert data. Cannot save dashboard", err)
-	}
-
 	c.TimeRequest(metrics.M_Api_Dashboard_Save)
 	c.TimeRequest(metrics.M_Api_Dashboard_Save)
 	return JSON(200, util.DynMap{
 	return JSON(200, util.DynMap{
 		"status":  "success",
 		"status":  "success",

+ 0 - 1
pkg/api/dashboard_test.go

@@ -727,7 +727,6 @@ func TestDashboardApiEndpoint(t *testing.T) {
 				{SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400},
 				{SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422},
 				{SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422},
-				{SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500},
 				{SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500},
 				{SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500},
 				{SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardFolderWithSameNameAsDashboard, ExpectedStatusCode: 400},
 				{SaveError: m.ErrDashboardFolderWithSameNameAsDashboard, ExpectedStatusCode: 400},

+ 8 - 0
pkg/api/password.go

@@ -4,10 +4,18 @@ import (
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/bus"
 	m "github.com/grafana/grafana/pkg/models"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util"
 	"github.com/grafana/grafana/pkg/util"
 )
 )
 
 
 func SendResetPasswordEmail(c *m.ReqContext, form dtos.SendResetPasswordEmailForm) Response {
 func SendResetPasswordEmail(c *m.ReqContext, form dtos.SendResetPasswordEmailForm) Response {
+	if setting.LdapEnabled || setting.AuthProxyEnabled {
+		return Error(401, "Not allowed to reset password when LDAP or Auth Proxy is enabled", nil)
+	}
+	if setting.DisableLoginForm {
+		return Error(401, "Not allowed to reset password when login form is disabled", nil)
+	}
+
 	userQuery := m.GetUserByLoginQuery{LoginOrEmail: form.UserOrEmail}
 	userQuery := m.GetUserByLoginQuery{LoginOrEmail: form.UserOrEmail}
 
 
 	if err := bus.Dispatch(&userQuery); err != nil {
 	if err := bus.Dispatch(&userQuery); err != nil {

+ 3 - 3
pkg/api/pluginproxy/ds_auth_provider.go

@@ -51,7 +51,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 		if token, err := tokenProvider.getAccessToken(data); err != nil {
 		if token, err := tokenProvider.getAccessToken(data); err != nil {
 			logger.Error("Failed to get access token", "error", err)
 			logger.Error("Failed to get access token", "error", err)
 		} else {
 		} else {
-			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 		}
 		}
 	}
 	}
 
 
@@ -60,7 +60,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 		if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
 		if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
 			logger.Error("Failed to get access token", "error", err)
 			logger.Error("Failed to get access token", "error", err)
 		} else {
 		} else {
-			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 		}
 		}
 	}
 	}
 
 
@@ -73,7 +73,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 			if err != nil {
 			if err != nil {
 				logger.Error("Failed to get default access token from meta data server", "error", err)
 				logger.Error("Failed to get default access token from meta data server", "error", err)
 			} else {
 			} else {
-				req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
 			}
 			}
 		}
 		}
 	}
 	}

+ 1 - 1
pkg/api/pluginproxy/pluginproxy.go

@@ -87,7 +87,7 @@ func NewApiPluginProxy(ctx *m.ReqContext, proxyPath string, route *plugins.AppPl
 			}
 			}
 
 
 			for key, value := range headers {
 			for key, value := range headers {
-				log.Trace("setting key %v value %v", key, value[0])
+				log.Trace("setting key %v value <redacted>", key)
 				req.Header.Set(key, value[0])
 				req.Header.Set(key, value[0])
 			}
 			}
 		}
 		}

+ 0 - 1
pkg/models/dashboards.go

@@ -21,7 +21,6 @@ var (
 	ErrDashboardVersionMismatch                = errors.New("The dashboard has been changed by someone else")
 	ErrDashboardVersionMismatch                = errors.New("The dashboard has been changed by someone else")
 	ErrDashboardTitleEmpty                     = errors.New("Dashboard title cannot be empty")
 	ErrDashboardTitleEmpty                     = errors.New("Dashboard title cannot be empty")
 	ErrDashboardFolderCannotHaveParent         = errors.New("A Dashboard Folder cannot be added to another folder")
 	ErrDashboardFolderCannotHaveParent         = errors.New("A Dashboard Folder cannot be added to another folder")
-	ErrDashboardFailedToUpdateAlertData        = errors.New("Failed to save alert data")
 	ErrDashboardsWithSameSlugExists            = errors.New("Multiple dashboards with the same slug exists")
 	ErrDashboardsWithSameSlugExists            = errors.New("Multiple dashboards with the same slug exists")
 	ErrDashboardFailedGenerateUniqueUid        = errors.New("Failed to generate unique dashboard id")
 	ErrDashboardFailedGenerateUniqueUid        = errors.New("Failed to generate unique dashboard id")
 	ErrDashboardTypeMismatch                   = errors.New("Dashboard cannot be changed to a folder")
 	ErrDashboardTypeMismatch                   = errors.New("Dashboard cannot be changed to a folder")

+ 2 - 1
pkg/models/user.go

@@ -7,7 +7,8 @@ import (
 
 
 // Typed errors
 // Typed errors
 var (
 var (
-	ErrUserNotFound = errors.New("User not found")
+	ErrUserNotFound     = errors.New("User not found")
+	ErrLastGrafanaAdmin = errors.New("Cannot remove last grafana admin")
 )
 )
 
 
 type Password string
 type Password string

+ 1 - 1
pkg/services/dashboards/dashboard_service.go

@@ -165,7 +165,7 @@ func (dr *dashboardServiceImpl) updateAlerting(cmd *models.SaveDashboardCommand,
 	}
 	}
 
 
 	if err := bus.Dispatch(&alertCmd); err != nil {
 	if err := bus.Dispatch(&alertCmd); err != nil {
-		return models.ErrDashboardFailedToUpdateAlertData
+		return err
 	}
 	}
 
 
 	return nil
 	return nil

+ 25 - 1
pkg/services/sqlstore/user.go

@@ -504,8 +504,18 @@ func UpdateUserPermissions(cmd *m.UpdateUserPermissionsCommand) error {
 
 
 		user.IsAdmin = cmd.IsGrafanaAdmin
 		user.IsAdmin = cmd.IsGrafanaAdmin
 		sess.UseBool("is_admin")
 		sess.UseBool("is_admin")
+
 		_, err := sess.ID(user.Id).Update(&user)
 		_, err := sess.ID(user.Id).Update(&user)
-		return err
+		if err != nil {
+			return err
+		}
+
+		// validate that after update there is at least one server admin
+		if err := validateOneAdminLeft(sess); err != nil {
+			return err
+		}
+
+		return nil
 	})
 	})
 }
 }
 
 
@@ -522,3 +532,17 @@ func SetUserHelpFlag(cmd *m.SetUserHelpFlagCommand) error {
 		return err
 		return err
 	})
 	})
 }
 }
+
+func validateOneAdminLeft(sess *DBSession) error {
+	// validate that there is an admin user left
+	count, err := sess.Where("is_admin=?", true).Count(&m.User{})
+	if err != nil {
+		return err
+	}
+
+	if count == 0 {
+		return m.ErrLastGrafanaAdmin
+	}
+
+	return nil
+}

+ 26 - 0
pkg/services/sqlstore/user_test.go

@@ -155,6 +155,32 @@ func TestUserDataAccess(t *testing.T) {
 				})
 				})
 			})
 			})
 		})
 		})
+
+		Convey("Given one grafana admin user", func() {
+			var err error
+			createUserCmd := &m.CreateUserCommand{
+				Email:   fmt.Sprint("admin", "@test.com"),
+				Name:    fmt.Sprint("admin"),
+				Login:   fmt.Sprint("admin"),
+				IsAdmin: true,
+			}
+			err = CreateUser(context.Background(), createUserCmd)
+			So(err, ShouldBeNil)
+
+			Convey("Cannot make themselves a non-admin", func() {
+				updateUserPermsCmd := m.UpdateUserPermissionsCommand{IsGrafanaAdmin: false, UserId: 1}
+				updatePermsError := UpdateUserPermissions(&updateUserPermsCmd)
+
+				So(updatePermsError, ShouldEqual, m.ErrLastGrafanaAdmin)
+
+				query := m.GetUserByIdQuery{Id: createUserCmd.Result.Id}
+				getUserError := GetUserById(&query)
+
+				So(getUserError, ShouldBeNil)
+
+				So(query.Result.IsAdmin, ShouldEqual, true)
+			})
+		})
 	})
 	})
 }
 }
 
 

+ 2 - 0
pkg/tsdb/cloudwatch/metric_find_query.go

@@ -47,6 +47,7 @@ func init() {
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
 		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
 		"AWS/CloudHSM":       {"HsmUnhealthy", "HsmTemperature", "HsmKeysSessionOccupied", "HsmKeysTokenOccupied", "HsmSslCtxsOccupied", "HsmSessionCount", "HsmUsersAvailable", "HsmUsersMax", "InterfaceEth2OctetsInput", "InterfaceEth2OctetsOutput"},
 		"AWS/CloudHSM":       {"HsmUnhealthy", "HsmTemperature", "HsmKeysSessionOccupied", "HsmKeysTokenOccupied", "HsmSslCtxsOccupied", "HsmSessionCount", "HsmUsersAvailable", "HsmUsersMax", "InterfaceEth2OctetsInput", "InterfaceEth2OctetsOutput"},
+		"AWS/CodeBuild":      {"BuildDuration", "Builds", "DownloadSourceDuration", "Duration", "FailedBuilds", "FinalizingDuration", "InstallDuration", "PostBuildDuration", "PreBuildDuration", "ProvisioningDuration", "QueuedDuration", "SubmittedDuration", "SucceededBuilds", "UploadArtifactsDuration"},
 		"AWS/Connect":        {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
 		"AWS/Connect":        {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
 		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
 		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
 		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
 		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
@@ -123,6 +124,7 @@ func init() {
 		"AWS/CloudFront":       {"DistributionId", "Region"},
 		"AWS/CloudFront":       {"DistributionId", "Region"},
 		"AWS/CloudSearch":      {},
 		"AWS/CloudSearch":      {},
 		"AWS/CloudHSM":         {"Region", "ClusterId", "HsmId"},
 		"AWS/CloudHSM":         {"Region", "ClusterId", "HsmId"},
+		"AWS/CodeBuild":        {"ProjectName"},
 		"AWS/Connect":          {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
 		"AWS/Connect":          {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
 		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 		"AWS/DX":               {"ConnectionId"},
 		"AWS/DX":               {"ConnectionId"},

+ 1 - 1
pkg/tsdb/elasticsearch/client/client.go

@@ -65,7 +65,7 @@ var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb
 	clientLog.Debug("Creating new client", "version", version, "timeField", timeField, "indices", strings.Join(indices, ", "))
 	clientLog.Debug("Creating new client", "version", version, "timeField", timeField, "indices", strings.Join(indices, ", "))
 
 
 	switch version {
 	switch version {
-	case 2, 5, 56:
+	case 2, 5, 56, 60:
 		return &baseClientImpl{
 		return &baseClientImpl{
 			ctx:       ctx,
 			ctx:       ctx,
 			ds:        ds,
 			ds:        ds,

+ 13 - 6
pkg/tsdb/elasticsearch/client/client_test.go

@@ -90,6 +90,19 @@ func TestClient(t *testing.T) {
 				So(err, ShouldBeNil)
 				So(err, ShouldBeNil)
 				So(c.GetVersion(), ShouldEqual, 56)
 				So(c.GetVersion(), ShouldEqual, 56)
 			})
 			})
+
+			Convey("When version 60 should return v6.0 client", func() {
+				ds := &models.DataSource{
+					JsonData: simplejson.NewFromAny(map[string]interface{}{
+						"esVersion": 60,
+						"timeField": "@timestamp",
+					}),
+				}
+
+				c, err := NewClient(context.Background(), ds, nil)
+				So(err, ShouldBeNil)
+				So(c.GetVersion(), ShouldEqual, 60)
+			})
 		})
 		})
 
 
 		Convey("Given a fake http client", func() {
 		Convey("Given a fake http client", func() {
@@ -153,8 +166,6 @@ func TestClient(t *testing.T) {
 						jBody, err := simplejson.NewJson(bodyBytes)
 						jBody, err := simplejson.NewJson(bodyBytes)
 						So(err, ShouldBeNil)
 						So(err, ShouldBeNil)
 
 
-						fmt.Println("body", string(headerBytes))
-
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "count")
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "count")
@@ -209,8 +220,6 @@ func TestClient(t *testing.T) {
 						jBody, err := simplejson.NewJson(bodyBytes)
 						jBody, err := simplejson.NewJson(bodyBytes)
 						So(err, ShouldBeNil)
 						So(err, ShouldBeNil)
 
 
-						fmt.Println("body", string(headerBytes))
-
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
@@ -265,8 +274,6 @@ func TestClient(t *testing.T) {
 						jBody, err := simplejson.NewJson(bodyBytes)
 						jBody, err := simplejson.NewJson(bodyBytes)
 						So(err, ShouldBeNil)
 						So(err, ShouldBeNil)
 
 
-						fmt.Println("body", string(headerBytes))
-
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
 						So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")

+ 1 - 0
pkg/tsdb/elasticsearch/client/models.go

@@ -240,6 +240,7 @@ type DateHistogramAgg struct {
 	Missing        *string         `json:"missing,omitempty"`
 	Missing        *string         `json:"missing,omitempty"`
 	ExtendedBounds *ExtendedBounds `json:"extended_bounds"`
 	ExtendedBounds *ExtendedBounds `json:"extended_bounds"`
 	Format         string          `json:"format"`
 	Format         string          `json:"format"`
+	Offset         string          `json:"offset,omitempty"`
 }
 }
 
 
 // FiltersAggregation represents a filters aggregation
 // FiltersAggregation represents a filters aggregation

+ 4 - 0
pkg/tsdb/elasticsearch/time_series_query.go

@@ -134,6 +134,10 @@ func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFro
 			a.Interval = "$__interval"
 			a.Interval = "$__interval"
 		}
 		}
 
 
+		if offset, err := bucketAgg.Settings.Get("offset").String(); err == nil {
+			a.Offset = offset
+		}
+
 		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
 		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
 			a.Missing = &missing
 			a.Missing = &missing
 		}
 		}

+ 4 - 0
pkg/tsdb/mssql/macros.go

@@ -66,6 +66,10 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 		}
 
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 	case "__timeGroup":
 		if len(args) < 2 {
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

+ 14 - 0
pkg/tsdb/mssql/macros_test.go

@@ -52,6 +52,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 			})
 
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+			})
+
 			Convey("interpolate __timeGroup function", func() {
 			Convey("interpolate __timeGroup function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
 				So(err, ShouldBeNil)

+ 4 - 0
pkg/tsdb/mysql/macros.go

@@ -61,6 +61,10 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 		}
 
 
 		return fmt.Sprintf("%s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", args[0], m.timeRange.GetFromAsSecondsEpoch(), m.timeRange.GetToAsSecondsEpoch()), nil
 		return fmt.Sprintf("%s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", args[0], m.timeRange.GetFromAsSecondsEpoch(), m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__timeFrom":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetFromAsSecondsEpoch()), nil
+	case "__timeTo":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__timeGroup":
 	case "__timeGroup":
 		if len(args) < 2 {
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

+ 14 - 0
pkg/tsdb/mysql/macros_test.go

@@ -63,6 +63,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
 			})
 			})
 
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+			})
+
 			Convey("interpolate __unixEpochFilter function", func() {
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)
 				So(err, ShouldBeNil)

+ 2 - 2
pkg/tsdb/mysql/mysql_test.go

@@ -761,7 +761,7 @@ func TestMySQL(t *testing.T) {
 					{
 					{
 						DataSource: &models.DataSource{JsonData: simplejson.New()},
 						DataSource: &models.DataSource{JsonData: simplejson.New()},
 						Model: simplejson.NewFromAny(map[string]interface{}{
 						Model: simplejson.NewFromAny(map[string]interface{}{
-							"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+							"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeTo() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
 							"format": "time_series",
 							"format": "time_series",
 						}),
 						}),
 						RefId: "A",
 						RefId: "A",
@@ -773,7 +773,7 @@ func TestMySQL(t *testing.T) {
 			So(err, ShouldBeNil)
 			So(err, ShouldBeNil)
 			queryResult := resp.Results["A"]
 			queryResult := resp.Results["A"]
 			So(queryResult.Error, ShouldBeNil)
 			So(queryResult.Error, ShouldBeNil)
-			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > FROM_UNIXTIME(1521118500) OR time < FROM_UNIXTIME(1521118800) OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
 
 
 		})
 		})
 
 

+ 4 - 0
pkg/tsdb/postgres/macros.go

@@ -87,6 +87,10 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		}
 		}
 
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 	case "__timeGroup":
 		if len(args) < 2 {
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 14 - 0
pkg/tsdb/postgres/macros_test.go

@@ -44,6 +44,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 			})
 
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+			})
+
 			Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 			Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")

+ 0 - 2
pkg/tsdb/sql_engine.go

@@ -196,8 +196,6 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string,
 
 
 	sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
 	sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
 	sql = strings.Replace(sql, "$__interval", interval.Text, -1)
 	sql = strings.Replace(sql, "$__interval", interval.Text, -1)
-	sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
-	sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
 	sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
 	sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
 	sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
 	sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
 
 

+ 0 - 14
pkg/tsdb/sql_engine_test.go

@@ -44,20 +44,6 @@ func TestSqlEngine(t *testing.T) {
 				So(sql, ShouldEqual, "select 60000 ")
 				So(sql, ShouldEqual, "select 60000 ")
 			})
 			})
 
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := Interpolate(query, timeRange, "select $__timeTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFrom function", func() {
 			Convey("interpolate __unixEpochFrom function", func() {
 				sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
 				sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
 				So(err, ShouldBeNil)
 				So(err, ShouldBeNil)

+ 1 - 1
public/app/app.ts

@@ -1,4 +1,4 @@
-import 'babel-polyfill';
+import '@babel/polyfill';
 import 'file-saver';
 import 'file-saver';
 import 'lodash';
 import 'lodash';
 import 'jquery';
 import 'jquery';

+ 1 - 1
public/app/core/components/code_editor/theme-grafana-dark.js

@@ -14,7 +14,7 @@ ace.define("ace/theme/grafana-dark",["require","exports","module","ace/lib/dom"]
   background: #555651\
   background: #555651\
   }\
   }\
   .gf-code-dark {\
   .gf-code-dark {\
-  background-color: #111;\
+  background-color: #09090b;\
   color: #e0e0e0\
   color: #e0e0e0\
   }\
   }\
   .gf-code-dark .ace_cursor {\
   .gf-code-dark .ace_cursor {\

+ 4 - 0
public/app/core/controllers/reset_password_ctrl.ts

@@ -1,4 +1,5 @@
 import coreModule from '../core_module';
 import coreModule from '../core_module';
+import config from 'app/core/config';
 
 
 export class ResetPasswordCtrl {
 export class ResetPasswordCtrl {
   /** @ngInject */
   /** @ngInject */
@@ -6,6 +7,9 @@ export class ResetPasswordCtrl {
     contextSrv.sidemenu = false;
     contextSrv.sidemenu = false;
     $scope.formModel = {};
     $scope.formModel = {};
     $scope.mode = 'send';
     $scope.mode = 'send';
+    $scope.ldapEnabled = config.ldapEnabled;
+    $scope.authProxyEnabled = config.authProxyEnabled;
+    $scope.disableLoginForm = config.disableLoginForm;
 
 
     const params = $location.search();
     const params = $location.search();
     if (params.code) {
     if (params.code) {

+ 63 - 13
public/app/core/logs_model.ts

@@ -3,25 +3,26 @@ import { TimeSeries } from 'app/core/core';
 import colors from 'app/core/utils/colors';
 import colors from 'app/core/utils/colors';
 
 
 export enum LogLevel {
 export enum LogLevel {
-  crit = 'crit',
-  warn = 'warn',
+  crit = 'critical',
+  critical = 'critical',
+  warn = 'warning',
+  warning = 'warning',
   err = 'error',
   err = 'error',
   error = 'error',
   error = 'error',
   info = 'info',
   info = 'info',
   debug = 'debug',
   debug = 'debug',
   trace = 'trace',
   trace = 'trace',
-  none = 'none',
+  unkown = 'unkown',
 }
 }
 
 
 export const LogLevelColor = {
 export const LogLevelColor = {
-  [LogLevel.crit]: colors[7],
-  [LogLevel.warn]: colors[1],
-  [LogLevel.err]: colors[4],
+  [LogLevel.critical]: colors[7],
+  [LogLevel.warning]: colors[1],
   [LogLevel.error]: colors[4],
   [LogLevel.error]: colors[4],
   [LogLevel.info]: colors[0],
   [LogLevel.info]: colors[0],
-  [LogLevel.debug]: colors[3],
-  [LogLevel.trace]: colors[3],
-  [LogLevel.none]: '#eee',
+  [LogLevel.debug]: colors[5],
+  [LogLevel.trace]: colors[2],
+  [LogLevel.unkown]: '#ddd',
 };
 };
 
 
 export interface LogSearchMatch {
 export interface LogSearchMatch {
@@ -34,22 +35,37 @@ export interface LogRow {
   duplicates?: number;
   duplicates?: number;
   entry: string;
   entry: string;
   key: string; // timestamp + labels
   key: string; // timestamp + labels
-  labels: string;
+  labels: LogsStreamLabels;
   logLevel: LogLevel;
   logLevel: LogLevel;
   searchWords?: string[];
   searchWords?: string[];
   timestamp: string; // ISO with nanosec precision
   timestamp: string; // ISO with nanosec precision
   timeFromNow: string;
   timeFromNow: string;
   timeEpochMs: number;
   timeEpochMs: number;
   timeLocal: string;
   timeLocal: string;
-  uniqueLabels?: string;
+  uniqueLabels?: LogsStreamLabels;
+}
+
+export interface LogsLabelStat {
+  active?: boolean;
+  count: number;
+  proportion: number;
+  value: string;
+}
+
+export enum LogsMetaKind {
+  Number,
+  String,
+  LabelsMap,
 }
 }
 
 
 export interface LogsMetaItem {
 export interface LogsMetaItem {
   label: string;
   label: string;
-  value: string;
+  value: string | number | LogsStreamLabels;
+  kind: LogsMetaKind;
 }
 }
 
 
 export interface LogsModel {
 export interface LogsModel {
+  id: string; // Identify one logs result from another
   meta?: LogsMetaItem[];
   meta?: LogsMetaItem[];
   rows: LogRow[];
   rows: LogRow[];
   series?: TimeSeries[];
   series?: TimeSeries[];
@@ -60,7 +76,7 @@ export interface LogsStream {
   entries: LogsStreamEntry[];
   entries: LogsStreamEntry[];
   search?: string;
   search?: string;
   parsedLabels?: LogsStreamLabels;
   parsedLabels?: LogsStreamLabels;
-  uniqueLabels?: string;
+  uniqueLabels?: LogsStreamLabels;
 }
 }
 
 
 export interface LogsStreamEntry {
 export interface LogsStreamEntry {
@@ -79,6 +95,22 @@ export enum LogsDedupStrategy {
   signature = 'signature',
   signature = 'signature',
 }
 }
 
 
+export function calculateLogsLabelStats(rows: LogRow[], label: string): LogsLabelStat[] {
+  // Consider only rows that have the given label
+  const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
+  const rowCount = rowsWithLabel.length;
+
+  // Get label value counts for eligible rows
+  const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRow).labels[label]);
+  const sortedCounts = _.chain(countsByValue)
+    .map((count, value) => ({ count, value, proportion: count / rowCount }))
+    .sortBy('count')
+    .reverse()
+    .value();
+
+  return sortedCounts;
+}
+
 const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
 const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
 function isDuplicateRow(row: LogRow, other: LogRow, strategy: LogsDedupStrategy): boolean {
 function isDuplicateRow(row: LogRow, other: LogRow, strategy: LogsDedupStrategy): boolean {
   switch (strategy) {
   switch (strategy) {
@@ -119,6 +151,24 @@ export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): Logs
   };
   };
 }
 }
 
 
+export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
+  if (hiddenLogLevels.size === 0) {
+    return logs;
+  }
+
+  const filteredRows = logs.rows.reduce((result: LogRow[], row: LogRow, index, list) => {
+    if (!hiddenLogLevels.has(row.logLevel)) {
+      result.push(row);
+    }
+    return result;
+  }, []);
+
+  return {
+    ...logs,
+    rows: filteredRows,
+  };
+}
+
 export function makeSeriesForLogs(rows: LogRow[], intervalMs: number): TimeSeries[] {
 export function makeSeriesForLogs(rows: LogRow[], intervalMs: number): TimeSeries[] {
   // Graph time series by log level
   // Graph time series by log level
   const seriesByLevel = {};
   const seriesByLevel = {};

+ 54 - 1
public/app/core/specs/logs_model.test.ts

@@ -1,4 +1,4 @@
-import { dedupLogRows, LogsDedupStrategy, LogsModel } from '../logs_model';
+import { calculateLogsLabelStats, dedupLogRows, LogsDedupStrategy, LogsModel } from '../logs_model';
 
 
 describe('dedupLogRows()', () => {
 describe('dedupLogRows()', () => {
   test('should return rows as is when dedup is set to none', () => {
   test('should return rows as is when dedup is set to none', () => {
@@ -106,3 +106,56 @@ describe('dedupLogRows()', () => {
     ]);
     ]);
   });
   });
 });
 });
+
+describe('calculateLogsLabelStats()', () => {
+  test('should return no stats for empty rows', () => {
+    expect(calculateLogsLabelStats([], '')).toEqual([]);
+  });
+
+  test('should return no stats of label is not found', () => {
+    const rows = [
+      {
+        entry: 'foo 1',
+        labels: {
+          foo: 'bar',
+        },
+      },
+    ];
+
+    expect(calculateLogsLabelStats(rows as any, 'baz')).toEqual([]);
+  });
+
+  test('should return stats for found labels', () => {
+    const rows = [
+      {
+        entry: 'foo 1',
+        labels: {
+          foo: 'bar',
+        },
+      },
+      {
+        entry: 'foo 0',
+        labels: {
+          foo: 'xxx',
+        },
+      },
+      {
+        entry: 'foo 2',
+        labels: {
+          foo: 'bar',
+        },
+      },
+    ];
+
+    expect(calculateLogsLabelStats(rows as any, 'foo')).toMatchObject([
+      {
+        value: 'bar',
+        count: 2,
+      },
+      {
+        value: 'xxx',
+        count: 1,
+      },
+    ]);
+  });
+});

+ 1 - 1
public/app/core/utils/explore.test.ts

@@ -16,7 +16,7 @@ const DEFAULT_EXPLORE_STATE: ExploreState = {
   datasourceMissing: false,
   datasourceMissing: false,
   datasourceName: '',
   datasourceName: '',
   exploreDatasources: [],
   exploreDatasources: [],
-  graphRange: DEFAULT_RANGE,
+  graphInterval: 1000,
   history: [],
   history: [],
   initialQueries: [],
   initialQueries: [],
   queryTransactions: [],
   queryTransactions: [],

+ 40 - 3
public/app/core/utils/explore.ts

@@ -1,7 +1,10 @@
+import _ from 'lodash';
+
 import { renderUrl } from 'app/core/utils/url';
 import { renderUrl } from 'app/core/utils/url';
-import { ExploreState, ExploreUrlState, HistoryItem } from 'app/types/explore';
+import { ExploreState, ExploreUrlState, HistoryItem, QueryTransaction } from 'app/types/explore';
 import { DataQuery, RawTimeRange } from 'app/types/series';
 import { DataQuery, RawTimeRange } from 'app/types/series';
 
 
+import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
 import kbn from 'app/core/utils/kbn';
 import kbn from 'app/core/utils/kbn';
 import colors from 'app/core/utils/colors';
 import colors from 'app/core/utils/colors';
 import TimeSeries from 'app/core/time_series2';
 import TimeSeries from 'app/core/time_series2';
@@ -127,10 +130,44 @@ export function ensureQueries(queries?: DataQuery[]): DataQuery[] {
 }
 }
 
 
 /**
 /**
- * A target is non-empty when it has keys other than refId and key.
+ * A target is non-empty when it has keys (with non-empty values) other than refId and key.
  */
  */
 export function hasNonEmptyQuery(queries: DataQuery[]): boolean {
 export function hasNonEmptyQuery(queries: DataQuery[]): boolean {
-  return queries.some(query => Object.keys(query).length > 2);
+  return queries.some(
+    query =>
+      Object.keys(query)
+        .map(k => query[k])
+        .filter(v => v).length > 2
+  );
+}
+
+export function calculateResultsFromQueryTransactions(
+  queryTransactions: QueryTransaction[],
+  datasource: any,
+  graphInterval: number
+) {
+  const graphResult = _.flatten(
+    queryTransactions.filter(qt => qt.resultType === 'Graph' && qt.done && qt.result).map(qt => qt.result)
+  );
+  const tableResult = mergeTablesIntoModel(
+    new TableModel(),
+    ...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done && qt.result).map(qt => qt.result)
+  );
+  const logsResult =
+    datasource && datasource.mergeStreams
+      ? datasource.mergeStreams(
+          _.flatten(
+            queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
+          ),
+          graphInterval
+        )
+      : undefined;
+
+  return {
+    graphResult,
+    tableResult,
+    logsResult,
+  };
 }
 }
 
 
 export function getIntervals(
 export function getIntervals(

+ 8 - 2
public/app/core/utils/kbn.ts

@@ -429,10 +429,16 @@ kbn.valueFormats.hex0x = (value, decimals) => {
 };
 };
 
 
 kbn.valueFormats.sci = (value, decimals) => {
 kbn.valueFormats.sci = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toExponential(decimals);
   return value.toExponential(decimals);
 };
 };
 
 
 kbn.valueFormats.locale = (value, decimals) => {
 kbn.valueFormats.locale = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toLocaleString(undefined, { maximumFractionDigits: decimals });
   return value.toLocaleString(undefined, { maximumFractionDigits: decimals });
 };
 };
 
 
@@ -585,8 +591,8 @@ kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms');
 kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
 kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
 kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
 kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
 kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
 kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
-kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('l/min');
-kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('mL/min', -1);
+kbn.valueFormats.flowlpm = kbn.formatBuilders.fixedUnit('l/min');
+kbn.valueFormats.flowmlpm = kbn.formatBuilders.fixedUnit('mL/min');
 
 
 // Angle
 // Angle
 kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');
 kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');

+ 16 - 5
public/app/core/utils/text.test.ts

@@ -16,9 +16,20 @@ describe('findMatchesInText()', () => {
     expect(findMatchesInText(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo', end: 4 }]);
     expect(findMatchesInText(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo', end: 4 }]);
   });
   });
 
 
-  expect(findMatchesInText(' foo foo bar ', 'foo|bar')).toEqual([
-    { length: 3, start: 1, text: 'foo', end: 4 },
-    { length: 3, start: 5, text: 'foo', end: 8 },
-    { length: 3, start: 9, text: 'bar', end: 12 },
-  ]);
+  test('should find all matches for a complete regex', () => {
+    expect(findMatchesInText(' foo foo bar ', 'foo|bar')).toEqual([
+      { length: 3, start: 1, text: 'foo', end: 4 },
+      { length: 3, start: 5, text: 'foo', end: 8 },
+      { length: 3, start: 9, text: 'bar', end: 12 },
+    ]);
+  });
+
+  test('not fail on incomplete regex', () => {
+    expect(findMatchesInText(' foo foo bar ', 'foo|')).toEqual([
+      { length: 3, start: 1, text: 'foo', end: 4 },
+      { length: 3, start: 5, text: 'foo', end: 8 },
+    ]);
+    expect(findMatchesInText('foo foo bar', '(')).toEqual([]);
+    expect(findMatchesInText('foo foo bar', '(foo|')).toEqual([]);
+  });
 });
 });

+ 22 - 10
public/app/core/utils/text.ts

@@ -8,6 +8,10 @@ export function findHighlightChunksInText({ searchWords, textToHighlight }) {
   return findMatchesInText(textToHighlight, searchWords.join(' '));
   return findMatchesInText(textToHighlight, searchWords.join(' '));
 }
 }
 
 
+const cleanNeedle = (needle: string): string => {
+  return needle.replace(/[[{(][\w,.-?:*+]+$/, '');
+};
+
 /**
 /**
  * Returns a list of substring regexp matches.
  * Returns a list of substring regexp matches.
  */
  */
@@ -16,17 +20,25 @@ export function findMatchesInText(haystack: string, needle: string): TextMatch[]
   if (!haystack || !needle) {
   if (!haystack || !needle) {
     return [];
     return [];
   }
   }
-  const regexp = new RegExp(`(?:${needle})`, 'g');
   const matches = [];
   const matches = [];
-  let match = regexp.exec(haystack);
-  while (match) {
-    matches.push({
-      text: match[0],
-      start: match.index,
-      length: match[0].length,
-      end: match.index + match[0].length,
-    });
-    match = regexp.exec(haystack);
+  const cleaned = cleanNeedle(needle);
+  let regexp;
+  try {
+    regexp = new RegExp(`(?:${cleaned})`, 'g');
+  } catch (error) {
+    return matches;
   }
   }
+  haystack.replace(regexp, (substring, ...rest) => {
+    if (substring) {
+      const offset = rest[rest.length - 2];
+      matches.push({
+        text: substring,
+        start: offset,
+        length: substring.length,
+        end: offset + substring.length,
+      });
+    }
+    return '';
+  });
   return matches;
   return matches;
 }
 }

+ 154 - 65
public/app/features/explore/Explore.tsx

@@ -16,6 +16,7 @@ import { TimeRange, DataQuery } from 'app/types/series';
 import store from 'app/core/store';
 import store from 'app/core/store';
 import {
 import {
   DEFAULT_RANGE,
   DEFAULT_RANGE,
+  calculateResultsFromQueryTransactions,
   ensureQueries,
   ensureQueries,
   getIntervals,
   getIntervals,
   generateKey,
   generateKey,
@@ -28,7 +29,7 @@ import ResetStyles from 'app/core/components/Picker/ResetStyles';
 import PickerOption from 'app/core/components/Picker/PickerOption';
 import PickerOption from 'app/core/components/Picker/PickerOption';
 import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
 import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
 import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
 import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
-import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
+import TableModel from 'app/core/table_model';
 import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
 import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
 import { Emitter } from 'app/core/utils/emitter';
 import { Emitter } from 'app/core/utils/emitter';
 import * as dateMath from 'app/core/utils/datemath';
 import * as dateMath from 'app/core/utils/datemath';
@@ -70,9 +71,9 @@ interface ExploreProps {
  * contain one empty DataQuery. While the user modifies the DataQuery, the
  * contain one empty DataQuery. While the user modifies the DataQuery, the
  * modifications are being tracked in `this.modifiedQueries`, which need to be
  * modifications are being tracked in `this.modifiedQueries`, which need to be
  * used whenever a query is sent to the datasource to reflect what the user sees
  * used whenever a query is sent to the datasource to reflect what the user sees
- * on the screen. Query rows can be initialized or reset using `initialQueries`,
- * by giving the respective row a new key. This wipes the old row and its state.
- * This property is also used to govern how many query rows there are (minimum 1).
+ * on the screen. Query"react-popper": "^0.7.5", rows can be initialized or reset using `initialQueries`,
+ * by giving the respec"react-popper": "^0.7.5",tive row a new key. This wipes the old row and its state.
+ * This property is als"react-popper": "^0.7.5",o used to govern how many query rows there are (minimum 1).
  *
  *
  * This flow makes sure that a query row can be arbitrarily complex without the
  * This flow makes sure that a query row can be arbitrarily complex without the
  * fear of being wiped or re-initialized via props. The query row is free to keep
  * fear of being wiped or re-initialized via props. The query row is free to keep
@@ -101,6 +102,11 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
    * Local ID cache to compare requested vs selected datasource
    * Local ID cache to compare requested vs selected datasource
    */
    */
   requestedDatasourceId: string;
   requestedDatasourceId: string;
+  scanTimer: NodeJS.Timer;
+  /**
+   * Timepicker to control scanning
+   */
+  timepickerRef: React.RefObject<TimePicker>;
 
 
   constructor(props) {
   constructor(props) {
     super(props);
     super(props);
@@ -114,6 +120,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       const { datasource, queries, range } = props.urlState as ExploreUrlState;
       const { datasource, queries, range } = props.urlState as ExploreUrlState;
       initialQueries = ensureQueries(queries);
       initialQueries = ensureQueries(queries);
       const initialRange = range || { ...DEFAULT_RANGE };
       const initialRange = range || { ...DEFAULT_RANGE };
+      // Millies step for helper bar charts
+      const initialGraphInterval = 15 * 1000;
       this.state = {
       this.state = {
         datasource: null,
         datasource: null,
         datasourceError: null,
         datasourceError: null,
@@ -121,11 +129,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         datasourceMissing: false,
         datasourceMissing: false,
         datasourceName: datasource,
         datasourceName: datasource,
         exploreDatasources: [],
         exploreDatasources: [],
-        graphRange: initialRange,
+        graphInterval: initialGraphInterval,
+        graphResult: [],
         initialQueries,
         initialQueries,
         history: [],
         history: [],
+        logsResult: null,
         queryTransactions: [],
         queryTransactions: [],
         range: initialRange,
         range: initialRange,
+        scanning: false,
         showingGraph: true,
         showingGraph: true,
         showingLogs: true,
         showingLogs: true,
         showingStartPage: false,
         showingStartPage: false,
@@ -133,10 +144,12 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         supportsGraph: null,
         supportsGraph: null,
         supportsLogs: null,
         supportsLogs: null,
         supportsTable: null,
         supportsTable: null,
+        tableResult: new TableModel(),
       };
       };
     }
     }
     this.modifiedQueries = initialQueries.slice();
     this.modifiedQueries = initialQueries.slice();
     this.exploreEvents = new Emitter();
     this.exploreEvents = new Emitter();
+    this.timepickerRef = React.createRef();
   }
   }
 
 
   async componentDidMount() {
   async componentDidMount() {
@@ -168,9 +181,12 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
 
   componentWillUnmount() {
   componentWillUnmount() {
     this.exploreEvents.removeAllListeners();
     this.exploreEvents.removeAllListeners();
+    clearTimeout(this.scanTimer);
   }
   }
 
 
   async setDatasource(datasource: any, origin?: DataSource) {
   async setDatasource(datasource: any, origin?: DataSource) {
+    const { initialQueries, range } = this.state;
+
     const supportsGraph = datasource.meta.metrics;
     const supportsGraph = datasource.meta.metrics;
     const supportsLogs = datasource.meta.logs;
     const supportsLogs = datasource.meta.logs;
     const supportsTable = datasource.meta.tables;
     const supportsTable = datasource.meta.tables;
@@ -215,7 +231,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     }
     }
 
 
     // Reset edit state with new queries
     // Reset edit state with new queries
-    const nextQueries = this.state.initialQueries.map((q, i) => ({
+    const nextQueries = initialQueries.map((q, i) => ({
       ...modifiedQueries[i],
       ...modifiedQueries[i],
       ...generateQueryKeys(i),
       ...generateQueryKeys(i),
     }));
     }));
@@ -224,11 +240,15 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     // Custom components
     // Custom components
     const StartPage = datasource.pluginExports.ExploreStartPage;
     const StartPage = datasource.pluginExports.ExploreStartPage;
 
 
+    // Calculate graph bucketing interval
+    const graphInterval = getIntervals(range, datasource, this.el ? this.el.offsetWidth : 0).intervalMs;
+
     this.setState(
     this.setState(
       {
       {
         StartPage,
         StartPage,
         datasource,
         datasource,
         datasourceError,
         datasourceError,
+        graphInterval,
         history,
         history,
         supportsGraph,
         supportsGraph,
         supportsLogs,
         supportsLogs,
@@ -236,6 +256,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         datasourceLoading: false,
         datasourceLoading: false,
         datasourceName: datasource.name,
         datasourceName: datasource.name,
         initialQueries: nextQueries,
         initialQueries: nextQueries,
+        logsHighlighterExpressions: undefined,
         showingStartPage: Boolean(StartPage),
         showingStartPage: Boolean(StartPage),
       },
       },
       () => {
       () => {
@@ -274,7 +295,11 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         return qt;
         return qt;
       });
       });
 
 
-      return { initialQueries: nextQueries, queryTransactions: nextQueryTransactions };
+      return {
+        initialQueries: nextQueries,
+        logsHighlighterExpressions: undefined,
+        queryTransactions: nextQueryTransactions,
+      };
     });
     });
   };
   };
 
 
@@ -320,23 +345,24 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
           queryTransactions: nextQueryTransactions,
           queryTransactions: nextQueryTransactions,
         };
         };
       }, this.onSubmit);
       }, this.onSubmit);
+    } else if (this.state.datasource.getHighlighterExpression && this.modifiedQueries.length === 1) {
+      // Live preview of log search matches. Can only work on single row query for now
+      this.updateLogsHighlights(value);
     }
     }
   };
   };
 
 
-  // onChangeTime = (nextRange: RawTimeRange) => {
-  //   const range: RawTimeRange = {
-  //     ...nextRange,
-  //   };
-  //   this.setState({ range }, () => this.onSubmit());
-  // };
-  onChangeTime = (nextRange: TimeRange) => {
+  onChangeTime = (nextRange: TimeRange, scanning?: boolean) => {
     const range: TimeRange = {
     const range: TimeRange = {
       ...nextRange,
       ...nextRange,
     };
     };
-    this.setState({ range }, () => this.onSubmit());
+    if (this.state.scanning && !scanning) {
+      this.onStopScanning();
+    }
+    this.setState({ range, scanning }, () => this.onSubmit());
   };
   };
 
 
   onClickClear = () => {
   onClickClear = () => {
+    this.onStopScanning();
     this.modifiedQueries = ensureQueries();
     this.modifiedQueries = ensureQueries();
     this.setState(
     this.setState(
       prevState => ({
       prevState => ({
@@ -412,12 +438,19 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     this.setState(
     this.setState(
       state => {
       state => {
         const showingTable = !state.showingTable;
         const showingTable = !state.showingTable;
-        let nextQueryTransactions = state.queryTransactions;
-        if (!showingTable) {
-          // Discard transactions related to Table query
-          nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table');
+        if (showingTable) {
+          return { showingTable, queryTransactions: state.queryTransactions };
         }
         }
-        return { queryTransactions: nextQueryTransactions, showingTable };
+
+        // Toggle off needs discarding of table queries
+        const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table');
+        const results = calculateResultsFromQueryTransactions(
+          nextQueryTransactions,
+          state.datasource,
+          state.graphInterval
+        );
+
+        return { ...results, queryTransactions: nextQueryTransactions, showingTable };
       },
       },
       () => {
       () => {
         if (this.state.showingTable) {
         if (this.state.showingTable) {
@@ -427,8 +460,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     );
     );
   };
   };
 
 
-  onClickTableCell = (columnKey: string, rowValue: string) => {
-    this.onModifyQueries({ type: 'ADD_FILTER', key: columnKey, value: rowValue });
+  onClickLabel = (key: string, value: string) => {
+    this.onModifyQueries({ type: 'ADD_FILTER', key, value });
   };
   };
 
 
   onModifyQueries = (action, index?: number) => {
   onModifyQueries = (action, index?: number) => {
@@ -498,9 +531,16 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
 
         // Discard transactions related to row query
         // Discard transactions related to row query
         const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
         const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
+        const results = calculateResultsFromQueryTransactions(
+          nextQueryTransactions,
+          state.datasource,
+          state.graphInterval
+        );
 
 
         return {
         return {
+          ...results,
           initialQueries: nextQueries,
           initialQueries: nextQueries,
+          logsHighlighterExpressions: undefined,
           queryTransactions: nextQueryTransactions,
           queryTransactions: nextQueryTransactions,
         };
         };
       },
       },
@@ -508,6 +548,24 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     );
     );
   };
   };
 
 
+  onStartScanning = () => {
+    this.setState({ scanning: true }, this.scanPreviousRange);
+  };
+
+  scanPreviousRange = () => {
+    const scanRange = this.timepickerRef.current.move(-1, true);
+    this.setState({ scanRange });
+  };
+
+  onStopScanning = () => {
+    clearTimeout(this.scanTimer);
+    this.setState(state => {
+      const { queryTransactions } = state;
+      const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done);
+      return { queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined };
+    });
+  };
+
   onSubmit = () => {
   onSubmit = () => {
     const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state;
     const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state;
     // Keep table queries first since they need to return quickly
     // Keep table queries first since they need to return quickly
@@ -542,7 +600,12 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     const { datasource, range } = this.state;
     const { datasource, range } = this.state;
     const { interval, intervalMs } = getIntervals(range, datasource, this.el.offsetWidth);
     const { interval, intervalMs } = getIntervals(range, datasource, this.el.offsetWidth);
 
 
-    const configuredQueries = [Object.assign(query, queryOptions)];
+    const configuredQueries = [
+      {
+        ...query,
+        ...queryOptions,
+      },
+    ];
 
 
     // Clone range for query request
     // Clone range for query request
     // const queryRange: RawTimeRange = { ...range };
     // const queryRange: RawTimeRange = { ...range };
@@ -575,6 +638,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       done: false,
       done: false,
       latency: 0,
       latency: 0,
       options: queryOptions,
       options: queryOptions,
+      scanning: this.state.scanning,
     };
     };
 
 
     // Using updater style because we might be modifying queryTransactions in quick succession
     // Using updater style because we might be modifying queryTransactions in quick succession
@@ -588,7 +652,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       // Append new transaction
       // Append new transaction
       const nextQueryTransactions = [...remainingTransactions, transaction];
       const nextQueryTransactions = [...remainingTransactions, transaction];
 
 
+      const results = calculateResultsFromQueryTransactions(
+        nextQueryTransactions,
+        state.datasource,
+        state.graphInterval
+      );
+
       return {
       return {
+        ...results,
         queryTransactions: nextQueryTransactions,
         queryTransactions: nextQueryTransactions,
         showingStartPage: false,
         showingStartPage: false,
       };
       };
@@ -611,7 +682,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     }
     }
 
 
     this.setState(state => {
     this.setState(state => {
-      const { history, queryTransactions } = state;
+      const { history, queryTransactions, scanning } = state;
 
 
       // Transaction might have been discarded
       // Transaction might have been discarded
       const transaction = queryTransactions.find(qt => qt.id === transactionId);
       const transaction = queryTransactions.find(qt => qt.id === transactionId);
@@ -639,22 +710,30 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         return qt;
         return qt;
       });
       });
 
 
+      const results = calculateResultsFromQueryTransactions(
+        nextQueryTransactions,
+        state.datasource,
+        state.graphInterval
+      );
+
       const nextHistory = updateHistory(history, datasourceId, queries);
       const nextHistory = updateHistory(history, datasourceId, queries);
 
 
+      // Keep scanning for results if this was the last scanning transaction
+      if (_.size(result) === 0 && scanning) {
+        const other = nextQueryTransactions.find(qt => qt.scanning && !qt.done);
+        if (!other) {
+          this.scanTimer = setTimeout(this.scanPreviousRange, 1000);
+        }
+      }
+
       return {
       return {
+        ...results,
         history: nextHistory,
         history: nextHistory,
         queryTransactions: nextQueryTransactions,
         queryTransactions: nextQueryTransactions,
       };
       };
     });
     });
   }
   }
 
 
-  discardTransactions(rowIndex: number) {
-    this.setState(state => {
-      const remainingTransactions = state.queryTransactions.filter(qt => qt.rowIndex !== rowIndex);
-      return { queryTransactions: remainingTransactions };
-    });
-  }
-
   failQueryTransaction(transactionId: string, response: any, datasourceId: string) {
   failQueryTransaction(transactionId: string, response: any, datasourceId: string) {
     const { datasource } = this.state;
     const { datasource } = this.state;
     if (datasource.meta.id !== datasourceId || response.cancelled) {
     if (datasource.meta.id !== datasourceId || response.cancelled) {
@@ -666,14 +745,20 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
 
     let error: string | JSX.Element = response;
     let error: string | JSX.Element = response;
     if (response.data) {
     if (response.data) {
-      error = response.data.error;
-      if (response.data.response) {
-        error = (
-          <>
-            <span>{response.data.error}</span>
-            <details>{response.data.response}</details>
-          </>
-        );
+      if (typeof response.data === 'string') {
+        error = response.data;
+      } else if (response.data.error) {
+        error = response.data.error;
+        if (response.data.response) {
+          error = (
+            <>
+              <span>{response.data.error}</span>
+              <details>{response.data.response}</details>
+            </>
+          );
+        }
+      } else {
+        throw new Error('Could not handle error response');
       }
       }
     }
     }
 
 
@@ -704,6 +789,9 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
   async runQueries(resultType: ResultType, queryOptions: any, resultGetter?: any) {
   async runQueries(resultType: ResultType, queryOptions: any, resultGetter?: any) {
     const queries = [...this.modifiedQueries];
     const queries = [...this.modifiedQueries];
     if (!hasNonEmptyQuery(queries)) {
     if (!hasNonEmptyQuery(queries)) {
+      this.setState({
+        queryTransactions: [],
+      });
       return;
       return;
     }
     }
     const { datasource } = this.state;
     const { datasource } = this.state;
@@ -718,7 +806,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         const latency = Date.now() - now;
         const latency = Date.now() - now;
         const results = resultGetter ? resultGetter(res.data) : res.data;
         const results = resultGetter ? resultGetter(res.data) : res.data;
         this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
         this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
-        this.setState({ graphRange: transaction.options.range });
       } catch (response) {
       } catch (response) {
         this.exploreEvents.emit('data-error', response);
         this.exploreEvents.emit('data-error', response);
         this.failQueryTransaction(transaction.id, response, datasourceId);
         this.failQueryTransaction(transaction.id, response, datasourceId);
@@ -726,6 +813,17 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     });
     });
   }
   }
 
 
+  updateLogsHighlights = _.debounce((value: DataQuery, index: number) => {
+    this.setState(state => {
+      const { datasource } = state;
+      if (datasource.getHighlighterExpression) {
+        const logsHighlighterExpressions = [state.datasource.getHighlighterExpression(value)];
+        return { logsHighlighterExpressions };
+      }
+      return null;
+    });
+  }, 500);
+
   cloneState(): ExploreState {
   cloneState(): ExploreState {
     // Copy state, but copy queries including modifications
     // Copy state, but copy queries including modifications
     return {
     return {
@@ -749,11 +847,15 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       datasourceLoading,
       datasourceLoading,
       datasourceMissing,
       datasourceMissing,
       exploreDatasources,
       exploreDatasources,
-      graphRange,
+      graphResult,
       history,
       history,
       initialQueries,
       initialQueries,
+      logsHighlighterExpressions,
+      logsResult,
       queryTransactions,
       queryTransactions,
       range,
       range,
+      scanning,
+      scanRange,
       showingGraph,
       showingGraph,
       showingLogs,
       showingLogs,
       showingStartPage,
       showingStartPage,
@@ -761,34 +863,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       supportsGraph,
       supportsGraph,
       supportsLogs,
       supportsLogs,
       supportsTable,
       supportsTable,
+      tableResult,
     } = this.state;
     } = this.state;
     const graphHeight = showingGraph && showingTable ? '200px' : '400px';
     const graphHeight = showingGraph && showingTable ? '200px' : '400px';
     const exploreClass = split ? 'explore explore-split' : 'explore';
     const exploreClass = split ? 'explore explore-split' : 'explore';
     const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
     const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
-    const graphRangeIntervals = getIntervals(graphRange, datasource, this.el ? this.el.offsetWidth : 0);
     const graphLoading = queryTransactions.some(qt => qt.resultType === 'Graph' && !qt.done);
     const graphLoading = queryTransactions.some(qt => qt.resultType === 'Graph' && !qt.done);
     const tableLoading = queryTransactions.some(qt => qt.resultType === 'Table' && !qt.done);
     const tableLoading = queryTransactions.some(qt => qt.resultType === 'Table' && !qt.done);
     const logsLoading = queryTransactions.some(qt => qt.resultType === 'Logs' && !qt.done);
     const logsLoading = queryTransactions.some(qt => qt.resultType === 'Logs' && !qt.done);
-    // TODO don't recreate those on each re-render
-    const graphResult = _.flatten(
-      queryTransactions.filter(qt => qt.resultType === 'Graph' && qt.done && qt.result).map(qt => qt.result)
-    );
-
-    //Temp solution... How do detect if ds supports table format?
-    let tableResult;
-    tableResult = mergeTablesIntoModel(
-      new TableModel(),
-      ...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done && qt.result).map(qt => qt.result)
-    );
-    const logsResult =
-      datasource && datasource.mergeStreams
-        ? datasource.mergeStreams(
-            _.flatten(
-              queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
-            ),
-            graphRangeIntervals.intervalMs
-          )
-        : undefined;
     const loading = queryTransactions.some(qt => !qt.done);
     const loading = queryTransactions.some(qt => !qt.done);
 
 
     return (
     return (
@@ -839,7 +921,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
               </button>
               </button>
             </div>
             </div>
           ) : null}
           ) : null}
-          <TimePicker range={range} onChangeTime={this.onChangeTime} />
+          <TimePicker ref={this.timepickerRef} range={range} onChangeTime={this.onChangeTime} />
           <div className="navbar-buttons">
           <div className="navbar-buttons">
             <button className="btn navbar-button navbar-button--no-icon" onClick={this.onClickClear}>
             <button className="btn navbar-button navbar-button--no-icon" onClick={this.onClickClear}>
               Clear All
               Clear All
@@ -895,7 +977,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                           height={graphHeight}
                           height={graphHeight}
                           id={`explore-graph-${position}`}
                           id={`explore-graph-${position}`}
                           onChangeTime={this.onChangeTime}
                           onChangeTime={this.onChangeTime}
-                          range={graphRange}
+                          range={range}
                           split={split}
                           split={split}
                         />
                         />
                       </Panel>
                       </Panel>
@@ -907,17 +989,24 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                         isOpen={showingTable}
                         isOpen={showingTable}
                         onToggle={this.onClickTableButton}
                         onToggle={this.onClickTableButton}
                       >
                       >
-                        <Table data={tableResult} loading={tableLoading} onClickCell={this.onClickTableCell} />
+                        <Table data={tableResult} loading={tableLoading} onClickCell={this.onClickLabel} />
                       </Panel>
                       </Panel>
                     )}
                     )}
                     {supportsLogs && (
                     {supportsLogs && (
                       <Panel label="Logs" loading={logsLoading} isOpen={showingLogs} onToggle={this.onClickLogsButton}>
                       <Panel label="Logs" loading={logsLoading} isOpen={showingLogs} onToggle={this.onClickLogsButton}>
                         <Logs
                         <Logs
                           data={logsResult}
                           data={logsResult}
+                          key={logsResult.id}
+                          highlighterExpressions={logsHighlighterExpressions}
                           loading={logsLoading}
                           loading={logsLoading}
                           position={position}
                           position={position}
                           onChangeTime={this.onChangeTime}
                           onChangeTime={this.onChangeTime}
+                          onClickLabel={this.onClickLabel}
+                          onStartScanning={this.onStartScanning}
+                          onStopScanning={this.onStopScanning}
                           range={range}
                           range={range}
+                          scanning={scanning}
+                          scanRange={scanRange}
                         />
                         />
                       </Panel>
                       </Panel>
                     )}
                     )}

+ 19 - 15
public/app/features/explore/Graph.tsx

@@ -83,6 +83,7 @@ interface GraphProps {
   size?: { width: number; height: number };
   size?: { width: number; height: number };
   userOptions?: any;
   userOptions?: any;
   onChangeTime?: (range: RawTimeRange) => void;
   onChangeTime?: (range: RawTimeRange) => void;
+  onToggleSeries?: (alias: string, hiddenSeries: Set<string>) => void;
 }
 }
 
 
 interface GraphState {
 interface GraphState {
@@ -178,26 +179,29 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
 
 
   onToggleSeries = (series: TimeSeries, exclusive: boolean) => {
   onToggleSeries = (series: TimeSeries, exclusive: boolean) => {
     this.setState((state, props) => {
     this.setState((state, props) => {
-      const { data } = props;
+      const { data, onToggleSeries } = props;
       const { hiddenSeries } = state;
       const { hiddenSeries } = state;
-      const hidden = hiddenSeries.has(series.alias);
+
       // Deduplicate series as visibility tracks the alias property
       // Deduplicate series as visibility tracks the alias property
       const oneSeriesVisible = hiddenSeries.size === new Set(data.map(d => d.alias)).size - 1;
       const oneSeriesVisible = hiddenSeries.size === new Set(data.map(d => d.alias)).size - 1;
+
+      let nextHiddenSeries = new Set();
       if (exclusive) {
       if (exclusive) {
-        return {
-          hiddenSeries:
-            !hidden && oneSeriesVisible
-              ? new Set()
-              : new Set(data.filter(d => d.alias !== series.alias).map(d => d.alias)),
-        };
-      }
-      // Prune hidden series no longer part of those available from the most recent query
-      const availableSeries = new Set(data.map(d => d.alias));
-      const nextHiddenSeries = intersect(new Set(hiddenSeries), availableSeries);
-      if (nextHiddenSeries.has(series.alias)) {
-        nextHiddenSeries.delete(series.alias);
+        if (hiddenSeries.has(series.alias) || !oneSeriesVisible) {
+          nextHiddenSeries = new Set(data.filter(d => d.alias !== series.alias).map(d => d.alias));
+        }
       } else {
       } else {
-        nextHiddenSeries.add(series.alias);
+        // Prune hidden series no longer part of those available from the most recent query
+        const availableSeries = new Set(data.map(d => d.alias));
+        nextHiddenSeries = intersect(new Set(hiddenSeries), availableSeries);
+        if (nextHiddenSeries.has(series.alias)) {
+          nextHiddenSeries.delete(series.alias);
+        } else {
+          nextHiddenSeries.add(series.alias);
+        }
+      }
+      if (onToggleSeries) {
+        onToggleSeries(series.alias, nextHiddenSeries);
       }
       }
       return {
       return {
         hiddenSeries: nextHiddenSeries,
         hiddenSeries: nextHiddenSeries,

+ 141 - 0
public/app/features/explore/LogLabels.tsx

@@ -0,0 +1,141 @@
+import _ from 'lodash';
+import React, { PureComponent } from 'react';
+import classnames from 'classnames';
+
+import { calculateLogsLabelStats, LogsLabelStat, LogsStreamLabels, LogRow } from 'app/core/logs_model';
+
+function StatsRow({ active, count, proportion, value }: LogsLabelStat) {
+  const percent = `${Math.round(proportion * 100)}%`;
+  const barStyle = { width: percent };
+  const className = classnames('logs-stats-row', { 'logs-stats-row--active': active });
+
+  return (
+    <div className={className}>
+      <div className="logs-stats-row__label">
+        <div className="logs-stats-row__value">{value}</div>
+        <div className="logs-stats-row__count">{count}</div>
+        <div className="logs-stats-row__percent">{percent}</div>
+      </div>
+      <div className="logs-stats-row__bar">
+        <div className="logs-stats-row__innerbar" style={barStyle} />
+      </div>
+    </div>
+  );
+}
+
+const STATS_ROW_LIMIT = 5;
+class Stats extends PureComponent<{
+  stats: LogsLabelStat[];
+  label: string;
+  value: string;
+  rowCount: number;
+  onClickClose: () => void;
+}> {
+  render() {
+    const { label, rowCount, stats, value, onClickClose } = this.props;
+    const topRows = stats.slice(0, STATS_ROW_LIMIT);
+    let activeRow = topRows.find(row => row.value === value);
+    let otherRows = stats.slice(STATS_ROW_LIMIT);
+    const insertActiveRow = !activeRow;
+    // Remove active row from other to show extra
+    if (insertActiveRow) {
+      activeRow = otherRows.find(row => row.value === value);
+      otherRows = otherRows.filter(row => row.value !== value);
+    }
+    const otherCount = otherRows.reduce((sum, row) => sum + row.count, 0);
+    const topCount = topRows.reduce((sum, row) => sum + row.count, 0);
+    const total = topCount + otherCount;
+    const otherProportion = otherCount / total;
+
+    return (
+      <>
+        <div className="logs-stats__info">
+          {label}: {total} of {rowCount} rows have that label
+          <span className="logs-stats__icon fa fa-window-close" onClick={onClickClose} />
+        </div>
+        {topRows.map(stat => <StatsRow key={stat.value} {...stat} active={stat.value === value} />)}
+        {insertActiveRow && <StatsRow key={activeRow.value} {...activeRow} active />}
+        {otherCount > 0 && <StatsRow key="__OTHERS__" count={otherCount} value="Other" proportion={otherProportion} />}
+      </>
+    );
+  }
+}
+
+class Label extends PureComponent<
+  {
+    allRows?: LogRow[];
+    label: string;
+    plain?: boolean;
+    value: string;
+    onClickLabel?: (label: string, value: string) => void;
+  },
+  { showStats: boolean; stats: LogsLabelStat[] }
+> {
+  state = {
+    stats: null,
+    showStats: false,
+  };
+
+  onClickClose = () => {
+    this.setState({ showStats: false });
+  };
+
+  onClickLabel = () => {
+    const { onClickLabel, label, value } = this.props;
+    if (onClickLabel) {
+      onClickLabel(label, value);
+    }
+  };
+
+  onClickStats = () => {
+    this.setState(state => {
+      if (state.showStats) {
+        return { showStats: false, stats: null };
+      }
+      const stats = calculateLogsLabelStats(this.props.allRows, this.props.label);
+      return { showStats: true, stats };
+    });
+  };
+
+  render() {
+    const { allRows, label, plain, value } = this.props;
+    const { showStats, stats } = this.state;
+    const tooltip = `${label}: ${value}`;
+    return (
+      <span className="logs-label">
+        <span className="logs-label__value" title={tooltip}>
+          {value}
+        </span>
+        {!plain && (
+          <span title="Filter for label" onClick={this.onClickLabel} className="logs-label__icon fa fa-search-plus" />
+        )}
+        {!plain && allRows && <span onClick={this.onClickStats} className="logs-label__icon fa fa-signal" />}
+        {showStats && (
+          <span className="logs-label__stats">
+            <Stats
+              stats={stats}
+              rowCount={allRows.length}
+              label={label}
+              value={value}
+              onClickClose={this.onClickClose}
+            />
+          </span>
+        )}
+      </span>
+    );
+  }
+}
+
+export default class LogLabels extends PureComponent<{
+  allRows?: LogRow[];
+  labels: LogsStreamLabels;
+  plain?: boolean;
+  onClickLabel?: (label: string, value: string) => void;
+}> {
+  render() {
+    const { allRows, labels, onClickLabel, plain } = this.props;
+    return Object.keys(labels).map(key => (
+      <Label key={key} allRows={allRows} label={key} value={labels[key]} plain={plain} onClickLabel={onClickLabel} />
+    ));
+  }
+}

+ 242 - 39
public/app/features/explore/Logs.tsx

@@ -1,12 +1,26 @@
-import React, { Fragment, PureComponent } from 'react';
+import _ from 'lodash';
+import React, { PureComponent } from 'react';
 import Highlighter from 'react-highlight-words';
 import Highlighter from 'react-highlight-words';
+import classnames from 'classnames';
 
 
+import * as rangeUtil from 'app/core/utils/rangeutil';
 import { RawTimeRange } from 'app/types/series';
 import { RawTimeRange } from 'app/types/series';
-import { LogsDedupStrategy, LogsModel, dedupLogRows } from 'app/core/logs_model';
+import {
+  LogsDedupStrategy,
+  LogsModel,
+  dedupLogRows,
+  filterLogLevels,
+  LogLevel,
+  LogsMetaKind,
+  LogRow,
+} from 'app/core/logs_model';
 import { findHighlightChunksInText } from 'app/core/utils/text';
 import { findHighlightChunksInText } from 'app/core/utils/text';
 import { Switch } from 'app/core/components/Switch/Switch';
 import { Switch } from 'app/core/components/Switch/Switch';
 
 
 import Graph from './Graph';
 import Graph from './Graph';
+import LogLabels from './LogLabels';
+
+const PREVIEW_LIMIT = 100;
 
 
 const graphOptions = {
 const graphOptions = {
   series: {
   series: {
@@ -22,30 +36,143 @@ const graphOptions = {
   },
   },
 };
 };
 
 
+interface RowProps {
+  allRows: LogRow[];
+  highlighterExpressions?: string[];
+  row: LogRow;
+  showDuplicates: boolean;
+  showLabels: boolean | null; // Tristate: null means auto
+  showLocalTime: boolean;
+  showUtc: boolean;
+  onClickLabel?: (label: string, value: string) => void;
+}
+
+function Row({
+  allRows,
+  highlighterExpressions,
+  onClickLabel,
+  row,
+  showDuplicates,
+  showLabels,
+  showLocalTime,
+  showUtc,
+}: RowProps) {
+  const previewHighlights = highlighterExpressions && !_.isEqual(highlighterExpressions, row.searchWords);
+  const highlights = previewHighlights ? highlighterExpressions : row.searchWords;
+  const needsHighlighter = highlights && highlights.length > 0;
+  const highlightClassName = classnames('logs-row-match-highlight', {
+    'logs-row-match-highlight--preview': previewHighlights,
+  });
+  return (
+    <>
+      {showDuplicates && (
+        <div className="logs-row-duplicates">{row.duplicates > 0 ? `${row.duplicates + 1}x` : null}</div>
+      )}
+      <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''} />
+      {showUtc && (
+        <div className="logs-row-time" title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>
+          {row.timestamp}
+        </div>
+      )}
+      {showLocalTime && (
+        <div className="logs-row-time" title={`${row.timestamp} (${row.timeFromNow})`}>
+          {row.timeLocal}
+        </div>
+      )}
+      {showLabels && (
+        <div className="logs-row-labels">
+          <LogLabels allRows={allRows} labels={row.uniqueLabels} onClickLabel={onClickLabel} />
+        </div>
+      )}
+      <div className="logs-row-message">
+        {needsHighlighter ? (
+          <Highlighter
+            textToHighlight={row.entry}
+            searchWords={highlights}
+            findChunks={findHighlightChunksInText}
+            highlightClassName={highlightClassName}
+          />
+        ) : (
+          row.entry
+        )}
+      </div>
+    </>
+  );
+}
+
+function renderMetaItem(value: any, kind: LogsMetaKind) {
+  if (kind === LogsMetaKind.LabelsMap) {
+    return (
+      <span className="logs-meta-item__value-labels">
+        <LogLabels labels={value} plain />
+      </span>
+    );
+  }
+  return value;
+}
+
 interface LogsProps {
 interface LogsProps {
   className?: string;
   className?: string;
   data: LogsModel;
   data: LogsModel;
+  highlighterExpressions: string[];
   loading: boolean;
   loading: boolean;
   position: string;
   position: string;
   range?: RawTimeRange;
   range?: RawTimeRange;
+  scanning?: boolean;
+  scanRange?: RawTimeRange;
   onChangeTime?: (range: RawTimeRange) => void;
   onChangeTime?: (range: RawTimeRange) => void;
+  onClickLabel?: (label: string, value: string) => void;
+  onStartScanning?: () => void;
+  onStopScanning?: () => void;
 }
 }
 
 
 interface LogsState {
 interface LogsState {
   dedup: LogsDedupStrategy;
   dedup: LogsDedupStrategy;
-  showLabels: boolean;
+  deferLogs: boolean;
+  hiddenLogLevels: Set<LogLevel>;
+  renderAll: boolean;
+  showLabels: boolean | null; // Tristate: null means auto
   showLocalTime: boolean;
   showLocalTime: boolean;
   showUtc: boolean;
   showUtc: boolean;
 }
 }
 
 
 export default class Logs extends PureComponent<LogsProps, LogsState> {
 export default class Logs extends PureComponent<LogsProps, LogsState> {
+  deferLogsTimer: NodeJS.Timer;
+  renderAllTimer: NodeJS.Timer;
+
   state = {
   state = {
     dedup: LogsDedupStrategy.none,
     dedup: LogsDedupStrategy.none,
-    showLabels: true,
+    deferLogs: true,
+    hiddenLogLevels: new Set(),
+    renderAll: false,
+    showLabels: null,
     showLocalTime: true,
     showLocalTime: true,
     showUtc: false,
     showUtc: false,
   };
   };
 
 
+  componentDidMount() {
+    // Staged rendering
+    if (this.state.deferLogs) {
+      const { data } = this.props;
+      const rowCount = data && data.rows ? data.rows.length : 0;
+      // Render all right away if not too far over the limit
+      const renderAll = rowCount <= PREVIEW_LIMIT * 2;
+      this.deferLogsTimer = setTimeout(() => this.setState({ deferLogs: false, renderAll }), rowCount);
+    }
+  }
+
+  componentDidUpdate(prevProps, prevState) {
+    // Staged rendering
+    if (prevState.deferLogs && !this.state.deferLogs && !this.state.renderAll) {
+      this.renderAllTimer = setTimeout(() => this.setState({ renderAll: true }), 2000);
+    }
+  }
+
+  componentWillUnmount() {
+    clearTimeout(this.deferLogsTimer);
+    clearTimeout(this.renderAllTimer);
+  }
+
   onChangeDedup = (dedup: LogsDedupStrategy) => {
   onChangeDedup = (dedup: LogsDedupStrategy) => {
     this.setState(prevState => {
     this.setState(prevState => {
       if (prevState.dedup === dedup) {
       if (prevState.dedup === dedup) {
@@ -76,20 +203,72 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
     });
     });
   };
   };
 
 
+  onToggleLogLevel = (rawLevel: string, hiddenRawLevels: Set<string>) => {
+    const hiddenLogLevels: Set<LogLevel> = new Set(Array.from(hiddenRawLevels).map(level => LogLevel[level]));
+    this.setState({ hiddenLogLevels });
+  };
+
+  onClickScan = (event: React.SyntheticEvent) => {
+    event.preventDefault();
+    this.props.onStartScanning();
+  };
+
+  onClickStopScan = (event: React.SyntheticEvent) => {
+    event.preventDefault();
+    this.props.onStopScanning();
+  };
+
   render() {
   render() {
-    const { className = '', data, loading = false, position, range } = this.props;
-    const { dedup, showLabels, showLocalTime, showUtc } = this.state;
+    const {
+      className = '',
+      data,
+      highlighterExpressions,
+      loading = false,
+      onClickLabel,
+      position,
+      range,
+      scanning,
+      scanRange,
+    } = this.props;
+    const { dedup, deferLogs, hiddenLogLevels, renderAll, showLocalTime, showUtc } = this.state;
+    let { showLabels } = this.state;
     const hasData = data && data.rows && data.rows.length > 0;
     const hasData = data && data.rows && data.rows.length > 0;
-    const dedupedData = dedupLogRows(data, dedup);
+    const showDuplicates = dedup !== LogsDedupStrategy.none;
+
+    // Filtering
+    const filteredData = filterLogLevels(data, hiddenLogLevels);
+    const dedupedData = dedupLogRows(filteredData, dedup);
     const dedupCount = dedupedData.rows.reduce((sum, row) => sum + row.duplicates, 0);
     const dedupCount = dedupedData.rows.reduce((sum, row) => sum + row.duplicates, 0);
     const meta = [...data.meta];
     const meta = [...data.meta];
     if (dedup !== LogsDedupStrategy.none) {
     if (dedup !== LogsDedupStrategy.none) {
       meta.push({
       meta.push({
         label: 'Dedup count',
         label: 'Dedup count',
-        value: String(dedupCount),
+        value: dedupCount,
+        kind: LogsMetaKind.Number,
       });
       });
     }
     }
-    const cssColumnSizes = ['3px']; // Log-level indicator line
+
+    // Staged rendering
+    const processedRows = dedupedData.rows;
+    const firstRows = processedRows.slice(0, PREVIEW_LIMIT);
+    const lastRows = processedRows.slice(PREVIEW_LIMIT);
+
+    // Check for labels
+    if (showLabels === null) {
+      if (hasData) {
+        showLabels = data.rows.some(row => _.size(row.uniqueLabels) > 0);
+      } else {
+        showLabels = true;
+      }
+    }
+
+    // Grid options
+    const cssColumnSizes = [];
+    if (showDuplicates) {
+      cssColumnSizes.push('max-content');
+    }
+    // Log-level indicator line
+    cssColumnSizes.push('3px');
     if (showUtc) {
     if (showUtc) {
       cssColumnSizes.push('minmax(100px, max-content)');
       cssColumnSizes.push('minmax(100px, max-content)');
     }
     }
@@ -97,13 +276,15 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
       cssColumnSizes.push('minmax(100px, max-content)');
       cssColumnSizes.push('minmax(100px, max-content)');
     }
     }
     if (showLabels) {
     if (showLabels) {
-      cssColumnSizes.push('minmax(100px, 25%)');
+      cssColumnSizes.push('fit-content(20%)');
     }
     }
     cssColumnSizes.push('1fr');
     cssColumnSizes.push('1fr');
     const logEntriesStyle = {
     const logEntriesStyle = {
       gridTemplateColumns: cssColumnSizes.join(' '),
       gridTemplateColumns: cssColumnSizes.join(' '),
     };
     };
 
 
+    const scanText = scanRange ? `Scanning ${rangeUtil.describeTimeRange(scanRange)}` : 'Scanning...';
+
     return (
     return (
       <div className={`${className} logs`}>
       <div className={`${className} logs`}>
         <div className="logs-graph">
         <div className="logs-graph">
@@ -113,6 +294,7 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
             range={range}
             range={range}
             id={`explore-logs-graph-${position}`}
             id={`explore-logs-graph-${position}`}
             onChangeTime={this.props.onChangeTime}
             onChangeTime={this.props.onChangeTime}
+            onToggleSeries={this.onToggleLogLevel}
             userOptions={graphOptions}
             userOptions={graphOptions}
           />
           />
         </div>
         </div>
@@ -152,7 +334,7 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
                   {meta.map(item => (
                   {meta.map(item => (
                     <div className="logs-meta-item" key={item.label}>
                     <div className="logs-meta-item" key={item.label}>
                       <span className="logs-meta-item__label">{item.label}:</span>
                       <span className="logs-meta-item__label">{item.label}:</span>
-                      <span className="logs-meta-item__value">{item.value}</span>
+                      <span className="logs-meta-item__value">{renderMetaItem(item.value, item.kind)}</span>
                     </div>
                     </div>
                   ))}
                   ))}
                 </div>
                 </div>
@@ -162,36 +344,57 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
 
 
         <div className="logs-entries" style={logEntriesStyle}>
         <div className="logs-entries" style={logEntriesStyle}>
           {hasData &&
           {hasData &&
-            dedupedData.rows.map(row => (
-              <Fragment key={row.key}>
-                <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''}>
-                  {row.duplicates > 0 && (
-                    <div className="logs-row-level__duplicates" title={`${row.duplicates} duplicates`}>
-                      {Array.apply(null, { length: row.duplicates }).map(index => (
-                        <div className="logs-row-level__duplicate" key={`${index}`} />
-                      ))}
-                    </div>
-                  )}
-                </div>
-                {showUtc && <div title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>{row.timestamp}</div>}
-                {showLocalTime && <div title={`${row.timestamp} (${row.timeFromNow})`}>{row.timeLocal}</div>}
-                {showLabels && (
-                  <div className="max-width" title={row.labels}>
-                    {row.labels}
-                  </div>
-                )}
-                <div>
-                  <Highlighter
-                    textToHighlight={row.entry}
-                    searchWords={row.searchWords}
-                    findChunks={findHighlightChunksInText}
-                    highlightClassName="logs-row-match-highlight"
-                  />
-                </div>
-              </Fragment>
+            !deferLogs &&
+            // Only inject highlighterExpression in the first set for performance reasons
+            firstRows.map(row => (
+              <Row
+                key={row.key + row.duplicates}
+                allRows={processedRows}
+                highlighterExpressions={highlighterExpressions}
+                row={row}
+                showDuplicates={showDuplicates}
+                showLabels={showLabels}
+                showLocalTime={showLocalTime}
+                showUtc={showUtc}
+                onClickLabel={onClickLabel}
+              />
+            ))}
+          {hasData &&
+            !deferLogs &&
+            renderAll &&
+            lastRows.map(row => (
+              <Row
+                key={row.key + row.duplicates}
+                allRows={processedRows}
+                row={row}
+                showDuplicates={showDuplicates}
+                showLabels={showLabels}
+                showLocalTime={showLocalTime}
+                showUtc={showUtc}
+                onClickLabel={onClickLabel}
+              />
             ))}
             ))}
+          {hasData && deferLogs && <span>Rendering {dedupedData.rows.length} rows...</span>}
         </div>
         </div>
-        {!loading && !hasData && 'No data was returned.'}
+        {!loading &&
+          !hasData &&
+          !scanning && (
+            <div className="logs-nodata">
+              No logs found.
+              <a className="link" onClick={this.onClickScan}>
+                Scan for older logs
+              </a>
+            </div>
+          )}
+
+        {scanning && (
+          <div className="logs-nodata">
+            <span>{scanText}</span>
+            <a className="link" onClick={this.onClickStopScan}>
+              Stop scan
+            </a>
+          </div>
+        )}
       </div>
       </div>
     );
     );
   }
   }

+ 10 - 7
public/app/features/explore/TimePicker.tsx

@@ -35,7 +35,7 @@ interface TimePickerProps {
   isOpen?: boolean;
   isOpen?: boolean;
   isUtc?: boolean;
   isUtc?: boolean;
   range?: RawTimeRange;
   range?: RawTimeRange;
-  onChangeTime?: (Range) => void;
+  onChangeTime?: (range: RawTimeRange, scanning?: boolean) => void;
 }
 }
 
 
 interface TimePickerState {
 interface TimePickerState {
@@ -65,7 +65,7 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
       initialRange: DEFAULT_RANGE,
       initialRange: DEFAULT_RANGE,
       refreshInterval: '',
       refreshInterval: '',
     };
     };
-  }
+  } //Temp solution... How do detect if ds supports table format?
 
 
   static getDerivedStateFromProps(props, state) {
   static getDerivedStateFromProps(props, state) {
     if (state.initialRange && state.initialRange === props.range) {
     if (state.initialRange && state.initialRange === props.range) {
@@ -92,12 +92,13 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
     };
     };
   }
   }
 
 
-  move(direction: number) {
+  move(direction: number, scanning?: boolean): RawTimeRange {
     const { onChangeTime } = this.props;
     const { onChangeTime } = this.props;
     const { fromRaw, toRaw } = this.state;
     const { fromRaw, toRaw } = this.state;
     const from = dateMath.parse(fromRaw, false);
     const from = dateMath.parse(fromRaw, false);
     const to = dateMath.parse(toRaw, true);
     const to = dateMath.parse(toRaw, true);
-    const timespan = (to.valueOf() - from.valueOf()) / 2;
+    const step = scanning ? 1 : 2;
+    const timespan = (to.valueOf() - from.valueOf()) / step;
 
 
     let nextTo, nextFrom;
     let nextTo, nextFrom;
     if (direction === -1) {
     if (direction === -1) {
@@ -122,8 +123,8 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
 
 
     const nextTimeRange: TimeRange = {
     const nextTimeRange: TimeRange = {
       raw: nextRange,
       raw: nextRange,
-      from,
-      to,
+      from: nextRange.from,
+      to: nextRange.to,
     };
     };
 
 
     this.setState(
     this.setState(
@@ -133,9 +134,11 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
         toRaw: nextRange.to.format(DATE_FORMAT),
         toRaw: nextRange.to.format(DATE_FORMAT),
       },
       },
       () => {
       () => {
-        onChangeTime(nextTimeRange);
+        onChangeTime(nextTimeRange, scanning);
       }
       }
     );
     );
+
+    return nextRange;
   }
   }
 
 
   handleChangeFrom = e => {
   handleChangeFrom = e => {

+ 1 - 1
public/app/features/teams/CreateTeamCtrl.ts

@@ -1,6 +1,6 @@
 import coreModule from 'app/core/core_module';
 import coreModule from 'app/core/core_module';
 
 
-export default class CreateTeamCtrl {
+export class CreateTeamCtrl {
   name: string;
   name: string;
   email: string;
   email: string;
   navModel: any;
   navModel: any;

+ 1 - 1
public/app/partials/login.html

@@ -22,7 +22,7 @@
             <button type="submit" class="btn btn-large p-x-2 btn-inverse btn-loading" ng-if="loggingIn">
             <button type="submit" class="btn btn-large p-x-2 btn-inverse btn-loading" ng-if="loggingIn">
               Logging In<span>.</span><span>.</span><span>.</span>
               Logging In<span>.</span><span>.</span><span>.</span>
             </button>
             </button>
-            <div class="small login-button-forgot-password">
+            <div class="small login-button-forgot-password" ng-hide="ldapEnabled || authProxyEnabled">
               <a href="user/password/send-reset-email">
               <a href="user/password/send-reset-email">
                 Forgot your password?
                 Forgot your password?
               </a>
               </a>

+ 8 - 1
public/app/partials/reset_password.html

@@ -3,7 +3,14 @@
 <div class="page-container page-body">
 <div class="page-container page-body">
 	<div class="signup">
 	<div class="signup">
 		<h3 class="p-b-1">Reset password</h3>
 		<h3 class="p-b-1">Reset password</h3>
-		<form name="sendResetForm" class="login-form gf-form-group" ng-show="mode === 'send'">
+
+		<div ng-if="ldapEnabled || authProxyEnabled">
+			You cannot reset password when LDAP or Auth Proxy authentication is enabled.
+		</div>
+		<div ng-if="disableLoginForm">
+			You cannot reset password when login form is disabled.
+		</div>
+		<form name="sendResetForm" class="login-form gf-form-group" ng-show="mode === 'send'" ng-hide="ldapEnabled || authProxyEnabled || disableLoginForm">
 			<div class="gf-form">
 			<div class="gf-form">
 					<span class="gf-form-label width-7">User</span>
 					<span class="gf-form-label width-7">User</span>
 					<input type="text" name="username" class="gf-form-input max-width-14" required ng-model='formModel.userOrEmail' placeholder="email or username">
 					<input type="text" name="username" class="gf-form-input max-width-14" required ng-model='formModel.userOrEmail' placeholder="email or username">

+ 15 - 16
public/app/plugins/datasource/elasticsearch/bucket_agg.ts

@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import _ from 'lodash';
 import * as queryDef from './query_def';
 import * as queryDef from './query_def';
 
 
-export function elasticBucketAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
-    controller: 'ElasticBucketAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-    },
-  };
-}
-
 export class ElasticBucketAggCtrl {
 export class ElasticBucketAggCtrl {
-  /** @nginject */
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const bucketAggs = $scope.target.bucketAggs;
     const bucketAggs = $scope.target.bucketAggs;
 
 
@@ -226,5 +212,18 @@ export class ElasticBucketAggCtrl {
   }
   }
 }
 }
 
 
+export function elasticBucketAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
+    controller: ElasticBucketAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+    },
+  };
+}
+
 coreModule.directive('elasticBucketAgg', elasticBucketAgg);
 coreModule.directive('elasticBucketAgg', elasticBucketAgg);
-coreModule.controller('ElasticBucketAggCtrl', ElasticBucketAggCtrl);

+ 16 - 16
public/app/plugins/datasource/elasticsearch/metric_agg.ts

@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import _ from 'lodash';
 import * as queryDef from './query_def';
 import * as queryDef from './query_def';
 
 
-export function elasticMetricAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
-    controller: 'ElasticMetricAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-      esVersion: '=',
-    },
-  };
-}
-
 export class ElasticMetricAggCtrl {
 export class ElasticMetricAggCtrl {
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const metricAggs = $scope.target.metrics;
     const metricAggs = $scope.target.metrics;
     $scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
     $scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
@@ -209,5 +195,19 @@ export class ElasticMetricAggCtrl {
   }
   }
 }
 }
 
 
+export function elasticMetricAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
+    controller: ElasticMetricAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+      esVersion: '=',
+    },
+  };
+}
+
 coreModule.directive('elasticMetricAgg', elasticMetricAgg);
 coreModule.directive('elasticMetricAgg', elasticMetricAgg);
-coreModule.controller('ElasticMetricAggCtrl', ElasticMetricAggCtrl);

+ 10 - 0
public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html

@@ -70,6 +70,16 @@
 			</label>
 			</label>
 			<input class="gf-form-input max-width-12" type="number" ng-model="agg.settings.trimEdges" ng-change="onChangeInternal()">
 			<input class="gf-form-input max-width-12" type="number" ng-model="agg.settings.trimEdges" ng-change="onChangeInternal()">
 		</div>
 		</div>
+		<div class="gf-form offset-width-7">
+			<label class="gf-form-label width-10">
+				Offset
+				<info-popover mode="right-normal">
+					Change the start value of each bucket by the specified positive (+) or negative offset (-) duration, such as 1h for an hour, or 1d for a day
+				</info-popover>
+			</label>
+			<input class="gf-form-input max-width-12" type="text" ng-model="agg.settings.offset" ng-change="onChangeInternal()">
+		</div>
+
 	</div>
 	</div>
 
 
 	<div ng-if="agg.type === 'histogram'">
 	<div ng-if="agg.type === 'histogram'">

+ 4 - 0
public/app/plugins/datasource/elasticsearch/query_builder.ts

@@ -72,6 +72,10 @@ export class ElasticQueryBuilder {
     esAgg.extended_bounds = { min: '$timeFrom', max: '$timeTo' };
     esAgg.extended_bounds = { min: '$timeFrom', max: '$timeTo' };
     esAgg.format = 'epoch_millis';
     esAgg.format = 'epoch_millis';
 
 
+    if (settings.offset !== '') {
+      esAgg.offset = settings.offset;
+    }
+
     if (esAgg.interval === 'auto') {
     if (esAgg.interval === 'auto') {
       esAgg.interval = '$__interval';
       esAgg.interval = '$__interval';
     }
     }

+ 1 - 1
public/app/plugins/datasource/logging/components/LoggingCheatSheet.tsx

@@ -15,7 +15,7 @@ const CHEAT_SHEET_ITEMS = [
 
 
 export default (props: any) => (
 export default (props: any) => (
   <div>
   <div>
-    <h1>Logging Cheat Sheet</h1>
+    <h2>Logging Cheat Sheet</h2>
     {CHEAT_SHEET_ITEMS.map(item => (
     {CHEAT_SHEET_ITEMS.map(item => (
       <div className="cheat-sheet-item" key={item.expression}>
       <div className="cheat-sheet-item" key={item.expression}>
         <div className="cheat-sheet-item__title">{item.title}</div>
         <div className="cheat-sheet-item__title">{item.title}</div>

+ 6 - 50
public/app/plugins/datasource/logging/components/LoggingStartPage.tsx

@@ -1,59 +1,15 @@
 import React, { PureComponent } from 'react';
 import React, { PureComponent } from 'react';
-import classNames from 'classnames';
-
 import LoggingCheatSheet from './LoggingCheatSheet';
 import LoggingCheatSheet from './LoggingCheatSheet';
 
 
-const TAB_MENU_ITEMS = [
-  {
-    text: 'Start',
-    id: 'start',
-    icon: 'fa fa-rocket',
-  },
-];
-
-export default class LoggingStartPage extends PureComponent<any, { active: string }> {
-  state = {
-    active: 'start',
-  };
-
-  onClickTab = active => {
-    this.setState({ active });
-  };
+interface Props {
+  onClickExample: () => void;
+}
 
 
+export default class LoggingStartPage extends PureComponent<Props> {
   render() {
   render() {
-    const { active } = this.state;
-    const customCss = '';
-
     return (
     return (
-      <div style={{ margin: '45px 0', border: '1px solid #ddd', borderRadius: 5 }}>
-        <div className="page-header-canvas">
-          <div className="page-container">
-            <div className="page-header">
-              <nav>
-                <ul className={`gf-tabs ${customCss}`}>
-                  {TAB_MENU_ITEMS.map((tab, idx) => {
-                    const tabClasses = classNames({
-                      'gf-tabs-link': true,
-                      active: tab.id === active,
-                    });
-
-                    return (
-                      <li className="gf-tabs-item" key={tab.id}>
-                        <a className={tabClasses} onClick={() => this.onClickTab(tab.id)}>
-                          <i className={tab.icon} />
-                          {tab.text}
-                        </a>
-                      </li>
-                    );
-                  })}
-                </ul>
-              </nav>
-            </div>
-          </div>
-        </div>
-        <div className="page-container page-body">
-          {active === 'start' && <LoggingCheatSheet onClickExample={this.props.onClickExample} />}
-        </div>
+      <div className="grafana-info-box grafana-info-box--max-lg">
+        <LoggingCheatSheet onClickExample={this.props.onClickExample} />
       </div>
       </div>
     );
     );
   }
   }

+ 21 - 25
public/app/plugins/datasource/logging/datasource.ts

@@ -3,9 +3,11 @@ import _ from 'lodash';
 import * as dateMath from 'app/core/utils/datemath';
 import * as dateMath from 'app/core/utils/datemath';
 import { LogsStream, LogsModel, makeSeriesForLogs } from 'app/core/logs_model';
 import { LogsStream, LogsModel, makeSeriesForLogs } from 'app/core/logs_model';
 import { PluginMeta, DataQuery } from 'app/types';
 import { PluginMeta, DataQuery } from 'app/types';
+import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
 
 
 import LanguageProvider from './language_provider';
 import LanguageProvider from './language_provider';
 import { mergeStreamsToLogs } from './result_transformer';
 import { mergeStreamsToLogs } from './result_transformer';
+import { formatQuery, parseQuery } from './query_utils';
 
 
 export const DEFAULT_LIMIT = 1000;
 export const DEFAULT_LIMIT = 1000;
 
 
@@ -16,31 +18,6 @@ const DEFAULT_QUERY_PARAMS = {
   query: '',
   query: '',
 };
 };
 
 
-const QUERY_REGEXP = /({\w+="[^"]+"})?\s*(\w[^{]+)?\s*({\w+="[^"]+"})?/;
-export function parseQuery(input: string) {
-  const match = input.match(QUERY_REGEXP);
-  let query = '';
-  let regexp = '';
-
-  if (match) {
-    if (match[1]) {
-      query = match[1];
-    }
-    if (match[2]) {
-      regexp = match[2].trim();
-    }
-    if (match[3]) {
-      if (match[1]) {
-        query = `${match[1].slice(0, -1)},${match[3].slice(1)}`;
-      } else {
-        query = match[3];
-      }
-    }
-  }
-
-  return { query, regexp };
-}
-
 function serializeParams(data: any) {
 function serializeParams(data: any) {
   return Object.keys(data)
   return Object.keys(data)
     .map(k => {
     .map(k => {
@@ -125,6 +102,25 @@ export default class LoggingDatasource {
     });
     });
   }
   }
 
 
+  modifyQuery(query: DataQuery, action: any): DataQuery {
+    const parsed = parseQuery(query.expr || '');
+    let selector = parsed.query;
+    switch (action.type) {
+      case 'ADD_FILTER': {
+        selector = addLabelToSelector(selector, action.key, action.value);
+        break;
+      }
+      default:
+        break;
+    }
+    const expression = formatQuery(selector, parsed.regexp);
+    return { ...query, expr: expression };
+  }
+
+  getHighlighterExpression(query: DataQuery): string {
+    return parseQuery(query.expr).regexp;
+  }
+
   getTime(date, roundUp) {
   getTime(date, roundUp) {
     if (_.isString(date)) {
     if (_.isString(date)) {
       date = dateMath.parse(date, roundUp);
       date = dateMath.parse(date, roundUp);

+ 29 - 2
public/app/plugins/datasource/logging/language_provider.test.ts

@@ -8,9 +8,10 @@ describe('Language completion provider', () => {
   };
   };
 
 
   describe('empty query suggestions', () => {
   describe('empty query suggestions', () => {
-    it('returns default suggestions on emtpty context', () => {
+    it('returns no suggestions on emtpty context', () => {
       const instance = new LanguageProvider(datasource);
       const instance = new LanguageProvider(datasource);
-      const result = instance.provideCompletionItems({ text: '', prefix: '', wrapperClasses: [] });
+      const value = Plain.deserialize('');
+      const result = instance.provideCompletionItems({ text: '', prefix: '', value, wrapperClasses: [] });
       expect(result.context).toBeUndefined();
       expect(result.context).toBeUndefined();
       expect(result.refresher).toBeUndefined();
       expect(result.refresher).toBeUndefined();
       expect(result.suggestions.length).toEqual(0);
       expect(result.suggestions.length).toEqual(0);
@@ -38,6 +39,32 @@ describe('Language completion provider', () => {
         },
         },
       ]);
       ]);
     });
     });
+
+    it('returns no suggestions within regexp', () => {
+      const instance = new LanguageProvider(datasource);
+      const value = Plain.deserialize('{} ()');
+      const range = value.selection.merge({
+        anchorOffset: 4,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const history = [
+        {
+          query: { refId: '1', expr: '{app="foo"}' },
+        },
+      ];
+      const result = instance.provideCompletionItems(
+        {
+          text: '',
+          prefix: '',
+          value: valueWithSelection,
+          wrapperClasses: [],
+        },
+        { history }
+      );
+      expect(result.context).toBeUndefined();
+      expect(result.refresher).toBeUndefined();
+      expect(result.suggestions.length).toEqual(0);
+    });
   });
   });
 
 
   describe('label suggestions', () => {
   describe('label suggestions', () => {

+ 7 - 12
public/app/plugins/datasource/logging/language_provider.ts

@@ -10,7 +10,7 @@ import {
   HistoryItem,
   HistoryItem,
 } from 'app/types/explore';
 } from 'app/types/explore';
 import { parseSelector, labelRegexp, selectorRegexp } from 'app/plugins/datasource/prometheus/language_utils';
 import { parseSelector, labelRegexp, selectorRegexp } from 'app/plugins/datasource/prometheus/language_utils';
-import PromqlSyntax from 'app/plugins/datasource/prometheus/promql';
+import syntax from './syntax';
 import { DataQuery } from 'app/types';
 import { DataQuery } from 'app/types';
 
 
 const DEFAULT_KEYS = ['job', 'namespace'];
 const DEFAULT_KEYS = ['job', 'namespace'];
@@ -55,7 +55,7 @@ export default class LoggingLanguageProvider extends LanguageProvider {
   cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
   cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
 
 
   getSyntax() {
   getSyntax() {
-    return PromqlSyntax;
+    return syntax;
   }
   }
 
 
   request = url => {
   request = url => {
@@ -70,19 +70,14 @@ export default class LoggingLanguageProvider extends LanguageProvider {
   };
   };
 
 
   // Keep this DOM-free for testing
   // Keep this DOM-free for testing
-  provideCompletionItems({ prefix, wrapperClasses, text }: TypeaheadInput, context?: any): TypeaheadOutput {
-    // Syntax spans have 3 classes by default. More indicate a recognized token
-    const tokenRecognized = wrapperClasses.length > 3;
+  provideCompletionItems({ prefix, wrapperClasses, text, value }: TypeaheadInput, context?: any): TypeaheadOutput {
+    // Local text properties
+    const empty = value.document.text.length === 0;
     // Determine candidates by CSS context
     // Determine candidates by CSS context
     if (_.includes(wrapperClasses, 'context-labels')) {
     if (_.includes(wrapperClasses, 'context-labels')) {
-      // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|}
+      // Suggestions for {|} and {foo=|}
       return this.getLabelCompletionItems.apply(this, arguments);
       return this.getLabelCompletionItems.apply(this, arguments);
-    } else if (
-      // Show default suggestions in a couple of scenarios
-      (prefix && !tokenRecognized) || // Non-empty prefix, but not inside known token
-      (prefix === '' && !text.match(/^[\]})\s]+$/)) || // Empty prefix, but not following a closing brace
-      text.match(/[+\-*/^%]/) // Anything after binary operator
-    ) {
+    } else if (empty) {
       return this.getEmptyCompletionItems(context || {});
       return this.getEmptyCompletionItems(context || {});
     }
     }
 
 

+ 19 - 1
public/app/plugins/datasource/logging/datasource.test.ts → public/app/plugins/datasource/logging/query_utils.test.ts

@@ -1,4 +1,4 @@
-import { parseQuery } from './datasource';
+import { parseQuery } from './query_utils';
 
 
 describe('parseQuery', () => {
 describe('parseQuery', () => {
   it('returns empty for empty string', () => {
   it('returns empty for empty string', () => {
@@ -35,4 +35,22 @@ describe('parseQuery', () => {
       regexp: 'x|y',
       regexp: 'x|y',
     });
     });
   });
   });
+
+  it('returns query for selector with two labels', () => {
+    expect(parseQuery('{foo="bar", baz="42"}')).toEqual({
+      query: '{foo="bar", baz="42"}',
+      regexp: '',
+    });
+  });
+
+  it('returns query and regexp with quantifiers', () => {
+    expect(parseQuery('{foo="bar"} \\.java:[0-9]{1,5}')).toEqual({
+      query: '{foo="bar"}',
+      regexp: '\\.java:[0-9]{1,5}',
+    });
+    expect(parseQuery('\\.java:[0-9]{1,5} {foo="bar"}')).toEqual({
+      query: '{foo="bar"}',
+      regexp: '\\.java:[0-9]{1,5}',
+    });
+  });
 });
 });

+ 17 - 0
public/app/plugins/datasource/logging/query_utils.ts

@@ -0,0 +1,17 @@
+const selectorRegexp = /(?:^|\s){[^{]*}/g;
+export function parseQuery(input: string) {
+  const match = input.match(selectorRegexp);
+  let query = '';
+  let regexp = input;
+
+  if (match) {
+    query = match[0].trim();
+    regexp = input.replace(selectorRegexp, '').trim();
+  }
+
+  return { query, regexp };
+}
+
+export function formatQuery(selector: string, search: string): string {
+  return `${selector || ''} ${search || ''}`.trim();
+}

+ 25 - 19
public/app/plugins/datasource/logging/result_transformer.test.ts

@@ -11,11 +11,17 @@ import {
 
 
 describe('getLoglevel()', () => {
 describe('getLoglevel()', () => {
   it('returns no log level on empty line', () => {
   it('returns no log level on empty line', () => {
-    expect(getLogLevel('')).toBe(LogLevel.none);
+    expect(getLogLevel('')).toBe(LogLevel.unkown);
   });
   });
 
 
   it('returns no log level on when level is part of a word', () => {
   it('returns no log level on when level is part of a word', () => {
-    expect(getLogLevel('this is a warning')).toBe(LogLevel.none);
+    expect(getLogLevel('this is information')).toBe(LogLevel.unkown);
+  });
+
+  it('returns same log level for long and short version', () => {
+    expect(getLogLevel('[Warn]')).toBe(LogLevel.warning);
+    expect(getLogLevel('[Warning]')).toBe(LogLevel.warning);
+    expect(getLogLevel('[Warn]')).toBe('warning');
   });
   });
 
 
   it('returns log level on line contains a log level', () => {
   it('returns log level on line contains a log level', () => {
@@ -35,7 +41,7 @@ describe('parseLabels()', () => {
   });
   });
 
 
   it('returns labels on labels string', () => {
   it('returns labels on labels string', () => {
-    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: '"bar"', baz: '"42"' });
+    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: 'bar', baz: '42' });
   });
   });
 });
 });
 
 
@@ -46,7 +52,7 @@ describe('formatLabels()', () => {
   });
   });
 
 
   it('returns label string on label set', () => {
   it('returns label string on label set', () => {
-    expect(formatLabels({ foo: '"bar"', baz: '"42"' })).toEqual('{baz="42", foo="bar"}');
+    expect(formatLabels({ foo: 'bar', baz: '42' })).toEqual('{baz="42", foo="bar"}');
   });
   });
 });
 });
 
 
@@ -57,14 +63,14 @@ describe('findCommonLabels()', () => {
   });
   });
 
 
   it('returns no common labels on differing sets', () => {
   it('returns no common labels on differing sets', () => {
-    expect(findCommonLabels([{ foo: '"bar"' }, {}])).toEqual({});
-    expect(findCommonLabels([{}, { foo: '"bar"' }])).toEqual({});
-    expect(findCommonLabels([{ baz: '42' }, { foo: '"bar"' }])).toEqual({});
-    expect(findCommonLabels([{ foo: '42', baz: '"bar"' }, { foo: '"bar"' }])).toEqual({});
+    expect(findCommonLabels([{ foo: 'bar' }, {}])).toEqual({});
+    expect(findCommonLabels([{}, { foo: 'bar' }])).toEqual({});
+    expect(findCommonLabels([{ baz: '42' }, { foo: 'bar' }])).toEqual({});
+    expect(findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }])).toEqual({});
   });
   });
 
 
   it('returns the single labels set as common labels', () => {
   it('returns the single labels set as common labels', () => {
-    expect(findCommonLabels([{ foo: '"bar"' }])).toEqual({ foo: '"bar"' });
+    expect(findCommonLabels([{ foo: 'bar' }])).toEqual({ foo: 'bar' });
   });
   });
 });
 });
 
 
@@ -100,10 +106,10 @@ describe('mergeStreamsToLogs()', () => {
     expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
     expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
       {
       {
         entry: 'WARN boooo',
         entry: 'WARN boooo',
-        labels: '{foo="bar"}',
+        labels: { foo: 'bar' },
         key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
         key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
-        logLevel: 'warn',
-        uniqueLabels: '',
+        logLevel: 'warning',
+        uniqueLabels: {},
       },
       },
     ]);
     ]);
   });
   });
@@ -134,21 +140,21 @@ describe('mergeStreamsToLogs()', () => {
     expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
     expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
       {
       {
         entry: 'INFO 2',
         entry: 'INFO 2',
-        labels: '{foo="bar", baz="2"}',
+        labels: { foo: 'bar', baz: '2' },
         logLevel: 'info',
         logLevel: 'info',
-        uniqueLabels: '{baz="2"}',
+        uniqueLabels: { baz: '2' },
       },
       },
       {
       {
         entry: 'WARN boooo',
         entry: 'WARN boooo',
-        labels: '{foo="bar", baz="1"}',
-        logLevel: 'warn',
-        uniqueLabels: '{baz="1"}',
+        labels: { foo: 'bar', baz: '1' },
+        logLevel: 'warning',
+        uniqueLabels: { baz: '1' },
       },
       },
       {
       {
         entry: 'INFO 1',
         entry: 'INFO 1',
-        labels: '{foo="bar", baz="2"}',
+        labels: { foo: 'bar', baz: '2' },
         logLevel: 'info',
         logLevel: 'info',
-        uniqueLabels: '{baz="2"}',
+        uniqueLabels: { baz: '2' },
       },
       },
     ]);
     ]);
   });
   });

+ 25 - 10
public/app/plugins/datasource/logging/result_transformer.ts

@@ -9,18 +9,19 @@ import {
   LogsStream,
   LogsStream,
   LogsStreamEntry,
   LogsStreamEntry,
   LogsStreamLabels,
   LogsStreamLabels,
+  LogsMetaKind,
 } from 'app/core/logs_model';
 } from 'app/core/logs_model';
 import { DEFAULT_LIMIT } from './datasource';
 import { DEFAULT_LIMIT } from './datasource';
 
 
 /**
 /**
  * Returns the log level of a log line.
  * Returns the log level of a log line.
- * Parse the line for level words. If no level is found, it returns `LogLevel.none`.
+ * Parse the line for level words. If no level is found, it returns `LogLevel.unknown`.
  *
  *
  * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn`
  * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn`
  */
  */
 export function getLogLevel(line: string): LogLevel {
 export function getLogLevel(line: string): LogLevel {
   if (!line) {
   if (!line) {
-    return LogLevel.none;
+    return LogLevel.unkown;
   }
   }
   let level: LogLevel;
   let level: LogLevel;
   Object.keys(LogLevel).forEach(key => {
   Object.keys(LogLevel).forEach(key => {
@@ -32,7 +33,7 @@ export function getLogLevel(line: string): LogLevel {
     }
     }
   });
   });
   if (!level) {
   if (!level) {
-    level = LogLevel.none;
+    level = LogLevel.unkown;
   }
   }
   return level;
   return level;
 }
 }
@@ -40,7 +41,7 @@ export function getLogLevel(line: string): LogLevel {
 /**
 /**
  * Regexp to extract Prometheus-style labels
  * Regexp to extract Prometheus-style labels
  */
  */
-const labelRegexp = /\b(\w+)(!?=~?)("[^"\n]*?")/g;
+const labelRegexp = /\b(\w+)(!?=~?)"([^"\n]*?)"/g;
 
 
 /**
 /**
  * Returns a map of label keys to value from an input selector string.
  * Returns a map of label keys to value from an input selector string.
@@ -104,11 +105,17 @@ export function formatLabels(labels: LogsStreamLabels, defaultValue = ''): strin
     return defaultValue;
     return defaultValue;
   }
   }
   const labelKeys = Object.keys(labels).sort();
   const labelKeys = Object.keys(labels).sort();
-  const cleanSelector = labelKeys.map(key => `${key}=${labels[key]}`).join(', ');
+  const cleanSelector = labelKeys.map(key => `${key}="${labels[key]}"`).join(', ');
   return ['{', cleanSelector, '}'].join('');
   return ['{', cleanSelector, '}'].join('');
 }
 }
 
 
-export function processEntry(entry: LogsStreamEntry, labels: string, uniqueLabels: string, search: string): LogRow {
+export function processEntry(
+  entry: LogsStreamEntry,
+  labels: string,
+  parsedLabels: LogsStreamLabels,
+  uniqueLabels: LogsStreamLabels,
+  search: string
+): LogRow {
   const { line, timestamp } = entry;
   const { line, timestamp } = entry;
   // Assumes unique-ness, needs nanosec precision for timestamp
   // Assumes unique-ness, needs nanosec precision for timestamp
   const key = `EK${timestamp}${labels}`;
   const key = `EK${timestamp}${labels}`;
@@ -120,19 +127,22 @@ export function processEntry(entry: LogsStreamEntry, labels: string, uniqueLabel
 
 
   return {
   return {
     key,
     key,
-    labels,
     logLevel,
     logLevel,
     timeFromNow,
     timeFromNow,
     timeEpochMs,
     timeEpochMs,
     timeLocal,
     timeLocal,
     uniqueLabels,
     uniqueLabels,
     entry: line,
     entry: line,
+    labels: parsedLabels,
     searchWords: search ? [search] : [],
     searchWords: search ? [search] : [],
     timestamp: timestamp,
     timestamp: timestamp,
   };
   };
 }
 }
 
 
 export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT): LogsModel {
 export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT): LogsModel {
+  // Unique model identifier
+  const id = streams.map(stream => stream.labels).join();
+
   // Find unique labels for each stream
   // Find unique labels for each stream
   streams = streams.map(stream => ({
   streams = streams.map(stream => ({
     ...stream,
     ...stream,
@@ -141,7 +151,7 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
   const commonLabels = findCommonLabels(streams.map(model => model.parsedLabels));
   const commonLabels = findCommonLabels(streams.map(model => model.parsedLabels));
   streams = streams.map(stream => ({
   streams = streams.map(stream => ({
     ...stream,
     ...stream,
-    uniqueLabels: formatLabels(findUniqueLabels(stream.parsedLabels, commonLabels)),
+    uniqueLabels: findUniqueLabels(stream.parsedLabels, commonLabels),
   }));
   }));
 
 
   // Merge stream entries into single list of log rows
   // Merge stream entries into single list of log rows
@@ -149,7 +159,9 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
     .reduce(
     .reduce(
       (acc: LogRow[], stream: LogsStream) => [
       (acc: LogRow[], stream: LogsStream) => [
         ...acc,
         ...acc,
-        ...stream.entries.map(entry => processEntry(entry, stream.labels, stream.uniqueLabels, stream.search)),
+        ...stream.entries.map(entry =>
+          processEntry(entry, stream.labels, stream.parsedLabels, stream.uniqueLabels, stream.search)
+        ),
       ],
       ],
       []
       []
     )
     )
@@ -162,17 +174,20 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
   if (_.size(commonLabels) > 0) {
   if (_.size(commonLabels) > 0) {
     meta.push({
     meta.push({
       label: 'Common labels',
       label: 'Common labels',
-      value: formatLabels(commonLabels),
+      value: commonLabels,
+      kind: LogsMetaKind.LabelsMap,
     });
     });
   }
   }
   if (limit) {
   if (limit) {
     meta.push({
     meta.push({
       label: 'Limit',
       label: 'Limit',
       value: `${limit} (${sortedRows.length} returned)`,
       value: `${limit} (${sortedRows.length} returned)`,
+      kind: LogsMetaKind.String,
     });
     });
   }
   }
 
 
   return {
   return {
+    id,
     meta,
     meta,
     rows: sortedRows,
     rows: sortedRows,
   };
   };

+ 29 - 0
public/app/plugins/datasource/logging/syntax.ts

@@ -0,0 +1,29 @@
+/* tslint:disable max-line-length */
+
+const tokenizer = {
+  comment: {
+    pattern: /(^|[^\n])#.*/,
+    lookbehind: true,
+  },
+  'context-labels': {
+    pattern: /(^|\s)\{[^}]*(?=})/,
+    lookbehind: true,
+    inside: {
+      'label-key': {
+        pattern: /[a-z_]\w*(?=\s*(=|!=|=~|!~))/,
+        alias: 'attr-name',
+      },
+      'label-value': {
+        pattern: /"(?:\\.|[^\\"])*"/,
+        greedy: true,
+        alias: 'attr-value',
+      },
+      punctuation: /[{]/,
+    },
+  },
+  // number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
+  operator: new RegExp(`/&&?|\\|?\\||!=?|<(?:=>?|<|>)?|>[>=]?`, 'i'),
+  punctuation: /[{}`,.]/,
+};
+
+export default tokenizer;

+ 3 - 3
public/app/plugins/datasource/mysql/partials/annotations.editor.html

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 
 
 Or build your own conditionals using these macros which just return the values:
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
-- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
+- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>
 		</pre>

+ 3 - 3
public/app/plugins/datasource/mysql/partials/query.editor.html

@@ -151,7 +151,7 @@ Table:
 Macros:
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__timeGroup(column,'5m'[, fillvalue]) -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
 - $__timeGroup(column,'5m'[, fillvalue]) -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
      by setting fillvalue grafana will fill in missing values according to the interval
      by setting fillvalue grafana will fill in missing values according to the interval
@@ -169,8 +169,8 @@ GROUP BY 1
 ORDER BY 1
 ORDER BY 1
 
 
 Or build your own conditionals using these macros which just return the values:
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
-- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
+- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
     </pre>
     </pre>

+ 5 - 3
public/app/plugins/datasource/prometheus/add_label_to_query.ts

@@ -49,7 +49,7 @@ export function addLabelToQuery(query: string, key: string, value: string, opera
     const selectorWithLabel = addLabelToSelector(selector, key, value, operator);
     const selectorWithLabel = addLabelToSelector(selector, key, value, operator);
     lastIndex = match.index + match[1].length + 2;
     lastIndex = match.index + match[1].length + 2;
     suffix = query.slice(match.index + match[0].length);
     suffix = query.slice(match.index + match[0].length);
-    parts.push(prefix, '{', selectorWithLabel, '}');
+    parts.push(prefix, selectorWithLabel);
     match = selectorRegexp.exec(query);
     match = selectorRegexp.exec(query);
   }
   }
 
 
@@ -59,7 +59,7 @@ export function addLabelToQuery(query: string, key: string, value: string, opera
 
 
 const labelRegexp = /(\w+)\s*(=|!=|=~|!~)\s*("[^"]*")/g;
 const labelRegexp = /(\w+)\s*(=|!=|=~|!~)\s*("[^"]*")/g;
 
 
-function addLabelToSelector(selector: string, labelKey: string, labelValue: string, labelOperator?: string) {
+export function addLabelToSelector(selector: string, labelKey: string, labelValue: string, labelOperator?: string) {
   const parsedLabels = [];
   const parsedLabels = [];
 
 
   // Split selector into labels
   // Split selector into labels
@@ -76,13 +76,15 @@ function addLabelToSelector(selector: string, labelKey: string, labelValue: stri
   parsedLabels.push({ key: labelKey, operator: operatorForLabelKey, value: `"${labelValue}"` });
   parsedLabels.push({ key: labelKey, operator: operatorForLabelKey, value: `"${labelValue}"` });
 
 
   // Sort labels by key and put them together
   // Sort labels by key and put them together
-  return _.chain(parsedLabels)
+  const formatted = _.chain(parsedLabels)
     .uniqWith(_.isEqual)
     .uniqWith(_.isEqual)
     .compact()
     .compact()
     .sortBy('key')
     .sortBy('key')
     .map(({ key, operator, value }) => `${key}${operator}${value}`)
     .map(({ key, operator, value }) => `${key}${operator}${value}`)
     .value()
     .value()
     .join(',');
     .join(',');
+
+  return `{${formatted}}`;
 }
 }
 
 
 function isPositionInsideChars(text: string, position: number, openChar: string, closeChar: string) {
 function isPositionInsideChars(text: string, position: number, openChar: string, closeChar: string) {

+ 1 - 1
public/app/plugins/datasource/prometheus/components/PromCheatSheet.tsx

@@ -21,7 +21,7 @@ const CHEAT_SHEET_ITEMS = [
 
 
 export default (props: any) => (
 export default (props: any) => (
   <div>
   <div>
-    <h1>PromQL Cheat Sheet</h1>
+    <h2>PromQL Cheat Sheet</h2>
     {CHEAT_SHEET_ITEMS.map(item => (
     {CHEAT_SHEET_ITEMS.map(item => (
       <div className="cheat-sheet-item" key={item.expression}>
       <div className="cheat-sheet-item" key={item.expression}>
         <div className="cheat-sheet-item__title">{item.title}</div>
         <div className="cheat-sheet-item__title">{item.title}</div>

+ 6 - 50
public/app/plugins/datasource/prometheus/components/PromStart.tsx

@@ -1,59 +1,15 @@
 import React, { PureComponent } from 'react';
 import React, { PureComponent } from 'react';
-import classNames from 'classnames';
-
 import PromCheatSheet from './PromCheatSheet';
 import PromCheatSheet from './PromCheatSheet';
 
 
-const TAB_MENU_ITEMS = [
-  {
-    text: 'Start',
-    id: 'start',
-    icon: 'fa fa-rocket',
-  },
-];
-
-export default class PromStart extends PureComponent<any, { active: string }> {
-  state = {
-    active: 'start',
-  };
-
-  onClickTab = active => {
-    this.setState({ active });
-  };
+interface Props {
+  onClickExample: () => void;
+}
 
 
+export default class PromStart extends PureComponent<Props> {
   render() {
   render() {
-    const { active } = this.state;
-    const customCss = '';
-
     return (
     return (
-      <div style={{ margin: '45px 0', border: '1px solid #ddd', borderRadius: 5 }}>
-        <div className="page-header-canvas">
-          <div className="page-container">
-            <div className="page-header">
-              <nav>
-                <ul className={`gf-tabs ${customCss}`}>
-                  {TAB_MENU_ITEMS.map((tab, idx) => {
-                    const tabClasses = classNames({
-                      'gf-tabs-link': true,
-                      active: tab.id === active,
-                    });
-
-                    return (
-                      <li className="gf-tabs-item" key={tab.id}>
-                        <a className={tabClasses} onClick={() => this.onClickTab(tab.id)}>
-                          <i className={tab.icon} />
-                          {tab.text}
-                        </a>
-                      </li>
-                    );
-                  })}
-                </ul>
-              </nav>
-            </div>
-          </div>
-        </div>
-        <div className="page-container page-body">
-          {active === 'start' && <PromCheatSheet onClickExample={this.props.onClickExample} />}
-        </div>
+      <div className="grafana-info-box grafana-info-box--max-lg">
+        <PromCheatSheet onClickExample={this.props.onClickExample} />
       </div>
       </div>
     );
     );
   }
   }

+ 1 - 1
public/app/plugins/datasource/prometheus/datasource.ts

@@ -437,7 +437,7 @@ export class PrometheusDatasource {
   }
   }
 
 
   getQueryHints(query: DataQuery, result: any[]) {
   getQueryHints(query: DataQuery, result: any[]) {
-    return getQueryHints(query.expr, result, this);
+    return getQueryHints(query.expr || '', result, this);
   }
   }
 
 
   loadRules() {
   loadRules() {

+ 3 - 3
public/app/plugins/datasource/prometheus/language_provider.ts

@@ -9,8 +9,8 @@ import {
   TypeaheadOutput,
   TypeaheadOutput,
 } from 'app/types/explore';
 } from 'app/types/explore';
 
 
-import { parseSelector, processLabels, RATE_RANGES } from './language_utils';
-import PromqlSyntax, { FUNCTIONS } from './promql';
+import { parseSelector, processLabels } from './language_utils';
+import PromqlSyntax, { FUNCTIONS, RATE_RANGES } from './promql';
 
 
 const DEFAULT_KEYS = ['job', 'instance'];
 const DEFAULT_KEYS = ['job', 'instance'];
 const EMPTY_SELECTOR = '{}';
 const EMPTY_SELECTOR = '{}';
@@ -172,7 +172,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
       suggestions: [
       suggestions: [
         {
         {
           label: 'Range vector',
           label: 'Range vector',
-          items: [...RATE_RANGES].map(wrapLabel),
+          items: [...RATE_RANGES],
         },
         },
       ],
       ],
     };
     };

+ 15 - 2
public/app/plugins/datasource/prometheus/promql.ts

@@ -1,8 +1,19 @@
 /* tslint:disable max-line-length */
 /* tslint:disable max-line-length */
 
 
+import { CompletionItem } from 'app/types/explore';
+
+export const RATE_RANGES: CompletionItem[] = [
+  { label: '1m', sortText: '00:01:00' },
+  { label: '5m', sortText: '00:05:00' },
+  { label: '10m', sortText: '00:10:00' },
+  { label: '30m', sortText: '00:30:00' },
+  { label: '1h', sortText: '01:00:00' },
+  { label: '1d', sortText: '24:00:00' },
+];
+
 export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without'];
 export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without'];
 
 
-const AGGREGATION_OPERATORS = [
+const AGGREGATION_OPERATORS: CompletionItem[] = [
   {
   {
     label: 'sum',
     label: 'sum',
     insertText: 'sum',
     insertText: 'sum',
@@ -375,9 +386,10 @@ const tokenizer = {
     lookbehind: true,
     lookbehind: true,
     inside: {
     inside: {
       'label-key': {
       'label-key': {
-        pattern: /[^,\s][^,]*[^,\s]*/,
+        pattern: /[^(),\s][^,)]*[^),\s]*/,
         alias: 'attr-name',
         alias: 'attr-name',
       },
       },
+      punctuation: /[()]/,
     },
     },
   },
   },
   'context-labels': {
   'context-labels': {
@@ -392,6 +404,7 @@ const tokenizer = {
         greedy: true,
         greedy: true,
         alias: 'attr-value',
         alias: 'attr-value',
       },
       },
+      punctuation: /[{]/,
     },
     },
   },
   },
   function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'),
   function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'),

+ 1 - 1
public/app/plugins/datasource/prometheus/query_hints.ts

@@ -96,7 +96,7 @@ export function getQueryHints(query: string, series?: any[], datasource?: any):
     }
     }
   }
   }
 
 
-  if (series.length >= SUM_HINT_THRESHOLD_COUNT) {
+  if (series && series.length >= SUM_HINT_THRESHOLD_COUNT) {
     const simpleMetric = query.trim().match(/^\w+$/);
     const simpleMetric = query.trim().match(/^\w+$/);
     if (simpleMetric) {
     if (simpleMetric) {
       hints.push({
       hints.push({

+ 14 - 1
public/app/plugins/datasource/prometheus/specs/add_label_to_query.test.ts

@@ -1,4 +1,4 @@
-import addLabelToQuery from '../add_label_to_query';
+import { addLabelToQuery, addLabelToSelector } from '../add_label_to_query';
 
 
 describe('addLabelToQuery()', () => {
 describe('addLabelToQuery()', () => {
   it('should add label to simple query', () => {
   it('should add label to simple query', () => {
@@ -56,3 +56,16 @@ describe('addLabelToQuery()', () => {
     );
     );
   });
   });
 });
 });
+
+describe('addLabelToSelector()', () => {
+  test('should add a label to an empty selector', () => {
+    expect(addLabelToSelector('{}', 'foo', 'bar')).toBe('{foo="bar"}');
+    expect(addLabelToSelector('', 'foo', 'bar')).toBe('{foo="bar"}');
+  });
+  test('should add a label to a selector', () => {
+    expect(addLabelToSelector('{foo="bar"}', 'baz', '42')).toBe('{baz="42",foo="bar"}');
+  });
+  test('should add a label to a selector with custom operator', () => {
+    expect(addLabelToSelector('{}', 'baz', '42', '!=')).toBe('{baz!="42"}');
+  });
+});

+ 9 - 2
public/app/plugins/datasource/prometheus/specs/language_provider.test.ts

@@ -76,9 +76,16 @@ describe('Language completion provider', () => {
       });
       });
       expect(result.context).toBe('context-range');
       expect(result.context).toBe('context-range');
       expect(result.refresher).toBeUndefined();
       expect(result.refresher).toBeUndefined();
-      expect(result.suggestions).toEqual([
+      expect(result.suggestions).toMatchObject([
         {
         {
-          items: [{ label: '1m' }, { label: '5m' }, { label: '10m' }, { label: '30m' }, { label: '1h' }],
+          items: [
+            { label: '1m' },
+            { label: '5m' },
+            { label: '10m' },
+            { label: '30m' },
+            { label: '1h' },
+            { label: '1d' },
+          ],
           label: 'Range vector',
           label: 'Range vector',
         },
         },
       ]);
       ]);

+ 10 - 1
public/app/types/explore.ts

@@ -1,6 +1,8 @@
 import { Value } from 'slate';
 import { Value } from 'slate';
 
 
 import { DataQuery, RawTimeRange } from './series';
 import { DataQuery, RawTimeRange } from './series';
+import TableModel from 'app/core/table_model';
+import { LogsModel } from 'app/core/logs_model';
 
 
 export interface CompletionItem {
 export interface CompletionItem {
   /**
   /**
@@ -140,6 +142,7 @@ export interface QueryTransaction {
   result?: any; // Table model / Timeseries[] / Logs
   result?: any; // Table model / Timeseries[] / Logs
   resultType: ResultType;
   resultType: ResultType;
   rowIndex: number;
   rowIndex: number;
+  scanning?: boolean;
 }
 }
 
 
 export interface TextMatch {
 export interface TextMatch {
@@ -157,11 +160,16 @@ export interface ExploreState {
   datasourceMissing: boolean;
   datasourceMissing: boolean;
   datasourceName?: string;
   datasourceName?: string;
   exploreDatasources: ExploreDatasource[];
   exploreDatasources: ExploreDatasource[];
-  graphRange: RawTimeRange;
+  graphInterval: number; // in ms
+  graphResult?: any[];
   history: HistoryItem[];
   history: HistoryItem[];
   initialQueries: DataQuery[];
   initialQueries: DataQuery[];
+  logsHighlighterExpressions?: string[];
+  logsResult?: LogsModel;
   queryTransactions: QueryTransaction[];
   queryTransactions: QueryTransaction[];
   range: RawTimeRange;
   range: RawTimeRange;
+  scanning?: boolean;
+  scanRange?: RawTimeRange;
   showingGraph: boolean;
   showingGraph: boolean;
   showingLogs: boolean;
   showingLogs: boolean;
   showingStartPage?: boolean;
   showingStartPage?: boolean;
@@ -169,6 +177,7 @@ export interface ExploreState {
   supportsGraph: boolean | null;
   supportsGraph: boolean | null;
   supportsLogs: boolean | null;
   supportsLogs: boolean | null;
   supportsTable: boolean | null;
   supportsTable: boolean | null;
+  tableResult?: TableModel;
 }
 }
 
 
 export interface ExploreUrlState {
 export interface ExploreUrlState {

+ 5 - 4
public/sass/_variables.dark.scss

@@ -44,9 +44,10 @@ $brand-success: $green;
 $brand-warning: $brand-primary;
 $brand-warning: $brand-primary;
 $brand-danger: $red;
 $brand-danger: $red;
 
 
-$query-red: $red;
-$query-green: $green;
-$query-purple: $purple;
+$query-red: #e24d42;
+$query-green: #74e680;
+$query-purple: #fe85fc;
+$query-keyword: #66d9ef;
 $query-orange: $orange;
 $query-orange: $orange;
 
 
 // Status colors
 // Status colors
@@ -205,7 +206,7 @@ $search-filter-box-bg: $gray-blue;
 // Typeahead
 // Typeahead
 $typeahead-shadow: 0 5px 10px 0 $black;
 $typeahead-shadow: 0 5px 10px 0 $black;
 $typeahead-selected-bg: $dark-4;
 $typeahead-selected-bg: $dark-4;
-$typeahead-selected-color: $blue;
+$typeahead-selected-color: $yellow;
 
 
 // Dropdowns
 // Dropdowns
 // -------------------------
 // -------------------------

部分文件因为文件数量过多而无法显示