Просмотр исходного кода

Merge pull request #2 from grafana/master

Update master
Pavel 7 лет назад
Родитель
Commit
bf55f36e95
57 измененных файлов с 728 добавлено и 273 удалено
  1. 20 10
      CHANGELOG.md
  2. 2 0
      README.md
  3. 2 2
      UPGRADING_DEPENDENCIES.md
  4. 1 1
      conf/provisioning/datasources/sample.yaml
  5. 1 1
      docs/sources/administration/provisioning.md
  6. 1 1
      docs/sources/alerting/notifications.md
  7. 2 2
      docs/sources/auth/ldap.md
  8. 2 2
      docs/sources/contribute/cla.md
  9. 1 1
      docs/sources/enterprise/index.md
  10. 3 3
      docs/sources/features/datasources/mysql.md
  11. 1 1
      docs/sources/guides/whats-new-in-v4.md
  12. 1 1
      docs/sources/http_api/index.md
  13. 39 39
      docs/sources/http_api/org.md
  14. 3 3
      docs/sources/http_api/user.md
  15. 2 1
      docs/versions.json
  16. 2 2
      latest.json
  17. 1 1
      package.json
  18. 8 0
      pkg/api/password.go
  19. 3 3
      pkg/api/pluginproxy/ds_auth_provider.go
  20. 2 0
      pkg/tsdb/cloudwatch/metric_find_query.go
  21. 1 0
      pkg/tsdb/elasticsearch/client/models.go
  22. 4 0
      pkg/tsdb/elasticsearch/time_series_query.go
  23. 4 0
      pkg/tsdb/mssql/macros.go
  24. 14 0
      pkg/tsdb/mssql/macros_test.go
  25. 4 0
      pkg/tsdb/mysql/macros.go
  26. 14 0
      pkg/tsdb/mysql/macros_test.go
  27. 2 2
      pkg/tsdb/mysql/mysql_test.go
  28. 4 0
      pkg/tsdb/postgres/macros.go
  29. 14 0
      pkg/tsdb/postgres/macros_test.go
  30. 0 2
      pkg/tsdb/sql_engine.go
  31. 0 14
      pkg/tsdb/sql_engine_test.go
  32. 4 0
      public/app/core/controllers/reset_password_ctrl.ts
  33. 12 4
      public/app/core/logs_model.ts
  34. 1 1
      public/app/core/utils/explore.test.ts
  35. 33 1
      public/app/core/utils/explore.ts
  36. 2 2
      public/app/core/utils/kbn.ts
  37. 71 48
      public/app/features/explore/Explore.tsx
  38. 190 49
      public/app/features/explore/Logs.tsx
  39. 1 1
      public/app/partials/login.html
  40. 8 1
      public/app/partials/reset_password.html
  41. 10 0
      public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html
  42. 4 0
      public/app/plugins/datasource/elasticsearch/query_builder.ts
  43. 17 14
      public/app/plugins/datasource/logging/datasource.ts
  44. 29 2
      public/app/plugins/datasource/logging/language_provider.test.ts
  45. 7 12
      public/app/plugins/datasource/logging/language_provider.ts
  46. 12 1
      public/app/plugins/datasource/logging/query_utils.test.ts
  47. 17 0
      public/app/plugins/datasource/logging/query_utils.ts
  48. 15 15
      public/app/plugins/datasource/logging/result_transformer.test.ts
  49. 22 7
      public/app/plugins/datasource/logging/result_transformer.ts
  50. 28 0
      public/app/plugins/datasource/logging/syntax.ts
  51. 3 3
      public/app/plugins/datasource/mysql/partials/annotations.editor.html
  52. 3 3
      public/app/plugins/datasource/mysql/partials/query.editor.html
  53. 5 3
      public/app/plugins/datasource/prometheus/add_label_to_query.ts
  54. 14 1
      public/app/plugins/datasource/prometheus/specs/add_label_to_query.test.ts
  55. 6 1
      public/app/types/explore.ts
  56. 55 11
      public/sass/pages/_explore.scss
  57. 1 1
      scripts/build/publish_test.go

+ 20 - 10
CHANGELOG.md

@@ -1,6 +1,16 @@
-# 5.4.0 (unreleased)
+# 5.5.0 (unreleased)
+
+### Minor
+
+* **Elasticsearch**: Add support for offset in date histogram aggregation [#12653](https://github.com/grafana/grafana/issues/12653), thx [@mattiarossi](https://github.com/mattiarossi)
+* **Auth**: Prevent password reset when login form is disabled or either LDAP or Auth Proxy is enabled [#14246](https://github.com/grafana/grafana/issues/14246), thx [@SilverFire](https://github.com/SilverFire)
+* **Dataproxy**: Override incoming Authorization header [#13815](https://github.com/grafana/grafana/issues/13815), thx [@kornholi](https://github.com/kornholi)
+
+# 5.4.0 (2018-12-03)
 
 * **Cloudwatch**: Fix invalid time range causes segmentation fault [#14150](https://github.com/grafana/grafana/issues/14150)
+* **Cloudwatch**: AWS/CodeBuild metrics and dimensions [#14167](https://github.com/grafana/grafana/issues/14167), thx [@mmcoltman](https://github.com/mmcoltman)
+* **MySQL**: Fix `$__timeFrom()` and `$__timeTo()` should respect local time zone [#14228](https://github.com/grafana/grafana/issues/14228)
 
 ### 5.4.0-beta1 fixes
 * **Graph**: Fix legend always visible even if configured to be hidden [#14144](https://github.com/grafana/grafana/issues/14144)
@@ -160,7 +170,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-3-3-and-4-
 * **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
 * **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
 * **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079)
-* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
+* **Provisioning**: Should allow one default datasource per organization [#12229](https://github.com/grafana/grafana/issues/12229)
 * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
 * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
@@ -283,7 +293,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 * **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
 * **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
 * **Units**: W/m2 (energy), l/h (flow) and kPa (pressure) [#11233](https://github.com/grafana/grafana/pull/11233), thx [@flopp999](https://github.com/flopp999)
-* **Units**: Litre/min (flow) and milliLitre/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999)
+* **Units**: Liter/min (flow) and milliLiter/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999)
 * **Alerting**: Fix mobile notifications for Microsoft Teams alert notifier [#11484](https://github.com/grafana/grafana/pull/11484), thx [@manacker](https://github.com/manacker)
 * **Influxdb**: Add support for mode function [#12286](https://github.com/grafana/grafana/issues/12286)
 * **Cloudwatch**: Fixes panic caused by bad timerange settings [#12199](https://github.com/grafana/grafana/issues/12199)
@@ -418,7 +428,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 * **Units**: Use B/s instead Bps for Bytes per second [#9342](https://github.com/grafana/grafana/pull/9342), thx [@mayli](https://github.com/mayli)
 * **Units**: Radiation units [#11001](https://github.com/grafana/grafana/issues/11001), thx [@victorclaessen](https://github.com/victorclaessen)
 * **Units**: Timeticks unit [#11183](https://github.com/grafana/grafana/pull/11183), thx [@jtyr](https://github.com/jtyr)
-* **Units**: Concentration units and "Normal cubic metre" [#11211](https://github.com/grafana/grafana/issues/11211), thx [@flopp999](https://github.com/flopp999)
+* **Units**: Concentration units and "Normal cubic meter" [#11211](https://github.com/grafana/grafana/issues/11211), thx [@flopp999](https://github.com/flopp999)
 * **Units**: New currency - Czech koruna [#11384](https://github.com/grafana/grafana/pull/11384), thx [@Rohlik](https://github.com/Rohlik)
 * **Avatar**: Fix DISABLE_GRAVATAR option [#11095](https://github.com/grafana/grafana/issues/11095)
 * **Heatmap**: Disable log scale when using time time series buckets [#10792](https://github.com/grafana/grafana/issues/10792)
@@ -735,7 +745,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
 ## Enhancements
 
 * **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
-* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
+* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
 * **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
 - **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
 
@@ -900,7 +910,7 @@ Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
 * **InfluxDB**: Influxb Datasource test passes even if the Database doesn't exist [#7864](https://github.com/grafana/grafana/issues/7864)
 * **Prometheus**: Displaying Prometheus annotations is incredibly slow [#7750](https://github.com/grafana/grafana/issues/7750), thx [@mtanda](https://github.com/mtanda)
 * **Graphite**: grafana generates empty find query to graphite -> 422 Unprocessable Entity [#7740](https://github.com/grafana/grafana/issues/7740)
-* **Admin**: make organisation filter case insensitive [#8194](https://github.com/grafana/grafana/issues/8194), thx [@Alexander-N](https://github.com/Alexander-N)
+* **Admin**: make organization filter case insensitive [#8194](https://github.com/grafana/grafana/issues/8194), thx [@Alexander-N](https://github.com/Alexander-N)
 
 ## Changes
 * **Elasticsearch**: Changed elasticsearch Terms aggregation to default to Min Doc Count to 1, and sort order to Top [#8321](https://github.com/grafana/grafana/issues/8321)
@@ -1028,7 +1038,7 @@ Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
 * **CLI**: Make it possible to reset the admin password using the grafana-cli. [#5479](https://github.com/grafana/grafana/issues/5479)
 * **Influxdb**: Support multiple tags in InfluxDB annotations. [#4550](https://github.com/grafana/grafana/pull/4550), thx [@adrianlzt](https://github.com/adrianlzt)
 * **LDAP**:  Basic Auth now supports LDAP username and password, [#6940](https://github.com/grafana/grafana/pull/6940), thx [@utkarshcmu](https://github.com/utkarshcmu)
-* **LDAP**: Now works with Auth Proxy, role and organisation mapping & sync will regularly be performed. [#6895](https://github.com/grafana/grafana/pull/6895), thx [@Seuf](https://github.com/seuf)
+* **LDAP**: Now works with Auth Proxy, role and organization mapping & sync will regularly be performed. [#6895](https://github.com/grafana/grafana/pull/6895), thx [@Seuf](https://github.com/seuf)
 * **Alerting**: Adds OK as no data option. [#6866](https://github.com/grafana/grafana/issues/6866)
 * **Alert list**: Order alerts based on state. [#6676](https://github.com/grafana/grafana/issues/6676)
 * **Alerting**: Add api endpoint for pausing all alerts. [#6589](https://github.com/grafana/grafana/issues/6589)
@@ -1167,7 +1177,7 @@ due to too many connections/file handles on the data source backend. This proble
 * **Scripts**: Use restart instead of start for deb package script, closes [#5282](https://github.com/grafana/grafana/pull/5282)
 * **Logging**: Moved to structured logging lib, and moved to component specific level filters via config file, closes [#4590](https://github.com/grafana/grafana/issues/4590)
 * **OpenTSDB**: Support nested template variables in tag_values function, closes [#4398](https://github.com/grafana/grafana/issues/4398)
-* **Datasource**: Pending data source requests are cancelled before new ones are issues (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321)
+* **Datasource**: Pending data source requests are canceled before new ones are issued (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321)
 
 ### Breaking changes
 * **Logging** : Changed default logging output format (now structured into message, and key value pairs, with logger key acting as component). You can also no change in config to json log output.
@@ -1871,7 +1881,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele
 
 #### Fixes
 - [Issue #126](https://github.com/grafana/grafana/issues/126). Graphite query lexer change, can now handle regex parameters for aliasSub function
-- [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having muliple nested filters now works better. Options are now reloaded correctly and there are no multiple renders/refresh in between.
+- [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having multiple nested filters now works better. Options are now reloaded correctly and there are no multiple renders/refresh in between.
 - [Issue #412](https://github.com/grafana/grafana/issues/412). After a filter option is changed and a nested template param is reloaded, if the current value exists after the options are reloaded the current selected value is kept.
 - [Issue #460](https://github.com/grafana/grafana/issues/460). Legend Current value did not display when value was zero
 - [Issue #328](https://github.com/grafana/grafana/issues/328). Fix to series toggling bug that caused annotations to be hidden when toggling/hiding series.
@@ -1906,7 +1916,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele
 - Graphite errors are now much easier to see and troubleshoot with the new inspector ([Issue #265](https://github.com/grafana/grafana/issues/265))
 - Use influxdb aliases to distinguish between multiple columns ([Issue #283](https://github.com/grafana/grafana/issues/283))
 - Correction to ms axis formater, now formats days correctly. ([Issue #189](https://github.com/grafana/grafana/issues/189))
-- Css fix for Firefox and using top menu dropdowns in panel fullscren / edit mode ([Issue #106](https://github.com/grafana/grafana/issues/106))
+- Css fix for Firefox and using top menu dropdowns in panel fullscreen / edit mode ([Issue #106](https://github.com/grafana/grafana/issues/106))
 - Browser page title is now Grafana - {{dashboard title}} ([Issue #294](https://github.com/grafana/grafana/issues/294))
 - Disable auto refresh zooming in (every time you change to an absolute time range), refresh will be restored when you change time range back to relative ([Issue #282](https://github.com/grafana/grafana/issues/282))
 - More graphite functions

+ 2 - 0
README.md

@@ -90,6 +90,8 @@ Choose this option to build on platforms other than linux/amd64 and/or not have
 
 The resulting image will be tagged as `grafana/grafana:dev`
 
+Notice: If you are using Docker for macOS, be sure to set the memory limit to more than 2 GiB (at Docker -> Preferences -> Advanced), otherwise you may fail at `grunt build`
+
 ### Dev config
 
 Create a custom.ini in the conf directory to override default configuration options.

+ 2 - 2
UPGRADING_DEPENDENCIES.md

@@ -47,7 +47,7 @@ Our builds run on CircleCI through our build script.
 
 ### grafana/build-container
 
-The main build step (in CircleCI) is built using a custom build container that comes pre-baked with some of the neccesary dependencies.
+The main build step (in CircleCI) is built using a custom build container that comes pre-baked with some of the necessary dependencies.
 
 Link: [grafana-build-container](https://github.com/grafana/grafana-build-container)
 
@@ -86,4 +86,4 @@ There is a Docker build for Grafana in the root of the project that allows anyon
 
 ### Local developer environments
 
-Please send out a notice in the grafana-dev slack channel when updating Go or Node.js to make it easier for everyone to update their local developer environments.
+Please send out a notice in the grafana-dev slack channel when updating Go or Node.js to make it easier for everyone to update their local developer environments.

+ 1 - 1
conf/provisioning/datasources/sample.yaml

@@ -7,7 +7,7 @@ apiVersion: 1
 #     orgId: 1
 
 # # list of datasources to insert/update depending
-# # on what's available in the datbase
+# # on what's available in the database
 #datasources:
 #   # <string, required> name of the datasource. Required
 # - name: Graphite

+ 1 - 1
docs/sources/administration/provisioning.md

@@ -230,4 +230,4 @@ By default Grafana will delete dashboards in the database if the file is removed
 > **Note.** Provisioning allows you to overwrite existing dashboards
 > which leads to problems if you re-use settings that are supposed to be unique.
 > Be careful not to re-use the same `title` multiple times within a folder
-> or `uid` within the same installation as this will cause weird behaviours.
+> or `uid` within the same installation as this will cause weird behaviors.

+ 1 - 1
docs/sources/alerting/notifications.md

@@ -136,7 +136,7 @@ In DingTalk PC Client:
 
 2. Click "Robot Manage" item in the pop menu, there will be a new panel call "Robot Manage".
 
-3. In the  "Robot Manage" panel, select "customised: customised robot with Webhook".
+3. In the  "Robot Manage" panel, select "customized: customized robot with Webhook".
 
 4. In the next new panel named "robot detail", click "Add" button.
 

+ 2 - 2
docs/sources/auth/ldap.md

@@ -163,9 +163,9 @@ org_role = "Viewer"
 Setting | Required | Description | Default
 ------------ | ------------ | ------------- | -------------
 `group_dn` | Yes | LDAP distinguished name (DN) of LDAP group. If you want to match all (or no LDAP groups) then you can use wildcard (`"*"`) |
-`org_role` | Yes | Assign users of `group_dn` the organisation role `"Admin"`, `"Editor"` or `"Viewer"` |
+`org_role` | Yes | Assign users of `group_dn` the organization role `"Admin"`, `"Editor"` or `"Viewer"` |
 `org_id` | No | The Grafana organization database id. Setting this allows for multiple group_dn's to be assigned to the same `org_role` provided the `org_id` differs | `1` (default org id)
-`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organisations and users. Available in Grafana v5.3 and above | `false`
+`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organizations and users. Available in Grafana v5.3 and above | `false`
 
 ### Nested/recursive group membership
 

+ 2 - 2
docs/sources/contribute/cla.md

@@ -1,6 +1,6 @@
 +++
-title = "Contributor Licence Agreement (CLA)"
-description = "Contributor Licence Agreement (CLA)"
+title = "Contributor License Agreement (CLA)"
+description = "Contributor License Agreement (CLA)"
 type = "docs"
 aliases = ["/project/cla", "docs/contributing/cla.html"]
 [menu.docs]

+ 1 - 1
docs/sources/enterprise/index.md

@@ -31,7 +31,7 @@ Datasource permissions allow you to restrict query access to only specific Teams
 
 ### Premium Plugins
 
-With a Grafana Enterprise licence you will get access to premium plugins, including:
+With a Grafana Enterprise license you will get access to premium plugins, including:
 
 * [Splunk](https://grafana.com/plugins/grafana-splunk-datasource)
 * [AppDynamics](https://grafana.com/plugins/dlopes7-appdynamics-datasource)

+ 3 - 3
docs/sources/features/datasources/mysql.md

@@ -133,9 +133,9 @@ Macro example | Description
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN FROM_UNIXTIME(1494410783) AND FROM_UNIXTIME(1494410983)*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494410983)*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
 *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.

+ 1 - 1
docs/sources/guides/whats-new-in-v4.md

@@ -134,7 +134,7 @@ continue work on a `build mode` for a future release.
 The new row menu automatically slides out when you mouse over the edge of the row. You no longer need
 to hover over the small green icon and then click it to expand the row menu.
 
-There are some minor improvements to drag and drop behaviour. Now when dragging a panel from one row
+There are some minor improvements to drag and drop behavior. Now when dragging a panel from one row
 to another you will insert the panel and Grafana will automatically make room for it.
 When you drag a panel within a row you will simply reorder the panels.
 

+ 1 - 1
docs/sources/http_api/index.md

@@ -26,7 +26,7 @@ dashboards, creating users and updating data sources.
 * [Folder Permissions API]({{< relref "http_api/folder_permissions.md" >}})
 * [Folder/dashboard search API]({{< relref "/http_api/folder_dashboard_search.md" >}})
 * [Data Source API]({{< relref "http_api/data_source.md" >}})
-* [Organisation API]({{< relref "http_api/org.md" >}})
+* [Organization API]({{< relref "http_api/org.md" >}})
 * [Snapshot API]({{< relref "http_api/snapshot.md" >}})
 * [Annotations API]({{< relref "http_api/annotations.md" >}})
 * [Alerting API]({{< relref "http_api/alerting.md" >}})

+ 39 - 39
docs/sources/http_api/org.md

@@ -1,24 +1,24 @@
 +++
-title = "Organisation HTTP API "
-description = "Grafana Organisation HTTP API"
-keywords = ["grafana", "http", "documentation", "api", "organisation"]
-aliases = ["/http_api/organisation/"]
+title = "Organization HTTP API "
+description = "Grafana Organization HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "organization"]
+aliases = ["/http_api/organization/"]
 type = "docs"
 [menu.docs]
-name = "Organisation"
+name = "Organization"
 parent = "http_api"
 +++
 
 
-# Organisation API
+# Organization API
 
-The Organisation HTTP API is divided in two resources, `/api/org` (current organisation)
-and `/api/orgs` (admin organisations). One big difference between these are that
-the admin of all organisations API only works with basic authentication, see [Admin Organisations API](#admin-organisations-api) for more information.
+The Organization HTTP API is divided in two resources, `/api/org` (current organization)
+and `/api/orgs` (admin organizations). One big difference between these are that
+the admin of all organizations API only works with basic authentication, see [Admin Organizations API](#admin-organizations-api) for more information.
 
-## Current Organisation API
+## Current Organization API
 
-### Get current Organisation
+### Get current Organization
 
 `GET /api/org/`
 
@@ -43,7 +43,7 @@ Content-Type: application/json
 }
 ```
 
-### Get all users within the current organisation
+### Get all users within the current organization
 
 `GET /api/org/users`
 
@@ -99,7 +99,7 @@ Content-Type: application/json
 {"message":"Organization user updated"}
 ```
 
-### Delete user in current organisation
+### Delete user in current organization
 
 `DELETE /api/org/users/:userId`
 
@@ -121,7 +121,7 @@ Content-Type: application/json
 {"message":"User removed from organization"}
 ```
 
-### Update current Organisation
+### Update current Organization
 
 `PUT /api/org`
 
@@ -147,11 +147,11 @@ Content-Type: application/json
 {"message":"Organization updated"}
 ```
 
-### Add a new user to the current organisation
+### Add a new user to the current organization
 
 `POST /api/org/users`
 
-Adds a global user to the current organisation.
+Adds a global user to the current organization.
 
 **Example Request**:
 
@@ -176,19 +176,19 @@ Content-Type: application/json
 {"message":"User added to organization"}
 ```
 
-## Admin Organisations API
+## Admin Organizations API
 
-The Admin Organisations HTTP API does not currently work with an API Token. API Tokens are currently
+The Admin Organizations HTTP API does not currently work with an API Token. API Tokens are currently
 only linked to an organization and an organization role. They cannot be given the permission of server
 admin, only users can be given that permission. So in order to use these API calls you will have to
 use Basic Auth and the Grafana user must have the Grafana Admin permission (The default admin user
 is called `admin` and has permission to use this API).
 
-### Get Organisation by Id
+### Get Organization by Id
 
 `GET /api/orgs/:orgId`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -217,11 +217,11 @@ Content-Type: application/json
   }
 }
 ```
-### Get Organisation by Name
+### Get Organization by Name
 
 `GET /api/orgs/name/:orgName`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -251,11 +251,11 @@ Content-Type: application/json
 }
 ```
 
-### Create Organisation
+### Create Organization
 
 `POST /api/orgs`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -284,11 +284,11 @@ Content-Type: application/json
 }
 ```
 
-### Search all Organisations
+### Search all Organizations
 
 `GET /api/orgs`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -314,12 +314,12 @@ Content-Type: application/json
 ]
 ```
 
-### Update Organisation
+### Update Organization
 
 `PUT /api/orgs/:orgId`
 
-Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Update Organization, fields *Address 1*, *Address 2*, *City* are not implemented yet.
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -342,11 +342,11 @@ Content-Type: application/json
 {"message":"Organization updated"}
 ```
 
-### Delete Organisation
+### Delete Organization
 
 `DELETE /api/orgs/:orgId`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -364,11 +364,11 @@ Content-Type: application/json
 {"message":"Organization deleted"}
 ```
 
-### Get Users in Organisation
+### Get Users in Organization
 
 `GET /api/orgs/:orgId/users`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -397,11 +397,11 @@ Content-Type: application/json
 ]
 ```
 
-### Add User in Organisation
+### Add User in Organization
 
 `POST /api/orgs/:orgId/users`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -425,11 +425,11 @@ Content-Type: application/json
 {"message":"User added to organization"}
 ```
 
-### Update Users in Organisation
+### Update Users in Organization
 
 `PATCH /api/orgs/:orgId/users/:userId`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 
@@ -452,11 +452,11 @@ Content-Type: application/json
 {"message":"Organization user updated"}
 ```
 
-### Delete User in Organisation
+### Delete User in Organization
 
 `DELETE /api/orgs/:orgId/users/:userId`
 
-Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+Only works with Basic Authentication (username and password), see [introduction](#admin-organizations-api).
 
 **Example Request**:
 

+ 3 - 3
docs/sources/http_api/user.md

@@ -196,7 +196,7 @@ Content-Type: application/json
 {"message":"User updated"}
 ```
 
-## Get Organisations for user
+## Get Organizations for user
 
 `GET /api/users/:id/orgs`
 
@@ -367,11 +367,11 @@ Content-Type: application/json
 {"message":"Active organization changed"}
 ```
 
-## Organisations of the actual User
+## Organizations of the actual User
 
 `GET /api/user/orgs`
 
-Return a list of all organisations of the current user.
+Return a list of all organizations of the current user.
 
 **Example Request**:
 

+ 2 - 1
docs/versions.json

@@ -1,5 +1,6 @@
 [
-  { "version": "v5.3", "path": "/", "archived": false, "current": true },
+  { "version": "v5.4", "path": "/", "archived": false, "current": true },
+  { "version": "v5.3", "path": "/v5.3", "archived": true },
   { "version": "v5.2", "path": "/v5.2", "archived": true },
   { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
-  "stable": "5.3.4",
-  "testing": "5.3.4"
+  "stable": "5.4.0",
+  "testing": "5.4.0"
 }

+ 1 - 1
package.json

@@ -4,7 +4,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "5.4.0-pre1",
+  "version": "5.5.0-pre1",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"

+ 8 - 0
pkg/api/password.go

@@ -4,10 +4,18 @@ import (
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util"
 )
 
 func SendResetPasswordEmail(c *m.ReqContext, form dtos.SendResetPasswordEmailForm) Response {
+	if setting.LdapEnabled || setting.AuthProxyEnabled {
+		return Error(401, "Not allowed to reset password when LDAP or Auth Proxy is enabled", nil)
+	}
+	if setting.DisableLoginForm {
+		return Error(401, "Not allowed to reset password when login form is disabled", nil)
+	}
+
 	userQuery := m.GetUserByLoginQuery{LoginOrEmail: form.UserOrEmail}
 
 	if err := bus.Dispatch(&userQuery); err != nil {

+ 3 - 3
pkg/api/pluginproxy/ds_auth_provider.go

@@ -51,7 +51,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 		if token, err := tokenProvider.getAccessToken(data); err != nil {
 			logger.Error("Failed to get access token", "error", err)
 		} else {
-			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 		}
 	}
 
@@ -60,7 +60,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 		if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
 			logger.Error("Failed to get access token", "error", err)
 		} else {
-			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 		}
 	}
 
@@ -73,7 +73,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 			if err != nil {
 				logger.Error("Failed to get default access token from meta data server", "error", err)
 			} else {
-				req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
 			}
 		}
 	}

+ 2 - 0
pkg/tsdb/cloudwatch/metric_find_query.go

@@ -47,6 +47,7 @@ func init() {
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
 		"AWS/CloudHSM":       {"HsmUnhealthy", "HsmTemperature", "HsmKeysSessionOccupied", "HsmKeysTokenOccupied", "HsmSslCtxsOccupied", "HsmSessionCount", "HsmUsersAvailable", "HsmUsersMax", "InterfaceEth2OctetsInput", "InterfaceEth2OctetsOutput"},
+		"AWS/CodeBuild":      {"BuildDuration", "Builds", "DownloadSourceDuration", "Duration", "FailedBuilds", "FinalizingDuration", "InstallDuration", "PostBuildDuration", "PreBuildDuration", "ProvisioningDuration", "QueuedDuration", "SubmittedDuration", "SucceededBuilds", "UploadArtifactsDuration"},
 		"AWS/Connect":        {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
 		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
 		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
@@ -123,6 +124,7 @@ func init() {
 		"AWS/CloudFront":       {"DistributionId", "Region"},
 		"AWS/CloudSearch":      {},
 		"AWS/CloudHSM":         {"Region", "ClusterId", "HsmId"},
+		"AWS/CodeBuild":        {"ProjectName"},
 		"AWS/Connect":          {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
 		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 		"AWS/DX":               {"ConnectionId"},

+ 1 - 0
pkg/tsdb/elasticsearch/client/models.go

@@ -240,6 +240,7 @@ type DateHistogramAgg struct {
 	Missing        *string         `json:"missing,omitempty"`
 	ExtendedBounds *ExtendedBounds `json:"extended_bounds"`
 	Format         string          `json:"format"`
+	Offset         string          `json:"offset,omitempty"`
 }
 
 // FiltersAggregation represents a filters aggregation

+ 4 - 0
pkg/tsdb/elasticsearch/time_series_query.go

@@ -134,6 +134,10 @@ func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFro
 			a.Interval = "$__interval"
 		}
 
+		if offset, err := bucketAgg.Settings.Get("offset").String(); err == nil {
+			a.Offset = offset
+		}
+
 		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
 			a.Missing = &missing
 		}

+ 4 - 0
pkg/tsdb/mssql/macros.go

@@ -66,6 +66,10 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

+ 14 - 0
pkg/tsdb/mssql/macros_test.go

@@ -52,6 +52,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+			})
+
 			Convey("interpolate __timeGroup function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)

+ 4 - 0
pkg/tsdb/mysql/macros.go

@@ -61,6 +61,10 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 
 		return fmt.Sprintf("%s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", args[0], m.timeRange.GetFromAsSecondsEpoch(), m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__timeFrom":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetFromAsSecondsEpoch()), nil
+	case "__timeTo":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

+ 14 - 0
pkg/tsdb/mysql/macros_test.go

@@ -63,6 +63,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
 			})
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+			})
+
 			Convey("interpolate __unixEpochFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 				So(err, ShouldBeNil)

+ 2 - 2
pkg/tsdb/mysql/mysql_test.go

@@ -761,7 +761,7 @@ func TestMySQL(t *testing.T) {
 					{
 						DataSource: &models.DataSource{JsonData: simplejson.New()},
 						Model: simplejson.NewFromAny(map[string]interface{}{
-							"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+							"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeTo() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
 							"format": "time_series",
 						}),
 						RefId: "A",
@@ -773,7 +773,7 @@ func TestMySQL(t *testing.T) {
 			So(err, ShouldBeNil)
 			queryResult := resp.Results["A"]
 			So(queryResult.Error, ShouldBeNil)
-			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > FROM_UNIXTIME(1521118500) OR time < FROM_UNIXTIME(1521118800) OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
 
 		})
 

+ 4 - 0
pkg/tsdb/postgres/macros.go

@@ -87,6 +87,10 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		}
 
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 14 - 0
pkg/tsdb/postgres/macros_test.go

@@ -44,6 +44,20 @@ func TestMacroEngine(t *testing.T) {
 				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
+			Convey("interpolate __timeFrom function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+			})
+
+			Convey("interpolate __timeTo function", func() {
+				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+				So(err, ShouldBeNil)
+
+				So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+			})
+
 			Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 
 				sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")

+ 0 - 2
pkg/tsdb/sql_engine.go

@@ -196,8 +196,6 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string,
 
 	sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
 	sql = strings.Replace(sql, "$__interval", interval.Text, -1)
-	sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
-	sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
 	sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
 	sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
 

+ 0 - 14
pkg/tsdb/sql_engine_test.go

@@ -44,20 +44,6 @@ func TestSqlEngine(t *testing.T) {
 				So(sql, ShouldEqual, "select 60000 ")
 			})
 
-			Convey("interpolate __timeFrom function", func() {
-				sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-			})
-
-			Convey("interpolate __timeTo function", func() {
-				sql, err := Interpolate(query, timeRange, "select $__timeTo()")
-				So(err, ShouldBeNil)
-
-				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-			})
-
 			Convey("interpolate __unixEpochFrom function", func() {
 				sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
 				So(err, ShouldBeNil)

+ 4 - 0
public/app/core/controllers/reset_password_ctrl.ts

@@ -1,4 +1,5 @@
 import coreModule from '../core_module';
+import config from 'app/core/config';
 
 export class ResetPasswordCtrl {
   /** @ngInject */
@@ -6,6 +7,9 @@ export class ResetPasswordCtrl {
     contextSrv.sidemenu = false;
     $scope.formModel = {};
     $scope.mode = 'send';
+    $scope.ldapEnabled = config.ldapEnabled;
+    $scope.authProxyEnabled = config.authProxyEnabled;
+    $scope.disableLoginForm = config.disableLoginForm;
 
     const params = $location.search();
     if (params.code) {

+ 12 - 4
public/app/core/logs_model.ts

@@ -35,22 +35,30 @@ export interface LogRow {
   duplicates?: number;
   entry: string;
   key: string; // timestamp + labels
-  labels: string;
+  labels: LogsStreamLabels;
   logLevel: LogLevel;
   searchWords?: string[];
   timestamp: string; // ISO with nanosec precision
   timeFromNow: string;
   timeEpochMs: number;
   timeLocal: string;
-  uniqueLabels?: string;
+  uniqueLabels?: LogsStreamLabels;
+}
+
+export enum LogsMetaKind {
+  Number,
+  String,
+  LabelsMap,
 }
 
 export interface LogsMetaItem {
   label: string;
-  value: string;
+  value: string | number | LogsStreamLabels;
+  kind: LogsMetaKind;
 }
 
 export interface LogsModel {
+  id: string; // Identify one logs result from another
   meta?: LogsMetaItem[];
   rows: LogRow[];
   series?: TimeSeries[];
@@ -61,7 +69,7 @@ export interface LogsStream {
   entries: LogsStreamEntry[];
   search?: string;
   parsedLabels?: LogsStreamLabels;
-  uniqueLabels?: string;
+  uniqueLabels?: LogsStreamLabels;
 }
 
 export interface LogsStreamEntry {

+ 1 - 1
public/app/core/utils/explore.test.ts

@@ -16,7 +16,7 @@ const DEFAULT_EXPLORE_STATE: ExploreState = {
   datasourceMissing: false,
   datasourceName: '',
   exploreDatasources: [],
-  graphRange: DEFAULT_RANGE,
+  graphInterval: 1000,
   history: [],
   initialQueries: [],
   queryTransactions: [],

+ 33 - 1
public/app/core/utils/explore.ts

@@ -1,7 +1,10 @@
+import _ from 'lodash';
+
 import { renderUrl } from 'app/core/utils/url';
-import { ExploreState, ExploreUrlState, HistoryItem } from 'app/types/explore';
+import { ExploreState, ExploreUrlState, HistoryItem, QueryTransaction } from 'app/types/explore';
 import { DataQuery, RawTimeRange } from 'app/types/series';
 
+import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
 import kbn from 'app/core/utils/kbn';
 import colors from 'app/core/utils/colors';
 import TimeSeries from 'app/core/time_series2';
@@ -133,6 +136,35 @@ export function hasNonEmptyQuery(queries: DataQuery[]): boolean {
   return queries.some(query => Object.keys(query).length > 2);
 }
 
+export function calculateResultsFromQueryTransactions(
+  queryTransactions: QueryTransaction[],
+  datasource: any,
+  graphInterval: number
+) {
+  const graphResult = _.flatten(
+    queryTransactions.filter(qt => qt.resultType === 'Graph' && qt.done && qt.result).map(qt => qt.result)
+  );
+  const tableResult = mergeTablesIntoModel(
+    new TableModel(),
+    ...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done && qt.result).map(qt => qt.result)
+  );
+  const logsResult =
+    datasource && datasource.mergeStreams
+      ? datasource.mergeStreams(
+          _.flatten(
+            queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
+          ),
+          graphInterval
+        )
+      : undefined;
+
+  return {
+    graphResult,
+    tableResult,
+    logsResult,
+  };
+}
+
 export function getIntervals(
   range: RawTimeRange,
   datasource,

+ 2 - 2
public/app/core/utils/kbn.ts

@@ -590,8 +590,8 @@ kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms');
 kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
 kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
 kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
-kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('l/min');
-kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('mL/min', -1);
+kbn.valueFormats.flowlpm = kbn.formatBuilders.fixedUnit('l/min');
+kbn.valueFormats.flowmlpm = kbn.formatBuilders.fixedUnit('mL/min');
 
 // Angle
 kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');

+ 71 - 48
public/app/features/explore/Explore.tsx

@@ -16,6 +16,7 @@ import { RawTimeRange, DataQuery } from 'app/types/series';
 import store from 'app/core/store';
 import {
   DEFAULT_RANGE,
+  calculateResultsFromQueryTransactions,
   ensureQueries,
   getIntervals,
   generateKey,
@@ -28,7 +29,7 @@ import ResetStyles from 'app/core/components/Picker/ResetStyles';
 import PickerOption from 'app/core/components/Picker/PickerOption';
 import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
 import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
-import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
+import TableModel from 'app/core/table_model';
 import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
 
 import Panel from './Panel';
@@ -115,6 +116,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       const { datasource, queries, range } = props.urlState as ExploreUrlState;
       initialQueries = ensureQueries(queries);
       const initialRange = range || { ...DEFAULT_RANGE };
+      // Millies step for helper bar charts
+      const initialGraphInterval = 15 * 1000;
       this.state = {
         datasource: null,
         datasourceError: null,
@@ -122,9 +125,11 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         datasourceMissing: false,
         datasourceName: datasource,
         exploreDatasources: [],
-        graphRange: initialRange,
+        graphInterval: initialGraphInterval,
+        graphResult: [],
         initialQueries,
         history: [],
+        logsResult: null,
         queryTransactions: [],
         range: initialRange,
         scanning: false,
@@ -135,6 +140,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         supportsGraph: null,
         supportsLogs: null,
         supportsTable: null,
+        tableResult: new TableModel(),
       };
     }
     this.modifiedQueries = initialQueries.slice();
@@ -176,6 +182,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
   }
 
   async setDatasource(datasource: any, origin?: DataSource) {
+    const { initialQueries, range } = this.state;
+
     const supportsGraph = datasource.meta.metrics;
     const supportsLogs = datasource.meta.logs;
     const supportsTable = datasource.meta.metrics;
@@ -220,7 +228,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     }
 
     // Reset edit state with new queries
-    const nextQueries = this.state.initialQueries.map((q, i) => ({
+    const nextQueries = initialQueries.map((q, i) => ({
       ...modifiedQueries[i],
       ...generateQueryKeys(i),
     }));
@@ -229,11 +237,15 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     // Custom components
     const StartPage = datasource.pluginExports.ExploreStartPage;
 
+    // Calculate graph bucketing interval
+    const graphInterval = getIntervals(range, datasource, this.el ? this.el.offsetWidth : 0).intervalMs;
+
     this.setState(
       {
         StartPage,
         datasource,
         datasourceError,
+        graphInterval,
         history,
         supportsGraph,
         supportsLogs,
@@ -414,12 +426,19 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     this.setState(
       state => {
         const showingTable = !state.showingTable;
-        let nextQueryTransactions = state.queryTransactions;
-        if (!showingTable) {
-          // Discard transactions related to Table query
-          nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table');
+        if (showingTable) {
+          return { showingTable, queryTransactions: state.queryTransactions };
         }
-        return { queryTransactions: nextQueryTransactions, showingTable };
+
+        // Toggle off needs discarding of table queries
+        const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table');
+        const results = calculateResultsFromQueryTransactions(
+          nextQueryTransactions,
+          state.datasource,
+          state.graphInterval
+        );
+
+        return { ...results, queryTransactions: nextQueryTransactions, showingTable };
       },
       () => {
         if (this.state.showingTable) {
@@ -429,8 +448,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     );
   };
 
-  onClickTableCell = (columnKey: string, rowValue: string) => {
-    this.onModifyQueries({ type: 'ADD_FILTER', key: columnKey, value: rowValue });
+  onClickLabel = (key: string, value: string) => {
+    this.onModifyQueries({ type: 'ADD_FILTER', key, value });
   };
 
   onModifyQueries = (action, index?: number) => {
@@ -500,8 +519,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
         // Discard transactions related to row query
         const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
+        const results = calculateResultsFromQueryTransactions(
+          nextQueryTransactions,
+          state.datasource,
+          state.graphInterval
+        );
 
         return {
+          ...results,
           initialQueries: nextQueries,
           queryTransactions: nextQueryTransactions,
         };
@@ -564,8 +589,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
     const configuredQueries = [
       {
-        ...queryOptions,
         ...query,
+        ...queryOptions,
       },
     ];
 
@@ -609,7 +634,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       // Append new transaction
       const nextQueryTransactions = [...remainingTransactions, transaction];
 
+      const results = calculateResultsFromQueryTransactions(
+        nextQueryTransactions,
+        state.datasource,
+        state.graphInterval
+      );
+
       return {
+        ...results,
         queryTransactions: nextQueryTransactions,
         showingStartPage: false,
       };
@@ -660,6 +692,12 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         return qt;
       });
 
+      const results = calculateResultsFromQueryTransactions(
+        nextQueryTransactions,
+        state.datasource,
+        state.graphInterval
+      );
+
       const nextHistory = updateHistory(history, datasourceId, queries);
 
       // Keep scanning for results if this was the last scanning transaction
@@ -671,19 +709,13 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       }
 
       return {
+        ...results,
         history: nextHistory,
         queryTransactions: nextQueryTransactions,
       };
     });
   }
 
-  discardTransactions(rowIndex: number) {
-    this.setState(state => {
-      const remainingTransactions = state.queryTransactions.filter(qt => qt.rowIndex !== rowIndex);
-      return { queryTransactions: remainingTransactions };
-    });
-  }
-
   failQueryTransaction(transactionId: string, response: any, datasourceId: string) {
     const { datasource } = this.state;
     if (datasource.meta.id !== datasourceId || response.cancelled) {
@@ -695,14 +727,20 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
     let error: string | JSX.Element = response;
     if (response.data) {
-      error = response.data.error;
-      if (response.data.response) {
-        error = (
-          <>
-            <span>{response.data.error}</span>
-            <details>{response.data.response}</details>
-          </>
-        );
+      if (typeof response.data === 'string') {
+        error = response.data;
+      } else if (response.data.error) {
+        error = response.data.error;
+        if (response.data.response) {
+          error = (
+            <>
+              <span>{response.data.error}</span>
+              <details>{response.data.response}</details>
+            </>
+          );
+        }
+      } else {
+        throw new Error('Could not handle error response');
       }
     }
 
@@ -746,7 +784,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
         const latency = Date.now() - now;
         const results = resultGetter ? resultGetter(res.data) : res.data;
         this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
-        this.setState({ graphRange: transaction.options.range });
       } catch (response) {
         this.failQueryTransaction(transaction.id, response, datasourceId);
       }
@@ -776,9 +813,10 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       datasourceLoading,
       datasourceMissing,
       exploreDatasources,
-      graphRange,
+      graphResult,
       history,
       initialQueries,
+      logsResult,
       queryTransactions,
       range,
       scanning,
@@ -790,31 +828,14 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       supportsGraph,
       supportsLogs,
       supportsTable,
+      tableResult,
     } = this.state;
     const graphHeight = showingGraph && showingTable ? '200px' : '400px';
     const exploreClass = split ? 'explore explore-split' : 'explore';
     const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
-    const graphRangeIntervals = getIntervals(graphRange, datasource, this.el ? this.el.offsetWidth : 0);
     const graphLoading = queryTransactions.some(qt => qt.resultType === 'Graph' && !qt.done);
     const tableLoading = queryTransactions.some(qt => qt.resultType === 'Table' && !qt.done);
     const logsLoading = queryTransactions.some(qt => qt.resultType === 'Logs' && !qt.done);
-    // TODO don't recreate those on each re-render
-    const graphResult = _.flatten(
-      queryTransactions.filter(qt => qt.resultType === 'Graph' && qt.done && qt.result).map(qt => qt.result)
-    );
-    const tableResult = mergeTablesIntoModel(
-      new TableModel(),
-      ...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done && qt.result).map(qt => qt.result)
-    );
-    const logsResult =
-      datasource && datasource.mergeStreams
-        ? datasource.mergeStreams(
-            _.flatten(
-              queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
-            ),
-            graphRangeIntervals.intervalMs
-          )
-        : undefined;
     const loading = queryTransactions.some(qt => !qt.done);
 
     return (
@@ -919,7 +940,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                           height={graphHeight}
                           id={`explore-graph-${position}`}
                           onChangeTime={this.onChangeTime}
-                          range={graphRange}
+                          range={range}
                           split={split}
                         />
                       </Panel>
@@ -931,16 +952,18 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                         isOpen={showingTable}
                         onToggle={this.onClickTableButton}
                       >
-                        <Table data={tableResult} loading={tableLoading} onClickCell={this.onClickTableCell} />
+                        <Table data={tableResult} loading={tableLoading} onClickCell={this.onClickLabel} />
                       </Panel>
                     )}
                     {supportsLogs && (
                       <Panel label="Logs" loading={logsLoading} isOpen={showingLogs} onToggle={this.onClickLogsButton}>
                         <Logs
                           data={logsResult}
+                          key={logsResult.id}
                           loading={logsLoading}
                           position={position}
                           onChangeTime={this.onChangeTime}
+                          onClickLabel={this.onClickLabel}
                           onStartScanning={this.onStartScanning}
                           onStopScanning={this.onStopScanning}
                           range={range}

+ 190 - 49
public/app/features/explore/Logs.tsx

@@ -1,14 +1,26 @@
-import React, { Fragment, PureComponent } from 'react';
+import _ from 'lodash';
+import React, { PureComponent } from 'react';
 import Highlighter from 'react-highlight-words';
 
 import * as rangeUtil from 'app/core/utils/rangeutil';
 import { RawTimeRange } from 'app/types/series';
-import { LogsDedupStrategy, LogsModel, dedupLogRows, filterLogLevels, LogLevel } from 'app/core/logs_model';
+import {
+  LogsDedupStrategy,
+  LogsModel,
+  dedupLogRows,
+  filterLogLevels,
+  LogLevel,
+  LogsStreamLabels,
+  LogsMetaKind,
+  LogRow,
+} from 'app/core/logs_model';
 import { findHighlightChunksInText } from 'app/core/utils/text';
 import { Switch } from 'app/core/components/Switch/Switch';
 
 import Graph from './Graph';
 
+const PREVIEW_LIMIT = 100;
+
 const graphOptions = {
   series: {
     bars: {
@@ -23,6 +35,103 @@ const graphOptions = {
   },
 };
 
+function renderMetaItem(value: any, kind: LogsMetaKind) {
+  if (kind === LogsMetaKind.LabelsMap) {
+    return (
+      <span className="logs-meta-item__value-labels">
+        <Labels labels={value} />
+      </span>
+    );
+  }
+  return value;
+}
+
+class Label extends PureComponent<{
+  label: string;
+  value: string;
+  onClickLabel?: (label: string, value: string) => void;
+}> {
+  onClickLabel = () => {
+    const { onClickLabel, label, value } = this.props;
+    if (onClickLabel) {
+      onClickLabel(label, value);
+    }
+  };
+
+  render() {
+    const { label, value } = this.props;
+    const tooltip = `${label}: ${value}`;
+    return (
+      <span className="logs-label" title={tooltip} onClick={this.onClickLabel}>
+        {value}
+      </span>
+    );
+  }
+}
+class Labels extends PureComponent<{
+  labels: LogsStreamLabels;
+  onClickLabel?: (label: string, value: string) => void;
+}> {
+  render() {
+    const { labels, onClickLabel } = this.props;
+    return Object.keys(labels).map(key => (
+      <Label key={key} label={key} value={labels[key]} onClickLabel={onClickLabel} />
+    ));
+  }
+}
+
+interface RowProps {
+  row: LogRow;
+  showLabels: boolean | null; // Tristate: null means auto
+  showLocalTime: boolean;
+  showUtc: boolean;
+  onClickLabel?: (label: string, value: string) => void;
+}
+
+function Row({ onClickLabel, row, showLabels, showLocalTime, showUtc }: RowProps) {
+  const needsHighlighter = row.searchWords && row.searchWords.length > 0;
+  return (
+    <div className="logs-row">
+      <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''}>
+        {row.duplicates > 0 && (
+          <div className="logs-row-level__duplicates" title={`${row.duplicates} duplicates`}>
+            {Array.apply(null, { length: row.duplicates }).map((bogus, index) => (
+              <div className="logs-row-level__duplicate" key={`${index}`} />
+            ))}
+          </div>
+        )}
+      </div>
+      {showUtc && (
+        <div className="logs-row-time" title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>
+          {row.timestamp}
+        </div>
+      )}
+      {showLocalTime && (
+        <div className="logs-row-time" title={`${row.timestamp} (${row.timeFromNow})`}>
+          {row.timeLocal}
+        </div>
+      )}
+      {showLabels && (
+        <div className="logs-row-labels">
+          <Labels labels={row.uniqueLabels} onClickLabel={onClickLabel} />
+        </div>
+      )}
+      <div className="logs-row-message">
+        {needsHighlighter ? (
+          <Highlighter
+            textToHighlight={row.entry}
+            searchWords={row.searchWords}
+            findChunks={findHighlightChunksInText}
+            highlightClassName="logs-row-match-highlight"
+          />
+        ) : (
+          row.entry
+        )}
+      </div>
+    </div>
+  );
+}
+
 interface LogsProps {
   className?: string;
   data: LogsModel;
@@ -32,27 +141,58 @@ interface LogsProps {
   scanning?: boolean;
   scanRange?: RawTimeRange;
   onChangeTime?: (range: RawTimeRange) => void;
+  onClickLabel?: (label: string, value: string) => void;
   onStartScanning?: () => void;
   onStopScanning?: () => void;
 }
 
 interface LogsState {
   dedup: LogsDedupStrategy;
+  deferLogs: boolean;
   hiddenLogLevels: Set<LogLevel>;
-  showLabels: boolean;
+  renderAll: boolean;
+  showLabels: boolean | null; // Tristate: null means auto
   showLocalTime: boolean;
   showUtc: boolean;
 }
 
 export default class Logs extends PureComponent<LogsProps, LogsState> {
+  deferLogsTimer: NodeJS.Timer;
+  renderAllTimer: NodeJS.Timer;
+
   state = {
     dedup: LogsDedupStrategy.none,
+    deferLogs: true,
     hiddenLogLevels: new Set(),
-    showLabels: true,
+    renderAll: false,
+    showLabels: null,
     showLocalTime: true,
     showUtc: false,
   };
 
+  componentDidMount() {
+    // Staged rendering
+    if (this.state.deferLogs) {
+      const { data } = this.props;
+      const rowCount = data && data.rows ? data.rows.length : 0;
+      // Render all right away if not too far over the limit
+      const renderAll = rowCount <= PREVIEW_LIMIT * 2;
+      this.deferLogsTimer = setTimeout(() => this.setState({ deferLogs: false, renderAll }), rowCount);
+    }
+  }
+
+  componentDidUpdate(prevProps, prevState) {
+    // Staged rendering
+    if (prevState.deferLogs && !this.state.deferLogs && !this.state.renderAll) {
+      this.renderAllTimer = setTimeout(() => this.setState({ renderAll: true }), 2000);
+    }
+  }
+
+  componentWillUnmount() {
+    clearTimeout(this.deferLogsTimer);
+    clearTimeout(this.renderAllTimer);
+  }
+
   onChangeDedup = (dedup: LogsDedupStrategy) => {
     this.setState(prevState => {
       if (prevState.dedup === dedup) {
@@ -99,9 +239,12 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
   };
 
   render() {
-    const { className = '', data, loading = false, position, range, scanning, scanRange } = this.props;
-    const { dedup, hiddenLogLevels, showLabels, showLocalTime, showUtc } = this.state;
+    const { className = '', data, loading = false, onClickLabel, position, range, scanning, scanRange } = this.props;
+    const { dedup, deferLogs, hiddenLogLevels, renderAll, showLocalTime, showUtc } = this.state;
+    let { showLabels } = this.state;
     const hasData = data && data.rows && data.rows.length > 0;
+
+    // Filtering
     const filteredData = filterLogLevels(data, hiddenLogLevels);
     const dedupedData = dedupLogRows(filteredData, dedup);
     const dedupCount = dedupedData.rows.reduce((sum, row) => sum + row.duplicates, 0);
@@ -109,23 +252,24 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
     if (dedup !== LogsDedupStrategy.none) {
       meta.push({
         label: 'Dedup count',
-        value: String(dedupCount),
+        value: dedupCount,
+        kind: LogsMetaKind.Number,
       });
     }
-    const cssColumnSizes = ['3px']; // Log-level indicator line
-    if (showUtc) {
-      cssColumnSizes.push('minmax(100px, max-content)');
-    }
-    if (showLocalTime) {
-      cssColumnSizes.push('minmax(100px, max-content)');
-    }
-    if (showLabels) {
-      cssColumnSizes.push('minmax(100px, 25%)');
+
+    // Staged rendering
+    const firstRows = dedupedData.rows.slice(0, PREVIEW_LIMIT);
+    const lastRows = dedupedData.rows.slice(PREVIEW_LIMIT);
+
+    // Check for labels
+    if (showLabels === null) {
+      if (hasData) {
+        showLabels = data.rows.some(row => _.size(row.uniqueLabels) > 0);
+      } else {
+        showLabels = true;
+      }
     }
-    cssColumnSizes.push('1fr');
-    const logEntriesStyle = {
-      gridTemplateColumns: cssColumnSizes.join(' '),
-    };
+
     const scanText = scanRange ? `Scanning ${rangeUtil.describeTimeRange(scanRange)}` : 'Scanning...';
 
     return (
@@ -177,7 +321,7 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
                   {meta.map(item => (
                     <div className="logs-meta-item" key={item.label}>
                       <span className="logs-meta-item__label">{item.label}:</span>
-                      <span className="logs-meta-item__value">{item.value}</span>
+                      <span className="logs-meta-item__value">{renderMetaItem(item.value, item.kind)}</span>
                     </div>
                   ))}
                 </div>
@@ -185,36 +329,33 @@ export default class Logs extends PureComponent<LogsProps, LogsState> {
           </div>
         </div>
 
-        <div className="logs-entries" style={logEntriesStyle}>
+        <div className="logs-entries">
           {hasData &&
-            dedupedData.rows.map(row => (
-              <Fragment key={row.key + row.duplicates}>
-                <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''}>
-                  {row.duplicates > 0 && (
-                    <div className="logs-row-level__duplicates" title={`${row.duplicates} duplicates`}>
-                      {Array.apply(null, { length: row.duplicates }).map((bogus, index) => (
-                        <div className="logs-row-level__duplicate" key={`${index}`} />
-                      ))}
-                    </div>
-                  )}
-                </div>
-                {showUtc && <div title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>{row.timestamp}</div>}
-                {showLocalTime && <div title={`${row.timestamp} (${row.timeFromNow})`}>{row.timeLocal}</div>}
-                {showLabels && (
-                  <div className="max-width" title={row.labels}>
-                    {row.labels}
-                  </div>
-                )}
-                <div>
-                  <Highlighter
-                    textToHighlight={row.entry}
-                    searchWords={row.searchWords}
-                    findChunks={findHighlightChunksInText}
-                    highlightClassName="logs-row-match-highlight"
-                  />
-                </div>
-              </Fragment>
+            !deferLogs &&
+            firstRows.map(row => (
+              <Row
+                key={row.key + row.duplicates}
+                row={row}
+                showLabels={showLabels}
+                showLocalTime={showLocalTime}
+                showUtc={showUtc}
+                onClickLabel={onClickLabel}
+              />
+            ))}
+          {hasData &&
+            !deferLogs &&
+            renderAll &&
+            lastRows.map(row => (
+              <Row
+                key={row.key + row.duplicates}
+                row={row}
+                showLabels={showLabels}
+                showLocalTime={showLocalTime}
+                showUtc={showUtc}
+                onClickLabel={onClickLabel}
+              />
             ))}
+          {hasData && deferLogs && <span>Rendering {dedupedData.rows.length} rows...</span>}
         </div>
         {!loading &&
           !hasData &&

+ 1 - 1
public/app/partials/login.html

@@ -22,7 +22,7 @@
             <button type="submit" class="btn btn-large p-x-2 btn-inverse btn-loading" ng-if="loggingIn">
               Logging In<span>.</span><span>.</span><span>.</span>
             </button>
-            <div class="small login-button-forgot-password">
+            <div class="small login-button-forgot-password" ng-hide="ldapEnabled || authProxyEnabled">
               <a href="user/password/send-reset-email">
                 Forgot your password?
               </a>

+ 8 - 1
public/app/partials/reset_password.html

@@ -3,7 +3,14 @@
 <div class="page-container page-body">
 	<div class="signup">
 		<h3 class="p-b-1">Reset password</h3>
-		<form name="sendResetForm" class="login-form gf-form-group" ng-show="mode === 'send'">
+
+		<div ng-if="ldapEnabled || authProxyEnabled">
+			You cannot reset password when LDAP or Auth Proxy authentication is enabled.
+		</div>
+		<div ng-if="disableLoginForm">
+			You cannot reset password when login form is disabled.
+		</div>
+		<form name="sendResetForm" class="login-form gf-form-group" ng-show="mode === 'send'" ng-hide="ldapEnabled || authProxyEnabled || disableLoginForm">
 			<div class="gf-form">
 					<span class="gf-form-label width-7">User</span>
 					<input type="text" name="username" class="gf-form-input max-width-14" required ng-model='formModel.userOrEmail' placeholder="email or username">

+ 10 - 0
public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html

@@ -70,6 +70,16 @@
 			</label>
 			<input class="gf-form-input max-width-12" type="number" ng-model="agg.settings.trimEdges" ng-change="onChangeInternal()">
 		</div>
+		<div class="gf-form offset-width-7">
+			<label class="gf-form-label width-10">
+				Offset
+				<info-popover mode="right-normal">
+					Change the start value of each bucket by the specified positive (+) or negative offset (-) duration, such as 1h for an hour, or 1d for a day
+				</info-popover>
+			</label>
+			<input class="gf-form-input max-width-12" type="text" ng-model="agg.settings.offset" ng-change="onChangeInternal()">
+		</div>
+
 	</div>
 
 	<div ng-if="agg.type === 'histogram'">

+ 4 - 0
public/app/plugins/datasource/elasticsearch/query_builder.ts

@@ -72,6 +72,10 @@ export class ElasticQueryBuilder {
     esAgg.extended_bounds = { min: '$timeFrom', max: '$timeTo' };
     esAgg.format = 'epoch_millis';
 
+    if (settings.offset !== '') {
+      esAgg.offset = settings.offset;
+    }
+
     if (esAgg.interval === 'auto') {
       esAgg.interval = '$__interval';
     }

+ 17 - 14
public/app/plugins/datasource/logging/datasource.ts

@@ -3,9 +3,11 @@ import _ from 'lodash';
 import * as dateMath from 'app/core/utils/datemath';
 import { LogsStream, LogsModel, makeSeriesForLogs } from 'app/core/logs_model';
 import { PluginMeta, DataQuery } from 'app/types';
+import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
 
 import LanguageProvider from './language_provider';
 import { mergeStreamsToLogs } from './result_transformer';
+import { formatQuery, parseQuery } from './query_utils';
 
 export const DEFAULT_LIMIT = 1000;
 
@@ -16,20 +18,6 @@ const DEFAULT_QUERY_PARAMS = {
   query: '',
 };
 
-const selectorRegexp = /{[^{]*}/g;
-export function parseQuery(input: string) {
-  const match = input.match(selectorRegexp);
-  let query = '';
-  let regexp = input;
-
-  if (match) {
-    query = match[0];
-    regexp = input.replace(selectorRegexp, '').trim();
-  }
-
-  return { query, regexp };
-}
-
 function serializeParams(data: any) {
   return Object.keys(data)
     .map(k => {
@@ -114,6 +102,21 @@ export default class LoggingDatasource {
     });
   }
 
+  modifyQuery(query: DataQuery, action: any): DataQuery {
+    const parsed = parseQuery(query.expr || '');
+    let selector = parsed.query;
+    switch (action.type) {
+      case 'ADD_FILTER': {
+        selector = addLabelToSelector(selector, action.key, action.value);
+        break;
+      }
+      default:
+        break;
+    }
+    const expression = formatQuery(selector, parsed.regexp);
+    return { ...query, expr: expression };
+  }
+
   getTime(date, roundUp) {
     if (_.isString(date)) {
       date = dateMath.parse(date, roundUp);

+ 29 - 2
public/app/plugins/datasource/logging/language_provider.test.ts

@@ -8,9 +8,10 @@ describe('Language completion provider', () => {
   };
 
   describe('empty query suggestions', () => {
-    it('returns default suggestions on emtpty context', () => {
+    it('returns no suggestions on emtpty context', () => {
       const instance = new LanguageProvider(datasource);
-      const result = instance.provideCompletionItems({ text: '', prefix: '', wrapperClasses: [] });
+      const value = Plain.deserialize('');
+      const result = instance.provideCompletionItems({ text: '', prefix: '', value, wrapperClasses: [] });
       expect(result.context).toBeUndefined();
       expect(result.refresher).toBeUndefined();
       expect(result.suggestions.length).toEqual(0);
@@ -38,6 +39,32 @@ describe('Language completion provider', () => {
         },
       ]);
     });
+
+    it('returns no suggestions within regexp', () => {
+      const instance = new LanguageProvider(datasource);
+      const value = Plain.deserialize('{} ()');
+      const range = value.selection.merge({
+        anchorOffset: 4,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const history = [
+        {
+          query: { refId: '1', expr: '{app="foo"}' },
+        },
+      ];
+      const result = instance.provideCompletionItems(
+        {
+          text: '',
+          prefix: '',
+          value: valueWithSelection,
+          wrapperClasses: [],
+        },
+        { history }
+      );
+      expect(result.context).toBeUndefined();
+      expect(result.refresher).toBeUndefined();
+      expect(result.suggestions.length).toEqual(0);
+    });
   });
 
   describe('label suggestions', () => {

+ 7 - 12
public/app/plugins/datasource/logging/language_provider.ts

@@ -10,7 +10,7 @@ import {
   HistoryItem,
 } from 'app/types/explore';
 import { parseSelector, labelRegexp, selectorRegexp } from 'app/plugins/datasource/prometheus/language_utils';
-import PromqlSyntax from 'app/plugins/datasource/prometheus/promql';
+import syntax from './syntax';
 import { DataQuery } from 'app/types';
 
 const DEFAULT_KEYS = ['job', 'namespace'];
@@ -55,7 +55,7 @@ export default class LoggingLanguageProvider extends LanguageProvider {
   cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
 
   getSyntax() {
-    return PromqlSyntax;
+    return syntax;
   }
 
   request = url => {
@@ -70,19 +70,14 @@ export default class LoggingLanguageProvider extends LanguageProvider {
   };
 
   // Keep this DOM-free for testing
-  provideCompletionItems({ prefix, wrapperClasses, text }: TypeaheadInput, context?: any): TypeaheadOutput {
-    // Syntax spans have 3 classes by default. More indicate a recognized token
-    const tokenRecognized = wrapperClasses.length > 3;
+  provideCompletionItems({ prefix, wrapperClasses, text, value }: TypeaheadInput, context?: any): TypeaheadOutput {
+    // Local text properties
+    const empty = value.document.text.length === 0;
     // Determine candidates by CSS context
     if (_.includes(wrapperClasses, 'context-labels')) {
-      // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|}
+      // Suggestions for {|} and {foo=|}
       return this.getLabelCompletionItems.apply(this, arguments);
-    } else if (
-      // Show default suggestions in a couple of scenarios
-      (prefix && !tokenRecognized) || // Non-empty prefix, but not inside known token
-      (prefix === '' && !text.match(/^[\]})\s]+$/)) || // Empty prefix, but not following a closing brace
-      text.match(/[+\-*/^%]/) // Anything after binary operator
-    ) {
+    } else if (empty) {
       return this.getEmptyCompletionItems(context || {});
     }
 

+ 12 - 1
public/app/plugins/datasource/logging/datasource.test.ts → public/app/plugins/datasource/logging/query_utils.test.ts

@@ -1,4 +1,4 @@
-import { parseQuery } from './datasource';
+import { parseQuery } from './query_utils';
 
 describe('parseQuery', () => {
   it('returns empty for empty string', () => {
@@ -42,4 +42,15 @@ describe('parseQuery', () => {
       regexp: '',
     });
   });
+
+  it('returns query and regexp with quantifiers', () => {
+    expect(parseQuery('{foo="bar"} \\.java:[0-9]{1,5}')).toEqual({
+      query: '{foo="bar"}',
+      regexp: '\\.java:[0-9]{1,5}',
+    });
+    expect(parseQuery('\\.java:[0-9]{1,5} {foo="bar"}')).toEqual({
+      query: '{foo="bar"}',
+      regexp: '\\.java:[0-9]{1,5}',
+    });
+  });
 });

+ 17 - 0
public/app/plugins/datasource/logging/query_utils.ts

@@ -0,0 +1,17 @@
+const selectorRegexp = /(?:^|\s){[^{]*}/g;
+export function parseQuery(input: string) {
+  const match = input.match(selectorRegexp);
+  let query = '';
+  let regexp = input;
+
+  if (match) {
+    query = match[0].trim();
+    regexp = input.replace(selectorRegexp, '').trim();
+  }
+
+  return { query, regexp };
+}
+
+export function formatQuery(selector: string, search: string): string {
+  return `${selector || ''} ${search || ''}`.trim();
+}

+ 15 - 15
public/app/plugins/datasource/logging/result_transformer.test.ts

@@ -41,7 +41,7 @@ describe('parseLabels()', () => {
   });
 
   it('returns labels on labels string', () => {
-    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: '"bar"', baz: '"42"' });
+    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: 'bar', baz: '42' });
   });
 });
 
@@ -52,7 +52,7 @@ describe('formatLabels()', () => {
   });
 
   it('returns label string on label set', () => {
-    expect(formatLabels({ foo: '"bar"', baz: '"42"' })).toEqual('{baz="42", foo="bar"}');
+    expect(formatLabels({ foo: 'bar', baz: '42' })).toEqual('{baz="42", foo="bar"}');
   });
 });
 
@@ -63,14 +63,14 @@ describe('findCommonLabels()', () => {
   });
 
   it('returns no common labels on differing sets', () => {
-    expect(findCommonLabels([{ foo: '"bar"' }, {}])).toEqual({});
-    expect(findCommonLabels([{}, { foo: '"bar"' }])).toEqual({});
-    expect(findCommonLabels([{ baz: '42' }, { foo: '"bar"' }])).toEqual({});
-    expect(findCommonLabels([{ foo: '42', baz: '"bar"' }, { foo: '"bar"' }])).toEqual({});
+    expect(findCommonLabels([{ foo: 'bar' }, {}])).toEqual({});
+    expect(findCommonLabels([{}, { foo: 'bar' }])).toEqual({});
+    expect(findCommonLabels([{ baz: '42' }, { foo: 'bar' }])).toEqual({});
+    expect(findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }])).toEqual({});
   });
 
   it('returns the single labels set as common labels', () => {
-    expect(findCommonLabels([{ foo: '"bar"' }])).toEqual({ foo: '"bar"' });
+    expect(findCommonLabels([{ foo: 'bar' }])).toEqual({ foo: 'bar' });
   });
 });
 
@@ -106,10 +106,10 @@ describe('mergeStreamsToLogs()', () => {
     expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
       {
         entry: 'WARN boooo',
-        labels: '{foo="bar"}',
+        labels: { foo: 'bar' },
         key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
         logLevel: 'warning',
-        uniqueLabels: '',
+        uniqueLabels: {},
       },
     ]);
   });
@@ -140,21 +140,21 @@ describe('mergeStreamsToLogs()', () => {
     expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
       {
         entry: 'INFO 2',
-        labels: '{foo="bar", baz="2"}',
+        labels: { foo: 'bar', baz: '2' },
         logLevel: 'info',
-        uniqueLabels: '{baz="2"}',
+        uniqueLabels: { baz: '2' },
       },
       {
         entry: 'WARN boooo',
-        labels: '{foo="bar", baz="1"}',
+        labels: { foo: 'bar', baz: '1' },
         logLevel: 'warning',
-        uniqueLabels: '{baz="1"}',
+        uniqueLabels: { baz: '1' },
       },
       {
         entry: 'INFO 1',
-        labels: '{foo="bar", baz="2"}',
+        labels: { foo: 'bar', baz: '2' },
         logLevel: 'info',
-        uniqueLabels: '{baz="2"}',
+        uniqueLabels: { baz: '2' },
       },
     ]);
   });

+ 22 - 7
public/app/plugins/datasource/logging/result_transformer.ts

@@ -9,6 +9,7 @@ import {
   LogsStream,
   LogsStreamEntry,
   LogsStreamLabels,
+  LogsMetaKind,
 } from 'app/core/logs_model';
 import { DEFAULT_LIMIT } from './datasource';
 
@@ -40,7 +41,7 @@ export function getLogLevel(line: string): LogLevel {
 /**
  * Regexp to extract Prometheus-style labels
  */
-const labelRegexp = /\b(\w+)(!?=~?)("[^"\n]*?")/g;
+const labelRegexp = /\b(\w+)(!?=~?)"([^"\n]*?)"/g;
 
 /**
  * Returns a map of label keys to value from an input selector string.
@@ -104,11 +105,17 @@ export function formatLabels(labels: LogsStreamLabels, defaultValue = ''): strin
     return defaultValue;
   }
   const labelKeys = Object.keys(labels).sort();
-  const cleanSelector = labelKeys.map(key => `${key}=${labels[key]}`).join(', ');
+  const cleanSelector = labelKeys.map(key => `${key}="${labels[key]}"`).join(', ');
   return ['{', cleanSelector, '}'].join('');
 }
 
-export function processEntry(entry: LogsStreamEntry, labels: string, uniqueLabels: string, search: string): LogRow {
+export function processEntry(
+  entry: LogsStreamEntry,
+  labels: string,
+  parsedLabels: LogsStreamLabels,
+  uniqueLabels: LogsStreamLabels,
+  search: string
+): LogRow {
   const { line, timestamp } = entry;
   // Assumes unique-ness, needs nanosec precision for timestamp
   const key = `EK${timestamp}${labels}`;
@@ -120,19 +127,22 @@ export function processEntry(entry: LogsStreamEntry, labels: string, uniqueLabel
 
   return {
     key,
-    labels,
     logLevel,
     timeFromNow,
     timeEpochMs,
     timeLocal,
     uniqueLabels,
     entry: line,
+    labels: parsedLabels,
     searchWords: search ? [search] : [],
     timestamp: timestamp,
   };
 }
 
 export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT): LogsModel {
+  // Unique model identifier
+  const id = streams.map(stream => stream.labels).join();
+
   // Find unique labels for each stream
   streams = streams.map(stream => ({
     ...stream,
@@ -141,7 +151,7 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
   const commonLabels = findCommonLabels(streams.map(model => model.parsedLabels));
   streams = streams.map(stream => ({
     ...stream,
-    uniqueLabels: formatLabels(findUniqueLabels(stream.parsedLabels, commonLabels)),
+    uniqueLabels: findUniqueLabels(stream.parsedLabels, commonLabels),
   }));
 
   // Merge stream entries into single list of log rows
@@ -149,7 +159,9 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
     .reduce(
       (acc: LogRow[], stream: LogsStream) => [
         ...acc,
-        ...stream.entries.map(entry => processEntry(entry, stream.labels, stream.uniqueLabels, stream.search)),
+        ...stream.entries.map(entry =>
+          processEntry(entry, stream.labels, stream.parsedLabels, stream.uniqueLabels, stream.search)
+        ),
       ],
       []
     )
@@ -162,17 +174,20 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT)
   if (_.size(commonLabels) > 0) {
     meta.push({
       label: 'Common labels',
-      value: formatLabels(commonLabels),
+      value: commonLabels,
+      kind: LogsMetaKind.LabelsMap,
     });
   }
   if (limit) {
     meta.push({
       label: 'Limit',
       value: `${limit} (${sortedRows.length} returned)`,
+      kind: LogsMetaKind.String,
     });
   }
 
   return {
+    id,
     meta,
     rows: sortedRows,
   };

+ 28 - 0
public/app/plugins/datasource/logging/syntax.ts

@@ -0,0 +1,28 @@
+/* tslint:disable max-line-length */
+
+const tokenizer = {
+  comment: {
+    pattern: /(^|[^\n])#.*/,
+    lookbehind: true,
+  },
+  'context-labels': {
+    pattern: /(^|\s)\{[^}]*(?=})/,
+    lookbehind: true,
+    inside: {
+      'label-key': {
+        pattern: /[a-z_]\w*(?=\s*(=|!=|=~|!~))/,
+        alias: 'attr-name',
+      },
+      'label-value': {
+        pattern: /"(?:\\.|[^\\"])*"/,
+        greedy: true,
+        alias: 'attr-value',
+      },
+    },
+  },
+  // number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
+  operator: new RegExp(`/&&?|\\|?\\||!=?|<(?:=>?|<|>)?|>[>=]?`, 'i'),
+  punctuation: /[{}`,.]/,
+};
+
+export default tokenizer;

+ 3 - 3
public/app/plugins/datasource/mysql/partials/annotations.editor.html

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
-- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
+- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>

+ 3 - 3
public/app/plugins/datasource/mysql/partials/query.editor.html

@@ -151,7 +151,7 @@ Table:
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__timeGroup(column,'5m'[, fillvalue]) -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
      by setting fillvalue grafana will fill in missing values according to the interval
@@ -169,8 +169,8 @@ GROUP BY 1
 ORDER BY 1
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
-- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
+- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
     </pre>

+ 5 - 3
public/app/plugins/datasource/prometheus/add_label_to_query.ts

@@ -49,7 +49,7 @@ export function addLabelToQuery(query: string, key: string, value: string, opera
     const selectorWithLabel = addLabelToSelector(selector, key, value, operator);
     lastIndex = match.index + match[1].length + 2;
     suffix = query.slice(match.index + match[0].length);
-    parts.push(prefix, '{', selectorWithLabel, '}');
+    parts.push(prefix, selectorWithLabel);
     match = selectorRegexp.exec(query);
   }
 
@@ -59,7 +59,7 @@ export function addLabelToQuery(query: string, key: string, value: string, opera
 
 const labelRegexp = /(\w+)\s*(=|!=|=~|!~)\s*("[^"]*")/g;
 
-function addLabelToSelector(selector: string, labelKey: string, labelValue: string, labelOperator?: string) {
+export function addLabelToSelector(selector: string, labelKey: string, labelValue: string, labelOperator?: string) {
   const parsedLabels = [];
 
   // Split selector into labels
@@ -76,13 +76,15 @@ function addLabelToSelector(selector: string, labelKey: string, labelValue: stri
   parsedLabels.push({ key: labelKey, operator: operatorForLabelKey, value: `"${labelValue}"` });
 
   // Sort labels by key and put them together
-  return _.chain(parsedLabels)
+  const formatted = _.chain(parsedLabels)
     .uniqWith(_.isEqual)
     .compact()
     .sortBy('key')
     .map(({ key, operator, value }) => `${key}${operator}${value}`)
     .value()
     .join(',');
+
+  return `{${formatted}}`;
 }
 
 function isPositionInsideChars(text: string, position: number, openChar: string, closeChar: string) {

+ 14 - 1
public/app/plugins/datasource/prometheus/specs/add_label_to_query.test.ts

@@ -1,4 +1,4 @@
-import addLabelToQuery from '../add_label_to_query';
+import { addLabelToQuery, addLabelToSelector } from '../add_label_to_query';
 
 describe('addLabelToQuery()', () => {
   it('should add label to simple query', () => {
@@ -56,3 +56,16 @@ describe('addLabelToQuery()', () => {
     );
   });
 });
+
+describe('addLabelToSelector()', () => {
+  test('should add a label to an empty selector', () => {
+    expect(addLabelToSelector('{}', 'foo', 'bar')).toBe('{foo="bar"}');
+    expect(addLabelToSelector('', 'foo', 'bar')).toBe('{foo="bar"}');
+  });
+  test('should add a label to a selector', () => {
+    expect(addLabelToSelector('{foo="bar"}', 'baz', '42')).toBe('{baz="42",foo="bar"}');
+  });
+  test('should add a label to a selector with custom operator', () => {
+    expect(addLabelToSelector('{}', 'baz', '42', '!=')).toBe('{baz!="42"}');
+  });
+});

+ 6 - 1
public/app/types/explore.ts

@@ -1,6 +1,8 @@
 import { Value } from 'slate';
 
 import { DataQuery, RawTimeRange } from './series';
+import TableModel from 'app/core/table_model';
+import { LogsModel } from 'app/core/logs_model';
 
 export interface CompletionItem {
   /**
@@ -158,9 +160,11 @@ export interface ExploreState {
   datasourceMissing: boolean;
   datasourceName?: string;
   exploreDatasources: ExploreDatasource[];
-  graphRange: RawTimeRange;
+  graphInterval: number; // in ms
+  graphResult?: any[];
   history: HistoryItem[];
   initialQueries: DataQuery[];
+  logsResult?: LogsModel;
   queryTransactions: QueryTransaction[];
   range: RawTimeRange;
   scanning?: boolean;
@@ -172,6 +176,7 @@ export interface ExploreState {
   supportsGraph: boolean | null;
   supportsLogs: boolean | null;
   supportsTable: boolean | null;
+  tableResult?: TableModel;
 }
 
 export interface ExploreUrlState {

+ 55 - 11
public/sass/pages/_explore.scss

@@ -244,15 +244,6 @@
 
 .explore {
   .logs {
-    .logs-entries {
-      display: grid;
-      grid-column-gap: 1rem;
-      grid-row-gap: 0.1rem;
-      grid-template-columns: 4px minmax(100px, max-content) minmax(100px, 25%) 1fr;
-      font-family: $font-family-monospace;
-      font-size: 12px;
-    }
-
     .logs-controls {
       display: flex;
       background-color: $page-bg;
@@ -261,6 +252,8 @@
       border-radius: $border-radius;
       margin: 2*$panel-margin 0;
       border: $panel-border;
+      justify-items: flex-start;
+      align-items: flex-start;
 
       > * {
         margin-right: 1em;
@@ -276,11 +269,11 @@
     .logs-meta {
       flex: 1;
       color: $text-color-weak;
-      padding: 2px 0;
+      // Align first line with controls labels
+      margin-top: -2px;
     }
 
     .logs-meta-item {
-      display: inline-block;
       margin-right: 1em;
     }
 
@@ -294,6 +287,38 @@
       font-family: $font-family-monospace;
     }
 
+    .logs-meta-item__value-labels {
+      // compensate for the labels padding
+      position: relative;
+      top: 4px;
+    }
+
+    .logs-entries {
+      font-family: $font-family-monospace;
+      font-size: 12px;
+    }
+
+    .logs-row {
+      display: flex;
+      flex-direction: row;
+
+      > div + div {
+        margin-left: 0.5rem;
+      }
+    }
+
+    .logs-row-level {
+      width: 3px;
+    }
+
+    .logs-row-labels {
+      flex: 0 0 25%;
+    }
+
+    .logs-row-message {
+      flex: 1;
+    }
+
     .logs-row-match-highlight {
       // Undoing mark styling
       background: inherit;
@@ -356,6 +381,25 @@
       background-color: #1f78c1;
       margin: 0 1px 1px 0;
     }
+
+    .logs-label {
+      display: inline-block;
+      padding: 0 2px;
+      background-color: $btn-inverse-bg;
+      border-radius: $border-radius;
+      margin-right: 4px;
+      overflow: hidden;
+      text-overflow: ellipsis;
+      white-space: nowrap;
+    }
+
+    .logs-row-labels {
+      line-height: 1.2;
+
+      .logs-label {
+        cursor: pointer;
+      }
+    }
   }
 }
 

+ 1 - 1
scripts/build/publish_test.go

@@ -105,6 +105,6 @@ func TestFileWalker(t *testing.T) {
 	incorrectPackageName := "grafana_5.2.0-474pre1_armfoo.deb"
 	_, err := mapPackage(incorrectPackageName, incorrectPackageName, []byte{})
 	if err == nil {
-		t.Errorf("Testing (%v), expected to fail due to an unrecognized arch, but signalled no error.", incorrectPackageName)
+		t.Errorf("Testing (%v), expected to fail due to an unrecognized arch, but signaled no error.", incorrectPackageName)
 	}
 }