View Source Code

Merge remote-tracking branch 'grafana/master'

* grafana/master: (116 commits)
  Adjust UI depth of query statistics
  Preserve suffix text when applying function suggestion
  changelog: adds note about closing #13993
  Refactored log stream merging, added types, tests, comments
  Fixes #13993 - adds more options for Slack notifications
  add auth.proxy headers to sample.ini
  add auth.proxy headers to default.ini
  fixed issue with reducer sharing url query instance with angular router
  fixed exporter bug missing adding requires for datasources only used via data source variable, fixes #13891
  minor text change in export modal
  build: removes unused.
  Fixed issues introduced by changing to PureComponent
  further refactoring of #13984
  minor fix
  refactorings and some clean-up / removal of things not used
  Update docs/sources/permissions/dashboard_folder_permissions.md
  Fix typo in docs/sources/reference/scripting.md
  move enterprise down in menu
  wip: panel-header: Fix shareModal compatibility with react and angular
  wip: panel-header: Remove custom menu items from panels completely
  ...
ryan 7 years ago
Parent
Commit
594051b3fb
100 changed files with 2610 additions and 668 deletions
  1. 4 1
      .circleci/config.yml
  2. 6 0
      CHANGELOG.md
  3. 20 8
      build.go
  4. 1 0
      conf/defaults.ini
  5. 1 0
      conf/sample.ini
  6. 108 2
      devenv/dev-dashboards/panel_tests_table.json
  7. 0 116
      docs/sources/administration/permissions.md
  8. 1 1
      docs/sources/administration/provisioning.md
  9. 43 0
      docs/sources/auth/enhanced_ldap.md
  10. 67 0
      docs/sources/enterprise/index.md
  11. 2 1
      docs/sources/features/datasources/cloudwatch.md
  12. 1 1
      docs/sources/features/datasources/elasticsearch.md
  13. 3 3
      docs/sources/guides/whats-new-in-v5-3.md
  14. 249 0
      docs/sources/http_api/datasource_permissions.md
  15. 111 0
      docs/sources/http_api/external_group_sync.md
  16. 73 0
      docs/sources/permissions/dashboard_folder_permissions.md
  17. 71 0
      docs/sources/permissions/datasource_permissions.md
  18. 12 0
      docs/sources/permissions/index.md
  19. 38 0
      docs/sources/permissions/organization_roles.md
  20. 42 0
      docs/sources/permissions/overview.md
  21. 1 1
      docs/sources/reference/scripting.md
  22. 1 1
      docs/sources/whatsnew/index.md
  23. 4 0
      pkg/api/alerting.go
  24. 5 45
      pkg/api/dataproxy.go
  25. 10 8
      pkg/api/http_server.go
  26. 4 1
      pkg/api/metrics.go
  27. 13 8
      pkg/cmd/grafana-server/server.go
  28. 1 0
      pkg/login/auth.go
  29. 14 0
      pkg/middleware/headers.go
  30. 1 0
      pkg/middleware/middleware.go
  31. 2 1
      pkg/models/alert.go
  32. 1 0
      pkg/models/context.go
  33. 0 5
      pkg/models/datasource.go
  34. 1 0
      pkg/models/user.go
  35. 34 3
      pkg/registry/registry.go
  36. 3 3
      pkg/services/alerting/commands.go
  37. 18 0
      pkg/services/alerting/conditions/reducer_test.go
  38. 18 1
      pkg/services/alerting/extractor.go
  39. 8 8
      pkg/services/alerting/extractor_test.go
  40. 51 0
      pkg/services/alerting/notifiers/slack.go
  41. 10 1
      pkg/services/alerting/notifiers/slack_test.go
  42. 2 1
      pkg/services/alerting/test_rule.go
  43. 17 0
      pkg/services/cache/cache.go
  44. 2 1
      pkg/services/dashboards/dashboard_service.go
  45. 53 0
      pkg/services/datasources/cache.go
  46. 18 4
      pkg/services/sqlstore/dashboard.go
  47. 8 3
      pkg/services/sqlstore/sqlstore.go
  48. 30 3
      pkg/services/sqlstore/user.go
  49. 2 0
      pkg/tsdb/cloudwatch/metric_find_query.go
  50. 18 7
      pkg/tsdb/elasticsearch/client/search_request.go
  51. 54 0
      pkg/tsdb/elasticsearch/time_series_query_test.go
  52. 8 3
      public/app/core/components/Switch/Switch.tsx
  53. 1 1
      public/app/core/components/form_dropdown/form_dropdown.ts
  54. 4 4
      public/app/core/controllers/json_editor_ctrl.ts
  55. 71 12
      public/app/core/logs_model.ts
  56. 3 1
      public/app/core/reducers/location.ts
  57. 1 1
      public/app/core/services/bridge_srv.ts
  58. 10 10
      public/app/core/utils/colors.ts
  59. 8 34
      public/app/features/dashboard/dashboard_ctrl.ts
  60. 0 5
      public/app/features/dashboard/dashboard_model.ts
  61. 6 2
      public/app/features/dashboard/dashgrid/DashboardGrid.tsx
  62. 6 6
      public/app/features/dashboard/dashgrid/DashboardPanel.tsx
  63. 28 5
      public/app/features/dashboard/dashgrid/PanelChrome.tsx
  64. 19 10
      public/app/features/dashboard/dashgrid/PanelEditor.tsx
  65. 0 83
      public/app/features/dashboard/dashgrid/PanelHeader.tsx
  66. 51 0
      public/app/features/dashboard/dashgrid/PanelHeader/PanelHeader.tsx
  67. 40 0
      public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderMenu.tsx
  68. 23 0
      public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderMenuItem.tsx
  69. 9 13
      public/app/features/dashboard/export/export_modal.html
  70. 34 13
      public/app/features/dashboard/export/export_modal.ts
  71. 22 11
      public/app/features/dashboard/export/exporter.ts
  72. 15 4
      public/app/features/dashboard/panel_model.ts
  73. 1 1
      public/app/features/dashboard/settings/settings.ts
  74. 2 0
      public/app/features/dashboard/shareModalCtrl.ts
  75. 12 2
      public/app/features/dashboard/specs/exporter.test.ts
  76. 120 0
      public/app/features/dashboard/utils/getPanelMenu.ts
  77. 86 0
      public/app/features/dashboard/utils/panel.ts
  78. 33 11
      public/app/features/explore/Explore.tsx
  79. 29 5
      public/app/features/explore/Graph.tsx
  80. 110 4
      public/app/features/explore/Logs.tsx
  81. 2 1
      public/app/features/explore/QueryField.tsx
  82. 24 5
      public/app/features/explore/TimePicker.tsx
  83. 12 41
      public/app/features/panel/panel_ctrl.ts
  84. 1 3
      public/app/features/panel/viz_tab.ts
  85. 1 1
      public/app/partials/reset_password.html
  86. 6 1
      public/app/plugins/datasource/elasticsearch/config_ctrl.ts
  87. 12 1
      public/app/plugins/datasource/elasticsearch/query_builder.ts
  88. 62 0
      public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
  89. 4 4
      public/app/plugins/datasource/logging/components/LoggingQueryField.tsx
  90. 14 8
      public/app/plugins/datasource/logging/datasource.ts
  91. 3 5
      public/app/plugins/datasource/logging/language_provider.ts
  92. 137 4
      public/app/plugins/datasource/logging/result_transformer.test.ts
  93. 139 12
      public/app/plugins/datasource/logging/result_transformer.ts
  94. 3 3
      public/app/plugins/datasource/prometheus/components/PromQueryField.tsx
  95. 33 21
      public/app/plugins/datasource/prometheus/language_provider.ts
  96. 57 57
      public/app/plugins/datasource/prometheus/promql.ts
  97. 43 0
      public/app/plugins/datasource/prometheus/specs/language_provider.test.ts
  98. 39 15
      public/app/plugins/panel/graph2/module.tsx
  99. 7 6
      public/app/plugins/panel/table/renderer.ts
  100. 2 1
      public/app/types/explore.ts

+ 4 - 1
.circleci/config.yml

@@ -335,6 +335,9 @@ jobs:
       - run:
           name: deploy to gcp
           command: '/opt/google-cloud-sdk/bin/gsutil cp ./enterprise-dist/* gs://$GCP_BUCKET_NAME/enterprise/master'
+      - run:
+          name: Deploy to grafana.com
+          command: 'cd enterprise-dist && ../scripts/build/release_publisher/release_publisher -apikey ${GRAFANA_COM_API_KEY} -enterprise -from-local'
 
 
   deploy-enterprise-release:
@@ -403,7 +406,7 @@ jobs:
           command: '/opt/google-cloud-sdk/bin/gcloud auth activate-service-account --key-file=/tmp/gcpkey.json'
       - run:
           name: deploy to gcp
-          command: '/opt/google-cloud-sdk/bin/gsutil cp ./dist/* gs://R/oss/release'
+          command: '/opt/google-cloud-sdk/bin/gsutil cp ./dist/* gs://$GCP_BUCKET_NAME/oss/release'
       - run:
           name: Deploy to Grafana.com
           command: './scripts/build/publish.sh'

+ 6 - 0
CHANGELOG.md

@@ -12,12 +12,15 @@
 ### Minor
 
 * **Cloudwatch**: Show all available CloudWatch regions [#12308](https://github.com/grafana/grafana/issues/12308), thx [@mtanda](https://github.com/mtanda)
+* **Cloudwatch**: AWS/Connect metrics and dimensions [#13970](https://github.com/grafana/grafana/pull/13970), thx [@zcoffy](https://github.com/zcoffy)
 * **Postgres**: Add delta window function to postgres query builder [#13925](https://github.com/grafana/grafana/issues/13925), thx [svenklemm](https://github.com/svenklemm)
 * **Units**: New clock time format, to format ms or second values as for example `01h:59m`, [#13635](https://github.com/grafana/grafana/issues/13635), thx [@franciscocpg](https://github.com/franciscocpg)
 * **Datasource Proxy**: Keep trailing slash for datasource proxy requests [#13326](https://github.com/grafana/grafana/pull/13326), thx [@ryantxu](https://github.com/ryantxu)
 * **DingDing**: Can't receive DingDing alert when alert is triggered [#13723](https://github.com/grafana/grafana/issues/13723), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino)
 * **Internal metrics**: Renamed `grafana_info` to `grafana_build_info` and added branch, goversion and revision [#13876](https://github.com/grafana/grafana/pull/13876)
 * **Alerting**: Increaste default duration for queries [#13945](https://github.com/grafana/grafana/pull/13945)
+* **Table**: Fix CSS alpha background-color applied twice in table cell with link [#13606](https://github.com/grafana/grafana/issues/13606), thx [@grisme](https://github.com/grisme)
+* **Alerting**: More options for the Slack Alert notifier [#13993](https://github.com/grafana/grafana/issues/13993), thx [@andreykaipov](https://github.com/andreykaipov)
 
 ### Breaking changes
 
@@ -25,7 +28,10 @@
 
 # 5.3.3 (unreleased)
 
+* **Alerting**: Delete alerts when parent folder was deleted [#13322](https://github.com/grafana/grafana/issues/13322)
 * **MySQL**: Fix `$__timeFilter()` should respect local time zone [#13769](https://github.com/grafana/grafana/issues/13769)
+* **Dashboard**: Fix datasource selection in panel by enter key [#13932](https://github.com/grafana/grafana/issues/13932)
+* **Graph**: Fix table legend height when positioned below graph and using Internet Explorer 11 [#13903](https://github.com/grafana/grafana/issues/13903)
 
 # 5.3.2 (2018-10-24)
 

+ 20 - 8
build.go

@@ -41,8 +41,8 @@ var (
 	race                  bool
 	phjsToRelease         string
 	workingDir            string
-	includeBuildNumber    bool     = true
-	buildNumber           int      = 0
+	includeBuildId        bool     = true
+	buildId               string   = "0"
 	binaries              []string = []string{"grafana-server", "grafana-cli"}
 	isDev                 bool     = false
 	enterprise            bool     = false
@@ -54,6 +54,8 @@ func main() {
 
 	ensureGoPath()
 
+	var buildIdRaw string
+
 	flag.StringVar(&goarch, "goarch", runtime.GOARCH, "GOARCH")
 	flag.StringVar(&goos, "goos", runtime.GOOS, "GOOS")
 	flag.StringVar(&gocc, "cc", "", "CC")
@@ -61,12 +63,14 @@ func main() {
 	flag.StringVar(&pkgArch, "pkg-arch", "", "PKG ARCH")
 	flag.StringVar(&phjsToRelease, "phjs", "", "PhantomJS binary")
 	flag.BoolVar(&race, "race", race, "Use race detector")
-	flag.BoolVar(&includeBuildNumber, "includeBuildNumber", includeBuildNumber, "IncludeBuildNumber in package name")
+	flag.BoolVar(&includeBuildId, "includeBuildId", includeBuildId, "IncludeBuildId in package name")
 	flag.BoolVar(&enterprise, "enterprise", enterprise, "Build enterprise version of Grafana")
-	flag.IntVar(&buildNumber, "buildNumber", 0, "Build number from CI system")
+	flag.StringVar(&buildIdRaw, "buildId", "0", "Build ID from CI system")
 	flag.BoolVar(&isDev, "dev", isDev, "optimal for development, skips certain steps")
 	flag.Parse()
 
+	buildId = shortenBuildId(buildIdRaw)
+
 	readVersionFromPackageJson()
 
 	if pkgArch == "" {
@@ -197,9 +201,9 @@ func readVersionFromPackageJson() {
 	}
 
 	// add timestamp to iteration
-	if includeBuildNumber {
-		if buildNumber != 0 {
-			linuxPackageIteration = fmt.Sprintf("%d%s", buildNumber, linuxPackageIteration)
+	if includeBuildId {
+		if buildId != "0" {
+			linuxPackageIteration = fmt.Sprintf("%s%s", buildId, linuxPackageIteration)
 		} else {
 			linuxPackageIteration = fmt.Sprintf("%d%s", time.Now().Unix(), linuxPackageIteration)
 		}
@@ -392,7 +396,7 @@ func grunt(params ...string) {
 
 func gruntBuildArg(task string) []string {
 	args := []string{task}
-	if includeBuildNumber {
+	if includeBuildId {
 		args = append(args, fmt.Sprintf("--pkgVer=%v-%v", linuxPackageVersion, linuxPackageIteration))
 	} else {
 		args = append(args, fmt.Sprintf("--pkgVer=%v", version))
@@ -632,3 +636,11 @@ func shaFile(file string) error {
 
 	return out.Close()
 }
+
+func shortenBuildId(buildId string) string {
+	buildId = strings.Replace(buildId, "-", "", -1)
+	if (len(buildId) < 9) {
+		return buildId
+	}
+	return buildId[0:8]
+}
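
The new `shortenBuildId` helper above strips dashes from the CI build identifier and truncates it to eight characters. As a quick illustration (a standalone sketch, not part of the commit), the snippet below reproduces the helper and shows its effect on a couple of hypothetical build IDs.

```go
package main

import (
	"fmt"
	"strings"
)

// shortenBuildId mirrors the helper added in build.go: dashes are removed
// and anything longer than eight characters is truncated.
func shortenBuildId(buildId string) string {
	buildId = strings.Replace(buildId, "-", "", -1)
	if len(buildId) < 9 {
		return buildId
	}
	return buildId[0:8]
}

func main() {
	// Hypothetical CI identifiers, for illustration only.
	fmt.Println(shortenBuildId("1234"))              // "1234" (short IDs pass through)
	fmt.Println(shortenBuildId("build-20181108-42")) // "build201" (dashes stripped, then truncated)
}
```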

+ 1 - 0
conf/defaults.ini

@@ -344,6 +344,7 @@ header_property = username
 auto_sign_up = true
 ldap_sync_ttl = 60
 whitelist =
+headers =
 
 #################################### Auth LDAP ###########################
 [auth.ldap]

+ 1 - 0
conf/sample.ini

@@ -294,6 +294,7 @@ log_queries =
 ;auto_sign_up = true
 ;ldap_sync_ttl = 60
 ;whitelist = 192.168.1.1, 192.168.2.1
+;headers = Email:X-User-Email, Name:X-User-Name
 
 #################################### Basic Auth ##########################
 [auth.basic]
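
The new `headers` option maps Grafana user fields to auth proxy header names, as in the sample value `Email:X-User-Email, Name:X-User-Name`. The sketch below shows one plausible way to parse such a comma-separated mapping; it is an illustration of the format only, not Grafana's actual parser.

```go
package main

import (
	"fmt"
	"strings"
)

// parseHeaderMapping splits a value like "Email:X-User-Email, Name:X-User-Name"
// into a map from Grafana user field to proxy header name. Illustrative sketch;
// the real implementation may differ.
func parseHeaderMapping(value string) map[string]string {
	mapping := map[string]string{}
	for _, pair := range strings.Split(value, ",") {
		parts := strings.SplitN(strings.TrimSpace(pair), ":", 2)
		if len(parts) == 2 {
			mapping[parts[0]] = parts[1]
		}
	}
	return mapping
}

func main() {
	m := parseHeaderMapping("Email:X-User-Email, Name:X-User-Name")
	fmt.Println(m["Email"], m["Name"]) // X-User-Email X-User-Name
}
```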

+ 108 - 2
devenv/dev-dashboards/panel_tests_table.json

@@ -404,6 +404,112 @@
       "title": "Column style thresholds & units",
       "transform": "timeseries_to_columns",
       "type": "table"
+    },
+    {
+      "columns": [],
+      "datasource": "gdev-testdata",
+      "fontSize": "100%",
+      "gridPos": {
+        "h": 10,
+        "w": 24,
+        "x": 0,
+        "y": 26
+      },
+      "id": 6,
+      "links": [],
+      "pageSize": 20,
+      "scroll": true,
+      "showHeader": true,
+      "sort": {
+        "col": 0,
+        "desc": true
+      },
+      "styles": [
+        {
+          "alias": "Time",
+          "dateFormat": "YYYY-MM-DD HH:mm:ss",
+          "pattern": "Time",
+          "type": "date"
+        },
+        {
+          "alias": "",
+          "colorMode": "cell",
+          "colors": [
+            "rgba(245, 54, 54, 0.5)",
+            "rgba(237, 129, 40, 0.5)",
+            "rgba(50, 172, 45, 0.5)"
+          ],
+          "dateFormat": "YYYY-MM-DD HH:mm:ss",
+          "decimals": 2,
+          "link": true,
+          "linkTargetBlank": true,
+          "linkTooltip": "",
+          "linkUrl": "http://www.grafana.com",
+          "mappingType": 1,
+          "pattern": "ColorCell",
+          "thresholds": [
+            "5",
+            "10"
+          ],
+          "type": "number",
+          "unit": "currencyUSD"
+        },
+        {
+          "alias": "",
+          "colorMode": "value",
+          "colors": [
+            "rgba(245, 54, 54, 0.5)",
+            "rgba(237, 129, 40, 0.5)",
+            "rgba(50, 172, 45, 0.5)"
+          ],
+          "dateFormat": "YYYY-MM-DD HH:mm:ss",
+          "decimals": 2,
+          "link": true,
+          "linkUrl": "http://www.grafana.com",
+          "mappingType": 1,
+          "pattern": "ColorValue",
+          "thresholds": [
+            "5",
+            "10"
+          ],
+          "type": "number",
+          "unit": "Bps"
+        },
+        {
+          "alias": "",
+          "colorMode": null,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "decimals": 2,
+          "pattern": "/.*/",
+          "thresholds": [],
+          "type": "number",
+          "unit": "short"
+        }
+      ],
+      "targets": [
+        {
+          "alias": "ColorValue",
+          "expr": "",
+          "format": "table",
+          "intervalFactor": 1,
+          "refId": "A",
+          "scenarioId": "csv_metric_values",
+          "stringInput": "null,1,20,90,30,5,0,20,10"
+        },
+        {
+          "alias": "ColorCell",
+          "refId": "B",
+          "scenarioId": "csv_metric_values",
+          "stringInput": "null,5,1,2,3,4,5,10,20"
+        }
+      ],
+      "title": "Column style thresholds and links",
+      "transform": "timeseries_to_columns",
+      "type": "table"
     }
   ],
   "refresh": false,
@@ -449,5 +555,5 @@
   "timezone": "browser",
   "title": "Panel Tests - Table",
   "uid": "pttable",
-  "version": 1
-}
+  "version": 2
+}

+ 0 - 116
docs/sources/administration/permissions.md

@@ -1,116 +0,0 @@
-+++
-title = "Permissions"
-description = "Grafana user permissions"
-keywords = ["grafana", "configuration", "documentation", "admin", "users", "permissions"]
-type = "docs"
-aliases = ["/reference/admin"]
-[menu.docs]
-name = "Permissions"
-parent = "admin"
-weight = 3
-+++
-
-# Permissions
-
-Grafana users have permissions that are determined by their:
-
-- **Organization Role** (Admin, Editor, Viewer)
-- Via **Team** memberships where the **Team** has been assigned specific permissions.
-- Via permissions assigned directly to user (on folders or dashboards)
-- The Grafana Admin (i.e. Super Admin) user flag.
-
-## Organization Roles
-
-Users can be belong to one or more organizations. A user's organization membership is tied to a role that defines what the user is allowed to do
-in that organization.
-
-### Admin Role
-
-Can do everything scoped to the organization. For example:
-
-- Add & Edit data sources.
-- Add & Edit organization users & teams.
-- Configure App plugins & set org settings.
-
-### Editor Role
-
-- Can create and modify dashboards & alert rules. This can be disabled on specific folders and dashboards.
-- **Cannot** create or edit data sources nor invite new users.
-
-### Viewer Role
-
-- View any dashboard. This can be disabled on specific folders and dashboards.
-- **Cannot** create or edit dashboards nor data sources.
-
-This role can be tweaked via Grafana server setting [viewers_can_edit]({{< relref "installation/configuration.md#viewers-can-edit" >}}). If you set this to true users
-with **Viewer** can also make transient dashboard edits, meaning they can modify panels & queries but not save the changes (nor create new dashboards).
-Useful for public Grafana installations where you want anonymous users to be able to edit panels & queries but not save or create new dashboards.
-
-## Grafana Admin
-
-This admin flag makes a user a `Super Admin`. This means they can access the `Server Admin` views where all users and organizations can be administrated.
-
-### Dashboard & Folder Permissions
-
-{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}
-
-For dashboards and dashboard folders there is a **Permissions** page that make it possible to
-remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.
-
-You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**.
-
-Permission levels:
-
-- **Admin**: Can edit & create dashboards and edit permissions.
-- **Edit**: Can edit & create dashboards. **Cannot** edit folder/dashboard permissions.
-- **View**: Can only view existing dashboards/folders.
-
-#### Restricting Access
-
-The highest permission always wins so if you for example want to hide a folder or dashboard from others you need to remove the **Organization Role** based permission from the Access Control List (ACL).
-
-- You cannot override permissions for users with the **Org Admin Role**. Admins always have access to everything.
-- A more specific permission with a lower permission level will not have any effect if a more general rule exists with higher permission level. You need to remove or lower the permission level of the more general rule.
-
-#### How Grafana Resolves Multiple Permissions - Examples
-
-##### Example 1 (`user1` has the Editor Role)
-
-Permissions for a dashboard:
-
-- `Everyone with Editor Role Can Edit`
-- `user1 Can View`
-
-Result: `user1` has Edit permission as the highest permission always wins.
-
-##### Example 2 (`user1` has the Viewer Role and is a member of `team1`)
-
-Permissions for a dashboard:
-
-- `Everyone with Viewer Role Can View`
-- `user1 Can Edit`
-- `team1 Can Admin`
-
-Result: `user1` has Admin permission as the highest permission always wins.
-
-##### Example 3
-
-Permissions for a dashboard:
-
-- `user1 Can Admin (inherited from parent folder)`
-- `user1 Can Edit`
-
-Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins.
-
-- **View**: Can only view existing dashboards/folders.
-- You cannot override permissions for users with **Org Admin Role**
-- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
-
-### Data source permissions
-
-Permissions on dashboards and folders **do not** include permissions on data sources. A user with `Viewer` role
-can still issue any possible query to a data source, not just those queries that exist on dashboards he/she has access to.
-We hope to add permissions on data sources in a future release. Until then **do not** view dashboard permissions as a secure
-way to restrict user data access. Dashboard permissions only limits what dashboards & folders a user can view & edit not which
-data sources a user can access nor what queries a user can issue.
-

+ 1 - 1
docs/sources/administration/provisioning.md

@@ -156,7 +156,7 @@ Since not all datasources have the same configuration settings we only have the
 | tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
 | graphiteVersion | string | Graphite |  Graphite version  |
 | timeInterval | string | Prometheus, Elasticsearch, InfluxDB, MySQL, PostgreSQL & MSSQL | Lowest interval/step value that should be used for this data source |
-| esVersion | number | Elasticsearch | Elasticsearch version as a number (2/5/56) |
+| esVersion | number | Elasticsearch | Elasticsearch version as a number (2/5/56/60) |
 | timeField | string | Elasticsearch | Which field that should be used as timestamp |
 | interval | string | Elasticsearch | Index date time format. nil(No Pattern), 'Hourly', 'Daily', 'Weekly', 'Monthly' or 'Yearly' |
 | authType | string | Cloudwatch | Auth provider. keys/credentials/arn |

+ 43 - 0
docs/sources/auth/enhanced_ldap.md

@@ -0,0 +1,43 @@
++++
+title = "Enhanced LDAP Integration"
+description = "Grafana Enhanced LDAP Integration Guide "
+keywords = ["grafana", "configuration", "documentation", "ldap", "active directory", "enterprise"]
+type = "docs"
+[menu.docs]
+name = "Enhanced LDAP"
+identifier = "enhanced-ldap"
+parent = "authentication"
+weight = 3
++++
+
+# Enhanced LDAP Integration
+
+> Enhanced LDAP Integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise/index.md" >}}).
+
+The enhanced LDAP integration adds additional functionality on top of the [existing LDAP integration]({{< relref "auth/ldap.md" >}}).
+
+## LDAP Group Synchronization for Teams
+
+{{< docs-imagebox img="/img/docs/enterprise/team_members_ldap.png" class="docs-image--no-shadow docs-image--right" max-width= "600px" >}}
+
+With the enhanced LDAP integration it's possible to setup synchronization between LDAP groups and teams. This enables LDAP users which are members
+of certain LDAP groups to automatically be added/removed as members to certain teams in Grafana. Currently the synchronization will only happen every
+time a user logs in, but an active background synchronization is currently being developed.
+
+Grafana keeps track of all synchronized users in teams and you can see which users have been synchronized from LDAP in the team members list, see `LDAP` label in screenshot.
+This mechanism allows Grafana to remove an existing synchronized user from a team when its LDAP group membership changes. This mechanism also enables you to manually add
+a user as member of a team and it will not be removed when the user signs in. This gives you flexibility to combine LDAP group memberships and Grafana team memberships.
+
+<div class="clearfix"></div>
+
+### Enable LDAP group synchronization for a team
+
+{{< docs-imagebox img="/img/docs/enterprise/team_add_external_group.png" class="docs-image--no-shadow docs-image--right" max-width= "600px" >}}
+
+1. Navigate to Configuration / Teams.
+2. Select a team.
+3. Select the External group sync tab and click on the `Add group` button.
+4. Insert LDAP distinguished name (DN) of LDAP group you want to synchronize with the team.
+5. Click on `Add group` button to save.
+
+<div class="clearfix"></div>

+ 67 - 0
docs/sources/enterprise/index.md

@@ -0,0 +1,67 @@
++++
+title = "Grafana Enterprise"
+description = "Grafana Enterprise overview"
+keywords = ["grafana", "documentation", "datasource", "permissions", "ldap", "licensing", "enterprise"]
+type = "docs"
+[menu.docs]
+name = "Grafana Enterprise"
+identifier = "enterprise"
+weight = 30
++++
+
+# Grafana Enterprise
+
+Grafana Enterprise is a commercial edition of Grafana that includes additional features not found in the open source
+version.
+
+Building on everything you already know and love about Grafana, Grafana Enterprise adds premium data sources,
+advanced authentication options, more permission controls, 24x7x365 support, and training from the core Grafana team.
+
+Grafana Enterprise includes all of the features found in the open source edition and more.
+
+___
+
+### Enhanced LDAP Integration
+
+With Grafana Enterprise you can set up synchronization between LDAP Groups and Teams. [Learn More]({{< relref "auth/enhanced_ldap.md" >}}).
+
+### Datasource Permissions
+
+Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).
+
+### Premium Plugins
+
+With a Grafana Enterprise licence you will get access to premium plugins, including:
+
+* [Splunk](https://grafana.com/plugins/grafana-splunk-datasource)
+* [AppDynamics](https://grafana.com/plugins/dlopes7-appdynamics-datasource)
+* [DataDog](https://grafana.com/plugins/grafana-datadog-datasource)
+* [Dynatrace](https://grafana.com/plugins/grafana-dynatrace-datasource)
+* [New Relic](https://grafana.com/plugins/grafana-newrelic-datasource)
+
+## Try Grafana Enterprise
+
+You can learn more about Grafana Enterprise [here](https://grafana.com/enterprise). To purchase or obtain a trial license contact
+the Grafana Labs [Sales Team](https://grafana.com/contact?about=support&topic=Grafana%20Enterprise).
+
+## License file management
+
+To download your Grafana Enterprise license log in to your [Grafana.com](https://grafana.com) account and go to your **Org
+Profile**. In the side menu there is a section for Grafana Enterprise licenses. At the bottom of the license
+details page there is **Download Token** link that will download the *license.jwt* file containing your license.
+
+Place the *license.jwt* file in Grafana's data folder. This is usually located at `/var/lib/grafana/data` on linux systems.
+
+You can also configure a custom location for the license file via the ini setting:
+
+```bash
+[enterprise]
+license_path = /company/secrets/license.jwt
+```
+
+This setting can also be set via ENV variable which is useful if you're running Grafana via docker and have a custom
+volume where you have placed the license file. In this case set the ENV variable `GF_ENTERPRISE_LICENSE_PATH` to point
+to the location of your license file.
+
+
+

+ 2 - 1
docs/sources/features/datasources/cloudwatch.md

@@ -60,7 +60,8 @@ Here is a minimal policy example:
             "Effect": "Allow",
             "Action": [
                 "cloudwatch:ListMetrics",
-                "cloudwatch:GetMetricStatistics"
+                "cloudwatch:GetMetricStatistics",
+                "cloudwatch:GetMetricData"
             ],
             "Resource": "*"
         },

+ 1 - 1
docs/sources/features/datasources/elasticsearch.md

@@ -59,7 +59,7 @@ a time pattern for the index name or a wildcard.
 ### Elasticsearch version
 
 Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences how queries are composed.
-Currently the versions available is 2.x, 5.x and 5.6+ where 5.6+ means a version of 5.6 or higher, 6.3.2 for example.
+Currently the versions available is 2.x, 5.x, 5.6+ or 6.0+. 5.6+ means a version of 5.6 or less than 6.0. 6.0+ means a version of 6.0 or higher, 6.3.2 for example.
 
 ### Min time interval
 A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example `1m` if your data is written every minute.

+ 3 - 3
docs/sources/guides/whats-new-in-v5-3.md

@@ -18,7 +18,7 @@ Grafana v5.3 brings new features, many enhancements and bug fixes. This article
 - [TV mode]({{< relref "#tv-and-kiosk-mode" >}}) is improved and more accessible
 - [Alerting]({{< relref "#notification-reminders" >}}) with notification reminders
 - [Postgres]({{< relref "#postgres-query-builder" >}}) gets a new query builder!
-- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for Gitlab is improved
+- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for GitLab is improved
 - [Annotations]({{< relref "#annotations" >}}) with template variable filtering
 - [Variables]({{< relref "#variables" >}}) with free text support
 
@@ -69,9 +69,9 @@ Grafana 5.3 comes with a new graphical query builder for Postgres. This brings P
 
 {{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}}
 
-## Improved OAuth Support for Gitlab
+## Improved OAuth Support for GitLab
 
-Grafana 5.3 comes with a new OAuth integration for Gitlab that enables configuration to only allow users that are a member of certain Gitlab groups to authenticate. This makes it possible to use Gitlab OAuth with Grafana in a shared environment without giving everyone access to Grafana.
+Grafana 5.3 comes with a new OAuth integration for GitLab that enables configuration to only allow users that are a member of certain GitLab groups to authenticate. This makes it possible to use GitLab OAuth with Grafana in a shared environment without giving everyone access to Grafana.
 Learn how to enable and configure it in the [documentation](/auth/gitlab/).
 
 ## Annotations

+ 249 - 0
docs/sources/http_api/datasource_permissions.md

@@ -0,0 +1,249 @@
++++
+title = "Datasource Permissions HTTP API "
+description = "Grafana Datasource Permissions HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "datasource", "permission", "permissions", "acl", "enterprise"]
+aliases = ["/http_api/datasourcepermissions/"]
+type = "docs"
+[menu.docs]
+name = "Datasource Permissions"
+parent = "http_api"
++++
+
+# Datasource Permissions API
+
+> Datasource Permissions is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise/index.md" >}}).
+
+This API can be used to enable, disable, list, add and remove permissions for a datasource.
+
+Permissions can be set for a user or a team. Permissions cannot be set for Admins - they always have access to everything.
+
+The permission levels for the permission field:
+
+- 1 = Query
+
+## Enable permissions for a datasource
+
+`POST /api/datasources/:id/enable-permissions`
+
+Enables permissions for the datasource with the given `id`. No one except Org Admins will be able to query the datasource until permissions have been added which permit certain users or teams to query the datasource.
+
+**Example request**:
+
+```http
+POST /api/datasources/1/enable-permissions
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{}
+```
+
+**Example response**:
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 35
+
+{"message":"Datasource permissions enabled"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **400** - Permissions cannot be enabled, see response body for details
+- **401** - Unauthorized
+- **403** - Access denied
+- **404** - Datasource not found
+
+## Disable permissions for a datasource
+
+`POST /api/datasources/:id/disable-permissions`
+
+Disables permissions for the datasource with the given `id`. All existing permissions will be removed and anyone will be able to query the datasource.
+
+**Example request**:
+
+```http
+POST /api/datasources/1/disable-permissions
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{}
+```
+
+**Example response**:
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 35
+
+{"message":"Datasource permissions disabled"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **400** - Permissions cannot be disabled, see response body for details
+- **401** - Unauthorized
+- **403** - Access denied
+- **404** - Datasource not found
+
+## Get permissions for a datasource
+
+`GET /api/datasources/:id/permissions`
+
+Gets all existing permissions for the datasource with the given `id`.
+
+**Example request**:
+
+```http
+GET /api/datasources/1/permissions HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 551
+
+{
+  "datasourceId": 1,
+  "enabled": true,
+  "permissions":
+  [
+    {
+      "id": 1,
+      "datasourceId": 1,
+      "userId": 1,
+      "userLogin": "user",
+      "userEmail": "user@test.com",
+      "userAvatarUrl": "/avatar/46d229b033af06a191ff2267bca9ae56",
+      "permission": 1,
+      "permissionName": "Query",
+      "created": "2017-06-20T02:00:00+02:00",
+      "updated": "2017-06-20T02:00:00+02:00",
+    },
+    {
+      "id": 2,
+      "datasourceId": 1,
+      "teamId": 1,
+      "team": "A Team",
+      "teamAvatarUrl": "/avatar/46d229b033af06a191ff2267bca9ae56",
+      "permission": 1,
+      "permissionName": "Query",
+      "created": "2017-06-20T02:00:00+02:00",
+      "updated": "2017-06-20T02:00:00+02:00",
+    }
+  ]
+}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **401** - Unauthorized
+- **403** - Access denied
+- **404** - Datasource not found
+
+## Add permission for a datasource
+
+`POST /api/datasources/:id/permissions`
+
+Adds a user permission for the datasource with the given `id`.
+
+**Example request**:
+
+```http
+POST /api/datasources/1/permissions
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+  "userId": 1,
+  "permission": 1
+}
+```
+
+**Example response**:
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 35
+
+{"message":"Datasource permission added"}
+```
+
+Adds a team permission for the datasource with the given `id`.
+
+**Example request**:
+
+```http
+POST /api/datasources/1/permissions
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+  "teamId": 1,
+  "permission": 1
+}
+```
+
+**Example response**:
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 35
+
+{"message":"Datasource permission added"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **400** - Permission cannot be added, see response body for details
+- **401** - Unauthorized
+- **403** - Access denied
+- **404** - Datasource not found
+
+## Remove permission for a datasource
+
+`DELETE /api/datasources/:id/permissions/:permissionId`
+
+Removes the permission with the given `permissionId` for the datasource with the given `id`.
+
+**Example request**:
+
+```http
+DELETE /api/datasources/1/permissions/2
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example response**:
+
+```http
+HTTP/1.1 200 OK
+Content-Type: application/json; charset=UTF-8
+Content-Length: 35
+
+{"message":"Datasource permission removed"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **401** - Unauthorized
+- **403** - Access denied
+- **404** - Datasource not found or permission not found
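
For reference, a minimal Go client for the endpoints documented above might look like the sketch below, which grants query permission to a user via `POST /api/datasources/:id/permissions`. The base URL and API key are placeholders, and error handling is kept to a minimum.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Placeholder values for illustration; substitute your own Grafana URL and API key.
	url := "http://localhost:3000/api/datasources/1/permissions"
	body := []byte(`{"userId": 1, "permission": 1}`) // 1 = Query

	req, err := http.NewRequest("POST", url, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer eyJrIjoi...") // truncated API key placeholder

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect 200 OK; 400/401/403/404 on failure per the docs above
}
```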

+ 111 - 0
docs/sources/http_api/external_group_sync.md

@@ -0,0 +1,111 @@
++++
+title = "External Group Sync HTTP API "
+description = "Grafana External Group Sync HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "team", "teams", "group", "member", "enterprise"]
+aliases = ["/http_api/external_group_sync/"]
+type = "docs"
+[menu.docs]
+name = "External Group Sync"
+parent = "http_api"
++++
+
+# External Group Synchronization API
+
+> External Group Synchronization is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise/index.md" >}}).
+
+## Get External Groups
+
+`GET /api/teams/:teamId/groups`
+
+**Example Request**:
+
+```http
+GET /api/teams/1/groups HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Basic YWRtaW46YWRtaW4=
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+[
+  {
+    "orgId": 1,
+    "teamId": 1,
+    "groupId": "cn=editors,ou=groups,dc=grafana,dc=org"
+  }
+]
+```
+
+Status Codes:
+
+- **200** - Ok
+- **401** - Unauthorized
+- **403** - Permission denied
+
+## Add External Group
+
+`POST /api/teams/:teamId/groups`
+
+**Example Request**:
+
+```http
+POST /api/teams/1/members HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Basic YWRtaW46YWRtaW4=
+
+{
+  "groupId": "cn=editors,ou=groups,dc=grafana,dc=org"
+}
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{"message":"Group added to Team"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **400** - Group is already added to this team
+- **401** - Unauthorized
+- **403** - Permission denied
+- **404** - Team not found
+
+## Remove External Group
+
+`DELETE /api/teams/:teamId/groups/:groupId`
+
+**Example Request**:
+
+```http
+DELETE /api/teams/1/groups/cn=editors,ou=groups,dc=grafana,dc=org HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Basic YWRtaW46YWRtaW4=
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{"message":"Team Group removed"}
+```
+
+Status Codes:
+
+- **200** - Ok
+- **401** - Unauthorized
+- **403** - Permission denied
+- **404** - Team not found/Group not found
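
Likewise, a minimal sketch for linking an LDAP group to a team through `POST /api/teams/:teamId/groups`; the URL, credentials, and group DN are placeholder values taken from the example request above.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// POST /api/teams/:teamId/groups links an LDAP group to team 1.
	body := []byte(`{"groupId": "cn=editors,ou=groups,dc=grafana,dc=org"}`)
	req, _ := http.NewRequest("POST", "http://localhost:3000/api/teams/1/groups", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.SetBasicAuth("admin", "admin") // placeholder credentials, as in the example above

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 200 on success, 400 if the group is already added
}
```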

+ 73 - 0
docs/sources/permissions/dashboard_folder_permissions.md

@@ -0,0 +1,73 @@
++++
+title = "Dashboard & Folder Permissions"
+description = "Grafana Dashboard & Folder Permissions Guide "
+keywords = ["grafana", "configuration", "documentation", "dashboard", "folder", "permissions", "teams"]
+type = "docs"
+[menu.docs]
+name = "Dashboard & Folder"
+identifier = "dashboard-folder-permissions"
+parent = "permissions"
+weight = 3
++++
+
+# Dashboard & Folder Permissions
+
+{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}
+
+For dashboards and dashboard folders there is a **Permissions** page that make it possible to
+remove the default role based permissions for Editors and Viewers. On this page you can add and assign permissions to specific **Users** and **Teams**.
+
+You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**.
+
+Permission levels:
+
+- **Admin**: Can edit & create dashboards and edit permissions.
+- **Edit**: Can edit & create dashboards. **Cannot** edit folder/dashboard permissions.
+- **View**: Can only view existing dashboards/folders.
+
+## Restricting Access
+
+The highest permission always wins so if you for example want to hide a folder or dashboard from others you need to remove the **Organization Role** based permission from the Access Control List (ACL).
+
+- You cannot override permissions for users with the **Org Admin Role**. Admins always have access to everything.
+- A more specific permission with a lower permission level will not have any effect if a more general rule exists with higher permission level. You need to remove or lower the permission level of the more general rule.
+
+### How Grafana Resolves Multiple Permissions - Examples
+
+#### Example 1 (`user1` has the Editor Role)
+
+Permissions for a dashboard:
+
+- `Everyone with Editor Role Can Edit`
+- `user1 Can View`
+
+Result: `user1` has Edit permission as the highest permission always wins.
+
+#### Example 2 (`user1` has the Viewer Role and is a member of `team1`)
+
+Permissions for a dashboard:
+
+- `Everyone with Viewer Role Can View`
+- `user1 Can Edit`
+- `team1 Can Admin`
+
+Result: `user1` has Admin permission as the highest permission always wins.
+
+#### Example 3
+
+Permissions for a dashboard:
+
+- `user1 Can Admin (inherited from parent folder)`
+- `user1 Can Edit`
+
+Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins.
+
+## Summary
+
+- **View**: Can only view existing dashboards/folders.
+- You cannot override permissions for users with **Org Admin Role**
+- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level.
+
+For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
+- You cannot override permissions for users with **Org Admin Role**
+- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
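
The "highest permission wins" rule described in this document reduces to taking the maximum over all ACL entries that apply to a user. The sketch below models that resolution for Example 2 above; the numeric levels are illustrative, not necessarily Grafana's internal representation.

```go
package main

import "fmt"

// Permission levels, ordered so that a higher value wins.
const (
	View  = 1
	Edit  = 2
	Admin = 4
)

// resolve returns the highest permission among all ACL entries that apply
// to the user (role-based, team-based, or user-specific). Illustrative only.
func resolve(applicable []int) int {
	highest := 0
	for _, p := range applicable {
		if p > highest {
			highest = p
		}
	}
	return highest
}

func main() {
	// Example 2 above: Viewer role (View), user1 Can Edit, team1 Can Admin.
	fmt.Println(resolve([]int{View, Edit, Admin})) // 4 (Admin) – the highest permission wins
}
```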

+ 71 - 0
docs/sources/permissions/datasource_permissions.md

@@ -0,0 +1,71 @@
++++
+title = "Datasource Permissions"
+description = "Grafana Datasource Permissions Guide "
+keywords = ["grafana", "configuration", "documentation", "datasource", "permissions", "users", "teams", "enterprise"]
+type = "docs"
+[menu.docs]
+name = "Datasource"
+identifier = "datasource-permissions"
+parent = "permissions"
+weight = 4
++++
+
+# Datasource Permissions
+
+> Datasource Permissions is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise/index.md" >}}).
+
+Datasource permissions allows you to restrict access for users to query a datasource. For each datasource there is
+a permission page that makes it possible to enable permissions and restrict query permissions to specific
+**Users** and **Teams**.
+
+## Restricting Access - Enable Permissions
+
+{{< docs-imagebox img="/img/docs/enterprise/datasource_permissions_enable_still.png" class="docs-image--no-shadow docs-image--right" max-width= "600px" animated-gif="/img/docs/enterprise/datasource_permissions_enable.gif" >}}
+
+By default, permissions are disabled for datasources and a datasource in an organization can be queried by any user in
+that organization. For example a user with `Viewer` role can still issue any possible query to a datasource, not just
+those queries that exist on dashboards he/she has access to.
+
+When permissions are enabled for a datasource in an organization you will restrict admin and query access for that
+datasource to [admin users](/permissions/organization_roles/#admin-role) in that organization.
+
+**To enable permissions for a datasource:**
+
+1. Navigate to Configuration / Data Sources.
+2. Select the datasource you want to enable permissions for.
+3. Select the Permissions tab and click on the `Enable` button.
+
+<div class="clearfix"></div>
+
+## Allow users and teams to query a datasource
+
+{{< docs-imagebox img="/img/docs/enterprise/datasource_permissions_add_still.png" class="docs-image--no-shadow docs-image--right" max-width= "600px" animated-gif="/img/docs/enterprise/datasource_permissions_add.gif" >}}
+
+After you have [enabled permissions](#restricting-access-enable-permissions) for a datasource you can assign query
+permissions to users and teams which will allow access to query the datasource.
+
+**Assign query permission to users and teams:**
+
+1. Navigate to Configuration / Data Sources.
+2. Select the datasource you want to assign query permissions for.
+3. Select the Permissions tab.
+4. click on the `Add Permission` button.
+5. Select Team/User and find the team/user you want to allow query access and click on the `Save` button.
+
+<div class="clearfix"></div>
+
+## Restore Default Access - Disable Permissions
+
+{{< docs-imagebox img="/img/docs/enterprise/datasource_permissions_disable_still.png" class="docs-image--no-shadow docs-image--right" max-width= "600px" animated-gif="/img/docs/enterprise/datasource_permissions_disable.gif" >}}
+
+If you have enabled permissions for a datasource and want to return datasource permissions to the default, i.e.
+datasource can be queried by any user in that organization, you can disable permissions with a click of a button.
+Note that all existing permissions created for datasource will be deleted.
+
+**To disable permissions for a datasource:**
+
+1. Navigate to Configuration / Data Sources.
+2. Select the datasource you want to disable permissions for.
+3. Select the Permissions tab and click on the `Disable Permissions` button.
+
+<div class="clearfix"></div>

+ 12 - 0
docs/sources/permissions/index.md

@@ -0,0 +1,12 @@
++++
+title = "Permissions"
+description = "Permissions"
+type = "docs"
+[menu.docs]
+name = "Permissions"
+identifier = "permissions"
+parent = "admin"
+weight = 3
++++
+
+

+ 38 - 0
docs/sources/permissions/organization_roles.md

@@ -0,0 +1,38 @@
++++
+title = "Organization Roles"
+description = "Grafana Organization Roles Guide "
+keywords = ["grafana", "configuration", "documentation", "organization", "roles", "permissions"]
+type = "docs"
+[menu.docs]
+name = "Organization Roles"
+identifier = "organization-roles"
+parent = "permissions"
+weight = 2
++++
+
+# Organization Roles
+
+Users can be belong to one or more organizations. A user's organization membership is tied to a role that defines what the user is allowed to do
+in that organization.
+
+## Admin Role
+
+Can do everything scoped to the organization. For example:
+
+- Add & Edit data sources.
+- Add & Edit organization users & teams.
+- Configure App plugins & set org settings.
+
+## Editor Role
+
+- Can create and modify dashboards & alert rules. This can be disabled on specific folders and dashboards.
+- **Cannot** create or edit data sources nor invite new users.
+
+## Viewer Role
+
+- View any dashboard. This can be disabled on specific folders and dashboards.
+- **Cannot** create or edit dashboards nor data sources.
+
+This role can be tweaked via Grafana server setting [viewers_can_edit]({{< relref "installation/configuration.md#viewers-can-edit" >}}). If you set this to true users
+with **Viewer** can also make transient dashboard edits, meaning they can modify panels & queries but not save the changes (nor create new dashboards).
+Useful for public Grafana installations where you want anonymous users to be able to edit panels & queries but not save or create new dashboards.

+ 42 - 0
docs/sources/permissions/overview.md

@@ -0,0 +1,42 @@
++++
+title = "Overview"
+description = "Overview for permissions"
+keywords = ["grafana", "configuration", "documentation", "admin", "users", "datasources", "permissions"]
+type = "docs"
+aliases = ["/reference/admin", "/administration/permissions/"]
+[menu.docs]
+name = "Overview"
+identifier = "overview-permissions"
+parent = "permissions"
+weight = 1
++++
+
+# Permissions Overview
+
+Grafana users have permissions that are determined by their:
+
+- **Organization Role** (Admin, Editor, Viewer)
+- Via **Team** memberships where the **Team** has been assigned specific permissions.
+- Via permissions assigned directly to user (on folders, dashboards, datasources)
+- The Grafana Admin (i.e. Super Admin) user flag.
+
+## Grafana Admin
+
+This admin flag makes a user a `Super Admin`. This means they can access the `Server Admin` views where all users and organizations can be administrated.
+
+## Organization Roles
+
+Users can be belong to one or more organizations. A user's organization membership is tied to a role that defines what the user is allowed to do
+in that organization. Learn more about [Organization Roles]({{< relref "permissions/organization_roles.md" >}}).
+
+
+## Dashboard & Folder Permissions
+
+Dashboard and folder permissions allows you to remove the default role based permissions for Editors and Viewers and assign permissions to specific **Users** and **Teams**. Learn more about [Dashboard & Folder Permissions]({{< relref "permissions/dashboard_folder_permissions.md" >}}).
+
+## Datasource Permissions
+
+Per default, a datasource in an organization can be queried by any user in that organization. For example a user with `Viewer` role can still
+issue any possible query to a data source, not just those queries that exist on dashboards he/she has access to.
+
+Datasource permissions allows you to change the default permissions for datasources and restrict query permissions to specific **Users** and **Teams**. Read more about [Datasource Permissions]({{< relref "permissions/datasource_permissions.md" >}}).

+ 1 - 1
docs/sources/reference/scripting.md

@@ -12,7 +12,7 @@ weight = 9
 
 If you have lots of metric names that change (new servers etc) in a defined pattern it is irritating to constantly have to create new dashboards.
 
-With scripted dashboards you can dynamically create your dashboards using javascript. In the folder grafana install folder
+With scripted dashboards you can dynamically create your dashboards using javascript. In the grafana install folder
 under `public/dashboards/` there is a file named `scripted.js`. This file contains an example of a scripted dashboard. You can access it by using the url:
 `http://grafana_url/dashboard/script/scripted.js?rows=3&name=myName`
 

+ 1 - 1
docs/sources/whatsnew/index.md

@@ -3,7 +3,7 @@ title = "What's New in Grafana"
 [menu.docs]
 name = "What's New In Grafana"
 identifier = "whatsnew"
-weight = 3
+weight = 5
 +++
 
 

+ 4 - 0
pkg/api/alerting.go

@@ -134,12 +134,16 @@ func AlertTest(c *m.ReqContext, dto dtos.AlertTestCommand) Response {
 		OrgId:     c.OrgId,
 		Dashboard: dto.Dashboard,
 		PanelId:   dto.PanelId,
+		User:      c.SignedInUser,
 	}
 
 	if err := bus.Dispatch(&backendCmd); err != nil {
 		if validationErr, ok := err.(alerting.ValidationError); ok {
 			return Error(422, validationErr.Error(), nil)
 		}
+		if err == m.ErrDataSourceAccessDenied {
+			return Error(403, "Access denied to datasource", err)
+		}
 		return Error(500, "Failed to test rule", err)
 	}
 

+ 5 - 45
pkg/api/dataproxy.go

@@ -1,62 +1,22 @@
 package api
 
 import (
-	"fmt"
-	"github.com/pkg/errors"
-	"time"
-
 	"github.com/grafana/grafana/pkg/api/pluginproxy"
-	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/metrics"
 	m "github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
 )
 
-const HeaderNameNoBackendCache = "X-Grafana-NoCache"
-
-func (hs *HTTPServer) getDatasourceFromCache(id int64, c *m.ReqContext) (*m.DataSource, error) {
-	userPermissionsQuery := m.GetDataSourcePermissionsForUserQuery{
-		User: c.SignedInUser,
-	}
-	if err := bus.Dispatch(&userPermissionsQuery); err != nil {
-		if err != bus.ErrHandlerNotFound {
-			return nil, err
-		}
-	} else {
-		permissionType, exists := userPermissionsQuery.Result[id]
-		if exists && permissionType != m.DsPermissionQuery {
-			return nil, errors.New("User not allowed to access datasource")
-		}
-	}
-
-	nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true"
-	cacheKey := fmt.Sprintf("ds-%d", id)
-
-	if !nocache {
-		if cached, found := hs.cache.Get(cacheKey); found {
-			ds := cached.(*m.DataSource)
-			if ds.OrgId == c.OrgId {
-				return ds, nil
-			}
-		}
-	}
-
-	query := m.GetDataSourceByIdQuery{Id: id, OrgId: c.OrgId}
-	if err := bus.Dispatch(&query); err != nil {
-		return nil, err
-	}
-
-	hs.cache.Set(cacheKey, query.Result, time.Second*5)
-	return query.Result, nil
-}
-
 func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
 	c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
 
 	dsId := c.ParamsInt64(":id")
-	ds, err := hs.getDatasourceFromCache(dsId, c)
-
+	ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
 	if err != nil {
+		if err == m.ErrDataSourceAccessDenied {
+			c.JsonApiErr(403, "Access denied to datasource", err)
+			return
+		}
 		c.JsonApiErr(500, "Unable to load datasource meta data", err)
 		return
 	}
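
The proxy handler now delegates lookups to the injected `DatasourceCache` service (added in `pkg/services/datasources/cache.go`, whose contents are not shown in this view). The sketch below is a rough model of what such an org-scoped, short-TTL cache looks like, based on the removed `getDatasourceFromCache` logic above; the names and signature are assumptions, not the actual service.

```go
// Illustrative sketch only, modeled on the removed getDatasourceFromCache
// logic (short TTL, "ds-<id>" keys, org check); the real service lives in
// pkg/services/datasources/cache.go and may differ.
package datasources

import (
	"fmt"
	"time"

	gocache "github.com/patrickmn/go-cache"
)

type DataSource struct {
	Id    int64
	OrgId int64
}

type CacheServiceSketch struct {
	cache *gocache.Cache
}

func NewCacheServiceSketch() *CacheServiceSketch {
	return &CacheServiceSketch{cache: gocache.New(5*time.Second, 10*time.Second)}
}

// GetDatasource returns a cached datasource unless skipCache is set (driven
// by the X-Grafana-NoCache header), falling back to loader on a miss.
func (s *CacheServiceSketch) GetDatasource(id, orgId int64, skipCache bool,
	loader func(id, orgId int64) (*DataSource, error)) (*DataSource, error) {

	key := fmt.Sprintf("ds-%d", id)
	if !skipCache {
		if cached, found := s.cache.Get(key); found {
			ds := cached.(*DataSource)
			if ds.OrgId == orgId {
				return ds, nil
			}
		}
	}

	ds, err := loader(id, orgId)
	if err != nil {
		return nil, err
	}
	s.cache.Set(key, ds, 5*time.Second)
	return ds, nil
}
```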

+ 10 - 8
pkg/api/http_server.go

@@ -16,7 +16,6 @@ import (
 
 	"github.com/prometheus/client_golang/prometheus/promhttp"
 
-	gocache "github.com/patrickmn/go-cache"
 	macaron "gopkg.in/macaron.v1"
 
 	"github.com/grafana/grafana/pkg/api/live"
@@ -28,6 +27,8 @@ import (
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/services/cache"
+	"github.com/grafana/grafana/pkg/services/datasources"
 	"github.com/grafana/grafana/pkg/services/hooks"
 	"github.com/grafana/grafana/pkg/services/rendering"
 	"github.com/grafana/grafana/pkg/setting"
@@ -46,19 +47,19 @@ type HTTPServer struct {
 	macaron       *macaron.Macaron
 	context       context.Context
 	streamManager *live.StreamManager
-	cache         *gocache.Cache
 	httpSrv       *http.Server
 
-	RouteRegister routing.RouteRegister `inject:""`
-	Bus           bus.Bus               `inject:""`
-	RenderService rendering.Service     `inject:""`
-	Cfg           *setting.Cfg          `inject:""`
-	HooksService  *hooks.HooksService   `inject:""`
+	RouteRegister   routing.RouteRegister    `inject:""`
+	Bus             bus.Bus                  `inject:""`
+	RenderService   rendering.Service        `inject:""`
+	Cfg             *setting.Cfg             `inject:""`
+	HooksService    *hooks.HooksService      `inject:""`
+	CacheService    *cache.CacheService      `inject:""`
+	DatasourceCache datasources.CacheService `inject:""`
 }
 
 func (hs *HTTPServer) Init() error {
 	hs.log = log.New("http.server")
-	hs.cache = gocache.New(5*time.Minute, 10*time.Minute)
 
 	hs.streamManager = live.NewStreamManager()
 	hs.macaron = hs.newMacaron()
@@ -231,6 +232,7 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
 		m.Use(middleware.ValidateHostHeader(setting.Domain))
 	}
 
+	m.Use(middleware.HandleNoCacheHeader())
 	m.Use(middleware.AddDefaultResponseHeaders())
 }
 

+ 4 - 1
pkg/api/metrics.go

@@ -25,8 +25,11 @@ func (hs *HTTPServer) QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) R
 		return Error(400, "Query missing datasourceId", nil)
 	}
 
-	ds, err := hs.getDatasourceFromCache(datasourceId, c)
+	ds, err := hs.DatasourceCache.GetDatasource(datasourceId, c.SignedInUser, c.SkipCache)
 	if err != nil {
+		if err == m.ErrDataSourceAccessDenied {
+			return Error(403, "Access denied to datasource", err)
+		}
 		return Error(500, "Unable to load datasource meta data", err)
 	}
 

+ 13 - 8
pkg/cmd/grafana-server/server.go

@@ -15,13 +15,21 @@ import (
 	"github.com/grafana/grafana/pkg/api"
 	"github.com/grafana/grafana/pkg/api/routing"
 	"github.com/grafana/grafana/pkg/bus"
-	_ "github.com/grafana/grafana/pkg/extensions"
-	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/login"
-	_ "github.com/grafana/grafana/pkg/metrics"
 	"github.com/grafana/grafana/pkg/middleware"
-	_ "github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/social"
+
+	"golang.org/x/sync/errgroup"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/services/cache"
+	"github.com/grafana/grafana/pkg/setting"
+
+	// self registering services
+	_ "github.com/grafana/grafana/pkg/extensions"
+	_ "github.com/grafana/grafana/pkg/metrics"
+	_ "github.com/grafana/grafana/pkg/plugins"
 	_ "github.com/grafana/grafana/pkg/services/alerting"
 	_ "github.com/grafana/grafana/pkg/services/cleanup"
 	_ "github.com/grafana/grafana/pkg/services/notifications"
@@ -29,10 +37,7 @@ import (
 	_ "github.com/grafana/grafana/pkg/services/rendering"
 	_ "github.com/grafana/grafana/pkg/services/search"
 	_ "github.com/grafana/grafana/pkg/services/sqlstore"
-	"github.com/grafana/grafana/pkg/setting"
-	"github.com/grafana/grafana/pkg/social" // self registering services
 	_ "github.com/grafana/grafana/pkg/tracing"
-	"golang.org/x/sync/errgroup"
 )
 
 func NewGrafanaServer() *GrafanaServerImpl {
@@ -72,6 +77,7 @@ func (g *GrafanaServerImpl) Run() error {
 	serviceGraph.Provide(&inject.Object{Value: bus.GetBus()})
 	serviceGraph.Provide(&inject.Object{Value: g.cfg})
 	serviceGraph.Provide(&inject.Object{Value: routing.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)})
+	serviceGraph.Provide(&inject.Object{Value: cache.New(5*time.Minute, 10*time.Minute)})
 
 	// self registered services
 	services := registry.GetServices()
@@ -138,7 +144,6 @@ func (g *GrafanaServerImpl) Run() error {
 	}
 
 	sendSystemdNotification("READY=1")
-
 	return g.childRoutines.Wait()
 }
 

+ 1 - 0
pkg/login/auth.go

@@ -2,6 +2,7 @@ package login
 
 import (
 	"errors"
+
 	"github.com/grafana/grafana/pkg/bus"
 	m "github.com/grafana/grafana/pkg/models"
 )

+ 14 - 0
pkg/middleware/headers.go

@@ -0,0 +1,14 @@
+package middleware
+
+import (
+	m "github.com/grafana/grafana/pkg/models"
+	macaron "gopkg.in/macaron.v1"
+)
+
+const HeaderNameNoBackendCache = "X-Grafana-NoCache"
+
+func HandleNoCacheHeader() macaron.Handler {
+	return func(ctx *m.ReqContext) {
+		ctx.SkipCache = ctx.Req.Header.Get(HeaderNameNoBackendCache) == "true"
+	}
+}
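
The middleware above maps the X-Grafana-NoCache request header onto ReqContext.SkipCache, which the datasource cache introduced further down honours. A minimal sketch of a client forcing a cache bypass — the URL, datasource id, query path and API key are placeholders, not part of this change:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Proxy path follows Grafana's /api/datasources/proxy/:id route; id 1 and the
	// Prometheus-style query path are just examples.
	req, err := http.NewRequest("GET", "http://localhost:3000/api/datasources/proxy/1/api/v1/query?query=up", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Grafana-NoCache", "true") // sets ReqContext.SkipCache for this request
	req.Header.Set("Authorization", "Bearer <api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}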

+ 1 - 0
pkg/middleware/middleware.go

@@ -29,6 +29,7 @@ func GetContextHandler() macaron.Handler {
 			Session:        session.GetSession(),
 			IsSignedIn:     false,
 			AllowAnonymous: false,
+			SkipCache:      false,
 			Logger:         log.New("context"),
 		}
 

+ 2 - 1
pkg/models/alert.go

@@ -215,13 +215,14 @@ type AlertStateInfoDTO struct {
 // "Internal" commands
 
 type UpdateDashboardAlertsCommand struct {
-	UserId    int64
 	OrgId     int64
 	Dashboard *Dashboard
+	User      *SignedInUser
 }
 
 type ValidateDashboardAlertsCommand struct {
 	UserId    int64
 	OrgId     int64
 	Dashboard *Dashboard
+	User      *SignedInUser
 }

+ 1 - 0
pkg/models/context.go

@@ -20,6 +20,7 @@ type ReqContext struct {
 	IsSignedIn     bool
 	IsRenderCall   bool
 	AllowAnonymous bool
+	SkipCache      bool
 	Logger         log.Logger
 }
 

+ 0 - 5
pkg/models/datasource.go

@@ -207,11 +207,6 @@ func (p DsPermissionType) String() string {
 	return names[int(p)]
 }
 
-type GetDataSourcePermissionsForUserQuery struct {
-	User   *SignedInUser
-	Result map[int64]DsPermissionType
-}
-
 type DatasourcesPermissionFilterQuery struct {
 	User        *SignedInUser
 	Datasources []*DataSource

+ 1 - 0
pkg/models/user.go

@@ -165,6 +165,7 @@ type SignedInUser struct {
 	IsAnonymous    bool
 	HelpFlags1     HelpFlags1
 	LastSeenAt     time.Time
+	Teams          []int64
 }
 
 func (u *SignedInUser) ShouldUpdateLastSeenAt() bool {

+ 34 - 3
pkg/registry/registry.go

@@ -29,11 +29,42 @@ func Register(descriptor *Descriptor) {
 }
 
 func GetServices() []*Descriptor {
-	sort.Slice(services, func(i, j int) bool {
-		return services[i].InitPriority > services[j].InitPriority
+	slice := getServicesWithOverrides()
+
+	sort.Slice(slice, func(i, j int) bool {
+		return slice[i].InitPriority > slice[j].InitPriority
 	})
 
-	return services
+	return slice
+}
+
+type OverrideServiceFunc func(descriptor Descriptor) (*Descriptor, bool)
+
+var overrides []OverrideServiceFunc
+
+func RegisterOverride(fn OverrideServiceFunc) {
+	overrides = append(overrides, fn)
+}
+
+func getServicesWithOverrides() []*Descriptor {
+	slice := []*Descriptor{}
+	for _, s := range services {
+		var descriptor *Descriptor
+		for _, fn := range overrides {
+			if newDescriptor, override := fn(*s); override {
+				descriptor = newDescriptor
+				break
+			}
+		}
+
+		if descriptor != nil {
+			slice = append(slice, descriptor)
+		} else {
+			slice = append(slice, s)
+		}
+	}
+
+	return slice
 }
 
 // Service interface is the lowest common shape that services
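
GetServices now runs every registered descriptor through the override functions before sorting, so a build can swap out a stock service without touching its registration site. A rough sketch of an override, assuming a hypothetical replacement type that satisfies the same service interface as the original:

package extensions

import "github.com/grafana/grafana/pkg/registry"

// fancyDatasourceCache is a hypothetical stand-in; a real override must implement
// the same interfaces (registry.Service plus whatever its callers expect).
type fancyDatasourceCache struct{}

func (s *fancyDatasourceCache) Init() error { return nil }

func init() {
	registry.RegisterOverride(func(d registry.Descriptor) (*registry.Descriptor, bool) {
		if d.Name != "DatasourceCacheService" {
			return nil, false // leave every other service untouched
		}
		return &registry.Descriptor{
			Name:         d.Name,
			Instance:     &fancyDatasourceCache{},
			InitPriority: d.InitPriority,
		}, true
	})
}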

+ 3 - 3
pkg/services/alerting/commands.go

@@ -11,7 +11,7 @@ func init() {
 }
 
 func validateDashboardAlerts(cmd *m.ValidateDashboardAlertsCommand) error {
-	extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId)
+	extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId, cmd.User)
 
 	return extractor.ValidateAlerts()
 }
@@ -19,11 +19,11 @@ func validateDashboardAlerts(cmd *m.ValidateDashboardAlertsCommand) error {
 func updateDashboardAlerts(cmd *m.UpdateDashboardAlertsCommand) error {
 	saveAlerts := m.SaveAlertsCommand{
 		OrgId:       cmd.OrgId,
-		UserId:      cmd.UserId,
+		UserId:      cmd.User.UserId,
 		DashboardId: cmd.Dashboard.Id,
 	}
 
-	extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId)
+	extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId, cmd.User)
 
 	alerts, err := extractor.GetAlerts()
 	if err != nil {

+ 18 - 0
pkg/services/alerting/conditions/reducer_test.go

@@ -52,6 +52,24 @@ func TestSimpleReducer(t *testing.T) {
 			So(result, ShouldEqual, float64(1))
 		})
 
+		Convey("median should ignore null values", func() {
+			reducer := NewSimpleReducer("median")
+			series := &tsdb.TimeSeries{
+				Name: "test time series",
+			}
+
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(1)), 4))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(2)), 5))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(3)), 6))
+
+			result := reducer.Reduce(series)
+			So(result.Valid, ShouldEqual, true)
+			So(result.Float64, ShouldEqual, float64(2))
+		})
+
 		Convey("avg", func() {
 			result := testReducer("avg", 1, 2, 3)
 			So(result, ShouldEqual, float64(2))

+ 18 - 1
pkg/services/alerting/extractor.go

@@ -13,14 +13,16 @@ import (
 
 // DashAlertExtractor extracts alerts from the dashboard json
 type DashAlertExtractor struct {
+	User  *m.SignedInUser
 	Dash  *m.Dashboard
 	OrgID int64
 	log   log.Logger
 }
 
 // NewDashAlertExtractor returns a new DashAlertExtractor
-func NewDashAlertExtractor(dash *m.Dashboard, orgID int64) *DashAlertExtractor {
+func NewDashAlertExtractor(dash *m.Dashboard, orgID int64, user *m.SignedInUser) *DashAlertExtractor {
 	return &DashAlertExtractor{
+		User:  user,
 		Dash:  dash,
 		OrgID: orgID,
 		log:   log.New("alerting.extractor"),
@@ -149,6 +151,21 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json,
 				return nil, ValidationError{Reason: fmt.Sprintf("Data source used by alert rule not found, alertName=%v, datasource=%s", alert.Name, dsName)}
 			}
 
+			dsFilterQuery := m.DatasourcesPermissionFilterQuery{
+				User:        e.User,
+				Datasources: []*m.DataSource{datasource},
+			}
+
+			if err := bus.Dispatch(&dsFilterQuery); err != nil {
+				if err != bus.ErrHandlerNotFound {
+					return nil, err
+				}
+			} else {
+				if len(dsFilterQuery.Result) == 0 {
+					return nil, m.ErrDataSourceAccessDenied
+				}
+			}
+
 			jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id)
 
 			if interval, err := panel.Get("interval").String(); err == nil {
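
The extractor now runs the alert's datasource through DatasourcesPermissionFilterQuery and rejects the rule with ErrDataSourceAccessDenied if the filter returns nothing; when no handler is registered the bus returns ErrHandlerNotFound and the datasource is allowed, so behaviour without a permission backend is unchanged. A rough sketch of what such a handler could look like — canQuery is an assumed helper, and Result is assumed to hold the datasources the user may use:

package dspermissions

import (
	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

// canQuery stands in for a real permission lookup; this trivial version only
// checks that the datasource belongs to the user's current org.
func canQuery(user *m.SignedInUser, ds *m.DataSource) bool {
	return user != nil && user.OrgId == ds.OrgId
}

func init() {
	bus.AddHandler("dspermissions", func(query *m.DatasourcesPermissionFilterQuery) error {
		for _, ds := range query.Datasources {
			if canQuery(query.User, ds) {
				query.Result = append(query.Result, ds)
			}
		}
		return nil
	})
}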

+ 8 - 8
pkg/services/alerting/extractor_test.go

@@ -69,7 +69,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 				So(getTarget(dashJson), ShouldEqual, "")
 			})
 
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 			_, _ = extractor.GetAlerts()
 
 			Convey("Dashboard json should not be updated after extracting rules", func() {
@@ -83,7 +83,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			So(err, ShouldBeNil)
 
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			alerts, err := extractor.GetAlerts()
 
@@ -146,7 +146,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			dashJson, err := simplejson.NewJson(panelWithoutId)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			_, err = extractor.GetAlerts()
 
@@ -162,7 +162,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			dashJson, err := simplejson.NewJson(panelWithIdZero)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			_, err = extractor.GetAlerts()
 
@@ -178,7 +178,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			dashJson, err := simplejson.NewJson(json)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			alerts, err := extractor.GetAlerts()
 
@@ -198,7 +198,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			dashJson, err := simplejson.NewJson(json)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			alerts, err := extractor.GetAlerts()
 
@@ -228,7 +228,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			So(err, ShouldBeNil)
 
 			dash := m.NewDashboardFromJson(dashJson)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			alerts, err := extractor.GetAlerts()
 
@@ -248,7 +248,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			dashJSON, err := simplejson.NewJson(json)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJSON)
-			extractor := NewDashAlertExtractor(dash, 1)
+			extractor := NewDashAlertExtractor(dash, 1, nil)
 
 			err = extractor.ValidateAlerts()
 

+ 51 - 0
pkg/services/alerting/notifiers/slack.go

@@ -39,6 +39,39 @@ func init() {
           Override default channel or user, use #channel-name or @username
         </info-popover>
       </div>
+      <div class="gf-form max-width-30">
+        <span class="gf-form-label width-6">Username</span>
+        <input type="text"
+          class="gf-form-input max-width-30"
+          ng-model="ctrl.model.settings.username"
+          data-placement="right">
+        </input>
+        <info-popover mode="right-absolute">
+          Set the username for the bot's message
+        </info-popover>
+      </div>
+      <div class="gf-form max-width-30">
+        <span class="gf-form-label width-6">Icon emoji</span>
+        <input type="text"
+          class="gf-form-input max-width-30"
+          ng-model="ctrl.model.settings.icon_emoji"
+          data-placement="right">
+        </input>
+        <info-popover mode="right-absolute">
+          Provide an emoji to use as the icon for the bot's message. Overrides the icon URL
+        </info-popover>
+      </div>
+      <div class="gf-form max-width-30">
+        <span class="gf-form-label width-6">Icon URL</span>
+        <input type="text"
+          class="gf-form-input max-width-30"
+          ng-model="ctrl.model.settings.icon_url"
+          data-placement="right">
+        </input>
+        <info-popover mode="right-absolute">
+          Provide a URL to an image to use as the icon for the bot's message
+        </info-popover>
+      </div>
       <div class="gf-form max-width-30">
         <span class="gf-form-label width-6">Mention</span>
         <input type="text"
@@ -73,6 +106,9 @@ func NewSlackNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
 	}
 
 	recipient := model.Settings.Get("recipient").MustString()
+	username := model.Settings.Get("username").MustString()
+	iconEmoji := model.Settings.Get("icon_emoji").MustString()
+	iconUrl := model.Settings.Get("icon_url").MustString()
 	mention := model.Settings.Get("mention").MustString()
 	token := model.Settings.Get("token").MustString()
 	uploadImage := model.Settings.Get("uploadImage").MustBool(true)
@@ -81,6 +117,9 @@ func NewSlackNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
 		NotifierBase: NewNotifierBase(model),
 		Url:          url,
 		Recipient:    recipient,
+		Username:     username,
+		IconEmoji:    iconEmoji,
+		IconUrl:      iconUrl,
 		Mention:      mention,
 		Token:        token,
 		Upload:       uploadImage,
@@ -92,6 +131,9 @@ type SlackNotifier struct {
 	NotifierBase
 	Url       string
 	Recipient string
+	Username  string
+	IconEmoji string
+	IconUrl   string
 	Mention   string
 	Token     string
 	Upload    bool
@@ -160,6 +202,15 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error {
 	if this.Recipient != "" {
 		body["channel"] = this.Recipient
 	}
+	if this.Username != "" {
+		body["username"] = this.Username
+	}
+	if this.IconEmoji != "" {
+		body["icon_emoji"] = this.IconEmoji
+	}
+	if this.IconUrl != "" {
+		body["icon_url"] = this.IconUrl
+	}
 	data, _ := json.Marshal(&body)
 	cmd := &m.SendWebhookSync{Url: this.Url, Body: string(data)}
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {

+ 10 - 1
pkg/services/alerting/notifiers/slack_test.go

@@ -47,15 +47,21 @@ func TestSlackNotifier(t *testing.T) {
 				So(slackNotifier.Type, ShouldEqual, "slack")
 				So(slackNotifier.Url, ShouldEqual, "http://google.com")
 				So(slackNotifier.Recipient, ShouldEqual, "")
+				So(slackNotifier.Username, ShouldEqual, "")
+				So(slackNotifier.IconEmoji, ShouldEqual, "")
+				So(slackNotifier.IconUrl, ShouldEqual, "")
 				So(slackNotifier.Mention, ShouldEqual, "")
 				So(slackNotifier.Token, ShouldEqual, "")
 			})
 
-			Convey("from settings with Recipient, Mention, and Token", func() {
+			Convey("from settings with Recipient, Username, IconEmoji, IconUrl, Mention, and Token", func() {
 				json := `
 				{
           "url": "http://google.com",
           "recipient": "#ds-opentsdb",
+          "username": "Grafana Alerts",
+          "icon_emoji": ":smile:",
+          "icon_url": "https://grafana.com/img/fav32.png",
           "mention": "@carl",
           "token": "xoxb-XXXXXXXX-XXXXXXXX-XXXXXXXXXX"
 				}`
@@ -75,6 +81,9 @@ func TestSlackNotifier(t *testing.T) {
 				So(slackNotifier.Type, ShouldEqual, "slack")
 				So(slackNotifier.Url, ShouldEqual, "http://google.com")
 				So(slackNotifier.Recipient, ShouldEqual, "#ds-opentsdb")
+				So(slackNotifier.Username, ShouldEqual, "Grafana Alerts")
+				So(slackNotifier.IconEmoji, ShouldEqual, ":smile:")
+				So(slackNotifier.IconUrl, ShouldEqual, "https://grafana.com/img/fav32.png")
 				So(slackNotifier.Mention, ShouldEqual, "@carl")
 				So(slackNotifier.Token, ShouldEqual, "xoxb-XXXXXXXX-XXXXXXXX-XXXXXXXXXX")
 			})

+ 2 - 1
pkg/services/alerting/test_rule.go

@@ -13,6 +13,7 @@ type AlertTestCommand struct {
 	Dashboard *simplejson.Json
 	PanelId   int64
 	OrgId     int64
+	User      *m.SignedInUser
 
 	Result *EvalContext
 }
@@ -25,7 +26,7 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error {
 
 	dash := m.NewDashboardFromJson(cmd.Dashboard)
 
-	extractor := NewDashAlertExtractor(dash, cmd.OrgId)
+	extractor := NewDashAlertExtractor(dash, cmd.OrgId, cmd.User)
 	alerts, err := extractor.GetAlerts()
 	if err != nil {
 		return err

+ 17 - 0
pkg/services/cache/cache.go

@@ -0,0 +1,17 @@
+package cache
+
+import (
+	"time"
+
+	gocache "github.com/patrickmn/go-cache"
+)
+
+type CacheService struct {
+	*gocache.Cache
+}
+
+func New(defaultExpiration, cleanupInterval time.Duration) *CacheService {
+	return &CacheService{
+		Cache: gocache.New(defaultExpiration, cleanupInterval),
+	}
+}
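
CacheService is a thin wrapper around patrickmn/go-cache, so it inherits the familiar Get/Set API; the instance provided in server.go (5 minute default expiration, 10 minute cleanup) is what gets injected into SqlStore and the datasource cache below. A small standalone sketch of the inherited API:

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/services/cache"
)

func main() {
	cs := cache.New(5*time.Minute, 10*time.Minute)

	// Each Set call can override the default expiration.
	cs.Set("ds-1", "cached value", 5*time.Second)

	if v, found := cs.Get("ds-1"); found {
		fmt.Println(v.(string)) // values come back as interface{} and need a type assertion
	}
}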

+ 2 - 1
pkg/services/dashboards/dashboard_service.go

@@ -90,6 +90,7 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO,
 		validateAlertsCmd := models.ValidateDashboardAlertsCommand{
 			OrgId:     dto.OrgId,
 			Dashboard: dash,
+			User:      dto.User,
 		}
 
 		if err := bus.Dispatch(&validateAlertsCmd); err != nil {
@@ -159,8 +160,8 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO,
 func (dr *dashboardServiceImpl) updateAlerting(cmd *models.SaveDashboardCommand, dto *SaveDashboardDTO) error {
 	alertCmd := models.UpdateDashboardAlertsCommand{
 		OrgId:     dto.OrgId,
-		UserId:    dto.User.UserId,
 		Dashboard: cmd.Result,
+		User:      dto.User,
 	}
 
 	if err := bus.Dispatch(&alertCmd); err != nil {

+ 53 - 0
pkg/services/datasources/cache.go

@@ -0,0 +1,53 @@
+package datasources
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/grafana/grafana/pkg/bus"
+	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/services/cache"
+)
+
+type CacheService interface {
+	GetDatasource(datasourceID int64, user *m.SignedInUser, skipCache bool) (*m.DataSource, error)
+}
+
+type CacheServiceImpl struct {
+	Bus          bus.Bus             `inject:""`
+	CacheService *cache.CacheService `inject:""`
+}
+
+func init() {
+	registry.Register(&registry.Descriptor{
+		Name:         "DatasourceCacheService",
+		Instance:     &CacheServiceImpl{},
+		InitPriority: registry.Low,
+	})
+}
+
+func (dc *CacheServiceImpl) Init() error {
+	return nil
+}
+
+func (dc *CacheServiceImpl) GetDatasource(datasourceID int64, user *m.SignedInUser, skipCache bool) (*m.DataSource, error) {
+	cacheKey := fmt.Sprintf("ds-%d", datasourceID)
+
+	if !skipCache {
+		if cached, found := dc.CacheService.Get(cacheKey); found {
+			ds := cached.(*m.DataSource)
+			if ds.OrgId == user.OrgId {
+				return ds, nil
+			}
+		}
+	}
+
+	query := m.GetDataSourceByIdQuery{Id: datasourceID, OrgId: user.OrgId}
+	if err := dc.Bus.Dispatch(&query); err != nil {
+		return nil, err
+	}
+
+	dc.CacheService.Set(cacheKey, query.Result, time.Second*5)
+	return query.Result, nil
+}

+ 18 - 4
pkg/services/sqlstore/dashboard.go

@@ -327,20 +327,34 @@ func DeleteDashboard(cmd *m.DeleteDashboardCommand) error {
 		if dashboard.IsFolder {
 			deletes = append(deletes, "DELETE FROM dashboard_provisioning WHERE dashboard_id in (select id from dashboard where folder_id = ?)")
 			deletes = append(deletes, "DELETE FROM dashboard WHERE folder_id = ?")
-		}
-
-		for _, sql := range deletes {
-			_, err := sess.Exec(sql, dashboard.Id)
 
+			dashIds := []struct {
+				Id int64
+			}{}
+			err := sess.SQL("select id from dashboard where folder_id = ?", dashboard.Id).Find(&dashIds)
 			if err != nil {
 				return err
 			}
+
+			for _, id := range dashIds {
+				if err := deleteAlertDefinition(id.Id, sess); err != nil {
+					return err
+				}
+			}
 		}
 
 		if err := deleteAlertDefinition(dashboard.Id, sess); err != nil {
 			return nil
 		}
 
+		for _, sql := range deletes {
+			_, err := sess.Exec(sql, dashboard.Id)
+
+			if err != nil {
+				return err
+			}
+		}
+
 		return nil
 	})
 }

+ 8 - 3
pkg/services/sqlstore/sqlstore.go

@@ -16,6 +16,7 @@ import (
 	m "github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/services/annotations"
+	"github.com/grafana/grafana/pkg/services/cache"
 	"github.com/grafana/grafana/pkg/services/sqlstore/migrations"
 	"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
 	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
@@ -47,8 +48,9 @@ func init() {
 }
 
 type SqlStore struct {
-	Cfg *setting.Cfg `inject:""`
-	Bus bus.Bus      `inject:""`
+	Cfg          *setting.Cfg        `inject:""`
+	Bus          bus.Bus             `inject:""`
+	CacheService *cache.CacheService `inject:""`
 
 	dbCfg           DatabaseConfig
 	engine          *xorm.Engine
@@ -148,9 +150,11 @@ func (ss *SqlStore) Init() error {
 
 	// Init repo instances
 	annotations.SetRepository(&SqlAnnotationRepo{})
-
 	ss.Bus.SetTransactionManager(ss)
 
+	// Register handlers
+	ss.addUserQueryAndCommandHandlers()
+
 	// ensure admin user
 	if ss.skipEnsureAdmin {
 		return nil
@@ -322,6 +326,7 @@ func InitTestDB(t *testing.T) *SqlStore {
 	sqlstore := &SqlStore{}
 	sqlstore.skipEnsureAdmin = true
 	sqlstore.Bus = bus.New()
+	sqlstore.CacheService = cache.New(5*time.Minute, 10*time.Minute)
 
 	dbType := migrator.SQLITE
 

+ 30 - 3
pkg/services/sqlstore/user.go

@@ -15,8 +15,9 @@ import (
 	"github.com/grafana/grafana/pkg/util"
 )
 
-func init() {
-	//bus.AddHandler("sql", CreateUser)
+func (ss *SqlStore) addUserQueryAndCommandHandlers() {
+	ss.Bus.AddHandler(ss.GetSignedInUserWithCache)
+
 	bus.AddHandler("sql", GetUserById)
 	bus.AddHandler("sql", UpdateUser)
 	bus.AddHandler("sql", ChangeUserPassword)
@@ -25,7 +26,6 @@ func init() {
 	bus.AddHandler("sql", SetUsingOrg)
 	bus.AddHandler("sql", UpdateUserLastSeenAt)
 	bus.AddHandler("sql", GetUserProfile)
-	bus.AddHandler("sql", GetSignedInUser)
 	bus.AddHandler("sql", SearchUsers)
 	bus.AddHandler("sql", GetUserOrgList)
 	bus.AddHandler("sql", DeleteUser)
@@ -345,6 +345,22 @@ func GetUserOrgList(query *m.GetUserOrgListQuery) error {
 	return err
 }
 
+func (ss *SqlStore) GetSignedInUserWithCache(query *m.GetSignedInUserQuery) error {
+	cacheKey := fmt.Sprintf("signed-in-user-%d-%d", query.UserId, query.OrgId)
+	if cached, found := ss.CacheService.Get(cacheKey); found {
+		query.Result = cached.(*m.SignedInUser)
+		return nil
+	}
+
+	err := GetSignedInUser(query)
+	if err != nil {
+		return err
+	}
+
+	ss.CacheService.Set(cacheKey, query.Result, time.Second*5)
+	return nil
+}
+
 func GetSignedInUser(query *m.GetSignedInUserQuery) error {
 	orgId := "u.org_id"
 	if query.OrgId > 0 {
@@ -389,6 +405,17 @@ func GetSignedInUser(query *m.GetSignedInUserQuery) error {
 		user.OrgName = "Org missing"
 	}
 
+	getTeamsByUserQuery := &m.GetTeamsByUserQuery{OrgId: user.OrgId, UserId: user.UserId}
+	err = GetTeamsByUser(getTeamsByUserQuery)
+	if err != nil {
+		return err
+	}
+
+	user.Teams = make([]int64, len(getTeamsByUserQuery.Result))
+	for i, t := range getTeamsByUserQuery.Result {
+		user.Teams[i] = t.Id
+	}
+
 	query.Result = &user
 	return err
 }
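
GetSignedInUserQuery is now answered by GetSignedInUserWithCache, which keys the result on user id and org id and keeps it for five seconds, and the resolved user carries its team ids. A rough sketch of a caller going through the bus — it assumes the sqlstore service has been initialized so the handler is registered:

package example

import (
	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

func lookupSignedInUser(userID, orgID int64) (*m.SignedInUser, error) {
	query := m.GetSignedInUserQuery{UserId: userID, OrgId: orgID}
	if err := bus.Dispatch(&query); err != nil {
		return nil, err
	}
	// Repeated lookups within ~5s are served from the cache; Teams is populated either way.
	return query.Result, nil
}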

+ 2 - 0
pkg/tsdb/cloudwatch/metric_find_query.go

@@ -46,6 +46,7 @@ func init() {
 		"AWS/Billing":        {"EstimatedCharges"},
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
+		"AWS/Connect":        {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
 		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
 		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
 		"AWS/DynamoDB":       {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "TimeToLiveDeletedItemCount", "ThrottledRequests", "UserErrors", "WriteThrottleEvents"},
@@ -120,6 +121,7 @@ func init() {
 		"AWS/Billing":          {"ServiceName", "LinkedAccount", "Currency"},
 		"AWS/CloudFront":       {"DistributionId", "Region"},
 		"AWS/CloudSearch":      {},
+		"AWS/Connect":          {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
 		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 		"AWS/DX":               {"ConnectionId"},
 		"AWS/DynamoDB":         {"TableName", "GlobalSecondaryIndexName", "Operation", "StreamLabel"},

+ 18 - 7
pkg/tsdb/elasticsearch/client/search_request.go

@@ -112,7 +112,7 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder {
 
 // Agg initiate and returns a new aggregation builder
 func (b *SearchRequestBuilder) Agg() AggBuilder {
-	aggBuilder := newAggBuilder()
+	aggBuilder := newAggBuilder(b.version)
 	b.aggBuilders = append(b.aggBuilders, aggBuilder)
 	return aggBuilder
 }
@@ -275,11 +275,13 @@ type AggBuilder interface {
 type aggBuilderImpl struct {
 	AggBuilder
 	aggDefs []*aggDef
+	version int
 }
 
-func newAggBuilder() *aggBuilderImpl {
+func newAggBuilder(version int) *aggBuilderImpl {
 	return &aggBuilderImpl{
 		aggDefs: make([]*aggDef, 0),
+		version: version,
 	}
 }
 
@@ -317,7 +319,7 @@ func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b
 	})
 
 	if fn != nil {
-		builder := newAggBuilder()
+		builder := newAggBuilder(b.version)
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -337,7 +339,7 @@ func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogr
 	})
 
 	if fn != nil {
-		builder := newAggBuilder()
+		builder := newAggBuilder(b.version)
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -347,6 +349,8 @@ func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogr
 	return b
 }
 
+const termsOrderTerm = "_term"
+
 func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b AggBuilder)) AggBuilder {
 	innerAgg := &TermsAggregation{
 		Field: field,
@@ -358,11 +362,18 @@ func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b
 	})
 
 	if fn != nil {
-		builder := newAggBuilder()
+		builder := newAggBuilder(b.version)
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
 
+	if b.version >= 60 && len(innerAgg.Order) > 0 {
+		if orderBy, exists := innerAgg.Order[termsOrderTerm]; exists {
+			innerAgg.Order["_key"] = orderBy
+			delete(innerAgg.Order, termsOrderTerm)
+		}
+	}
+
 	b.aggDefs = append(b.aggDefs, aggDef)
 
 	return b
@@ -377,7 +388,7 @@ func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b Ag
 		Aggregation: innerAgg,
 	})
 	if fn != nil {
-		builder := newAggBuilder()
+		builder := newAggBuilder(b.version)
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -398,7 +409,7 @@ func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAg
 	})
 
 	if fn != nil {
-		builder := newAggBuilder()
+		builder := newAggBuilder(b.version)
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
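
Because the builder now knows the datasource version, a terms aggregation ordered by "_term" is rewritten to order by "_key" when the version is 6.0 or later (Elasticsearch 6 removed the _term sort key), and is left untouched for older versions. A brief sketch of the builder call the rewrite applies to — the import path and the way the AggBuilder is obtained are assumptions based on how the tests use the client package:

package example

import es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"

// addHostTerms assumes b came from a SearchRequestBuilder created for version 60 (ES 6.x).
func addHostTerms(b es.AggBuilder) {
	b.Terms("2", "@host", func(a *es.TermsAggregation, inner es.AggBuilder) {
		// Requested as "_term"; serialized as {"order": {"_key": "asc"}} against ES >= 6.0.
		a.Order = map[string]interface{}{"_term": "asc"}
		inner.DateHistogram("3", "@timestamp", nil)
	})
}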

+ 54 - 0
pkg/tsdb/elasticsearch/time_series_query_test.go

@@ -127,6 +127,60 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			So(avgAgg.Aggregation.Type, ShouldEqual, "avg")
 		})
 
+		Convey("With term agg and order by term", func() {
+			c := newFakeClient(5)
+			_, err := executeTsdbQuery(c, `{
+				"timeField": "@timestamp",
+				"bucketAggs": [
+					{
+						"type": "terms",
+						"field": "@host",
+						"id": "2",
+						"settings": { "size": "5", "order": "asc", "orderBy": "_term"	}
+					},
+					{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
+				],
+				"metrics": [
+					{"type": "count", "id": "1" },
+					{"type": "avg", "field": "@value", "id": "5" }
+				]
+			}`, from, to, 15*time.Second)
+			So(err, ShouldBeNil)
+			sr := c.multisearchRequests[0].Requests[0]
+
+			firstLevel := sr.Aggs[0]
+			So(firstLevel.Key, ShouldEqual, "2")
+			termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
+			So(termsAgg.Order["_term"], ShouldEqual, "asc")
+		})
+
+		Convey("With term agg and order by term with es6.x", func() {
+			c := newFakeClient(60)
+			_, err := executeTsdbQuery(c, `{
+				"timeField": "@timestamp",
+				"bucketAggs": [
+					{
+						"type": "terms",
+						"field": "@host",
+						"id": "2",
+						"settings": { "size": "5", "order": "asc", "orderBy": "_term"	}
+					},
+					{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
+				],
+				"metrics": [
+					{"type": "count", "id": "1" },
+					{"type": "avg", "field": "@value", "id": "5" }
+				]
+			}`, from, to, 15*time.Second)
+			So(err, ShouldBeNil)
+			sr := c.multisearchRequests[0].Requests[0]
+
+			firstLevel := sr.Aggs[0]
+			So(firstLevel.Key, ShouldEqual, "2")
+			termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
+			So(termsAgg.Order["_key"], ShouldEqual, "asc")
+		})
+
 		Convey("With metric percentiles", func() {
 			c := newFakeClient(5)
 			_, err := executeTsdbQuery(c, `{

+ 8 - 3
public/app/core/components/Switch/Switch.tsx

@@ -5,6 +5,7 @@ export interface Props {
   label: string;
   checked: boolean;
   labelClass?: string;
+  small?: boolean;
   switchClass?: string;
   onChange: (event) => any;
 }
@@ -24,10 +25,14 @@ export class Switch extends PureComponent<Props, State> {
   };
 
   render() {
-    const { labelClass, switchClass, label, checked } = this.props;
+    const { labelClass = '', switchClass = '', label, checked, small } = this.props;
     const labelId = `check-${this.state.id}`;
-    const labelClassName = `gf-form-label ${labelClass} pointer`;
-    const switchClassName = `gf-form-switch ${switchClass}`;
+    let labelClassName = `gf-form-label ${labelClass} pointer`;
+    let switchClassName = `gf-form-switch ${switchClass}`;
+    if (small) {
+      labelClassName += ' gf-form-label--small';
+      switchClassName += ' gf-form-switch--small';
+    }
 
     return (
       <div className="gf-form">

+ 1 - 1
public/app/core/components/form_dropdown/form_dropdown.ts

@@ -88,7 +88,7 @@ export class FormDropdownCtrl {
       if (evt.keyCode === 13) {
         setTimeout(() => {
           this.inputElement.blur();
-        }, 100);
+        }, 300);
       }
     });
 

+ 4 - 4
public/app/core/controllers/json_editor_ctrl.ts

@@ -4,13 +4,13 @@ import coreModule from '../core_module';
 export class JsonEditorCtrl {
   /** @ngInject */
   constructor($scope) {
-    $scope.json = angular.toJson($scope.object, true);
-    $scope.canUpdate = $scope.updateHandler !== void 0 && $scope.contextSrv.isEditor;
-    $scope.canCopy = $scope.enableCopy;
+    $scope.json = angular.toJson($scope.model.object, true);
+    $scope.canUpdate = $scope.model.updateHandler !== void 0 && $scope.contextSrv.isEditor;
+    $scope.canCopy = $scope.model.enableCopy;
 
     $scope.update = () => {
       const newObject = angular.fromJson($scope.json);
-      $scope.updateHandler(newObject, $scope.object);
+      $scope.model.updateHandler(newObject, $scope.model.object);
     };
 
     $scope.getContentForClipboard = () => $scope.json;

+ 71 - 12
public/app/core/logs_model.ts

@@ -1,4 +1,6 @@
 import _ from 'lodash';
+import { TimeSeries } from 'app/core/core';
+import colors from 'app/core/utils/colors';
 
 export enum LogLevel {
   crit = 'crit',
@@ -8,8 +10,20 @@ export enum LogLevel {
   info = 'info',
   debug = 'debug',
   trace = 'trace',
+  none = 'none',
 }
 
+export const LogLevelColor = {
+  [LogLevel.crit]: colors[7],
+  [LogLevel.warn]: colors[1],
+  [LogLevel.err]: colors[4],
+  [LogLevel.error]: colors[4],
+  [LogLevel.info]: colors[0],
+  [LogLevel.debug]: colors[3],
+  [LogLevel.trace]: colors[3],
+  [LogLevel.none]: '#eee',
+};
+
 export interface LogSearchMatch {
   start: number;
   length: number;
@@ -17,27 +31,72 @@ export interface LogSearchMatch {
 }
 
 export interface LogRow {
-  key: string;
   entry: string;
+  key: string; // timestamp + labels
+  labels: string;
   logLevel: LogLevel;
-  timestamp: string;
+  searchWords?: string[];
+  timestamp: string; // ISO with nanosec precision
   timeFromNow: string;
+  timeEpochMs: number;
   timeLocal: string;
-  searchWords?: string[];
+  uniqueLabels?: string;
+}
+
+export interface LogsMetaItem {
+  label: string;
+  value: string;
 }
 
 export interface LogsModel {
+  meta?: LogsMetaItem[];
   rows: LogRow[];
+  series?: TimeSeries[];
+}
+
+export interface LogsStream {
+  labels: string;
+  entries: LogsStreamEntry[];
+  search?: string;
+  parsedLabels?: LogsStreamLabels;
+  uniqueLabels?: string;
 }
 
-export function mergeStreams(streams: LogsModel[], limit?: number): LogsModel {
-  const combinedEntries = streams.reduce((acc, stream) => {
-    return [...acc, ...stream.rows];
+export interface LogsStreamEntry {
+  line: string;
+  timestamp: string;
+}
+
+export interface LogsStreamLabels {
+  [key: string]: string;
+}
+
+export function makeSeriesForLogs(rows: LogRow[], intervalMs: number): TimeSeries[] {
+  // Graph time series by log level
+  const seriesByLevel = {};
+  rows.forEach(row => {
+    if (!seriesByLevel[row.logLevel]) {
+      seriesByLevel[row.logLevel] = { lastTs: null, datapoints: [], alias: row.logLevel };
+    }
+    const levelSeries = seriesByLevel[row.logLevel];
+
+    // Bucket timestamps to the nearest 10x interval
+    const time = Math.round(row.timeEpochMs / intervalMs / 10) * intervalMs * 10;
+    // Entry for time
+    if (time === levelSeries.lastTs) {
+      levelSeries.datapoints[levelSeries.datapoints.length - 1][0]++;
+    } else {
+      levelSeries.datapoints.push([1, time]);
+      levelSeries.lastTs = time;
+    }
+  });
+
+  return Object.keys(seriesByLevel).reduce((acc, level) => {
+    if (seriesByLevel[level]) {
+      const gs = new TimeSeries(seriesByLevel[level]);
+      gs.setColor(LogLevelColor[level]);
+      acc.push(gs);
+    }
+    return acc;
   }, []);
-  const sortedEntries = _.chain(combinedEntries)
-    .sortBy('timestamp')
-    .reverse()
-    .slice(0, limit || combinedEntries.length)
-    .value();
-  return { rows: sortedEntries };
 }

+ 3 - 1
public/app/core/reducers/location.ts

@@ -23,7 +23,9 @@ export const locationReducer = (state = initialState, action: Action): LocationS
       return {
         url: renderUrl(path || state.path, query),
         path: path || state.path,
-        query: query,
+        query: {
+          ...query,
+        },
         routeParams: routeParams || state.routeParams,
       };
     }

+ 1 - 1
public/app/core/services/bridge_srv.ts

@@ -4,7 +4,7 @@ import { store } from 'app/store/configureStore';
 import locationUtil from 'app/core/utils/location_util';
 import { updateLocation } from 'app/core/actions';
 
-// Services that handles angular -> mobx store sync & other react <-> angular sync
+// Service that handles angular -> redux store sync & other react <-> angular sync
 export class BridgeSrv {
   private fullPageReloadRoutes;
 

+ 10 - 10
public/app/core/utils/colors.ts

@@ -10,16 +10,16 @@ export const NO_DATA_COLOR = 'rgba(150, 150, 150, 1)';
 export const REGION_FILL_ALPHA = 0.09;
 
 const colors = [
-  '#7EB26D',
-  '#EAB839',
-  '#6ED0E0',
-  '#EF843C',
-  '#E24D42',
-  '#1F78C1',
-  '#BA43A9',
-  '#705DA0',
-  '#508642',
-  '#CCA300',
+  '#7EB26D', // 0: pale green
+  '#EAB839', // 1: mustard
+  '#6ED0E0', // 2: light blue
+  '#EF843C', // 3: orange
+  '#E24D42', // 4: red
+  '#1F78C1', // 5: ocean
+  '#BA43A9', // 6: purple
+  '#705DA0', // 7: violet
+  '#508642', // 8: dark green
+  '#CCA300', // 9: dark sand
   '#447EBC',
   '#C15C17',
   '#890F02',

+ 8 - 34
public/app/features/dashboard/dashboard_ctrl.ts

@@ -2,13 +2,13 @@
 import config from 'app/core/config';
 import appEvents from 'app/core/app_events';
 import coreModule from 'app/core/core_module';
+import { removePanel } from 'app/features/dashboard/utils/panel';
 
 // Services
 import { AnnotationsSrv } from '../annotations/annotations_srv';
 
 // Types
 import { DashboardModel } from './dashboard_model';
-import { PanelModel } from './panel_model';
 
 export class DashboardCtrl {
   dashboard: DashboardModel;
@@ -19,7 +19,6 @@ export class DashboardCtrl {
   /** @ngInject */
   constructor(
     private $scope,
-    private $rootScope,
     private keybindingSrv,
     private timeSrv,
     private variableSrv,
@@ -112,12 +111,14 @@ export class DashboardCtrl {
   }
 
   showJsonEditor(evt, options) {
-    const editScope = this.$rootScope.$new();
-    editScope.object = options.object;
-    editScope.updateHandler = options.updateHandler;
+    const model = {
+      object: options.object,
+      updateHandler: options.updateHandler,
+    };
+
     this.$scope.appEvent('show-dash-editor', {
       src: 'public/app/partials/edit_json.html',
-      scope: editScope,
+      model: model,
     });
   }
 
@@ -136,34 +137,7 @@ export class DashboardCtrl {
     }
 
     const panelInfo = this.dashboard.getPanelInfoById(options.panelId);
-    this.removePanel(panelInfo.panel, true);
-  }
-
-  removePanel(panel: PanelModel, ask: boolean) {
-    // confirm deletion
-    if (ask !== false) {
-      let text2, confirmText;
-
-      if (panel.alert) {
-        text2 = 'Panel includes an alert rule, removing panel will also remove alert rule';
-        confirmText = 'YES';
-      }
-
-      this.$scope.appEvent('confirm-modal', {
-        title: 'Remove Panel',
-        text: 'Are you sure you want to remove this panel?',
-        text2: text2,
-        icon: 'fa-trash',
-        confirmText: confirmText,
-        yesText: 'Remove',
-        onConfirm: () => {
-          this.removePanel(panel, false);
-        },
-      });
-      return;
-    }
-
-    this.dashboard.removePanel(panel);
+    removePanel(this.dashboard, panelInfo.panel, true);
   }
 
   onDestroy() {

+ 0 - 5
public/app/features/dashboard/dashboard_model.ts

@@ -232,11 +232,6 @@ export class DashboardModel {
     return this.meta.fullscreen && !panel.fullscreen;
   }
 
-  changePanelType(panel: PanelModel, pluginId: string) {
-    panel.changeType(pluginId);
-    this.events.emit('panel-type-changed', panel);
-  }
-
   private ensureListExist(data) {
     if (!data) {
       data = {};

+ 6 - 2
public/app/features/dashboard/dashgrid/DashboardGrid.tsx

@@ -83,7 +83,6 @@ export class DashboardGrid extends React.Component<DashboardGridProps, any> {
     dashboard.on('view-mode-changed', this.onViewModeChanged.bind(this));
     dashboard.on('row-collapsed', this.triggerForceUpdate.bind(this));
     dashboard.on('row-expanded', this.triggerForceUpdate.bind(this));
-    dashboard.on('panel-type-changed', this.triggerForceUpdate.bind(this));
   }
 
   buildLayout() {
@@ -176,7 +175,12 @@ export class DashboardGrid extends React.Component<DashboardGridProps, any> {
       const panelClasses = classNames({ panel: true, 'panel--fullscreen': panel.fullscreen });
       panelElements.push(
         <div key={panel.id.toString()} className={panelClasses} id={`panel-${panel.id}`}>
-          <DashboardPanel panel={panel} dashboard={this.props.dashboard} panelType={panel.type} />
+          <DashboardPanel
+            panel={panel}
+            dashboard={this.props.dashboard}
+            isEditing={panel.isEditing}
+            isFullscreen={panel.fullscreen}
+          />
         </div>
       );
     }

+ 6 - 6
public/app/features/dashboard/dashgrid/DashboardPanel.tsx

@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { PureComponent } from 'react';
 import config from 'app/core/config';
 import { PanelModel } from '../panel_model';
 import { DashboardModel } from '../dashboard_model';
@@ -11,16 +11,17 @@ import { PanelChrome } from './PanelChrome';
 import { PanelEditor } from './PanelEditor';
 
 export interface Props {
-  panelType: string;
   panel: PanelModel;
   dashboard: DashboardModel;
+  isEditing: boolean;
+  isFullscreen: boolean;
 }
 
 export interface State {
   pluginExports: PluginExports;
 }
 
-export class DashboardPanel extends React.Component<Props, State> {
+export class DashboardPanel extends PureComponent<Props, State> {
   element: any;
   angularPanel: AngularComponent;
   pluginInfo: any;
@@ -113,9 +114,8 @@ export class DashboardPanel extends React.Component<Props, State> {
 
   renderReactPanel() {
     const { pluginExports } = this.state;
-    const containerClass = this.props.panel.isEditing ? 'panel-editor-container' : 'panel-height-helper';
-    const panelWrapperClass = this.props.panel.isEditing ? 'panel-editor-container__panel' : 'panel-height-helper';
-
+    const containerClass = this.props.isEditing ? 'panel-editor-container' : 'panel-height-helper';
+    const panelWrapperClass = this.props.isEditing ? 'panel-editor-container__panel' : 'panel-height-helper';
     // this might look strange with these classes that change when edit, but
     // I want to try to keep markup (parents) for panel the same in edit mode to avoide unmount / new mount of panel
     return (

+ 28 - 5
public/app/features/dashboard/dashgrid/PanelChrome.tsx

@@ -5,7 +5,7 @@ import React, { ComponentClass, PureComponent } from 'react';
 import { getTimeSrv } from '../time_srv';
 
 // Components
-import { PanelHeader } from './PanelHeader';
+import { PanelHeader } from './PanelHeader/PanelHeader';
 import { DataPanel } from './DataPanel';
 
 // Types
@@ -21,6 +21,7 @@ export interface Props {
 
 export interface State {
   refreshCounter: number;
+  renderCounter: number;
   timeRange?: TimeRange;
 }
 
@@ -30,11 +31,13 @@ export class PanelChrome extends PureComponent<Props, State> {
 
     this.state = {
       refreshCounter: 0,
+      renderCounter: 0,
     };
   }
 
   componentDidMount() {
     this.props.panel.events.on('refresh', this.onRefresh);
+    this.props.panel.events.on('render', this.onRender);
     this.props.dashboard.panelInitialized(this.props.panel);
   }
 
@@ -46,10 +49,19 @@ export class PanelChrome extends PureComponent<Props, State> {
     const timeSrv = getTimeSrv();
     const timeRange = timeSrv.timeRange();
 
-    this.setState({
+    this.setState(prevState => ({
+      ...prevState,
       refreshCounter: this.state.refreshCounter + 1,
       timeRange: timeRange,
-    });
+    }));
+  };
+
+  onRender = () => {
+    console.log('onRender');
+    this.setState(prevState => ({
+      ...prevState,
+      renderCounter: this.state.renderCounter + 1,
+    }));
   };
 
   get isVisible() {
@@ -58,10 +70,12 @@ export class PanelChrome extends PureComponent<Props, State> {
 
   render() {
     const { panel, dashboard } = this.props;
+    const { refreshCounter, timeRange, renderCounter } = this.state;
+
     const { datasource, targets } = panel;
-    const { refreshCounter, timeRange } = this.state;
     const PanelComponent = this.props.component;
 
+    console.log('panelChrome render');
     return (
       <div className="panel-container">
         <PanelHeader panel={panel} dashboard={dashboard} />
@@ -74,7 +88,16 @@ export class PanelChrome extends PureComponent<Props, State> {
             refreshCounter={refreshCounter}
           >
             {({ loading, timeSeries }) => {
-              return <PanelComponent loading={loading} timeSeries={timeSeries} timeRange={timeRange} />;
+              console.log('panelChrome inner render');
+              return (
+                <PanelComponent
+                  loading={loading}
+                  timeSeries={timeSeries}
+                  timeRange={timeRange}
+                  options={panel.getOptions()}
+                  renderCounter={renderCounter}
+                />
+              );
             }}
           </DataPanel>
         </div>

+ 19 - 10
public/app/features/dashboard/dashgrid/PanelEditor.tsx

@@ -1,13 +1,16 @@
-import React from 'react';
+import React, { PureComponent } from 'react';
 import classNames from 'classnames';
-import { PanelModel } from '../panel_model';
-import { DashboardModel } from '../dashboard_model';
-import { store } from 'app/store/configureStore';
+
 import { QueriesTab } from './QueriesTab';
-import { PanelPlugin, PluginExports } from 'app/types/plugins';
 import { VizTypePicker } from './VizTypePicker';
+
+import { store } from 'app/store/configureStore';
 import { updateLocation } from 'app/core/actions';
 
+import { PanelModel } from '../panel_model';
+import { DashboardModel } from '../dashboard_model';
+import { PanelPlugin, PluginExports } from 'app/types/plugins';
+
 interface PanelEditorProps {
   panel: PanelModel;
   dashboard: DashboardModel;
@@ -22,7 +25,7 @@ interface PanelEditorTab {
   icon: string;
 }
 
-export class PanelEditor extends React.Component<PanelEditorProps, any> {
+export class PanelEditor extends PureComponent<PanelEditorProps> {
   tabs: PanelEditorTab[];
 
   constructor(props) {
@@ -39,16 +42,21 @@ export class PanelEditor extends React.Component<PanelEditorProps, any> {
   }
 
   renderPanelOptions() {
-    const { pluginExports } = this.props;
+    const { pluginExports, panel } = this.props;
 
-    if (pluginExports.PanelOptions) {
-      const PanelOptions = pluginExports.PanelOptions;
-      return <PanelOptions />;
+    if (pluginExports.PanelOptionsComponent) {
+      const OptionsComponent = pluginExports.PanelOptionsComponent;
+      return <OptionsComponent options={panel.getOptions()} onChange={this.onPanelOptionsChanged} />;
     } else {
       return <p>Visualization has no options</p>;
     }
   }
 
+  onPanelOptionsChanged = (options: any) => {
+    this.props.panel.updateOptions(options);
+    this.forceUpdate();
+  };
+
   renderVizTab() {
     return (
       <div className="viz-editor">
@@ -70,6 +78,7 @@ export class PanelEditor extends React.Component<PanelEditorProps, any> {
         partial: true,
       })
     );
+    this.forceUpdate();
   };
 
   render() {

+ 0 - 83
public/app/features/dashboard/dashgrid/PanelHeader.tsx

@@ -1,83 +0,0 @@
-import React from 'react';
-import classNames from 'classnames';
-import { PanelModel } from '../panel_model';
-import { DashboardModel } from '../dashboard_model';
-import { store } from 'app/store/configureStore';
-import { updateLocation } from 'app/core/actions';
-
-interface PanelHeaderProps {
-  panel: PanelModel;
-  dashboard: DashboardModel;
-}
-
-export class PanelHeader extends React.Component<PanelHeaderProps, any> {
-  onEditPanel = () => {
-    store.dispatch(
-      updateLocation({
-        query: {
-          panelId: this.props.panel.id,
-          edit: true,
-          fullscreen: true,
-        },
-      })
-    );
-  };
-
-  onViewPanel = () => {
-    store.dispatch(
-      updateLocation({
-        query: {
-          panelId: this.props.panel.id,
-          edit: false,
-          fullscreen: true,
-        },
-      })
-    );
-  };
-
-  render() {
-    const isFullscreen = false;
-    const isLoading = false;
-    const panelHeaderClass = classNames({ 'panel-header': true, 'grid-drag-handle': !isFullscreen });
-
-    return (
-      <div className={panelHeaderClass}>
-        <span className="panel-info-corner">
-          <i className="fa" />
-          <span className="panel-info-corner-inner" />
-        </span>
-
-        {isLoading && (
-          <span className="panel-loading">
-            <i className="fa fa-spinner fa-spin" />
-          </span>
-        )}
-
-        <div className="panel-title-container">
-          <span className="panel-title">
-            <span className="icon-gf panel-alert-icon" />
-            <span className="panel-title-text">{this.props.panel.title}</span>
-            <span className="panel-menu-container dropdown">
-              <span className="fa fa-caret-down panel-menu-toggle" data-toggle="dropdown" />
-              <ul className="dropdown-menu dropdown-menu--menu panel-menu" role="menu">
-                <li>
-                  <a onClick={this.onEditPanel}>
-                    <i className="fa fa-fw fa-edit" /> Edit
-                  </a>
-                </li>
-                <li>
-                  <a onClick={this.onViewPanel}>
-                    <i className="fa fa-fw fa-eye" /> View
-                  </a>
-                </li>
-              </ul>
-            </span>
-            <span className="panel-time-info">
-              <i className="fa fa-clock-o" /> 4m
-            </span>
-          </span>
-        </div>
-      </div>
-    );
-  }
-}

+ 51 - 0
public/app/features/dashboard/dashgrid/PanelHeader/PanelHeader.tsx

@@ -0,0 +1,51 @@
+import React, { PureComponent } from 'react';
+import classNames from 'classnames';
+
+import { PanelHeaderMenu } from './PanelHeaderMenu';
+
+import { DashboardModel } from 'app/features/dashboard/dashboard_model';
+import { PanelModel } from 'app/features/dashboard/panel_model';
+
+export interface Props {
+  panel: PanelModel;
+  dashboard: DashboardModel;
+}
+
+export class PanelHeader extends PureComponent<Props> {
+  render() {
+    const isFullscreen = false;
+    const isLoading = false;
+    const panelHeaderClass = classNames({ 'panel-header': true, 'grid-drag-handle': !isFullscreen });
+    const { panel, dashboard } = this.props;
+
+    return (
+      <div className={panelHeaderClass}>
+        <span className="panel-info-corner">
+          <i className="fa" />
+          <span className="panel-info-corner-inner" />
+        </span>
+
+        {isLoading && (
+          <span className="panel-loading">
+            <i className="fa fa-spinner fa-spin" />
+          </span>
+        )}
+
+        <div className="panel-title-container">
+          <div className="panel-title">
+            <span className="icon-gf panel-alert-icon" />
+            <span className="panel-title-text" data-toggle="dropdown">
+              {panel.title} <span className="fa fa-caret-down panel-menu-toggle" />
+            </span>
+
+            <PanelHeaderMenu panel={panel} dashboard={dashboard} />
+
+            <span className="panel-time-info">
+              <i className="fa fa-clock-o" /> 4m
+            </span>
+          </div>
+        </div>
+      </div>
+    );
+  }
+}

+ 40 - 0
public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderMenu.tsx

@@ -0,0 +1,40 @@
+import React, { PureComponent } from 'react';
+import { DashboardModel } from 'app/features/dashboard/dashboard_model';
+import { PanelModel } from 'app/features/dashboard/panel_model';
+import { PanelHeaderMenuItem } from './PanelHeaderMenuItem';
+import { getPanelMenu } from 'app/features/dashboard/utils/getPanelMenu';
+import { PanelMenuItem } from 'app/types/panel';
+
+export interface Props {
+  panel: PanelModel;
+  dashboard: DashboardModel;
+}
+
+export class PanelHeaderMenu extends PureComponent<Props> {
+  renderItems = (menu: PanelMenuItem[], isSubMenu = false) => {
+    return (
+      <ul className="dropdown-menu dropdown-menu--menu panel-menu" role={isSubMenu ? '' : 'menu'}>
+        {menu.map((menuItem, idx: number) => {
+          return (
+            <PanelHeaderMenuItem
+              key={`${menuItem.text}${idx}`}
+              type={menuItem.type}
+              text={menuItem.text}
+              iconClassName={menuItem.iconClassName}
+              onClick={menuItem.onClick}
+              shortcut={menuItem.shortcut}
+            >
+              {menuItem.subMenu && this.renderItems(menuItem.subMenu, true)}
+            </PanelHeaderMenuItem>
+          );
+        })}
+      </ul>
+    );
+  };
+
+  render() {
+    const { dashboard, panel } = this.props;
+    const menu = getPanelMenu(dashboard, panel);
+    return <div className="panel-menu-container dropdown">{this.renderItems(menu)}</div>;
+  }
+}

+ 23 - 0
public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderMenuItem.tsx

@@ -0,0 +1,23 @@
+import React, { SFC } from 'react';
+import { PanelMenuItem } from 'app/types/panel';
+
+interface Props {
+  children: any;
+}
+
+export const PanelHeaderMenuItem: SFC<Props & PanelMenuItem> = props => {
+  const isSubMenu = props.type === 'submenu';
+  const isDivider = props.type === 'divider';
+  return isDivider ? (
+    <li className="divider" />
+  ) : (
+    <li className={isSubMenu ? 'dropdown-submenu' : null}>
+      <a onClick={props.onClick}>
+        {props.iconClassName && <i className={props.iconClassName} />}
+        <span className="dropdown-item-text">{props.text}</span>
+        {props.shortcut && <span className="dropdown-menu-item-shortcut">{props.shortcut}</span>}
+      </a>
+      {props.children}
+    </li>
+  );
+};

+ 9 - 13
public/app/features/dashboard/export/export_modal.html

@@ -1,25 +1,21 @@
-
-<!-- <p> -->
-<!-- 	Exporting will export a cleaned sharable dashboard that can be imported -->
-<!-- 	into another Grafana instance. -->
-<!-- </p> -->
-
 <div class="share-modal-header">
 	<div class="share-modal-big-icon">
 		<i class="fa fa-cloud-upload"></i>
 	</div>
 	<div>
-		<p class="share-modal-info-text">
-			Export the dashboard to a JSON file. The exporter will templatize the
-			dashboard's data sources to make it easy for others to import and reuse.
-			You can share dashboards on <a class="external-link" href="https://grafana.com">Grafana.com</a>
-		</p>
+		<gf-form-switch
+			class="gf-form"
+			label="Export for sharing externally"
+			label-class="width-16"
+			checked="ctrl.shareExternally"
+			tooltip="Useful for sharing dashboard publicly on grafana.com. Will templatize data source names. Can then only be used with the specific dashboard import API.">
+		</gf-form-switch>
 
 		<div class="gf-form-button-row">
-			<button type="button" class="btn gf-form-btn width-10 btn-success" ng-click="ctrl.save()">
+			<button type="button" class="btn gf-form-btn width-10 btn-success" ng-click="ctrl.saveDashboardAsFile()">
 				<i class="fa fa-save"></i> Save to file
 			</button>
-			<button type="button" class="btn gf-form-btn width-10 btn-secondary" ng-click="ctrl.saveJson()">
+			<button type="button" class="btn gf-form-btn width-10 btn-secondary" ng-click="ctrl.viewJson()">
 				<i class="fa fa-file-text-o"></i> View JSON
 			</button>
 			<a class="btn btn-link" ng-click="ctrl.dismiss()">Cancel</a>

+ 34 - 13
public/app/features/dashboard/export/export_modal.ts

@@ -8,34 +8,55 @@ export class DashExportCtrl {
   dash: any;
   exporter: DashboardExporter;
   dismiss: () => void;
+  shareExternally: boolean;
 
   /** @ngInject */
   constructor(private dashboardSrv, datasourceSrv, private $scope, private $rootScope) {
     this.exporter = new DashboardExporter(datasourceSrv);
 
-    this.exporter.makeExportable(this.dashboardSrv.getCurrent()).then(dash => {
-      this.$scope.$apply(() => {
-        this.dash = dash;
+    this.dash = this.dashboardSrv.getCurrent();
+  }
+
+  saveDashboardAsFile() {
+    if (this.shareExternally) {
+      this.exporter.makeExportable(this.dash).then((dashboardJson: any) => {
+        this.$scope.$apply(() => {
+          this.openSaveAsDialog(dashboardJson);
+        });
       });
-    });
+    } else {
+      this.openSaveAsDialog(this.dash.getSaveModelClone());
+    }
+  }
+
+  viewJson() {
+    if (this.shareExternally) {
+      this.exporter.makeExportable(this.dash).then((dashboardJson: any) => {
+        this.$scope.$apply(() => {
+          this.openJsonModal(dashboardJson);
+        });
+      });
+    } else {
+      this.openJsonModal(this.dash.getSaveModelClone());
+    }
   }
 
-  save() {
-    const blob = new Blob([angular.toJson(this.dash, true)], {
+  private openSaveAsDialog(dash: any) {
+    const blob = new Blob([angular.toJson(dash, true)], {
       type: 'application/json;charset=utf-8',
     });
-    saveAs(blob, this.dash.title + '-' + new Date().getTime() + '.json');
+    saveAs(blob, dash.title + '-' + new Date().getTime() + '.json');
   }
 
-  saveJson() {
-    const clone = this.dash;
-    const editScope = this.$rootScope.$new();
-    editScope.object = clone;
-    editScope.enableCopy = true;
+  private openJsonModal(clone: object) {
+    const model = {
+      object: clone,
+      enableCopy: true,
+    };
 
     this.$rootScope.appEvent('show-modal', {
       src: 'public/app/partials/edit_json.html',
-      scope: editScope,
+      model: model,
     });
 
     this.dismiss();
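
A minimal sketch of the control flow introduced in this modal (the interfaces below are simplified stand-ins, not the actual Grafana `DashboardModel`/`DashboardExporter` types): the "Export for sharing externally" switch decides whether the dashboard goes through the templatizing exporter or is saved as a plain save-model clone. Both "Save to file" and "View JSON" share the same decision.

```ts
// Sketch only: stand-in interfaces to show the branching, not the real API.
interface DashboardLike {
  getSaveModelClone(): object;
}

interface ExporterLike {
  makeExportable(dash: DashboardLike): Promise<object>;
}

function resolveExportModel(
  dash: DashboardLike,
  exporter: ExporterLike,
  shareExternally: boolean
): Promise<object> {
  return shareExternally
    ? exporter.makeExportable(dash) // templatizes data sources for sharing on grafana.com
    : Promise.resolve(dash.getSaveModelClone()); // raw save model, unchanged
}
```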

+ 22 - 11
public/app/features/dashboard/export/exporter.ts

@@ -29,19 +29,36 @@ export class DashboardExporter {
     }
 
     const templateizeDatasourceUsage = obj => {
+      let datasource = obj.datasource;
+      let datasourceVariable = null;
+
       // ignore data source properties that contain a variable
-      if (obj.datasource && obj.datasource.indexOf('$') === 0) {
-        if (variableLookup[obj.datasource.substring(1)]) {
-          return;
+      if (datasource && datasource.indexOf('$') === 0) {
+        datasourceVariable = variableLookup[datasource.substring(1)];
+        if (datasourceVariable && datasourceVariable.current) {
+          datasource = datasourceVariable.current.value;
         }
       }
 
       promises.push(
-        this.datasourceSrv.get(obj.datasource).then(ds => {
+        this.datasourceSrv.get(datasource).then(ds => {
           if (ds.meta.builtIn) {
             return;
           }
 
+          // add data source type to require list
+          requires['datasource' + ds.meta.id] = {
+            type: 'datasource',
+            id: ds.meta.id,
+            name: ds.meta.name,
+            version: ds.meta.info.version || '1.0.0',
+          };
+
+          // if used via variable we can skip templatizing usage
+          if (datasourceVariable) {
+            return;
+          }
+
           const refName = 'DS_' + ds.name.replace(' ', '_').toUpperCase();
           datasources[refName] = {
             name: refName,
@@ -51,14 +68,8 @@ export class DashboardExporter {
             pluginId: ds.meta.id,
             pluginName: ds.meta.name,
           };
-          obj.datasource = '${' + refName + '}';
 
-          requires['datasource' + ds.meta.id] = {
-            type: 'datasource',
-            id: ds.meta.id,
-            name: ds.meta.name,
-            version: ds.meta.info.version || '1.0.0',
-          };
+          obj.datasource = '${' + refName + '}';
         })
       );
     };
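
Roughly what the reworked exporter logic does, restated as a standalone sketch (the types and names here are illustrative): a `$variable` data source reference is resolved to the variable's current value so its plugin still lands in `__requires`, while the usage itself is left untouched instead of being templatized.

```ts
// Simplified stand-ins for the exporter's variable lookup.
interface DsVariable {
  current?: { value: string };
}
type VariableLookup = { [name: string]: DsVariable };

// Returns the data source name to look up and whether the reference came from a
// template variable (in which case templatizing the usage is skipped).
function resolveDatasourceRef(ref: string | undefined, variables: VariableLookup) {
  let datasource = ref;
  let variable: DsVariable | undefined;
  if (ref && ref.indexOf('$') === 0) {
    variable = variables[ref.substring(1)];
    if (variable && variable.current) {
      datasource = variable.current.value; // resolve to the variable's current data source
    }
  }
  // Callers add the resolved plugin to __requires either way, but skip
  // rewriting obj.datasource when it came from a variable.
  return { datasource, fromVariable: variable !== undefined };
}
```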

+ 15 - 4
public/app/features/dashboard/panel_model.ts

@@ -60,6 +60,21 @@ export class PanelModel {
     _.defaultsDeep(this, _.cloneDeep(defaults));
   }
 
+  getOptions() {
+    return this[this.getOptionsKey()] || {};
+  }
+
+  updateOptions(options: object) {
+    const update: any = {};
+    update[this.getOptionsKey()] = options;
+    Object.assign(this, update);
+    this.render();
+  }
+
+  private getOptionsKey() {
+    return this.type + 'Options';
+  }
+
   getSaveModel() {
     const model: any = {};
     for (const property in this) {
@@ -121,10 +136,6 @@ export class PanelModel {
     this.events.emit('panel-initialized');
   }
 
-  initEditMode() {
-    this.events.emit('panel-init-edit-mode');
-  }
-
   changeType(pluginId: string) {
     this.type = pluginId;
 

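The new `getOptions`/`updateOptions` pair stores a panel's options under a key derived from the panel type (for example `graphOptions`). A stripped-down sketch of the same pattern, without the rest of `PanelModel`:

```ts
// Stripped-down illustration of the per-type options key; the real PanelModel has
// many more fields and its updateOptions() also triggers render().
class PanelOptionsHolder {
  type: string;
  [key: string]: any;

  constructor(type: string) {
    this.type = type;
  }

  getOptions(): object {
    return this[this.getOptionsKey()] || {};
  }

  updateOptions(options: object) {
    this[this.getOptionsKey()] = options;
  }

  private getOptionsKey(): string {
    return this.type + 'Options'; // e.g. "graph" -> "graphOptions"
  }
}
```
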
+ 1 - 1
public/app/features/dashboard/settings/settings.ts

@@ -32,9 +32,9 @@ export class SettingsCtrl {
 
     this.$scope.$on('$destroy', () => {
       this.dashboard.updateSubmenuVisibility();
-      this.dashboard.startRefresh();
       setTimeout(() => {
         this.$rootScope.appEvent('dash-scroll', { restore: true });
+        this.dashboard.startRefresh();
       });
     });
 

+ 2 - 0
public/app/features/dashboard/shareModalCtrl.ts

@@ -12,6 +12,8 @@ export function ShareModalCtrl($scope, $rootScope, $location, $timeout, timeSrv,
   $scope.editor = { index: $scope.tabIndex || 0 };
 
   $scope.init = () => {
+    $scope.panel = $scope.model && $scope.model.panel ? $scope.model.panel : $scope.panel; // React passes panel and dashboard in the "model" property

+    $scope.dashboard = $scope.model && $scope.model.dashboard ? $scope.model.dashboard : $scope.dashboard; // ^
     $scope.modeSharePanel = $scope.panel ? true : false;
 
     $scope.tabs = [{ title: 'Link', src: 'shareLink.html' }];

+ 12 - 2
public/app/features/dashboard/specs/exporter.test.ts

@@ -32,8 +32,8 @@ describe('given dashboard with repeated panels', () => {
           {
             name: 'ds',
             type: 'datasource',
-            query: 'testdb',
-            current: { value: 'prod', text: 'prod' },
+            query: 'other2',
+            current: { value: 'other2', text: 'other2' },
             options: [],
           },
         ],
@@ -205,6 +205,11 @@ describe('given dashboard with repeated panels', () => {
     expect(variable.options[0].text).toBe('${VAR_PREFIX}');
     expect(variable.options[0].value).toBe('${VAR_PREFIX}');
   });
+
+  it('should add datasources only used via datasource variable to requires', () => {
+    const require = _.find(exported.__requires, { name: 'OtherDB_2' });
+    expect(require.id).toBe('other2');
+  });
 });
 
 // Stub responses
@@ -219,6 +224,11 @@ stubs['other'] = {
   meta: { id: 'other', info: { version: '1.2.1' }, name: 'OtherDB' },
 };
 
+stubs['other2'] = {
+  name: 'other2',
+  meta: { id: 'other2', info: { version: '1.2.1' }, name: 'OtherDB_2' },
+};
+
 stubs['-- Mixed --'] = {
   name: 'mixed',
   meta: {

+ 120 - 0
public/app/features/dashboard/utils/getPanelMenu.ts

@@ -0,0 +1,120 @@
+import { updateLocation } from 'app/core/actions';
+import { store } from 'app/store/configureStore';
+
+import { removePanel, duplicatePanel, copyPanel, editPanelJson, sharePanel } from 'app/features/dashboard/utils/panel';
+import { PanelModel } from 'app/features/dashboard/panel_model';
+import { DashboardModel } from 'app/features/dashboard/dashboard_model';
+import { PanelMenuItem } from 'app/types/panel';
+
+export const getPanelMenu = (dashboard: DashboardModel, panel: PanelModel) => {
+  const onViewPanel = () => {
+    store.dispatch(
+      updateLocation({
+        query: {
+          panelId: panel.id,
+          edit: false,
+          fullscreen: true,
+        },
+        partial: true,
+      })
+    );
+  };
+
+  const onEditPanel = () => {
+    store.dispatch(
+      updateLocation({
+        query: {
+          panelId: panel.id,
+          edit: true,
+          fullscreen: true,
+        },
+        partial: true,
+      })
+    );
+  };
+
+  const onSharePanel = () => {
+    sharePanel(dashboard, panel);
+  };
+
+  const onDuplicatePanel = () => {
+    duplicatePanel(dashboard, panel);
+  };
+
+  const onCopyPanel = () => {
+    copyPanel(panel);
+  };
+
+  const onEditPanelJson = () => {
+    editPanelJson(dashboard, panel);
+  };
+
+  const onRemovePanel = () => {
+    removePanel(dashboard, panel, true);
+  };
+
+  const menu: PanelMenuItem[] = [];
+
+  menu.push({
+    text: 'View',
+    iconClassName: 'fa fa-fw fa-eye',
+    onClick: onViewPanel,
+    shortcut: 'v',
+  });
+
+  if (dashboard.meta.canEdit) {
+    menu.push({
+      text: 'Edit',
+      iconClassName: 'fa fa-fw fa-edit',
+      onClick: onEditPanel,
+      shortcut: 'e',
+    });
+  }
+
+  menu.push({
+    text: 'Share',
+    iconClassName: 'fa fa-fw fa-share',
+    onClick: onSharePanel,
+    shortcut: 'p s',
+  });
+
+  const subMenu: PanelMenuItem[] = [];
+
+  if (!panel.fullscreen && dashboard.meta.canEdit) {
+    subMenu.push({
+      text: 'Duplicate',
+      onClick: onDuplicatePanel,
+      shortcut: 'p d',
+    });
+
+    subMenu.push({
+      text: 'Copy',
+      onClick: onCopyPanel,
+    });
+  }
+
+  subMenu.push({
+    text: 'Panel JSON',
+    onClick: onEditPanelJson,
+  });
+
+  menu.push({
+    type: 'submenu',
+    text: 'More...',
+    iconClassName: 'fa fa-fw fa-cube',
+    subMenu: subMenu,
+  });
+
+  if (dashboard.meta.canEdit) {
+    menu.push({ type: 'divider' });
+
+    menu.push({
+      text: 'Remove',
+      iconClassName: 'fa fa-fw fa-trash',
+      onClick: onRemovePanel,
+      shortcut: 'p r',
+    });
+  }
+
+  return menu;
+};
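
`getPanelMenu` builds a plain data structure (`PanelMenuItem[]`) that the React panel header renders. A small hedged sketch of how such a structure could be walked, using a simplified item shape rather than the actual `app/types/panel` definition:

```ts
// Simplified item shape; the real PanelMenuItem type lives in app/types/panel.
interface MenuItemSketch {
  type?: 'submenu' | 'divider';
  text?: string;
  shortcut?: string;
  onClick?: () => void;
  subMenu?: MenuItemSketch[];
}

// Flattens a menu into printable labels, mainly to show how the submenu and
// divider entries produced above are shaped.
function describeMenu(items: MenuItemSketch[], depth = 0): string[] {
  return items.reduce<string[]>((acc, item) => {
    if (item.type === 'divider') {
      return [...acc, `${'  '.repeat(depth)}---`];
    }
    const label = `${'  '.repeat(depth)}${item.text}${item.shortcut ? ` (${item.shortcut})` : ''}`;
    const children = item.subMenu ? describeMenu(item.subMenu, depth + 1) : [];
    return [...acc, label, ...children];
  }, []);
}
```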

+ 86 - 0
public/app/features/dashboard/utils/panel.ts

@@ -0,0 +1,86 @@
+import appEvents from 'app/core/app_events';
+import { DashboardModel } from 'app/features/dashboard/dashboard_model';
+import { PanelModel } from 'app/features/dashboard/panel_model';
+import store from 'app/core/store';
+import { LS_PANEL_COPY_KEY } from 'app/core/constants';
+
+export const removePanel = (dashboard: DashboardModel, panel: PanelModel, ask: boolean) => {
+  // confirm deletion
+  if (ask !== false) {
+    const text2 = panel.alert ? 'Panel includes an alert rule, removing panel will also remove alert rule' : null;
+    const confirmText = panel.alert ? 'YES' : null;
+
+    appEvents.emit('confirm-modal', {
+      title: 'Remove Panel',
+      text: 'Are you sure you want to remove this panel?',
+      text2: text2,
+      icon: 'fa-trash',
+      confirmText: confirmText,
+      yesText: 'Remove',
+      onConfirm: () => removePanel(dashboard, panel, false),
+    });
+    return;
+  }
+  dashboard.removePanel(panel);
+};
+
+export const duplicatePanel = (dashboard: DashboardModel, panel: PanelModel) => {
+  dashboard.duplicatePanel(panel);
+};
+
+export const copyPanel = (panel: PanelModel) => {
+  store.set(LS_PANEL_COPY_KEY, JSON.stringify(panel.getSaveModel()));
+  appEvents.emit('alert-success', ['Panel copied. Open Add Panel to paste']);
+};
+
+const replacePanel = (dashboard: DashboardModel, newPanel: PanelModel, oldPanel: PanelModel) => {
+  const index = dashboard.panels.findIndex(panel => {
+    return panel.id === oldPanel.id;
+  });
+
+  const deletedPanel = dashboard.panels.splice(index, 1);
+  dashboard.events.emit('panel-removed', deletedPanel);
+
+  newPanel = new PanelModel(newPanel);
+  newPanel.id = oldPanel.id;
+
+  dashboard.panels.splice(index, 0, newPanel);
+  dashboard.sortPanelsByGridPos();
+  dashboard.events.emit('panel-added', newPanel);
+};
+
+export const editPanelJson = (dashboard: DashboardModel, panel: PanelModel) => {
+  const model = {
+    object: panel.getSaveModel(),
+    updateHandler: (newPanel: PanelModel, oldPanel: PanelModel) => {
+      replacePanel(dashboard, newPanel, oldPanel);
+    },
+    enableCopy: true,
+  };
+
+  appEvents.emit('show-modal', {
+    src: 'public/app/partials/edit_json.html',
+    model: model,
+  });
+};
+
+export const sharePanel = (dashboard: DashboardModel, panel: PanelModel) => {
+  appEvents.emit('show-modal', {
+    src: 'public/app/features/dashboard/partials/shareModal.html',
+    model: {
+      dashboard: dashboard,
+      panel: panel,
+    },
+  });
+};
+
+export const refreshPanel = (panel: PanelModel) => {
+  panel.refresh();
+};
+
+export const toggleLegend = (panel: PanelModel) => {
+  console.log('Toggle legend is not implemented yet');
+  // We need to set panel.legend defaults first
+  // panel.legend.show = !panel.legend.show;
+  refreshPanel(panel);
+};
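
`removePanel` above asks first and then calls itself with `ask = false` from the confirm callback. The same flow as a tiny generic sketch (`confirm` here stands in for the 'confirm-modal' app event):

```ts
// Generic confirm-then-act recursion mirroring removePanel's ask flag.
function removeWithConfirm(doRemove: () => void, confirm: (onYes: () => void) => void, ask: boolean) {
  if (ask) {
    confirm(() => removeWithConfirm(doRemove, confirm, false));
    return;
  }
  doRemove();
}
```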

+ 33 - 11
public/app/features/explore/Explore.tsx

@@ -25,10 +25,20 @@ import ErrorBoundary from './ErrorBoundary';
 import TimePicker from './TimePicker';
 import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';
 import { DataSource } from 'app/types/datasources';
-import { mergeStreams } from 'app/core/logs_model';
 
 const MAX_HISTORY_ITEMS = 100;
 
+function getIntervals(range: RawTimeRange, datasource, resolution: number): { interval: string; intervalMs: number } {
+  if (!datasource || !resolution) {
+    return { interval: '1s', intervalMs: 1000 };
+  }
+  const absoluteRange: RawTimeRange = {
+    from: parseDate(range.from, false),
+    to: parseDate(range.to, true),
+  };
+  return kbn.calculateInterval(absoluteRange, resolution, datasource.interval);
+}
+
 function makeTimeSeriesList(dataList, options) {
   return dataList.map((seriesData, index) => {
     const datapoints = seriesData.datapoints || [];
@@ -471,12 +481,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     targetOptions: { format: string; hinting?: boolean; instant?: boolean }
   ) {
     const { datasource, range } = this.state;
-    const resolution = this.el.offsetWidth;
-    const absoluteRange: RawTimeRange = {
-      from: parseDate(range.from, false),
-      to: parseDate(range.to, true),
-    };
-    const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval);
+    const { interval, intervalMs } = getIntervals(range, datasource, this.el.offsetWidth);
     const targets = [
       {
         ...targetOptions,
@@ -491,6 +496,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
 
     return {
       interval,
+      intervalMs,
       targets,
       range: queryRange,
     };
@@ -759,6 +765,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     const tableButtonActive = showingBoth || showingTable ? 'active' : '';
     const exploreClass = split ? 'explore explore-split' : 'explore';
     const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
+    const graphRangeIntervals = getIntervals(graphRange, datasource, this.el ? this.el.offsetWidth : 0);
     const graphLoading = queryTransactions.some(qt => qt.resultType === 'Graph' && !qt.done);
     const tableLoading = queryTransactions.some(qt => qt.resultType === 'Table' && !qt.done);
     const logsLoading = queryTransactions.some(qt => qt.resultType === 'Logs' && !qt.done);
@@ -770,9 +777,15 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
       new TableModel(),
       ...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done && qt.result).map(qt => qt.result)
     );
-    const logsResult = mergeStreams(
-      queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
-    );
+    const logsResult =
+      datasource && datasource.mergeStreams
+        ? datasource.mergeStreams(
+            _.flatten(
+              queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
+            ),
+            graphRangeIntervals.intervalMs
+          )
+        : undefined;
     const loading = queryTransactions.some(qt => !qt.done);
     const showStartPages = StartPage && queryTransactions.length === 0;
     const viewModeCount = [supportsGraph, supportsLogs, supportsTable].filter(m => m).length;
@@ -894,6 +907,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                           height={graphHeight}
                           loading={graphLoading}
                           id={`explore-graph-${position}`}
+                          onChangeTime={this.onChangeTime}
                           range={graphRange}
                           split={split}
                         />
@@ -903,7 +917,15 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
                         <Table data={tableResult} loading={tableLoading} onClickCell={this.onClickTableCell} />
                       </div>
                     ) : null}
-                    {supportsLogs && showingLogs ? <Logs data={logsResult} loading={logsLoading} /> : null}
+                    {supportsLogs && showingLogs ? (
+                      <Logs
+                        data={logsResult}
+                        loading={logsLoading}
+                        position={position}
+                        onChangeTime={this.onChangeTime}
+                        range={range}
+                      />
+                    ) : null}
                   </>
                 )}
               </ErrorBoundary>
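
The extracted `getIntervals` helper guards against the datasource or the measured element width being unavailable (for example before the first render). A sketch of just that guard, with the interval calculator passed in rather than imported from `kbn`:

```ts
// calculateInterval is a stand-in parameter here; in Explore it is
// kbn.calculateInterval applied to the parsed absolute time range.
interface Intervals {
  interval: string;
  intervalMs: number;
}

function safeIntervals(
  calculateInterval: (resolution: number, minInterval?: string) => Intervals,
  datasource: { interval?: string } | undefined,
  resolution: number
): Intervals {
  if (!datasource || !resolution) {
    // Fallback used before the datasource is loaded or the element is measured.
    return { interval: '1s', intervalMs: 1000 };
  }
  return calculateInterval(resolution, datasource.interval);
}
```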

+ 29 - 5
public/app/features/explore/Graph.tsx

@@ -5,6 +5,8 @@ import { withSize } from 'react-sizeme';
 
 import 'vendor/flot/jquery.flot';
 import 'vendor/flot/jquery.flot.time';
+import 'vendor/flot/jquery.flot.selection';
+import 'vendor/flot/jquery.flot.stack';
 
 import { RawTimeRange } from 'app/types/series';
 import * as dateMath from 'app/core/utils/datemath';
@@ -62,10 +64,10 @@ const FLOT_OPTIONS = {
     margin: { left: 0, right: 0 },
     labelMarginX: 0,
   },
-  // selection: {
-  //   mode: 'x',
-  //   color: '#666',
-  // },
+  selection: {
+    mode: 'x',
+    color: '#666',
+  },
   // crosshair: {
   //   mode: 'x',
   // },
@@ -79,6 +81,8 @@ interface GraphProps {
   range: RawTimeRange;
   split?: boolean;
   size?: { width: number; height: number };
+  userOptions?: any;
+  onChangeTime?: (range: RawTimeRange) => void;
 }
 
 interface GraphState {
@@ -86,6 +90,8 @@ interface GraphState {
 }
 
 export class Graph extends PureComponent<GraphProps, GraphState> {
+  $el: any;
+
   state = {
     showAllTimeSeries: false,
   };
@@ -98,6 +104,8 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
 
   componentDidMount() {
     this.draw();
+    this.$el = $(`#${this.props.id}`);
+    this.$el.bind('plotselected', this.onPlotSelected);
   }
 
   componentDidUpdate(prevProps: GraphProps) {
@@ -112,6 +120,20 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
     }
   }
 
+  componentWillUnmount() {
+    this.$el.unbind('plotselected', this.onPlotSelected);
+  }
+
+  onPlotSelected = (event, ranges) => {
+    if (this.props.onChangeTime) {
+      const range = {
+        from: moment(ranges.xaxis.from),
+        to: moment(ranges.xaxis.to),
+      };
+      this.props.onChangeTime(range);
+    }
+  };
+
   onShowAllTimeSeries = () => {
     this.setState(
       {
@@ -122,7 +144,7 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
   };
 
   draw() {
-    const { range, size } = this.props;
+    const { range, size, userOptions = {} } = this.props;
     const data = this.getGraphData();
 
     const $el = $(`#${this.props.id}`);
@@ -153,12 +175,14 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
         max: max,
         label: 'Datetime',
         ticks: ticks,
+        timezone: 'browser',
         timeformat: time_format(ticks, min, max),
       },
     };
     const options = {
       ...FLOT_OPTIONS,
       ...dynamicOptions,
+      ...userOptions,
     };
     $.plot($el, series, options);
   }
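
flot's `plotselected` event reports the dragged x-axis range as millisecond epochs; the handler added above converts them to moments before notifying the parent. A standalone sketch with simplified types:

```ts
import moment from 'moment';

type FlotXSelection = { xaxis: { from: number; to: number } };
type MomentRange = { from: moment.Moment; to: moment.Moment };

// Builds the event handler; in the component it is bound with
// $el.bind('plotselected', handler) on mount and unbound on unmount.
function makePlotSelectedHandler(onChangeTime?: (range: MomentRange) => void) {
  return (_event: unknown, ranges: FlotXSelection) => {
    if (onChangeTime) {
      onChangeTime({ from: moment(ranges.xaxis.from), to: moment(ranges.xaxis.to) });
    }
  };
}
```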

+ 110 - 4
public/app/features/explore/Logs.tsx

@@ -1,29 +1,135 @@
 import React, { Fragment, PureComponent } from 'react';
 import Highlighter from 'react-highlight-words';
 
+import { RawTimeRange } from 'app/types/series';
 import { LogsModel } from 'app/core/logs_model';
 import { findHighlightChunksInText } from 'app/core/utils/text';
+import { Switch } from 'app/core/components/Switch/Switch';
+
+import Graph from './Graph';
+
+const graphOptions = {
+  series: {
+    bars: {
+      show: true,
+      lineWidth: 5,
+      // barWidth: 10,
+    },
+    // stack: true,
+  },
+  yaxis: {
+    tickDecimals: 0,
+  },
+};
 
 interface LogsProps {
   className?: string;
   data: LogsModel;
   loading: boolean;
+  position: string;
+  range?: RawTimeRange;
+  onChangeTime?: (range: RawTimeRange) => void;
+}
+
+interface LogsState {
+  showLabels: boolean;
+  showLocalTime: boolean;
+  showUtc: boolean;
 }
 
-export default class Logs extends PureComponent<LogsProps, {}> {
+export default class Logs extends PureComponent<LogsProps, LogsState> {
+  state = {
+    showLabels: true,
+    showLocalTime: true,
+    showUtc: false,
+  };
+
+  onChangeLabels = (event: React.SyntheticEvent) => {
+    const target = event.target as HTMLInputElement;
+    this.setState({
+      showLabels: target.checked,
+    });
+  };
+
+  onChangeLocalTime = (event: React.SyntheticEvent) => {
+    const target = event.target as HTMLInputElement;
+    this.setState({
+      showLocalTime: target.checked,
+    });
+  };
+
+  onChangeUtc = (event: React.SyntheticEvent) => {
+    const target = event.target as HTMLInputElement;
+    this.setState({
+      showUtc: target.checked,
+    });
+  };
+
   render() {
-    const { className = '', data, loading = false } = this.props;
+    const { className = '', data, loading = false, position, range } = this.props;
+    const { showLabels, showLocalTime, showUtc } = this.state;
     const hasData = data && data.rows && data.rows.length > 0;
+    const cssColumnSizes = ['4px'];
+    if (showUtc) {
+      cssColumnSizes.push('minmax(100px, max-content)');
+    }
+    if (showLocalTime) {
+      cssColumnSizes.push('minmax(100px, max-content)');
+    }
+    if (showLabels) {
+      cssColumnSizes.push('minmax(100px, 25%)');
+    }
+    cssColumnSizes.push('1fr');
+    const logEntriesStyle = {
+      gridTemplateColumns: cssColumnSizes.join(' '),
+    };
+
     return (
       <div className={`${className} logs`}>
+        <div className="logs-graph">
+          <Graph
+            data={data.series}
+            height="100px"
+            range={range}
+            id={`explore-logs-graph-${position}`}
+            onChangeTime={this.props.onChangeTime}
+            userOptions={graphOptions}
+          />
+        </div>
+
+        <div className="panel-container logs-options">
+          <div className="logs-controls">
+            <Switch label="Timestamp" checked={showUtc} onChange={this.onChangeUtc} small />
+            <Switch label="Local time" checked={showLocalTime} onChange={this.onChangeLocalTime} small />
+            <Switch label="Labels" checked={showLabels} onChange={this.onChangeLabels} small />
+            {hasData &&
+              data.meta && (
+                <div className="logs-meta">
+                  {data.meta.map(item => (
+                    <div className="logs-meta-item" key={item.label}>
+                      <span className="logs-meta-item__label">{item.label}:</span>
+                      <span className="logs-meta-item__value">{item.value}</span>
+                    </div>
+                  ))}
+                </div>
+              )}
+          </div>
+        </div>
+
         <div className="panel-container">
           {loading && <div className="explore-panel__loader" />}
-          <div className="logs-entries">
+          <div className="logs-entries" style={logEntriesStyle}>
             {hasData &&
               data.rows.map(row => (
                 <Fragment key={row.key}>
                   <div className={row.logLevel ? `logs-row-level logs-row-level-${row.logLevel}` : ''} />
-                  <div title={`${row.timestamp} (${row.timeFromNow})`}>{row.timeLocal}</div>
+                  {showUtc && <div title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>{row.timestamp}</div>}
+                  {showLocalTime && <div title={`${row.timestamp} (${row.timeFromNow})`}>{row.timeLocal}</div>}
+                  {showLabels && (
+                    <div className="max-width" title={row.labels}>
+                      {row.labels}
+                    </div>
+                  )}
                   <div>
                     <Highlighter
                       textToHighlight={row.entry}

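The column layout for log rows is driven by the three switches; each enabled column adds a CSS grid track. A faithful standalone restatement of that sizing logic:

```ts
// Mirrors the cssColumnSizes construction in Logs.tsx: the first fixed track is
// the log-level indicator, the last flexible track is the log line itself.
function buildLogGridTemplateColumns(showUtc: boolean, showLocalTime: boolean, showLabels: boolean): string {
  const sizes = ['4px'];
  if (showUtc) {
    sizes.push('minmax(100px, max-content)');
  }
  if (showLocalTime) {
    sizes.push('minmax(100px, max-content)');
  }
  if (showLabels) {
    sizes.push('minmax(100px, 25%)');
  }
  sizes.push('1fr');
  return sizes.join(' ');
}

// e.g. buildLogGridTemplateColumns(false, true, true)
// -> '4px minmax(100px, max-content) minmax(100px, 25%) 1fr'
```
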
+ 2 - 1
public/app/features/explore/QueryField.tsx

@@ -229,6 +229,7 @@ export class QueryField extends React.PureComponent<QueryFieldProps, QueryFieldS
     const { cleanText, onWillApplySuggestion, syntax } = this.props;
     const { typeaheadPrefix, typeaheadText } = this.state;
     let suggestionText = suggestion.insertText || suggestion.label;
+    const preserveSuffix = suggestion.kind === 'function';
     const move = suggestion.move || 0;
 
     if (onWillApplySuggestion) {
@@ -243,7 +244,7 @@ export class QueryField extends React.PureComponent<QueryFieldProps, QueryFieldS
     const suffixLength = text.length - typeaheadPrefix.length;
     const offset = typeaheadText.indexOf(typeaheadPrefix);
     const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestionText === typeaheadText);
-    const forward = midWord ? suffixLength + offset : 0;
+    const forward = midWord && !preserveSuffix ? suffixLength + offset : 0;
 
     // If new-lines, apply suggestion as block
     if (suggestionText.match(/\n/)) {

+ 24 - 5
public/app/features/explore/TimePicker.tsx

@@ -16,6 +16,9 @@ export const DEFAULT_RANGE = {
  * @param value Epoch or relative time
  */
 export function parseTime(value: string, isUtc = false): string {
+  if (moment.isMoment(value)) {
+    return value;
+  }
   if (value.indexOf('now') !== -1) {
     return value;
   }
@@ -39,7 +42,8 @@ interface TimePickerState {
   isOpen: boolean;
   isUtc: boolean;
   rangeString: string;
-  refreshInterval: string;
+  refreshInterval?: string;
+  initialRange?: RawTimeRange;
 
   // Input-controlled text, keep these in a shape that is human-editable
   fromRaw: string;
@@ -52,6 +56,22 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
   constructor(props) {
     super(props);
 
+    this.state = {
+      isOpen: props.isOpen,
+      isUtc: props.isUtc,
+      rangeString: '',
+      fromRaw: '',
+      toRaw: '',
+      initialRange: DEFAULT_RANGE,
+      refreshInterval: '',
+    };
+  }
+
+  static getDerivedStateFromProps(props, state) {
+    if (state.initialRange && state.initialRange === props.range) {
+      return state;
+    }
+
     const from = props.range ? props.range.from : DEFAULT_RANGE.from;
     const to = props.range ? props.range.to : DEFAULT_RANGE.to;
 
@@ -63,13 +83,12 @@ export default class TimePicker extends PureComponent<TimePickerProps, TimePicke
       to: toRaw,
     };
 
-    this.state = {
+    return {
+      ...state,
       fromRaw,
       toRaw,
-      isOpen: props.isOpen,
-      isUtc: props.isUtc,
+      initialRange: props.range,
       rangeString: rangeUtil.describeTimeRange(range),
-      refreshInterval: '',
     };
   }
 

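TimePicker now derives its raw input fields from props in `getDerivedStateFromProps`, using an `initialRange` marker to avoid clobbering user edits while the same range object is still being passed down. A condensed sketch of that comparison, with the `parseTime`/`describeTimeRange` formatting details omitted:

```ts
// Condensed restatement of the derivation; raw values are passed through unchanged.
interface RangeSketch {
  from: string;
  to: string;
}
interface PickerStateSketch {
  fromRaw: string;
  toRaw: string;
  initialRange?: RangeSketch;
}

function deriveState(propsRange: RangeSketch, state: PickerStateSketch): PickerStateSketch {
  // Same range object as last time: keep whatever the user has typed.
  if (state.initialRange && state.initialRange === propsRange) {
    return state;
  }
  // New range from props: reset the editable fields and remember the new range.
  return { ...state, fromRaw: propsRange.from, toRaw: propsRange.to, initialRange: propsRange };
}
```
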
+ 12 - 41
public/app/features/panel/panel_ctrl.ts

@@ -1,11 +1,15 @@
 import config from 'app/core/config';
 import _ from 'lodash';
 import $ from 'jquery';
-import { appEvents, profiler } from 'app/core/core';
-import { PanelModel } from 'app/features/dashboard/panel_model';
+import { profiler } from 'app/core/core';
+import {
+  duplicatePanel,
+  copyPanel as copyPanelUtil,
+  editPanelJson as editPanelJsonUtil,
+  sharePanel as sharePanelUtil,
+} from 'app/features/dashboard/utils/panel';
 import Remarkable from 'remarkable';
-import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN, LS_PANEL_COPY_KEY } from 'app/core/constants';
-import store from 'app/core/store';
+import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN } from 'app/core/constants';
 
 const TITLE_HEIGHT = 27;
 const PANEL_BORDER = 2;
@@ -241,7 +245,7 @@ export class PanelCtrl {
   }
 
   duplicate() {
-    this.dashboard.duplicatePanel(this.panel);
+    duplicatePanel(this.dashboard, this.panel);
   }
 
   removePanel() {
@@ -251,48 +255,15 @@ export class PanelCtrl {
   }
 
   editPanelJson() {
-    const editScope = this.$scope.$root.$new();
-    editScope.object = this.panel.getSaveModel();
-    editScope.updateHandler = this.replacePanel.bind(this);
-    editScope.enableCopy = true;
-
-    this.publishAppEvent('show-modal', {
-      src: 'public/app/partials/edit_json.html',
-      scope: editScope,
-    });
+    editPanelJsonUtil(this.dashboard, this.panel);
   }
 
   copyPanel() {
-    store.set(LS_PANEL_COPY_KEY, JSON.stringify(this.panel.getSaveModel()));
-    appEvents.emit('alert-success', ['Panel copied. Open Add Panel to paste']);
-  }
-
-  replacePanel(newPanel, oldPanel) {
-    const dashboard = this.dashboard;
-    const index = _.findIndex(dashboard.panels, panel => {
-      return panel.id === oldPanel.id;
-    });
-
-    const deletedPanel = dashboard.panels.splice(index, 1);
-    this.dashboard.events.emit('panel-removed', deletedPanel);
-
-    newPanel = new PanelModel(newPanel);
-    newPanel.id = oldPanel.id;
-
-    dashboard.panels.splice(index, 0, newPanel);
-    dashboard.sortPanelsByGridPos();
-    dashboard.events.emit('panel-added', newPanel);
+    copyPanelUtil(this.panel);
   }
 
   sharePanel() {
-    const shareScope = this.$scope.$new();
-    shareScope.panel = this.panel;
-    shareScope.dashboard = this.dashboard;
-
-    this.publishAppEvent('show-modal', {
-      src: 'public/app/features/dashboard/partials/shareModal.html',
-      scope: shareScope,
-    });
+    sharePanelUtil(this.dashboard, this.panel);
   }
 
   getInfoMode() {

+ 1 - 3
public/app/features/panel/viz_tab.ts

@@ -16,9 +16,7 @@ export class VizTabCtrl {
     $scope.ctrl = this;
   }
 
-  onTypeChanged = (plugin: PanelPlugin) => {
-    this.dashboard.changePanelType(this.panelCtrl.panel, plugin.id);
-  };
+  onTypeChanged = (plugin: PanelPlugin) => {};
 }
 
 const template = `

+ 1 - 1
public/app/partials/reset_password.html

@@ -19,7 +19,7 @@
 			</div>
 		</form>
 		<div ng-show="mode === 'email-sent'">
-			An email with a reset link as been sent to the email address. <br>
+			An email with a reset link has been sent to the email address. <br>
 			You should receive it shortly.
 			<div class="p-t-1">
 				<a href="login" class="btn btn-success p-t-1">

+ 6 - 1
public/app/plugins/datasource/elasticsearch/config_ctrl.ts

@@ -20,7 +20,12 @@ export class ElasticConfigCtrl {
     { name: 'Yearly', value: 'Yearly', example: '[logstash-]YYYY' },
   ];
 
-  esVersions = [{ name: '2.x', value: 2 }, { name: '5.x', value: 5 }, { name: '5.6+', value: 56 }];
+  esVersions = [
+    { name: '2.x', value: 2 },
+    { name: '5.x', value: 5 },
+    { name: '5.6+', value: 56 },
+    { name: '6.0+', value: 60 },
+  ];
 
   indexPatternTypeChanged() {
     const def = _.find(this.indexPatternTypes, {

+ 12 - 1
public/app/plugins/datasource/elasticsearch/query_builder.ts

@@ -31,7 +31,11 @@ export class ElasticQueryBuilder {
     queryNode.terms.size = parseInt(aggDef.settings.size, 10) === 0 ? 500 : parseInt(aggDef.settings.size, 10);
     if (aggDef.settings.orderBy !== void 0) {
       queryNode.terms.order = {};
-      queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
+      if (aggDef.settings.orderBy === '_term' && this.esVersion >= 60) {
+        queryNode.terms.order['_key'] = aggDef.settings.order;
+      } else {
+        queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
+      }
 
       // if metric ref, look it up and add it to this agg level
       metricRef = parseInt(aggDef.settings.orderBy, 10);
@@ -318,6 +322,13 @@ export class ElasticQueryBuilder {
         },
       },
     };
+
+    if (this.esVersion >= 60) {
+      query.aggs['1'].terms.order = {
+        _key: 'asc',
+      };
+    }
+
     return query;
   }
 }
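
Elasticsearch 6.0 renamed the terms-aggregation sort key from `_term` to `_key`, which is what both branches above encode. The version check in isolation:

```ts
// Picks the terms-aggregation order field: ES >= 6.0 (encoded as 60) uses "_key"
// where older versions used "_term"; any other orderBy value (e.g. a metric ref)
// is kept as-is.
function termsOrderField(orderBy: string, esVersion: number): string {
  return orderBy === '_term' && esVersion >= 60 ? '_key' : orderBy;
}

// termsOrderField('_term', 56) -> '_term'    termsOrderField('_term', 60) -> '_key'
```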

+ 62 - 0
public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts

@@ -62,6 +62,54 @@ describe('ElasticQueryBuilder', () => {
     expect(aggs['1'].avg.field).toBe('@value');
   });
 
+  it('with term agg and order by term', () => {
+    const query = builder.build(
+      {
+        metrics: [{ type: 'count', id: '1' }, { type: 'avg', field: '@value', id: '5' }],
+        bucketAggs: [
+          {
+            type: 'terms',
+            field: '@host',
+            settings: { size: 5, order: 'asc', orderBy: '_term' },
+            id: '2',
+          },
+          { type: 'date_histogram', field: '@timestamp', id: '3' },
+        ],
+      },
+      100,
+      1000
+    );
+
+    const firstLevel = query.aggs['2'];
+    expect(firstLevel.terms.order._term).toBe('asc');
+  });
+
+  it('with term agg and order by term on es6.x', () => {
+    const builder6x = new ElasticQueryBuilder({
+      timeField: '@timestamp',
+      esVersion: 60,
+    });
+    const query = builder6x.build(
+      {
+        metrics: [{ type: 'count', id: '1' }, { type: 'avg', field: '@value', id: '5' }],
+        bucketAggs: [
+          {
+            type: 'terms',
+            field: '@host',
+            settings: { size: 5, order: 'asc', orderBy: '_term' },
+            id: '2',
+          },
+          { type: 'date_histogram', field: '@timestamp', id: '3' },
+        ],
+      },
+      100,
+      1000
+    );
+
+    const firstLevel = query.aggs['2'];
+    expect(firstLevel.terms.order._key).toBe('asc');
+  });
+
   it('with term agg and order by metric agg', () => {
     const query = builder.build(
       {
@@ -302,4 +350,18 @@ describe('ElasticQueryBuilder', () => {
     expect(query.query.bool.filter[4].regexp['key5']).toBe('value5');
     expect(query.query.bool.filter[5].bool.must_not.regexp['key6']).toBe('value6');
   });
+
+  it('getTermsQuery should set correct sorting', () => {
+    const query = builder.getTermsQuery({});
+    expect(query.aggs['1'].terms.order._term).toBe('asc');
+  });
+
+  it('getTermsQuery es6.x should set correct sorting', () => {
+    const builder6x = new ElasticQueryBuilder({
+      timeField: '@timestamp',
+      esVersion: 60,
+    });
+    const query = builder6x.getTermsQuery({});
+    expect(query.aggs['1'].terms.order._key).toBe('asc');
+  });
 });

+ 4 - 4
public/app/plugins/datasource/logging/components/LoggingQueryField.tsx

@@ -95,9 +95,9 @@ class LoggingQueryField extends React.PureComponent<LoggingQueryFieldProps, Logg
       this.languageProvider
         .start()
         .then(remaining => {
-          remaining.map(task => task.then(this.onReceiveMetrics).catch(() => {}));
+          remaining.map(task => task.then(this.onUpdateLanguage).catch(() => {}));
         })
-        .then(() => this.onReceiveMetrics());
+        .then(() => this.onUpdateLanguage());
     }
   }
 
@@ -119,7 +119,7 @@ class LoggingQueryField extends React.PureComponent<LoggingQueryFieldProps, Logg
 
     this.languageProvider
       .fetchLabelValues(targetOption.value)
-      .then(this.onReceiveMetrics)
+      .then(this.onUpdateLanguage)
       .catch(() => {});
   };
 
@@ -147,7 +147,7 @@ class LoggingQueryField extends React.PureComponent<LoggingQueryFieldProps, Logg
     }
   };
 
-  onReceiveMetrics = () => {
+  onUpdateLanguage = () => {
     Prism.languages[PRISM_SYNTAX] = this.languageProvider.getSyntax();
     const { logLabelOptions } = this.languageProvider;
     this.setState({

+ 14 - 8
public/app/plugins/datasource/logging/datasource.ts

@@ -3,9 +3,10 @@ import _ from 'lodash';
 import * as dateMath from 'app/core/utils/datemath';
 
 import LanguageProvider from './language_provider';
-import { processStreams } from './result_transformer';
+import { mergeStreamsToLogs } from './result_transformer';
+import { LogsStream, LogsModel, makeSeriesForLogs } from 'app/core/logs_model';
 
-const DEFAULT_LIMIT = 100;
+export const DEFAULT_LIMIT = 1000;
 
 const DEFAULT_QUERY_PARAMS = {
   direction: 'BACKWARD',
@@ -67,6 +68,12 @@ export default class LoggingDatasource {
     return this.backendSrv.datasourceRequest(req);
   }
 
+  mergeStreams(streams: LogsStream[], intervalMs: number): LogsModel {
+    const logs = mergeStreamsToLogs(streams);
+    logs.series = makeSeriesForLogs(logs.rows, intervalMs);
+    return logs;
+  }
+
   prepareQueryTarget(target, options) {
     const interpolated = this.templateSrv.replace(target.expr);
     const start = this.getTime(options.range.from, false);
@@ -79,7 +86,7 @@ export default class LoggingDatasource {
     };
   }
 
-  query(options) {
+  query(options): Promise<{ data: LogsStream[] }> {
     const queryTargets = options.targets
       .filter(target => target.expr)
       .map(target => this.prepareQueryTarget(target, options));
@@ -91,17 +98,16 @@ export default class LoggingDatasource {
 
     return Promise.all(queries).then((results: any[]) => {
       // Flatten streams from multiple queries
-      const allStreams = results.reduce((acc, response, i) => {
-        const streams = response.data.streams || [];
+      const allStreams: LogsStream[] = results.reduce((acc, response, i) => {
+        const streams: LogsStream[] = response.data.streams || [];
         // Inject search for match highlighting
-        const search = queryTargets[i].regexp;
+        const search: string = queryTargets[i].regexp;
         streams.forEach(s => {
           s.search = search;
         });
         return [...acc, ...streams];
       }, []);
-      const model = processStreams(allStreams, DEFAULT_LIMIT);
-      return { data: model };
+      return { data: allStreams };
     });
   }
 

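The `query()` method now returns raw streams and leaves merging to the caller; before that it flattens the per-target responses and tags each stream with its target's regexp for highlighting. A restatement of that flattening step with simplified types (the real code reads the streams from `response.data.streams`):

```ts
interface StreamSketch {
  entries: Array<{ line: string; timestamp: string }>;
  search?: string;
}

// Tags each response's streams with the matching target's regexp and concatenates
// them into a single list.
function flattenStreams(responses: Array<{ streams?: StreamSketch[] }>, regexps: string[]): StreamSketch[] {
  return responses.reduce<StreamSketch[]>((acc, response, i) => {
    const streams = response.streams || [];
    streams.forEach(s => {
      s.search = regexps[i];
    });
    return [...acc, ...streams];
  }, []);
}
```
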
+ 3 - 5
public/app/plugins/datasource/logging/language_provider.ts

@@ -47,7 +47,6 @@ export default class LoggingLanguageProvider extends LanguageProvider {
     this.datasource = datasource;
     this.labelKeys = {};
     this.labelValues = {};
-    this.started = false;
 
     Object.assign(this, initialValues);
   }
@@ -63,11 +62,10 @@ export default class LoggingLanguageProvider extends LanguageProvider {
   };
 
   start = () => {
-    if (!this.started) {
-      this.started = true;
-      return this.fetchLogLabels();
+    if (!this.startTask) {
+      this.startTask = this.fetchLogLabels();
     }
-    return Promise.resolve([]);
+    return this.startTask;
   };
 
   // Keep this DOM-free for testing

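The logging language provider (and, further down, the Prometheus one) now caches the `start()` promise instead of a boolean flag, so repeated or concurrent callers await the same initialization. The pattern in isolation, as a generic sketch rather than the provider code itself:

```ts
// Generic "run once, share the promise" helper mirroring the startTask change.
function once<T>(factory: () => Promise<T>): () => Promise<T> {
  let task: Promise<T> | undefined;
  return () => {
    if (!task) {
      task = factory(); // first caller kicks off the work
    }
    return task; // later callers reuse the same promise
  };
}

// Usage sketch: const start = once(() => fetchLogLabels());
```
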
+ 137 - 4
public/app/plugins/datasource/logging/result_transformer.test.ts

@@ -1,14 +1,21 @@
-import { LogLevel } from 'app/core/logs_model';
+import { LogLevel, LogsStream } from 'app/core/logs_model';
 
-import { getLogLevel } from './result_transformer';
+import {
+  findCommonLabels,
+  findUniqueLabels,
+  formatLabels,
+  getLogLevel,
+  mergeStreamsToLogs,
+  parseLabels,
+} from './result_transformer';
 
 describe('getLoglevel()', () => {
   it('returns no log level on empty line', () => {
-    expect(getLogLevel('')).toBe(undefined);
+    expect(getLogLevel('')).toBe(LogLevel.none);
   });
 
   it('returns no log level on when level is part of a word', () => {
-    expect(getLogLevel('this is a warning')).toBe(undefined);
+    expect(getLogLevel('this is a warning')).toBe(LogLevel.none);
   });
 
   it('returns log level on line contains a log level', () => {
@@ -20,3 +27,129 @@ describe('getLoglevel()', () => {
     expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn);
   });
 });
+
+describe('parseLabels()', () => {
+  it('returns no labels on empty labels string', () => {
+    expect(parseLabels('')).toEqual({});
+    expect(parseLabels('{}')).toEqual({});
+  });
+
+  it('returns labels on labels string', () => {
+    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: '"bar"', baz: '"42"' });
+  });
+});
+
+describe('formatLabels()', () => {
+  it('returns no labels on empty label set', () => {
+    expect(formatLabels({})).toEqual('');
+    expect(formatLabels({}, 'foo')).toEqual('foo');
+  });
+
+  it('returns label string on label set', () => {
+    expect(formatLabels({ foo: '"bar"', baz: '"42"' })).toEqual('{baz="42", foo="bar"}');
+  });
+});
+
+describe('findCommonLabels()', () => {
+  it('returns no common labels on empty sets', () => {
+    expect(findCommonLabels([{}])).toEqual({});
+    expect(findCommonLabels([{}, {}])).toEqual({});
+  });
+
+  it('returns no common labels on differing sets', () => {
+    expect(findCommonLabels([{ foo: '"bar"' }, {}])).toEqual({});
+    expect(findCommonLabels([{}, { foo: '"bar"' }])).toEqual({});
+    expect(findCommonLabels([{ baz: '42' }, { foo: '"bar"' }])).toEqual({});
+    expect(findCommonLabels([{ foo: '42', baz: '"bar"' }, { foo: '"bar"' }])).toEqual({});
+  });
+
+  it('returns the single labels set as common labels', () => {
+    expect(findCommonLabels([{ foo: '"bar"' }])).toEqual({ foo: '"bar"' });
+  });
+});
+
+describe('findUniqueLabels()', () => {
+  it('returns no uncommon labels on empty sets', () => {
+    expect(findUniqueLabels({}, {})).toEqual({});
+  });
+
+  it('returns all labels given no common labels', () => {
+    expect(findUniqueLabels({ foo: '"bar"' }, {})).toEqual({ foo: '"bar"' });
+  });
+
+  it('returns all labels except the common labels', () => {
+    expect(findUniqueLabels({ foo: '"bar"', baz: '"42"' }, { foo: '"bar"' })).toEqual({ baz: '"42"' });
+  });
+});
+
+describe('mergeStreamsToLogs()', () => {
+  it('returns empty logs given no streams', () => {
+    expect(mergeStreamsToLogs([]).rows).toEqual([]);
+  });
+
+  it('returns processed logs from single stream', () => {
+    const stream1: LogsStream = {
+      labels: '{foo="bar"}',
+      entries: [
+        {
+          line: 'WARN boooo',
+          timestamp: '1970-01-01T00:00:00Z',
+        },
+      ],
+    };
+    expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
+      {
+        entry: 'WARN boooo',
+        labels: '{foo="bar"}',
+        key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
+        logLevel: 'warn',
+        uniqueLabels: '',
+      },
+    ]);
+  });
+
+  it('returns merged logs from multiple streams sorted by time and with unique labels', () => {
+    const stream1: LogsStream = {
+      labels: '{foo="bar", baz="1"}',
+      entries: [
+        {
+          line: 'WARN boooo',
+          timestamp: '1970-01-01T00:00:01Z',
+        },
+      ],
+    };
+    const stream2: LogsStream = {
+      labels: '{foo="bar", baz="2"}',
+      entries: [
+        {
+          line: 'INFO 1',
+          timestamp: '1970-01-01T00:00:00Z',
+        },
+        {
+          line: 'INFO 2',
+          timestamp: '1970-01-01T00:00:02Z',
+        },
+      ],
+    };
+    expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
+      {
+        entry: 'INFO 2',
+        labels: '{foo="bar", baz="2"}',
+        logLevel: 'info',
+        uniqueLabels: '{baz="2"}',
+      },
+      {
+        entry: 'WARN boooo',
+        labels: '{foo="bar", baz="1"}',
+        logLevel: 'warn',
+        uniqueLabels: '{baz="1"}',
+      },
+      {
+        entry: 'INFO 1',
+        labels: '{foo="bar", baz="2"}',
+        logLevel: 'info',
+        uniqueLabels: '{baz="2"}',
+      },
+    ]);
+  });
+});

+ 139 - 12
public/app/plugins/datasource/logging/result_transformer.ts

@@ -1,11 +1,26 @@
 import _ from 'lodash';
 import moment from 'moment';
 
-import { LogLevel, LogsModel, LogRow } from 'app/core/logs_model';
+import {
+  LogLevel,
+  LogsMetaItem,
+  LogsModel,
+  LogRow,
+  LogsStream,
+  LogsStreamEntry,
+  LogsStreamLabels,
+} from 'app/core/logs_model';
+import { DEFAULT_LIMIT } from './datasource';
 
+/**
+ * Returns the log level of a log line.
+ * Parse the line for level words. If no level is found, it returns `LogLevel.none`.
+ *
+ * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn`
+ */
 export function getLogLevel(line: string): LogLevel {
   if (!line) {
-    return undefined;
+    return LogLevel.none;
   }
   let level: LogLevel;
   Object.keys(LogLevel).forEach(key => {
@@ -16,37 +31,149 @@ export function getLogLevel(line: string): LogLevel {
       }
     }
   });
+  if (!level) {
+    level = LogLevel.none;
+  }
   return level;
 }
 
-export function processEntry(entry: { line: string; timestamp: string }, stream): LogRow {
+/**
+ * Regexp to extract Prometheus-style labels
+ */
+const labelRegexp = /\b(\w+)(!?=~?)("[^"\n]*?")/g;
+
+/**
+ * Returns a map of label keys to value from an input selector string.
+ *
+ * Example: `parseLabels('{job="foo", instance="bar"}') // {job: "foo", instance: "bar"}`
+ */
+export function parseLabels(labels: string): LogsStreamLabels {
+  const labelsByKey: LogsStreamLabels = {};
+  labels.replace(labelRegexp, (_, key, operator, value) => {
+    labelsByKey[key] = value;
+    return '';
+  });
+  return labelsByKey;
+}
+
+/**
+ * Returns a map of labels that are common to the given label sets.
+ */
+export function findCommonLabels(labelsSets: LogsStreamLabels[]): LogsStreamLabels {
+  return labelsSets.reduce((acc, labels) => {
+    if (!labels) {
+      throw new Error('Need parsed labels to find common labels.');
+    }
+    if (!acc) {
+      // Initial set
+      acc = { ...labels };
+    } else {
+      // Remove incoming labels that are missing or not matching in value
+      Object.keys(labels).forEach(key => {
+        if (acc[key] === undefined || acc[key] !== labels[key]) {
+          delete acc[key];
+        }
+      });
+      // Remove common labels that are missing from incoming label set
+      Object.keys(acc).forEach(key => {
+        if (labels[key] === undefined) {
+          delete acc[key];
+        }
+      });
+    }
+    return acc;
+  }, undefined);
+}
+
+/**
+ * Returns a map of labels that are in `labels`, but not in `commonLabels`.
+ */
+export function findUniqueLabels(labels: LogsStreamLabels, commonLabels: LogsStreamLabels): LogsStreamLabels {
+  const uncommonLabels: LogsStreamLabels = { ...labels };
+  Object.keys(commonLabels).forEach(key => {
+    delete uncommonLabels[key];
+  });
+  return uncommonLabels;
+}
+
+/**
+ * Serializes the given labels to a string.
+ */
+export function formatLabels(labels: LogsStreamLabels, defaultValue = ''): string {
+  if (!labels || Object.keys(labels).length === 0) {
+    return defaultValue;
+  }
+  const labelKeys = Object.keys(labels).sort();
+  const cleanSelector = labelKeys.map(key => `${key}=${labels[key]}`).join(', ');
+  return ['{', cleanSelector, '}'].join('');
+}
+
+export function processEntry(entry: LogsStreamEntry, labels: string, uniqueLabels: string, search: string): LogRow {
   const { line, timestamp } = entry;
-  const { labels } = stream;
+  // Assumes unique-ness, needs nanosec precision for timestamp
   const key = `EK${timestamp}${labels}`;
   const time = moment(timestamp);
+  const timeEpochMs = time.valueOf();
   const timeFromNow = time.fromNow();
   const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
   const logLevel = getLogLevel(line);
 
   return {
     key,
+    labels,
     logLevel,
     timeFromNow,
+    timeEpochMs,
     timeLocal,
+    uniqueLabels,
     entry: line,
-    searchWords: [stream.search],
+    searchWords: search ? [search] : [],
     timestamp: timestamp,
   };
 }
 
-export function processStreams(streams, limit?: number): LogsModel {
-  const combinedEntries = streams.reduce((acc, stream) => {
-    return [...acc, ...stream.entries.map(entry => processEntry(entry, stream))];
-  }, []);
-  const sortedEntries = _.chain(combinedEntries)
+export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_LIMIT): LogsModel {
+  // Find unique labels for each stream
+  streams = streams.map(stream => ({
+    ...stream,
+    parsedLabels: parseLabels(stream.labels),
+  }));
+  const commonLabels = findCommonLabels(streams.map(model => model.parsedLabels));
+  streams = streams.map(stream => ({
+    ...stream,
+    uniqueLabels: formatLabels(findUniqueLabels(stream.parsedLabels, commonLabels)),
+  }));
+
+  // Merge stream entries into single list of log rows
+  const sortedRows: LogRow[] = _.chain(streams)
+    .reduce(
+      (acc: LogRow[], stream: LogsStream) => [
+        ...acc,
+        ...stream.entries.map(entry => processEntry(entry, stream.labels, stream.uniqueLabels, stream.search)),
+      ],
+      []
+    )
     .sortBy('timestamp')
     .reverse()
-    .slice(0, limit || combinedEntries.length)
     .value();
-  return { rows: sortedEntries };
+
+  // Meta data to display in status
+  const meta: LogsMetaItem[] = [];
+  if (_.size(commonLabels) > 0) {
+    meta.push({
+      label: 'Common labels',
+      value: formatLabels(commonLabels),
+    });
+  }
+  if (limit) {
+    meta.push({
+      label: 'Limit',
+      value: `${limit} (${sortedRows.length} returned)`,
+    });
+  }
+
+  return {
+    meta,
+    rows: sortedRows,
+  };
 }
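
A usage sketch for the merging pipeline above, mirroring the new tests (the import path and the exact output values are assumptions based on this diff, not a verified run):

```ts
import { mergeStreamsToLogs } from './result_transformer';

const logs = mergeStreamsToLogs([
  { labels: '{job="app", instance="a"}', entries: [{ line: 'INFO started', timestamp: '1970-01-01T00:00:01Z' }] },
  { labels: '{job="app", instance="b"}', entries: [{ line: 'WARN slow', timestamp: '1970-01-01T00:00:02Z' }] },
]);

// Rows come back newest first, each carrying only the labels that differ from the
// common set, e.g. uniqueLabels '{instance="b"}' on the WARN row.
// logs.meta should contain roughly:
//   { label: 'Common labels', value: '{job="app"}' }
//   { label: 'Limit', value: '1000 (2 returned)' }
```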

+ 3 - 3
public/app/plugins/datasource/prometheus/components/PromQueryField.tsx

@@ -134,9 +134,9 @@ class PromQueryField extends React.PureComponent<PromQueryFieldProps, PromQueryF
       this.languageProvider
         .start()
         .then(remaining => {
-          remaining.map(task => task.then(this.onReceiveMetrics).catch(() => {}));
+          remaining.map(task => task.then(this.onUpdateLanguage).catch(() => {}));
         })
-        .then(() => this.onReceiveMetrics());
+        .then(() => this.onUpdateLanguage());
     }
   }
 
@@ -176,7 +176,7 @@ class PromQueryField extends React.PureComponent<PromQueryFieldProps, PromQueryF
     }
   };
 
-  onReceiveMetrics = () => {
+  onUpdateLanguage = () => {
     const { histogramMetrics, metrics } = this.languageProvider;
     if (!metrics) {
       return;

+ 33 - 21
public/app/plugins/datasource/prometheus/language_provider.ts

@@ -20,8 +20,8 @@ const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
 
 const wrapLabel = (label: string) => ({ label });
 
-const setFunctionMove = (suggestion: CompletionItem): CompletionItem => {
-  suggestion.move = -1;
+const setFunctionKind = (suggestion: CompletionItem): CompletionItem => {
+  suggestion.kind = 'function';
   return suggestion;
 };
 
@@ -46,7 +46,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
   labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...]
   labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
   metrics?: string[];
-  started: boolean;
+  startTask: Promise<any>;
 
   constructor(datasource: any, initialValues?: any) {
     super();
@@ -56,7 +56,6 @@ export default class PromQlLanguageProvider extends LanguageProvider {
     this.labelKeys = {};
     this.labelValues = {};
     this.metrics = [];
-    this.started = false;
 
     Object.assign(this, initialValues);
   }
@@ -72,11 +71,10 @@ export default class PromQlLanguageProvider extends LanguageProvider {
   };
 
   start = () => {
-    if (!this.started) {
-      this.started = true;
-      return this.fetchMetricNames().then(() => [this.fetchHistogramMetrics()]);
+    if (!this.startTask) {
+      this.startTask = this.fetchMetricNames().then(() => [this.fetchHistogramMetrics()]);
     }
-    return Promise.resolve([]);
+    return this.startTask;
   };
 
   // Keep this DOM-free for testing
@@ -131,7 +129,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
     suggestions.push({
       prefixMatch: true,
       label: 'Functions',
-      items: FUNCTIONS.map(setFunctionMove),
+      items: FUNCTIONS.map(setFunctionKind),
     });
 
     if (metrics) {
@@ -156,7 +154,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
   }
 
   getAggregationCompletionItems({ value }: TypeaheadInput): TypeaheadOutput {
-    let refresher: Promise<any> = null;
+    const refresher: Promise<any> = null;
     const suggestions: CompletionItemGroup[] = [];
 
     // Stitch all query lines together to support multi-line queries
@@ -172,12 +170,30 @@ export default class PromQlLanguageProvider extends LanguageProvider {
       return text;
     }, '');
 
-    const leftSide = queryText.slice(0, queryOffset);
-    const openParensAggregationIndex = leftSide.lastIndexOf('(');
-    const openParensSelectorIndex = leftSide.slice(0, openParensAggregationIndex).lastIndexOf('(');
-    const closeParensSelectorIndex = leftSide.slice(openParensSelectorIndex).indexOf(')') + openParensSelectorIndex;
+    // Try to find the selector part on the left-hand side, such as `sum (m) by (l)`
+    const openParensAggregationIndex = queryText.lastIndexOf('(', queryOffset);
+    let openParensSelectorIndex = queryText.lastIndexOf('(', openParensAggregationIndex - 1);
+    let closeParensSelectorIndex = queryText.indexOf(')', openParensSelectorIndex);
+
+    // Try to find the selector part of an alternate aggregation clause, such as `sum by (l) (m)`
+    if (openParensSelectorIndex === -1) {
+      const closeParensAggregationIndex = queryText.indexOf(')', queryOffset);
+      closeParensSelectorIndex = queryText.indexOf(')', closeParensAggregationIndex + 1);
+      openParensSelectorIndex = queryText.lastIndexOf('(', closeParensSelectorIndex);
+    }
 
-    let selectorString = leftSide.slice(openParensSelectorIndex + 1, closeParensSelectorIndex);
+    const result = {
+      refresher,
+      suggestions,
+      context: 'context-aggregation',
+    };
+
+    // Suggestions are useless for alternative aggregation clauses without a selector in context
+    if (openParensSelectorIndex === -1) {
+      return result;
+    }
+
+    let selectorString = queryText.slice(openParensSelectorIndex + 1, closeParensSelectorIndex);
 
     // Range vector syntax not accounted for by subsequent parse so discard it if present
     selectorString = selectorString.replace(/\[[^\]]+\]$/, '');
@@ -188,14 +204,10 @@ export default class PromQlLanguageProvider extends LanguageProvider {
     if (labelKeys) {
       suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
     } else {
-      refresher = this.fetchSeriesLabels(selector);
+      result.refresher = this.fetchSeriesLabels(selector);
     }
 
-    return {
-      refresher,
-      suggestions,
-      context: 'context-aggregation',
-    };
+    return result;
   }
 
   getLabelCompletionItems({ text, wrapperClasses, labelKey, value }: TypeaheadInput): TypeaheadOutput {

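The rewritten scan above supports both `sum(m) by (l)` and `sum by (l) (m)` clause orders. A standalone restatement of just the index arithmetic (simplified; the real method also strips range-vector brackets and fetches label keys for the selector it finds):

```ts
// Given the full query text and the cursor offset inside the grouping clause,
// find the metric selector for both `sum(metric) by (label)` and
// `sum by (label) (metric)`. Returns null when no selector can be located,
// in which case suggestions would be useless anyway.
function findSelector(queryText: string, queryOffset: number): string | null {
  const openAgg = queryText.lastIndexOf('(', queryOffset);
  let openSel = queryText.lastIndexOf('(', openAgg - 1);
  let closeSel = queryText.indexOf(')', openSel);

  if (openSel === -1) {
    // Alternate form: the selector sits to the right of the grouping clause.
    const closeAgg = queryText.indexOf(')', queryOffset);
    closeSel = queryText.indexOf(')', closeAgg + 1);
    openSel = queryText.lastIndexOf('(', closeSel);
  }
  if (openSel === -1) {
    return null;
  }
  return queryText.slice(openSel + 1, closeSel);
}

// findSelector('sum(metric) by ()', 16) -> 'metric'
// findSelector('sum by () (metric)', 8) -> 'metric'
```
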
+ 57 - 57
public/app/plugins/datasource/prometheus/promql.ts

@@ -5,57 +5,57 @@ export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', '
 const AGGREGATION_OPERATORS = [
   {
     label: 'sum',
-    insertText: 'sum()',
+    insertText: 'sum',
     documentation: 'Calculate sum over dimensions',
   },
   {
     label: 'min',
-    insertText: 'min()',
+    insertText: 'min',
     documentation: 'Select minimum over dimensions',
   },
   {
     label: 'max',
-    insertText: 'max()',
+    insertText: 'max',
     documentation: 'Select maximum over dimensions',
   },
   {
     label: 'avg',
-    insertText: 'avg()',
+    insertText: 'avg',
     documentation: 'Calculate the average over dimensions',
   },
   {
     label: 'stddev',
-    insertText: 'stddev()',
+    insertText: 'stddev',
     documentation: 'Calculate population standard deviation over dimensions',
   },
   {
     label: 'stdvar',
-    insertText: 'stdvar()',
+    insertText: 'stdvar',
     documentation: 'Calculate population standard variance over dimensions',
   },
   {
     label: 'count',
-    insertText: 'count()',
+    insertText: 'count',
     documentation: 'Count number of elements in the vector',
   },
   {
     label: 'count_values',
-    insertText: 'count_values()',
+    insertText: 'count_values',
     documentation: 'Count number of elements with the same value',
   },
   {
     label: 'bottomk',
-    insertText: 'bottomk()',
+    insertText: 'bottomk',
     documentation: 'Smallest k elements by sample value',
   },
   {
     label: 'topk',
-    insertText: 'topk()',
+    insertText: 'topk',
     documentation: 'Largest k elements by sample value',
   },
   {
     label: 'quantile',
-    insertText: 'quantile()',
+    insertText: 'quantile',
     documentation: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions',
   },
 ];
@@ -63,302 +63,302 @@ const AGGREGATION_OPERATORS = [
 export const FUNCTIONS = [
   ...AGGREGATION_OPERATORS,
   {
-    insertText: 'abs()',
+    insertText: 'abs',
     label: 'abs',
     detail: 'abs(v instant-vector)',
     documentation: 'Returns the input vector with all sample values converted to their absolute value.',
   },
   {
-    insertText: 'absent()',
+    insertText: 'absent',
     label: 'absent',
     detail: 'absent(v instant-vector)',
     documentation:
       'Returns an empty vector if the vector passed to it has any elements and a 1-element vector with the value 1 if the vector passed to it has no elements. This is useful for alerting on when no time series exist for a given metric name and label combination.',
   },
   {
-    insertText: 'ceil()',
+    insertText: 'ceil',
     label: 'ceil',
     detail: 'ceil(v instant-vector)',
     documentation: 'Rounds the sample values of all elements in `v` up to the nearest integer.',
   },
   {
-    insertText: 'changes()',
+    insertText: 'changes',
     label: 'changes',
     detail: 'changes(v range-vector)',
     documentation:
       'For each input time series, `changes(v range-vector)` returns the number of times its value has changed within the provided time range as an instant vector.',
   },
   {
-    insertText: 'clamp_max()',
+    insertText: 'clamp_max',
     label: 'clamp_max',
     detail: 'clamp_max(v instant-vector, max scalar)',
     documentation: 'Clamps the sample values of all elements in `v` to have an upper limit of `max`.',
   },
   {
-    insertText: 'clamp_min()',
+    insertText: 'clamp_min',
     label: 'clamp_min',
     detail: 'clamp_min(v instant-vector, min scalar)',
     documentation: 'Clamps the sample values of all elements in `v` to have a lower limit of `min`.',
   },
   {
-    insertText: 'count_scalar()',
+    insertText: 'count_scalar',
     label: 'count_scalar',
     detail: 'count_scalar(v instant-vector)',
     documentation:
       'Returns the number of elements in a time series vector as a scalar. This is in contrast to the `count()` aggregation operator, which always returns a vector (an empty one if the input vector is empty) and allows grouping by labels via a `by` clause.',
   },
   {
-    insertText: 'day_of_month()',
+    insertText: 'day_of_month',
     label: 'day_of_month',
     detail: 'day_of_month(v=vector(time()) instant-vector)',
     documentation: 'Returns the day of the month for each of the given times in UTC. Returned values are from 1 to 31.',
   },
   {
-    insertText: 'day_of_week()',
+    insertText: 'day_of_week',
     label: 'day_of_week',
     detail: 'day_of_week(v=vector(time()) instant-vector)',
     documentation:
       'Returns the day of the week for each of the given times in UTC. Returned values are from 0 to 6, where 0 means Sunday etc.',
   },
   {
-    insertText: 'days_in_month()',
+    insertText: 'days_in_month',
     label: 'days_in_month',
     detail: 'days_in_month(v=vector(time()) instant-vector)',
     documentation:
       'Returns number of days in the month for each of the given times in UTC. Returned values are from 28 to 31.',
   },
   {
-    insertText: 'delta()',
+    insertText: 'delta',
     label: 'delta',
     detail: 'delta(v range-vector)',
     documentation:
       'Calculates the difference between the first and last value of each time series element in a range vector `v`, returning an instant vector with the given deltas and equivalent labels. The delta is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if the sample values are all integers.',
   },
   {
-    insertText: 'deriv()',
+    insertText: 'deriv',
     label: 'deriv',
     detail: 'deriv(v range-vector)',
     documentation:
       'Calculates the per-second derivative of the time series in a range vector `v`, using simple linear regression.',
   },
   {
-    insertText: 'drop_common_labels()',
+    insertText: 'drop_common_labels',
     label: 'drop_common_labels',
     detail: 'drop_common_labels(instant-vector)',
     documentation: 'Drops all labels that have the same name and value across all series in the input vector.',
   },
   {
-    insertText: 'exp()',
+    insertText: 'exp',
     label: 'exp',
     detail: 'exp(v instant-vector)',
     documentation:
       'Calculates the exponential function for all elements in `v`.\nSpecial cases are:\n* `Exp(+Inf) = +Inf` \n* `Exp(NaN) = NaN`',
   },
   {
-    insertText: 'floor()',
+    insertText: 'floor',
     label: 'floor',
     detail: 'floor(v instant-vector)',
     documentation: 'Rounds the sample values of all elements in `v` down to the nearest integer.',
   },
   {
-    insertText: 'histogram_quantile()',
+    insertText: 'histogram_quantile',
     label: 'histogram_quantile',
     detail: 'histogram_quantile(φ float, b instant-vector)',
     documentation:
       'Calculates the φ-quantile (0 ≤ φ ≤ 1) from the buckets `b` of a histogram. The samples in `b` are the counts of observations in each bucket. Each sample must have a label `le` where the label value denotes the inclusive upper bound of the bucket. (Samples without such a label are silently ignored.) The histogram metric type automatically provides time series with the `_bucket` suffix and the appropriate labels.',
   },
   {
-    insertText: 'holt_winters()',
+    insertText: 'holt_winters',
     label: 'holt_winters',
     detail: 'holt_winters(v range-vector, sf scalar, tf scalar)',
     documentation:
      'Produces a smoothed value for time series based on the range in `v`. The lower the smoothing factor `sf`, the more importance is given to old data. The higher the trend factor `tf`, the more weight is given to trends in the data. Both `sf` and `tf` must be between 0 and 1.',
   },
   {
-    insertText: 'hour()',
+    insertText: 'hour',
     label: 'hour',
     detail: 'hour(v=vector(time()) instant-vector)',
     documentation: 'Returns the hour of the day for each of the given times in UTC. Returned values are from 0 to 23.',
   },
   {
-    insertText: 'idelta()',
+    insertText: 'idelta',
     label: 'idelta',
     detail: 'idelta(v range-vector)',
     documentation:
       'Calculates the difference between the last two samples in the range vector `v`, returning an instant vector with the given deltas and equivalent labels.',
   },
   {
-    insertText: 'increase()',
+    insertText: 'increase',
     label: 'increase',
     detail: 'increase(v range-vector)',
     documentation:
       'Calculates the increase in the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. The increase is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if a counter increases only by integer increments.',
   },
   {
-    insertText: 'irate()',
+    insertText: 'irate',
     label: 'irate',
     detail: 'irate(v range-vector)',
     documentation:
       'Calculates the per-second instant rate of increase of the time series in the range vector. This is based on the last two data points. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.',
   },
   {
-    insertText: 'label_replace()',
+    insertText: 'label_replace',
     label: 'label_replace',
     detail: 'label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)',
     documentation:
       "For each timeseries in `v`, `label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)`  matches the regular expression `regex` against the label `src_label`.  If it matches, then the timeseries is returned with the label `dst_label` replaced by the expansion of `replacement`. `$1` is replaced with the first matching subgroup, `$2` with the second etc. If the regular expression doesn't match then the timeseries is returned unchanged.",
   },
   {
-    insertText: 'ln()',
+    insertText: 'ln',
     label: 'ln',
     detail: 'ln(v instant-vector)',
     documentation:
      'Calculates the natural logarithm for all elements in `v`.\nSpecial cases are:\n * `ln(+Inf) = +Inf`\n * `ln(0) = -Inf`\n * `ln(x < 0) = NaN`\n * `ln(NaN) = NaN`',
   },
   {
-    insertText: 'log2()',
+    insertText: 'log2',
     label: 'log2',
     detail: 'log2(v instant-vector)',
     documentation:
       'Calculates the binary logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.',
   },
   {
-    insertText: 'log10()',
+    insertText: 'log10',
     label: 'log10',
     detail: 'log10(v instant-vector)',
     documentation:
       'Calculates the decimal logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.',
   },
   {
-    insertText: 'minute()',
+    insertText: 'minute',
     label: 'minute',
     detail: 'minute(v=vector(time()) instant-vector)',
     documentation:
       'Returns the minute of the hour for each of the given times in UTC. Returned values are from 0 to 59.',
   },
   {
-    insertText: 'month()',
+    insertText: 'month',
     label: 'month',
     detail: 'month(v=vector(time()) instant-vector)',
     documentation:
       'Returns the month of the year for each of the given times in UTC. Returned values are from 1 to 12, where 1 means January etc.',
   },
   {
-    insertText: 'predict_linear()',
+    insertText: 'predict_linear',
     label: 'predict_linear',
     detail: 'predict_linear(v range-vector, t scalar)',
     documentation:
       'Predicts the value of time series `t` seconds from now, based on the range vector `v`, using simple linear regression.',
   },
   {
-    insertText: 'rate()',
+    insertText: 'rate',
     label: 'rate',
     detail: 'rate(v range-vector)',
     documentation:
       "Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.",
   },
   {
-    insertText: 'resets()',
+    insertText: 'resets',
     label: 'resets',
     detail: 'resets(v range-vector)',
     documentation:
       'For each input time series, `resets(v range-vector)` returns the number of counter resets within the provided time range as an instant vector. Any decrease in the value between two consecutive samples is interpreted as a counter reset.',
   },
   {
-    insertText: 'round()',
+    insertText: 'round',
     label: 'round',
     detail: 'round(v instant-vector, to_nearest=1 scalar)',
     documentation:
       'Rounds the sample values of all elements in `v` to the nearest integer. Ties are resolved by rounding up. The optional `to_nearest` argument allows specifying the nearest multiple to which the sample values should be rounded. This multiple may also be a fraction.',
   },
   {
-    insertText: 'scalar()',
+    insertText: 'scalar',
     label: 'scalar',
     detail: 'scalar(v instant-vector)',
     documentation:
       'Given a single-element input vector, `scalar(v instant-vector)` returns the sample value of that single element as a scalar. If the input vector does not have exactly one element, `scalar` will return `NaN`.',
   },
   {
-    insertText: 'sort()',
+    insertText: 'sort',
     label: 'sort',
     detail: 'sort(v instant-vector)',
     documentation: 'Returns vector elements sorted by their sample values, in ascending order.',
   },
   {
-    insertText: 'sort_desc()',
+    insertText: 'sort_desc',
     label: 'sort_desc',
     detail: 'sort_desc(v instant-vector)',
     documentation: 'Returns vector elements sorted by their sample values, in descending order.',
   },
   {
-    insertText: 'sqrt()',
+    insertText: 'sqrt',
     label: 'sqrt',
     detail: 'sqrt(v instant-vector)',
     documentation: 'Calculates the square root of all elements in `v`.',
   },
   {
-    insertText: 'time()',
+    insertText: 'time',
     label: 'time',
     detail: 'time()',
     documentation:
       'Returns the number of seconds since January 1, 1970 UTC. Note that this does not actually return the current time, but the time at which the expression is to be evaluated.',
   },
   {
-    insertText: 'vector()',
+    insertText: 'vector',
     label: 'vector',
     detail: 'vector(s scalar)',
     documentation: 'Returns the scalar `s` as a vector with no labels.',
   },
   {
-    insertText: 'year()',
+    insertText: 'year',
     label: 'year',
     detail: 'year(v=vector(time()) instant-vector)',
     documentation: 'Returns the year for each of the given times in UTC.',
   },
   {
-    insertText: 'avg_over_time()',
+    insertText: 'avg_over_time',
     label: 'avg_over_time',
     detail: 'avg_over_time(range-vector)',
     documentation: 'The average value of all points in the specified interval.',
   },
   {
-    insertText: 'min_over_time()',
+    insertText: 'min_over_time',
     label: 'min_over_time',
     detail: 'min_over_time(range-vector)',
     documentation: 'The minimum value of all points in the specified interval.',
   },
   {
-    insertText: 'max_over_time()',
+    insertText: 'max_over_time',
     label: 'max_over_time',
     detail: 'max_over_time(range-vector)',
     documentation: 'The maximum value of all points in the specified interval.',
   },
   {
-    insertText: 'sum_over_time()',
+    insertText: 'sum_over_time',
     label: 'sum_over_time',
     detail: 'sum_over_time(range-vector)',
     documentation: 'The sum of all values in the specified interval.',
   },
   {
-    insertText: 'count_over_time()',
+    insertText: 'count_over_time',
     label: 'count_over_time',
     detail: 'count_over_time(range-vector)',
     documentation: 'The count of all values in the specified interval.',
   },
   {
-    insertText: 'quantile_over_time()',
+    insertText: 'quantile_over_time',
     label: 'quantile_over_time',
     detail: 'quantile_over_time(scalar, range-vector)',
     documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.',
   },
   {
-    insertText: 'stddev_over_time()',
+    insertText: 'stddev_over_time',
     label: 'stddev_over_time',
     detail: 'stddev_over_time(range-vector)',
     documentation: 'The population standard deviation of the values in the specified interval.',
   },
   {
-    insertText: 'stdvar_over_time()',
+    insertText: 'stdvar_over_time',
     label: 'stdvar_over_time',
     detail: 'stdvar_over_time(range-vector)',
     documentation: 'The population standard variance of the values in the specified interval.',

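Note: every insertText above now contains just the bare function name, with no trailing parentheses. A minimal sketch of the effect, assuming a hypothetical item shape and applySuggestion helper (neither is part of this changeset): when the text after the cursor already contains the argument list, inserting only the name preserves that suffix instead of duplicating the parentheses.

  // Sketch only; PromCompletionItem and applySuggestion are illustrative names.
  interface PromCompletionItem {
    label: string;
    insertText: string;
    detail?: string;
    documentation?: string;
  }

  // The typed prefix is replaced by insertText; text after the cursor is kept as-is.
  function applySuggestion(item: PromCompletionItem, suffixAfterCursor: string): string {
    return item.insertText + suffixAfterCursor;
  }

  // applySuggestion({ label: 'avg_over_time', insertText: 'avg_over_time' }, '(metric[5m])')
  //   === 'avg_over_time(metric[5m])'
  // With the previous insertText 'avg_over_time()' the result would have been
  //   'avg_over_time()(metric[5m])'.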
+ 43 - 0
public/app/plugins/datasource/prometheus/specs/language_provider.test.ts

@@ -269,5 +269,48 @@ describe('Language completion provider', () => {
         },
       ]);
     });
+
+    it('returns no suggestions inside an unclear aggregation context using alternate syntax', () => {
+      const instance = new LanguageProvider(datasource, {
+        labelKeys: { '{__name__="metric"}': ['label1', 'label2', 'label3'] },
+      });
+      const value = Plain.deserialize('sum by ()');
+      const range = value.selection.merge({
+        anchorOffset: 8,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const result = instance.provideCompletionItems({
+        text: '',
+        prefix: '',
+        wrapperClasses: ['context-aggregation'],
+        value: valueWithSelection,
+      });
+      expect(result.context).toBe('context-aggregation');
+      expect(result.suggestions).toEqual([]);
+    });
+
+    it('returns label suggestions inside an aggregation context using alternate syntax', () => {
+      const instance = new LanguageProvider(datasource, {
+        labelKeys: { '{__name__="metric"}': ['label1', 'label2', 'label3'] },
+      });
+      const value = Plain.deserialize('sum by () (metric)');
+      const range = value.selection.merge({
+        anchorOffset: 8,
+      });
+      const valueWithSelection = value.change().select(range).value;
+      const result = instance.provideCompletionItems({
+        text: '',
+        prefix: '',
+        wrapperClasses: ['context-aggregation'],
+        value: valueWithSelection,
+      });
+      expect(result.context).toBe('context-aggregation');
+      expect(result.suggestions).toEqual([
+        {
+          items: [{ label: 'label1' }, { label: 'label2' }, { label: 'label3' }],
+          label: 'Labels',
+        },
+      ]);
+    });
   });
 });

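The two new tests cover the alternate aggregation syntax where the grouping clause precedes the vector, as in sum by () (metric). The cursor sits at offset 8, inside the empty grouping parentheses; with no vector following the grouping there is nothing to derive label suggestions from, while with (metric) present the provider suggests that metric's labels. A small illustration of the offset arithmetic:

  // Illustration only: offset 8 is the position between '(' and ')' of the grouping.
  const query = 'sum by () (metric)';
  const cursorOffset = 8;
  console.log(query.slice(0, cursorOffset)); // 'sum by ('
  console.log(query.slice(cursorOffset));    // ') (metric)'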
+ 39 - 15
public/app/plugins/panel/graph2/module.tsx

@@ -1,23 +1,22 @@
-// Libraries
 import _ from 'lodash';
 import React, { PureComponent } from 'react';
 
-// Components
 import Graph from 'app/viz/Graph';
-import { getTimeSeriesVMs } from 'app/viz/state/timeSeries';
 import { Switch } from 'app/core/components/Switch/Switch';
 
-// Types
-import { PanelProps, NullValueMode } from 'app/types';
+import { getTimeSeriesVMs } from 'app/viz/state/timeSeries';
+import { PanelProps, PanelOptionsProps, NullValueMode } from 'app/types';
 
 interface Options {
   showBars: boolean;
-}
+  showLines: boolean;
+  showPoints: boolean;
 
-interface Props extends PanelProps {
-  options: Options;
+  onChange: (options: Options) => void;
 }
 
+interface Props extends PanelProps<Options> {}
+
 export class Graph2 extends PureComponent<Props> {
   constructor(props) {
     super(props);
@@ -25,27 +24,52 @@ export class Graph2 extends PureComponent<Props> {
 
   render() {
     const { timeSeries, timeRange } = this.props;
+    const { showLines, showBars, showPoints } = this.props.options;
 
     const vmSeries = getTimeSeriesVMs({
       timeSeries: timeSeries,
       nullValueMode: NullValueMode.Ignore,
     });
 
-    return <Graph timeSeries={vmSeries} timeRange={timeRange} />;
+    return (
+      <Graph
+        timeSeries={vmSeries}
+        timeRange={timeRange}
+        showLines={showLines}
+        showPoints={showPoints}
+        showBars={showBars}
+      />
+    );
   }
 }
 
-export class TextOptions extends PureComponent<any> {
-  onChange = () => {};
+export class GraphOptions extends PureComponent<PanelOptionsProps<Options>> {
+  onToggleLines = () => {
+    this.props.onChange({ ...this.props.options, showLines: !this.props.options.showLines });
+  };
+
+  onToggleBars = () => {
+    this.props.onChange({ ...this.props.options, showBars: !this.props.options.showBars });
+  };
+
+  onTogglePoints = () => {
+    this.props.onChange({ ...this.props.options, showPoints: !this.props.options.showPoints });
+  };
 
   render() {
+    const { showBars, showPoints, showLines } = this.props.options;
+
     return (
-      <div className="section gf-form-group">
-        <h5 className="section-heading">Draw Modes</h5>
-        <Switch label="Lines" checked={true} onChange={this.onChange} />
+      <div>
+        <div className="section gf-form-group">
+          <h5 className="page-heading">Draw Modes</h5>
+          <Switch label="Lines" labelClass="width-5" checked={showLines} onChange={this.onToggleLines} />
+          <Switch label="Bars" labelClass="width-5" checked={showBars} onChange={this.onToggleBars} />
+          <Switch label="Points" labelClass="width-5" checked={showPoints} onChange={this.onTogglePoints} />
+        </div>
       </div>
     );
   }
 }
 
-export { Graph2 as PanelComponent, TextOptions as PanelOptions };
+export { Graph2 as PanelComponent, GraphOptions as PanelOptionsComponent };

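The panel and its options editor are now coupled only through the Options object: GraphOptions receives the current options and an onChange callback via PanelOptionsProps<Options>, and each toggle reports a complete new options object by spreading the old one and flipping a single flag. A minimal sketch of how a host could wire the two exports together; GraphPanelHost and its props are assumptions for illustration, not part of this changeset.

  import React, { PureComponent } from 'react';
  import { PanelComponent, PanelOptionsComponent } from 'app/plugins/panel/graph2/module';

  // Hypothetical host: keeps the options in state and passes them to both exports.
  class GraphPanelHost extends PureComponent<any, any> {
    state = { options: { showLines: true, showBars: false, showPoints: false } };

    render() {
      const { timeSeries, timeRange } = this.props;
      return (
        <div>
          {/* The panel reads the draw-mode flags from options */}
          <PanelComponent timeSeries={timeSeries} timeRange={timeRange} options={this.state.options} />
          {/* The editor reports a complete new options object on every toggle */}
          <PanelOptionsComponent options={this.state.options} onChange={options => this.setState({ options })} />
        </div>
      );
    }
  }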
+ 7 - 6
public/app/plugins/panel/table/renderer.ts

@@ -211,16 +211,17 @@ export class TableRenderer {
     value = this.formatColumnValue(columnIndex, value);
 
     const column = this.table.columns[columnIndex];
-    let style = '';
+    let cellStyle = '';
+    let textStyle = '';
     const cellClasses = [];
     let cellClass = '';
 
     if (this.colorState.cell) {
-      style = ' style="background-color:' + this.colorState.cell + '"';
+      cellStyle = ' style="background-color:' + this.colorState.cell + '"';
       cellClasses.push('table-panel-color-cell');
       this.colorState.cell = null;
     } else if (this.colorState.value) {
-      style = ' style="color:' + this.colorState.value + '"';
+      textStyle = ' style="color:' + this.colorState.value + '"';
       this.colorState.value = null;
     }
     // because of the fixed table headers css only solution
@@ -232,7 +233,7 @@ export class TableRenderer {
     }
 
     if (value === undefined) {
-      style = ' style="display:none;"';
+      cellStyle = ' style="display:none;"';
       column.hidden = true;
     } else {
       column.hidden = false;
@@ -258,7 +259,7 @@ export class TableRenderer {
       cellClasses.push('table-panel-cell-link');
 
       columnHtml += `
-        <a href="${cellLink}" target="${cellTarget}" data-link-tooltip data-original-title="${cellLinkTooltip}" data-placement="right"${style}>
+        <a href="${cellLink}" target="${cellTarget}" data-link-tooltip data-original-title="${cellLinkTooltip}" data-placement="right"${textStyle}>
           ${value}
         </a>
       `;
@@ -283,7 +284,7 @@ export class TableRenderer {
       cellClass = ' class="' + cellClasses.join(' ') + '"';
     }
 
-    columnHtml = '<td' + cellClass + style + '>' + columnHtml + '</td>';
+    columnHtml = '<td' + cellClass + cellStyle + textStyle + '>' + columnHtml + '</td>';
     return columnHtml;
   }
 

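The single style variable is split in two: cellStyle carries the background color (or display:none for hidden columns) and is applied to the td element, while textStyle carries the value color and is applied to the link anchor as well as the td. Illustrative markup, with example color values and the link attributes abbreviated:

  // Background-colored cell:
  //   <td class="table-panel-color-cell" style="background-color:rgba(245,54,54,0.9)">10.5</td>
  // Value-colored cell in a link column:
  //   <td class="table-panel-cell-link" style="color:rgba(245,54,54,0.9)">
  //     <a href="/d/abc" style="color:rgba(245,54,54,0.9)">10.5</a>
  //   </td>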
+ 2 - 1
public/app/types/explore.ts

@@ -86,10 +86,11 @@ export abstract class LanguageProvider {
   datasource: any;
   request: (url) => Promise<any>;
   /**
-   * Returns a promise that resolves with a task list when main syntax is loaded.
+   * Returns the start task, a promise that resolves with a task list when main syntax is loaded.
    * Task list consists of secondary promises that load more detailed language features.
    */
   start: () => Promise<any[]>;
+  startTask?: Promise<any[]>;
 }
 
 export interface TypeaheadInput {

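The abstract LanguageProvider now exposes an optional startTask promise alongside start(). A minimal sketch of one way a consumer could use it to avoid starting initialization more than once; ensureStarted is a hypothetical helper, not part of this changeset.

  // Sketch only: memoize the promise returned by start() on the provider instance.
  function ensureStarted(provider: LanguageProvider): Promise<any[]> {
    if (!provider.startTask) {
      provider.startTask = provider.start();
    }
    return provider.startTask;
  }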
Some files were not shown because too many files changed in this diff