
Merge branch 'master' into alerting_opentsdb

bergquist 9 years ago
commit 455df7f0a0
100 changed files with 2631 additions and 372 deletions
1. CHANGELOG.md (+1 -0)
2. Makefile (+15 -3)
3. README.md (+1 -1)
4. appveyor.yml (+4 -1)
5. build.go (+40 -0)
6. circle.yml (+1 -0)
7. conf/defaults.ini (+7 -2)
8. conf/sample.ini (+9 -3)
9. docker/blocks/influxdb/fig (+3 -1)
10. docs/sources/installation/configuration.md (+5 -1)
11. docs/sources/reference/graph.md (+1 -1)
12. pkg/api/alerting.go (+20 -23)
13. pkg/api/api.go (+1 -0)
14. pkg/api/cloudwatch/metrics.go (+1 -1)
15. pkg/api/dashboard.go (+7 -9)
16. pkg/api/dtos/alerting.go (+5 -0)
17. pkg/api/frontendsettings.go (+0 -1)
18. pkg/api/gnetproxy.go (+1 -0)
19. pkg/api/index.go (+1 -1)
20. pkg/api/login_oauth.go (+71 -4)
21. pkg/cmd/grafana-cli/services/services.go (+3 -0)
22. pkg/cmd/grafana-server/main.go (+1 -0)
23. pkg/cmd/grafana-server/server.go (+1 -1)
24. pkg/components/imguploader/imguploader.go (+8 -1)
25. pkg/login/ldap.go (+5 -2)
26. pkg/middleware/session.go (+1 -0)
27. pkg/models/alert.go (+6 -0)
28. pkg/services/alerting/conditions/evaluator.go (+3 -3)
29. pkg/services/alerting/conditions/query.go (+2 -1)
30. pkg/services/alerting/conditions/reducer_test.go (+3 -1)
31. pkg/services/alerting/eval_context.go (+6 -18)
32. pkg/services/alerting/extractor.go (+11 -2)
33. pkg/services/alerting/extractor_test.go (+349 -22)
34. pkg/services/alerting/interfaces.go (+3 -0)
35. pkg/services/alerting/notifier.go (+14 -5)
36. pkg/services/alerting/notifier_test.go (+8 -0)
37. pkg/services/alerting/notifiers/base.go (+19 -5)
38. pkg/services/alerting/notifiers/email.go (+2 -2)
39. pkg/services/alerting/notifiers/slack.go (+2 -2)
40. pkg/services/alerting/notifiers/webhook.go (+2 -2)
41. pkg/services/alerting/rule.go (+13 -3)
42. pkg/services/alerting/rule_test.go (+9 -4)
43. pkg/services/alerting/scheduler.go (+2 -1)
44. pkg/services/sqlstore/alert.go (+24 -0)
45. pkg/setting/setting.go (+7 -5)
46. pkg/setting/setting_oauth.go (+3 -0)
47. pkg/social/generic_oauth.go (+25 -13)
48. pkg/social/github_oauth.go (+4 -6)
49. pkg/social/google_oauth.go (+2 -4)
50. pkg/social/grafananet_oauth.go (+12 -42)
51. pkg/social/social.go (+19 -13)
52. pkg/tsdb/executor.go (+1 -1)
53. pkg/tsdb/graphite/graphite.go (+5 -1)
54. pkg/tsdb/influxdb/influxdb.go (+133 -0)
55. pkg/tsdb/influxdb/model_parser.go (+162 -0)
56. pkg/tsdb/influxdb/model_parser_test.go (+178 -0)
57. pkg/tsdb/influxdb/models.go (+49 -0)
58. pkg/tsdb/influxdb/query_builder.go (+116 -0)
59. pkg/tsdb/influxdb/query_builder_test.go (+87 -0)
60. pkg/tsdb/influxdb/query_part.go (+166 -0)
61. pkg/tsdb/influxdb/query_part_test.go (+93 -0)
62. pkg/tsdb/influxdb/response_parser.go (+94 -0)
63. pkg/tsdb/influxdb/response_parser_test.go (+59 -0)
64. pkg/tsdb/interval.go (+145 -0)
65. pkg/tsdb/interval_test.go (+57 -0)
66. pkg/tsdb/models.go (+9 -3)
67. pkg/tsdb/prometheus/prometheus.go (+9 -6)
68. pkg/tsdb/request.go (+2 -0)
69. pkg/tsdb/testdata/scenarios.go (+10 -5)
70. public/app/core/components/query_part/query_part_editor.ts (+3 -5)
71. public/app/core/controllers/login_ctrl.js (+1 -0)
72. public/app/core/directives/metric_segment.js (+12 -11)
73. public/app/features/alerting/alert_list_ctrl.ts (+14 -1)
74. public/app/features/alerting/alert_tab_ctrl.ts (+3 -2)
75. public/app/features/alerting/partials/alert_list.html (+4 -1)
76. public/app/features/alerting/partials/alert_tab.html (+2 -2)
77. public/app/features/dashboard/dashboard_ctrl.ts (+4 -2)
78. public/app/features/dashboard/dashboard_srv.ts (+9 -5)
79. public/app/features/dashboard/dynamic_dashboard_srv.ts (+11 -15)
80. public/app/features/dashboard/export/exporter.ts (+2 -1)
81. public/app/features/dashboard/specs/dashboard_srv_specs.ts (+5 -5)
82. public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts (+11 -7)
83. public/app/features/dashboard/timeSrv.js (+3 -4)
84. public/app/features/dashboard/viewStateSrv.js (+4 -1)
85. public/app/features/panel/panel_directive.ts (+23 -5)
86. public/app/features/panel/panel_menu.js (+2 -1)
87. public/app/features/plugins/import_list/import_list.html (+4 -2)
88. public/app/features/plugins/partials/plugin_edit.html (+3 -3)
89. public/app/features/plugins/plugin_edit_ctrl.ts (+5 -5)
90. public/app/plugins/app/testdata/dashboards/graph_last_1h.json (+314 -62)
91. public/app/plugins/app/testdata/plugin.json (+1 -1)
92. public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png (BIN)
93. public/app/plugins/datasource/elasticsearch/elastic_response.js (+3 -0)
94. public/app/plugins/datasource/elasticsearch/img/logo_large.png (BIN)
95. public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html (+1 -1)
96. public/app/plugins/datasource/elasticsearch/query_def.js (+1 -1)
97. public/app/plugins/datasource/graphite/datasource.ts (+4 -0)
98. public/app/plugins/datasource/graphite/img/graphite_logo.png (BIN)
99. public/app/plugins/datasource/graphite/query_ctrl.ts (+26 -3)
100. public/app/plugins/datasource/influxdb/datasource.ts (+18 -0)

+ 1 - 0
CHANGELOG.md

@@ -27,6 +27,7 @@
 * **Graph panel**: Fixed problem with auto decimals on y axis when datamin=datamax, fixes [#6070](https://github.com/grafana/grafana/pull/6070)
 * **Snapshot**: Can view embedded panels/png rendered panels in snapshots without login, fixes [#3769](https://github.com/grafana/grafana/pull/3769)
 * **Elasticsearch**: Fix for query template variable when looking up terms without query, no longer relies on elasticsearch default field, fixes [#3887](https://github.com/grafana/grafana/pull/3887)
+* **Elasticsearch**: Fix for displaying IP address used in terms aggregations, fixes [#4393](https://github.com/grafana/grafana/pull/4393)
 * **PNG Rendering**: Fix for server side rendering when using auth proxy, fixes [#5906](https://github.com/grafana/grafana/pull/5906)

 # 3.1.2 (unreleased)

+ 15 - 3
Makefile

@@ -1,16 +1,28 @@
 all: deps build

-deps:
+deps-go:
 	go run build.go setup
+
+deps-js:
 	npm install

-build:
+deps: deps-go deps-js
+
+build-go:
 	go run build.go build
+
+build-js:
 	npm run build

-test:
+build: build-go build-js
+
+test-go:
 	go test -v ./pkg/...
+
+test-js:
 	npm test

+test: test-go test-js
+
 run:
 	./bin/grafana-server

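The deps/build/test targets are now split per toolchain, so backend-only and frontend-only workflows can be run in isolation (a usage sketch, assuming GNU make and the targets exactly as defined in the hunk above):

    # Go toolchain only
    make deps-go build-go test-go
    # frontend only
    make deps-js build-js test-js
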
+ 1 - 1
README.md

@@ -74,7 +74,7 @@ Be sure to read the [getting started guide](http://docs.grafana.org/guides/getti
 
 
 ## Run from master
 If you want to build a package yourself, or contribute. Here is a guide for how to do that. You can always find
-the latest master builds [here](http://grafana.org/download/builds)
+the latest master builds [here](http://grafana.org/builds)

 ### Dependencies


+ 4 - 1
appveyor.yml

@@ -25,7 +25,10 @@ install:
 build_script:
   - go run build.go build
   - grunt release
+  #- 7z a grafana.zip %APPVEYOR_BUILD_FOLDER%\dist\*
+  - cp dist/* .

 artifacts:
-  - path: dist/*
+  - path: grafana-*windows-ia32.zip
+  #- path: dist/*
     name: binzip

+ 40 - 0
build.go

@@ -5,6 +5,7 @@ package main
 import (
 	"bytes"
 	"crypto/md5"
+	"crypto/sha1"
 	"encoding/json"
 	"flag"
 	"fmt"
@@ -85,17 +86,21 @@ func main() {
 		case "package":
 		case "package":
 			grunt(gruntBuildArg("release")...)
 			grunt(gruntBuildArg("release")...)
 			createLinuxPackages()
 			createLinuxPackages()
+			sha1FilesInDist()
 
 
 		case "pkg-rpm":
 		case "pkg-rpm":
 			grunt(gruntBuildArg("release")...)
 			grunt(gruntBuildArg("release")...)
 			createRpmPackages()
 			createRpmPackages()
+			sha1FilesInDist()
 
 
 		case "pkg-deb":
 		case "pkg-deb":
 			grunt(gruntBuildArg("release")...)
 			grunt(gruntBuildArg("release")...)
 			createDebPackages()
 			createDebPackages()
+			sha1FilesInDist()
 
 
 		case "latest":
 		case "latest":
 			makeLatestDistCopies()
 			makeLatestDistCopies()
+			sha1FilesInDist()
 
 
 		case "clean":
 		case "clean":
 			clean()
 			clean()
@@ -501,3 +506,38 @@ func md5File(file string) error {
 
 
 	return out.Close()
 }
+
+func sha1FilesInDist() {
+	filepath.Walk("./dist", func(path string, f os.FileInfo, err error) error {
+		if strings.Contains(path, ".sha1") == false {
+			sha1File(path)
+		}
+		return nil
+	})
+}
+
+func sha1File(file string) error {
+	fd, err := os.Open(file)
+	if err != nil {
+		return err
+	}
+	defer fd.Close()
+
+	h := sha1.New()
+	_, err = io.Copy(h, fd)
+	if err != nil {
+		return err
+	}
+
+	out, err := os.Create(file + ".sha1")
+	if err != nil {
+		return err
+	}
+
+	_, err = fmt.Fprintf(out, "%x\n", h.Sum(nil))
+	if err != nil {
+		return err
+	}
+
+	return out.Close()
+}

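The new sha1FilesInDist writes a companion `<file>.sha1` next to every artifact under ./dist, containing only the hex digest and a newline. A downloaded package can therefore be checked by comparing that digest against a locally computed one (a verification sketch; the package filename is a placeholder):

    # published digest written by build.go
    cat grafana-X.Y.Z.linux-x64.tar.gz.sha1
    # recompute locally and compare the two values
    sha1sum grafana-X.Y.Z.linux-x64.tar.gz
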
+ 1 - 0
circle.yml

@@ -28,3 +28,4 @@ deployment:
     owner: grafana
     commands:
       - ./scripts/trigger_grafana_packer.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN}
+      - ./scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN}

+ 7 - 2
conf/defaults.ini

@@ -267,6 +267,7 @@ auto_sign_up = true
 [auth.ldap]
 enabled = false
 config_file = /etc/grafana/ldap.toml
+allow_sign_up = true

 #################################### SMTP / Emailing #####################
 [smtp]
@@ -292,6 +293,9 @@ mode = console, file
 # Either "debug", "info", "warn", "error", "critical", default is "info"
 # Either "debug", "info", "warn", "error", "critical", default is "info"
 level = info
 level = info
 
 
+# optional settings to set different levels for specific loggers. Ex filters = sqlstore:debug
+filters =
+
 # For "console" mode only
 # For "console" mode only
 [log.console]
 [log.console]
 level =
 level =
@@ -401,7 +405,8 @@ global_session = -1
 #   \______(_______;;;)__;;;)

 [alerting]
-enabled = true
+# Makes it possible to turn off alert rule execution.
+execute_alerts = true

 #################################### Internal Grafana Metrics ############
 # Metrics available at HTTP API Url /api/metrics
@@ -421,7 +426,7 @@ url = https://grafana.net
 #################################### External Image Storage ##############
 [external_image_storage]
 # You can choose between (s3, webdav)
-provider = s3
+provider =

 [external_image_storage.s3]
 bucket_url =

+ 9 - 3
conf/sample.ini

@@ -252,6 +252,7 @@
 [auth.ldap]
 ;enabled = false
 ;config_file = /etc/grafana/ldap.toml
+;allow_sign_up = true

 #################################### SMTP / Emailing ##########################
 [smtp]
@@ -276,6 +277,10 @@
 # Either "trace", "debug", "info", "warn", "error", "critical", default is "info"
 # Either "trace", "debug", "info", "warn", "error", "critical", default is "info"
 ;level = info
 ;level = info
 
 
+# optional settings to set different levels for specific loggers. Ex filters = sqlstore:debug
+;filters =
+
+
 # For "console" mode only
 # For "console" mode only
 [log.console]
 [log.console]
 ;level =
 ;level =
@@ -350,7 +355,8 @@
 #   \______(_______;;;)__;;;)

 [alerting]
-;enabled = false
+# Makes it possible to turn off alert rule execution.
+;execute_alerts = true

 #################################### Internal Grafana Metrics ##########################
 # Metrics available at HTTP API Url /api/metrics
@@ -375,8 +381,8 @@
 #################################### External image storage ##########################
 [external_image_storage]
 # Used for uploading images to public servers so they can be included in slack/email messages.
-# you can choose between (s3, webdav or internal)
-;provider = s3
+# you can choose between (s3, webdav)
+;provider =

 [external_image_storage.s3]
 ;bucket_url =

+ 3 - 1
docker/blocks/influxdb/fig

@@ -1,5 +1,7 @@
 influxdb:
-  image: tutum/influxdb:0.12
+  #image: influxdb/influxdb:1.0-alpine
+  image: influxdb:latest
+  container_name: influxdb
   ports:
     - "2004:2004"
     - "8083:8083"

+ 5 - 1
docs/sources/installation/configuration.md

@@ -30,6 +30,9 @@ using environment variables using the syntax:
 Where the section name is the text within the brackets. Everything
 should be upper case, `.` should be replaced by `_`. For example, given these configuration settings:

+    # default section
+    instance_name = ${HOSTNAME}
+
     [security]
     admin_user = admin

@@ -39,6 +42,7 @@ should be upper case, `.` should be replaced by `_`. For example, given these co
 
 
 Then you can override them using:

+    export GF_DEFAULT_INSTANCE_NAME=my-instance
     export GF_SECURITY_ADMIN_USER=true
     export GF_AUTH_GOOGLE_CLIENT_SECRET=newS3cretKey

@@ -528,7 +532,7 @@ Use space to separate multiple modes, e.g. "console file"
 ### level
 Either "debug", "info", "warn", "error", "critical", default is "info"

-### filter
+### filters
 optional settings to set different levels for specific loggers.
 Ex `filters = sqlstore:debug`


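Applying the override rule documented in this hunk (uppercase the section and key, replace `.` with `_`, prefix with `GF_`) to settings touched elsewhere in this commit gives, for example (a sketch; the values are only illustrative):

    # [log] filters and [alerting] execute_alerts from conf/defaults.ini
    export GF_LOG_FILTERS=sqlstore:debug
    export GF_ALERTING_EXECUTE_ALERTS=false
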
+ 1 - 1
docs/sources/reference/graph.md

@@ -91,7 +91,7 @@ The legend values are calculated client side by Grafana and depend on what type
 aggregation or point consolidation you metric query is using. All the above legend values cannot
 be correct at the same time. For example if you plot a rate like requests/second, this is probably
 using average as aggregator, then the Total in the legend will not represent the total number of requests.
-It is just the sum of all data data points received by Grafana.
+It is just the sum of all data points received by Grafana.

 ## Display styles


+ 20 - 23
pkg/api/alerting.go

@@ -252,33 +252,30 @@ func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) R
 	return ApiSuccess("Test notification sent")
 }

-func getAlertIdForRequest(c *middleware.Context) (int64, error) {
-	alertId := c.QueryInt64("alertId")
-	panelId := c.QueryInt64("panelId")
-	dashboardId := c.QueryInt64("dashboardId")
-
-	if alertId == 0 && dashboardId == 0 && panelId == 0 {
-		return 0, fmt.Errorf("Missing alertId or dashboardId and panelId")
+//POST /api/:alertId/pause
+func PauseAlert(c *middleware.Context, dto dtos.PauseAlertCommand) Response {
+	cmd := models.PauseAlertCommand{
+		OrgId:   c.OrgId,
+		AlertId: c.ParamsInt64("alertId"),
+		Paused:  dto.Paused,
 	}

-	if alertId == 0 {
-		//fetch alertId
-		query := models.GetAlertsQuery{
-			OrgId:       c.OrgId,
-			DashboardId: dashboardId,
-			PanelId:     panelId,
-		}
-
-		if err := bus.Dispatch(&query); err != nil {
-			return 0, err
-		}
+	if err := bus.Dispatch(&cmd); err != nil {
+		return ApiError(500, "", err)
+	}

-		if len(query.Result) != 1 {
-			return 0, fmt.Errorf("PanelId is not unique on dashboard")
-		}
+	var response models.AlertStateType = models.AlertStateNoData
+	pausedState := "un paused"
+	if cmd.Paused {
+		response = models.AlertStatePaused
+		pausedState = "paused"
+	}

-		alertId = query.Result[0].Id
+	result := map[string]interface{}{
+		"alertId": cmd.AlertId,
+		"state":   response,
+		"message": "alert " + pausedState,
 	}

-	return alertId, nil
+	return Json(200, result)
 }

+ 1 - 0
pkg/api/api.go

@@ -252,6 +252,7 @@ func Register(r *macaron.Macaron) {
 
 
 		r.Group("/alerts", func() {
 		r.Group("/alerts", func() {
 			r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
 			r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
+			r.Post("/:alertId/pause", ValidateOrgAlert, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
 			r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
 			r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
 			r.Get("/", wrap(GetAlerts))
 			r.Get("/", wrap(GetAlerts))
 			r.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))
 			r.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))

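The new pause route accepts a JSON body matching dtos.PauseAlertCommand and returns the resulting alert state. A quick way to exercise it (a sketch, assuming the alert routes are mounted under /api on a local server, with a placeholder alert id and API key):

    curl -X POST http://localhost:3000/api/alerts/1/pause \
      -H "Content-Type: application/json" \
      -H "Authorization: Bearer $GRAFANA_API_KEY" \
      -d '{"paused": true}'
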
+ 1 - 1
pkg/api/cloudwatch/metrics.go

@@ -29,7 +29,7 @@ var customMetricsDimensionsMap map[string]map[string]map[string]*CustomMetricsCa
 func init() {
 	metricsMap = map[string][]string{
 		"AWS/ApiGateway":     {"4XXError", "5XXError", "CacheHitCount", "CacheMissCount", "Count", "IntegrationLatency", "Latency"},
-		"AWS/ApplicationELB": {"ActiveConnectionCount", "ClientTLSNegotiationErrorCount", "HealthyHostCount", "HTTPCode_ELB_4XX_Count", "HTTPCode_ELB_5XX_Count", "HTTPCode_Target_2XX_Count", "HTTPCode_Target_3XX_Count", "HTTPCode_Target_4XX_Count", "HTTPCode_Target_5XX_Count", "NewConnectionCount", "ProcessedBytes", "RejectedConnectionCount", "RequestCount", "TargetConnectionErrorCount", "TargetResponseTime", "TargetTLSNegotiationErrorCount", "UnhealthyHostCount"},
+		"AWS/ApplicationELB": {"ActiveConnectionCount", "ClientTLSNegotiationErrorCount", "HealthyHostCount", "HTTPCode_ELB_4XX_Count", "HTTPCode_ELB_5XX_Count", "HTTPCode_Target_2XX_Count", "HTTPCode_Target_3XX_Count", "HTTPCode_Target_4XX_Count", "HTTPCode_Target_5XX_Count", "NewConnectionCount", "ProcessedBytes", "RejectedConnectionCount", "RequestCount", "TargetConnectionErrorCount", "TargetResponseTime", "TargetTLSNegotiationErrorCount", "UnHealthyHostCount"},
 		"AWS/AutoScaling":    {"GroupMinSize", "GroupMaxSize", "GroupDesiredCapacity", "GroupInServiceInstances", "GroupPendingInstances", "GroupStandbyInstances", "GroupTerminatingInstances", "GroupTotalInstances"},
 		"AWS/Billing":        {"EstimatedCharges"},
 		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},

+ 7 - 9
pkg/api/dashboard.go

@@ -153,16 +153,14 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {
 		return ApiError(500, "Failed to save dashboard", err)
 	}

-	if setting.AlertingEnabled {
-		alertCmd := alerting.UpdateDashboardAlertsCommand{
-			OrgId:     c.OrgId,
-			UserId:    c.UserId,
-			Dashboard: cmd.Result,
-		}
+	alertCmd := alerting.UpdateDashboardAlertsCommand{
+		OrgId:     c.OrgId,
+		UserId:    c.UserId,
+		Dashboard: cmd.Result,
+	}

-		if err := bus.Dispatch(&alertCmd); err != nil {
-			return ApiError(500, "Failed to save alerts", err)
-		}
+	if err := bus.Dispatch(&alertCmd); err != nil {
+		return ApiError(500, "Failed to save alerts", err)
 	}

 	c.TimeRequest(metrics.M_Api_Dashboard_Save)

+ 5 - 0
pkg/api/dtos/alerting.go

@@ -58,3 +58,8 @@ type NotificationTestCommand struct {
 	Type     string           `json:"type"`
 	Settings *simplejson.Json `json:"settings"`
 }
+
+type PauseAlertCommand struct {
+	AlertId int64 `json:"alertId"`
+	Paused  bool  `json:"paused"`
+}

+ 0 - 1
pkg/api/frontendsettings.go

@@ -145,7 +145,6 @@ func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, erro
 			"hasUpdate":     plugins.GrafanaHasUpdate,
 			"hasUpdate":     plugins.GrafanaHasUpdate,
 			"env":           setting.Env,
 			"env":           setting.Env,
 		},
 		},
-		"alertingEnabled": setting.AlertingEnabled,
 	}
 	}
 
 
 	return jsonObj, nil
 	return jsonObj, nil

+ 1 - 0
pkg/api/gnetproxy.go

@@ -36,6 +36,7 @@ func ReverseProxyGnetReq(proxyPath string) *httputil.ReverseProxy {
 		// clear cookie headers
 		req.Header.Del("Cookie")
 		req.Header.Del("Set-Cookie")
+		req.Header.Del("Authorization")
 	}

 	return &httputil.ReverseProxy{Director: director}

+ 1 - 1
pkg/api/index.go

@@ -102,7 +102,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
 		Children: dashboardChildNavs,
 	})

-	if setting.AlertingEnabled && (c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR) {
+	if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR {
 		alertChildNavs := []*dtos.NavLink{
 			{Text: "Alert List", Url: setting.AppSubUrl + "/alerting/list"},
 			{Text: "Notifications", Url: setting.AppSubUrl + "/alerting/notifications"},

+ 71 - 4
pkg/api/login_oauth.go

@@ -1,9 +1,17 @@
 package api

 import (
+	"crypto/rand"
+	"crypto/tls"
+	"crypto/x509"
+	"encoding/base64"
 	"errors"
 	"fmt"
+	"io/ioutil"
+	"log"
+	"net/http"

+	"golang.org/x/net/context"
 	"golang.org/x/oauth2"

 	"github.com/grafana/grafana/pkg/bus"
@@ -14,6 +22,12 @@ import (
 	"github.com/grafana/grafana/pkg/social"
 	"github.com/grafana/grafana/pkg/social"
 )
 )
 
 
+func GenStateString() string {
+	rnd := make([]byte, 32)
+	rand.Read(rnd)
+	return base64.StdEncoding.EncodeToString(rnd)
+}
+
 func OAuthLogin(ctx *middleware.Context) {
 func OAuthLogin(ctx *middleware.Context) {
 	if setting.OAuthService == nil {
 	if setting.OAuthService == nil {
 		ctx.Handle(404, "login.OAuthLogin(oauth service not enabled)", nil)
 		ctx.Handle(404, "login.OAuthLogin(oauth service not enabled)", nil)
@@ -27,14 +41,63 @@ func OAuthLogin(ctx *middleware.Context) {
 		return
 	}

+	error := ctx.Query("error")
+	if error != "" {
+		errorDesc := ctx.Query("error_description")
+		ctx.Logger.Info("OAuthLogin Failed", "error", error, "errorDesc", errorDesc)
+		ctx.Redirect(setting.AppSubUrl + "/login?failCode=1003")
+		return
+	}
+
 	code := ctx.Query("code")
 	if code == "" {
-		ctx.Redirect(connect.AuthCodeURL("", oauth2.AccessTypeOnline))
+		state := GenStateString()
+		ctx.Session.Set(middleware.SESS_KEY_OAUTH_STATE, state)
+		ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline))
+		return
+	}
+
+	// verify state string
+	savedState := ctx.Session.Get(middleware.SESS_KEY_OAUTH_STATE).(string)
+	queryState := ctx.Query("state")
+	if savedState != queryState {
+		ctx.Handle(500, "login.OAuthLogin(state mismatch)", nil)
 		return
 	}

 	// handle call back
-	token, err := connect.Exchange(oauth2.NoContext, code)
+
+	// initialize oauth2 context
+	oauthCtx := oauth2.NoContext
+	if setting.OAuthService.OAuthInfos[name].TlsClientCert != "" {
+		cert, err := tls.LoadX509KeyPair(setting.OAuthService.OAuthInfos[name].TlsClientCert, setting.OAuthService.OAuthInfos[name].TlsClientKey)
+		if err != nil {
+			log.Fatal(err)
+		}
+
+		// Load CA cert
+		caCert, err := ioutil.ReadFile(setting.OAuthService.OAuthInfos[name].TlsClientCa)
+		if err != nil {
+			log.Fatal(err)
+		}
+		caCertPool := x509.NewCertPool()
+		caCertPool.AppendCertsFromPEM(caCert)
+
+		tr := &http.Transport{
+			TLSClientConfig: &tls.Config{
+				InsecureSkipVerify: true,
+				Certificates: []tls.Certificate{cert},
+				RootCAs: caCertPool,
+			},
+		}
+		sslcli := &http.Client{Transport: tr}
+
+		oauthCtx = context.TODO()
+		oauthCtx = context.WithValue(oauthCtx, oauth2.HTTPClient, sslcli)
+	}
+
+	// get token from provider
+	token, err := connect.Exchange(oauthCtx, code)
 	if err != nil {
 		ctx.Handle(500, "login.OAuthLogin(NewTransportWithCode)", err)
 		return
@@ -42,7 +105,11 @@ func OAuthLogin(ctx *middleware.Context) {
 
 
 	ctx.Logger.Debug("OAuthLogin Got token")

-	userInfo, err := connect.UserInfo(token)
+	// set up oauth2 client
+	client := connect.Client(oauthCtx, token)
+
+	// get user info
+	userInfo, err := connect.UserInfo(client)
 	if err != nil {
 		if err == social.ErrMissingTeamMembership {
 			ctx.Redirect(setting.AppSubUrl + "/login?failCode=1000")
@@ -82,7 +149,7 @@ func OAuthLogin(ctx *middleware.Context) {
 			return
 		}
 		cmd := m.CreateUserCommand{
-			Login:          userInfo.Email,
+			Login:          userInfo.Login,
 			Email:          userInfo.Email,
 			Name:           userInfo.Name,
 			Company:        userInfo.Company,

+ 3 - 0
pkg/cmd/grafana-cli/services/services.go

@@ -149,6 +149,9 @@ func createRequest(repoUrl string, subPaths ...string) ([]byte, error) {
 	}

 	res, err := HttpClient.Do(req)
+	if res.StatusCode/100 != 2 {
+		return []byte{}, fmt.Errorf("Api returned invalid status: %s", res.Status)
+	}

 	body, err := ioutil.ReadAll(res.Body)
 	defer res.Body.Close()

+ 1 - 0
pkg/cmd/grafana-server/main.go

@@ -20,6 +20,7 @@ import (
 	_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
 	_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
 	_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
 	_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
 	_ "github.com/grafana/grafana/pkg/tsdb/graphite"
 	_ "github.com/grafana/grafana/pkg/tsdb/graphite"
+	_ "github.com/grafana/grafana/pkg/tsdb/influxdb"
 	_ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
 	_ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
 	_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
 	_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
 	_ "github.com/grafana/grafana/pkg/tsdb/testdata"
 	_ "github.com/grafana/grafana/pkg/tsdb/testdata"

+ 1 - 1
pkg/cmd/grafana-server/server.go

@@ -59,7 +59,7 @@ func (g *GrafanaServerImpl) Start() {
 	plugins.Init()

 	// init alerting
-	if setting.AlertingEnabled {
+	if setting.ExecuteAlerts {
 		engine := alerting.NewEngine()
 		g.childRoutines.Go(func() error { return engine.Run(g.context) })
 	}

+ 8 - 1
pkg/components/imguploader/imguploader.go

@@ -10,6 +10,13 @@ type ImageUploader interface {
 	Upload(path string) (string, error)
 }

+type NopImageUploader struct {
+}
+
+func (NopImageUploader) Upload(path string) (string, error) {
+	return "", nil
+}
+
 func NewImageUploader() (ImageUploader, error) {

 	switch setting.ImageUploadProvider {
@@ -53,5 +60,5 @@ func NewImageUploader() (ImageUploader, error) {
 		return NewWebdavImageUploader(url, username, password)
 	}

-	return nil, fmt.Errorf("could not find specified provider")
+	return NopImageUploader{}, nil
 }

+ 5 - 2
pkg/login/ldap.go

@@ -13,6 +13,7 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/log"
 	m "github.com/grafana/grafana/pkg/models"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/setting"
 )
 )
 
 
 type ldapAuther struct {
 type ldapAuther struct {
@@ -29,7 +30,7 @@ func (a *ldapAuther) Dial() error {
 	var err error
 	var certPool *x509.CertPool
 	if a.server.RootCACert != "" {
-		certPool := x509.NewCertPool()
+		certPool = x509.NewCertPool()
 		for _, caCertFile := range strings.Split(a.server.RootCACert, " ") {
 			if pem, err := ioutil.ReadFile(caCertFile); err != nil {
 				return err
@@ -132,8 +133,10 @@ func (a *ldapAuther) getGrafanaUserFor(ldapUser *ldapUserInfo) (*m.User, error)
 	// get user from grafana db
 	userQuery := m.GetUserByLoginQuery{LoginOrEmail: ldapUser.Username}
 	if err := bus.Dispatch(&userQuery); err != nil {
-		if err == m.ErrUserNotFound {
+		if err == m.ErrUserNotFound && setting.LdapAllowSignup {
 			return a.createGrafanaUser(ldapUser)
+		} else if err == m.ErrUserNotFound {
+			return nil, ErrInvalidCredentials
 		} else {
 			return nil, err
 		}

+ 1 - 0
pkg/middleware/session.go

@@ -13,6 +13,7 @@ import (
 
 
 const (
 	SESS_KEY_USERID = "uid"
+	SESS_KEY_OAUTH_STATE = "state"
 )

 var sessionManager *session.Manager

+ 6 - 0
pkg/models/alert.go

@@ -101,6 +101,12 @@ type SaveAlertsCommand struct {
 	Alerts []*Alert
 }

+type PauseAlertCommand struct {
+	OrgId   int64
+	AlertId int64
+	Paused  bool
+}
+
 type SetAlertStateCommand struct {
 	AlertId  int64
 	OrgId    int64

+ 3 - 3
pkg/services/alerting/conditions/evaluator.go

@@ -28,7 +28,7 @@ type ThresholdEvaluator struct {
 	Threshold float64
 }

-func newThresholdEvaludator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) {
+func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) {
 	params := model.Get("params").MustArray()
 	if len(params) == 0 {
 		return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"}
@@ -111,7 +111,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) {
 	}

 	if inSlice(typ, defaultTypes) {
-		return newThresholdEvaludator(typ, model)
+		return newThresholdEvaluator(typ, model)
 	}

 	if inSlice(typ, rangedTypes) {
@@ -122,7 +122,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) {
 		return &NoDataEvaluator{}, nil
 	}

-	return nil, alerting.ValidationError{Reason: "Evaludator invalid evaluator type"}
+	return nil, alerting.ValidationError{Reason: "Evaluator invalid evaluator type: " + typ}
 }

 func inSlice(a string, list []string) bool {

+ 2 - 1
pkg/services/alerting/conditions/query.go

@@ -82,7 +82,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
 	req := c.getRequestForAlertRule(getDsInfo.Result, timeRange)
 	result := make(tsdb.TimeSeriesSlice, 0)

-	resp, err := c.HandleRequest(context.Context, req)
+	resp, err := c.HandleRequest(context.Ctx, req)
 	if err != nil {
 		return nil, fmt.Errorf("tsdb.HandleRequest() error %v", err)
 	}
@@ -123,6 +123,7 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRa
 					BasicAuth:         datasource.BasicAuth,
 					BasicAuthUser:     datasource.BasicAuthUser,
 					BasicAuthPassword: datasource.BasicAuthPassword,
+					JsonData:          datasource.JsonData,
 				},
 			},
 		},

+ 3 - 1
pkg/services/alerting/conditions/reducer_test.go

@@ -3,6 +3,8 @@ package conditions
 import (
 	"testing"

+	"gopkg.in/guregu/null.v3"
+
 	"github.com/grafana/grafana/pkg/tsdb"
 	. "github.com/smartystreets/goconvey/convey"
 )
@@ -43,7 +45,7 @@ func testReducer(typ string, datapoints ...float64) float64 {
 	}

 	for idx := range datapoints {
-		series.Points = append(series.Points, tsdb.NewTimePoint(datapoints[idx], 1234134))
+		series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(datapoints[idx]), 1234134))
 	}

 	return reducer.Reduce(series).Float64

+ 6 - 18
pkg/services/alerting/eval_context.go

@@ -28,23 +28,7 @@ type EvalContext struct {
 	NoDataFound     bool
 	RetryCount      int

-	Context context.Context
-}
-
-func (evalContext *EvalContext) Deadline() (deadline time.Time, ok bool) {
-	return evalContext.Deadline()
-}
-
-func (evalContext *EvalContext) Done() <-chan struct{} {
-	return evalContext.Context.Done()
-}
-
-func (evalContext *EvalContext) Err() error {
-	return evalContext.Context.Err()
-}
-
-func (evalContext *EvalContext) Value(key interface{}) interface{} {
-	return evalContext.Context.Value(key)
+	Ctx context.Context
 }

 type StateDescription struct {
@@ -103,6 +87,10 @@ func (c *EvalContext) GetDashboardSlug() (string, error) {
 }

 func (c *EvalContext) GetRuleUrl() (string, error) {
+	if c.IsTestRun {
+		return setting.AppUrl, nil
+	}
+
 	if slug, err := c.GetDashboardSlug(); err != nil {
 		return "", err
 	} else {
@@ -113,7 +101,7 @@ func (c *EvalContext) GetRuleUrl() (string, error) {
 
 
 func NewEvalContext(alertCtx context.Context, rule *Rule) *EvalContext {
 	return &EvalContext{
-		Context:     alertCtx,
+		Ctx:         alertCtx,
 		StartTime:   time.Now(),
 		Rule:        rule,
 		Logs:        make([]*ResultLogEntry, 0),

+ 11 - 2
pkg/services/alerting/extractor.go

@@ -80,6 +80,11 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
 				continue
 			}

+			frequency, err := getTimeDurationStringToSeconds(jsonAlert.Get("frequency").MustString())
+			if err != nil {
+				return nil, ValidationError{Reason: "Could not parse frequency"}
+			}
+
 			alert := &m.Alert{
 				DashboardId: e.Dash.Id,
 				OrgId:       e.OrgId,
@@ -88,7 +93,7 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
 				Name:        jsonAlert.Get("name").MustString(),
 				Handler:     jsonAlert.Get("handler").MustInt64(),
 				Message:     jsonAlert.Get("message").MustString(),
-				Frequency:   getTimeDurationStringToSeconds(jsonAlert.Get("frequency").MustString()),
+				Frequency:   frequency,
 			}

 			for _, condition := range jsonAlert.Get("conditions").MustArray() {
@@ -115,13 +120,17 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
 					jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id)
 				}

+				if interval, err := panel.Get("interval").String(); err == nil {
+					panelQuery.Set("interval", interval)
+				}
+
 				jsonQuery.Set("model", panelQuery.Interface())
 			}

 			alert.Settings = jsonAlert

 			// validate
-			_, err := NewRuleFromDBAlert(alert)
+			_, err = NewRuleFromDBAlert(alert)
 			if err == nil && alert.ValidToSave() {
 				alerts = append(alerts, alert)
 			} else {

+ 349 - 22
pkg/services/alerting/extractor_test.go

@@ -6,6 +6,7 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	m "github.com/grafana/grafana/pkg/models"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/setting"
 	. "github.com/smartystreets/goconvey/convey"
 	. "github.com/smartystreets/goconvey/convey"
 )
 )
 
 
@@ -17,8 +18,35 @@ func TestAlertRuleExtraction(t *testing.T) {
 			return &FakeCondition{}, nil
 		})

-		Convey("Parsing and validating alerts from dashboards", func() {
-			json := `{
+		setting.NewConfigContext(&setting.CommandLineArgs{
+			HomePath: "../../../",
+		})
+
+		// mock data
+		defaultDs := &m.DataSource{Id: 12, OrgId: 1, Name: "I am default", IsDefault: true}
+		graphite2Ds := &m.DataSource{Id: 15, OrgId: 1, Name: "graphite2"}
+		influxDBDs := &m.DataSource{Id: 16, OrgId: 1, Name: "InfluxDB"}
+
+		bus.AddHandler("test", func(query *m.GetDataSourcesQuery) error {
+			query.Result = []*m.DataSource{defaultDs, graphite2Ds}
+			return nil
+		})
+
+		bus.AddHandler("test", func(query *m.GetDataSourceByNameQuery) error {
+			if query.Name == defaultDs.Name {
+				query.Result = defaultDs
+			}
+			if query.Name == graphite2Ds.Name {
+				query.Result = graphite2Ds
+			}
+			if query.Name == influxDBDs.Name {
+				query.Result = influxDBDs
+			}
+			return nil
+		})
+
+		json := `
+      {
         "id": 57,
         "title": "Graphite 4",
         "originalTitle": "Graphite 4",
@@ -80,32 +108,16 @@ func TestAlertRuleExtraction(t *testing.T) {
           ]
         }
       ]
-    }`
+      }`
+
+		Convey("Parsing and validating dashboard containing graphite alerts", func() {
+
 			dashJson, err := simplejson.NewJson([]byte(json))
 			So(err, ShouldBeNil)

 			dash := m.NewDashboardFromJson(dashJson)
 			extractor := NewDashAlertExtractor(dash, 1)

-			// mock data
-			defaultDs := &m.DataSource{Id: 12, OrgId: 2, Name: "I am default", IsDefault: true}
-			graphite2Ds := &m.DataSource{Id: 15, OrgId: 2, Name: "graphite2"}
-
-			bus.AddHandler("test", func(query *m.GetDataSourcesQuery) error {
-				query.Result = []*m.DataSource{defaultDs, graphite2Ds}
-				return nil
-			})
-
-			bus.AddHandler("test", func(query *m.GetDataSourceByNameQuery) error {
-				if query.Name == defaultDs.Name {
-					query.Result = defaultDs
-				}
-				if query.Name == graphite2Ds.Name {
-					query.Result = graphite2Ds
-				}
-				return nil
-			})
-
 			alerts, err := extractor.GetAlerts()

 			Convey("Get rules without error", func() {
@@ -119,6 +131,9 @@ func TestAlertRuleExtraction(t *testing.T) {
 					So(v.DashboardId, ShouldEqual, 57)
 					So(v.Name, ShouldNotBeEmpty)
 					So(v.Message, ShouldNotBeEmpty)
+
+					settings := simplejson.NewFromAny(v.Settings)
+					So(settings.Get("interval").MustString(""), ShouldEqual, "")
 				}

 				Convey("should extract handler property", func() {
@@ -156,5 +171,317 @@ func TestAlertRuleExtraction(t *testing.T) {
 				})
 			})
 		})
+
+		Convey("Parse and validate dashboard containing influxdb alert", func() {
+
+			json2 := `{
+				  "id": 4,
+				  "title": "Influxdb",
+				  "tags": [
+				    "apa"
+				  ],
+				  "style": "dark",
+				  "timezone": "browser",
+				  "editable": true,
+				  "hideControls": false,
+				  "sharedCrosshair": false,
+				  "rows": [
+				    {
+				      "collapse": false,
+				      "editable": true,
+				      "height": "450px",
+				      "panels": [
+				        {
+				          "alert": {
+				            "conditions": [
+				              {
+				                "evaluator": {
+				                  "params": [
+				                    10
+				                  ],
+				                  "type": "gt"
+				                },
+				                "query": {
+				                  "params": [
+				                    "B",
+				                    "5m",
+				                    "now"
+				                  ]
+				                },
+				                "reducer": {
+				                  "params": [],
+				                  "type": "avg"
+				                },
+				                "type": "query"
+				              }
+				            ],
+				            "frequency": "3s",
+				            "handler": 1,
+				            "name": "Influxdb",
+				            "noDataState": "no_data",
+				            "notifications": [
+				              {
+				                "id": 6
+				              }
+				            ]
+				          },
+				          "alerting": {},
+				          "aliasColors": {
+				            "logins.count.count": "#890F02"
+				          },
+				          "bars": false,
+				          "datasource": "InfluxDB",
+				          "editable": true,
+				          "error": false,
+				          "fill": 1,
+				          "grid": {},
+				          "id": 1,
+				          "interval": ">10s",
+				          "isNew": true,
+				          "legend": {
+				            "avg": false,
+				            "current": false,
+				            "max": false,
+				            "min": false,
+				            "show": true,
+				            "total": false,
+				            "values": false
+				          },
+				          "lines": true,
+				          "linewidth": 2,
+				          "links": [],
+				          "nullPointMode": "connected",
+				          "percentage": false,
+				          "pointradius": 5,
+				          "points": false,
+				          "renderer": "flot",
+				          "seriesOverrides": [],
+				          "span": 10,
+				          "stack": false,
+				          "steppedLine": false,
+				          "targets": [
+				            {
+				              "dsType": "influxdb",
+				              "groupBy": [
+				                {
+				                  "params": [
+				                    "$interval"
+				                  ],
+				                  "type": "time"
+				                },
+				                {
+				                  "params": [
+				                    "datacenter"
+				                  ],
+				                  "type": "tag"
+				                },
+				                {
+				                  "params": [
+				                    "none"
+				                  ],
+				                  "type": "fill"
+				                }
+				              ],
+				              "hide": false,
+				              "measurement": "logins.count",
+				              "policy": "default",
+				              "query": "SELECT 8 * count(\"value\") FROM \"logins.count\" WHERE $timeFilter GROUP BY time($interval), \"datacenter\" fill(none)",
+				              "rawQuery": true,
+				              "refId": "B",
+				              "resultFormat": "time_series",
+				              "select": [
+				                [
+				                  {
+				                    "params": [
+				                      "value"
+				                    ],
+				                    "type": "field"
+				                  },
+				                  {
+				                    "params": [],
+				                    "type": "count"
+				                  }
+				                ]
+				              ],
+				              "tags": []
+				            },
+				            {
+				              "dsType": "influxdb",
+				              "groupBy": [
+				                {
+				                  "params": [
+				                    "$interval"
+				                  ],
+				                  "type": "time"
+				                },
+				                {
+				                  "params": [
+				                    "null"
+				                  ],
+				                  "type": "fill"
+				                }
+				              ],
+				              "hide": true,
+				              "measurement": "cpu",
+				              "policy": "default",
+				              "refId": "A",
+				              "resultFormat": "time_series",
+				              "select": [
+				                [
+				                  {
+				                    "params": [
+				                      "value"
+				                    ],
+				                    "type": "field"
+				                  },
+				                  {
+				                    "params": [],
+				                    "type": "mean"
+				                  }
+				                ],
+				                [
+				                  {
+				                    "params": [
+				                      "value"
+				                    ],
+				                    "type": "field"
+				                  },
+				                  {
+				                    "params": [],
+				                    "type": "sum"
+				                  }
+				                ]
+				              ],
+				              "tags": []
+				            }
+				          ],
+				          "thresholds": [
+				            {
+				              "colorMode": "critical",
+				              "fill": true,
+				              "line": true,
+				              "op": "gt",
+				              "value": 10
+				            }
+				          ],
+				          "timeFrom": null,
+				          "timeShift": null,
+				          "title": "Panel Title",
+				          "tooltip": {
+				            "msResolution": false,
+				            "ordering": "alphabetical",
+				            "shared": true,
+				            "sort": 0,
+				            "value_type": "cumulative"
+				          },
+				          "type": "graph",
+				          "xaxis": {
+				            "mode": "time",
+				            "name": null,
+				            "show": true,
+				            "values": []
+				          },
+				          "yaxes": [
+				            {
+				              "format": "short",
+				              "logBase": 1,
+				              "max": null,
+				              "min": null,
+				              "show": true
+				            },
+				            {
+				              "format": "short",
+				              "logBase": 1,
+				              "max": null,
+				              "min": null,
+				              "show": true
+				            }
+				          ]
+				        },
+				        {
+				          "editable": true,
+				          "error": false,
+				          "id": 2,
+				          "isNew": true,
+				          "limit": 10,
+				          "links": [],
+				          "show": "current",
+				          "span": 2,
+				          "stateFilter": [
+				            "alerting"
+				          ],
+				          "title": "Alert status",
+				          "type": "alertlist"
+				        }
+				      ],
+				      "title": "Row"
+				    }
+				  ],
+				  "time": {
+				    "from": "now-5m",
+				    "to": "now"
+				  },
+				  "timepicker": {
+				    "now": true,
+				    "refresh_intervals": [
+				      "5s",
+				      "10s",
+				      "30s",
+				      "1m",
+				      "5m",
+				      "15m",
+				      "30m",
+				      "1h",
+				      "2h",
+				      "1d"
+				    ],
+				    "time_options": [
+				      "5m",
+				      "15m",
+				      "1h",
+				      "6h",
+				      "12h",
+				      "24h",
+				      "2d",
+				      "7d",
+				      "30d"
+				    ]
+				  },
+				  "templating": {
+				    "list": []
+				  },
+				  "annotations": {
+				    "list": []
+				  },
+				  "schemaVersion": 13,
+				  "version": 120,
+				  "links": [],
+				  "gnetId": null
+				}`
+
+			dashJson, err := simplejson.NewJson([]byte(json2))
+			So(err, ShouldBeNil)
+			dash := m.NewDashboardFromJson(dashJson)
+			extractor := NewDashAlertExtractor(dash, 1)
+
+			alerts, err := extractor.GetAlerts()
+
+			Convey("Get rules without error", func() {
+				So(err, ShouldBeNil)
+			})
+
+			Convey("should be able to read interval", func() {
+				So(len(alerts), ShouldEqual, 1)
+
+				for _, alert := range alerts {
+					So(alert.DashboardId, ShouldEqual, 4)
+
+					conditions := alert.Settings.Get("conditions").MustArray()
+					cond := simplejson.NewFromAny(conditions[0])
+
+					So(cond.Get("query").Get("model").Get("interval").MustString(), ShouldEqual, ">10s")
+				}
+			})
+		})
 	})
 }

+ 3 - 0
pkg/services/alerting/interfaces.go

@@ -16,6 +16,9 @@ type Notifier interface {
 	GetType() string
 	NeedsImage() bool
 	PassesFilter(rule *Rule) bool
+
+	GetNotifierId() int64
+	GetIsDefault() bool
 }

 type Condition interface {

+ 14 - 5
pkg/services/alerting/notifier.go

@@ -35,14 +35,22 @@ func (n *RootNotifier) PassesFilter(rule *Rule) bool {
 	return false
 }

-func (n *RootNotifier) Notify(context *EvalContext) error {
-	n.log.Info("Sending notifications for", "ruleId", context.Rule.Id)
+func (n *RootNotifier) GetNotifierId() int64 {
+	return 0
+}

+func (n *RootNotifier) GetIsDefault() bool {
+	return false
+}
+
+func (n *RootNotifier) Notify(context *EvalContext) error {
 	notifiers, err := n.getNotifiers(context.Rule.OrgId, context.Rule.Notifications, context)
 	if err != nil {
 		return err
 	}

+	n.log.Info("Sending notifications for", "ruleId", context.Rule.Id, "Amount to send", len(notifiers))
+
 	if len(notifiers) == 0 {
 		return nil
 	}
@@ -57,11 +65,12 @@ func (n *RootNotifier) Notify(context *EvalContext) error {
 }

 func (n *RootNotifier) sendNotifications(context *EvalContext, notifiers []Notifier) error {
-	g, _ := errgroup.WithContext(context.Context)
+	g, _ := errgroup.WithContext(context.Ctx)

 	for _, notifier := range notifiers {
-		n.log.Info("Sending notification", "firing", context.Firing, "type", notifier.GetType())
-		g.Go(func() error { return notifier.Notify(context) })
+		not := notifier //avoid updating scope variable in go routine
+		n.log.Info("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault())
+		g.Go(func() error { return not.Notify(context) })
 	}

 	return g.Wait()

+ 8 - 0
pkg/services/alerting/notifier_test.go

@@ -22,6 +22,14 @@ func (fn *FakeNotifier) NeedsImage() bool {
 	return true
 }

+func (n *FakeNotifier) GetNotifierId() int64 {
+	return 0
+}
+
+func (n *FakeNotifier) GetIsDefault() bool {
+	return false
+}
+
 func (fn *FakeNotifier) Notify(alertResult *EvalContext) error { return nil }

 func (fn *FakeNotifier) PassesFilter(rule *Rule) bool {

+ 19 - 5
pkg/services/alerting/notifiers/base.go

@@ -6,13 +6,19 @@ import (
 )

 type NotifierBase struct {
-	Name string
-	Type string
+	Name     string
+	Type     string
+	Id       int64
+	IsDeault bool
 }

-func NewNotifierBase(name, notifierType string, model *simplejson.Json) NotifierBase {
-	base := NotifierBase{Name: name, Type: notifierType}
-	return base
+func NewNotifierBase(id int64, isDefault bool, name, notifierType string, model *simplejson.Json) NotifierBase {
+	return NotifierBase{
+		Id:       id,
+		Name:     name,
+		IsDeault: isDefault,
+		Type:     notifierType,
+	}
 }

 func (n *NotifierBase) PassesFilter(rule *alerting.Rule) bool {
@@ -26,3 +32,11 @@ func (n *NotifierBase) GetType() string {
 func (n *NotifierBase) NeedsImage() bool {
 	return true
 }
+
+func (n *NotifierBase) GetNotifierId() int64 {
+	return n.Id
+}
+
+func (n *NotifierBase) GetIsDefault() bool {
+	return n.IsDeault
+}

+ 2 - 2
pkg/services/alerting/notifiers/email.go

@@ -29,7 +29,7 @@ func NewEmailNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
 	}

 	return &EmailNotifier{
-		NotifierBase: NewNotifierBase(model.Name, model.Type, model.Settings),
+		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
 		Addresses:    strings.Split(addressesString, "\n"),
 		log:          log.New("alerting.notifier.email"),
 	}, nil
@@ -63,7 +63,7 @@ func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error {
 		},
 		},
 	}
 	}
 
 
-	err = bus.DispatchCtx(evalContext, cmd)
+	err = bus.DispatchCtx(evalContext.Ctx, cmd)
 
 
 	if err != nil {
 	if err != nil {
 		this.log.Error("Failed to send alert notification email", "error", err)
 		this.log.Error("Failed to send alert notification email", "error", err)

+ 2 - 2
pkg/services/alerting/notifiers/slack.go

@@ -23,7 +23,7 @@ func NewSlackNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
 	}

 	return &SlackNotifier{
-		NotifierBase: NewNotifierBase(model.Name, model.Type, model.Settings),
+		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
 		Url:          url,
 		log:          log.New("alerting.notifier.slack"),
 	}, nil
@@ -90,7 +90,7 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error {
 	data, _ := json.Marshal(&body)
 	cmd := &m.SendWebhookSync{Url: this.Url, Body: string(data)}

-	if err := bus.DispatchCtx(evalContext, cmd); err != nil {
+	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
 		this.log.Error("Failed to send slack notification", "error", err, "webhook", this.Name)
 	}


+ 2 - 2
pkg/services/alerting/notifiers/webhook.go

@@ -20,7 +20,7 @@ func NewWebHookNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
 	}

 	return &WebhookNotifier{
-		NotifierBase: NewNotifierBase(model.Name, model.Type, model.Settings),
+		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
 		Url:          url,
 		User:         model.Settings.Get("user").MustString(),
 		Password:     model.Settings.Get("password").MustString(),
@@ -65,7 +65,7 @@ func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error {
 		Body:     string(body),
 	}

-	if err := bus.DispatchCtx(evalContext, cmd); err != nil {
+	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
 		this.log.Error("Failed to send webhook", "error", err, "webhook", this.Name)
 	}


+ 13 - 3
pkg/services/alerting/rule.go

@@ -43,17 +43,27 @@ var unitMultiplier = map[string]int{
 	"h": 3600,
 }

-func getTimeDurationStringToSeconds(str string) int64 {
+func getTimeDurationStringToSeconds(str string) (int64, error) {
 	multiplier := 1

-	value, _ := strconv.Atoi(ValueFormatRegex.FindAllString(str, 1)[0])
+	matches := ValueFormatRegex.FindAllString(str, 1)
+
+	if len(matches) <= 0 {
+		return 0, fmt.Errorf("Frequency could not be parsed")
+	}
+
+	value, err := strconv.Atoi(matches[0])
+	if err != nil {
+		return 0, err
+	}
+
 	unit := UnitFormatRegex.FindAllString(str, 1)[0]

 	if val, ok := unitMultiplier[unit]; ok {
 		multiplier = val
 	}

-	return int64(value * multiplier)
+	return int64(value * multiplier), nil
 }

 func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {

+ 9 - 4
pkg/services/alerting/rule_test.go

@@ -20,25 +20,30 @@ func TestAlertRuleModel(t *testing.T) {
 		})

 		Convey("Can parse seconds", func() {
-			seconds := getTimeDurationStringToSeconds("10s")
+			seconds, _ := getTimeDurationStringToSeconds("10s")
 			So(seconds, ShouldEqual, 10)
 		})

 		Convey("Can parse minutes", func() {
-			seconds := getTimeDurationStringToSeconds("10m")
+			seconds, _ := getTimeDurationStringToSeconds("10m")
 			So(seconds, ShouldEqual, 600)
 		})

 		Convey("Can parse hours", func() {
-			seconds := getTimeDurationStringToSeconds("1h")
+			seconds, _ := getTimeDurationStringToSeconds("1h")
 			So(seconds, ShouldEqual, 3600)
 		})

 		Convey("defaults to seconds", func() {
-			seconds := getTimeDurationStringToSeconds("1o")
+			seconds, _ := getTimeDurationStringToSeconds("1o")
 			So(seconds, ShouldEqual, 1)
 		})

+		Convey("should return err for empty string", func() {
+			_, err := getTimeDurationStringToSeconds("")
+			So(err, ShouldNotBeNil)
+		})
+
 		Convey("can construct alert rule model", func() {
 			json := `
 			{

+ 2 - 1
pkg/services/alerting/scheduler.go

@@ -5,6 +5,7 @@ import (
 	"time"

 	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
 )

 type SchedulerImpl struct {
@@ -48,7 +49,7 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) {
 	now := tickTime.Unix()

 	for _, job := range s.jobs {
-		if job.Running {
+		if job.Running || job.Rule.State == models.AlertStatePaused {
 			continue
 		}


+ 24 - 0
pkg/services/sqlstore/alert.go

@@ -18,6 +18,7 @@ func init() {
 	bus.AddHandler("sql", GetAllAlertQueryHandler)
 	bus.AddHandler("sql", SetAlertState)
 	bus.AddHandler("sql", GetAlertStatesForDashboard)
+	bus.AddHandler("sql", PauseAlertRule)
 }

 func GetAlertById(query *m.GetAlertByIdQuery) error {
@@ -243,6 +244,29 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error {
 	})
 }

+func PauseAlertRule(cmd *m.PauseAlertCommand) error {
+	return inTransaction(func(sess *xorm.Session) error {
+		alert := m.Alert{}
+
+		if has, err := sess.Id(cmd.AlertId).Get(&alert); err != nil {
+			return err
+		} else if !has {
+			return fmt.Errorf("Could not find alert")
+		}
+
+		var newState m.AlertStateType
+		if cmd.Paused {
+			newState = m.AlertStatePaused
+		} else {
+			newState = m.AlertStateNoData
+		}
+		alert.State = newState
+
+		sess.Id(alert.Id).Update(&alert)
+		return nil
+	})
+}
+
 func GetAlertStatesForDashboard(query *m.GetAlertStatesForDashboardQuery) error {
 	var rawSql = `SELECT
 	                id,

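The new PauseAlertRule handler is reached through the command bus like the other sqlstore handlers. A hedged caller-side sketch (the pauseAlert wrapper is illustrative only; just the PauseAlertCommand fields visible in this diff are assumed):

    package main

    import (
    	"github.com/grafana/grafana/pkg/bus"
    	m "github.com/grafana/grafana/pkg/models"
    )

    // pauseAlert dispatches the new command; bus.AddHandler("sql", PauseAlertRule)
    // above routes it to the sqlstore implementation.
    func pauseAlert(alertId int64, paused bool) error {
    	cmd := m.PauseAlertCommand{
    		AlertId: alertId,
    		Paused:  paused,
    	}
    	return bus.Dispatch(&cmd)
    }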
+ 7 - 5
pkg/setting/setting.go

@@ -134,8 +134,9 @@ var (
 	GoogleTagManagerId string

 	// LDAP
-	LdapEnabled    bool
-	LdapConfigFile string
+	LdapEnabled     bool
+	LdapConfigFile  string
+	LdapAllowSignup bool = true

 	// SMTP email settings
 	Smtp SmtpSettings
@@ -144,7 +145,7 @@ var (
 	Quota QuotaSettings

 	// Alerting
-	AlertingEnabled bool
+	ExecuteAlerts bool

 	// logger
 	logger log.Logger
@@ -460,7 +461,7 @@ func NewConfigContext(args *CommandLineArgs) error {

 	Env = Cfg.Section("").Key("app_mode").MustString("development")
 	InstanceName = Cfg.Section("").Key("instance_name").MustString("unknown_instance_name")
-	PluginsPath = Cfg.Section("paths").Key("plugins").String()
+	PluginsPath = makeAbsolute(Cfg.Section("paths").Key("plugins").String(), HomePath)

 	server := Cfg.Section("server")
 	AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server)
@@ -551,9 +552,10 @@ func NewConfigContext(args *CommandLineArgs) error {
 	ldapSec := Cfg.Section("auth.ldap")
 	LdapEnabled = ldapSec.Key("enabled").MustBool(false)
 	LdapConfigFile = ldapSec.Key("config_file").String()
+	LdapAllowSignup = ldapSec.Key("allow_sign_up").MustBool(true)

 	alerting := Cfg.Section("alerting")
-	AlertingEnabled = alerting.Key("enabled").MustBool(false)
+	ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true)

 	readSessionConfig()
 	readSmtpSettings()

+ 3 - 0
pkg/setting/setting_oauth.go

@@ -9,6 +9,9 @@ type OAuthInfo struct {
 	ApiUrl                 string
 	AllowSignup            bool
 	Name                   string
+	TlsClientCert          string
+	TlsClientKey           string
+	TlsClientCa            string
 }

 type OAuther struct {

+ 25 - 13
pkg/social/generic_oauth.go

@@ -5,7 +5,6 @@ import (
 	"errors"
 	"fmt"
 	"net/http"
-	"strconv"

 	"github.com/grafana/grafana/pkg/models"

@@ -160,15 +159,16 @@ func (s *GenericOAuth) FetchOrganizations(client *http.Client) ([]string, error)
 	return logins, nil
 }

-func (s *GenericOAuth) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
+func (s *GenericOAuth) UserInfo(client *http.Client) (*BasicUserInfo, error) {
 	var data struct {
-		Id    int    `json:"id"`
-		Name  string `json:"login"`
-		Email string `json:"email"`
+		Name       string              `json:"name"`
+		Login      string              `json:"login"`
+		Username   string              `json:"username"`
+		Email      string              `json:"email"`
+		Attributes map[string][]string `json:"attributes"`
 	}

 	var err error
-	client := s.Client(oauth2.NoContext, token)
 	r, err := client.Get(s.apiUrl)
 	if err != nil {
 		return nil, err
@@ -181,17 +181,13 @@ func (s *GenericOAuth) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
 	}

 	userInfo := &BasicUserInfo{
-		Identity: strconv.Itoa(data.Id),
 		Name:     data.Name,
+		Login:    data.Login,
 		Email:    data.Email,
 	}

-	if !s.IsTeamMember(client) {
-		return nil, errors.New("User not a member of one of the required teams")
-	}
-
-	if !s.IsOrganizationMember(client) {
-		return nil, errors.New("User not a member of one of the required organizations")
+	if (userInfo.Email == "" && data.Attributes["email:primary"] != nil) {
+		userInfo.Email = data.Attributes["email:primary"][0]
 	}

 	if userInfo.Email == "" {
@@ -201,5 +197,21 @@ func (s *GenericOAuth) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
 		}
 	}

+	if (userInfo.Login == "" && data.Username != "") {
+		userInfo.Login = data.Username
+	}
+
+	if (userInfo.Login == "") {
+		userInfo.Login = data.Email
+	}
+
+	if !s.IsTeamMember(client) {
+		return nil, errors.New("User not a member of one of the required teams")
+	}
+
+	if !s.IsOrganizationMember(client) {
+		return nil, errors.New("User not a member of one of the required organizations")
+	}
+
 	return userInfo, nil
 }

+ 4 - 6
pkg/social/github_oauth.go

@@ -5,7 +5,6 @@ import (
 	"errors"
 	"fmt"
 	"net/http"
-	"strconv"

 	"github.com/grafana/grafana/pkg/models"

@@ -168,15 +167,14 @@ func (s *SocialGithub) FetchOrganizations(client *http.Client) ([]string, error)
 	return logins, nil
 }

-func (s *SocialGithub) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
+func (s *SocialGithub) UserInfo(client *http.Client) (*BasicUserInfo, error) {
 	var data struct {
 		Id    int    `json:"id"`
-		Name  string `json:"login"`
+		Login string `json:"login"`
 		Email string `json:"email"`
 	}

 	var err error
-	client := s.Client(oauth2.NoContext, token)
 	r, err := client.Get(s.apiUrl)
 	if err != nil {
 		return nil, err
@@ -189,8 +187,8 @@ func (s *SocialGithub) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
 	}

 	userInfo := &BasicUserInfo{
-		Identity: strconv.Itoa(data.Id),
-		Name:     data.Name,
+		Name:     data.Login,
+		Login:    data.Login,
 		Email:    data.Email,
 	}


+ 2 - 4
pkg/social/google_oauth.go

@@ -2,6 +2,7 @@ package social

 import (
 	"encoding/json"
+	"net/http"

 	"github.com/grafana/grafana/pkg/models"

@@ -27,15 +28,13 @@ func (s *SocialGoogle) IsSignupAllowed() bool {
 	return s.allowSignup
 }

-func (s *SocialGoogle) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
+func (s *SocialGoogle) UserInfo(client *http.Client) (*BasicUserInfo, error) {
 	var data struct {
-		Id    string `json:"id"`
 		Name  string `json:"name"`
 		Email string `json:"email"`
 	}
 	var err error

-	client := s.Client(oauth2.NoContext, token)
 	r, err := client.Get(s.apiUrl)
 	if err != nil {
 		return nil, err
@@ -45,7 +44,6 @@ func (s *SocialGoogle) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
 		return nil, err
 	}
 	return &BasicUserInfo{
-		Identity: data.Id,
 		Name:     data.Name,
 		Email:    data.Email,
 	}, nil

+ 12 - 42
pkg/social/grafananet_oauth.go

@@ -2,9 +2,7 @@ package social

 import (
 	"encoding/json"
-	"fmt"
 	"net/http"
-	"strconv"

 	"github.com/grafana/grafana/pkg/models"

@@ -18,6 +16,10 @@ type SocialGrafanaNet struct {
 	allowSignup          bool
 }

+type OrgRecord struct {
+	Login string `json:"login"`
+}
+
 func (s *SocialGrafanaNet) Type() int {
 	return int(models.GRAFANANET)
 }
@@ -30,19 +32,14 @@ func (s *SocialGrafanaNet) IsSignupAllowed() bool {
 	return s.allowSignup
 }

-func (s *SocialGrafanaNet) IsOrganizationMember(client *http.Client) bool {
+func (s *SocialGrafanaNet) IsOrganizationMember(organizations []OrgRecord) bool {
 	if len(s.allowedOrganizations) == 0 {
 		return true
 	}

-	organizations, err := s.FetchOrganizations(client)
-	if err != nil {
-		return false
-	}
-
 	for _, allowedOrganization := range s.allowedOrganizations {
 		for _, organization := range organizations {
-			if organization == allowedOrganization {
+			if organization.Login == allowedOrganization {
 				return true
 			}
 		}
@@ -51,43 +48,16 @@ func (s *SocialGrafanaNet) IsOrganizationMember(client *http.Client) bool {
 	return false
 }

-func (s *SocialGrafanaNet) FetchOrganizations(client *http.Client) ([]string, error) {
-	type Record struct {
-		Login string `json:"login"`
-	}
-
-	url := fmt.Sprintf(s.url + "/api/oauth2/user/orgs")
-	r, err := client.Get(url)
-	if err != nil {
-		return nil, err
-	}
-
-	defer r.Body.Close()
-
-	var records []Record
-
-	if err = json.NewDecoder(r.Body).Decode(&records); err != nil {
-		return nil, err
-	}
-
-	var logins = make([]string, len(records))
-	for i, record := range records {
-		logins[i] = record.Login
-	}
-
-	return logins, nil
-}
-
-func (s *SocialGrafanaNet) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
+func (s *SocialGrafanaNet) UserInfo(client *http.Client) (*BasicUserInfo, error) {
 	var data struct {
-		Id    int    `json:"id"`
-		Name  string `json:"login"`
+		Name  string `json:"name"`
+		Login string `json:"username"`
 		Email string `json:"email"`
 		Role  string `json:"role"`
+		Orgs  []OrgRecord `json:"orgs"`
 	}

 	var err error
-	client := s.Client(oauth2.NoContext, token)
 	r, err := client.Get(s.url + "/api/oauth2/user")
 	if err != nil {
 		return nil, err
@@ -100,13 +70,13 @@ func (s *SocialGrafanaNet) UserInfo(token *oauth2.Token) (*BasicUserInfo, error)
 	}

 	userInfo := &BasicUserInfo{
-		Identity: strconv.Itoa(data.Id),
 		Name:     data.Name,
+		Login:    data.Login,
 		Email:    data.Email,
 		Role:     data.Role,
 	}

-	if !s.IsOrganizationMember(client) {
+	if !s.IsOrganizationMember(data.Orgs) {
 		return nil, ErrMissingOrganizationMembership
 	}


+ 19 - 13
pkg/social/social.go

@@ -1,16 +1,16 @@
 package social

 import (
+	"net/http"
 	"strings"

-	"github.com/grafana/grafana/pkg/setting"
 	"golang.org/x/net/context"
-
 	"golang.org/x/oauth2"
+
+	"github.com/grafana/grafana/pkg/setting"
 )

 type BasicUserInfo struct {
-	Identity string
 	Name     string
 	Email    string
 	Login    string
@@ -20,12 +20,13 @@ type BasicUserInfo struct {

 type SocialConnector interface {
 	Type() int
-	UserInfo(token *oauth2.Token) (*BasicUserInfo, error)
+	UserInfo(client *http.Client) (*BasicUserInfo, error)
 	IsEmailAllowed(email string) bool
 	IsSignupAllowed() bool

 	AuthCodeURL(state string, opts ...oauth2.AuthCodeOption) string
 	Exchange(ctx context.Context, code string) (*oauth2.Token, error)
+	Client(ctx context.Context, t *oauth2.Token) *http.Client
 }

 var (
@@ -52,6 +53,9 @@ func NewOAuthService() {
 			AllowedDomains: sec.Key("allowed_domains").Strings(" "),
 			AllowSignup:    sec.Key("allow_sign_up").MustBool(),
 			Name:           sec.Key("name").MustString(name),
+			TlsClientCert:  sec.Key("tls_client_cert").String(),
+			TlsClientKey:   sec.Key("tls_client_key").String(),
+			TlsClientCa:    sec.Key("tls_client_ca").String(),
 		}

 		if !info.Enabled {
@@ -59,6 +63,7 @@ func NewOAuthService() {
 		}

 		setting.OAuthService.OAuthInfos[name] = info
+
 		config := oauth2.Config{
 			ClientID:     info.ClientId,
 			ClientSecret: info.ClientSecret,
@@ -85,9 +90,10 @@ func NewOAuthService() {
 		// Google.
 		if name == "google" {
 			SocialMap["google"] = &SocialGoogle{
-				Config: &config, allowedDomains: info.AllowedDomains,
-				apiUrl:      info.ApiUrl,
-				allowSignup: info.AllowSignup,
+				Config:               &config,
+				allowedDomains:       info.AllowedDomains,
+				apiUrl:               info.ApiUrl,
+				allowSignup:          info.AllowSignup,
 			}
 		}

@@ -104,15 +110,15 @@ func NewOAuthService() {
 		}

 		if name == "grafananet" {
-			config := oauth2.Config{
+			config = oauth2.Config{
 				ClientID:     info.ClientId,
 				ClientSecret: info.ClientSecret,
-				Endpoint: oauth2.Endpoint{
-					AuthURL:  setting.GrafanaNetUrl + "/oauth2/authorize",
-					TokenURL: setting.GrafanaNetUrl + "/api/oauth2/token",
+				Endpoint:     oauth2.Endpoint{
+					AuthURL:      setting.GrafanaNetUrl + "/oauth2/authorize",
+					TokenURL:     setting.GrafanaNetUrl + "/api/oauth2/token",
 				},
-				RedirectURL: strings.TrimSuffix(setting.AppUrl, "/") + SocialBaseUrl + name,
-				Scopes:      info.Scopes,
+				RedirectURL:  strings.TrimSuffix(setting.AppUrl, "/") + SocialBaseUrl + name,
+				Scopes:       info.Scopes,
 			}

 			SocialMap["grafananet"] = &SocialGrafanaNet{

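Taken together, the SocialConnector changes move HTTP client construction out of each UserInfo implementation: callers now exchange the code, ask the connector for its *http.Client (where the new tls_client_cert/key/ca settings can be applied), and pass that client to UserInfo. A minimal sketch of that flow (the fetchUserInfo helper is illustrative only, not part of this diff):

    package main

    import (
    	"github.com/grafana/grafana/pkg/social"
    	"golang.org/x/net/context"
    )

    // fetchUserInfo shows the caller-side order of operations implied by the
    // new interface; error handling is kept minimal on purpose.
    func fetchUserInfo(ctx context.Context, connector social.SocialConnector, code string) (*social.BasicUserInfo, error) {
    	token, err := connector.Exchange(ctx, code)
    	if err != nil {
    		return nil, err
    	}

    	// The connector builds the client itself, so it can attach TLS client
    	// certificates or other transport settings configured per provider.
    	client := connector.Client(ctx, token)

    	return connector.UserInfo(client)
    }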
+ 1 - 1
pkg/tsdb/executor.go

@@ -3,7 +3,7 @@ package tsdb
 import "context"

 type Executor interface {
-	Execute(ctx context.Context, queries QuerySlice, context *QueryContext) *BatchResult
+	Execute(ctx context.Context, queries QuerySlice, query *QueryContext) *BatchResult
 }

 var registry map[string]GetExecutorFn

+ 5 - 1
pkg/tsdb/graphite/graphite.go

@@ -57,7 +57,11 @@ func (e *GraphiteExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
 	}

 	for _, query := range queries {
-		formData["target"] = []string{query.Model.Get("target").MustString()}
+		if fullTarget, err := query.Model.Get("targetFull").String(); err == nil {
+			formData["target"] = []string{fullTarget}
+		} else {
+			formData["target"] = []string{query.Model.Get("target").MustString()}
+		}
 	}

 	if setting.Env == setting.DEV {

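For context on the targetFull lookup: when a Graphite target references another query, the dashboard model is assumed to carry the fully expanded expression alongside the raw one, and the executor should send the expanded form to Graphite. A small sketch under that assumption (the example model values are made up, not taken from this diff):

    package main

    import (
    	"fmt"

    	"github.com/grafana/grafana/pkg/components/simplejson"
    )

    func main() {
    	// Hypothetical query model: "target" keeps the raw expression with a
    	// nested reference, "targetFull" the expanded one.
    	model, _ := simplejson.NewJson([]byte(`{
    		"target":     "scale(#A, 2)",
    		"targetFull": "scale(apps.backend.requests.count, 2)"
    	}`))

    	// Same preference the executor now applies: use targetFull when present.
    	if fullTarget, err := model.Get("targetFull").String(); err == nil {
    		fmt.Println(fullTarget)
    	} else {
    		fmt.Println(model.Get("target").MustString())
    	}
    }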
+ 133 - 0
pkg/tsdb/influxdb/influxdb.go

@@ -0,0 +1,133 @@
+package influxdb
+
+import (
+	"context"
+	"crypto/tls"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"net/url"
+	"path"
+	"time"
+
+	"golang.org/x/net/context/ctxhttp"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+type InfluxDBExecutor struct {
+	*tsdb.DataSourceInfo
+	QueryParser    *InfluxdbQueryParser
+	QueryBuilder   *QueryBuilder
+	ResponseParser *ResponseParser
+}
+
+func NewInfluxDBExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
+	return &InfluxDBExecutor{
+		DataSourceInfo: dsInfo,
+		QueryParser:    &InfluxdbQueryParser{},
+		QueryBuilder:   &QueryBuilder{},
+		ResponseParser: &ResponseParser{},
+	}
+}
+
+var (
+	glog       log.Logger
+	HttpClient *http.Client
+)
+
+func init() {
+	glog = log.New("tsdb.influxdb")
+	tsdb.RegisterExecutor("influxdb", NewInfluxDBExecutor)
+
+	tr := &http.Transport{
+		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+	}
+
+	HttpClient = &http.Client{
+		Timeout:   time.Duration(15 * time.Second),
+		Transport: tr,
+	}
+}
+
+func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult {
+	result := &tsdb.BatchResult{}
+
+	query, err := e.getQuery(queries, context)
+	if err != nil {
+		return result.WithError(err)
+	}
+
+	glog.Debug("Influxdb query", "raw query", query)
+
+	req, err := e.createRequest(query)
+	if err != nil {
+		return result.WithError(err)
+	}
+
+	resp, err := ctxhttp.Do(ctx, HttpClient, req)
+	if err != nil {
+		return result.WithError(err)
+	}
+
+	if resp.StatusCode/100 != 2 {
+		return result.WithError(fmt.Errorf("Influxdb returned invalid status code: %v", resp.Status))
+	}
+
+	var response Response
+	dec := json.NewDecoder(resp.Body)
+	dec.UseNumber()
+	err = dec.Decode(&response)
+	if err != nil {
+		return result.WithError(err)
+	}
+
+	result.QueryResults = make(map[string]*tsdb.QueryResult)
+	result.QueryResults["A"] = e.ResponseParser.Parse(&response)
+
+	return result
+}
+
+func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (string, error) {
+	for _, v := range queries {
+
+		query, err := e.QueryParser.Parse(v.Model, e.DataSourceInfo)
+		if err != nil {
+			return "", err
+		}
+
+		rawQuery, err := e.QueryBuilder.Build(query, context)
+		if err != nil {
+			return "", err
+		}
+
+		return rawQuery, nil
+	}
+
+	return "", fmt.Errorf("query request contains no queries")
+}
+
+func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) {
+	u, _ := url.Parse(e.Url)
+	u.Path = path.Join(u.Path, "query")
+
+	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+
+	params := req.URL.Query()
+	params.Set("q", query)
+	params.Set("db", e.Database)
+	params.Set("epoch", "s")
+	req.URL.RawQuery = params.Encode()
+
+	req.Header.Set("User-Agent", "Grafana")
+	if e.BasicAuth {
+		req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword)
+	}
+
+	glog.Debug("Influxdb request", "url", req.URL.String())
+	return req, nil
+}

+ 162 - 0
pkg/tsdb/influxdb/model_parser.go

@@ -0,0 +1,162 @@
+package influxdb
+
+import (
+	"strconv"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+type InfluxdbQueryParser struct{}
+
+func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSourceInfo) (*Query, error) {
+	policy := model.Get("policy").MustString("default")
+	rawQuery := model.Get("query").MustString("")
+	interval := model.Get("interval").MustString("")
+
+	measurement := model.Get("measurement").MustString("")
+
+	resultFormat, err := model.Get("resultFormat").String()
+	if err != nil {
+		return nil, err
+	}
+
+	tags, err := qp.parseTags(model)
+	if err != nil {
+		return nil, err
+	}
+
+	groupBys, err := qp.parseGroupBy(model)
+	if err != nil {
+		return nil, err
+	}
+
+	selects, err := qp.parseSelects(model)
+	if err != nil {
+		return nil, err
+	}
+
+	if interval == "" {
+		dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
+		if dsInterval != "" {
+			interval = dsInterval
+		}
+	}
+
+	return &Query{
+		Measurement:  measurement,
+		Policy:       policy,
+		ResultFormat: resultFormat,
+		GroupBy:      groupBys,
+		Tags:         tags,
+		Selects:      selects,
+		RawQuery:     rawQuery,
+		Interval:     interval,
+	}, nil
+}
+
+func (qp *InfluxdbQueryParser) parseSelects(model *simplejson.Json) ([]*Select, error) {
+	var result []*Select
+
+	for _, selectObj := range model.Get("select").MustArray() {
+		selectJson := simplejson.NewFromAny(selectObj)
+		var parts Select
+
+		for _, partObj := range selectJson.MustArray() {
+			part := simplejson.NewFromAny(partObj)
+			queryPart, err := qp.parseQueryPart(part)
+			if err != nil {
+				return nil, err
+			}
+
+			parts = append(parts, *queryPart)
+		}
+
+		result = append(result, &parts)
+	}
+
+	return result, nil
+}
+
+func (*InfluxdbQueryParser) parseTags(model *simplejson.Json) ([]*Tag, error) {
+	var result []*Tag
+	for _, t := range model.Get("tags").MustArray() {
+		tagJson := simplejson.NewFromAny(t)
+		tag := &Tag{}
+		var err error
+
+		tag.Key, err = tagJson.Get("key").String()
+		if err != nil {
+			return nil, err
+		}
+
+		tag.Value, err = tagJson.Get("value").String()
+		if err != nil {
+			return nil, err
+		}
+
+		operator, err := tagJson.Get("operator").String()
+		if err == nil {
+			tag.Operator = operator
+		}
+
+		condition, err := tagJson.Get("condition").String()
+		if err == nil {
+			tag.Condition = condition
+		}
+
+		result = append(result, tag)
+	}
+
+	return result, nil
+}
+
+func (*InfluxdbQueryParser) parseQueryPart(model *simplejson.Json) (*QueryPart, error) {
+	typ, err := model.Get("type").String()
+	if err != nil {
+		return nil, err
+	}
+
+	var params []string
+	for _, paramObj := range model.Get("params").MustArray() {
+		param := simplejson.NewFromAny(paramObj)
+
+		stringParam, err := param.String()
+		if err == nil {
+			params = append(params, stringParam)
+			continue
+		}
+
+		intParam, err := param.Int()
+		if err == nil {
+			params = append(params, strconv.Itoa(intParam))
+			continue
+		}
+
+		return nil, err
+
+	}
+
+	qp, err := NewQueryPart(typ, params)
+	if err != nil {
+		return nil, err
+	}
+
+	return qp, nil
+}
+
+func (qp *InfluxdbQueryParser) parseGroupBy(model *simplejson.Json) ([]*QueryPart, error) {
+	var result []*QueryPart
+
+	for _, groupObj := range model.Get("groupBy").MustArray() {
+		groupJson := simplejson.NewFromAny(groupObj)
+		queryPart, err := qp.parseQueryPart(groupJson)
+
+		if err != nil {
+			return nil, err
+		}
+		result = append(result, queryPart)
+	}
+
+	return result, nil
+}

+ 178 - 0
pkg/tsdb/influxdb/model_parser_test.go

@@ -0,0 +1,178 @@
+package influxdb
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestInfluxdbQueryParser(t *testing.T) {
+	Convey("Influxdb query parser", t, func() {
+
+		parser := &InfluxdbQueryParser{}
+		dsInfo := &tsdb.DataSourceInfo{
+			JsonData: simplejson.New(),
+		}
+
+		Convey("can parse influxdb json model", func() {
+			json := `
+        {
+        "dsType": "influxdb",
+        "groupBy": [
+          {
+            "params": [
+              "$interval"
+            ],
+            "type": "time"
+          },
+          {
+            "params": [
+              "datacenter"
+            ],
+            "type": "tag"
+          },
+          {
+            "params": [
+              "none"
+            ],
+            "type": "fill"
+          }
+        ],
+        "measurement": "logins.count",
+        "policy": "default",
+        "refId": "B",
+        "resultFormat": "time_series",
+        "select": [
+          [
+            {
+              "type": "field",
+              "params": [
+                "value"
+              ]
+            },
+            {
+              "type": "count",
+              "params": []
+            }
+          ],
+          [
+            {
+              "type": "field",
+              "params": [
+                "value"
+              ]
+            },
+            {
+              "type": "bottom",
+              "params": [
+                3
+              ]
+            }
+          ],
+          [
+            {
+              "type": "field",
+              "params": [
+                "value"
+              ]
+            },
+            {
+              "type": "mean",
+              "params": []
+            },
+            {
+              "type": "math",
+              "params": [
+                " / 100"
+              ]
+            }
+          ]
+        ],
+        "tags": [
+          {
+            "key": "datacenter",
+            "operator": "=",
+            "value": "America"
+          },
+          {
+            "condition": "OR",
+            "key": "hostname",
+            "operator": "=",
+            "value": "server1"
+          }
+        ]
+      }
+      `
+			dsInfo.JsonData.Set("timeInterval", ">20s")
+			modelJson, err := simplejson.NewJson([]byte(json))
+			So(err, ShouldBeNil)
+
+			res, err := parser.Parse(modelJson, dsInfo)
+			So(err, ShouldBeNil)
+			So(len(res.GroupBy), ShouldEqual, 3)
+			So(len(res.Selects), ShouldEqual, 3)
+			So(len(res.Tags), ShouldEqual, 2)
+			So(res.Interval, ShouldEqual, ">20s")
+		})
+
+		Convey("can parse raw query json model", func() {
+			json := `
+      {
+        "dsType": "influxdb",
+        "groupBy": [
+          {
+            "params": [
+              "$interval"
+            ],
+            "type": "time"
+          },
+          {
+            "params": [
+              "null"
+            ],
+            "type": "fill"
+          }
+        ],
+        "interval": ">10s",
+        "policy": "default",
+        "query": "RawDummieQuery",
+        "rawQuery": true,
+        "refId": "A",
+        "resultFormat": "time_series",
+        "select": [
+          [
+            {
+              "params": [
+                "value"
+              ],
+              "type": "field"
+            },
+            {
+              "params": [
+
+              ],
+              "type": "mean"
+            }
+          ]
+        ],
+        "tags": [
+
+        ]
+      }
+      `
+
+			modelJson, err := simplejson.NewJson([]byte(json))
+			So(err, ShouldBeNil)
+
+			res, err := parser.Parse(modelJson, dsInfo)
+			So(err, ShouldBeNil)
+			So(res.RawQuery, ShouldEqual, "RawDummieQuery")
+			So(len(res.GroupBy), ShouldEqual, 2)
+			So(len(res.Selects), ShouldEqual, 1)
+			So(len(res.Tags), ShouldEqual, 0)
+			So(res.Interval, ShouldEqual, ">10s")
+		})
+	})
+}

+ 49 - 0
pkg/tsdb/influxdb/models.go

@@ -0,0 +1,49 @@
+package influxdb
+
+type Query struct {
+	Measurement  string
+	Policy       string
+	ResultFormat string
+	Tags         []*Tag
+	GroupBy      []*QueryPart
+	Selects      []*Select
+	RawQuery     string
+
+	Interval string
+}
+
+type Tag struct {
+	Key       string
+	Operator  string
+	Value     string
+	Condition string
+}
+
+type Select []QueryPart
+
+type InfluxDbSelect struct {
+	Type string
+}
+
+type Response struct {
+	Results []Result
+	Err     error
+}
+
+type Result struct {
+	Series   []Row
+	Messages []*Message
+	Err      error
+}
+
+type Message struct {
+	Level string `json:"level,omitempty"`
+	Text  string `json:"text,omitempty"`
+}
+
+type Row struct {
+	Name    string            `json:"name,omitempty"`
+	Tags    map[string]string `json:"tags,omitempty"`
+	Columns []string          `json:"columns,omitempty"`
+	Values  [][]interface{}   `json:"values,omitempty"`
+}

+ 116 - 0
pkg/tsdb/influxdb/query_builder.go

@@ -0,0 +1,116 @@
+package influxdb
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+type QueryBuilder struct{}
+
+func (qb *QueryBuilder) Build(query *Query, queryContext *tsdb.QueryContext) (string, error) {
+	if query.RawQuery != "" {
+		q := query.RawQuery
+
+		q = strings.Replace(q, "$timeFilter", qb.renderTimeFilter(query, queryContext), 1)
+		q = strings.Replace(q, "$interval", tsdb.CalculateInterval(queryContext.TimeRange), 1)
+
+		return q, nil
+	}
+
+	res := qb.renderSelectors(query, queryContext)
+	res += qb.renderMeasurement(query)
+	res += qb.renderWhereClause(query)
+	res += qb.renderTimeFilter(query, queryContext)
+	res += qb.renderGroupBy(query, queryContext)
+
+	return res, nil
+}
+
+func (qb *QueryBuilder) renderTags(query *Query) []string {
+	var res []string
+	for i, tag := range query.Tags {
+		str := ""
+
+		if i > 0 {
+			if tag.Condition == "" {
+				str += "AND"
+			} else {
+				str += tag.Condition
+			}
+			str += " "
+		}
+
+		res = append(res, fmt.Sprintf(`%s"%s" %s '%s'`, str, tag.Key, tag.Operator, tag.Value))
+	}
+
+	return res
+}
+
+func (qb *QueryBuilder) renderTimeFilter(query *Query, queryContext *tsdb.QueryContext) string {
+	from := "now() - " + queryContext.TimeRange.From
+	to := ""
+
+	if queryContext.TimeRange.To != "now" && queryContext.TimeRange.To != "" {
+		to = " and time < now() - " + strings.Replace(queryContext.TimeRange.To, "now-", "", 1)
+	}
+
+	return fmt.Sprintf("time > %s%s", from, to)
+}
+
+func (qb *QueryBuilder) renderSelectors(query *Query, queryContext *tsdb.QueryContext) string {
+	res := "SELECT "
+
+	var selectors []string
+	for _, sel := range query.Selects {
+
+		stk := ""
+		for _, s := range *sel {
+			stk = s.Render(query, queryContext, stk)
+		}
+		selectors = append(selectors, stk)
+	}
+
+	return res + strings.Join(selectors, ", ")
+}
+
+func (qb *QueryBuilder) renderMeasurement(query *Query) string {
+	policy := ""
+	if query.Policy == "" || query.Policy == "default" {
+		policy = ""
+	} else {
+		policy = `"` + query.Policy + `".`
+	}
+	return fmt.Sprintf(` FROM %s"%s"`, policy, query.Measurement)
+}
+
+func (qb *QueryBuilder) renderWhereClause(query *Query) string {
+	res := " WHERE "
+	conditions := qb.renderTags(query)
+	res += strings.Join(conditions, " ")
+	if len(conditions) > 0 {
+		res += " AND "
+	}
+
+	return res
+}
+
+func (qb *QueryBuilder) renderGroupBy(query *Query, queryContext *tsdb.QueryContext) string {
+	groupBy := ""
+	for i, group := range query.GroupBy {
+		if i == 0 {
+			groupBy += " GROUP BY"
+		}
+
+		if i > 0 && group.Type != "fill" {
+			groupBy += ", " //fill is so very special. fill is a creep, fill is a weirdo
+		} else {
+			groupBy += " "
+		}
+
+		groupBy += group.Render(query, queryContext, "")
+	}
+
+	return groupBy
+}

+ 87 - 0
pkg/tsdb/influxdb/query_builder_test.go

@@ -0,0 +1,87 @@
+package influxdb
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestInfluxdbQueryBuilder(t *testing.T) {
+
+	Convey("Influxdb query builder", t, func() {
+		builder := QueryBuilder{}
+
+		qp1, _ := NewQueryPart("field", []string{"value"})
+		qp2, _ := NewQueryPart("mean", []string{})
+
+		groupBy1, _ := NewQueryPart("time", []string{"$interval"})
+		groupBy2, _ := NewQueryPart("tag", []string{"datacenter"})
+		groupBy3, _ := NewQueryPart("fill", []string{"null"})
+
+		tag1 := &Tag{Key: "hostname", Value: "server1", Operator: "="}
+		tag2 := &Tag{Key: "hostname", Value: "server2", Operator: "=", Condition: "OR"}
+
+		queryContext := &tsdb.QueryContext{
+			TimeRange: tsdb.NewTimeRange("5m", "now"),
+		}
+
+		Convey("can build simple query", func() {
+			query := &Query{
+				Selects:     []*Select{{*qp1, *qp2}},
+				Measurement: "cpu",
+				Policy:      "policy",
+				GroupBy:     []*QueryPart{groupBy1, groupBy3},
+				Interval:    "10s",
+			}
+
+			rawQuery, err := builder.Build(query, queryContext)
+			So(err, ShouldBeNil)
+			So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "policy"."cpu" WHERE time > now() - 5m GROUP BY time(10s) fill(null)`)
+		})
+
+		Convey("can build query with group bys", func() {
+			query := &Query{
+				Selects:     []*Select{{*qp1, *qp2}},
+				Measurement: "cpu",
+				GroupBy:     []*QueryPart{groupBy1, groupBy2, groupBy3},
+				Tags:        []*Tag{tag1, tag2},
+				Interval:    "5s",
+			}
+
+			rawQuery, err := builder.Build(query, queryContext)
+			So(err, ShouldBeNil)
+			So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "cpu" WHERE "hostname" = 'server1' OR "hostname" = 'server2' AND time > now() - 5m GROUP BY time(5s), "datacenter" fill(null)`)
+		})
+
+		Convey("can render time range", func() {
+			query := Query{}
+			builder := &QueryBuilder{}
+			Convey("render from: 2h to now-1h", func() {
+				query := Query{}
+				queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("2h", "now-1h")}
+				So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
+			})
+
+			Convey("render from: 10m", func() {
+				queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("10m", "now")}
+				So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 10m")
+			})
+		})
+
+		Convey("can build query from raw query", func() {
+			query := &Query{
+				Selects:     []*Select{{*qp1, *qp2}},
+				Measurement: "cpu",
+				Policy:      "policy",
+				GroupBy:     []*QueryPart{groupBy1, groupBy3},
+				Interval:    "10s",
+				RawQuery:    "Raw query",
+			}
+
+			rawQuery, err := builder.Build(query, queryContext)
+			So(err, ShouldBeNil)
+			So(rawQuery, ShouldEqual, `Raw query`)
+		})
+	})
+}

+ 166 - 0
pkg/tsdb/influxdb/query_part.go

@@ -0,0 +1,166 @@
+package influxdb
+
+import (
+	"fmt"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+var renders map[string]QueryDefinition
+
+type DefinitionParameters struct {
+	Name string
+	Type string
+}
+
+type QueryDefinition struct {
+	Renderer func(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string
+	Params   []DefinitionParameters
+}
+
+func init() {
+	renders = make(map[string]QueryDefinition)
+
+	renders["field"] = QueryDefinition{Renderer: fieldRenderer}
+
+	renders["spread"] = QueryDefinition{Renderer: functionRenderer}
+	renders["count"] = QueryDefinition{Renderer: functionRenderer}
+	renders["distinct"] = QueryDefinition{Renderer: functionRenderer}
+	renders["integral"] = QueryDefinition{Renderer: functionRenderer}
+	renders["mean"] = QueryDefinition{Renderer: functionRenderer}
+	renders["median"] = QueryDefinition{Renderer: functionRenderer}
+	renders["sum"] = QueryDefinition{Renderer: functionRenderer}
+
+	renders["derivative"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "duration", Type: "interval"}},
+	}
+
+	renders["non_negative_derivative"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "duration", Type: "interval"}},
+	}
+	renders["difference"] = QueryDefinition{Renderer: functionRenderer}
+	renders["moving_average"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "window", Type: "number"}},
+	}
+	renders["stddev"] = QueryDefinition{Renderer: functionRenderer}
+	renders["time"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "interval", Type: "time"}},
+	}
+	renders["fill"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "fill", Type: "string"}},
+	}
+	renders["elapsed"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "duration", Type: "interval"}},
+	}
+	renders["bottom"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "count", Type: "int"}},
+	}
+
+	renders["first"] = QueryDefinition{Renderer: functionRenderer}
+	renders["last"] = QueryDefinition{Renderer: functionRenderer}
+	renders["max"] = QueryDefinition{Renderer: functionRenderer}
+	renders["min"] = QueryDefinition{Renderer: functionRenderer}
+	renders["percentile"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "nth", Type: "int"}},
+	}
+	renders["top"] = QueryDefinition{
+		Renderer: functionRenderer,
+		Params:   []DefinitionParameters{{Name: "count", Type: "int"}},
+	}
+	renders["tag"] = QueryDefinition{
+		Renderer: fieldRenderer,
+		Params:   []DefinitionParameters{{Name: "tag", Type: "string"}},
+	}
+
+	renders["math"] = QueryDefinition{Renderer: suffixRenderer}
+	renders["alias"] = QueryDefinition{Renderer: aliasRenderer}
+}
+
+func fieldRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string {
+	if part.Params[0] == "*" {
+		return "*"
+	}
+	return fmt.Sprintf(`"%s"`, part.Params[0])
+}
+
+func getDefinedInterval(query *Query, queryContext *tsdb.QueryContext) string {
+	setInterval := strings.Replace(strings.Replace(query.Interval, "<", "", 1), ">", "", 1)
+	defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange)
+
+	if strings.Contains(query.Interval, ">") {
+		parsedDefaultInterval, err := time.ParseDuration(defaultInterval)
+		parsedSetInterval, err2 := time.ParseDuration(setInterval)
+
+		if err == nil && err2 == nil && parsedDefaultInterval > parsedSetInterval {
+			return defaultInterval
+		}
+	}
+
+	return setInterval
+}
+
+func functionRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string {
+	for i, param := range part.Params {
+		if param == "$interval" {
+			if query.Interval != "" {
+				part.Params[i] = getDefinedInterval(query, queryContext)
+			} else {
+				part.Params[i] = tsdb.CalculateInterval(queryContext.TimeRange)
+			}
+		}
+	}
+
+	if innerExpr != "" {
+		part.Params = append([]string{innerExpr}, part.Params...)
+	}
+
+	params := strings.Join(part.Params, ", ")
+
+	return fmt.Sprintf("%s(%s)", part.Type, params)
+}
+
+func suffixRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string {
+	return fmt.Sprintf("%s %s", innerExpr, part.Params[0])
+}
+
+func aliasRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string {
+	return fmt.Sprintf(`%s AS "%s"`, innerExpr, part.Params[0])
+}
+
+func (r QueryDefinition) Render(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string {
+	return r.Renderer(query, queryContext, part, innerExpr)
+}
+
+func NewQueryPart(typ string, params []string) (*QueryPart, error) {
+	def, exist := renders[typ]
+
+	if !exist {
+		return nil, fmt.Errorf("Missing query definition for %s", typ)
+	}
+
+	return &QueryPart{
+		Type:   typ,
+		Params: params,
+		Def:    def,
+	}, nil
+}
+
+type QueryPart struct {
+	Def    QueryDefinition
+	Type   string
+	Params []string
+}
+
+func (qp *QueryPart) Render(query *Query, queryContext *tsdb.QueryContext, expr string) string {
+	return qp.Def.Renderer(query, queryContext, qp, expr)
+}

+ 93 - 0
pkg/tsdb/influxdb/query_part_test.go

@@ -0,0 +1,93 @@
+package influxdb
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestInfluxdbQueryPart(t *testing.T) {
+	Convey("Influxdb query parts", t, func() {
+
+		queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("5m", "now")}
+		query := &Query{}
+
+		Convey("render field ", func() {
+			part, err := NewQueryPart("field", []string{"value"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "value")
+			So(res, ShouldEqual, `"value"`)
+		})
+
+		Convey("render nested part", func() {
+			part, err := NewQueryPart("derivative", []string{"10s"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "mean(value)")
+			So(res, ShouldEqual, "derivative(mean(value), 10s)")
+		})
+
+		Convey("render bottom", func() {
+			part, err := NewQueryPart("bottom", []string{"3"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "value")
+			So(res, ShouldEqual, "bottom(value, 3)")
+		})
+
+		Convey("render time", func() {
+			part, err := NewQueryPart("time", []string{"$interval"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "")
+			So(res, ShouldEqual, "time(200ms)")
+		})
+
+		Convey("render time interval >10s", func() {
+			part, err := NewQueryPart("time", []string{"$interval"})
+			So(err, ShouldBeNil)
+
+			query.Interval = ">10s"
+
+			res := part.Render(query, queryContext, "")
+			So(res, ShouldEqual, "time(10s)")
+		})
+
+		Convey("render time interval >1s and higher interval calculation", func() {
+			part, err := NewQueryPart("time", []string{"$interval"})
+			queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("1y", "now")}
+			So(err, ShouldBeNil)
+
+			query.Interval = ">1s"
+
+			res := part.Render(query, queryContext, "")
+			So(res, ShouldEqual, "time(168h)")
+		})
+
+		Convey("render spread", func() {
+			part, err := NewQueryPart("spread", []string{})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "value")
+			So(res, ShouldEqual, `spread(value)`)
+		})
+
+		Convey("render suffix", func() {
+			part, err := NewQueryPart("math", []string{"/ 100"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "mean(value)")
+			So(res, ShouldEqual, "mean(value) / 100")
+		})
+
+		Convey("render alias", func() {
+			part, err := NewQueryPart("alias", []string{"test"})
+			So(err, ShouldBeNil)
+
+			res := part.Render(query, queryContext, "mean(value)")
+			So(res, ShouldEqual, `mean(value) AS "test"`)
+		})
+	})
+}

+ 94 - 0
pkg/tsdb/influxdb/response_parser.go

@@ -0,0 +1,94 @@
+package influxdb
+
+import (
+	"encoding/json"
+	"fmt"
+	"strings"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	"gopkg.in/guregu/null.v3"
+)
+
+type ResponseParser struct{}
+
+func (rp *ResponseParser) Parse(response *Response) *tsdb.QueryResult {
+	queryRes := tsdb.NewQueryResult()
+
+	for _, result := range response.Results {
+		queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes)...)
+	}
+
+	return queryRes
+}
+
+func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult) tsdb.TimeSeriesSlice {
+	var result tsdb.TimeSeriesSlice
+
+	for _, row := range rows {
+		for columnIndex, column := range row.Columns {
+			if column == "time" {
+				continue
+			}
+
+			var points tsdb.TimeSeriesPoints
+			for _, valuePair := range row.Values {
+				point, err := rp.parseTimepoint(valuePair, columnIndex)
+				if err == nil {
+					points = append(points, point)
+				}
+			}
+			result = append(result, &tsdb.TimeSeries{
+				Name:   rp.formatSerieName(row, column),
+				Points: points,
+			})
+		}
+	}
+
+	return result
+}
+
+func (rp *ResponseParser) formatSerieName(row Row, column string) string {
+	var tags []string
+
+	for k, v := range row.Tags {
+		tags = append(tags, fmt.Sprintf("%s: %s", k, v))
+	}
+
+	tagText := ""
+	if len(tags) > 0 {
+		tagText = fmt.Sprintf(" { %s }", strings.Join(tags, " "))
+	}
+
+	return fmt.Sprintf("%s.%s%s", row.Name, column, tagText)
+}
+
+func (rp *ResponseParser) parseTimepoint(valuePair []interface{}, valuePosition int) (tsdb.TimePoint, error) {
+	var value null.Float = rp.parseValue(valuePair[valuePosition])
+
+	timestampNumber, _ := valuePair[0].(json.Number)
+	timestamp, err := timestampNumber.Float64()
+	if err != nil {
+		return tsdb.TimePoint{}, err
+	}
+
+	return tsdb.NewTimePoint(value, timestamp), nil
+}
+
+func (rp *ResponseParser) parseValue(value interface{}) null.Float {
+	number, ok := value.(json.Number)
+	if !ok {
+		return null.FloatFromPtr(nil)
+	}
+
+	fvalue, err := number.Float64()
+	if err == nil {
+		return null.FloatFrom(fvalue)
+	}
+
+	ivalue, err := number.Int64()
+	if err == nil {
+		return null.FloatFrom(float64(ivalue))
+	}
+
+	return null.FloatFromPtr(nil)
+}

+ 59 - 0
pkg/tsdb/influxdb/response_parser_test.go

@@ -0,0 +1,59 @@
+package influxdb
+
+import (
+	"encoding/json"
+	"testing"
+
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestInfluxdbResponseParser(t *testing.T) {
+	Convey("Influxdb response parser", t, func() {
+
+		parser := &ResponseParser{}
+
+		response := &Response{
+			Results: []Result{
+				Result{
+					Series: []Row{
+						{
+							Name:    "cpu",
+							Columns: []string{"time", "mean", "sum"},
+							Tags:    map[string]string{"datacenter": "America"},
+							Values: [][]interface{}{
+								{json.Number("111"), json.Number("222"), json.Number("333")},
+								{json.Number("111"), json.Number("222"), json.Number("333")},
+								{json.Number("111"), json.Number("null"), json.Number("333")},
+							},
+						},
+					},
+				},
+			},
+		}
+
+		result := parser.Parse(response)
+
+		Convey("can parse all series", func() {
+			So(len(result.Series), ShouldEqual, 2)
+		})
+
+		Convey("can parse all points", func() {
+			So(len(result.Series[0].Points), ShouldEqual, 3)
+			So(len(result.Series[1].Points), ShouldEqual, 3)
+		})
+
+		Convey("can parse multi row result", func() {
+			So(result.Series[0].Points[1][0].Float64, ShouldEqual, float64(222))
+			So(result.Series[1].Points[1][0].Float64, ShouldEqual, float64(333))
+		})
+
+		Convey("can parse null points", func() {
+			So(result.Series[0].Points[2][0].Valid, ShouldBeFalse)
+		})
+
+		Convey("can format serie names", func() {
+			So(result.Series[0].Name, ShouldEqual, "cpu.mean { datacenter: America }")
+			So(result.Series[1].Name, ShouldEqual, "cpu.sum { datacenter: America }")
+		})
+	})
+}

+ 145 - 0
pkg/tsdb/interval.go

@@ -0,0 +1,145 @@
+package tsdb
+
+import (
+	"fmt"
+	"time"
+)
+
+var (
+	defaultRes  int64         = 1500
+	minInterval time.Duration = 1 * time.Millisecond
+	year        time.Duration = time.Hour * 24 * 365
+	day         time.Duration = time.Hour * 24 * 365
+)
+
+func CalculateInterval(timerange *TimeRange) string {
+	interval := time.Duration((timerange.MustGetTo().UnixNano() - timerange.MustGetFrom().UnixNano()) / defaultRes)
+
+	if interval < minInterval {
+		return formatDuration(minInterval)
+	}
+
+	return formatDuration(roundInterval(interval))
+}
+
+func formatDuration(inter time.Duration) string {
+	if inter >= year {
+		return fmt.Sprintf("%dy", inter/year)
+	}
+
+	if inter >= day {
+		return fmt.Sprintf("%dd", inter/day)
+	}
+
+	if inter >= time.Hour {
+		return fmt.Sprintf("%dh", inter/time.Hour)
+	}
+
+	if inter >= time.Minute {
+		return fmt.Sprintf("%dm", inter/time.Minute)
+	}
+
+	if inter >= time.Second {
+		return fmt.Sprintf("%ds", inter/time.Second)
+	}
+
+	if inter >= time.Millisecond {
+		return fmt.Sprintf("%dms", inter/time.Millisecond)
+	}
+
+	return "1ms"
+}
+
+func roundInterval(interval time.Duration) time.Duration {
+	switch true {
+	// 0.015s
+	case interval <= 15*time.Millisecond:
+		return time.Millisecond * 10 // 0.01s
+	// 0.035s
+	case interval <= 35*time.Millisecond:
+		return time.Millisecond * 20 // 0.02s
+	// 0.075s
+	case interval <= 75*time.Millisecond:
+		return time.Millisecond * 50 // 0.05s
+	// 0.15s
+	case interval <= 150*time.Millisecond:
+		return time.Millisecond * 100 // 0.1s
+	// 0.35s
+	case interval <= 350*time.Millisecond:
+		return time.Millisecond * 200 // 0.2s
+	// 0.75s
+	case interval <= 750*time.Millisecond:
+		return time.Millisecond * 500 // 0.5s
+	// 1.5s
+	case interval <= 1500*time.Millisecond:
+		return time.Millisecond * 1000 // 1s
+	// 3.5s
+	case interval <= 3500*time.Millisecond:
+		return time.Millisecond * 2000 // 2s
+	// 7.5s
+	case interval <= 7500*time.Millisecond:
+		return time.Millisecond * 5000 // 5s
+	// 12.5s
+	case interval <= 12500*time.Millisecond:
+		return time.Millisecond * 10000 // 10s
+	// 17.5s
+	case interval <= 17500*time.Millisecond:
+		return time.Millisecond * 15000 // 15s
+	// 25s
+	case interval <= 25000*time.Millisecond:
+		return time.Millisecond * 20000 // 20s
+	// 45s
+	case interval <= 45000*time.Millisecond:
+		return time.Millisecond * 30000 // 30s
+	// 1.5m
+	case interval <= 90000*time.Millisecond:
+		return time.Millisecond * 60000 // 1m
+	// 3.5m
+	case interval <= 210000*time.Millisecond:
+		return time.Millisecond * 120000 // 2m
+	// 7.5m
+	case interval <= 450000*time.Millisecond:
+		return time.Millisecond * 300000 // 5m
+	// 12.5m
+	case interval <= 750000*time.Millisecond:
+		return time.Millisecond * 600000 // 10m
+	// 17.5m
+	case interval <= 1050000*time.Millisecond:
+		return time.Millisecond * 900000 // 15m
+	// 25m
+	case interval <= 1500000*time.Millisecond:
+		return time.Millisecond * 1200000 // 20m
+	// 45m
+	case interval <= 2700000*time.Millisecond:
+		return time.Millisecond * 1800000 // 30m
+	// 1.5h
+	case interval <= 5400000*time.Millisecond:
+		return time.Millisecond * 3600000 // 1h
+	// 2.5h
+	case interval <= 9000000*time.Millisecond:
+		return time.Millisecond * 7200000 // 2h
+	// 4.5h
+	case interval <= 16200000*time.Millisecond:
+		return time.Millisecond * 10800000 // 3h
+	// 9h
+	case interval <= 32400000*time.Millisecond:
+		return time.Millisecond * 21600000 // 6h
+	// 24h
+	case interval <= 86400000*time.Millisecond:
+		return time.Millisecond * 43200000 // 12h
+	// 48h
+	case interval <= 172800000*time.Millisecond:
+		return time.Millisecond * 86400000 // 24h
+	// 1w
+	case interval <= 604800000*time.Millisecond:
+		return time.Millisecond * 86400000 // 24h
+	// 3w
+	case interval <= 1814400000*time.Millisecond:
+		return time.Millisecond * 604800000 // 1w
+	// 6w
+	case interval < 3628800000*time.Millisecond:
+		return time.Millisecond * 2592000000 // 30d
+	default:
+		return time.Millisecond * 31536000000 // 1y
+	}
+}
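
For orientation: CalculateInterval divides the selected time range by a default resolution and then snaps the result to one of the buckets above, never going below minInterval. A minimal standalone sketch of the arithmetic, assuming defaultRes is 1500 points and minInterval is 1ms (both are defined earlier in interval.go and are not part of this hunk); the numbers line up with the expectations in interval_test.go below:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed resolution: ~1500 points per graph (not shown in this hunk).
	const defaultRes = 1500

	for _, rng := range []time.Duration{5 * time.Minute, 30 * time.Minute, time.Hour} {
		raw := rng / defaultRes
		fmt.Printf("%v range -> raw interval %v\n", rng, raw)
	}
	// Prints roughly:
	//   5m0s range -> raw interval 200ms  (already a bucket, stays 200ms)
	//   30m0s range -> raw interval 1.2s  (roundInterval snaps it to 1s)
	//   1h0m0s range -> raw interval 2.4s (roundInterval snaps it to 2s)
}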

+ 57 - 0
pkg/tsdb/interval_test.go

@@ -0,0 +1,57 @@
+package tsdb
+
+import (
+	"testing"
+	"time"
+
+	"github.com/grafana/grafana/pkg/setting"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestInterval(t *testing.T) {
+	Convey("Default interval ", t, func() {
+		setting.NewConfigContext(&setting.CommandLineArgs{
+			HomePath: "../../",
+		})
+
+		Convey("for 5min", func() {
+			tr := NewTimeRange("5m", "now")
+
+			interval := CalculateInterval(tr)
+			So(interval, ShouldEqual, "200ms")
+		})
+
+		Convey("for 15min", func() {
+			tr := NewTimeRange("15m", "now")
+
+			interval := CalculateInterval(tr)
+			So(interval, ShouldEqual, "500ms")
+		})
+
+		Convey("for 30min", func() {
+			tr := NewTimeRange("30m", "now")
+
+			interval := CalculateInterval(tr)
+			So(interval, ShouldEqual, "1s")
+		})
+
+		Convey("for 1h", func() {
+			tr := NewTimeRange("1h", "now")
+
+			interval := CalculateInterval(tr)
+			So(interval, ShouldEqual, "2s")
+		})
+
+		Convey("Round interval", func() {
+			So(roundInterval(time.Millisecond*30), ShouldEqual, time.Millisecond*20)
+			So(roundInterval(time.Millisecond*45), ShouldEqual, time.Millisecond*50)
+		})
+
+		Convey("Format value", func() {
+			So(formatDuration(time.Second*61), ShouldEqual, "1m")
+			So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms")
+			So(formatDuration(time.Hour*23), ShouldEqual, "23h")
+			So(formatDuration(time.Hour*24*367), ShouldEqual, "1y")
+		})
+	})
+}

+ 9 - 3
pkg/tsdb/models.go

@@ -39,6 +39,7 @@ type DataSourceInfo struct {
 	BasicAuth         bool
 	BasicAuthUser     string
 	BasicAuthPassword string
+	JsonData          *simplejson.Json
 }
 
 type BatchTiming struct {
@@ -51,6 +52,11 @@ type BatchResult struct {
 	Timings      *BatchTiming
 }
 
+func (br *BatchResult) WithError(err error) *BatchResult {
+	br.Error = err
+	return br
+}
+
 type QueryResult struct {
 	Error  error           `json:"error"`
 	RefId  string          `json:"refId"`
@@ -72,15 +78,15 @@ func NewQueryResult() *QueryResult {
 	}
 }
 
-func NewTimePoint(value float64, timestamp float64) TimePoint {
-	return TimePoint{null.FloatFrom(value), null.FloatFrom(timestamp)}
+func NewTimePoint(value null.Float, timestamp float64) TimePoint {
+	return TimePoint{value, null.FloatFrom(timestamp)}
 }
 
 func NewTimeSeriesPointsFromArgs(values ...float64) TimeSeriesPoints {
 	points := make(TimeSeriesPoints, 0)
 
 	for i := 0; i < len(values); i += 2 {
-		points = append(points, NewTimePoint(values[i], values[i+1]))
+		points = append(points, NewTimePoint(null.FloatFrom(values[i]), values[i+1]))
 	}
 
 	return points

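With the new NewTimePoint signature the caller decides whether a point carries a value or an explicit null. A minimal sketch of both cases, assuming the gopkg.in/guregu/null.v3 package that the other hunks in this change import; buildPoints is a hypothetical helper, not part of the commit:

package example

import (
	"gopkg.in/guregu/null.v3"

	"github.com/grafana/grafana/pkg/tsdb"
)

// buildPoints shows the two ways a TimePoint is now constructed:
// a concrete value versus an explicit null (rendered as a gap) at a timestamp.
func buildPoints(tsMs float64) tsdb.TimeSeriesPoints {
	return tsdb.TimeSeriesPoints{
		tsdb.NewTimePoint(null.FloatFrom(42.5), tsMs),   // value present
		tsdb.NewTimePoint(null.FloatFromPtr(nil), tsMs), // null value
	}
}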
+ 9 - 6
pkg/tsdb/prometheus/prometheus.go

@@ -8,6 +8,8 @@ import (
 	"strings"
 	"strings"
 	"time"
 	"time"
 
 
+	"gopkg.in/guregu/null.v3"
+
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/prometheus/client_golang/api/prometheus"
 	"github.com/prometheus/client_golang/api/prometheus"
@@ -50,12 +52,12 @@ func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlic
 
 
 	client, err := e.getClient()
 	client, err := e.getClient()
 	if err != nil {
 	if err != nil {
-		return resultWithError(result, err)
+		return result.WithError(err)
 	}
 	}
 
 
 	query, err := parseQuery(queries, queryContext)
 	query, err := parseQuery(queries, queryContext)
 	if err != nil {
 	if err != nil {
-		return resultWithError(result, err)
+		return result.WithError(err)
 	}
 	}
 
 
 	timeRange := prometheus.Range{
 	timeRange := prometheus.Range{
@@ -67,12 +69,12 @@ func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlic
 	value, err := client.QueryRange(ctx, query.Expr, timeRange)
 	value, err := client.QueryRange(ctx, query.Expr, timeRange)
 
 
 	if err != nil {
 	if err != nil {
-		return resultWithError(result, err)
+		return result.WithError(err)
 	}
 	}
 
 
 	queryResult, err := parseResponse(value, query)
 	queryResult, err := parseResponse(value, query)
 	if err != nil {
 	if err != nil {
-		return resultWithError(result, err)
+		return result.WithError(err)
 	}
 	}
 	result.QueryResults = queryResult
 	result.QueryResults = queryResult
 	return result
 	return result
@@ -145,7 +147,7 @@ func parseResponse(value pmodel.Value, query *PrometheusQuery) (map[string]*tsdb
 		}
 		}
 
 
 		for _, k := range v.Values {
 		for _, k := range v.Values {
-			series.Points = append(series.Points, tsdb.NewTimePoint(float64(k.Value), float64(k.Timestamp.Unix()*1000)))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(k.Value)), float64(k.Timestamp.Unix()*1000)))
 		}
 		}
 
 
 		queryRes.Series = append(queryRes.Series, &series)
 		queryRes.Series = append(queryRes.Series, &series)
@@ -155,7 +157,8 @@ func parseResponse(value pmodel.Value, query *PrometheusQuery) (map[string]*tsdb
 	return queryResults, nil
 	return queryResults, nil
 }
 }
 
 
+/*
 func resultWithError(result *tsdb.BatchResult, err error) *tsdb.BatchResult {
 func resultWithError(result *tsdb.BatchResult, err error) *tsdb.BatchResult {
 	result.Error = err
 	result.Error = err
 	return result
 	return result
-}
+}*/

+ 2 - 0
pkg/tsdb/request.go

@@ -51,6 +51,8 @@ func HandleRequest(ctx context.Context, req *Request) (*Response, error) {
 					go batch.process(ctx, context)
 				}
 			}
+		case <-ctx.Done():
+			return nil, ctx.Err()
 		}
 	}
 

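The added case makes HandleRequest's dispatch loop return as soon as the caller's context is cancelled instead of waiting on batch results indefinitely. A minimal sketch of the same pattern outside the tsdb types, assuming a caller that bounds the work with a timeout:

package main

import (
	"context"
	"fmt"
	"time"
)

// run mirrors the select loop above: it drains results until the context
// is cancelled, then returns the context's error.
func run(ctx context.Context, results <-chan string) error {
	for {
		select {
		case r := <-results:
			fmt.Println("got result:", r)
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	err := run(ctx, make(chan string)) // nothing ever arrives on the channel
	fmt.Println(err)                   // context deadline exceeded
}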
+ 10 - 5
pkg/tsdb/testdata/scenarios.go

@@ -6,6 +6,8 @@ import (
 	"strings"
 	"strings"
 	"time"
 	"time"
 
 
+	"gopkg.in/guregu/null.v3"
+
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/tsdb"
 )
 )
@@ -42,7 +44,7 @@ func init() {
 			walker := rand.Float64() * 100
 			walker := rand.Float64() * 100
 
 
 			for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
 			for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
-				points = append(points, tsdb.NewTimePoint(walker, float64(timeWalkerMs)))
+				points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs)))
 
 
 				walker += rand.Float64() - 0.5
 				walker += rand.Float64() - 0.5
 				timeWalkerMs += query.IntervalMs
 				timeWalkerMs += query.IntervalMs
@@ -73,7 +75,7 @@ func init() {
 			series := newSeriesForQuery(query)
 			series := newSeriesForQuery(query)
 			outsideTime := context.TimeRange.MustGetFrom().Add(-1*time.Hour).Unix() * 1000
 			outsideTime := context.TimeRange.MustGetFrom().Add(-1*time.Hour).Unix() * 1000
 
 
-			series.Points = append(series.Points, tsdb.NewTimePoint(10, float64(outsideTime)))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(10), float64(outsideTime)))
 			queryRes.Series = append(queryRes.Series, series)
 			queryRes.Series = append(queryRes.Series, series)
 
 
 			return queryRes
 			return queryRes
@@ -88,10 +90,13 @@ func init() {
 			queryRes := tsdb.NewQueryResult()
 			queryRes := tsdb.NewQueryResult()
 
 
 			stringInput := query.Model.Get("stringInput").MustString()
 			stringInput := query.Model.Get("stringInput").MustString()
-			values := []float64{}
+			values := []null.Float{}
 			for _, strVal := range strings.Split(stringInput, ",") {
 			for _, strVal := range strings.Split(stringInput, ",") {
+				if strVal == "null" {
+					values = append(values, null.FloatFromPtr(nil))
+				}
 				if val, err := strconv.ParseFloat(strVal, 64); err == nil {
 				if val, err := strconv.ParseFloat(strVal, 64); err == nil {
-					values = append(values, val)
+					values = append(values, null.FloatFrom(val))
 				}
 				}
 			}
 			}
 
 
@@ -105,7 +110,7 @@ func init() {
 			step := (endTime - startTime) / int64(len(values)-1)
 			step := (endTime - startTime) / int64(len(values)-1)
 
 
 			for _, val := range values {
 			for _, val := range values {
-				series.Points = append(series.Points, tsdb.NewTimePoint(val, float64(startTime)))
+				series.Points = append(series.Points, tsdb.TimePoint{val, null.FloatFrom(float64(startTime))})
 				startTime += step
 				startTime += step
 			}
 			}
 
 

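The csv_metric_values scenario now maps the literal string "null" to an explicit null point instead of dropping it. A small standalone sketch of what the parsing loop produces for a typical stringInput; the input value here is made up for illustration:

package main

import (
	"fmt"
	"strconv"
	"strings"

	"gopkg.in/guregu/null.v3"
)

func main() {
	stringInput := "1,20,null,40" // hypothetical panel input

	values := []null.Float{}
	for _, strVal := range strings.Split(stringInput, ",") {
		if strVal == "null" {
			values = append(values, null.FloatFromPtr(nil))
		}
		if val, err := strconv.ParseFloat(strVal, 64); err == nil {
			values = append(values, null.FloatFrom(val))
		}
	}

	for _, v := range values {
		fmt.Println(v.Valid, v.Float64)
	}
	// true 1, true 20, false 0 (the null entry), true 40
}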
+ 3 - 5
public/app/core/components/query_part/query_part_editor.ts

@@ -128,11 +128,9 @@ export function queryPartEditorDirective($compile, templateSrv) {
       }
 
       $scope.showActionsMenu = function() {
-        if ($scope.partActions.length === 0) {
-          $scope.handleEvent({$event: {name: 'get-part-actions'}}).then(res => {
-            $scope.partActions = res;
-          });
-        }
+        $scope.handleEvent({$event: {name: 'get-part-actions'}}).then(res => {
+          $scope.partActions = res;
+        });
       };
 
       $scope.triggerPartAction = function(action) {

+ 1 - 0
public/app/core/controllers/login_ctrl.js

@@ -11,6 +11,7 @@ function (angular, _, coreModule, config) {
     "1000": "Required team membership not fulfilled",
     "1000": "Required team membership not fulfilled",
     "1001": "Required organization membership not fulfilled",
     "1001": "Required organization membership not fulfilled",
     "1002": "Required email domain not fulfilled",
     "1002": "Required email domain not fulfilled",
+    "1003": "Login provider denied login request",
   };
   };
 
 
   coreModule.default.controller('LoginCtrl', function($scope, backendSrv, contextSrv, $location) {
   coreModule.default.controller('LoginCtrl', function($scope, backendSrv, contextSrv, $location) {

+ 12 - 11
public/app/core/directives/metric_segment.js

@@ -43,7 +43,7 @@ function (_, $, coreModule) {
             var selected = _.find($scope.altSegments, {value: value});
             var selected = _.find($scope.altSegments, {value: value});
             if (selected) {
             if (selected) {
               segment.value = selected.value;
               segment.value = selected.value;
-              segment.html = selected.html;
+              segment.html = selected.html || selected.value;
               segment.fake = false;
               segment.fake = false;
               segment.expandable = selected.expandable;
               segment.expandable = selected.expandable;
             }
             }
@@ -186,25 +186,26 @@ function (_, $, coreModule) {
 
 
           $scope.getOptionsInternal = function() {
           $scope.getOptionsInternal = function() {
             if ($scope.options) {
             if ($scope.options) {
-              var optionSegments = _.map($scope.options, function(option) {
-                return uiSegmentSrv.newSegment({value: option.text});
-              });
-              return $q.when(optionSegments);
+              cachedOptions = $scope.options;
+              return $q.when(_.map($scope.options, function(option) {
+                return {value: option.text};
+              }));
             } else {
             } else {
               return $scope.getOptions().then(function(options) {
               return $scope.getOptions().then(function(options) {
                 cachedOptions = options;
                 cachedOptions = options;
-                return _.map(options, function(option) {
-                  return uiSegmentSrv.newSegment({value: option.text});
+                return  _.map(options, function(option) {
+                  if (option.html) {
+                    return option;
+                  }
+                  return {value: option.text};
                 });
                 });
               });
               });
             }
             }
           };
           };
 
 
           $scope.onSegmentChange = function() {
           $scope.onSegmentChange = function() {
-            var options = $scope.options || cachedOptions;
-
-            if (options) {
-              var option = _.find(options, {text: $scope.segment.value});
+            if (cachedOptions) {
+              var option = _.find(cachedOptions, {text: $scope.segment.value});
               if (option && option.value !== $scope.property) {
               if (option && option.value !== $scope.property) {
                 $scope.property = option.value;
                 $scope.property = option.value;
               } else if (attrs.custom !== 'false') {
               } else if (attrs.custom !== 'false') {

+ 14 - 1
public/app/features/alerting/alert_list_ctrl.ts

@@ -23,7 +23,7 @@ export class AlertListCtrl {
   };
 
   /** @ngInject */
-  constructor(private backendSrv, private $location) {
+  constructor(private backendSrv, private $location, private $scope) {
     var params = $location.search();
     this.filters.state = params.state || null;
     this.loadAlerts();
@@ -43,6 +43,19 @@
     });
   }
 
+  pauseAlertRule(alertId: any) {
+    var alert = _.find(this.alerts, {id: alertId});
+
+    var payload = {
+      paused: alert.state !== "paused"
+    };
+
+    this.backendSrv.post(`/api/alerts/${alert.id}/pause`, payload).then(result => {
+      alert.state = result.state;
+      alert.stateModel = alertDef.getStateDisplayModel(result.state);
+    });
+  }
+
   openHowTo() {
     appEvents.emit('show-modal', {
       src: 'public/app/features/alerting/partials/alert_howto.html',

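pauseAlertRule toggles a rule through the pause endpoint and re-reads the rule state from the response. A minimal sketch of the same call from Go, assuming a locally reachable Grafana and basic-auth credentials; the /api/alerts/{id}/pause path and the paused flag come from the controller above, while the host, credentials and alert id are illustrative:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	alertID := 1 // hypothetical alert rule id
	body, _ := json.Marshal(map[string]bool{"paused": true})

	url := fmt.Sprintf("http://localhost:3000/api/alerts/%d/pause", alertID)
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.SetBasicAuth("admin", "admin") // illustrative credentials

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var result struct {
		State string `json:"state"` // the controller reads result.state the same way
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		log.Fatal(err)
	}
	fmt.Println("new state:", result.State)
}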
+ 3 - 2
public/app/features/alerting/alert_tab_ctrl.ts

@@ -156,7 +156,7 @@ export class AlertTabCtrl {
 
 
     for (let addedNotification of alert.notifications) {
     for (let addedNotification of alert.notifications) {
       var model = _.find(this.notifications, {id: addedNotification.id});
       var model = _.find(this.notifications, {id: addedNotification.id});
-      if (model) {
+      if (model && model.isDefault === false) {
         model.iconClass = this.getNotificationIcon(model.type);
         model.iconClass = this.getNotificationIcon(model.type);
         this.alertNotifications.push(model);
         this.alertNotifications.push(model);
       }
       }
@@ -231,7 +231,7 @@ export class AlertTabCtrl {
       this.datasourceSrv.get(datasourceName).then(ds => {
       this.datasourceSrv.get(datasourceName).then(ds => {
         if (!ds.meta.alerting) {
         if (!ds.meta.alerting) {
           this.error = 'The datasource does not support alerting queries';
           this.error = 'The datasource does not support alerting queries';
-        } else if (this.templateSrv.variableExists(foundTarget.target)) {
+        } else if (ds.targetContainsTemplate(foundTarget)) {
           this.error = 'Template variables are not supported in alert queries';
           this.error = 'Template variables are not supported in alert queries';
         } else {
         } else {
           this.error = '';
           this.error = '';
@@ -315,6 +315,7 @@ export class AlertTabCtrl {
         this.alert = null;
         this.alert = null;
         this.panel.thresholds = [];
         this.panel.thresholds = [];
         this.conditionModels = [];
         this.conditionModels = [];
+        this.panelCtrl.alertState = null;
         this.panelCtrl.render();
         this.panelCtrl.render();
       }
       }
     });
     });

+ 4 - 1
public/app/features/alerting/partials/alert_list.html

@@ -29,7 +29,10 @@
         <div class="card-item card-item--alert">
         <div class="card-item card-item--alert">
           <div class="card-item-header">
           <div class="card-item-header">
             <div class="card-item-type">
             <div class="card-item-type">
-              <a class="card-item-cog" href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert" bs-tooltip="'Edit alert rule'">
+              <a class="card-item-cog" bs-tooltip="'Pausing an alert rule prevents it from executing'" ng-click="ctrl.pauseAlertRule(alert.id)">
+								<i class="fa fa-pause"></i>
+							</a>
+							<a class="card-item-cog" href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert" bs-tooltip="'Edit alert rule'">
                 <i class="icon-gf icon-gf-settings"></i>
                 <i class="icon-gf icon-gf-settings"></i>
               </a>
               </a>
             </div>
             </div>

+ 2 - 2
public/app/features/alerting/partials/alert_tab.html

@@ -52,9 +52,9 @@
 					</div>
 					</div>
 					<div class="gf-form">
 					<div class="gf-form">
 						<metric-segment-model property="conditionModel.evaluator.type" options="ctrl.evalFunctions" custom="false" css-class="query-keyword" on-change="ctrl.evaluatorTypeChanged(conditionModel.evaluator)"></metric-segment-model>
 						<metric-segment-model property="conditionModel.evaluator.type" options="ctrl.evalFunctions" custom="false" css-class="query-keyword" on-change="ctrl.evaluatorTypeChanged(conditionModel.evaluator)"></metric-segment-model>
-						<input class="gf-form-input max-width-7" type="number" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+						<input class="gf-form-input max-width-7" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()"></input>
             <label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
             <label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
-            <input class="gf-form-input max-width-7" type="number" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+            <input class="gf-form-input max-width-7" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
 					</div>
 					</div>
 					<div class="gf-form">
 					<div class="gf-form">
 						<label class="gf-form-label">
 						<label class="gf-form-label">

+ 4 - 2
public/app/features/dashboard/dashboard_ctrl.ts

@@ -51,7 +51,9 @@ export class DashboardCtrl {
         .catch($scope.onInitFailed.bind(this, 'Templating init failed', false))
         .catch($scope.onInitFailed.bind(this, 'Templating init failed', false))
         // continue
         // continue
         .finally(function() {
         .finally(function() {
-          dynamicDashboardSrv.init(dashboard);
+          dynamicDashboardSrv.init(dashboard, variableSrv);
+          dynamicDashboardSrv.process();
+
           unsavedChangesSrv.init(dashboard, $scope);
           unsavedChangesSrv.init(dashboard, $scope);
 
 
           $scope.dashboard = dashboard;
           $scope.dashboard = dashboard;
@@ -87,7 +89,7 @@ export class DashboardCtrl {
       };
       };
 
 
       $scope.templateVariableUpdated = function() {
       $scope.templateVariableUpdated = function() {
-        dynamicDashboardSrv.update($scope.dashboard);
+        dynamicDashboardSrv.process();
       };
       };
 
 
       $scope.updateSubmenuVisibility = function() {
       $scope.updateSubmenuVisibility = function() {

+ 9 - 5
public/app/features/dashboard/dashboard_srv.ts

@@ -514,9 +514,11 @@ export class DashboardModel {
             if (panel.grid.thresholdLine) {
             if (panel.grid.thresholdLine) {
               t1.line = true;
               t1.line = true;
               t1.lineColor = panel.grid.threshold1Color;
               t1.lineColor = panel.grid.threshold1Color;
+              t1.colorMode = 'custom';
             } else {
             } else {
               t1.fill = true;
               t1.fill = true;
               t1.fillColor = panel.grid.threshold1Color;
               t1.fillColor = panel.grid.threshold1Color;
+              t1.colorMode = 'custom';
             }
             }
           }
           }
 
 
@@ -525,25 +527,27 @@ export class DashboardModel {
             if (panel.grid.thresholdLine) {
             if (panel.grid.thresholdLine) {
               t2.line = true;
               t2.line = true;
               t2.lineColor = panel.grid.threshold2Color;
               t2.lineColor = panel.grid.threshold2Color;
+              t2.colorMode = 'custom';
             } else {
             } else {
               t2.fill = true;
               t2.fill = true;
               t2.fillColor = panel.grid.threshold2Color;
               t2.fillColor = panel.grid.threshold2Color;
+              t2.colorMode = 'custom';
             }
             }
           }
           }
 
 
           if (_.isNumber(t1.value)) {
           if (_.isNumber(t1.value)) {
             if (_.isNumber(t2.value)) {
             if (_.isNumber(t2.value)) {
               if (t1.value > t2.value) {
               if (t1.value > t2.value) {
-                t1.op = t2.op = '<';
-                panel.thresholds.push(t2);
+                t1.op = t2.op = 'lt';
                 panel.thresholds.push(t1);
                 panel.thresholds.push(t1);
-              } else {
-                t1.op = t2.op = '>';
                 panel.thresholds.push(t2);
                 panel.thresholds.push(t2);
+              } else {
+                t1.op = t2.op = 'gt';
                 panel.thresholds.push(t1);
                 panel.thresholds.push(t1);
+                panel.thresholds.push(t2);
               }
               }
             } else {
             } else {
-              t1.op = '>';
+              t1.op = 'gt';
               panel.thresholds.push(t1);
               panel.thresholds.push(t1);
             }
             }
           }
           }

+ 11 - 15
public/app/features/dashboard/dynamic_dashboard_srv.ts

@@ -9,23 +9,21 @@ import coreModule from 'app/core/core_module';
 export class DynamicDashboardSrv {
 export class DynamicDashboardSrv {
   iteration: number;
   iteration: number;
   dashboard: any;
   dashboard: any;
+  variables: any;
 
 
-  init(dashboard) {
-    if (dashboard.snapshot) { return; }
-    this.process(dashboard, {});
-  }
-
-  update(dashboard) {
-    if (dashboard.snapshot) { return; }
-    this.process(dashboard, {});
+  init(dashboard, variableSrv) {
+    this.dashboard = dashboard;
+    this.variables = variableSrv.variables;
   }
   }
 
 
-  process(dashboard, options) {
-    if (dashboard.templating.list.length === 0) { return; }
+  process(options) {
+    if (this.dashboard.snapshot || this.variables.length === 0) {
+      return;
+    }
 
 
-    this.dashboard = dashboard;
     this.iteration = (this.iteration || new Date().getTime()) + 1;
     this.iteration = (this.iteration || new Date().getTime()) + 1;
 
 
+    options = options || {};
     var cleanUpOnly = options.cleanUpOnly;
     var cleanUpOnly = options.cleanUpOnly;
     var i, j, row, panel;
     var i, j, row, panel;
 
 
@@ -105,8 +103,7 @@ export class DynamicDashboardSrv {
 
 
   // returns a new row clone or reuses a clone from previous iteration
   // returns a new row clone or reuses a clone from previous iteration
   repeatRow(row, rowIndex) {
   repeatRow(row, rowIndex) {
-    var variables = this.dashboard.templating.list;
-    var variable = _.find(variables, {name: row.repeat});
+    var variable = _.find(this.variables, {name: row.repeat});
     if (!variable) {
     if (!variable) {
       return;
       return;
     }
     }
@@ -166,8 +163,7 @@ export class DynamicDashboardSrv {
   }
   }
 
 
   repeatPanel(panel, row) {
   repeatPanel(panel, row) {
-    var variables = this.dashboard.templating.list;
-    var variable = _.find(variables, {name: panel.repeat});
+    var variable = _.find(this.variables, {name: panel.repeat});
     if (!variable) { return; }
     if (!variable) { return; }
 
 
     var selected;
     var selected;

+ 2 - 1
public/app/features/dashboard/export/exporter.ts

@@ -13,7 +13,8 @@ export class DashboardExporter {
 
 
   makeExportable(dash) {
   makeExportable(dash) {
     var dynSrv = new DynamicDashboardSrv();
     var dynSrv = new DynamicDashboardSrv();
-    dynSrv.process(dash, {cleanUpOnly: true});
+    dynSrv.init(dash, {variables: dash.templating.list});
+    dynSrv.process({cleanUpOnly: true});
 
 
     dash.id = null;
     dash.id = null;
 
 

+ 5 - 5
public/app/features/dashboard/specs/dashboard_srv_specs.ts

@@ -221,11 +221,11 @@ describe('dashboardSrv', function() {
 
 
     it('graph thresholds should be migrated', function() {
     it('graph thresholds should be migrated', function() {
       expect(graph.thresholds.length).to.be(2);
       expect(graph.thresholds.length).to.be(2);
-      expect(graph.thresholds[0].op).to.be('>');
-      expect(graph.thresholds[0].value).to.be(400);
-      expect(graph.thresholds[0].fillColor).to.be('red');
-      expect(graph.thresholds[1].value).to.be(200);
-      expect(graph.thresholds[1].fillColor).to.be('yellow');
+      expect(graph.thresholds[0].op).to.be('gt');
+      expect(graph.thresholds[0].value).to.be(200);
+      expect(graph.thresholds[0].fillColor).to.be('yellow');
+      expect(graph.thresholds[1].value).to.be(400);
+      expect(graph.thresholds[1].fillColor).to.be('red');
     });
     });
   });
   });
 
 

+ 11 - 7
public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts

@@ -20,6 +20,8 @@ function dynamicDashScenario(desc, func)  {
 
 
       beforeEach(angularMocks.inject(function(dashboardSrv) {
       beforeEach(angularMocks.inject(function(dashboardSrv) {
         ctx.dashboardSrv = dashboardSrv;
         ctx.dashboardSrv = dashboardSrv;
+        ctx.variableSrv = {};
+
         var model = {
         var model = {
           rows: [],
           rows: [],
           templating: { list: [] }
           templating: { list: [] }
@@ -27,8 +29,10 @@ function dynamicDashScenario(desc, func)  {
 
 
         setupFunc(model);
         setupFunc(model);
         ctx.dash = ctx.dashboardSrv.create(model);
         ctx.dash = ctx.dashboardSrv.create(model);
+        ctx.variableSrv.variables = ctx.dash.templating.list;
         ctx.dynamicDashboardSrv = new DynamicDashboardSrv();
         ctx.dynamicDashboardSrv = new DynamicDashboardSrv();
-        ctx.dynamicDashboardSrv.init(ctx.dash);
+        ctx.dynamicDashboardSrv.init(ctx.dash, ctx.variableSrv);
+        ctx.dynamicDashboardSrv.process();
         ctx.rows = ctx.dash.rows;
         ctx.rows = ctx.dash.rows;
       }));
       }));
     };
     };
@@ -78,7 +82,7 @@ dynamicDashScenario('given dashboard with panel repeat', function(ctx) {
     beforeEach(function() {
     beforeEach(function() {
       repeatedPanelAfterIteration1 = ctx.rows[0].panels[1];
       repeatedPanelAfterIteration1 = ctx.rows[0].panels[1];
       ctx.rows[0].panels[0].fill = 10;
       ctx.rows[0].panels[0].fill = 10;
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should have reused same panel instances', function() {
     it('should have reused same panel instances', function() {
@@ -102,7 +106,7 @@ dynamicDashScenario('given dashboard with panel repeat', function(ctx) {
         options: [{text: 'se1', value: 'se1', selected: true}]
         options: [{text: 'se1', value: 'se1', selected: true}]
       });
       });
       ctx.rows[0].panels[0].repeat = "server";
       ctx.rows[0].panels[0].repeat = "server";
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should remove scopedVars value for last variable', function() {
     it('should remove scopedVars value for last variable', function() {
@@ -117,7 +121,7 @@ dynamicDashScenario('given dashboard with panel repeat', function(ctx) {
   describe('After a second iteration and selected values reduced', function() {
   describe('After a second iteration and selected values reduced', function() {
     beforeEach(function() {
     beforeEach(function() {
       ctx.dash.templating.list[0].options[1].selected = false;
       ctx.dash.templating.list[0].options[1].selected = false;
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should clean up repeated panel', function() {
     it('should clean up repeated panel', function() {
@@ -128,7 +132,7 @@ dynamicDashScenario('given dashboard with panel repeat', function(ctx) {
   describe('After a second iteration and panel repeat is turned off', function() {
   describe('After a second iteration and panel repeat is turned off', function() {
     beforeEach(function() {
     beforeEach(function() {
       ctx.rows[0].panels[0].repeat = null;
       ctx.rows[0].panels[0].repeat = null;
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should clean up repeated panel', function() {
     it('should clean up repeated panel', function() {
@@ -199,7 +203,7 @@ dynamicDashScenario('given dashboard with row repeat', function(ctx) {
     beforeEach(function() {
     beforeEach(function() {
       repeatedRowAfterFirstIteration = ctx.rows[1];
       repeatedRowAfterFirstIteration = ctx.rows[1];
       ctx.rows[0].height = 500;
       ctx.rows[0].height = 500;
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should still only have 2 rows', function() {
     it('should still only have 2 rows', function() {
@@ -218,7 +222,7 @@ dynamicDashScenario('given dashboard with row repeat', function(ctx) {
   describe('After a second iteration and selected values reduced', function() {
   describe('After a second iteration and selected values reduced', function() {
     beforeEach(function() {
     beforeEach(function() {
       ctx.dash.templating.list[0].options[1].selected = false;
       ctx.dash.templating.list[0].options[1].selected = false;
-      ctx.dynamicDashboardSrv.update(ctx.dash);
+      ctx.dynamicDashboardSrv.process();
     });
     });
 
 
     it('should remove repeated second row', function() {
     it('should remove repeated second row', function() {

+ 3 - 4
public/app/features/dashboard/timeSrv.js

@@ -77,12 +77,11 @@ define([
     this.setAutoRefresh = function (interval) {
     this.setAutoRefresh = function (interval) {
       this.dashboard.refresh = interval;
       this.dashboard.refresh = interval;
       if (interval) {
       if (interval) {
-        var _i = kbn.interval_to_ms(interval);
-        var wait_ms = _i - (Date.now() % _i);
+        var interval_ms = kbn.interval_to_ms(interval);
         $timeout(function () {
         $timeout(function () {
-          self.start_scheduled_refresh(_i);
+          self.start_scheduled_refresh(interval_ms);
           self.refreshDashboard();
           self.refreshDashboard();
-        }, wait_ms);
+        }, interval_ms);
       } else {
       } else {
         this.cancel_scheduled_refresh();
         this.cancel_scheduled_refresh();
       }
       }

+ 4 - 1
public/app/features/dashboard/viewStateSrv.js

@@ -83,6 +83,9 @@ function (angular, _, $) {
     };
     };
 
 
     DashboardViewState.prototype.update = function(state) {
     DashboardViewState.prototype.update = function(state) {
+      // remember if editStateChanged
+      this.editStateChanged = state.edit !== this.state.edit;
+
       _.extend(this.state, state);
       _.extend(this.state, state);
       this.dashboard.meta.fullscreen = this.state.fullscreen;
       this.dashboard.meta.fullscreen = this.state.fullscreen;
 
 
@@ -115,7 +118,7 @@ function (angular, _, $) {
 
 
         if (this.fullscreenPanel) {
         if (this.fullscreenPanel) {
           // if already fullscreen
           // if already fullscreen
-          if (this.fullscreenPanel === panelScope) {
+          if (this.fullscreenPanel === panelScope && this.editStateChanged === false) {
             return;
             return;
           } else {
           } else {
             this.leaveFullscreen(false);
             this.leaveFullscreen(false);

+ 23 - 5
public/app/features/panel/panel_directive.ts

@@ -68,21 +68,39 @@ module.directive('grafanaPanel', function() {
 
 
       // the reason for handling these classes this way is for performance
       // the reason for handling these classes this way is for performance
       // limit the watchers on panels etc
       // limit the watchers on panels etc
+      var transparentLastState;
+      var lastHasAlertRule;
+      var lastAlertState;
+      var hasAlertRule;
 
 
       ctrl.events.on('render', () => {
       ctrl.events.on('render', () => {
-        panelContainer.toggleClass('panel-transparent', ctrl.panel.transparent === true);
-        panelContainer.toggleClass('panel-has-alert', ctrl.panel.alert !== undefined);
+        if (transparentLastState !== ctrl.panel.transparent) {
+          panelContainer.toggleClass('panel-transparent', ctrl.panel.transparent === true);
+          transparentLastState = ctrl.panel.transparent;
+        }
+
+        hasAlertRule = ctrl.panel.alert !== undefined;
+        if (lastHasAlertRule !== hasAlertRule) {
+          panelContainer.toggleClass('panel-has-alert', hasAlertRule);
 
 
-        if (panelContainer.hasClass('panel-has-alert')) {
-          panelContainer.removeClass('panel-alert-state--ok panel-alert-state--alerting');
+          lastHasAlertRule = hasAlertRule;
         }
         }
 
 
-        // set special class for ok, or alerting states
         if (ctrl.alertState) {
         if (ctrl.alertState) {
+          if (lastAlertState) {
+            panelContainer.removeClass('panel-alert-state--' + lastAlertState);
+          }
+
           if (ctrl.alertState.state === 'ok' || ctrl.alertState.state === 'alerting') {
           if (ctrl.alertState.state === 'ok' || ctrl.alertState.state === 'alerting') {
             panelContainer.addClass('panel-alert-state--' + ctrl.alertState.state);
             panelContainer.addClass('panel-alert-state--' + ctrl.alertState.state);
           }
           }
+
+          lastAlertState = ctrl.alertState.state;
+        } else if (lastAlertState) {
+          panelContainer.removeClass('panel-alert-state--' + lastAlertState);
+          lastAlertState = null;
         }
         }
+
       });
       });
 
 
       scope.$watchGroup(['ctrl.fullscreen', 'ctrl.containerHeight'], function() {
       scope.$watchGroup(['ctrl.fullscreen', 'ctrl.containerHeight'], function() {

+ 2 - 1
public/app/features/panel/panel_menu.js

@@ -88,7 +88,8 @@ function (angular, $, _, Tether) {
 
 
           $scope.$watchCollection('ctrl.panel.links', function(newValue) {
           $scope.$watchCollection('ctrl.panel.links', function(newValue) {
             var showIcon = (newValue ? newValue.length > 0 : false) && ctrl.panel.title !== '';
             var showIcon = (newValue ? newValue.length > 0 : false) && ctrl.panel.title !== '';
-            $panelLinksBtn.toggle(showIcon);
+            // cannot use toggle here, only works for attached elements
+            $panelLinksBtn.css({display: showIcon ? 'inline' : 'none'});
           });
           });
 
 
           function dismiss(time, force) {
           function dismiss(time, force) {

+ 4 - 2
public/app/features/plugins/import_list/import_list.html

@@ -14,8 +14,9 @@
 					</span>
 					</span>
 				</td>
 				</td>
 				<td>
 				<td>
-          <span ng-if="dash.imported" bs-tooltip='"Imported revision:" + dash.importedRevision'>
+          <span>
             Revision: {{dash.revision}}
             Revision: {{dash.revision}}
+            <span ng-if="dash.imported" class="small">(Imported: {{dash.importedRevision}})</span>
           <span>
           <span>
 				</td>
 				</td>
 				<td style="text-align: right">
 				<td style="text-align: right">
@@ -23,7 +24,8 @@
 						Import
 						Import
 					</button>
 					</button>
 					<button class="btn btn-secondary btn-small" ng-click="ctrl.import(dash, true)" ng-show="dash.imported">
 					<button class="btn btn-secondary btn-small" ng-click="ctrl.import(dash, true)" ng-show="dash.imported">
-						Update
+            <span ng-if="dash.revision !== dash.importedRevision">Update</span>
+            <span ng-if="dash.revision === dash.importedRevision">Re-import</span>
 					</button>
 					</button>
 					<button class="btn btn-danger btn-small" ng-click="ctrl.remove(dash)" ng-show="dash.imported">
 					<button class="btn btn-danger btn-small" ng-click="ctrl.remove(dash)" ng-show="dash.imported">
 						<i class="fa fa-trash"></i>
 						<i class="fa fa-trash"></i>

+ 3 - 3
public/app/features/plugins/partials/plugin_edit.html

@@ -30,12 +30,12 @@
   </div>
   </div>
 
 
   <div class="page-body">
   <div class="page-body">
-    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabIndex === 0">
+    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabs[ctrl.tabIndex] === 'Readme'">
       <div ng-bind-html="ctrl.readmeHtml" class="plugin-markdown-readme">
       <div ng-bind-html="ctrl.readmeHtml" class="plugin-markdown-readme">
       </div>
       </div>
     </div>
     </div>
 
 
-    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabIndex === 1">
+    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabs[ctrl.tabIndex] === 'Config'">
       <div ng-if="ctrl.model.id">
       <div ng-if="ctrl.model.id">
         <plugin-component type="app-config-ctrl"></plugin-component>
         <plugin-component type="app-config-ctrl"></plugin-component>
 
 
@@ -47,7 +47,7 @@
       </div>
       </div>
     </div>
     </div>
 
 
-    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabIndex === 2">
+    <div class="tab-content page-content-with-sidebar" ng-if="ctrl.tabs[ctrl.tabIndex] === 'Dashboards'">
 			<dashboard-import-list plugin="ctrl.model"></dashboard-import-list>
 			<dashboard-import-list plugin="ctrl.model"></dashboard-import-list>
     </div>
     </div>
 
 

+ 5 - 5
public/app/features/plugins/plugin_edit_ctrl.ts

@@ -27,7 +27,7 @@ export class PluginEditCtrl {
     this.model = {};
     this.model = {};
     this.pluginId = $routeParams.pluginId;
     this.pluginId = $routeParams.pluginId;
     this.tabIndex = 0;
     this.tabIndex = 0;
-    this.tabs = ['Overview'];
+    this.tabs = ['Readme'];
 
 
     this.preUpdateHook = () => Promise.resolve();
     this.preUpdateHook = () => Promise.resolve();
     this.postUpdateHook = () => Promise.resolve();
     this.postUpdateHook = () => Promise.resolve();
@@ -48,13 +48,13 @@ export class PluginEditCtrl {
       });
       });
 
 
       if (this.model.type === 'app') {
       if (this.model.type === 'app') {
-        this.tabIndex = 1;
-        this.tabs.push('Config');
-
         this.hasDashboards = _.find(result.includes, {type: 'dashboard'});
         this.hasDashboards = _.find(result.includes, {type: 'dashboard'});
         if (this.hasDashboards) {
         if (this.hasDashboards) {
-          this.tabs.push('Dashboards');
+          this.tabs.unshift('Dashboards');
         }
         }
+
+        this.tabs.unshift('Config');
+        this.tabIndex = 0;
       }
       }
 
 
       return this.initReadme();
       return this.initReadme();

+ 314 - 62
public/app/plugins/app/testdata/dashboards/graph_last_1h.json

@@ -1,5 +1,5 @@
 {
 {
-  "revision": 4,
+  "revision": 5,
   "title": "TestData - Graph Panel Last 1h",
   "title": "TestData - Graph Panel Last 1h",
   "tags": [
   "tags": [
     "grafana-test"
     "grafana-test"
@@ -320,124 +320,376 @@
           ]
           ]
         },
         },
         {
         {
-          "title": "",
-          "error": false,
-          "span": 4,
+          "content": "Just verify that the tooltip time has millisecond resolution ",
           "editable": true,
           "editable": true,
-          "type": "text",
-          "isNew": true,
+          "error": false,
           "id": 6,
           "id": 6,
+          "isNew": true,
+          "links": [],
           "mode": "markdown",
           "mode": "markdown",
-          "content": "Just verify that the tooltip time has millisecond resolution ",
-          "links": []
+          "span": 4,
+          "title": "",
+          "type": "text"
         }
         }
       ],
       ],
       "title": "New row"
       "title": "New row"
     },
     },
     {
     {
-      "title": "New row",
-      "height": 336,
-      "editable": true,
       "collapse": false,
       "collapse": false,
+      "editable": true,
+      "height": 336,
       "panels": [
       "panels": [
         {
         {
-          "title": "2 yaxis and axis lables",
-          "error": false,
-          "span": 7.99561403508772,
+          "aliasColors": {},
+          "bars": false,
+          "datasource": "Grafana TestData",
           "editable": true,
           "editable": true,
-          "type": "graph",
-          "isNew": true,
+          "error": false,
+          "fill": 1,
           "id": 5,
           "id": 5,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [
+            {
+              "alias": "B-series",
+              "yaxis": 2
+            }
+          ],
+          "span": 7.99561403508772,
+          "stack": false,
+          "steppedLine": false,
           "targets": [
           "targets": [
             {
             {
-              "target": "",
               "refId": "A",
               "refId": "A",
               "scenarioId": "csv_metric_values",
               "scenarioId": "csv_metric_values",
-              "stringInput": "1,20,90,30,5,0"
+              "stringInput": "1,20,90,30,5,0",
+              "target": ""
             },
             },
             {
             {
-              "target": "",
               "refId": "B",
               "refId": "B",
               "scenarioId": "csv_metric_values",
               "scenarioId": "csv_metric_values",
-              "stringInput": "2000,3000,4000,1000,3000,10000"
+              "stringInput": "2000,3000,4000,1000,3000,10000",
+              "target": ""
             }
             }
           ],
           ],
-          "datasource": "Grafana TestData",
-          "renderer": "flot",
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "2 yaxis and axis lables",
+          "tooltip": {
+            "msResolution": false,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
           "yaxes": [
           "yaxes": [
             {
             {
+              "format": "percent",
               "label": "Perecent",
               "label": "Perecent",
-              "show": true,
               "logBase": 1,
               "logBase": 1,
-              "min": null,
               "max": null,
               "max": null,
-              "format": "percent"
+              "min": null,
+              "show": true
             },
             },
             {
             {
+              "format": "short",
               "label": "Pressure",
               "label": "Pressure",
-              "show": true,
               "logBase": 1,
               "logBase": 1,
-              "min": null,
               "max": null,
               "max": null,
-              "format": "short"
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "content": "Verify that axis labels look ok",
+          "editable": true,
+          "error": false,
+          "id": 7,
+          "isNew": true,
+          "links": [],
+          "mode": "markdown",
+          "span": 4.00438596491228,
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": "Grafana TestData",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "id": 8,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "refId": "B",
+              "scenarioId": "csv_metric_values",
+              "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10",
+              "target": ""
             }
             }
           ],
           ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "null value connected",
+          "tooltip": {
+            "msResolution": false,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
           "xaxis": {
           "xaxis": {
-            "show": true,
             "mode": "time",
             "mode": "time",
             "name": null,
             "name": null,
+            "show": true,
             "values": []
             "values": []
           },
           },
-          "lines": true,
-          "fill": 1,
-          "linewidth": 2,
-          "points": false,
-          "pointradius": 5,
+          "yaxes": [
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
           "bars": false,
           "bars": false,
-          "stack": false,
-          "percentage": false,
+          "datasource": "Grafana TestData",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "id": 10,
+          "isNew": true,
           "legend": {
           "legend": {
-            "show": true,
-            "values": false,
-            "min": false,
-            "max": false,
+            "avg": false,
             "current": false,
             "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
             "total": false,
             "total": false,
-            "avg": false
+            "values": false
           },
           },
-          "nullPointMode": "connected",
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "null as zero",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 3,
+          "stack": false,
           "steppedLine": false,
           "steppedLine": false,
+          "targets": [
+            {
+              "refId": "B",
+              "scenarioId": "csv_metric_values",
+              "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10",
+              "target": ""
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "null value null as zero",
           "tooltip": {
           "tooltip": {
-            "value_type": "cumulative",
+            "msResolution": false,
             "shared": true,
             "shared": true,
             "sort": 0,
             "sort": 0,
-            "msResolution": false
+            "value_type": "cumulative"
           },
           },
-          "timeFrom": null,
-          "timeShift": null,
+          "type": "graph",
+          "xaxis": {
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
           "aliasColors": {},
           "aliasColors": {},
+          "bars": false,
+          "datasource": "Grafana TestData",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "id": 9,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "null",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
           "seriesOverrides": [
           "seriesOverrides": [
             {
             {
               "alias": "B-series",
               "alias": "B-series",
-              "yaxis": 2
+              "zindex": -3
+            }
+          ],
+          "span": 5,
+          "stack": true,
+          "steppedLine": false,
+          "targets": [
+            {
+              "hide": false,
+              "refId": "B",
+              "scenarioId": "csv_metric_values",
+              "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10",
+              "target": ""
+            },
+            {
+              "alias": "",
+              "hide": false,
+              "refId": "A",
+              "scenarioId": "csv_metric_values",
+              "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20",
+              "target": ""
+            },
+            {
+              "alias": "",
+              "hide": false,
+              "refId": "C",
+              "scenarioId": "csv_metric_values",
+              "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20",
+              "target": ""
             }
             }
           ],
           ],
           "thresholds": [],
           "thresholds": [],
-          "links": []
-        },
-        {
-          "title": "",
-          "error": false,
-          "span": 4.00438596491228,
-          "editable": true,
-          "type": "text",
-          "isNew": true,
-          "id": 7,
-          "mode": "markdown",
-          "content": "Verify that axis labels look ok",
-          "links": []
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Stacking value ontop of nulls",
+          "tooltip": {
+            "msResolution": false,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
         }
         }
-      ]
+      ],
+      "title": "New row"
     }
     }
   ],
   ],
   "time": {
   "time": {
@@ -477,7 +729,7 @@
   },
   },
   "refresh": false,
   "refresh": false,
   "schemaVersion": 13,
   "schemaVersion": 13,
-  "version": 3,
+  "version": 13,
   "links": [],
   "links": [],
   "gnetId": null
   "gnetId": null
 }
 }

+ 1 - 1
public/app/plugins/app/testdata/plugin.json

@@ -9,7 +9,7 @@
       "name": "Grafana Project",
       "name": "Grafana Project",
       "url": "http://grafana.org"
       "url": "http://grafana.org"
     },
     },
-    "version": "1.0.13",
+    "version": "1.0.14",
     "updated": "2016-09-26"
     "updated": "2016-09-26"
   },
   },
 
 

BIN
public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png


+ 3 - 0
public/app/plugins/datasource/elasticsearch/elastic_response.js

@@ -171,6 +171,9 @@ function (_, queryDef) {
           } else {
           } else {
             props["filter"] = nameIndex;
             props["filter"] = nameIndex;
           }
           }
+          if (bucket.key_as_string) {
+            props[aggDef.field] = bucket.key_as_string;
+          }
           this.processBuckets(bucket, target, seriesList, docs, props, depth+1);
           this.processBuckets(bucket, target, seriesList, docs, props, depth+1);
         }
         }
       }
       }

BIN
public/app/plugins/datasource/elasticsearch/img/logo_large.png


+ 1 - 1
public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html

@@ -23,7 +23,7 @@
 		<label class="gf-form-label" ng-if="isFirst">
 		<label class="gf-form-label" ng-if="isFirst">
 			<a class="pointer" ng-click="addBucketAgg()"><i class="fa fa-plus"></i></a>
 			<a class="pointer" ng-click="addBucketAgg()"><i class="fa fa-plus"></i></a>
 		</label>
 		</label>
-		<label class="gf-form-label">
+		<label class="gf-form-label" ng-if="!isFirst">
 			<a class="pointer" ng-click="removeBucketAgg()"><i class="fa fa-minus"></i></a>
 			<a class="pointer" ng-click="removeBucketAgg()"><i class="fa fa-minus"></i></a>
 		</label>
 		</label>
 	</div>
 	</div>

+ 1 - 1
public/app/plugins/datasource/elasticsearch/query_def.js

@@ -22,7 +22,7 @@ function (_) {
     bucketAggTypes: [
     bucketAggTypes: [
       {text: "Terms",           value: 'terms', requiresField: true},
       {text: "Terms",           value: 'terms', requiresField: true},
       {text: "Filters",         value: 'filters' },
       {text: "Filters",         value: 'filters' },
-      {text: "Geo Hash Grid",        value: 'geohash_grid', requiresField: true},
+      {text: "Geo Hash Grid",   value: 'geohash_grid', requiresField: true},
       {text: "Date Histogram",  value: 'date_histogram', requiresField: true},
       {text: "Date Histogram",  value: 'date_histogram', requiresField: true},
     ],
     ],
 
 

+ 4 - 0
public/app/plugins/datasource/graphite/datasource.ts

@@ -126,6 +126,10 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
     }
     }
   };
   };
 
 
+  this.targetContainsTemplate = function(target) {
+    return templateSrv.variableExists(target.target);
+  };
+
   this.translateTime = function(date, roundUp) {
   this.translateTime = function(date, roundUp) {
     if (_.isString(date)) {
     if (_.isString(date)) {
       if (date === 'now') {
       if (date === 'now') {

BIN
public/app/plugins/datasource/graphite/img/graphite_logo.png


+ 26 - 3
public/app/plugins/datasource/graphite/query_ctrl.ts

@@ -209,17 +209,40 @@ export class GraphiteQueryCtrl extends QueryCtrl {
     this.panelCtrl.refresh();
     this.panelCtrl.refresh();
   }
   }
 
 
+  updateModelTarget() {
+    // render query
+    var metricPath = this.getSegmentPathUpTo(this.segments.length);
+    this.target.target = _.reduce(this.functions, this.wrapFunction, metricPath);
+
+    // render nested query
+    var targetsByRefId = _.keyBy(this.panelCtrl.panel.targets, 'refId');
+    var nestedSeriesRefRegex = /\#([A-Z])/g;
+    var targetWithNestedQueries = this.target.target.replace(nestedSeriesRefRegex, (match, g1) => {
+      var target  = targetsByRefId[g1];
+      if (!target) {
+        return match;
+      }
+
+      return target.targetFull || target.target;
+    });
+
+    delete this.target.targetFull;
+    if (this.target.target !== targetWithNestedQueries) {
+      this.target.targetFull = targetWithNestedQueries;
+    }
+  }
+
   targetChanged() {
   targetChanged() {
     if (this.error) {
     if (this.error) {
       return;
       return;
     }
     }
 
 
     var oldTarget = this.target.target;
     var oldTarget = this.target.target;
-    var target = this.getSegmentPathUpTo(this.segments.length);
-    this.target.target = _.reduce(this.functions, this.wrapFunction, target);
+    this.updateModelTarget();
 
 
     if (this.target.target !== oldTarget) {
     if (this.target.target !== oldTarget) {
-      if (this.segments[this.segments.length - 1].value !== 'select metric') {
+      var lastSegment = this.segments.length > 0 ? this.segments[this.segments.length - 1] : {};
+      if (lastSegment.value !== 'select metric') {
         this.panelCtrl.refresh();
         this.panelCtrl.refresh();
       }
       }
     }
     }

+ 18 - 0
public/app/plugins/datasource/influxdb/datasource.ts

@@ -139,6 +139,24 @@ export default class InfluxDatasource {
     });
     });
   };
   };
 
 
+  targetContainsTemplate(target) {
+    for (let group of target.groupBy) {
+      for (let param of group.params) {
+        if (this.templateSrv.variableExists(param)) {
+          return true;
+        }
+      }
+    }
+
+    for (let i in target.tags) {
+      if (this.templateSrv.variableExists(target.tags[i].value)) {
+        return true;
+      }
+    }
+
+    return false;
+  };
+
   metricFindQuery(query) {
   metricFindQuery(query) {
     var interpolated = this.templateSrv.replace(query, null, 'regex');
     var interpolated = this.templateSrv.replace(query, null, 'regex');
 
 

Some files were not shown because too many files changed in this diff