
Merge branch 'master' into alerting_mqe

bergquist, 9 years ago
Parent commit: 99e3db8208
100 changed files with 1825 additions and 539 deletions
  1. .github/ISSUE_TEMPLATE.md (+6, -5)
  2. .github/PULL_REQUEST_TEMPLATE.md (+3, -1)
  3. CHANGELOG.md (+61, -1)
  4. README.md (+2, -5)
  5. build.go (+4, -0)
  6. circle.yml (+1, -1)
  7. conf/defaults.ini (+6, -5)
  8. conf/sample.ini (+6, -5)
  9. docker/blocks/elastic1/elasticsearch.yml (+2, -0)
  10. docker/blocks/elastic1/fig (+8, -0)
  11. docker/blocks/elastic5/elasticsearch.yml (+2, -0)
  12. docker/blocks/elastic5/fig (+8, -0)
  13. docs/sources/alerting/metrics.md (+23, -0)
  14. docs/sources/alerting/notifications.md (+2, -1)
  15. docs/sources/alerting/rules.md (+4, -1)
  16. docs/sources/datasources/index.md (+20, -0)
  17. docs/sources/datasources/influxdb.md (+1, -1)
  18. docs/sources/datasources/overview.md (+0, -25)
  19. docs/sources/guides/basic_concepts.md (+1, -1)
  20. docs/sources/http_api/admin.md (+10, -5)
  21. docs/sources/http_api/auth.md (+10, -5)
  22. docs/sources/http_api/dashboard.md (+10, -5)
  23. docs/sources/http_api/data_source.md (+11, -5)
  24. docs/sources/http_api/index.md (+22, -0)
  25. docs/sources/http_api/org.md (+13, -7)
  26. docs/sources/http_api/other.md (+11, -5)
  27. docs/sources/http_api/overview.md (+0, -22)
  28. docs/sources/http_api/preferences.md (+10, -5)
  29. docs/sources/http_api/snapshot.md (+10, -5)
  30. docs/sources/installation/configuration.md (+5, -4)
  31. docs/sources/installation/debian.md (+3, -12)
  32. docs/sources/installation/rpm.md (+4, -20)
  33. docs/sources/installation/windows.md (+1, -3)
  34. docs/sources/plugins/datasources.md (+1, -1)
  35. docs/sources/project/building_from_source.md (+1, -4)
  36. docs/sources/reference/export_import.md (+4, -4)
  37. docs/sources/reference/graph.md (+6, -9)
  38. docs/sources/reference/singlestat.md (+10, -2)
  39. latest.json (+2, -2)
  40. package.json (+50, -51)
  41. packaging/publish/publish.sh (+2, -3)
  42. pkg/api/alerting.go (+6, -1)
  43. pkg/api/api.go (+4, -1)
  44. pkg/api/app_routes.go (+16, -1)
  45. pkg/api/cloudwatch/cloudwatch.go (+78, -28)
  46. pkg/api/cloudwatch/metrics.go (+44, -40)
  47. pkg/api/cloudwatch/metrics_test.go (+16, -12)
  48. pkg/api/dashboard.go (+34, -0)
  49. pkg/api/dataproxy.go (+5, -13)
  50. pkg/api/dataproxy_test.go (+7, -4)
  51. pkg/api/datasources.go (+60, -8)
  52. pkg/api/dtos/alerting.go (+6, -5)
  53. pkg/api/dtos/models.go (+23, -13)
  54. pkg/api/frontendsettings.go (+7, -6)
  55. pkg/api/index.go (+1, -0)
  56. pkg/api/metrics.go (+8, -3)
  57. pkg/api/org.go (+3, -0)
  58. pkg/api/org_users.go (+3, -0)
  59. pkg/api/user.go (+34, -2)
  60. pkg/cmd/grafana-cli/commands/commands.go (+52, -8)
  61. pkg/cmd/grafana-cli/commands/reset_password_command.go (+44, -0)
  62. pkg/cmd/grafana-server/main.go (+1, -1)
  63. pkg/cmd/grafana-server/web.go (+5, -4)
  64. pkg/components/imguploader/webdavuploader.go (+6, -4)
  65. pkg/components/renderer/renderer.go (+8, -2)
  66. pkg/components/securejsondata/securejsondata.go (+24, -0)
  67. pkg/metrics/gauge.go (+3, -2)
  68. pkg/metrics/metrics.go (+46, -43)
  69. pkg/middleware/middleware.go (+4, -0)
  70. pkg/middleware/validate_host.go (+6, -1)
  71. pkg/models/dashboards.go (+1, -0)
  72. pkg/models/datasource.go (+30, -26)
  73. pkg/models/datasource_cache.go (+95, -0)
  74. pkg/models/datasource_cache_test.go (+157, -0)
  75. pkg/models/helpflags.go (+18, -0)
  76. pkg/models/plugin_settings.go (+4, -19)
  77. pkg/models/user.go (+8, -0)
  78. pkg/plugins/models.go (+1, -0)
  79. pkg/services/alerting/commands.go (+17, -0)
  80. pkg/services/alerting/conditions/evaluator.go (+4, -4)
  81. pkg/services/alerting/conditions/evaluator_test.go (+12, -7)
  82. pkg/services/alerting/conditions/query.go (+10, -16)
  83. pkg/services/alerting/conditions/reducer.go (+24, -1)
  84. pkg/services/alerting/conditions/reducer_test.go (+33, -4)
  85. pkg/services/alerting/eval_context.go (+1, -1)
  86. pkg/services/alerting/eval_handler.go (+22, -5)
  87. pkg/services/alerting/eval_handler_test.go (+127, -5)
  88. pkg/services/alerting/interfaces.go (+1, -0)
  89. pkg/services/alerting/notifier.go (+2, -4)
  90. pkg/services/alerting/notifiers/email.go (+1, -0)
  91. pkg/services/alerting/notifiers/opsgenie.go (+119, -0)
  92. pkg/services/alerting/notifiers/opsgenie_test.go (+52, -0)
  93. pkg/services/alerting/notifiers/pagerduty.go (+1, -0)
  94. pkg/services/alerting/notifiers/slack.go (+1, -0)
  95. pkg/services/alerting/notifiers/victorops.go (+101, -0)
  96. pkg/services/alerting/notifiers/victorops_test.go (+52, -0)
  97. pkg/services/alerting/notifiers/webhook.go (+5, -0)
  98. pkg/services/alerting/reader.go (+1, -1)
  99. pkg/services/alerting/scheduler.go (+3, -0)
  100. pkg/services/notifications/webhook.go (+12, -13)

+ 6 - 5
.github/ISSUE_TEMPLATE.md

@@ -1,7 +1,5 @@
-* **I'm submitting a ...**
-- [ ] Bug report
-- [ ] Feature request
-- [ ] Question / Support request: **Please do not** open a github issue. [Support Options](http://grafana.org/support/)
+Please prefix your title with [Bug] or [Feature request]
+For question please check [Support Options](http://grafana.org/support/). **Do not** open a github issue
 
 Please include this information:
 - What Grafana version are you using?
@@ -11,7 +9,10 @@ Please include this information:
 - What was the expected result?
 - What happened instead?
 
-**IMPORTANT** If it relates to metric data viz:
+**IMPORTANT** 
+If it relates to *metric data viz*:
 - An image or text representation of your metric query
 - The raw query and response for the network request (check this in chrome dev tools network tab, here you can see metric requests and other request, please include the request body and request response)
 
+If it relates to *alerting*
+- An image of the test execution data fully expanded.

+ 3 - 1
.github/PULL_REQUEST_TEMPLATE.md

@@ -1,2 +1,4 @@
 * Link the PR to an issue for new features
-* Rebase your PR if it gets out of sync with master
+* Rebase your PR if it gets out of sync with master
+
+**REMOVE THE TEXT ABOVE BEFORE CREATING THE PULL REQUST**

+ 61 - 1
CHANGELOG.md

@@ -1,13 +1,73 @@
-# 4.0-beta2 (unrelased)
+# 4.1-beta (unreleased)
+
+### Enhancements
+* **Postgres**: Add support for Certs for Postgres database [#6655](https://github.com/grafana/grafana/issues/6655)
+* **Victorops**: Add VictorOps notification integration [#6411](https://github.com/grafana/grafana/issues/6411), thx [@ichekrygin](https://github.com/ichekrygin)
+* **Opsgenie**: Add OpsGenie notification integratiion [#6687](https://github.com/grafana/grafana/issues/6687), thx [@kylemcc](https://github.com/kylemcc)
+* **Singlestat**: New aggregation on singlestat panel [#6740](https://github.com/grafana/grafana/pull/6740), thx [@dirk-leroux](https://github.com/dirk-leroux)
+* **Cloudwatch**: Make it possible to specify access and secret key on the data source config page [#6697](https://github.com/grafana/grafana/issues/6697)
+* **Table**: Added Hidden Column Style for Table Panel [#5677](https://github.com/grafana/grafana/pull/5677), thx [@bmundt](https://github.com/bmundt)
+* **Graph**: Shared crosshair option renamed to shared tooltip, shows tooltip on all graphs as you hover over one graph. [#1578](https://github.com/grafana/grafana/pull/1578), [#6274](https://github.com/grafana/grafana/pull/6274)
+* **Elasticsearch**: Added support for Missing option (bucket) for terms aggregation [#4244](https://github.com/grafana/grafana/pull/4244), thx [@shanielh](https://github.com/shanielh)
+* **Elasticsearch**: Added support for Elasticsearch 5.x [#6356](https://github.com/grafana/grafana/pull/6356), thx [@lpic10](https://github.com/lpic10)
+* **CLI**: Make it possible to reset the admin password using the grafana-cli. [#5479](https://github.com/grafana/grafana/issues/5479)
+* **Influxdb**: Support multiple tags in InfluxDB annotations. [#4550](https://github.com/grafana/grafana/pull/4550)
+
+### Bugfixes
+* **API**: HTTP API for deleting org returning incorrect message for a non-existing org [#6679](https://github.com/grafana/grafana/issues/6679)
+* **Dashboard**: Posting empty dashboard result in corrupted dashboard [#5443](https://github.com/grafana/grafana/issues/5443)
+
+# 4.0.2 (2016-12-08)
+
+### Enhancements
+* **Playlist**: Add support for kiosk mode [#6727](https://github.com/grafana/grafana/issues/6727)
+
+### Bugfixes
+* **Alerting**: Add alert message to webhook notifications [#6807](https://github.com/grafana/grafana/issues/6807)
+* **Alerting**: Fixes a bug where avg() reducer treated null as zero. [#6879](https://github.com/grafana/grafana/issues/6879)
+* **PNG Rendering**: Fix for server side rendering when using non default http addr bind and domain setting [#6813](https://github.com/grafana/grafana/issues/6813)
+* **PNG Rendering**: Fix for server side rendering when setting enforce_domain to true [#6769](https://github.com/grafana/grafana/issues/6769)
+* **Webhooks**: Add content type json to outgoing webhooks [#6822](https://github.com/grafana/grafana/issues/6822)
+* **Keyboard shortcut**: Fixed zoom out shortcut [#6837](https://github.com/grafana/grafana/issues/6837)
+* **Webdav**: Adds basic auth headers to webdav uploader [#6779](https://github.com/grafana/grafana/issues/6779)
+
+# 4.0.1 (2016-12-02)
+
+> **Notice**
+4.0.0 had serious connection pooling issue when using a data source in proxy access. This bug caused lots of resource issues
+due to too many connections/file handles on the data source backend. This problem is fixed in this release.
+
+### Bugfixes
+* **Metrics**: Fixes nil pointer dereference on my arm build [#6749](https://github.com/grafana/grafana/issues/6749)
+* **Data proxy**: Fixes a tcp pooling issue in the datasource reverse proxy [#6759](https://github.com/grafana/grafana/issues/6759)
+
+# 4.0-stable (2016-11-29)
+
+### Bugfixes
+* **Server-side rendering**: Fixed address used when rendering panel via phantomjs and using non default http_addr config [#6660](https://github.com/grafana/grafana/issues/6660)
+* **Graph panel**: Fixed graph panel tooltip sort order issue [#6648](https://github.com/grafana/grafana/issues/6648)
+* **Unsaved changes**: You now navigate to the intended page after saving in the unsaved changes dialog [#6675](https://github.com/grafana/grafana/issues/6675)
+* **TLS Client Auth**: Support for TLS client authentication for datasource proxies [#2316](https://github.com/grafana/grafana/issues/2316)
+* **Alerts out of sync**: Saving dashboards with broken alerts causes sync problem[#6576](https://github.com/grafana/grafana/issues/6576)
+* **Alerting**: Saving an alert with condition "HAS NO DATA" throws an error[#6701](https://github.com/grafana/grafana/issues/6701)
+* **Config**: Improve error message when parsing broken config file [#6731](https://github.com/grafana/grafana/issues/6731)
+* **Table**: Render empty dates as - instead of current date [#6728](https://github.com/grafana/grafana/issues/6728)
+
+# 4.0-beta2 (2016-11-21)
 
 ### Bugfixes
 * **Graph Panel**: Log base scale on right Y-axis had no effect, max value calc was not applied, [#6534](https://github.com/grafana/grafana/issues/6534)
 * **Graph Panel**: Bar width if bars was only used in series override, [#6528](https://github.com/grafana/grafana/issues/6528)
 * **UI/Browser**: Fixed issue with page/view header gradient border not showing in Safari, [#6530](https://github.com/grafana/grafana/issues/6530)
+* **Cloudwatch**: Fixed cloudwatch datasource requesting to many datapoints, [#6544](https://github.com/grafana/grafana/issues/6544)
 * **UX**: Panel Drop zone visible after duplicating panel, and when entering fullscreen/edit view, [#6598](https://github.com/grafana/grafana/issues/6598)
+* **Templating**: Newly added variable was not visible directly only after dashboard reload, [#6622](https://github.com/grafana/grafana/issues/6622)
 
 ### Enhancements
 * **Singlestat**: Support repeated template variables in prefix/postfix [#6595](https://github.com/grafana/grafana/issues/6595)
+* **Templating**: Don't persist variable options with refresh option [#6586](https://github.com/grafana/grafana/issues/6586)
+* **Alerting**: Add ability to have OR conditions (and mixing AND & OR) [#6579](https://github.com/grafana/grafana/issues/6579)
+* **InfluxDB**: Fix for Ad-Hoc Filters variable & changing dashboards [#6821](https://github.com/grafana/grafana/issues/6821)
 
 # 4.0-beta1 (2016-11-09)
 

+ 2 - 5
README.md

@@ -17,6 +17,7 @@ Graphite, Elasticsearch, OpenTSDB, Prometheus and InfluxDB.
 - [What's New in Grafana 2.1](http://docs.grafana.org/guides/whats-new-in-v2-1/)
 - [What's New in Grafana 2.5](http://docs.grafana.org/guides/whats-new-in-v2-5/)
 - [What's New in Grafana 3.0](http://docs.grafana.org/guides/whats-new-in-v3/)
+- [What's New in Grafana 4.0](http://docs.grafana.org/guides/whats-new-in-v4/)
 
 ## Features
 ### Graphite Target Editor
@@ -78,7 +79,7 @@ the latest master builds [here](http://grafana.org/builds)
 
 ### Dependencies
 
-- Go 1.7
+- Go 1.7.3
 - NodeJS v4+
 
 ### Get Code
@@ -154,10 +155,6 @@ If you have any idea for an improvement or found a bug do not hesitate to open a
 And if you have time clone this repo and submit a pull request and help me make Grafana
 the kickass metrics & devops dashboard we all dream about!
 
-Before creating a pull request be sure that "grunt test" runs without any style or unit test errors, also
-please [sign the CLA](http://docs.grafana.org/project/cla/)
-
 ## License
-
 Grafana is distributed under Apache 2.0 License.
 Work in progress Grafana 2.0 (with included Grafana backend)

+ 4 - 0
build.go

@@ -73,6 +73,10 @@ func main() {
 		case "setup":
 			setup()
 
+    case "build-cli":
+      clean()
+      build("grafana-cli", "./pkg/cmd/grafana-cli", []string{})
+
 		case "build":
 			clean()
 			for _, binary := range binaries {

+ 1 - 1
circle.yml

@@ -5,7 +5,7 @@ machine:
     GOPATH: "/home/ubuntu/.go_workspace"
     ORG_PATH: "github.com/grafana"
     REPO_PATH: "${ORG_PATH}/grafana"
-    GODIST: "go1.7.3.linux-amd64.tar.gz"
+    GODIST: "go1.7.4.linux-amd64.tar.gz"
   post:
     - mkdir -p download
     - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST

+ 6 - 5
conf/defaults.ini

@@ -67,6 +67,7 @@ type = sqlite3
 host = 127.0.0.1:3306
 name = grafana
 user = root
+# If the password contains # or ; you have to wrap it with trippel quotes. Ex """#password;"""
 password =
 # Use either URL or the previous fields to configure the database
 # Example: mysql://user:secret@host:port/database
@@ -208,7 +209,7 @@ org_role = Viewer
 #################################### Github Auth #########################
 [auth.github]
 enabled = false
-allow_sign_up = false
+allow_sign_up = true
 client_id = some_id
 client_secret = some_secret
 scopes = user:email
@@ -221,7 +222,7 @@ allowed_organizations =
 #################################### Google Auth #########################
 [auth.google]
 enabled = false
-allow_sign_up = false
+allow_sign_up = true
 client_id = some_client_id
 client_secret = some_client_secret
 scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
@@ -234,7 +235,7 @@ hosted_domain =
 #################################### Grafana.net Auth ####################
 [auth.grafananet]
 enabled = false
-allow_sign_up = false
+allow_sign_up = true
 client_id = some_id
 client_secret = some_secret
 scopes = user:email
@@ -243,7 +244,7 @@ allowed_organizations =
 #################################### Generic OAuth #######################
 [auth.generic_oauth]
 enabled = false
-allow_sign_up = false
+allow_sign_up = true
 client_id = some_id
 client_secret = some_secret
 scopes = user:email
@@ -289,7 +290,7 @@ templates_pattern = emails/*.html
 [log]
 # Either "console", "file", "syslog". Default is console and  file
 # Use space to separate multiple modes, e.g. "console file"
-mode = console, file
+mode = console file
 
 # Either "debug", "info", "warn", "error", "critical", default is "info"
 level = info

+ 6 - 5
conf/sample.ini

@@ -69,6 +69,7 @@
 ;host = 127.0.0.1:3306
 ;name = grafana
 ;user = root
+# If the password contains # or ; you have to wrap it with trippel quotes. Ex """#password;"""
 ;password =
 
 # Use either URL or the previous fields to configure the database
@@ -193,7 +194,7 @@
 #################################### Github Auth ##########################
 [auth.github]
 ;enabled = false
-;allow_sign_up = false
+;allow_sign_up = true
 ;client_id = some_id
 ;client_secret = some_secret
 ;scopes = user:email,read:org
@@ -206,7 +207,7 @@
 #################################### Google Auth ##########################
 [auth.google]
 ;enabled = false
-;allow_sign_up = false
+;allow_sign_up = true
 ;client_id = some_client_id
 ;client_secret = some_client_secret
 ;scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
@@ -218,7 +219,7 @@
 #################################### Generic OAuth ##########################
 [auth.generic_oauth]
 ;enabled = false
-;allow_sign_up = false
+;allow_sign_up = true
 ;client_id = some_id
 ;client_secret = some_secret
 ;scopes = user:email,read:org
@@ -231,7 +232,7 @@
 #################################### Grafana.net Auth ####################
 [auth.grafananet]
 ;enabled = false
-;allow_sign_up = false
+;allow_sign_up = true
 ;client_id = some_id
 ;client_secret = some_secret
 ;scopes = user:email
@@ -272,7 +273,7 @@
 [log]
 # Either "console", "file", "syslog". Default is console and  file
 # Use space to separate multiple modes, e.g. "console file"
-;mode = console, file
+;mode = console file
 
 # Either "trace", "debug", "info", "warn", "error", "critical", default is "info"
 ;level = info

+ 2 - 0
docker/blocks/elastic1/elasticsearch.yml

@@ -0,0 +1,2 @@
+script.inline: on
+script.indexed: on

+ 8 - 0
docker/blocks/elastic1/fig

@@ -0,0 +1,8 @@
+elasticsearch1:
+  image: elasticsearch:1.7.6
+  command: elasticsearch -Des.network.host=0.0.0.0
+  ports:
+    - "11200:9200"
+    - "11300:9300"
+  volumes:
+    - ./blocks/elastic/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml

+ 2 - 0
docker/blocks/elastic5/elasticsearch.yml

@@ -0,0 +1,2 @@
+script.inline: on
+script.indexed: on

+ 8 - 0
docker/blocks/elastic5/fig

@@ -0,0 +1,8 @@
+# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine
+
+elasticsearch5:
+  image: elasticsearch:5
+  command: elasticsearch
+  ports:
+    - "10200:9200"
+    - "10300:9300"

+ 23 - 0
docs/sources/alerting/metrics.md

@@ -0,0 +1,23 @@
++++
+title = "Alerting Metrics"
+description = "Alerting Metrics Guide"
+keywords = ["Grafana", "alerting", "guide", "metrics"]
+type = "docs"
+[menu.docs]
+name = "Metrics"
+parent = "alerting"
+weight = 2
++++
+
+# Metrics from the alert engine
+
+> Alerting is only available in Grafana v4.0 and above.
+
+The alert engine publish some internal metrics about itself. You can read more about how Grafana published [interal metrics](/installation/configuration/#metrics)
+
+Description | Type | Metric name
+---------- | ----------- | ----------
+Total number of alerts | counter | `alerting.active_alerts`
+Alert execution result | counter | `alerting.result`
+Notifications sent counter | counter | `alerting.notifications_sent`
+Alert execution timer | timer | `alerting.execution_time`

+ 2 - 1
docs/sources/alerting/notifications.md

@@ -69,6 +69,7 @@ Example json body:
   "ruleUrl": "http://url.to.grafana/db/dashboard/my_dashboard?panelId=2",
   "state": "Alerting",
   "imageUrl": "http://s3.image.url",
+  "message": "Load is peaking. Make sure the traffic is real and spin up more webfronts",
   "evalMatches": [
     {
       "metric": "requests",
@@ -91,7 +92,7 @@ Auto resolve incidents | Resolve incidents in pagerduty once the alert goes back
 
 # Enable images in notifications {#external-image-store}
 
-Grafan can render the panel associated with the alert rule and include that in the notification. Some types
+Grafana can render the panel associated with the alert rule and include that in the notification. Some types
 of notifications require that this image be publicly accessable (Slack for example). In order to support
 images in notifications like Slack Grafana can upload the image to an image store. It currently supports
 Amazon S3 for this and Webdav. So to set that up you need to configure the

+ 4 - 1
docs/sources/alerting/rules.md

@@ -55,7 +55,10 @@ Currently the only condition type that exists is a `Query` condition that allows
 specify a query letter, time range and an aggregation function. The letter refers to
 a query you already have added in the **Metrics** tab. The result from the query and the aggregation function is
 a single value that is then used in the threshold check. The query used in an alert rule cannot
-contain any template variables. Currently we only support `AND` operator between conditions.
+contain any template variables. Currently we only support `AND` and `OR` operators between conditions and they are executed serially.
+For example, we have 3 conditions in the following order:
+`condition:A(evaluates to: TRUE) OR condition:B(evaluates to: FALSE) AND condition:C(evaluates to: TRUE)`
+so the result will be calculated as ((TRUE OR FALSE) AND TRUE) = TRUE.
 
 We plan to add other condition types in the future, like `Other Alert`, where you can include the state
 of another alert in your conditions, and `Time Of Day`.

+ 20 - 0
docs/sources/datasources/index.md

@@ -8,4 +8,24 @@ parent = "features"
 weight = 5
 +++
 
+# Data Source Overview
+Grafana supports many different storage backends for your time series data (Data Source). Each Data Source has a specific Query Editor that is customized for the features and capabilities that the particular Data Source exposes.
+
+
+## Querying
+The query language and capabilities of each Data Source are obviously very different. You can combine data from multiple Data Sources onto a single Dashboard, but each Panel is tied to a specific Data Source that belongs to a particular Organization.
+
+## Supported Data Sources
+The following datasources are officially supported:
+
+* [Graphite]({{< relref "graphite.md" >}})
+* [Elasticsearch]({{< relref "elasticsearch.md" >}})
+* [CloudWatch]({{< relref "cloudwatch.md" >}})
+* [InfluxDB]({{< relref "influxdb.md" >}})
+* [OpenTSDB]({{< relref "opentsdb.md" >}})
+* [Prometheus]({{< relref "prometheus.md" >}})
+
+## Data source plugins
+
+Since grafana 3.0 you can install data sources as plugins. Checkout [Grafana.net](https://grafana.net/plugins) for more data sources.
 

+ 1 - 1
docs/sources/datasources/influxdb.md

@@ -116,7 +116,7 @@ the hosts variable only show hosts from the current selected region with a query
 SHOW TAG VALUES WITH KEY = "hostname"  WHERE region =~ /$region/
 ```
 
-> Always you `regex values` or `regex wildcard` for All format or multi select format.
+> Always use `regex values` or `regex wildcard` for All format or multi select format.
 
 ![](/img/docs/influxdb/templating_simple_ex1.png)
 

+ 0 - 25
docs/sources/datasources/overview.md

@@ -1,25 +0,0 @@
-----
-page_title: Data Source Overview
-page_description: Data Source Overview
-page_keywords: grafana, graphite, influxDB, KairosDB, OpenTSDB, Prometheus, documentation
----
-
-# Data Source Overview
-Grafana supports many different storage backends for your time series data (Data Source). Each Data Source has a specific Query Editor that is customized for the features and capabilities that the particular Data Source exposes.
-
-
-## Querying
-The query language and capabilities of each Data Source are obviously very different. You can combine data from multiple Data Sources onto a single Dashboard, but each Panel is tied to a specific Data Source that belongs to a particular Organization.
-
-## Supported Data Sources
-The following datasources are officially supported:
-
-* [Graphite](/datasources/graphite/)
-* [Elasticsearch](/datasources/elasticsearch/)
-* [CloudWatch](/datasources/cloudwatch/)
-* [InfluxDB](/datasources/influxdb/)
-* [OpenTSDB](/datasources/opentsdb/)
-* [KairosDB](/datasources/kairosdb)
-* [Prometheus](/datasources/prometheus)
-
-Grafana can query any Elasticsearch index for annotation events, but at this time, it's not supported for metric queries. Learn more about [annotations](/reference/annotations/#elasticsearch-annotations)

+ 1 - 1
docs/sources/guides/basic_concepts.md

@@ -29,7 +29,7 @@ Each Organization can have one or more Data Sources.
 
 All Dashboards are owned by a particular Organization.
 
- > Note: It is important to remember that most metric databases to not provide any sort of per-user series authentication. Therefore, in Grafana, Data Sources and Dashboards are available to all Users in a particular Organization.
+ > Note: It is important to remember that most metric databases do not provide any sort of per-user series authentication. Therefore, in Grafana, Data Sources and Dashboards are available to all Users in a particular Organization.
 
 For more details on the user model for Grafana, please refer to [Admin](/reference/admin/)
 

+ 10 - 5
docs/sources/http_api/admin.md

@@ -1,8 +1,13 @@
-----
-page_title: Admin APIs
-page_description: Grafana Admin API Reference
-page_keywords: grafana, admin, http, api, documentation
----
++++
+title = "Admin HTTP API "
+description = "Grafana Admin HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "admin"]
+aliases = ["/http_api/admin/"]
+type = "docs"
+[menu.docs]
+name = "Admin"
+parent = "http_api"
++++
 
 # Admin API
 

+ 10 - 5
docs/sources/http_api/auth.md

@@ -1,8 +1,13 @@
-----
-page_title: Authentication API
-page_description: Grafana HTTP API Reference
-page_keywords: grafana, admin, http, api, documentation
----
++++
+title = "Authentication HTTP API "
+description = "Grafana Authentication HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "authentication"]
+aliases = ["/http_api/authentication/"]
+type = "docs"
+[menu.docs]
+name = "Authentication"
+parent = "http_api"
++++
 
 # Authentication API
 

+ 10 - 5
docs/sources/http_api/dashboard.md

@@ -1,8 +1,13 @@
-----
-page_title: Dashboard API
-page_description: Grafana Dashboard API Reference
-page_keywords: grafana, admin, http, api, documentation, dashboard
----
++++
+title = "Dashboard HTTP API "
+description = "Grafana Dashboard HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "dashboard"]
+aliases = ["/http_api/dashboard/"]
+type = "docs"
+[menu.docs]
+name = "Dashboard"
+parent = "http_api"
++++
 
 # Dashboard API
 

+ 11 - 5
docs/sources/http_api/data_source.md

@@ -1,8 +1,14 @@
-----
-page_title: Data source API
-page_description: Grafana Data source API Reference
-page_keywords: grafana, admin, http, api, documentation, datasource
----
++++
+title = "Data source HTTP API "
+description = "Grafana Data source HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "data source"]
+aliases = ["/http_api/datasource/"]
+type = "docs"
+[menu.docs]
+name = "Data source"
+parent = "http_api"
++++
+
 
 # Data source API
 

+ 22 - 0
docs/sources/http_api/index.md

@@ -1,7 +1,29 @@
 +++
 title = "HTTP API"
+description = "Grafana HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "overview"]
+type = "docs"
 [menu.docs]
 name = "HTTP API"
 identifier = "http_api"
 weight = 9
 +++
+
+
+# HTTP API Reference
+
+The Grafana backend exposes an HTTP API, the same API is used by the frontend to do everything from saving
+dashboards, creating users and updating data sources.
+
+## Supported HTTP APIs:
+
+
+* [Authentication API]({{< relref "auth.md" >}})
+* [Dashboard API]({{< relref "dashboard.md" >}})
+* [Data Source API]({{< relref "data_source.md" >}})
+* [Organisation API]({{< relref "org.md" >}})
+* [User API]({{< relref "user.md" >}})
+* [Admin API]({{< relref "admin.md" >}})
+* [Snapshot API]({{< relref "snapshot.md" >}})
+* [Preferences API]({{< relref "preferences.md" >}})
+* [Other API]({{< relref "other.md" >}})

+ 13 - 7
docs/sources/http_api/org.md

@@ -1,8 +1,14 @@
-----
-page_title: Organisation API
-page_description: Grafana Organisation API Reference
-page_keywords: grafana, admin, http, api, documentation, orgs, organisation
----
++++
+title = "Organisation HTTP API "
+description = "Grafana Organisation HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "organisation"]
+aliases = ["/http_api/organisation/"]
+type = "docs"
+[menu.docs]
+name = "Organisation"
+parent = "http_api"
++++
+
 
 # Organisation API
 
@@ -87,11 +93,11 @@ page_keywords: grafana, admin, http, api, documentation, orgs, organisation
 
 ## Create Organisation
 
-`POST /api/org`
+`POST /api/orgs`
 
 **Example Request**:
 
-    POST /api/org HTTP/1.1
+    POST /api/orgs HTTP/1.1
     Accept: application/json
     Content-Type: application/json
     Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

+ 11 - 5
docs/sources/http_api/other.md

@@ -1,8 +1,14 @@
-----
-page_title: Other APIs
-page_description: Grafana Other API Reference
-page_keywords: grafana, admin, http, api, documentation, dashboards
----
++++
+title = "Other HTTP API "
+description = "Grafana Other HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "other"]
+aliases = ["/http_api/other/"]
+type = "docs"
+[menu.docs]
+name = "Other"
+parent = "http_api"
++++
+
 
 # Frontend Settings API
 

+ 0 - 22
docs/sources/http_api/overview.md

@@ -1,22 +0,0 @@
-----
-page_title: HTTP API
-page_description: Grafana HTTP API Reference
-page_keywords: grafana, admin, http, api, documentation
----
-
-# HTTP API Reference
-
-The Grafana backend exposes an HTTP API, the same API is used by the frontend to do everything from saving
-dashboards, creating users and updating data sources.
-
-###Supported HTTP APIs:
-
-* [Authentication API](/http_api/auth/)
-* [Dashboard API](/http_api/dashboard/)
-* [Data Source API](/http_api/data_source/)
-* [Organisation API](/http_api/org/)
-* [User API](/http_api/user/)
-* [Admin API](/http_api/admin/)
-* [Snapshot API](/http_api/snapshot/)
-* [Preferences API](/http_api/preferences/)
-* [Other API](/http_api/other/)

+ 10 - 5
docs/sources/http_api/preferences.md

@@ -1,8 +1,13 @@
-----
-page_title: Preferences API
-page_description: Grafana Preferences API Reference
-page_keywords: grafana, preferences, http, api, documentation
----
++++
+title = "HTTP Preferences API "
+description = "Grafana HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "preferences"]
+aliases = ["/http_api/preferences/"]
+type = "docs"
+[menu.docs]
+name = "Preferences"
+parent = "http_api"
++++
 
 # User and Org Preferences API
 

+ 10 - 5
docs/sources/http_api/snapshot.md

@@ -1,8 +1,13 @@
-----
-page_title: Snapshot API
-page_description: Grafana Snapshot API Reference
-page_keywords: grafana, admin, http, api, documentation, snapshot, dashboard
----
++++
+title = "HTTP Snapshot API "
+description = "Grafana HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "snapshot"]
+aliases = ["/http_api/snapshot/"]
+type = "docs"
+[menu.docs]
+name = "Snapshot"
+parent = "http_api"
++++
 
 # Snapshot API
 

+ 5 - 4
docs/sources/installation/configuration.md

@@ -170,7 +170,7 @@ The database user (not applicable for `sqlite3`).
 
 ### password
 
-The database user's password (not applicable for `sqlite3`).
+The database user's password (not applicable for `sqlite3`). If the password contains `#` or `;` you have to wrap it with trippel quotes. Ex `"""#password;"""`
 
 ### ssl_mode
 
@@ -289,6 +289,7 @@ example:
 
     [auth.github]
     enabled = true
+    allow_sign_up = true
     client_id = YOUR_GITHUB_APP_CLIENT_ID
     client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
     scopes = user:email
@@ -323,7 +324,7 @@ Grafana instance. For example:
     team_ids = 150,300
     auth_url = https://github.com/login/oauth/authorize
     token_url = https://github.com/login/oauth/access_token
-    allow_sign_up = false
+    allow_sign_up = true
 
 ### allowed_organizations
 
@@ -367,7 +368,7 @@ Secret. Specify these in the Grafana configuration file. For example:
     auth_url = https://accounts.google.com/o/oauth2/auth
     token_url = https://accounts.google.com/o/oauth2/token
     allowed_domains = mycompany.com mycompany.org
-    allow_sign_up = false
+    allow_sign_up = true
 
 Restart the Grafana back-end. You should now see a Google login button
 on the login page. You can now login or sign up with your Google
@@ -394,7 +395,7 @@ browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
     token_url =
     api_url =
     allowed_domains = mycompany.com mycompany.org
-    allow_sign_up = false
+    allow_sign_up = true
 
 Set api_url to the resource that returns basic user info.
 

+ 3 - 12
docs/sources/installation/debian.md

@@ -14,23 +14,16 @@ weight = 1
 
 Description | Download
 ------------ | -------------
-Stable for Debian-based Linux | [3.1.1 (x86-64 deb)](https://grafanarel.s3.amazonaws.com/builds/grafana_3.1.1-1470047149_amd64.deb)
-Latest Beta for Debian-based Linux | [4.0.0-beta1 (x86-64 deb)](https://grafanarel.s3.amazonaws.com/builds/grafana_4.0.0-1478693311beta1_amd64.deb)
+Stable for Debian-based Linux | [4.0.2 (x86-64 deb)](https://grafanarel.s3.amazonaws.com/builds/grafana_4.0.2-1481203731_amd64.deb)
 
 ## Install Stable
 
 ```
-$ wget https://grafanarel.s3.amazonaws.com/builds/grafana_3.1.1-1470047149_amd64.deb
+$ wget https://grafanarel.s3.amazonaws.com/builds/grafana_4.0.2-1481203731_amd64.deb
 $ sudo apt-get install -y adduser libfontconfig
-$ sudo dpkg -i grafana_3.1.1-1470047149_amd64.deb
+$ sudo dpkg -i grafana_4.0.2-1481203731_amd64.deb
 ```
 
-## Install Latest Beta
-
-    $ wget https://grafanarel.s3.amazonaws.com/builds/grafana_4.0.0-1478693311beta1_amd64.deb
-    $ sudo apt-get install -y adduser libfontconfig
-    $ sudo dpkg -i grafana_4.0.0-1478693311beta1_amd64.deb
-
 ## APT Repository
 
 Add the following line to your `/etc/apt/sources.list` file.
@@ -139,5 +132,3 @@ To configure Grafana add a configuration file named `custom.ini` to the
 Start Grafana by executing `./bin/grafana-server web`. The `grafana-server`
 binary needs the working directory to be the root install directory (where the
 binary and the `public` folder is located).
-
-

+ 4 - 20
docs/sources/installation/rpm.md

@@ -14,40 +14,24 @@ weight = 2
 
 Description | Download
 ------------ | -------------
-Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [3.1.1 (x86-64 rpm)](https://grafanarel.s3.amazonaws.com/builds/grafana-3.1.1-1470047149.x86_64.rpm)
-Latest Beta for CentOS / Fedora / OpenSuse / Redhat Linux | [4.0.0-beta1 (x86-64 rpm)](https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.0-1478693311beta1.x86_64.rpm)
+Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.0.2 (x86-64 rpm)](https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.2-1481203731.x86_64.rpm)
 
 ## Install Stable
 
 You can install Grafana using Yum directly.
 
-    $ sudo yum install https://grafanarel.s3.amazonaws.com/builds/grafana-3.1.1-1470047149.x86_64.rpm
+    $ sudo yum install https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.2-1481203731.x86_64.rpm
 
 Or install manually using `rpm`.
 
 #### On CentOS / Fedora / Redhat:
 
     $ sudo yum install initscripts fontconfig
-    $ sudo rpm -Uvh grafana-3.1.1-1470047149.x86_64.rpm
+    $ sudo rpm -Uvh grafana-4.0.2-1481203731.x86_64.rpm
 
 #### On OpenSuse:
 
-    $ sudo rpm -i --nodeps grafana-3.1.1-1470047149.x86_64.rpm
-
-## Or Install Latest Beta
-
-    $ sudo yum install https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.0-1478693311beta1.x86_64.rpm
-
-Or install manually using `rpm`.
-
-#### On CentOS / Fedora / Redhat:
-
-    $ sudo yum install initscripts fontconfig
-    $ sudo rpm -Uvh grafana-4.0.0-1478693311beta1.x86_64.rpm
-
-#### On OpenSuse:
-
-    $ sudo rpm -i --nodeps grafana-4.0.0-1478693311beta1.x86_64.rpm
+    $ sudo rpm -i --nodeps grafana-4.0.2-1481203731.x86_64.rpm
 
 ## Install via YUM Repository
 

+ 1 - 3
docs/sources/installation/windows.md

@@ -13,9 +13,7 @@ weight = 3
 
 Description | Download
 ------------ | -------------
-Latest stable package for Windows | [grafana.3.1.1.windows-x64.zip](https://grafanarel.s3.amazonaws.com/winbuilds/dist/grafana-3.1.1.windows-x64.zip)
-Latest beta package for Windows | [grafana.4.0.0-beta1.windows-x64.zip](https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.0-beta1.windows-x64.zip)
-
+Latest stable package for Windows | [grafana.4.0.2.windows-x64.zip](https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.2.windows-x64.zip)
 
 ## Configure
 

+ 1 - 1
docs/sources/plugins/datasources.md

@@ -37,7 +37,7 @@ The Datasource should contain the following functions.
 ```
 query(options) //used by panels to get data
 testDatasource() //used by datasource configuration page to make sure the connection is working
-annotationsQuery(options) // used dashboards to get annotations
+annotationsQuery(options) // used by dashboards to get annotations
 metricFindQuery(options) // used by query editor to get metric suggestions.
 ```
 

+ 1 - 4
docs/sources/project/building_from_source.md

@@ -13,7 +13,7 @@ dev environment. Grafana ships with its own required backend server; also comple
 
 ## Dependencies
 
-- [Go 1.7](https://golang.org/dl/)
+- [Go 1.7.3](https://golang.org/dl/)
 - [NodeJS](https://nodejs.org/download/)
 
 ## Get Code
@@ -91,6 +91,3 @@ Learn more about Grafana config options in the [Configuration section](/installa
 
 ## Create a pull requests
 Please contribute to the Grafana project and submit a pull request! Build new features, write or update documentation, fix bugs and generally make Grafana even more awesome.
-
-Before or after you create a pull request, sign the [contributor license agreement](/project/cla.html).
-Together we can build amazing software faster.

+ 4 - 4
docs/sources/reference/export_import.md

@@ -9,7 +9,7 @@ weight = 8
 
 # Export and Import
 
-Grafana Dashboads can easily be exported and imported, either from the UI or from the HTTP API.
+Grafana Dashboards can easily be exported and imported, either from the UI or from the HTTP API.
 
 ## Exporting a dashboard
 
@@ -22,9 +22,9 @@ The export feature is accessed from the share menu.
 ### Making a dashboard portable
 
 If you want to export a dashboard for others to use then it could be a good idea to
-add template variables for things like a metric prefix (use contant variable) and server name.
+add template variables for things like a metric prefix (use constant variable) and server name.
 
-A template varible of the type `Constant` will automatically be hidden in
+A template variable of the type `Constant` will automatically be hidden in
 the dashboard, and will also be added as an required input when the dashboard is imported.
 
 ## Importing a dashboard
@@ -43,7 +43,7 @@ data source you want the dashboard to use and specify any metric prefixes (if th
 
 ## Discover dashboards on Grafana.net
 
-Find dashboads for common server applications at [Grafana.net/dashboards](https://grafana.net/dashboards).
+Find dashboards for common server applications at [Grafana.net/dashboards](https://grafana.net/dashboards).
 
 <img src="/img/docs/v31/gnet_dashboards_list.png">
 

+ 6 - 9
docs/sources/reference/graph.md

@@ -48,12 +48,6 @@ populate the template variable to a desired value from the link.
 The metrics tab defines what series data and sources to render.  Each datasource provides different
 options.
 
-### Graphite
-
-### InfluxDB
-
-### OpenTSDB
-
 ## Axes & Grid
 
 ![](/img/docs/v2/graph_axes_grid_options.png)
@@ -71,9 +65,6 @@ The ``Left Y`` and ``Right Y`` can be customized using:
 
 Axes can also be hidden by unchecking the appropriate box from `Show Axis`.
 
-Thresholds allow you to add arbitrary lines or sections to the graph to make it easier to see when
-the graph crosses a particular threshold.
-
 ### Legend
 
 The legend hand be hidden by checking the ``Show`` checkbox.  If it's shown, it can be
@@ -103,6 +94,12 @@ It is just the sum of all data points received by Grafana.
 
 Display styles controls properties of the graph.
 
+### Thresholds
+
+Thresholds allow you to add arbitrary lines or sections to the graph to make it easier to see when
+the graph crosses a particular threshold.
+
+
 ### Chart Options
 
 - ``Bar`` - Display values as a bar chart

+ 10 - 2
docs/sources/reference/singlestat.md

@@ -1,5 +1,5 @@
 +++
-title = "Singletat Panel"
+title = "Singlestat Panel"
 keywords = ["grafana", "dashboard", "documentation", "panels", "singlestat"]
 type = "docs"
 [menu.docs]
@@ -23,7 +23,15 @@ The singlestat panel has a normal query editor to allow you define your exact me
 
 1. `Big Value`: Big Value refers to how we display the main stat for the Singlestat Panel. This is always a single value that is displayed in the Panel in between two strings, `Prefix` and  `Suffix`. The single number is calculated by choosing a function (min,max,average,current,total) of your metric query. This functions reduces your query into a single numeric value.
 2. `Font Size`: You can use this section to select the font size of the different texts in the Singlestat Panel, i.e. prefix, value and postfix.
-3. `Values`: The Value fields let you set the function (min, max, average, current, total) that your entire query is reduced into a single value with. You can also set the font size of the Value field and font-size (as a %) of the metric query that the Panel is configured with. This reduces the entire query into a single summary value that is displayed.
+3. `Values`: The Value fields let you set the function (min, max, average, current, total, first, delta, range) that your entire query is reduced into a single value with. You can also set the font size of the Value field and font-size (as a %) of the metric query that the Panel is configured with. This reduces the entire query into a single summary value that is displayed.
+   * `min` - The smallest value in the series
+   * `max` - The largest value in the series
+   * `average` - The average of all the non-null values in the series
+   * `current` - The last value in the series. If the series ends on null the previous value will be used.
+   * `total` - The sum of all the non-null values in the series
+   * `first` - The first value in the series
+   * `delta` - The total incremental increase (of a counter) in the series. An attempt is made to account for counter resets, but this will only be accurate for single instance metrics. Used to show total counter increase in time series.
+   * `range` - The difference between 'min' and 'max'. Useful the show the range of change for a gauge.
 4. `Postfixes`: The Postfix fields let you define a custom label and font-size (as a %) to appear *after* the value
 5. `Units`: Units are appended to the the Singlestat  within the panel, and will respect the color and threshold settings for the value.
 6. `Decimals`: The Decimal field allows you to override the automatic decimal precision, and set it explicitly.

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
-  "stable": "3.1.1",
-	"testing": "3.1.1"
+  "stable": "4.0.2",
+	"testing": "4.0.2"
 }

+ 50 - 51
package.json

@@ -4,57 +4,56 @@
     "company": "Coding Instinct AB"
   },
   "name": "grafana",
-  "version": "4.0.0-beta1",
+  "version": "4.1.0-pre1",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"
   },
   "devDependencies": {
-    "zone.js": "^0.6.6",
     "autoprefixer": "^6.4.0",
     "es6-promise": "^3.0.2",
     "es6-shim": "^0.35.1",
     "expect.js": "~0.2.0",
-    "glob": "~7.1.1",
-    "grunt": "~0.4.0",
-    "grunt-angular-templates": "^0.5.5",
-    "grunt-cli": "~0.1.13",
-    "grunt-contrib-clean": "~0.7.0",
+    "glob": "~7.0.0",
+    "grunt": "^0.4.5",
+    "grunt-angular-templates": "^1.1.0",
+    "grunt-cli": "~1.2.0",
+    "grunt-contrib-clean": "~1.0.0",
     "grunt-contrib-compress": "^1.3.0",
-    "grunt-contrib-concat": "^0.5.1",
-    "grunt-contrib-copy": "~0.8.2",
-    "grunt-contrib-cssmin": "~0.14.0",
-    "grunt-contrib-htmlmin": "~0.6.0",
-    "grunt-contrib-jshint": "~1.0.0",
-    "grunt-contrib-uglify": "~0.11.0",
-    "grunt-contrib-watch": "^0.6.1",
-    "grunt-filerev": "^0.2.1",
-    "grunt-git-describe": "~2.3.2",
-    "grunt-karma": "~0.12.2",
-    "grunt-ng-annotate": "^1.0.1",
-    "grunt-notify": "^0.4.3",
+    "grunt-contrib-concat": "^1.0.1",
+    "grunt-contrib-copy": "~1.0.0",
+    "grunt-contrib-cssmin": "~1.0.2",
+    "grunt-contrib-htmlmin": "~2.0.0",
+    "grunt-contrib-jshint": "~1.1.0",
+    "grunt-contrib-uglify": "~2.0.0",
+    "grunt-contrib-watch": "^1.0.0",
+    "grunt-exec": "^1.0.1",
+    "grunt-filerev": "^2.3.1",
+    "grunt-git-describe": "~2.4.2",
+    "grunt-karma": "~2.0.0",
+    "grunt-ng-annotate": "^3.0.0",
+    "grunt-notify": "^0.4.5",
     "grunt-postcss": "^0.8.0",
-    "grunt-sass": "^1.1.0",
-    "grunt-string-replace": "~1.2.1",
-    "grunt-systemjs-builder": "^0.2.6",
-    "grunt-tslint": "^3.0.2",
-    "grunt-typescript": "^0.8.0",
-    "grunt-usemin": "3.0.0",
-    "jshint-stylish": "~2.1.0",
-    "karma": "0.13.22",
-    "karma-chrome-launcher": "~0.2.2",
-    "karma-coverage": "0.5.3",
+    "grunt-sass": "^1.2.1",
+    "grunt-string-replace": "~1.3.1",
+    "grunt-systemjs-builder": "^0.2.7",
+    "grunt-usemin": "3.1.1",
+    "jshint-stylish": "~2.2.1",
+    "karma": "1.3.0",
+    "karma-chrome-launcher": "~2.0.0",
+    "karma-coverage": "1.1.1",
     "karma-coveralls": "1.1.2",
-    "karma-expect": "~1.1.0",
-    "karma-mocha": "~0.2.1",
-    "karma-phantomjs-launcher": "1.0.0",
-    "load-grunt-tasks": "3.4.0",
-    "mocha": "2.3.4",
-    "phantomjs-prebuilt": "^2.1.7",
-    "reflect-metadata": "0.1.2",
-    "rxjs": "5.0.0-beta.4",
-    "sass-lint": "^1.7.0",
-    "systemjs": "0.19.24"
+    "karma-expect": "~1.1.3",
+    "karma-mocha": "~1.3.0",
+    "karma-phantomjs-launcher": "1.0.2",
+    "load-grunt-tasks": "3.5.2",
+    "mocha": "3.2.0",
+    "phantomjs-prebuilt": "^2.1.13",
+    "reflect-metadata": "0.1.8",
+    "rxjs": "^5.0.0-rc.5",
+    "sass-lint": "^1.10.2",
+    "systemjs": "0.19.41",
+    "zone.js": "^0.7.2"
   },
   "engines": {
     "node": "4.x",
@@ -67,21 +66,21 @@
   },
   "license": "Apache-2.0",
   "dependencies": {
-    "eventemitter3": "^1.2.0",
+    "eventemitter3": "^2.0.2",
     "gaze": "^1.1.2",
-    "grunt-jscs": "~1.5.x",
-    "grunt-sass-lint": "^0.2.0",
-    "grunt-sync": "^0.4.1",
-    "karma-sinon": "^1.0.3",
-    "lodash": "^2.4.1",
+    "grunt-jscs": "3.0.1",
+    "grunt-sass-lint": "^0.2.2",
+    "grunt-sync": "^0.6.2",
+    "karma-sinon": "^1.0.5",
+    "lodash": "^4.17.2",
     "mousetrap": "^1.6.0",
-    "remarkable": "^1.6.2",
-    "sinon": "1.16.1",
-    "systemjs-builder": "^0.15.13",
-    "tether": "^1.2.0",
+    "remarkable": "^1.7.1",
+    "sinon": "1.17.6",
+    "systemjs-builder": "^0.15.34",
+    "tether": "^1.4.0",
     "tether-drop": "^1.4.2",
-    "tslint": "^3.4.0",
-    "typescript": "^1.7.5",
+    "tslint": "^4.0.2",
+    "typescript": "^2.1.4",
     "virtual-scroll": "^1.1.1"
   }
 }

+ 2 - 3
packaging/publish/publish.sh

@@ -1,7 +1,6 @@
 #! /usr/bin/env bash
-
-deb_ver=3.1.1-1470047149
-rpm_ver=3.1.1-1470047149
+deb_ver=4.0.2-1481203731
+rpm_ver=4.0.2-1481203731
 
 wget https://grafanarel.s3.amazonaws.com/builds/grafana_${deb_ver}_amd64.deb
 

+ 6 - 1
pkg/api/alerting.go

@@ -103,6 +103,10 @@ func GetAlerts(c *middleware.Context) Response {
 
 // POST /api/alerts/test
 func AlertTest(c *middleware.Context, dto dtos.AlertTestCommand) Response {
+	if _, idErr := dto.Dashboard.Get("id").Int64(); idErr != nil {
+		return ApiError(400, "The dashboard needs to be saved at least once before you can test an alert rule", nil)
+	}
+
 	backendCmd := alerting.AlertTestCommand{
 		OrgId:     c.OrgId,
 		Dashboard: dto.Dashboard,
@@ -119,7 +123,8 @@ func AlertTest(c *middleware.Context, dto dtos.AlertTestCommand) Response {
 	res := backendCmd.Result
 
 	dtoRes := &dtos.AlertTestResult{
-		Firing: res.Firing,
+		Firing:         res.Firing,
+		ConditionEvals: res.ConditionEvals,
 	}
 
 	if res.Error != nil {

+ 4 - 1
pkg/api/api.go

@@ -113,6 +113,9 @@ func Register(r *macaron.Macaron) {
 
 			r.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword))
 			r.Get("/quotas", wrap(GetUserQuotas))
+			r.Put("/helpflags/:id", wrap(SetHelpFlag))
+			// For dev purpose
+			r.Get("/helpflags/clear", wrap(ClearHelpFlags))
 
 			r.Get("/preferences", wrap(GetUserPreferences))
 			r.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences))
@@ -193,7 +196,7 @@ func Register(r *macaron.Macaron) {
 		r.Group("/datasources", func() {
 			r.Get("/", GetDataSources)
 			r.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), AddDataSource)
-			r.Put("/:id", bind(m.UpdateDataSourceCommand{}), UpdateDataSource)
+			r.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
 			r.Delete("/:id", DeleteDataSource)
 			r.Get("/:id", wrap(GetDataSourceById))
 			r.Get("/name/:name", wrap(GetDataSourceByName))

+ 16 - 1
pkg/api/app_routes.go

@@ -1,6 +1,11 @@
 package api
 
 import (
+	"crypto/tls"
+	"net"
+	"net/http"
+	"time"
+
 	"gopkg.in/macaron.v1"
 
 	"github.com/grafana/grafana/pkg/api/pluginproxy"
@@ -11,6 +16,16 @@ import (
 	"github.com/grafana/grafana/pkg/util"
 )
 
+var pluginProxyTransport = &http.Transport{
+	TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+	Proxy:           http.ProxyFromEnvironment,
+	Dial: (&net.Dialer{
+		Timeout:   30 * time.Second,
+		KeepAlive: 30 * time.Second,
+	}).Dial,
+	TLSHandshakeTimeout: 10 * time.Second,
+}
+
 func InitAppPluginRoutes(r *macaron.Macaron) {
 	for _, plugin := range plugins.Apps {
 		for _, route := range plugin.Routes {
@@ -40,7 +55,7 @@ func AppPluginRoute(route *plugins.AppPluginRoute, appId string) macaron.Handler
 		path := c.Params("*")
 
 		proxy := pluginproxy.NewApiPluginProxy(c, path, route, appId)
-		proxy.Transport = dataProxyTransport
+		proxy.Transport = pluginProxyTransport
 		proxy.ServeHTTP(c.Resp, c.Req.Request)
 	}
 }

+ 78 - 28
pkg/api/cloudwatch/cloudwatch.go

@@ -33,6 +33,39 @@ type cwRequest struct {
 	DataSource *m.DataSource
 }
 
+type datasourceInfo struct {
+	Profile       string
+	Region        string
+	AssumeRoleArn string
+	Namespace     string
+
+	AccessKey string
+	SecretKey string
+}
+
+func (req *cwRequest) GetDatasourceInfo() *datasourceInfo {
+	assumeRoleArn := req.DataSource.JsonData.Get("assumeRoleArn").MustString()
+	accessKey := ""
+	secretKey := ""
+
+	for key, value := range req.DataSource.SecureJsonData.Decrypt() {
+		if key == "accessKey" {
+			accessKey = value
+		}
+		if key == "secretKey" {
+			secretKey = value
+		}
+	}
+
+	return &datasourceInfo{
+		AssumeRoleArn: assumeRoleArn,
+		Region:        req.Region,
+		Profile:       req.DataSource.Database,
+		AccessKey:     accessKey,
+		SecretKey:     secretKey,
+	}
+}
+
 func init() {
 	actionHandlers = map[string]actionHandler{
 		"GetMetricStatistics":     handleGetMetricStatistics,
@@ -56,8 +89,8 @@ type cache struct {
 var awsCredentialCache map[string]cache = make(map[string]cache)
 var credentialCacheLock sync.RWMutex
 
-func getCredentials(profile string, region string, assumeRoleArn string) *credentials.Credentials {
-	cacheKey := profile + ":" + assumeRoleArn
+func getCredentials(dsInfo *datasourceInfo) *credentials.Credentials {
+	cacheKey := dsInfo.Profile + ":" + dsInfo.AssumeRoleArn
 	credentialCacheLock.RLock()
 	if _, ok := awsCredentialCache[cacheKey]; ok {
 		if awsCredentialCache[cacheKey].expiration != nil &&
@@ -74,9 +107,9 @@ func getCredentials(profile string, region string, assumeRoleArn string) *creden
 	sessionToken := ""
 	var expiration *time.Time
 	expiration = nil
-	if strings.Index(assumeRoleArn, "arn:aws:iam:") == 0 {
+	if strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
 		params := &sts.AssumeRoleInput{
-			RoleArn:         aws.String(assumeRoleArn),
+			RoleArn:         aws.String(dsInfo.AssumeRoleArn),
 			RoleSessionName: aws.String("GrafanaSession"),
 			DurationSeconds: aws.Int64(900),
 		}
@@ -85,13 +118,14 @@ func getCredentials(profile string, region string, assumeRoleArn string) *creden
 		stsCreds := credentials.NewChainCredentials(
 			[]credentials.Provider{
 				&credentials.EnvProvider{},
-				&credentials.SharedCredentialsProvider{Filename: "", Profile: profile},
+				&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
 				&ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(stsSess), ExpiryWindow: 5 * time.Minute},
 			})
 		stsConfig := &aws.Config{
-			Region:      aws.String(region),
+			Region:      aws.String(dsInfo.Region),
 			Credentials: stsCreds,
 		}
+
 		svc := sts.New(session.New(stsConfig), stsConfig)
 		resp, err := svc.AssumeRole(params)
 		if err != nil {
@@ -115,9 +149,14 @@ func getCredentials(profile string, region string, assumeRoleArn string) *creden
 				SessionToken:    sessionToken,
 			}},
 			&credentials.EnvProvider{},
-			&credentials.SharedCredentialsProvider{Filename: "", Profile: profile},
+			&credentials.StaticProvider{Value: credentials.Value{
+				AccessKeyID:     dsInfo.AccessKey,
+				SecretAccessKey: dsInfo.SecretKey,
+			}},
+			&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
 			&ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute},
 		})
+
 	credentialCacheLock.Lock()
 	awsCredentialCache[cacheKey] = cache{
 		credential: creds,
@@ -129,10 +168,9 @@ func getCredentials(profile string, region string, assumeRoleArn string) *creden
 }
 
 func getAwsConfig(req *cwRequest) *aws.Config {
-	assumeRoleArn := req.DataSource.JsonData.Get("assumeRoleArn").MustString()
 	cfg := &aws.Config{
 		Region:      aws.String(req.Region),
-		Credentials: getCredentials(req.DataSource.Database, req.Region, assumeRoleArn),
+		Credentials: getCredentials(req.GetDatasourceInfo()),
 	}
 	return cfg
 }
@@ -143,25 +181,33 @@ func handleGetMetricStatistics(req *cwRequest, c *middleware.Context) {
 
 	reqParam := &struct {
 		Parameters struct {
-			Namespace  string                  `json:"namespace"`
-			MetricName string                  `json:"metricName"`
-			Dimensions []*cloudwatch.Dimension `json:"dimensions"`
-			Statistics []*string               `json:"statistics"`
-			StartTime  int64                   `json:"startTime"`
-			EndTime    int64                   `json:"endTime"`
-			Period     int64                   `json:"period"`
+			Namespace          string                  `json:"namespace"`
+			MetricName         string                  `json:"metricName"`
+			Dimensions         []*cloudwatch.Dimension `json:"dimensions"`
+			Statistics         []*string               `json:"statistics"`
+			ExtendedStatistics []*string               `json:"extendedStatistics"`
+			StartTime          int64                   `json:"startTime"`
+			EndTime            int64                   `json:"endTime"`
+			Period             int64                   `json:"period"`
 		} `json:"parameters"`
 	}{}
 	json.Unmarshal(req.Body, reqParam)
 
 	params := &cloudwatch.GetMetricStatisticsInput{
-		Namespace:  aws.String(reqParam.Parameters.Namespace),
-		MetricName: aws.String(reqParam.Parameters.MetricName),
-		Dimensions: reqParam.Parameters.Dimensions,
-		Statistics: reqParam.Parameters.Statistics,
-		StartTime:  aws.Time(time.Unix(reqParam.Parameters.StartTime, 0)),
-		EndTime:    aws.Time(time.Unix(reqParam.Parameters.EndTime, 0)),
-		Period:     aws.Int64(reqParam.Parameters.Period),
+		Namespace:          aws.String(reqParam.Parameters.Namespace),
+		MetricName:         aws.String(reqParam.Parameters.MetricName),
+		Dimensions:         reqParam.Parameters.Dimensions,
+		Statistics:         reqParam.Parameters.Statistics,
+		ExtendedStatistics: reqParam.Parameters.ExtendedStatistics,
+		StartTime:          aws.Time(time.Unix(reqParam.Parameters.StartTime, 0)),
+		EndTime:            aws.Time(time.Unix(reqParam.Parameters.EndTime, 0)),
+		Period:             aws.Int64(reqParam.Parameters.Period),
+	}
+	if len(reqParam.Parameters.Statistics) != 0 {
+		params.Statistics = reqParam.Parameters.Statistics
+	}
+	if len(reqParam.Parameters.ExtendedStatistics) != 0 {
+		params.ExtendedStatistics = reqParam.Parameters.ExtendedStatistics
 	}
 
 	resp, err := svc.GetMetricStatistics(params)
@@ -254,11 +300,12 @@ func handleDescribeAlarmsForMetric(req *cwRequest, c *middleware.Context) {
 
 	reqParam := &struct {
 		Parameters struct {
-			Namespace  string                  `json:"namespace"`
-			MetricName string                  `json:"metricName"`
-			Dimensions []*cloudwatch.Dimension `json:"dimensions"`
-			Statistic  string                  `json:"statistic"`
-			Period     int64                   `json:"period"`
+			Namespace         string                  `json:"namespace"`
+			MetricName        string                  `json:"metricName"`
+			Dimensions        []*cloudwatch.Dimension `json:"dimensions"`
+			Statistic         string                  `json:"statistic"`
+			ExtendedStatistic string                  `json:"extendedStatistic"`
+			Period            int64                   `json:"period"`
 		} `json:"parameters"`
 	}{}
 	json.Unmarshal(req.Body, reqParam)
@@ -274,6 +321,9 @@ func handleDescribeAlarmsForMetric(req *cwRequest, c *middleware.Context) {
 	if reqParam.Parameters.Statistic != "" {
 		params.Statistic = aws.String(reqParam.Parameters.Statistic)
 	}
+	if reqParam.Parameters.ExtendedStatistic != "" {
+		params.ExtendedStatistic = aws.String(reqParam.Parameters.ExtendedStatistic)
+	}
 
 	resp, err := svc.DescribeAlarmsForMetric(params)
 	if err != nil {

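The chain above is consulted in order: the assume-role session credentials first, then environment variables, then the access/secret key stored on the datasource, then the shared credentials file for the configured profile, and finally the EC2 instance role. A minimal sketch of the same pattern with the SDK calls used here (the STS provider is omitted and the key values are placeholders, not anything taken from this patch):

package main

import (
	"fmt"
	"time"

	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
	"github.com/aws/aws-sdk-go/aws/ec2metadata"
	"github.com/aws/aws-sdk-go/aws/session"
)

func main() {
	sess := session.New()

	// Providers are tried in order; the first one that can supply
	// credentials without an error wins.
	creds := credentials.NewChainCredentials([]credentials.Provider{
		&credentials.EnvProvider{},
		&credentials.StaticProvider{Value: credentials.Value{
			AccessKeyID:     "ACCESS_KEY_PLACEHOLDER",
			SecretAccessKey: "SECRET_KEY_PLACEHOLDER",
		}},
		&credentials.SharedCredentialsProvider{Filename: "", Profile: "default"},
		&ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute},
	})

	if v, err := creds.Get(); err == nil {
		fmt.Println("credentials resolved by", v.ProviderName)
	}
}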
+ 44 - 40
pkg/api/cloudwatch/metrics.go

@@ -192,8 +192,10 @@ func handleGetMetrics(req *cwRequest, c *middleware.Context) {
 		}
 	} else {
 		var err error
-		assumeRoleArn := req.DataSource.JsonData.Get("assumeRoleArn").MustString()
-		if namespaceMetrics, err = getMetricsForCustomMetrics(req.Region, reqParam.Parameters.Namespace, req.DataSource.Database, assumeRoleArn, getAllMetrics); err != nil {
+		cwData := req.GetDatasourceInfo()
+		cwData.Namespace = reqParam.Parameters.Namespace
+
+		if namespaceMetrics, err = getMetricsForCustomMetrics(cwData, getAllMetrics); err != nil {
 			c.JsonApiErr(500, "Unable to call AWS API", err)
 			return
 		}
@@ -226,8 +228,10 @@ func handleGetDimensions(req *cwRequest, c *middleware.Context) {
 		}
 	} else {
 		var err error
-		assumeRoleArn := req.DataSource.JsonData.Get("assumeRoleArn").MustString()
-		if dimensionValues, err = getDimensionsForCustomMetrics(req.Region, reqParam.Parameters.Namespace, req.DataSource.Database, assumeRoleArn, getAllMetrics); err != nil {
+		dsInfo := req.GetDatasourceInfo()
+		dsInfo.Namespace = reqParam.Parameters.Namespace
+
+		if dimensionValues, err = getDimensionsForCustomMetrics(dsInfo, getAllMetrics); err != nil {
 			c.JsonApiErr(500, "Unable to call AWS API", err)
 			return
 		}
@@ -242,16 +246,16 @@ func handleGetDimensions(req *cwRequest, c *middleware.Context) {
 	c.JSON(200, result)
 }
 
-func getAllMetrics(region string, namespace string, database string, assumeRoleArn string) (cloudwatch.ListMetricsOutput, error) {
+func getAllMetrics(cwData *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
 	cfg := &aws.Config{
-		Region:      aws.String(region),
-		Credentials: getCredentials(database, region, assumeRoleArn),
+		Region:      aws.String(cwData.Region),
+		Credentials: getCredentials(cwData),
 	}
 
 	svc := cloudwatch.New(session.New(cfg), cfg)
 
 	params := &cloudwatch.ListMetricsInput{
-		Namespace: aws.String(namespace),
+		Namespace: aws.String(cwData.Namespace),
 	}
 
 	var resp cloudwatch.ListMetricsOutput
@@ -272,8 +276,8 @@ func getAllMetrics(region string, namespace string, database string, assumeRoleA
 
 var metricsCacheLock sync.Mutex
 
-func getMetricsForCustomMetrics(region string, namespace string, database string, assumeRoleArn string, getAllMetrics func(string, string, string, string) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
-	result, err := getAllMetrics(region, namespace, database, assumeRoleArn)
+func getMetricsForCustomMetrics(dsInfo *datasourceInfo, getAllMetrics func(*datasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
+	result, err := getAllMetrics(dsInfo)
 	if err != nil {
 		return []string{}, err
 	}
@@ -281,37 +285,37 @@ func getMetricsForCustomMetrics(region string, namespace string, database string
 	metricsCacheLock.Lock()
 	defer metricsCacheLock.Unlock()
 
-	if _, ok := customMetricsMetricsMap[database]; !ok {
-		customMetricsMetricsMap[database] = make(map[string]map[string]*CustomMetricsCache)
+	if _, ok := customMetricsMetricsMap[dsInfo.Profile]; !ok {
+		customMetricsMetricsMap[dsInfo.Profile] = make(map[string]map[string]*CustomMetricsCache)
 	}
-	if _, ok := customMetricsMetricsMap[database][region]; !ok {
-		customMetricsMetricsMap[database][region] = make(map[string]*CustomMetricsCache)
+	if _, ok := customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region]; !ok {
+		customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region] = make(map[string]*CustomMetricsCache)
 	}
-	if _, ok := customMetricsMetricsMap[database][region][namespace]; !ok {
-		customMetricsMetricsMap[database][region][namespace] = &CustomMetricsCache{}
-		customMetricsMetricsMap[database][region][namespace].Cache = make([]string, 0)
+	if _, ok := customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace]; !ok {
+		customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace] = &CustomMetricsCache{}
+		customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = make([]string, 0)
 	}
 
-	if customMetricsMetricsMap[database][region][namespace].Expire.After(time.Now()) {
-		return customMetricsMetricsMap[database][region][namespace].Cache, nil
+	if customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Expire.After(time.Now()) {
+		return customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, nil
 	}
-	customMetricsMetricsMap[database][region][namespace].Cache = make([]string, 0)
-	customMetricsMetricsMap[database][region][namespace].Expire = time.Now().Add(5 * time.Minute)
+	customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = make([]string, 0)
+	customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Expire = time.Now().Add(5 * time.Minute)
 
 	for _, metric := range result.Metrics {
-		if isDuplicate(customMetricsMetricsMap[database][region][namespace].Cache, *metric.MetricName) {
+		if isDuplicate(customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, *metric.MetricName) {
 			continue
 		}
-		customMetricsMetricsMap[database][region][namespace].Cache = append(customMetricsMetricsMap[database][region][namespace].Cache, *metric.MetricName)
+		customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = append(customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, *metric.MetricName)
 	}
 
-	return customMetricsMetricsMap[database][region][namespace].Cache, nil
+	return customMetricsMetricsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, nil
 }
 
 var dimensionsCacheLock sync.Mutex
 
-func getDimensionsForCustomMetrics(region string, namespace string, database string, assumeRoleArn string, getAllMetrics func(string, string, string, string) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
-	result, err := getAllMetrics(region, namespace, database, assumeRoleArn)
+func getDimensionsForCustomMetrics(dsInfo *datasourceInfo, getAllMetrics func(*datasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
+	result, err := getAllMetrics(dsInfo)
 	if err != nil {
 		return []string{}, err
 	}
@@ -319,33 +323,33 @@ func getDimensionsForCustomMetrics(region string, namespace string, database str
 	dimensionsCacheLock.Lock()
 	defer dimensionsCacheLock.Unlock()
 
-	if _, ok := customMetricsDimensionsMap[database]; !ok {
-		customMetricsDimensionsMap[database] = make(map[string]map[string]*CustomMetricsCache)
+	if _, ok := customMetricsDimensionsMap[dsInfo.Profile]; !ok {
+		customMetricsDimensionsMap[dsInfo.Profile] = make(map[string]map[string]*CustomMetricsCache)
 	}
-	if _, ok := customMetricsDimensionsMap[database][region]; !ok {
-		customMetricsDimensionsMap[database][region] = make(map[string]*CustomMetricsCache)
+	if _, ok := customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region]; !ok {
+		customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region] = make(map[string]*CustomMetricsCache)
 	}
-	if _, ok := customMetricsDimensionsMap[database][region][namespace]; !ok {
-		customMetricsDimensionsMap[database][region][namespace] = &CustomMetricsCache{}
-		customMetricsDimensionsMap[database][region][namespace].Cache = make([]string, 0)
+	if _, ok := customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace]; !ok {
+		customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace] = &CustomMetricsCache{}
+		customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = make([]string, 0)
 	}
 
-	if customMetricsDimensionsMap[database][region][namespace].Expire.After(time.Now()) {
-		return customMetricsDimensionsMap[database][region][namespace].Cache, nil
+	if customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Expire.After(time.Now()) {
+		return customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, nil
 	}
-	customMetricsDimensionsMap[database][region][namespace].Cache = make([]string, 0)
-	customMetricsDimensionsMap[database][region][namespace].Expire = time.Now().Add(5 * time.Minute)
+	customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = make([]string, 0)
+	customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Expire = time.Now().Add(5 * time.Minute)
 
 	for _, metric := range result.Metrics {
 		for _, dimension := range metric.Dimensions {
-			if isDuplicate(customMetricsDimensionsMap[database][region][namespace].Cache, *dimension.Name) {
+			if isDuplicate(customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, *dimension.Name) {
 				continue
 			}
-			customMetricsDimensionsMap[database][region][namespace].Cache = append(customMetricsDimensionsMap[database][region][namespace].Cache, *dimension.Name)
+			customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache = append(customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, *dimension.Name)
 		}
 	}
 
-	return customMetricsDimensionsMap[database][region][namespace].Cache, nil
+	return customMetricsDimensionsMap[dsInfo.Profile][dsInfo.Region][dsInfo.Namespace].Cache, nil
 }
 
 func isDuplicate(nameList []string, target string) bool {

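The substantive change to the caching here is the outer key: dsInfo.Profile replaces the old Database value, while the structure stays a nested map keyed by profile, region and namespace whose entries hold a name list and a five-minute expiry, guarded by a package-level mutex. A stripped-down sketch of that expiring-cache pattern (type and function names are illustrative, not the patch's own):

package cloudwatch

import (
	"sync"
	"time"
)

// cacheEntry mirrors the shape used above: a cached name list plus an expiry.
type cacheEntry struct {
	Cache  []string
	Expire time.Time
}

var (
	exampleCacheLock sync.Mutex
	exampleCache     = map[string]*cacheEntry{}
)

// cachedNames refreshes the entry for key via fetch when it is missing or
// older than five minutes; otherwise it returns the cached list unchanged.
func cachedNames(key string, fetch func() []string) []string {
	exampleCacheLock.Lock()
	defer exampleCacheLock.Unlock()

	if entry, ok := exampleCache[key]; ok && entry.Expire.After(time.Now()) {
		return entry.Cache
	}

	entry := &cacheEntry{
		Cache:  fetch(),
		Expire: time.Now().Add(5 * time.Minute),
	}
	exampleCache[key] = entry
	return entry.Cache
}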
+ 16 - 12
pkg/api/cloudwatch/metrics_test.go

@@ -11,11 +11,13 @@ import (
 func TestCloudWatchMetrics(t *testing.T) {
 
 	Convey("When calling getMetricsForCustomMetrics", t, func() {
-		region := "us-east-1"
-		namespace := "Foo"
-		database := "default"
-		assumeRoleArn := ""
-		f := func(region string, namespace string, database string, assumeRoleArn string) (cloudwatch.ListMetricsOutput, error) {
+		dsInfo := &datasourceInfo{
+			Region:        "us-east-1",
+			Namespace:     "Foo",
+			Profile:       "default",
+			AssumeRoleArn: "",
+		}
+		f := func(dsInfo *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
 			return cloudwatch.ListMetricsOutput{
 				Metrics: []*cloudwatch.Metric{
 					{
@@ -29,7 +31,7 @@ func TestCloudWatchMetrics(t *testing.T) {
 				},
 			}, nil
 		}
-		metrics, _ := getMetricsForCustomMetrics(region, namespace, database, assumeRoleArn, f)
+		metrics, _ := getMetricsForCustomMetrics(dsInfo, f)
 
 		Convey("Should contain Test_MetricName", func() {
 			So(metrics, ShouldContain, "Test_MetricName")
@@ -37,11 +39,13 @@ func TestCloudWatchMetrics(t *testing.T) {
 	})
 
 	Convey("When calling getDimensionsForCustomMetrics", t, func() {
-		region := "us-east-1"
-		namespace := "Foo"
-		database := "default"
-		assumeRoleArn := ""
-		f := func(region string, namespace string, database string, assumeRoleArn string) (cloudwatch.ListMetricsOutput, error) {
+		dsInfo := &datasourceInfo{
+			Region:        "us-east-1",
+			Namespace:     "Foo",
+			Profile:       "default",
+			AssumeRoleArn: "",
+		}
+		f := func(dsInfo *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
 			return cloudwatch.ListMetricsOutput{
 				Metrics: []*cloudwatch.Metric{
 					{
@@ -55,7 +59,7 @@ func TestCloudWatchMetrics(t *testing.T) {
 				},
 			}, nil
 		}
-		dimensionKeys, _ := getDimensionsForCustomMetrics(region, namespace, database, assumeRoleArn, f)
+		dimensionKeys, _ := getDimensionsForCustomMetrics(dsInfo, f)
 
 		Convey("Should contain Test_DimensionName", func() {
 			So(dimensionKeys, ShouldContain, "Test_DimensionName")

+ 34 - 0
pkg/api/dashboard.go

@@ -8,6 +8,7 @@ import (
 
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/metrics"
 	"github.com/grafana/grafana/pkg/middleware"
@@ -121,6 +122,10 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {
 	}
 
 	dash := cmd.GetDashboardModel()
+	// Check if Title is empty
+	if dash.Title == "" {
+		return ApiError(400, m.ErrDashboardTitleEmpty.Error(), nil)
+	}
 	if dash.Id == 0 {
 		limitReached, err := middleware.QuotaReached(c, "dashboard")
 		if err != nil {
@@ -131,6 +136,16 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {
 		}
 	}
 
+	validateAlertsCmd := alerting.ValidateDashboardAlertsCommand{
+		OrgId:     c.OrgId,
+		UserId:    c.UserId,
+		Dashboard: dash,
+	}
+
+	if err := bus.Dispatch(&validateAlertsCmd); err != nil {
+		return ApiError(500, "Invalid alert data. Cannot save dashboard", err)
+	}
+
 	err := bus.Dispatch(&cmd)
 	if err != nil {
 		if err == m.ErrDashboardWithSameNameExists {
@@ -202,9 +217,28 @@ func GetHomeDashboard(c *middleware.Context) Response {
 		return ApiError(500, "Failed to load home dashboard", err)
 	}
 
+	if c.HasUserRole(m.ROLE_ADMIN) && !c.HasHelpFlag(m.HelpFlagGettingStartedPanelDismissed) {
+		addGettingStartedPanelToHomeDashboard(dash.Dashboard)
+	}
+
 	return Json(200, &dash)
 }
 
+func addGettingStartedPanelToHomeDashboard(dash *simplejson.Json) {
+	rows := dash.Get("rows").MustArray()
+	row := simplejson.NewFromAny(rows[0])
+
+	newpanel := simplejson.NewFromAny(map[string]interface{}{
+		"type": "gettingstarted",
+		"id":   123123,
+		"span": 12,
+	})
+
+	panels := row.Get("panels").MustArray()
+	panels = append(panels, newpanel)
+	row.Set("panels", panels)
+}
+
 func GetDashboardFromJsonFile(c *middleware.Context) {
 	file := c.Params(":file")
 

+ 5 - 13
pkg/api/dataproxy.go

@@ -1,8 +1,6 @@
 package api
 
 import (
-	"crypto/tls"
-	"net"
 	"net/http"
 	"net/http/httputil"
 	"net/url"
@@ -17,16 +15,6 @@ import (
 	"github.com/grafana/grafana/pkg/util"
 )
 
-var dataProxyTransport = &http.Transport{
-	TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
-	Proxy:           http.ProxyFromEnvironment,
-	Dial: (&net.Dialer{
-		Timeout:   30 * time.Second,
-		KeepAlive: 30 * time.Second,
-	}).Dial,
-	TLSHandshakeTimeout: 10 * time.Second,
-}
-
 func NewReverseProxy(ds *m.DataSource, proxyPath string, targetUrl *url.URL) *httputil.ReverseProxy {
 	director := func(req *http.Request) {
 		req.URL.Scheme = targetUrl.Scheme
@@ -128,7 +116,11 @@ func ProxyDataSourceRequest(c *middleware.Context) {
 	}
 
 	proxy := NewReverseProxy(ds, proxyPath, targetUrl)
-	proxy.Transport = dataProxyTransport
+	proxy.Transport, err = ds.GetHttpTransport()
+	if err != nil {
+		c.JsonApiErr(400, "Unable to load TLS certificate", err)
+		return
+	}
 	proxy.ServeHTTP(c.Resp, c.Req.Request)
 	c.Resp.Header().Del("Set-Cookie")
 }

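With the package-level dataProxyTransport gone, every datasource now gets its own transport from ds.GetHttpTransport() (added in pkg/models/datasource_cache.go further down). That is what makes per-datasource TLS client certificates possible, and it also means a failure to load those certificates is reported back to the caller as a 400 instead of the proxy falling back to a shared, certificate-less transport.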
+ 7 - 4
pkg/api/dataproxy_test.go

@@ -11,11 +11,16 @@ import (
 )
 
 func TestDataSourceProxy(t *testing.T) {
-
 	Convey("When getting graphite datasource proxy", t, func() {
 		ds := m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
-		targetUrl, _ := url.Parse(ds.Url)
+		targetUrl, err := url.Parse(ds.Url)
 		proxy := NewReverseProxy(&ds, "/render", targetUrl)
+		proxy.Transport, err = ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		transport, ok := proxy.Transport.(*http.Transport)
+		So(ok, ShouldBeTrue)
+		So(transport.TLSClientConfig.InsecureSkipVerify, ShouldBeTrue)
 
 		requestUrl, _ := url.Parse("http://grafana.com/sub")
 		req := http.Request{URL: requestUrl}
@@ -54,7 +59,5 @@ func TestDataSourceProxy(t *testing.T) {
 			So(queryVals["u"][0], ShouldEqual, "user")
 			So(queryVals["p"][0], ShouldEqual, "password")
 		})
-
 	})
-
 }

+ 60 - 8
pkg/api/datasources.go

@@ -5,10 +5,9 @@ import (
 
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
-	"github.com/grafana/grafana/pkg/plugins"
-	//"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/middleware"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/util"
 )
 
@@ -104,17 +103,56 @@ func AddDataSource(c *middleware.Context, cmd m.AddDataSourceCommand) {
 	c.JSON(200, util.DynMap{"message": "Datasource added", "id": cmd.Result.Id})
 }
 
-func UpdateDataSource(c *middleware.Context, cmd m.UpdateDataSourceCommand) {
+func UpdateDataSource(c *middleware.Context, cmd m.UpdateDataSourceCommand) Response {
 	cmd.OrgId = c.OrgId
 	cmd.Id = c.ParamsInt64(":id")
 
-	err := bus.Dispatch(&cmd)
+	err := fillWithSecureJsonData(&cmd)
 	if err != nil {
-		c.JsonApiErr(500, "Failed to update datasource", err)
-		return
+		return ApiError(500, "Failed to update datasource", err)
+	}
+
+	err = bus.Dispatch(&cmd)
+	if err != nil {
+		return ApiError(500, "Failed to update datasource", err)
+	}
+
+	return Json(200, util.DynMap{"message": "Datasource updated"})
+}
+
+func fillWithSecureJsonData(cmd *m.UpdateDataSourceCommand) error {
+	if len(cmd.SecureJsonData) == 0 {
+		return nil
+	}
+
+	ds, err := getRawDataSourceById(cmd.Id, cmd.OrgId)
+
+	if err != nil {
+		return err
+	}
+	secureJsonData := ds.SecureJsonData.Decrypt()
+
+	for k, v := range secureJsonData {
+
+		if _, ok := cmd.SecureJsonData[k]; !ok {
+			cmd.SecureJsonData[k] = v
+		}
+	}
+
+	return nil
+}
+
+func getRawDataSourceById(id int64, orgId int64) (*m.DataSource, error) {
+	query := m.GetDataSourceByIdQuery{
+		Id:    id,
+		OrgId: orgId,
+	}
+
+	if err := bus.Dispatch(&query); err != nil {
+		return nil, err
 	}
 
-	c.JsonOK("Datasource updated")
+	return query.Result, nil
 }
 
 // Get /api/datasources/name/:name
@@ -152,7 +190,7 @@ func GetDataSourceIdByName(c *middleware.Context) Response {
 }
 
 func convertModelToDtos(ds *m.DataSource) dtos.DataSource {
-	return dtos.DataSource{
+	dto := dtos.DataSource{
 		Id:                ds.Id,
 		OrgId:             ds.OrgId,
 		Name:              ds.Name,
@@ -169,4 +207,18 @@ func convertModelToDtos(ds *m.DataSource) dtos.DataSource {
 		IsDefault:         ds.IsDefault,
 		JsonData:          ds.JsonData,
 	}
+
+	if len(ds.SecureJsonData) > 0 {
+		dto.TLSAuth.CACertSet = len(ds.SecureJsonData["tlsCACert"]) > 0
+		dto.TLSAuth.ClientCertSet = len(ds.SecureJsonData["tlsClientCert"]) > 0
+		dto.TLSAuth.ClientKeySet = len(ds.SecureJsonData["tlsClientKey"]) > 0
+	}
+
+	for k, v := range ds.SecureJsonData {
+		if len(v) > 0 {
+			dto.EncryptedFields = append(dto.EncryptedFields, k)
+		}
+	}
+
+	return dto
 }

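fillWithSecureJsonData exists so that an update request which omits a secret does not wipe it: keys supplied by the caller win, and every stored secret the request left out is decrypted and carried over onto the command. A small self-contained sketch of that merge (the key names match the tls* fields used above; the values are placeholders):

package main

import "fmt"

func main() {
	// Secrets already stored (and decrypted) for the datasource.
	stored := map[string]string{"tlsClientCert": "OLD-CERT", "tlsClientKey": "OLD-KEY"}
	// Secrets sent in the update request; only the key is being replaced.
	update := map[string]string{"tlsClientKey": "NEW-KEY"}

	// Same merge as fillWithSecureJsonData: request values win,
	// stored values fill in anything the request left out.
	for k, v := range stored {
		if _, ok := update[k]; !ok {
			update[k] = v
		}
	}

	fmt.Println(update) // map[tlsClientCert:OLD-CERT tlsClientKey:NEW-KEY]
}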
+ 6 - 5
pkg/api/dtos/alerting.go

@@ -35,11 +35,12 @@ type AlertTestCommand struct {
 }
 
 type AlertTestResult struct {
-	Firing      bool                  `json:"firing"`
-	TimeMs      string                `json:"timeMs"`
-	Error       string                `json:"error,omitempty"`
-	EvalMatches []*EvalMatch          `json:"matches,omitempty"`
-	Logs        []*AlertTestResultLog `json:"logs,omitempty"`
+	Firing         bool                  `json:"firing"`
+	ConditionEvals string                `json:"conditionEvals"`
+	TimeMs         string                `json:"timeMs"`
+	Error          string                `json:"error,omitempty"`
+	EvalMatches    []*EvalMatch          `json:"matches,omitempty"`
+	Logs           []*AlertTestResultLog `json:"logs,omitempty"`
 }
 
 type AlertTestResultLog struct {

+ 23 - 13
pkg/api/dtos/models.go

@@ -22,19 +22,20 @@ type LoginCommand struct {
 }
 
 type CurrentUser struct {
-	IsSignedIn     bool       `json:"isSignedIn"`
-	Id             int64      `json:"id"`
-	Login          string     `json:"login"`
-	Email          string     `json:"email"`
-	Name           string     `json:"name"`
-	LightTheme     bool       `json:"lightTheme"`
-	OrgId          int64      `json:"orgId"`
-	OrgName        string     `json:"orgName"`
-	OrgRole        m.RoleType `json:"orgRole"`
-	IsGrafanaAdmin bool       `json:"isGrafanaAdmin"`
-	GravatarUrl    string     `json:"gravatarUrl"`
-	Timezone       string     `json:"timezone"`
-	Locale         string     `json:"locale"`
+	IsSignedIn     bool         `json:"isSignedIn"`
+	Id             int64        `json:"id"`
+	Login          string       `json:"login"`
+	Email          string       `json:"email"`
+	Name           string       `json:"name"`
+	LightTheme     bool         `json:"lightTheme"`
+	OrgId          int64        `json:"orgId"`
+	OrgName        string       `json:"orgName"`
+	OrgRole        m.RoleType   `json:"orgRole"`
+	IsGrafanaAdmin bool         `json:"isGrafanaAdmin"`
+	GravatarUrl    string       `json:"gravatarUrl"`
+	Timezone       string       `json:"timezone"`
+	Locale         string       `json:"locale"`
+	HelpFlags1     m.HelpFlags1 `json:"helpFlags1"`
 }
 
 type DashboardMeta struct {
@@ -80,6 +81,15 @@ type DataSource struct {
 	WithCredentials   bool             `json:"withCredentials"`
 	IsDefault         bool             `json:"isDefault"`
 	JsonData          *simplejson.Json `json:"jsonData,omitempty"`
+	TLSAuth           TLSAuth          `json:"tlsAuth,omitempty"`
+	EncryptedFields   []string         `json:"encryptedFields"`
+}
+
+// TLSAuth is used to show if TLS certs have been uploaded already
+type TLSAuth struct {
+	CACertSet     bool `json:"tlsCACertSet"`
+	ClientCertSet bool `json:"tlsClientCertSet"`
+	ClientKeySet  bool `json:"tlsClientKeySet"`
 }
 
 type DataSourceList []DataSource

+ 7 - 6
pkg/api/frontendsettings.go

@@ -122,12 +122,13 @@ func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, erro
 	panels := map[string]interface{}{}
 	for _, panel := range enabledPlugins.Panels {
 		panels[panel.Id] = map[string]interface{}{
-			"module":  panel.Module,
-			"baseUrl": panel.BaseUrl,
-			"name":    panel.Name,
-			"id":      panel.Id,
-			"info":    panel.Info,
-			"sort":    getPanelSort(panel.Id),
+			"module":       panel.Module,
+			"baseUrl":      panel.BaseUrl,
+			"name":         panel.Name,
+			"id":           panel.Id,
+			"info":         panel.Info,
+			"hideFromList": panel.HideFromList,
+			"sort":         getPanelSort(panel.Id),
 		}
 	}
 

+ 1 - 0
pkg/api/index.go

@@ -58,6 +58,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
 			LightTheme:     prefs.Theme == "light",
 			Timezone:       prefs.Timezone,
 			Locale:         locale,
+			HelpFlags1:     c.HelpFlags1,
 		},
 		Settings:                settings,
 		AppUrl:                  appUrl,

+ 8 - 3
pkg/api/metrics.go

@@ -8,6 +8,7 @@ import (
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/metrics"
 	"github.com/grafana/grafana/pkg/middleware"
+	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/tsdb/testdata"
 	"github.com/grafana/grafana/pkg/util"
@@ -25,9 +26,9 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response {
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
 			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
 			Model:         query,
-			DataSource: &tsdb.DataSourceInfo{
-				Name:     "Grafana TestDataDB",
-				PluginId: "grafana-testdata-datasource",
+			DataSource: &models.DataSource{
+				Name: "Grafana TestDataDB",
+				Type: "grafana-testdata-datasource",
 			},
 		})
 	}
@@ -69,6 +70,10 @@ func GetInternalMetrics(c *middleware.Context) Response {
 		metricName := m.Name() + m.StringifyTags()
 
 		switch metric := m.(type) {
+		case metrics.Gauge:
+			resp[metricName] = map[string]interface{}{
+				"value": metric.Value(),
+			}
 		case metrics.Counter:
 			resp[metricName] = map[string]interface{}{
 				"count": metric.Count(),

+ 3 - 0
pkg/api/org.go

@@ -152,6 +152,9 @@ func updateOrgAddressHelper(form dtos.UpdateOrgAddressForm, orgId int64) Respons
 // GET /api/orgs/:orgId
 func DeleteOrgById(c *middleware.Context) Response {
 	if err := bus.Dispatch(&m.DeleteOrgCommand{Id: c.ParamsInt64(":orgId")}); err != nil {
+		if err == m.ErrOrgNotFound {
+			return ApiError(404, "Failed to delete organization. ID not found", nil)
+		}
 		return ApiError(500, "Failed to update organization", err)
 	}
 	return ApiSuccess("Organization deleted")

+ 3 - 0
pkg/api/org_users.go

@@ -38,6 +38,9 @@ func addOrgUserHelper(cmd m.AddOrgUserCommand) Response {
 	cmd.UserId = userToAdd.Id
 
 	if err := bus.Dispatch(&cmd); err != nil {
+		if err == m.ErrOrgUserAlreadyAdded {
+			return ApiError(409, "User is already a member of this organization", nil)

+		}
 		return ApiError(500, "Could not add user to organization", err)
 	}
 

+ 34 - 2
pkg/api/user.go

@@ -157,8 +157,9 @@ func ChangeUserPassword(c *middleware.Context, cmd m.ChangeUserPasswordCommand)
 		return ApiError(401, "Invalid old password", nil)
 	}
 
-	if len(cmd.NewPassword) < 4 {
-		return ApiError(400, "New password too short", nil)
+	password := m.Password(cmd.NewPassword)
+	if password.IsWeak() {
+		return ApiError(400, "New password is too short", nil)
 	}
 
 	cmd.UserId = c.UserId
@@ -180,3 +181,34 @@ func SearchUsers(c *middleware.Context) Response {
 
 	return Json(200, query.Result)
 }
+
+func SetHelpFlag(c *middleware.Context) Response {
+	flag := c.ParamsInt64(":id")
+
+	bitmask := &c.HelpFlags1
+	bitmask.AddFlag(m.HelpFlags1(flag))
+
+	cmd := m.SetUserHelpFlagCommand{
+		UserId:     c.UserId,
+		HelpFlags1: *bitmask,
+	}
+
+	if err := bus.Dispatch(&cmd); err != nil {
+		return ApiError(500, "Failed to update help flag", err)
+	}
+
+	return Json(200, &util.DynMap{"message": "Help flag set", "helpFlags1": cmd.HelpFlags1})
+}
+
+func ClearHelpFlags(c *middleware.Context) Response {
+	cmd := m.SetUserHelpFlagCommand{
+		UserId:     c.UserId,
+		HelpFlags1: m.HelpFlags1(0),
+	}
+
+	if err := bus.Dispatch(&cmd); err != nil {
+		return ApiError(500, "Failed to update help flag", err)
+	}
+
+	return Json(200, &util.DynMap{"message": "Help flag set", "helpFlags1": cmd.HelpFlags1})
+}

+ 52 - 8
pkg/cmd/grafana-cli/commands/commands.go

@@ -1,14 +1,45 @@
 package commands
 
 import (
+	"flag"
 	"os"
 
 	"github.com/codegangsta/cli"
 	"github.com/fatih/color"
 	"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
+	"github.com/grafana/grafana/pkg/services/sqlstore"
+	"github.com/grafana/grafana/pkg/setting"
 )
 
-func runCommand(command func(commandLine CommandLine) error) func(context *cli.Context) {
+var configFile = flag.String("config", "", "path to config file")
+var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory")
+
+func runDbCommand(command func(commandLine CommandLine) error) func(context *cli.Context) {
+	return func(context *cli.Context) {
+
+		flag.Parse()
+		setting.NewConfigContext(&setting.CommandLineArgs{
+			Config:   *configFile,
+			HomePath: *homePath,
+			Args:     flag.Args(),
+		})
+
+		sqlstore.NewEngine()
+
+		cmd := &contextCommandLine{context}
+		if err := command(cmd); err != nil {
+			logger.Errorf("\n%s: ", color.RedString("Error"))
+			logger.Errorf("%s\n\n", err)
+
+			cmd.ShowHelp()
+			os.Exit(1)
+		} else {
+			logger.Info("\n\n")
+		}
+	}
+}
+
+func runPluginCommand(command func(commandLine CommandLine) error) func(context *cli.Context) {
 	return func(context *cli.Context) {
 
 		cmd := &contextCommandLine{context}
@@ -28,34 +59,42 @@ var pluginCommands = []cli.Command{
 	{
 		Name:   "install",
 		Usage:  "install <plugin id> <plugin version (optional)>",
-		Action: runCommand(installCommand),
+		Action: runPluginCommand(installCommand),
 	}, {
 		Name:   "list-remote",
 		Usage:  "list remote available plugins",
-		Action: runCommand(listremoteCommand),
+		Action: runPluginCommand(listremoteCommand),
 	}, {
 		Name:   "list-versions",
 		Usage:  "list-versions <plugin id>",
-		Action: runCommand(listversionsCommand),
+		Action: runPluginCommand(listversionsCommand),
 	}, {
 		Name:    "update",
 		Usage:   "update <plugin id>",
 		Aliases: []string{"upgrade"},
-		Action:  runCommand(upgradeCommand),
+		Action:  runPluginCommand(upgradeCommand),
 	}, {
 		Name:    "update-all",
 		Aliases: []string{"upgrade-all"},
 		Usage:   "update all your installed plugins",
-		Action:  runCommand(upgradeAllCommand),
+		Action:  runPluginCommand(upgradeAllCommand),
 	}, {
 		Name:   "ls",
 		Usage:  "list all installed plugins",
-		Action: runCommand(lsCommand),
+		Action: runPluginCommand(lsCommand),
 	}, {
 		Name:    "uninstall",
 		Aliases: []string{"remove"},
 		Usage:   "uninstall <plugin id>",
-		Action:  runCommand(removeCommand),
+		Action:  runPluginCommand(removeCommand),
+	},
+}
+
+var adminCommands = []cli.Command{
+	{
+		Name:   "reset-admin-password",
+		Usage:  "reset-admin-password <new password>",
+		Action: runDbCommand(resetPasswordCommand),
 	},
 }
 
@@ -65,4 +104,9 @@ var Commands = []cli.Command{
 		Usage:       "Manage plugins for grafana",
 		Subcommands: pluginCommands,
 	},
+	{
+		Name:        "admin",
+		Usage:       "Grafana admin commands",
+		Subcommands: adminCommands,
+	},
 }

+ 44 - 0
pkg/cmd/grafana-cli/commands/reset_password_command.go

@@ -0,0 +1,44 @@
+package commands
+
+import (
+	"fmt"
+
+	"github.com/fatih/color"
+	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/util"
+)
+
+const AdminUserId = 1
+
+func resetPasswordCommand(c CommandLine) error {
+	newPassword := c.Args().First()
+
+	password := models.Password(newPassword)
+	if password.IsWeak() {
+		return fmt.Errorf("New password is too short")
+	}
+
+	userQuery := models.GetUserByIdQuery{Id: AdminUserId}
+
+	if err := bus.Dispatch(&userQuery); err != nil {
+		return fmt.Errorf("Could not read user from database. Error: %v", err)
+	}
+
+	passwordHashed := util.EncodePassword(newPassword, userQuery.Result.Salt)
+
+	cmd := models.ChangeUserPasswordCommand{
+		UserId:      AdminUserId,
+		NewPassword: passwordHashed,
+	}
+
+	if err := bus.Dispatch(&cmd); err != nil {
+		return fmt.Errorf("Failed to update user password")
+	}
+
+	logger.Infof("\n")
+	logger.Infof("Admin password changed successfully %s", color.GreenString("✔"))
+
+	return nil
+}

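Together with the admin command group registered in commands.go above, this is invoked as grafana-cli admin reset-admin-password <new password>. runDbCommand first loads the configuration (honouring the new config and homepath flags) and opens the SQL store, and the command always targets the user with id 1 (AdminUserId), i.e. the initially created admin account.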
+ 1 - 1
pkg/cmd/grafana-server/main.go

@@ -27,7 +27,7 @@ import (
 	_ "github.com/grafana/grafana/pkg/tsdb/testdata"
 )
 
-var version = "3.1.0"
+var version = "4.1.0"
 var commit = "NA"
 var buildstamp string
 var build_date string

+ 5 - 4
pkg/cmd/grafana-server/web.go

@@ -46,14 +46,15 @@ func newMacaron() *macaron.Macaron {
 		Delims:     macaron.Delims{Left: "[[", Right: "]]"},
 	}))
 
-	if setting.EnforceDomain {
-		m.Use(middleware.ValidateHostHeader(setting.Domain))
-	}
-
 	m.Use(middleware.GetContextHandler())
 	m.Use(middleware.Sessioner(&setting.SessionOptions))
 	m.Use(middleware.RequestMetrics())
 
+	// needs to be after context handler
+	if setting.EnforceDomain {
+		m.Use(middleware.ValidateHostHeader(setting.Domain))
+	}
+
 	return m
 }
 

+ 6 - 4
pkg/components/imguploader/webdavuploader.go

@@ -7,7 +7,6 @@ import (
 	"net/http"
 	"net/url"
 	"path"
-	"time"
 
 	"github.com/grafana/grafana/pkg/util"
 )
@@ -19,14 +18,17 @@ type WebdavUploader struct {
 }
 
 func (u *WebdavUploader) Upload(pa string) (string, error) {
-	client := http.Client{Timeout: time.Duration(10 * time.Second)}
-
 	url, _ := url.Parse(u.url)
 	url.Path = path.Join(url.Path, util.GetRandomString(20)+".png")
 
 	imgData, err := ioutil.ReadFile(pa)
 	req, err := http.NewRequest("PUT", url.String(), bytes.NewReader(imgData))
-	res, err := client.Do(req)
+
+	if u.username != "" {
+		req.SetBasicAuth(u.username, u.password)
+	}
+
+	res, err := http.DefaultClient.Do(req)
 
 	if err != nil {
 		return "", err

+ 8 - 2
pkg/components/renderer/renderer.go

@@ -35,7 +35,12 @@ func RenderToPng(params *RenderOpts) (string, error) {
 		executable = executable + ".exe"
 	}
 
-	url := fmt.Sprintf("%s://localhost:%s/%s", setting.Protocol, setting.HttpPort, params.Path)
+	localDomain := "localhost"
+	if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
+		localDomain = setting.HttpAddr
+	}
+
+	url := fmt.Sprintf("%s://%s:%s/%s", setting.Protocol, localDomain, setting.HttpPort, params.Path)
 
 	binPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, executable))
 	scriptPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, "render.js"))
@@ -47,12 +52,13 @@ func RenderToPng(params *RenderOpts) (string, error) {
 
 	cmdArgs := []string{
 		"--ignore-ssl-errors=true",
+		"--web-security=false",
 		scriptPath,
 		"url=" + url,
 		"width=" + params.Width,
 		"height=" + params.Height,
 		"png=" + pngPath,
-		"domain=" + setting.Domain,
+		"domain=" + localDomain,
 		"renderKey=" + renderKey,
 	}
 

+ 24 - 0
pkg/components/securejsondata/securejsondata.go

@@ -0,0 +1,24 @@
+package securejsondata
+
+import (
+	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/util"
+)
+
+type SecureJsonData map[string][]byte
+
+func (s SecureJsonData) Decrypt() map[string]string {
+	decrypted := make(map[string]string)
+	for key, data := range s {
+		decrypted[key] = string(util.Decrypt(data, setting.SecretKey))
+	}
+	return decrypted
+}
+
+func GetEncryptedJsonData(sjd map[string]string) SecureJsonData {
+	encrypted := make(SecureJsonData)
+	for key, data := range sjd {
+		encrypted[key] = util.Encrypt([]byte(data), setting.SecretKey)
+	}
+	return encrypted
+}

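The new package simply hosts the SecureJsonData helpers extracted from plugin_settings.go (see further down), so that datasources and plugin settings can share them; values are encrypted with setting.SecretKey on the way in and decrypted on the way out. A minimal round-trip sketch, assuming the secret key has already been set by the config loader:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/components/securejsondata"
	"github.com/grafana/grafana/pkg/setting"
)

func main() {
	// Normally populated from the security section of grafana.ini.
	setting.SecretKey = "not-a-real-secret"

	encrypted := securejsondata.GetEncryptedJsonData(map[string]string{
		"tlsClientKey": "plain-text-secret",
	})

	// Decrypt returns the original plain-text map.
	fmt.Println(encrypted.Decrypt()["tlsClientKey"]) // plain-text-secret
}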
+ 3 - 2
pkg/metrics/gauge.go

@@ -32,8 +32,8 @@ func RegGauge(name string, tagStrings ...string) Gauge {
 
 // GaugeSnapshot is a read-only copy of another Gauge.
 type GaugeSnapshot struct {
-	*MetricMeta
 	value int64
+	*MetricMeta
 }
 
 // Snapshot returns the snapshot.
@@ -61,9 +61,10 @@ func (NilGauge) Value() int64 { return 0 }
 
 // StandardGauge is the standard implementation of a Gauge and uses the
 // sync/atomic package to manage a single int64 value.
+// atomic needs 64-bit aligned memory, which is only guaranteed for the first word of an allocated struct
 type StandardGauge struct {
-	*MetricMeta
 	value int64
+	*MetricMeta
 }
 
 // Snapshot returns a read-only copy of the gauge.

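Swapping the field order so that value comes before the embedded *MetricMeta is not cosmetic: sync/atomic only guarantees 64-bit alignment for the first word of an allocated struct on 32-bit platforms, so the int64 the gauge mutates atomically has to sit first. A tiny illustration of the constraint (not part of the patch):

package metrics

import "sync/atomic"

// alignedCounter keeps its int64 as the first field; with a pointer
// field first, the value would land at a 4-byte offset on 386/ARM and
// atomic.AddInt64 could panic with an alignment fault.
type alignedCounter struct {
	value int64   // first word: 64-bit alignment guaranteed
	name  *string // stand-in for the embedded *MetricMeta
}

func (c *alignedCounter) inc() int64 {
	return atomic.AddInt64(&c.value, 1)
}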
+ 46 - 43
pkg/metrics/metrics.go

@@ -9,54 +9,55 @@ func init() {
 }
 
 var (
-	M_Instance_Start                     		Counter
-	M_Page_Status_200                    		Counter
-	M_Page_Status_500                    		Counter
-	M_Page_Status_404                    		Counter
-	M_Page_Status_Unknown                		Counter
-	M_Api_Status_200                     		Counter
-	M_Api_Status_404                     		Counter
-	M_Api_Status_500                     		Counter
-	M_Api_Status_Unknown                 		Counter
-	M_Proxy_Status_200                   		Counter
-	M_Proxy_Status_404                   		Counter
-	M_Proxy_Status_500                   		Counter
-	M_Proxy_Status_Unknown               		Counter
-	M_Api_User_SignUpStarted             		Counter
-	M_Api_User_SignUpCompleted           		Counter
-	M_Api_User_SignUpInvite              		Counter
-	M_Api_Dashboard_Save                 		Timer
-	M_Api_Dashboard_Get                  		Timer
-	M_Api_Dashboard_Search               		Timer
-	M_Api_Admin_User_Create              		Counter
-	M_Api_Login_Post                     		Counter
-	M_Api_Login_OAuth                    		Counter
-	M_Api_Org_Create                     		Counter
-	M_Api_Dashboard_Snapshot_Create      		Counter
-	M_Api_Dashboard_Snapshot_External    		Counter
-	M_Api_Dashboard_Snapshot_Get         		Counter
-	M_Models_Dashboard_Insert            		Counter
-	M_Alerting_Result_State_Alerting     		Counter
-	M_Alerting_Result_State_Ok           		Counter
-	M_Alerting_Result_State_Paused       		Counter
-	M_Alerting_Result_State_NoData       		Counter
-	M_Alerting_Result_State_Pending      		Counter
-	M_Alerting_Active_Alerts             		Counter
-	M_Alerting_Notification_Sent_Slack   		Counter
-	M_Alerting_Notification_Sent_Email   		Counter
-	M_Alerting_Notification_Sent_Webhook 		Counter
-	M_Alerting_Notification_Sent_PagerDuty	Counter
-
+	M_Instance_Start                       Counter
+	M_Page_Status_200                      Counter
+	M_Page_Status_500                      Counter
+	M_Page_Status_404                      Counter
+	M_Page_Status_Unknown                  Counter
+	M_Api_Status_200                       Counter
+	M_Api_Status_404                       Counter
+	M_Api_Status_500                       Counter
+	M_Api_Status_Unknown                   Counter
+	M_Proxy_Status_200                     Counter
+	M_Proxy_Status_404                     Counter
+	M_Proxy_Status_500                     Counter
+	M_Proxy_Status_Unknown                 Counter
+	M_Api_User_SignUpStarted               Counter
+	M_Api_User_SignUpCompleted             Counter
+	M_Api_User_SignUpInvite                Counter
+	M_Api_Dashboard_Save                   Timer
+	M_Api_Dashboard_Get                    Timer
+	M_Api_Dashboard_Search                 Timer
+	M_Api_Admin_User_Create                Counter
+	M_Api_Login_Post                       Counter
+	M_Api_Login_OAuth                      Counter
+	M_Api_Org_Create                       Counter
+	M_Api_Dashboard_Snapshot_Create        Counter
+	M_Api_Dashboard_Snapshot_External      Counter
+	M_Api_Dashboard_Snapshot_Get           Counter
+	M_Models_Dashboard_Insert              Counter
+	M_Alerting_Result_State_Alerting       Counter
+	M_Alerting_Result_State_Ok             Counter
+	M_Alerting_Result_State_Paused         Counter
+	M_Alerting_Result_State_NoData         Counter
+	M_Alerting_Result_State_Pending        Counter
+	M_Alerting_Notification_Sent_Slack     Counter
+	M_Alerting_Notification_Sent_Email     Counter
+	M_Alerting_Notification_Sent_Webhook   Counter
+	M_Alerting_Notification_Sent_PagerDuty Counter
+	M_Alerting_Notification_Sent_Victorops Counter
+	M_Alerting_Notification_Sent_OpsGenie  Counter
 
 	// Timers
 	M_DataSource_ProxyReq_Timer Timer
 	M_Alerting_Exeuction_Time   Timer
 
 	// StatTotals
-	M_StatTotal_Dashboards Gauge
-	M_StatTotal_Users      Gauge
-	M_StatTotal_Orgs       Gauge
-	M_StatTotal_Playlists  Gauge
+	M_Alerting_Active_Alerts Gauge
+	M_StatTotal_Dashboards   Gauge
+	M_StatTotal_Users        Gauge
+	M_StatTotal_Orgs         Gauge
+	M_StatTotal_Playlists    Gauge
 )
 
 func initMetricVars(settings *MetricSettings) {
@@ -105,17 +106,19 @@ func initMetricVars(settings *MetricSettings) {
 	M_Alerting_Result_State_NoData = RegCounter("alerting.result", "state", "no_data")
 	M_Alerting_Result_State_Pending = RegCounter("alerting.result", "state", "pending")
 
-	M_Alerting_Active_Alerts = RegCounter("alerting.active_alerts")
 	M_Alerting_Notification_Sent_Slack = RegCounter("alerting.notifications_sent", "type", "slack")
 	M_Alerting_Notification_Sent_Email = RegCounter("alerting.notifications_sent", "type", "email")
 	M_Alerting_Notification_Sent_Webhook = RegCounter("alerting.notifications_sent", "type", "webhook")
 	M_Alerting_Notification_Sent_PagerDuty = RegCounter("alerting.notifications_sent", "type", "pagerduty")
+	M_Alerting_Notification_Sent_Victorops = RegCounter("alerting.notifications_sent", "type", "victorops")
+	M_Alerting_Notification_Sent_OpsGenie = RegCounter("alerting.notifications_sent", "type", "opsgenie")
 
 	// Timers
 	M_DataSource_ProxyReq_Timer = RegTimer("api.dataproxy.request.all")
 	M_Alerting_Exeuction_Time = RegTimer("alerting.execution_time")
 
 	// StatTotals
+	M_Alerting_Active_Alerts = RegGauge("alerting.active_alerts")
 	M_StatTotal_Dashboards = RegGauge("stat_totals", "stat", "dashboards")
 	M_StatTotal_Users = RegGauge("stat_totals", "stat", "users")
 	M_StatTotal_Orgs = RegGauge("stat_totals", "stat", "orgs")

+ 4 - 0
pkg/middleware/middleware.go

@@ -229,6 +229,10 @@ func (ctx *Context) HasUserRole(role m.RoleType) bool {
 	return ctx.OrgRole.Includes(role)
 }
 
+func (ctx *Context) HasHelpFlag(flag m.HelpFlags1) bool {
+	return ctx.HelpFlags1.HasFlag(flag)
+}
+
 func (ctx *Context) TimeRequest(timer metrics.Timer) {
 	ctx.Data["perfmon.timer"] = timer
 }

+ 6 - 1
pkg/middleware/validate_host.go

@@ -8,7 +8,12 @@ import (
 )
 
 func ValidateHostHeader(domain string) macaron.Handler {
-	return func(c *macaron.Context) {
+	return func(c *Context) {
+		// ignore local render calls
+		if c.IsRenderCall {
+			return
+		}
+
 		h := c.Req.Host
 		if i := strings.Index(h, ":"); i >= 0 {
 			h = h[:i]

+ 1 - 0
pkg/models/dashboards.go

@@ -15,6 +15,7 @@ var (
 	ErrDashboardSnapshotNotFound   = errors.New("Dashboard snapshot not found")
 	ErrDashboardWithSameNameExists = errors.New("A dashboard with the same name already exists")
 	ErrDashboardVersionMismatch    = errors.New("The dashboard has been changed by someone else")
+	ErrDashboardTitleEmpty         = errors.New("Dashboard title cannot be empty")
 )
 
 type UpdatePluginDashboardError struct {

+ 30 - 26
pkg/models/datasource.go

@@ -4,6 +4,7 @@ import (
 	"errors"
 	"time"
 
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 )
 
@@ -46,6 +47,7 @@ type DataSource struct {
 	WithCredentials   bool
 	IsDefault         bool
 	JsonData          *simplejson.Json
+	SecureJsonData    securejsondata.SecureJsonData
 
 	Created time.Time
 	Updated time.Time
@@ -77,19 +79,20 @@ func IsKnownDataSourcePlugin(dsType string) bool {
 
 // Also acts as api DTO
 type AddDataSourceCommand struct {
-	Name              string           `json:"name" binding:"Required"`
-	Type              string           `json:"type" binding:"Required"`
-	Access            DsAccess         `json:"access" binding:"Required"`
-	Url               string           `json:"url"`
-	Password          string           `json:"password"`
-	Database          string           `json:"database"`
-	User              string           `json:"user"`
-	BasicAuth         bool             `json:"basicAuth"`
-	BasicAuthUser     string           `json:"basicAuthUser"`
-	BasicAuthPassword string           `json:"basicAuthPassword"`
-	WithCredentials   bool             `json:"withCredentials"`
-	IsDefault         bool             `json:"isDefault"`
-	JsonData          *simplejson.Json `json:"jsonData"`
+	Name              string            `json:"name" binding:"Required"`
+	Type              string            `json:"type" binding:"Required"`
+	Access            DsAccess          `json:"access" binding:"Required"`
+	Url               string            `json:"url"`
+	Password          string            `json:"password"`
+	Database          string            `json:"database"`
+	User              string            `json:"user"`
+	BasicAuth         bool              `json:"basicAuth"`
+	BasicAuthUser     string            `json:"basicAuthUser"`
+	BasicAuthPassword string            `json:"basicAuthPassword"`
+	WithCredentials   bool              `json:"withCredentials"`
+	IsDefault         bool              `json:"isDefault"`
+	JsonData          *simplejson.Json  `json:"jsonData"`
+	SecureJsonData    map[string]string `json:"secureJsonData"`
 
 	OrgId int64 `json:"-"`
 
@@ -98,19 +101,20 @@ type AddDataSourceCommand struct {
 
 // Also acts as api DTO
 type UpdateDataSourceCommand struct {
-	Name              string           `json:"name" binding:"Required"`
-	Type              string           `json:"type" binding:"Required"`
-	Access            DsAccess         `json:"access" binding:"Required"`
-	Url               string           `json:"url"`
-	Password          string           `json:"password"`
-	User              string           `json:"user"`
-	Database          string           `json:"database"`
-	BasicAuth         bool             `json:"basicAuth"`
-	BasicAuthUser     string           `json:"basicAuthUser"`
-	BasicAuthPassword string           `json:"basicAuthPassword"`
-	WithCredentials   bool             `json:"withCredentials"`
-	IsDefault         bool             `json:"isDefault"`
-	JsonData          *simplejson.Json `json:"jsonData"`
+	Name              string            `json:"name" binding:"Required"`
+	Type              string            `json:"type" binding:"Required"`
+	Access            DsAccess          `json:"access" binding:"Required"`
+	Url               string            `json:"url"`
+	Password          string            `json:"password"`
+	User              string            `json:"user"`
+	Database          string            `json:"database"`
+	BasicAuth         bool              `json:"basicAuth"`
+	BasicAuthUser     string            `json:"basicAuthUser"`
+	BasicAuthPassword string            `json:"basicAuthPassword"`
+	WithCredentials   bool              `json:"withCredentials"`
+	IsDefault         bool              `json:"isDefault"`
+	JsonData          *simplejson.Json  `json:"jsonData"`
+	SecureJsonData    map[string]string `json:"secureJsonData"`
 
 	OrgId int64 `json:"-"`
 	Id    int64 `json:"-"`

+ 95 - 0
pkg/models/datasource_cache.go

@@ -0,0 +1,95 @@
+package models
+
+import (
+	"crypto/tls"
+	"crypto/x509"
+	"net"
+	"net/http"
+	"sync"
+	"time"
+)
+
+type proxyTransportCache struct {
+	cache map[int64]cachedTransport
+	sync.Mutex
+}
+
+type cachedTransport struct {
+	updated time.Time
+
+	*http.Transport
+}
+
+var ptc = proxyTransportCache{
+	cache: make(map[int64]cachedTransport),
+}
+
+func (ds *DataSource) GetHttpClient() (*http.Client, error) {
+	transport, err := ds.GetHttpTransport()
+
+	if err != nil {
+		return nil, err
+	}
+
+	return &http.Client{
+		Timeout:   time.Duration(30 * time.Second),
+		Transport: transport,
+	}, nil
+}
+
+func (ds *DataSource) GetHttpTransport() (*http.Transport, error) {
+	ptc.Lock()
+	defer ptc.Unlock()
+
+	if t, present := ptc.cache[ds.Id]; present && ds.Updated.Equal(t.updated) {
+		return t.Transport, nil
+	}
+
+	transport := &http.Transport{
+		TLSClientConfig: &tls.Config{
+			InsecureSkipVerify: true,
+		},
+		Proxy: http.ProxyFromEnvironment,
+		Dial: (&net.Dialer{
+			Timeout:   30 * time.Second,
+			KeepAlive: 30 * time.Second,
+		}).Dial,
+		TLSHandshakeTimeout:   10 * time.Second,
+		ExpectContinueTimeout: 1 * time.Second,
+		MaxIdleConns:          100,
+		IdleConnTimeout:       90 * time.Second,
+	}
+
+	var tlsAuth, tlsAuthWithCACert bool
+	if ds.JsonData != nil {
+		tlsAuth = ds.JsonData.Get("tlsAuth").MustBool(false)
+		tlsAuthWithCACert = ds.JsonData.Get("tlsAuthWithCACert").MustBool(false)
+	}
+
+	if tlsAuth {
+		transport.TLSClientConfig.InsecureSkipVerify = false
+
+		decrypted := ds.SecureJsonData.Decrypt()
+
+		if tlsAuthWithCACert && len(decrypted["tlsCACert"]) > 0 {
+			caPool := x509.NewCertPool()
+			ok := caPool.AppendCertsFromPEM([]byte(decrypted["tlsCACert"]))
+			if ok {
+				transport.TLSClientConfig.RootCAs = caPool
+			}
+		}
+
+		cert, err := tls.X509KeyPair([]byte(decrypted["tlsClientCert"]), []byte(decrypted["tlsClientKey"]))
+		if err != nil {
+			return nil, err
+		}
+		transport.TLSClientConfig.Certificates = []tls.Certificate{cert}
+	}
+
+	ptc.cache[ds.Id] = cachedTransport{
+		Transport: transport,
+		updated:   ds.Updated,
+	}
+
+	return transport, nil
+}

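GetHttpTransport caches one transport per datasource id and invalidates it by comparing ds.Updated, so editing a datasource's TLS settings produces a fresh transport on the next request while untouched datasources keep reusing pooled connections. A usage sketch (the datasource values are illustrative):

package main

import (
	"fmt"
	"time"

	m "github.com/grafana/grafana/pkg/models"
)

func main() {
	ds := &m.DataSource{
		Id:      1,
		Url:     "http://k8s:8001",
		Type:    "Kubernetes",
		Updated: time.Now(),
	}

	// First call builds the transport and stores it in the cache.
	t1, err := ds.GetHttpTransport()
	if err != nil {
		panic(err)
	}

	// Same Id and Updated timestamp: the cached transport is reused.
	t2, _ := ds.GetHttpTransport()
	fmt.Println(t1 == t2) // true

	// Bumping Updated, as an edit through the API does, forces a rebuild.
	ds.Updated = ds.Updated.Add(time.Minute)
	t3, _ := ds.GetHttpTransport()
	fmt.Println(t1 == t3) // false
}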
+ 157 - 0
pkg/models/datasource_cache_test.go

@@ -0,0 +1,157 @@
+package models
+
+import (
+	"testing"
+	"time"
+
+	. "github.com/smartystreets/goconvey/convey"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/util"
+)
+
+func TestDataSourceCache(t *testing.T) {
+	Convey("When caching a datasource proxy", t, func() {
+		clearCache()
+		ds := DataSource{
+			Id:   1,
+			Url:  "http://k8s:8001",
+			Type: "Kubernetes",
+		}
+
+		t1, err := ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		t2, err := ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		Convey("Should be using the cached proxy", func() {
+			So(t2, ShouldEqual, t1)
+		})
+	})
+
+	Convey("When getting kubernetes datasource proxy", t, func() {
+		clearCache()
+		setting.SecretKey = "password"
+
+		json := simplejson.New()
+		json.Set("tlsAuth", true)
+		json.Set("tlsAuthWithCACert", true)
+
+		t := time.Now()
+		ds := DataSource{
+			Url:     "http://k8s:8001",
+			Type:    "Kubernetes",
+			Updated: t.Add(-2 * time.Minute),
+		}
+
+		transport, err := ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		Convey("Should have no cert", func() {
+			So(transport.TLSClientConfig.InsecureSkipVerify, ShouldEqual, true)
+		})
+
+		ds.JsonData = json
+		ds.SecureJsonData = map[string][]byte{
+			"tlsCACert":     util.Encrypt([]byte(caCert), "password"),
+			"tlsClientCert": util.Encrypt([]byte(clientCert), "password"),
+			"tlsClientKey":  util.Encrypt([]byte(clientKey), "password"),
+		}
+		ds.Updated = t.Add(-1 * time.Minute)
+
+		transport, err = ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		Convey("Should add cert", func() {
+			So(transport.TLSClientConfig.InsecureSkipVerify, ShouldEqual, false)
+			So(len(transport.TLSClientConfig.Certificates), ShouldEqual, 1)
+		})
+
+		ds.JsonData = nil
+		ds.SecureJsonData = map[string][]byte{}
+		ds.Updated = t
+
+		transport, err = ds.GetHttpTransport()
+		So(err, ShouldBeNil)
+
+		Convey("Should remove cert", func() {
+			So(transport.TLSClientConfig.InsecureSkipVerify, ShouldEqual, true)
+			So(len(transport.TLSClientConfig.Certificates), ShouldEqual, 0)
+		})
+	})
+}
+
+func clearCache() {
+	ptc.Lock()
+	defer ptc.Unlock()
+
+	ptc.cache = make(map[int64]cachedTransport)
+}
+
+const caCert string = `-----BEGIN CERTIFICATE-----
+MIIDATCCAemgAwIBAgIJAMQ5hC3CPDTeMA0GCSqGSIb3DQEBCwUAMBcxFTATBgNV
+BAMMDGNhLWs4cy1zdGhsbTAeFw0xNjEwMjcwODQyMjdaFw00NDAzMTQwODQyMjda
+MBcxFTATBgNVBAMMDGNhLWs4cy1zdGhsbTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAMLe2AmJ6IleeUt69vgNchOjjmxIIxz5sp1vFu94m1vUip7CqnOg
+QkpUsHeBPrGYv8UGloARCL1xEWS+9FVZeXWQoDmbC0SxXhFwRIESNCET7Q8KMi/4
+4YPvnMLGZi3Fjwxa8BdUBCN1cx4WEooMVTWXm7RFMtZgDfuOAn3TNXla732sfT/d
+1HNFrh48b0wA+HhmA3nXoBnBEblA665hCeo7lIAdRr0zJxJpnFnWXkyTClsAUTMN
+iL905LdBiiIRenojipfKXvMz88XSaWTI7JjZYU3BvhyXndkT6f12cef3I96NY3WJ
+0uIK4k04WrbzdYXMU3rN6NqlvbHqnI+E7aMCAwEAAaNQME4wHQYDVR0OBBYEFHHx
+2+vSPw9bECHj3O51KNo5VdWOMB8GA1UdIwQYMBaAFHHx2+vSPw9bECHj3O51KNo5
+VdWOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAH2eV5NcV3LBJHs9
+I+adbiTPg2vyumrGWwy73T0X8Dtchgt8wU7Q9b9Ucg2fOTmSSyS0iMqEu1Yb2ORB
+CknM9mixHC9PwEBbkGCom3VVkqdLwSP6gdILZgyLoH4i8sTUz+S1yGPepi+Vzhs7
+adOXtryjcGnwft6HdfKPNklMOHFnjw6uqpho54oj/z55jUpicY/8glDHdrr1bh3k
+MHuiWLGewHXPvxfG6UoUx1te65IhifVcJGFZDQwfEmhBflfCmtAJlZEsgTLlBBCh
+FHoXIyGOdq1chmRVocdGBCF8fUoGIbuF14r53rpvcbEKtKnnP8+96luKAZLq0a4n
+3lb92xM=
+-----END CERTIFICATE-----`
+
+const clientCert string = `-----BEGIN CERTIFICATE-----
+MIICsjCCAZoCCQCcd8sOfstQLzANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQDDAxj
+YS1rOHMtc3RobG0wHhcNMTYxMTAyMDkyNTE1WhcNMTcxMTAyMDkyNTE1WjAfMR0w
+GwYDVQQDDBRhZG0tZGFuaWVsLWs4cy1zdGhsbTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAOMliaWyNEUJKM37vWCl5bGub3lMicyRAqGQyY/qxD9yKKM2
+FbucVcmWmg5vvTqQVl5rlQ+c7GI8OD6ptmFl8a26coEki7bFr8bkpSyBSEc5p27b
+Z0ORFSqBHWHQbr9PkxPLYW6T3gZYUtRYv3OQgGxLXlvUh85n/mQfuR3N1FgmShHo
+GtAFi/ht6leXa0Ms+jNSDLCmXpJm1GIEqgyKX7K3+g3vzo9coYqXq4XTa8Efs2v8
+SCwqWfBC3rHfgs/5DLB8WT4Kul8QzxkytzcaBQfRfzhSV6bkgm7oTzt2/1eRRsf4
+YnXzLE9YkCC9sAn+Owzqf+TYC1KRluWDfqqBTJUCAwEAATANBgkqhkiG9w0BAQsF
+AAOCAQEAdMsZg6edWGC+xngizn0uamrUg1ViaDqUsz0vpzY5NWLA4MsBc4EtxWRP
+ueQvjUimZ3U3+AX0YWNLIrH1FCVos2jdij/xkTUmHcwzr8rQy+B17cFi+a8jtpgw
+AU6WWoaAIEhhbWQfth/Diz3mivl1ARB+YqiWca2mjRPLTPcKJEURDVddQ423el0Q
+4JNxS5icu7T2zYTYHAo/cT9zVdLZl0xuLxYm3asK1IONJ/evxyVZima3il6MPvhe
+58Hwz+m+HdqHxi24b/1J/VKYbISG4huOQCdLzeNXgvwFlGPUmHSnnKo1/KbQDAR5
+llG/Sw5+FquFuChaA6l5KWy7F3bQyA==
+-----END CERTIFICATE-----`
+
+const clientKey string = `-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA4yWJpbI0RQkozfu9YKXlsa5veUyJzJECoZDJj+rEP3IoozYV
+u5xVyZaaDm+9OpBWXmuVD5zsYjw4Pqm2YWXxrbpygSSLtsWvxuSlLIFIRzmnbttn
+Q5EVKoEdYdBuv0+TE8thbpPeBlhS1Fi/c5CAbEteW9SHzmf+ZB+5Hc3UWCZKEega
+0AWL+G3qV5drQyz6M1IMsKZekmbUYgSqDIpfsrf6De/Oj1yhiperhdNrwR+za/xI
+LCpZ8ELesd+Cz/kMsHxZPgq6XxDPGTK3NxoFB9F/OFJXpuSCbuhPO3b/V5FGx/hi
+dfMsT1iQIL2wCf47DOp/5NgLUpGW5YN+qoFMlQIDAQABAoIBAQCzy4u312XeW1Cs
+Mx6EuOwmh59/ESFmBkZh4rxZKYgrfE5EWlQ7i5SwG4BX+wR6rbNfy6JSmHDXlTkk
+CKvvToVNcW6fYHEivDnVojhIERFIJ4+rhQmpBtcNLOQ3/4cZ8X/GxE6b+3lb5l+x
+64mnjPLKRaIr5/+TVuebEy0xNTJmjnJ7yiB2HRz7uXEQaVSk/P7KAkkyl/9J3/LM
+8N9AX1w6qDaNQZ4/P0++1H4SQenosM/b/GqGTomarEk/GE0NcB9rzmR9VCXa7FRh
+WV5jyt9vUrwIEiK/6nUnOkGO8Ei3kB7Y+e+2m6WdaNoU5RAfqXmXa0Q/a0lLRruf
+vTMo2WrBAoGBAPRaK4cx76Q+3SJ/wfznaPsMM06OSR8A3ctKdV+ip/lyKtb1W8Pz
+k8MYQDH7GwPtSu5QD8doL00pPjugZL/ba7X9nAsI+pinyEErfnB9y7ORNEjIYYzs
+DiqDKup7ANgw1gZvznWvb9Ge0WUSXvWS0pFkgootQAf+RmnnbWGH6l6RAoGBAO35
+aGUrLro5u9RD24uSXNU3NmojINIQFK5dHAT3yl0BBYstL43AEsye9lX95uMPTvOQ
+Cqcn42Hjp/bSe3n0ObyOZeXVrWcDFAfE0wwB1BkvL1lpgnFO9+VQORlH4w3Ppnpo
+jcPkR2TFeDaAYtvckhxe/Bk3OnuFmnsQ3VzM75fFAoGBAI6PvS2XeNU+yA3EtA01
+hg5SQ+zlHswz2TMuMeSmJZJnhY78f5mHlwIQOAPxGQXlf/4iP9J7en1uPpzTK3S0
+M9duK4hUqMA/w5oiIhbHjf0qDnMYVbG+V1V+SZ+cPBXmCDihKreGr5qBKnHpkfV8
+v9WL6o1rcRw4wiQvnaV1gsvBAoGBALtzVTczr6gDKCAIn5wuWy+cQSGTsBunjRLX
+xuVm5iEiV+KMYkPvAx/pKzMLP96lRVR3ptyKgAKwl7LFk3u50+zh4gQLr35QH2wL
+Lw7rNc3srAhrItPsFzqrWX6/cGuFoKYVS239l/sZzRppQPXcpb7xVvTp2whHcir0
+Wtnpl+TdAoGAGqKqo2KU3JoY3IuTDUk1dsNAm8jd9EWDh+s1x4aG4N79mwcss5GD
+FF8MbFPneK7xQd8L6HisKUDAUi2NOyynM81LAftPkvN6ZuUVeFDfCL4vCA0HUXLD
++VrOhtUZkNNJlLMiVRJuQKUOGlg8PpObqYbstQAf/0/yFJMRHG82Tcg=
+-----END RSA PRIVATE KEY-----`

+ 18 - 0
pkg/models/helpflags.go

@@ -0,0 +1,18 @@
+package models
+
+type HelpFlags1 uint64
+
+const (
+	HelpFlagGettingStartedPanelDismissed HelpFlags1 = 1 << iota
+	HelpFlagDashboardHelp1
+)
+
+func (f HelpFlags1) HasFlag(flag HelpFlags1) bool { return f&flag != 0 }
+func (f *HelpFlags1) AddFlag(flag HelpFlags1)     { *f |= flag }
+func (f *HelpFlags1) ClearFlag(flag HelpFlags1)   { *f &= ^flag }
+func (f *HelpFlags1) ToggleFlag(flag HelpFlags1)  { *f ^= flag }
+
+type SetUserHelpFlagCommand struct {
+	HelpFlags1 HelpFlags1
+	UserId     int64
+}

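HelpFlags1 is a plain uint64 bitmask: each dismissed hint costs one bit and the whole set round-trips through a single column on the user row. A short sketch of the operations defined above:

package main

import (
	"fmt"

	m "github.com/grafana/grafana/pkg/models"
)

func main() {
	var flags m.HelpFlags1

	flags.AddFlag(m.HelpFlagGettingStartedPanelDismissed)
	fmt.Println(flags.HasFlag(m.HelpFlagGettingStartedPanelDismissed)) // true
	fmt.Println(flags.HasFlag(m.HelpFlagDashboardHelp1))               // false

	flags.ClearFlag(m.HelpFlagGettingStartedPanelDismissed)
	fmt.Println(flags.HasFlag(m.HelpFlagGettingStartedPanelDismissed)) // false
}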
+ 4 - 19
pkg/models/plugin_settings.go

@@ -4,8 +4,7 @@ import (
 	"errors"
 	"time"
 
-	"github.com/grafana/grafana/pkg/setting"
-	"github.com/grafana/grafana/pkg/util"
+	"github.com/grafana/grafana/pkg/components/securejsondata"
 )
 
 var (
@@ -19,23 +18,13 @@ type PluginSetting struct {
 	Enabled        bool
 	Pinned         bool
 	JsonData       map[string]interface{}
-	SecureJsonData SecureJsonData
+	SecureJsonData securejsondata.SecureJsonData
 	PluginVersion  string
 
 	Created time.Time
 	Updated time.Time
 }
 
-type SecureJsonData map[string][]byte
-
-func (s SecureJsonData) Decrypt() map[string]string {
-	decrypted := make(map[string]string)
-	for key, data := range s {
-		decrypted[key] = string(util.Decrypt(data, setting.SecretKey))
-	}
-	return decrypted
-}
-
 // ----------------------
 // COMMANDS
 
@@ -58,12 +47,8 @@ type UpdatePluginSettingVersionCmd struct {
 	OrgId         int64  `json:"-"`
 }
 
-func (cmd *UpdatePluginSettingCmd) GetEncryptedJsonData() SecureJsonData {
-	encrypted := make(SecureJsonData)
-	for key, data := range cmd.SecureJsonData {
-		encrypted[key] = util.Encrypt([]byte(data), setting.SecretKey)
-	}
-	return encrypted
+func (cmd *UpdatePluginSettingCmd) GetEncryptedJsonData() securejsondata.SecureJsonData {
+	return securejsondata.GetEncryptedJsonData(cmd.SecureJsonData)
 }
 
 // ---------------------

+ 8 - 0
pkg/models/user.go

@@ -10,6 +10,12 @@ var (
 	ErrUserNotFound = errors.New("User not found")
 )
 
+type Password string
+
+func (p Password) IsWeak() bool {
+	return len(p) <= 4
+}
+
 type User struct {
 	Id            int64
 	Version       int
@@ -22,6 +28,7 @@ type User struct {
 	Company       string
 	EmailVerified bool
 	Theme         string
+	HelpFlags1    HelpFlags1
 
 	IsAdmin bool
 	OrgId   int64
@@ -144,6 +151,7 @@ type SignedInUser struct {
 	Email          string
 	ApiKeyId       int64
 	IsGrafanaAdmin bool
+	HelpFlags1     HelpFlags1
 }
 
 type UserProfileDTO struct {

+ 1 - 0
pkg/plugins/models.go

@@ -38,6 +38,7 @@ type PluginBase struct {
 	Includes     []*PluginInclude   `json:"includes"`
 	Module       string             `json:"module"`
 	BaseUrl      string             `json:"baseUrl"`
+	HideFromList bool               `json:"hideFromList"`
 
 	IncludedInAppId string `json:"-"`
 	PluginDir       string `json:"-"`

+ 17 - 0
pkg/services/alerting/commands.go

@@ -11,8 +11,25 @@ type UpdateDashboardAlertsCommand struct {
 	Dashboard *m.Dashboard
 }
 
+type ValidateDashboardAlertsCommand struct {
+	UserId    int64
+	OrgId     int64
+	Dashboard *m.Dashboard
+}
+
 func init() {
 	bus.AddHandler("alerting", updateDashboardAlerts)
+	bus.AddHandler("alerting", validateDashboardAlerts)
+}
+
+func validateDashboardAlerts(cmd *ValidateDashboardAlertsCommand) error {
+	extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId)
+
+	if _, err := extractor.GetAlerts(); err != nil {
+		return err
+	}
+
+	return nil
 }
 
 func updateDashboardAlerts(cmd *UpdateDashboardAlertsCommand) error {
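
ValidateDashboardAlertsCommand is dispatched on the bus like the existing update command, so a save path can reject a dashboard whose alerts cannot be extracted before anything is persisted. A hedged sketch of such a caller; the helper name and its arguments are assumptions, not code from this change:

package api

import (
	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/alerting"
)

// validateAlerts is a hypothetical helper: it dispatches the new command and
// returns the extraction error, if any, so the caller can abort the save.
func validateAlerts(dash *m.Dashboard, orgId, userId int64) error {
	cmd := alerting.ValidateDashboardAlertsCommand{
		OrgId:     orgId,
		UserId:    userId,
		Dashboard: dash,
	}
	return bus.Dispatch(&cmd)
}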

+ 4 - 4
pkg/services/alerting/conditions/evaluator.go

@@ -17,9 +17,9 @@ type AlertEvaluator interface {
 	Eval(reducedValue null.Float) bool
 }
 
-type NoDataEvaluator struct{}
+type NoValueEvaluator struct{}
 
-func (e *NoDataEvaluator) Eval(reducedValue null.Float) bool {
+func (e *NoValueEvaluator) Eval(reducedValue null.Float) bool {
 	return reducedValue.Valid == false
 }
 
@@ -118,8 +118,8 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) {
 		return newRangedEvaluator(typ, model)
 	}
 
-	if typ == "no_data" {
-		return &NoDataEvaluator{}, nil
+	if typ == "no_value" {
+		return &NoValueEvaluator{}, nil
 	}
 
 	return nil, alerting.ValidationError{Reason: "Evaluator invalid evaluator type: " + typ}

+ 12 - 7
pkg/services/alerting/conditions/evaluator_test.go

@@ -44,15 +44,20 @@ func TestEvalutors(t *testing.T) {
 		So(evalutorScenario(`{"type": "outside_range", "params": [100, 1] }`, 50), ShouldBeFalse)
 	})
 
-	Convey("no_data", t, func() {
-		So(evalutorScenario(`{"type": "no_data", "params": [] }`, 50), ShouldBeFalse)
+	Convey("no_value", t, func() {
+		Convey("should be false if the series has values", func() {
+			So(evalutorScenario(`{"type": "no_value", "params": [] }`, 50), ShouldBeFalse)
+		})
 
-		jsonModel, err := simplejson.NewJson([]byte(`{"type": "no_data", "params": [] }`))
-		So(err, ShouldBeNil)
+		Convey("should be true when the series has no value", func() {
+			jsonModel, err := simplejson.NewJson([]byte(`{"type": "no_value", "params": [] }`))
+			So(err, ShouldBeNil)
 
-		evaluator, err := NewAlertEvaluator(jsonModel)
-		So(err, ShouldBeNil)
+			evaluator, err := NewAlertEvaluator(jsonModel)
+			So(err, ShouldBeNil)
 
-		So(evaluator.Eval(null.FloatFromPtr(nil)), ShouldBeTrue)
+			So(evaluator.Eval(null.FloatFromPtr(nil)), ShouldBeTrue)
+
+		})
 	})
 }

+ 10 - 16
pkg/services/alerting/conditions/query.go

@@ -23,6 +23,7 @@ type QueryCondition struct {
 	Query         AlertQuery
 	Reducer       QueryReducer
 	Evaluator     AlertEvaluator
+	Operator      string
 	HandleRequest tsdb.HandleRequestFunc
 }
 
@@ -72,6 +73,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio
 	return &alerting.ConditionResult{
 		Firing:      evalMatchCount > 0,
 		NoDataFound: emptySerieCount == len(seriesList),
+		Operator:    c.Operator,
 		EvalMatches: matches,
 	}, nil
 }
@@ -117,21 +119,9 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRa
 		TimeRange: timeRange,
 		Queries: []*tsdb.Query{
 			{
-				RefId: "A",
-				Model: c.Query.Model,
-				DataSource: &tsdb.DataSourceInfo{
-					Id:                datasource.Id,
-					Name:              datasource.Name,
-					PluginId:          datasource.Type,
-					Url:               datasource.Url,
-					User:              datasource.User,
-					Password:          datasource.Password,
-					Database:          datasource.Database,
-					BasicAuth:         datasource.BasicAuth,
-					BasicAuthUser:     datasource.BasicAuthUser,
-					BasicAuthPassword: datasource.BasicAuthPassword,
-					JsonData:          datasource.JsonData,
-				},
+				RefId:      "A",
+				Model:      c.Query.Model,
+				DataSource: datasource,
 			},
 		},
 	}
@@ -168,8 +158,12 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro
 	if err != nil {
 		return nil, err
 	}
-
 	condition.Evaluator = evaluator
+
+	operatorJson := model.Get("operator")
+	operator := operatorJson.Get("type").MustString("and")
+	condition.Operator = operator
+
 	return &condition, nil
 }
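
NewQueryCondition now also reads an operator object from the condition model, falling back to "and" when it is missing. A small sketch of parsing such a model; the JSON is illustrative and only the operator lookup mirrors the code above:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/components/simplejson"
)

func main() {
	// Illustrative condition model; only the "operator" object is new in this change.
	raw := []byte(`{
		"type": "query",
		"operator": {"type": "or"},
		"evaluator": {"type": "gt", "params": [100]}
	}`)

	model, err := simplejson.NewJson(raw)
	if err != nil {
		panic(err)
	}

	// Mirrors NewQueryCondition: a missing operator falls back to "and".
	fmt.Println(model.Get("operator").Get("type").MustString("and")) // or
}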
 

+ 24 - 1
pkg/services/alerting/conditions/reducer.go

@@ -3,6 +3,8 @@ package conditions
 import (
 	"math"
 
+	"sort"
+
 	"github.com/grafana/grafana/pkg/tsdb"
 	"gopkg.in/guregu/null.v3"
 )
@@ -25,13 +27,17 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
 
 	switch s.Type {
 	case "avg":
+		validPointsCount := 0
 		for _, point := range series.Points {
 			if point[0].Valid {
 				value += point[0].Float64
+				validPointsCount += 1
 				allNull = false
 			}
 		}
-		value = value / float64(len(series.Points))
+		if validPointsCount > 0 {
+			value = value / float64(validPointsCount)
+		}
 	case "sum":
 		for _, point := range series.Points {
 			if point[0].Valid {
@@ -71,6 +77,23 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
 				break
 			}
 		}
+	case "median":
+		var values []float64
+		for _, v := range series.Points {
+			if v[0].Valid {
+				allNull = false
+				values = append(values, v[0].Float64)
+			}
+		}
+		if len(values) >= 1 {
+			sort.Float64s(values)
+			length := len(values)
+			if length%2 == 1 {
+				value = values[(length-1)/2]
+			} else {
+				value = (values[(length/2)-1] + values[length/2]) / 2
+			}
+		}
 	}
 
 	if allNull {
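
The new median branch sorts the non-null values and picks the middle one, averaging the two middle values when the count is even, so 1, 2, 3000 reduces to 2 and 1, 2, 4, 3000 reduces to 3 (the tests below assert exactly these cases). A standalone sketch of the same selection logic:

package main

import (
	"fmt"
	"sort"
)

// median mirrors the "median" case of SimpleReducer.Reduce for non-null values.
func median(values []float64) float64 {
	sort.Float64s(values)
	length := len(values)
	if length%2 == 1 {
		return values[(length-1)/2]
	}
	return (values[(length/2)-1] + values[length/2]) / 2
}

func main() {
	fmt.Println(median([]float64{1, 2, 3000}))    // 2
	fmt.Println(median([]float64{1, 2, 4, 3000})) // 3
}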

+ 33 - 4
pkg/services/alerting/conditions/reducer_test.go

@@ -11,10 +11,6 @@ import (
 
 func TestSimpleReducer(t *testing.T) {
 	Convey("Test simple reducer by calculating", t, func() {
-		Convey("avg", func() {
-			result := testReducer("avg", 1, 2, 3)
-			So(result, ShouldEqual, float64(2))
-		})
 
 		Convey("sum", func() {
 			result := testReducer("sum", 1, 2, 3)
@@ -41,6 +37,39 @@ func TestSimpleReducer(t *testing.T) {
 			So(result, ShouldEqual, float64(3000))
 		})
 
+		Convey("median of an odd number of values", func() {
+			result := testReducer("median", 1, 2, 3000)
+			So(result, ShouldEqual, float64(2))
+		})
+
+		Convey("median of an even number of values", func() {
+			result := testReducer("median", 1, 2, 4, 3000)
+			So(result, ShouldEqual, float64(3))
+		})
+
+		Convey("median with one value", func() {
+			result := testReducer("median", 1)
+			So(result, ShouldEqual, float64(1))
+		})
+
+		Convey("avg", func() {
+			result := testReducer("avg", 1, 2, 3)
+			So(result, ShouldEqual, float64(2))
+		})
+
+		Convey("avg of numeric and null values should ignore nulls", func() {
+			reducer := NewSimpleReducer("avg")
+			series := &tsdb.TimeSeries{
+				Name: "test time series",
+			}
+
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 1))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3))
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4))
+
+			So(reducer.Reduce(series).Float64, ShouldEqual, float64(3))
+		})
 	})
 }
 

+ 1 - 1
pkg/services/alerting/eval_context.go

@@ -17,7 +17,7 @@ type EvalContext struct {
 	EvalMatches     []*EvalMatch
 	Logs            []*ResultLogEntry
 	Error           error
-	Description     string
+	ConditionEvals  string
 	StartTime       time.Time
 	EndTime         time.Time
 	Rule            *Rule

+ 22 - 5
pkg/services/alerting/eval_handler.go

@@ -1,6 +1,8 @@
 package alerting
 
 import (
+	"strconv"
+	"strings"
 	"time"
 
 	"github.com/grafana/grafana/pkg/log"
@@ -21,7 +23,11 @@ func NewEvalHandler() *DefaultEvalHandler {
 
 func (e *DefaultEvalHandler) Eval(context *EvalContext) {
 	firing := true
-	for _, condition := range context.Rule.Conditions {
+	noDataFound := true
+	conditionEvals := ""
+
+	for i := 0; i < len(context.Rule.Conditions); i++ {
+		condition := context.Rule.Conditions[i]
 		cr, err := condition.Eval(context)
 		if err != nil {
 			context.Error = err
@@ -32,16 +38,27 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
 			break
 		}
 
-		// break if result has not triggered yet
-		if cr.Firing == false {
-			firing = false
-			break
+		// combine the firing state according to each condition's operator
+		if cr.Operator == "or" {
+			firing = firing || cr.Firing
+			noDataFound = noDataFound || cr.NoDataFound
+		} else {
+			firing = firing && cr.Firing
+			noDataFound = noDataFound && cr.NoDataFound
+		}
+
+		if i > 0 {
+			conditionEvals = "[" + conditionEvals + " " + strings.ToUpper(cr.Operator) + " " + strconv.FormatBool(cr.Firing) + "]"
+		} else {
+			conditionEvals = strconv.FormatBool(firing)
 		}
 
 		context.EvalMatches = append(context.EvalMatches, cr.EvalMatches...)
 	}
 
+	context.ConditionEvals = conditionEvals + " = " + strconv.FormatBool(firing)
 	context.Firing = firing
+	context.NoDataFound = noDataFound
 	context.EndTime = time.Now()
 	elapsedTime := context.EndTime.Sub(context.StartTime) / time.Millisecond
 	metrics.M_Alerting_Exeuction_Time.Update(elapsedTime)
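
Eval now folds every condition into the running firing state using that condition's own operator and records the evaluation as a bracketed string, so two AND conditions followed by an OR condition render as "[[true AND true] OR false] = true" (the handler tests below assert this format). A standalone sketch of the same accumulation over stubbed results:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// Stubbed per-condition results matching one of the nested-operator tests below.
	type result struct {
		firing   bool
		operator string
	}
	results := []result{
		{firing: true, operator: "and"},
		{firing: true, operator: "and"},
		{firing: false, operator: "or"},
	}

	firing := true
	conditionEvals := ""
	for i, cr := range results {
		if cr.operator == "or" {
			firing = firing || cr.firing
		} else {
			firing = firing && cr.firing
		}
		if i > 0 {
			conditionEvals = "[" + conditionEvals + " " + strings.ToUpper(cr.operator) + " " + strconv.FormatBool(cr.firing) + "]"
		} else {
			conditionEvals = strconv.FormatBool(firing)
		}
	}

	fmt.Println(conditionEvals + " = " + strconv.FormatBool(firing)) // [[true AND true] OR false] = true
}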

+ 127 - 5
pkg/services/alerting/eval_handler_test.go

@@ -8,12 +8,14 @@ import (
 )
 
 type conditionStub struct {
-	firing  bool
-	matches []*EvalMatch
+	firing   bool
+	operator string
+	matches  []*EvalMatch
+	noData   bool
 }
 
 func (c *conditionStub) Eval(context *EvalContext) (*ConditionResult, error) {
-	return &ConditionResult{Firing: c.firing, EvalMatches: c.matches}, nil
+	return &ConditionResult{Firing: c.firing, EvalMatches: c.matches, Operator: c.operator, NoDataFound: c.noData}, nil
 }
 
 func TestAlertingExecutor(t *testing.T) {
@@ -29,18 +31,138 @@ func TestAlertingExecutor(t *testing.T) {
 
 			handler.Eval(context)
 			So(context.Firing, ShouldEqual, true)
+			So(context.ConditionEvals, ShouldEqual, "true = true")
 		})
 
 		Convey("Show return false with not passing asdf", func() {
 			context := NewEvalContext(context.TODO(), &Rule{
 				Conditions: []Condition{
-					&conditionStub{firing: true, matches: []*EvalMatch{&EvalMatch{}, &EvalMatch{}}},
-					&conditionStub{firing: false},
+					&conditionStub{firing: true, operator: "and", matches: []*EvalMatch{&EvalMatch{}, &EvalMatch{}}},
+					&conditionStub{firing: false, operator: "and"},
 				},
 			})
 
 			handler.Eval(context)
 			So(context.Firing, ShouldEqual, false)
+			So(context.ConditionEvals, ShouldEqual, "[true AND false] = false")
+		})
+
+		Convey("Should return true if any condition is passing with OR operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "or"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, true)
+			So(context.ConditionEvals, ShouldEqual, "[true OR false] = true")
+		})
+
+		Convey("Should return false if any condition is failing with AND operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "and"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, false)
+			So(context.ConditionEvals, ShouldEqual, "[true AND false] = false")
+		})
+
+		Convey("Should return true even if one condition is failing, with nested OR operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "or"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, true)
+			So(context.ConditionEvals, ShouldEqual, "[[true AND true] OR false] = true")
+		})
+
+		Convey("Should return false even if one condition is passing, with nested OR operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "and"},
+					&conditionStub{firing: false, operator: "or"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, false)
+			So(context.ConditionEvals, ShouldEqual, "[[true AND false] OR false] = false")
+		})
+
+		Convey("Should return false if a condition is failing with nested AND operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "and"},
+					&conditionStub{firing: true, operator: "and"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, false)
+			So(context.ConditionEvals, ShouldEqual, "[[true AND false] AND true] = false")
+		})
+
+		Convey("Should return true if a condition is passing with nested OR operator", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{firing: true, operator: "and"},
+					&conditionStub{firing: false, operator: "or"},
+					&conditionStub{firing: true, operator: "or"},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, true)
+			So(context.ConditionEvals, ShouldEqual, "[[true OR false] OR true] = true")
+		})
+
+		Convey("Should return no data if one condition has no data", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{operator: "and", noData: true},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.Firing, ShouldEqual, false)
+			So(context.NoDataFound, ShouldBeTrue)
+		})
+
+		Convey("Should not return no data when only one condition has no data and using AND", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{operator: "and", noData: true},
+					&conditionStub{operator: "and", noData: false},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.NoDataFound, ShouldBeFalse)
+		})
+
+		Convey("Should return no data when at least one condition has no data and using OR", func() {
+			context := NewEvalContext(context.TODO(), &Rule{
+				Conditions: []Condition{
+					&conditionStub{operator: "or", noData: true},
+					&conditionStub{operator: "or", noData: false},
+				},
+			})
+
+			handler.Eval(context)
+			So(context.NoDataFound, ShouldBeTrue)
 		})
 	})
 }

+ 1 - 0
pkg/services/alerting/interfaces.go

@@ -24,6 +24,7 @@ type Notifier interface {
 type ConditionResult struct {
 	Firing      bool
 	NoDataFound bool
+	Operator    string
 	EvalMatches []*EvalMatch
 }
 

+ 2 - 4
pkg/services/alerting/notifier.go

@@ -55,10 +55,8 @@ func (n *RootNotifier) Notify(context *EvalContext) error {
 		return nil
 	}
 
-	err = n.uploadImage(context)
-	if err != nil {
-		n.log.Error("Failed to upload alert panel image", "error", err)
-		return err
+	if err = n.uploadImage(context); err != nil {
+		n.log.Error("Failed to upload alert panel image.", "error", err)
 	}
 
 	return n.sendNotifications(context, notifiers)

+ 1 - 0
pkg/services/alerting/notifiers/email.go

@@ -89,6 +89,7 @@ func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error {
 
 	if err != nil {
 		this.log.Error("Failed to send alert notification email", "error", err)
+		return err
 	}
 	return nil
 

+ 119 - 0
pkg/services/alerting/notifiers/opsgenie.go

@@ -0,0 +1,119 @@
+package notifiers
+
+import (
+	"fmt"
+	"strconv"
+
+	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/metrics"
+	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/services/alerting"
+)
+
+func init() {
+	alerting.RegisterNotifier("opsgenie", NewOpsGenieNotifier)
+}
+
+var (
+	opsgenieCreateAlertURL string = "https://api.opsgenie.com/v1/json/alert"
+	opsgenieCloseAlertURL  string = "https://api.opsgenie.com/v1/json/alert/close"
+)
+
+func NewOpsGenieNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
+	autoClose := model.Settings.Get("autoClose").MustBool(true)
+	apiKey := model.Settings.Get("apiKey").MustString()
+	if apiKey == "" {
+		return nil, alerting.ValidationError{Reason: "Could not find api key property in settings"}
+	}
+
+	return &OpsGenieNotifier{
+		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+		ApiKey:       apiKey,
+		AutoClose:    autoClose,
+		log:          log.New("alerting.notifier.opsgenie"),
+	}, nil
+}
+
+type OpsGenieNotifier struct {
+	NotifierBase
+	ApiKey    string
+	AutoClose bool
+	log       log.Logger
+}
+
+func (this *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error {
+	metrics.M_Alerting_Notification_Sent_OpsGenie.Inc(1)
+
+	var err error
+	switch evalContext.Rule.State {
+	case m.AlertStateOK:
+		if this.AutoClose {
+			err = this.closeAlert(evalContext)
+		}
+	case m.AlertStateAlerting:
+		err = this.createAlert(evalContext)
+	}
+	return err
+}
+
+func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error {
+	this.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", this.Name)
+
+	ruleUrl, err := evalContext.GetRuleUrl()
+	if err != nil {
+		this.log.Error("Failed to get rule link", "error", err)
+		return err
+	}
+
+	bodyJSON := simplejson.New()
+	bodyJSON.Set("apiKey", this.ApiKey)
+	bodyJSON.Set("message", evalContext.Rule.Name)
+	bodyJSON.Set("source", "Grafana")
+	bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10))
+	bodyJSON.Set("description", fmt.Sprintf("%s - %s\n%s", evalContext.Rule.Name, ruleUrl, evalContext.Rule.Message))
+
+	details := simplejson.New()
+	details.Set("url", ruleUrl)
+	if evalContext.ImagePublicUrl != "" {
+		details.Set("image", evalContext.ImagePublicUrl)
+	}
+
+	bodyJSON.Set("details", details)
+	body, _ := bodyJSON.MarshalJSON()
+
+	cmd := &m.SendWebhookSync{
+		Url:        opsgenieCreateAlertURL,
+		Body:       string(body),
+		HttpMethod: "POST",
+	}
+
+	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
+		this.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body))
+	}
+
+	return nil
+}
+
+func (this *OpsGenieNotifier) closeAlert(evalContext *alerting.EvalContext) error {
+	this.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", this.Name)
+
+	bodyJSON := simplejson.New()
+	bodyJSON.Set("apiKey", this.ApiKey)
+	bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10))
+	body, _ := bodyJSON.MarshalJSON()
+
+	cmd := &m.SendWebhookSync{
+		Url:        opsgenieCloseAlertURL,
+		Body:       string(body),
+		HttpMethod: "POST",
+	}
+
+	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
+		this.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body))
+		return err
+	}
+
+	return nil
+}
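
The notifier requires an apiKey and also honours an optional autoClose setting that defaults to true, in which case an alert returning to OK closes the OpsGenie alert created under the same alias. A sketch of constructing it directly from settings; the name and key are illustrative, and the construction mirrors the test below:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/components/simplejson"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/alerting/notifiers"
)

func main() {
	// autoClose is optional; omitting it keeps the default of true.
	settings, _ := simplejson.NewJson([]byte(`{"apiKey": "abcdefgh0123456789", "autoClose": false}`))

	notifier, err := notifiers.NewOpsGenieNotifier(&m.AlertNotification{
		Name:     "ops-on-call",
		Type:     "opsgenie",
		Settings: settings,
	})
	fmt.Println(notifier, err)
}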

+ 52 - 0
pkg/services/alerting/notifiers/opsgenie_test.go

@@ -0,0 +1,52 @@
+package notifiers
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	m "github.com/grafana/grafana/pkg/models"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestOpsGenieNotifier(t *testing.T) {
+	Convey("OpsGenie notifier tests", t, func() {
+
+		Convey("Parsing alert notification from settings", func() {
+			Convey("empty settings should return error", func() {
+				json := `{ }`
+
+				settingsJSON, _ := simplejson.NewJson([]byte(json))
+				model := &m.AlertNotification{
+					Name:     "opsgenie_testing",
+					Type:     "opsgenie",
+					Settings: settingsJSON,
+				}
+
+				_, err := NewOpsGenieNotifier(model)
+				So(err, ShouldNotBeNil)
+			})
+
+			Convey("valid settings should create the notifier", func() {
+				json := `
+				{
+          "apiKey": "abcdefgh0123456789"
+				}`
+
+				settingsJSON, _ := simplejson.NewJson([]byte(json))
+				model := &m.AlertNotification{
+					Name:     "opsgenie_testing",
+					Type:     "opsgenie",
+					Settings: settingsJSON,
+				}
+
+				not, err := NewOpsGenieNotifier(model)
+				opsgenieNotifier := not.(*OpsGenieNotifier)
+
+				So(err, ShouldBeNil)
+				So(opsgenieNotifier.Name, ShouldEqual, "opsgenie_testing")
+				So(opsgenieNotifier.Type, ShouldEqual, "opsgenie")
+				So(opsgenieNotifier.ApiKey, ShouldEqual, "abcdefgh0123456789")
+			})
+		})
+	})
+}

+ 1 - 0
pkg/services/alerting/notifiers/pagerduty.go

@@ -89,6 +89,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
 
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
 		this.log.Error("Failed to send notification to Pagerduty", "error", err, "body", string(body))
+		return err
 	}
 
 	return nil

+ 1 - 0
pkg/services/alerting/notifiers/slack.go

@@ -105,6 +105,7 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error {
 
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
 		this.log.Error("Failed to send slack notification", "error", err, "webhook", this.Name)
+		return err
 	}
 
 	return nil

+ 101 - 0
pkg/services/alerting/notifiers/victorops.go

@@ -0,0 +1,101 @@
+package notifiers
+
+import (
+	"encoding/json"
+	"time"
+
+	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/metrics"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/services/alerting"
+	"github.com/grafana/grafana/pkg/setting"
+)
+
+// AlertStateCritical - Victorops uses "CRITICAL" string to indicate "Alerting" state
+const AlertStateCritical = "CRITICAL"
+
+func init() {
+	alerting.RegisterNotifier("victorops", NewVictoropsNotifier)
+}
+
+// NewVictoropsNotifier creates an instance of VictoropsNotifier that
+// handles posting notifications to Victorops REST API
+func NewVictoropsNotifier(model *models.AlertNotification) (alerting.Notifier, error) {
+	url := model.Settings.Get("url").MustString()
+	if url == "" {
+		return nil, alerting.ValidationError{Reason: "Could not find victorops url property in settings"}
+	}
+
+	return &VictoropsNotifier{
+		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+		URL:          url,
+		log:          log.New("alerting.notifier.victorops"),
+	}, nil
+}
+
+// VictoropsNotifier defines URL property for Victorops REST API
+// and handles notification process by formatting POST body according to
+// Victorops specifications (http://victorops.force.com/knowledgebase/articles/Integration/Alert-Ingestion-API-Documentation/)
+type VictoropsNotifier struct {
+	NotifierBase
+	URL string
+	log log.Logger
+}
+
+// Notify sends notification to Victorops via POST to URL endpoint
+func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error {
+	this.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.Id, "notification", this.Name)
+	metrics.M_Alerting_Notification_Sent_Victorops.Inc(1)
+
+	ruleUrl, err := evalContext.GetRuleUrl()
+	if err != nil {
+		this.log.Error("Failed to get rule link", "error", err)
+		return err
+	}
+
+	fields := make([]map[string]interface{}, 0)
+	fieldLimitCount := 4
+	for index, evt := range evalContext.EvalMatches {
+		fields = append(fields, map[string]interface{}{
+			"title": evt.Metric,
+			"value": evt.Value,
+			"short": true,
+		})
+		if index > fieldLimitCount {
+			break
+		}
+	}
+
+	if evalContext.Error != nil {
+		fields = append(fields, map[string]interface{}{
+			"title": "Error message",
+			"value": evalContext.Error.Error(),
+			"short": false,
+		})
+	}
+
+	messageType := evalContext.Rule.State
+	if evalContext.Rule.State == models.AlertStateAlerting { // translate 'Alerting' to 'CRITICAL' (Victorops analog)
+		messageType = AlertStateCritical
+	}
+
+	body := map[string]interface{}{
+		"message_type":     messageType,
+		"entity_id":        evalContext.Rule.Name,
+		"timestamp":        time.Now().Unix(),
+		"state_start_time": evalContext.StartTime.Unix(),
+		"state_message":    evalContext.Rule.Message + "\n" + ruleUrl,
+		"monitoring_tool":  "Grafana v" + setting.BuildVersion,
+	}
+
+	data, _ := json.Marshal(&body)
+	cmd := &models.SendWebhookSync{Url: this.URL, Body: string(data)}
+
+	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
+		this.log.Error("Failed to send victorops notification", "error", err, "webhook", this.Name)
+		return err
+	}
+
+	return nil
+}
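
For reference, the body assembled in Notify serialises to a flat JSON object. The sketch below reproduces its shape with made-up values; the rule name, message, and version are illustrative, while the keys match the code above:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

func main() {
	// "Alerting" is translated to VictorOps' CRITICAL message type; other states pass through as-is.
	body := map[string]interface{}{
		"message_type":     "CRITICAL",
		"entity_id":        "High memory usage",
		"timestamp":        time.Now().Unix(),
		"state_start_time": time.Now().Add(-30 * time.Second).Unix(),
		"state_message":    "Memory is above 90%\nhttps://grafana.example.com/dashboard/db/mem",
		"monitoring_tool":  "Grafana v4.0.0",
	}

	out, _ := json.MarshalIndent(body, "", "  ")
	fmt.Println(string(out))
}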

+ 52 - 0
pkg/services/alerting/notifiers/victorops_test.go

@@ -0,0 +1,52 @@
+package notifiers
+
+import (
+	"testing"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	m "github.com/grafana/grafana/pkg/models"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func TestVictoropsNotifier(t *testing.T) {
+	Convey("Victorops notifier tests", t, func() {
+
+		Convey("Parsing alert notification from settings", func() {
+			Convey("empty settings should return error", func() {
+				json := `{ }`
+
+				settingsJSON, _ := simplejson.NewJson([]byte(json))
+				model := &m.AlertNotification{
+					Name:     "victorops_testing",
+					Type:     "victorops",
+					Settings: settingsJSON,
+				}
+
+				_, err := NewVictoropsNotifier(model)
+				So(err, ShouldNotBeNil)
+			})
+
+			Convey("valid settings should create the notifier", func() {
+				json := `
+				{
+          "url": "http://google.com"
+				}`
+
+				settingsJSON, _ := simplejson.NewJson([]byte(json))
+				model := &m.AlertNotification{
+					Name:     "victorops_testing",
+					Type:     "victorops",
+					Settings: settingsJSON,
+				}
+
+				not, err := NewVictoropsNotifier(model)
+				victoropsNotifier := not.(*VictoropsNotifier)
+
+				So(err, ShouldBeNil)
+				So(victoropsNotifier.Name, ShouldEqual, "victorops_testing")
+				So(victoropsNotifier.Type, ShouldEqual, "victorops")
+				So(victoropsNotifier.URL, ShouldEqual, "http://google.com")
+			})
+		})
+	})
+}

+ 5 - 0
pkg/services/alerting/notifiers/webhook.go

@@ -58,6 +58,10 @@ func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error {
 		bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl)
 	}
 
+	if evalContext.Rule.Message != "" {
+		bodyJSON.Set("message", evalContext.Rule.Message)
+	}
+
 	body, _ := bodyJSON.MarshalJSON()
 
 	cmd := &m.SendWebhookSync{
@@ -70,6 +74,7 @@ func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error {
 
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
 		this.log.Error("Failed to send webhook", "error", err, "webhook", this.Name)
+		return err
 	}
 
 	return nil

+ 1 - 1
pkg/services/alerting/reader.go

@@ -59,7 +59,7 @@ func (arr *DefaultRuleReader) Fetch() []*Rule {
 		}
 	}
 
-	metrics.M_Alerting_Active_Alerts.Inc(int64(len(res)))
+	metrics.M_Alerting_Active_Alerts.Update(int64(len(res)))
 	return res
 }
 

+ 3 - 0
pkg/services/alerting/scheduler.go

@@ -39,6 +39,9 @@ func (s *SchedulerImpl) Update(rules []*Rule) {
 
 		offset := ((rule.Frequency * 1000) / int64(len(rules))) * int64(i)
 		job.Offset = int64(math.Floor(float64(offset) / 1000))
+		if job.Offset == 0 { // a zero offset causes division-by-zero panics
+			job.Offset = 1
+		}
 		jobs[rule.Id] = job
 	}
 

+ 12 - 13
pkg/services/notifications/webhook.go

@@ -6,7 +6,6 @@ import (
 	"fmt"
 	"io/ioutil"
 	"net/http"
-	"time"
 
 	"golang.org/x/net/context/ctxhttp"
 
@@ -22,8 +21,10 @@ type Webhook struct {
 	HttpMethod string
 }
 
-var webhookQueue chan *Webhook
-var webhookLog log.Logger
+var (
+	webhookQueue chan *Webhook
+	webhookLog   log.Logger
+)
 
 func initWebhookQueue() {
 	webhookLog = log.New("notifications.webhook")
@@ -47,24 +48,22 @@ func processWebhookQueue() {
 func sendWebRequestSync(ctx context.Context, webhook *Webhook) error {
 	webhookLog.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod)
 
-	client := &http.Client{
-		Timeout: time.Duration(10 * time.Second),
-	}
-
 	if webhook.HttpMethod == "" {
 		webhook.HttpMethod = http.MethodPost
 	}
 
 	request, err := http.NewRequest(webhook.HttpMethod, webhook.Url, bytes.NewReader([]byte(webhook.Body)))
-	if webhook.User != "" && webhook.Password != "" {
-		request.Header.Add("Authorization", util.GetBasicAuthHeader(webhook.User, webhook.Password))
-	}
-
 	if err != nil {
 		return err
 	}
 
-	resp, err := ctxhttp.Do(ctx, client, request)
+	request.Header.Add("Content-Type", "application/json")
+	request.Header.Add("User-Agent", "Grafana")
+	if webhook.User != "" && webhook.Password != "" {
+		request.Header.Add("Authorization", util.GetBasicAuthHeader(webhook.User, webhook.Password))
+	}
+
+	resp, err := ctxhttp.Do(ctx, http.DefaultClient, request)
 	if err != nil {
 		return err
 	}
@@ -73,11 +72,11 @@ func sendWebRequestSync(ctx context.Context, webhook *Webhook) error {
 		return nil
 	}
 
+	defer resp.Body.Close()
 	body, err := ioutil.ReadAll(resp.Body)
 	if err != nil {
 		return err
 	}
-	defer resp.Body.Close()
 
 	webhookLog.Debug("Webhook failed", "statuscode", resp.Status, "body", string(body))
 	return fmt.Errorf("Webhook response status %v", resp.Status)

Some files were not shown because too many files changed in this diff