
Merge branch 'master' into react-panels

Torkel Ödegaard, 7 years ago
Commit 09ad8360ea
100 changed files with 1467 additions and 1063 deletions
  1. .circleci/config.yml (+38 -4)
  2. .gitignore (+1 -0)
  3. CHANGELOG.md (+27 -2)
  4. build.go (+0 -1)
  5. docker/blocks/openldap/Dockerfile (+3 -1)
  6. docker/blocks/openldap/entrypoint.sh (+5 -4)
  7. docker/blocks/openldap/notes.md (+25 -1)
  8. docker/blocks/openldap/prepopulate.sh (+14 -0)
  9. docker/blocks/openldap/prepopulate/1_units.ldif (+9 -0)
  10. docker/blocks/openldap/prepopulate/2_users.ldif (+80 -0)
  11. docker/blocks/openldap/prepopulate/3_groups.ldif (+25 -0)
  12. docker/blocks/openldap/prepopulate/admin.ldif (+0 -10)
  13. docker/blocks/openldap/prepopulate/adminsgroup.ldif (+0 -5)
  14. docker/blocks/openldap/prepopulate/editor.ldif (+0 -10)
  15. docker/blocks/openldap/prepopulate/usersgroup.ldif (+0 -5)
  16. docker/blocks/openldap/prepopulate/viewer.ldif (+0 -9)
  17. docs/sources/features/datasources/mssql.md (+3 -3)
  18. docs/sources/features/datasources/mysql.md (+3 -3)
  19. docs/sources/guides/whats-new-in-v5-2.md (+26 -24)
  20. docs/sources/http_api/admin.md (+15 -15)
  21. docs/sources/http_api/auth.md (+8 -0)
  22. docs/sources/http_api/folder.md (+6 -2)
  23. docs/sources/http_api/org.md (+119 -105)
  24. docs/sources/index.md (+3 -3)
  25. docs/sources/installation/behind_proxy.md (+2 -2)
  26. docs/sources/installation/windows.md (+2 -0)
  27. docs/sources/reference/scripting.md (+20 -30)
  28. docs/versions.json (+2 -1)
  29. latest.json (+2 -2)
  30. package.json (+1 -1)
  31. pkg/api/alerting_test.go (+1 -1)
  32. pkg/api/annotations_test.go (+3 -3)
  33. pkg/api/api.go (+136 -147)
  34. pkg/api/app_routes.go (+1 -1)
  35. pkg/api/common.go (+1 -1)
  36. pkg/api/common_test.go (+2 -2)
  37. pkg/api/dashboard_permission_test.go (+1 -1)
  38. pkg/api/dashboard_test.go (+2 -2)
  39. pkg/api/dtos/index.go (+1 -0)
  40. pkg/api/folder_permission_test.go (+1 -1)
  41. pkg/api/folder_test.go (+2 -2)
  42. pkg/api/frontendsettings.go (+1 -0)
  43. pkg/api/http_server.go (+31 -7)
  44. pkg/api/index.go (+1 -0)
  45. pkg/cmd/grafana-server/main.go (+4 -4)
  46. pkg/cmd/grafana-server/server.go (+2 -2)
  47. pkg/extensions/main.go (+1 -1)
  48. pkg/login/ext_user.go (+17 -1)
  49. pkg/login/ldap.go (+2 -0)
  50. pkg/login/ldap_test.go (+13 -3)
  51. pkg/metrics/metrics.go (+9 -0)
  52. pkg/middleware/auth.go (+6 -0)
  53. pkg/middleware/auth_proxy.go (+8 -12)
  54. pkg/middleware/middleware_test.go (+24 -57)
  55. pkg/models/team_member.go (+1 -0)
  56. pkg/models/user_auth.go (+6 -0)
  57. pkg/registry/registry.go (+27 -3)
  58. pkg/services/alerting/extractor_test.go (+4 -4)
  59. pkg/services/alerting/notifier.go (+4 -1)
  60. pkg/services/sqlstore/migrations/team_mig.go (+1 -0)
  61. pkg/services/sqlstore/sqlstore.go (+7 -0)
  62. pkg/services/sqlstore/team.go (+9 -1)
  63. pkg/setting/setting.go (+4 -3)
  64. pkg/social/github_oauth.go (+1 -0)
  65. pkg/tsdb/elasticsearch/client/search_request_test.go (+9 -9)
  66. pkg/tsdb/mssql/macros.go (+5 -4)
  67. pkg/tsdb/mssql/macros_test.go (+11 -11)
  68. pkg/tsdb/mssql/mssql_test.go (+19 -13)
  69. pkg/tsdb/mysql/macros.go (+5 -4)
  70. pkg/tsdb/mysql/macros_test.go (+11 -11)
  71. pkg/tsdb/mysql/mysql_test.go (+21 -14)
  72. pkg/tsdb/postgres/macros.go (+1 -1)
  73. pkg/tsdb/postgres/macros_test.go (+2 -2)
  74. pkg/tsdb/postgres/postgres_test.go (+21 -13)
  75. public/app/core/config.ts (+16 -2)
  76. public/app/core/directives/value_select_dropdown.ts (+2 -2)
  77. public/app/core/services/context_srv.ts (+0 -4)
  78. public/app/core/specs/table_model.jest.ts (+35 -0)
  79. public/app/core/specs/time_series.jest.ts (+14 -0)
  80. public/app/core/specs/value_select_dropdown.jest.ts (+159 -0)
  81. public/app/core/specs/value_select_dropdown_specs.ts (+0 -171)
  82. public/app/core/table_model.ts (+5 -12)
  83. public/app/features/annotations/specs/annotations_srv.jest.ts (+11 -11)
  84. public/app/features/dashboard/specs/exporter.jest.ts (+1 -3)
  85. public/app/features/dashboard/specs/viewstate_srv.jest.ts (+67 -0)
  86. public/app/features/dashboard/specs/viewstate_srv_specs.ts (+0 -65)
  87. public/app/features/org/partials/team_details.html (+80 -42)
  88. public/app/features/org/team_details_ctrl.ts (+27 -0)
  89. public/app/features/plugins/datasource_srv.ts (+14 -11)
  90. public/app/features/plugins/partials/ds_http_settings.html (+2 -2)
  91. public/app/features/plugins/specs/datasource_srv.jest.ts (+59 -0)
  92. public/app/features/plugins/specs/datasource_srv_specs.ts (+0 -64)
  93. public/app/features/templating/variable_srv.ts (+6 -2)
  94. public/app/partials/login.html (+1 -1)
  95. public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts (+76 -72)
  96. public/app/plugins/datasource/mssql/partials/annotations.editor.html (+3 -3)
  97. public/app/plugins/datasource/mssql/partials/query.editor.html (+3 -3)
  98. public/app/plugins/datasource/mysql/partials/annotations.editor.html (+3 -3)
  99. public/app/plugins/datasource/mysql/partials/query.editor.html (+3 -3)
  100. public/app/plugins/datasource/prometheus/datasource.ts (+2 -2)

+ 38 - 4
.circleci/config.yml

@@ -8,6 +8,9 @@ aliases:
   - &filter-not-release
     tags:
       ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
+  - &filter-only-master
+    branches:
+      only: master
 
 version: 2
 
@@ -91,9 +94,6 @@ jobs:
       - image: circleci/node:8
     steps:
       - checkout
-      - run:
-          name: install yarn
-          command: 'sudo npm install -g yarn --quiet'
       - restore_cache:
           key: dependency-cache-{{ checksum "yarn.lock" }}
       - run:
@@ -163,7 +163,7 @@ jobs:
     steps:
       - checkout
       - run:
-          name: build and package grafana
+          name: build, test and package grafana enterprise
           command: './scripts/build/build_enterprise.sh'
       - run:
           name: sign packages
@@ -171,6 +171,26 @@ jobs:
       - run:
           name: sha-sum packages
           command: 'go run build.go sha-dist'
+      - run:
+          name: move enterprise packages into their own folder
+          command: 'mv dist enterprise-dist'
+      - persist_to_workspace:
+          root: .
+          paths:
+            - enterprise-dist/grafana-enterprise*
+
+  deploy-enterprise-master:
+    docker:
+      - image: circleci/python:2.7-stretch
+    steps:
+      - attach_workspace:
+          at: .
+      - run:
+          name: install awscli
+          command: 'sudo pip install awscli'
+      - run:
+          name: deploy to s3
+          command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/master'
 
   deploy-master:
     docker:
@@ -224,6 +244,8 @@ workflows:
     jobs:
       - build-all:
           filters: *filter-not-release
+      - build-enterprise:
+          filters: *filter-only-master
       - codespell:
           filters: *filter-not-release
       - gometalinter:
@@ -248,6 +270,18 @@ workflows:
           filters:
            branches:
              only: master
+      - deploy-enterprise-master:
+          requires:
+            - build-all
+            - test-backend
+            - test-frontend
+            - codespell
+            - gometalinter
+            - mysql-integration-test
+            - postgres-integration-test
+            - build-enterprise
+          filters: *filter-only-master
+
   release:
     jobs:
       - build-all:

+ 1 - 0
.gitignore

@@ -43,6 +43,7 @@ fig.yml
 docker-compose.yml
 docker-compose.yaml
 /conf/provisioning/**/custom.yaml
+/conf/ldap_dev.toml
 profile.cov
 /grafana
 /local

+ 27 - 2
CHANGELOG.md

@@ -7,8 +7,24 @@
 
 * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
 * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
+* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
+* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
+* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
+* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
+* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
+* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
+* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
 
-# 5.2.0 (unreleased)
+
+# 5.2.1 (2018-06-29)
+
+### Minor
+
+* **Auth Proxy**: Important security fix for whitelist of IP address feature [#12444](https://github.com/grafana/grafana/pull/12444)
+* **UI**: Fix - Grafana footer overlapping page [#12430](https://github.com/grafana/grafana/issues/12430)
+* **Logging**: Errors should be reported before crashing [#12438](https://github.com/grafana/grafana/issues/12438)
+
+# 5.2.0-stable (2018-06-27)
 
 ### Minor
 
@@ -16,6 +32,10 @@
 * **Render**: Enhance error message if phantomjs executable is not found [#11868](https://github.com/grafana/grafana/issues/11868)
 * **Dashboard**: Set correct text in drop down when variable is present in url [#11968](https://github.com/grafana/grafana/issues/11968)
 
+### 5.2.0-beta3 fixes
+
+* **LDAP**: Handle "dn" ldap attribute more gracefully [#12385](https://github.com/grafana/grafana/pull/12385), reverts [#10970](https://github.com/grafana/grafana/pull/10970)
+
 # 5.2.0-beta3 (2018-06-21)
 
 ### Minor
@@ -57,6 +77,7 @@
 ### New Features
 
 * **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95)
+* **Build**: Crosscompile and packages Grafana on arm, windows, linux and darwin [#11920](https://github.com/grafana/grafana/pull/11920), thx [@fg2it](https://github.com/fg2it)
 * **Login**: Change admin password after first login [#11882](https://github.com/grafana/grafana/issues/11882)
 * **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541)
 
@@ -92,6 +113,10 @@
 * **Dashboard list panel**: Search dashboards by folder [#11525](https://github.com/grafana/grafana/issues/11525)
 * **Sidenav**: Always show server admin link in sidenav if grafana admin [#11657](https://github.com/grafana/grafana/issues/11657)
 
+# 5.1.5 (2018-06-27)
+
+* **Docker**: Config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170)
+
 # 5.1.4 (2018-06-19)
 
 * **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343)
@@ -1319,7 +1344,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated
 **New features**
 - [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site
 - [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site
-- [Issue #718](https://github.com/grafana/grafana/issues/718).   Dashboard: When saving a dashboard and another user has made changes in between the user is promted with a warning if he really wants to overwrite the other's changes
+- [Issue #718](https://github.com/grafana/grafana/issues/718).   Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes
 - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views
 - [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data
 - [Issue #171](https://github.com/grafana/grafana/issues/171).   Panel: Different time periods, panels can override dashboard relative time and/or add a time shift

+ 0 - 1
build.go

@@ -465,7 +465,6 @@ func ldflags() string {
 	b.WriteString(fmt.Sprintf(" -X main.version=%s", version))
 	b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha()))
 	b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp()))
-	b.WriteString(fmt.Sprintf(" -X main.enterprise=%t", enterprise))
 	return b.String()
 }
 

+ 3 - 1
docker/blocks/openldap/Dockerfile

@@ -8,7 +8,8 @@ ENV OPENLDAP_VERSION 2.4.40
 
 RUN apt-get update && \
     DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
-        slapd=${OPENLDAP_VERSION}* && \
+        slapd=${OPENLDAP_VERSION}* \
+        ldap-utils && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
 
@@ -22,6 +23,7 @@ COPY modules/ /etc/ldap.dist/modules
 COPY prepopulate/ /etc/ldap.dist/prepopulate
 
 COPY entrypoint.sh /entrypoint.sh
+COPY prepopulate.sh /prepopulate.sh
 
 ENTRYPOINT ["/entrypoint.sh"]
 

+ 5 - 4
docker/blocks/openldap/entrypoint.sh

@@ -76,13 +76,14 @@ EOF
         IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES); unset IFS
 
         for module in "${modules[@]}"; do
-             slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1
+          echo "Adding module ${module}"
+          slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1
         done
     fi
 
-    for file in `ls /etc/ldap/prepopulate/*.ldif`; do
-        slapadd -F /etc/ldap/slapd.d -l "$file"
-    done
+    # This needs to run in background
+    # Will prepopulate entries after ldap daemon has started
+    ./prepopulate.sh &
 
     chown -R openldap:openldap /etc/ldap/slapd.d/ /var/lib/ldap/ /var/run/slapd/
 else

+ 25 - 1
docker/blocks/openldap/notes.md

@@ -1,6 +1,6 @@
 # Notes on OpenLdap Docker Block
 
-Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database. 
+Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database.
 
 The ldif files add three users, `ldapviewer`, `ldapeditor` and `ldapadmin`. Two groups, `admins` and `users`, are added that correspond with the group mappings in the default conf/ldap.toml. `ldapadmin` is a member of `admins` and `ldapeditor` is a member of `users`.
 
@@ -22,3 +22,27 @@ enabled = true
 config_file = conf/ldap.toml
 ; allow_sign_up = true
 ```
+
+Test groups & users
+
+admins
+  ldap-admin
+  ldap-torkel
+  ldap-daniel
+backend
+  ldap-carl
+  ldap-torkel
+  ldap-leo
+frontend
+  ldap-torkel
+  ldap-tobias
+  ldap-daniel
+editors
+  ldap-editors
+
+
+no groups
+  ldap-viewer
+
+
+

+ 14 - 0
docker/blocks/openldap/prepopulate.sh

@@ -0,0 +1,14 @@
+#!/bin/bash
+
+echo "Pre-populating ldap entries, first waiting for ldap to start"
+
+sleep 3
+
+adminUserDn="cn=admin,dc=grafana,dc=org"
+adminPassword="grafana"
+
+for file in `ls /etc/ldap/prepopulate/*.ldif`; do
+  ldapadd -x -D $adminUserDn -w $adminPassword -f "$file"
+done
+
+

+ 9 - 0
docker/blocks/openldap/prepopulate/1_units.ldif

@@ -0,0 +1,9 @@
+dn: ou=groups,dc=grafana,dc=org
+ou: Groups
+objectclass: top
+objectclass: organizationalUnit
+
+dn: ou=users,dc=grafana,dc=org
+ou: Users
+objectclass: top
+objectclass: organizationalUnit

+ 80 - 0
docker/blocks/openldap/prepopulate/2_users.ldif

@@ -0,0 +1,80 @@
+# ldap-admin
+dn: cn=ldap-admin,ou=users,dc=grafana,dc=org
+mail: ldap-admin@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-admin
+cn: ldap-admin
+
+dn: cn=ldap-editor,ou=users,dc=grafana,dc=org
+mail: ldap-editor@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-editor
+cn: ldap-editor
+
+dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org
+mail: ldap-viewer@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-viewer
+cn: ldap-viewer
+
+dn: cn=ldap-carl,ou=users,dc=grafana,dc=org
+mail: ldap-carl@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-carl
+cn: ldap-carl
+
+dn: cn=ldap-daniel,ou=users,dc=grafana,dc=org
+mail: ldap-daniel@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-daniel
+cn: ldap-daniel
+
+dn: cn=ldap-leo,ou=users,dc=grafana,dc=org
+mail: ldap-leo@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-leo
+cn: ldap-leo
+
+dn: cn=ldap-tobias,ou=users,dc=grafana,dc=org
+mail: ldap-tobias@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-tobias
+cn: ldap-tobias
+
+dn: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+mail: ldap-torkel@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-torkel
+cn: ldap-torkel

+ 25 - 0
docker/blocks/openldap/prepopulate/3_groups.ldif

@@ -0,0 +1,25 @@
+dn: cn=admins,ou=groups,dc=grafana,dc=org
+cn: admins
+objectClass: groupOfNames
+objectClass: top
+member: cn=ldap-admin,ou=users,dc=grafana,dc=org
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+
+dn: cn=editors,ou=groups,dc=grafana,dc=org
+cn: editors
+objectClass: groupOfNames
+member: cn=ldap-editor,ou=users,dc=grafana,dc=org
+
+dn: cn=backend,ou=groups,dc=grafana,dc=org
+cn: backend
+objectClass: groupOfNames
+member: cn=ldap-carl,ou=users,dc=grafana,dc=org
+member: cn=ldap-leo,ou=users,dc=grafana,dc=org
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+
+dn: cn=frontend,ou=groups,dc=grafana,dc=org
+cn: frontend
+objectClass: groupOfNames
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
+member: cn=ldap-leo,ou=users,dc=grafana,dc=org
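
Taken together, these LDIF files can be checked once the OpenLDAP block is running; a minimal sketch, assuming the bind DN and password from prepopulate.sh and that the container exposes the default port 389 on localhost:

```bash
# Hedged example: list the users and groups the prepopulate step should have created.
# Bind DN and password come from prepopulate.sh; host and port are assumptions for illustration.
ldapsearch -x -H ldap://localhost:389 \
  -D "cn=admin,dc=grafana,dc=org" -w grafana \
  -b "ou=users,dc=grafana,dc=org" "(objectClass=inetOrgPerson)" cn mail

ldapsearch -x -H ldap://localhost:389 \
  -D "cn=admin,dc=grafana,dc=org" -w grafana \
  -b "ou=groups,dc=grafana,dc=org" "(objectClass=groupOfNames)" cn member
```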

+ 0 - 10
docker/blocks/openldap/prepopulate/admin.ldif

@@ -1,10 +0,0 @@
-dn: cn=ldapadmin,dc=grafana,dc=org
-mail: ldapadmin@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapadmin
-cn: ldapadmin
-memberOf: cn=admins,dc=grafana,dc=org

+ 0 - 5
docker/blocks/openldap/prepopulate/adminsgroup.ldif

@@ -1,5 +0,0 @@
-dn: cn=admins,dc=grafana,dc=org
-cn: admins
-member: cn=ldapadmin,dc=grafana,dc=org
-objectClass: groupOfNames
-objectClass: top

+ 0 - 10
docker/blocks/openldap/prepopulate/editor.ldif

@@ -1,10 +0,0 @@
-dn: cn=ldapeditor,dc=grafana,dc=org
-mail: ldapeditor@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapeditor
-cn: ldapeditor
-memberOf: cn=users,dc=grafana,dc=org

+ 0 - 5
docker/blocks/openldap/prepopulate/usersgroup.ldif

@@ -1,5 +0,0 @@
-dn: cn=users,dc=grafana,dc=org
-cn: users
-member: cn=ldapeditor,dc=grafana,dc=org
-objectClass: groupOfNames
-objectClass: top

+ 0 - 9
docker/blocks/openldap/prepopulate/viewer.ldif

@@ -1,9 +0,0 @@
-dn: cn=ldapviewer,dc=grafana,dc=org
-mail: ldapviewer@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapviewer
-cn: ldapviewer

+ 3 - 3
docs/sources/features/datasources/mssql.md

@@ -77,9 +77,9 @@ Macro example | Description
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to rename the column to *time*. For example, *dateColumn as time*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert a DATETIME column type to unix timestamp and rename it to *time*. <br/>For example, *DATEDIFF(second, '1970-01-01', dateColumn) AS time*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. <br/>For example, *dateColumn >= DATEADD(s, 1494410783, '1970-01-01') AND dateColumn <= DATEADD(s, 1494410783, '1970-01-01')*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. <br/>For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
 *$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in GROUP BY clause. Providing a *fillValue* of *NULL* or *floating value* will automatically fill empty series in timerange with that value. <br/>For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*

+ 3 - 3
docs/sources/features/datasources/mysql.md

@@ -60,9 +60,9 @@ Macro example | Description
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn > FROM_UNIXTIME(1494410783) AND dateColumn < FROM_UNIXTIME(1494497183)*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494497183)*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
 *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*

+ 26 - 24
docs/sources/guides/whats-new-in-v5-2.md

@@ -14,14 +14,14 @@ weight = -8
 
 Grafana v5.2 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements.
 
-* [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
-* [Cross platform build support]({{< relref "#cross-platform-build-support" >}}) enables native builds of Grafana for many more platforms!
-* [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
-* [Security]({{< relref "#security" >}}) make your Grafana instance more secure
-* [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
-* [InfluxDB]({{< relref "#influxdb" >}}) with support for a new function
-* [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
-* [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements
+- [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
+- [Native builds for ARM]({{< relref "#native-builds-for-arm" >}}) native builds of Grafana for many more platforms!
+- [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
+- [Security]({{< relref "#security" >}}) make your Grafana instance more secure
+- [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
+- [InfluxDB]({{< relref "#influxdb" >}}) now supports the `mode` function
+- [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
+- [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements
 
 ## Elasticsearch alerting
 
@@ -32,16 +32,18 @@ the most requested features by our community and now it's finally here. Please t
 
 <div class="clearfix"></div>
 
-## Cross platform build support
+## Native builds for ARM
 
-Grafana v5.2 brings an improved build pipeline with cross platform support. This enables native builds of Grafana for ARMv7 (x32), ARM64 (x64),
-MacOS/Darwin (x64) and Windows (x64) in both stable and nightly builds.
+Grafana v5.2 brings an improved build pipeline with cross-platform support. This enables native builds of Grafana for ARMv7 (x32) and ARM64 (x64).
+We've been longing for native ARM build support for ages. With the help from our amazing community this is now finally available.
+Please try it out and let us know what you think.
 
-We've been longing for native ARM build support for a long time. With the help from our amazing community this is now finally available.
+Another great addition with the improved build pipeline is that binaries for MacOS/Darwin (x64) and Windows (x64) are now automatically built and
+published for both stable and nightly builds.
 
 ## Improved Docker image
 
-The Grafana docker image now includes support for Docker secrets which enables you to supply Grafana with configuration through files. More
+The Grafana docker image adds support for Docker secrets which enables you to supply Grafana with configuration through files. More
 information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets).
 
 ## Security
@@ -49,18 +51,18 @@ information in the [Installing using Docker documentation](/installation/docker/
 {{< docs-imagebox img="/img/docs/v52/login_change_password.png" max-width="800px" class="docs-image--right" >}}
 
 Starting from Grafana v5.2, when you login with the administrator account using the default password you'll be presented with a form to change the password.
-By this we hope to encourage users to follow Grafana's best practices and change the default administrator password.
+We hope this encourages users to follow Grafana's best practices and change the default administrator password.
 
 <div class="clearfix"></div>
 
 ## Prometheus
 
 The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
-functions get consistent results, and thus avoid graphs jumping around on reload.
+functions get consistent results, and thus avoids graphs jumping around on reload.
 
 ## InfluxDB
 
-The InfluxDB datasource now includes support for the *mode* function which allows to return the most frequent value in a list of field values.
+The InfluxDB datasource now includes support for the *mode* function which returns the most frequent value in a list of field values.
 
 ## Alerting
 
@@ -72,9 +74,9 @@ By popular demand Grafana now includes support for an alert notification channel
 
 {{< docs-imagebox img="/img/docs/v52/dashboard_save_modal.png" max-width="800px" class="docs-image--right" >}}
 
-Starting from Grafana v5.2 a modified time range or variable are no longer saved by default. To save a modified
-time range or variable you'll need to actively select that when saving a dashboard, see screenshot.
-This should hopefully make it easier to have sane defaults of time and variables in dashboards and make it more explicit
+Starting from Grafana v5.2, a modified time range or variable are no longer saved by default. To save a modified
+time range or variable, you'll need to actively select that when saving a dashboard, see screenshot.
+This should hopefully make it easier to have sane defaults for time and variables in dashboards and make it more explicit
 when you actually want to overwrite those settings.
 
 <div class="clearfix"></div>
@@ -83,13 +85,13 @@ when you actually want to overwrite those settings.
 
 {{< docs-imagebox img="/img/docs/v52/dashboard_import.png" max-width="800px" class="docs-image--right" >}}
 
-Grafana v5.2 adds support for specifying an existing folder or create a new one when importing a dashboard, a long awaited feature since
-Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page have also got some general improvements
+Grafana v5.2 adds support for specifying an existing folder or creating a new one when importing a dashboard - a long-awaited feature since
+Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page has also got some general improvements
 and should now make it more clear if a possible import will overwrite an existing dashboard, or not.
 
-This release also adds some improvements for those users only having editor or admin permissions in certain folders. Now the links to
-*Create Dashboard* and *Import Dashboard* is available in side navigation, dashboard search and manage dashboards/folder page for a
-user that has editor role in an organization or edit permission in at least one folder.
+This release also adds some improvements for those users only having editor or admin permissions in certain folders. The links to
+*Create Dashboard* and *Import Dashboard* are now available in the side navigation, in dashboard search and on the manage dashboards/folder page for a
+user that has editor role in an organization or the edit permission in at least one folder.
 
 <div class="clearfix"></div>
 

+ 15 - 15
docs/sources/http_api/admin.md

@@ -36,11 +36,10 @@ HTTP/1.1 200
 Content-Type: application/json
 
 {
-"DEFAULT":
-{
-  "app_mode":"production"},
-  "analytics":
-  {
+  "DEFAULT": {
+    "app_mode":"production"
+  },
+  "analytics": {
     "google_analytics_ua_id":"",
     "reporting_enabled":"false"
   },
@@ -195,15 +194,16 @@ HTTP/1.1 200
 Content-Type: application/json
 
 {
-  "user_count":2,
-  "org_count":1,
-  "dashboard_count":4,
-  "db_snapshot_count":2,
-  "db_tag_count":6,
-  "data_source_count":1,
-  "playlist_count":1,
-  "starred_db_count":2,
-  "grafana_admin_count":2
+  "users":2,
+  "orgs":1,
+  "dashboards":4,
+  "snapshots":2,
+  "tags":6,
+  "datasources":1,
+  "playlists":1,
+  "stars":2,
+  "alerts":2,
+  "activeUsers":1
 }
 ```
 
@@ -340,4 +340,4 @@ HTTP/1.1 200
 Content-Type: application/json
 
 {state: "new state", message: "alerts pause/un paused", "alertsAffected": 100}
-```
+```
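
For orientation, the renamed keys earlier in this file come from the admin stats endpoint documented here; a hedged curl sketch, with the endpoint path, host and default admin credentials assumed from the surrounding admin API docs (which require basic auth as a Grafana admin):

```bash
# Hedged example: fetch the stats payload whose keys were renamed above.
# Host and the admin:admin credentials are illustrative assumptions.
curl -s -u admin:admin http://localhost:3000/api/admin/stats
```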

+ 8 - 0
docs/sources/http_api/auth.md

@@ -44,6 +44,14 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 
 The `Authorization` header value should be `Bearer <your api key>`.
 
+The API Token can also be passed as a Basic authorization password with the special username `api_key`:
+
+curl example:
+```bash
+?curl http://api_key:eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk@localhost:3000/api/org
+{"id":1,"name":"Main Org."}
+```
+
 # Auth HTTP resources / actions
 
 ## Api Keys
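
The same `api_key` basic-auth mechanism described above can be expressed with curl's `-u` flag instead of embedding the credentials in the URL; a minimal sketch, reusing the host and token from the example in this file:

```bash
# Hedged equivalent of the URL-embedded form above: pass the API token as the basic-auth password.
curl -u api_key:eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk http://localhost:3000/api/org
```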

+ 6 - 2
docs/sources/http_api/folder.md

@@ -19,6 +19,10 @@ The unique identifier (uid) of a folder can be used for uniquely identify folder
 
 The uid can have a maximum length of 40 characters.
 
+## A note about the General folder
+
+The General folder (id=0) is special and is not part of the Folder API which means
+that you cannot use this API for retrieving information about the General folder.
 
 ## Get all folders
 
@@ -273,14 +277,14 @@ Status Codes:
 
 ## Get folder by id
 
-`GET /api/folders/:id`
+`GET /api/folders/id/:id`
 
 Will return the folder identified by id.
 
 **Example Request**:
 
 ```http
-GET /api/folders/1 HTTP/1.1
+GET /api/folders/id/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
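
The corrected `id`-based path can also be exercised from the command line; a hedged sketch, assuming a local instance and an API key with access to the folder (the `GRAFANA_API_KEY` variable is only illustrative):

```bash
# Hedged example of the corrected endpoint path shown above.
curl -H "Authorization: Bearer $GRAFANA_API_KEY" http://localhost:3000/api/folders/id/1
```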

+ 119 - 105
docs/sources/http_api/org.md

@@ -12,7 +12,13 @@ parent = "http_api"
 
 # Organisation API
 
-## Get current Organisation
+The Organisation HTTP API is divided in two resources, `/api/org` (current organisation)
+and `/api/orgs` (admin organisations). One big difference between these are that
+the admin of all organisations API only works with basic authentication, see [Admin Organisations API](#admin-organisations-api) for more information.
+
+## Current Organisation API
+
+### Get current Organisation
 
 `GET /api/org/`
 
@@ -37,20 +43,18 @@ Content-Type: application/json
 }
 ```
 
-## Get Organisation by Id
+### Get all users within the current organisation
 
-`GET /api/orgs/:orgId`
+`GET /api/org/users`
 
 **Example Request**:
 
 ```http
-GET /api/orgs/1 HTTP/1.1
+GET /api/org/users HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1
 
 **Example Response**:
 
@@ -58,33 +62,33 @@ to the request http url, like http://admin:admin@localhost:3000/api/orgs/1
 HTTP/1.1 200
 Content-Type: application/json
 
-{
-  "id":1,
-  "name":"Main Org.",
-  "address":{
-    "address1":"",
-    "address2":"",
-    "city":"",
-    "zipCode":"",
-    "state":"",
-    "country":""
+[
+  {
+    "orgId":1,
+    "userId":1,
+    "email":"admin@mygraf.com",
+    "login":"admin",
+    "role":"Admin"
   }
-}
+]
 ```
-## Get Organisation by Name
 
-`GET /api/orgs/name/:orgName`
+### Updates the given user
+
+`PATCH /api/org/users/:userId`
 
 **Example Request**:
 
 ```http
-GET /api/orgs/name/Main%20Org%2E HTTP/1.1
+PATCH /api/org/users/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+  "role": "Viewer",
+}
 ```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/name/Main%20Org%2E
 
 **Example Response**:
 
@@ -92,39 +96,21 @@ to the request http url, like http://admin:admin@localhost:3000/api/orgs/name/Ma
 HTTP/1.1 200
 Content-Type: application/json
 
-{
-  "id":1,
-  "name":"Main Org.",
-  "address":{
-    "address1":"",
-    "address2":"",
-    "city":"",
-    "zipCode":"",
-    "state":"",
-    "country":""
-  }
-}
+{"message":"Organization user updated"}
 ```
 
-## Create Organisation
+### Delete user in current organisation
 
-`POST /api/orgs`
+`DELETE /api/org/users/:userId`
 
 **Example Request**:
 
 ```http
-POST /api/orgs HTTP/1.1
+DELETE /api/org/users/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
-  "name":"New Org."
-}
 ```
-Note: The api will work in the following two ways
-1) Need to set GF_USERS_ALLOW_ORG_CREATE=true
-2) Set the config users.allow_org_create to true in ini file
 
 **Example Response**:
 
@@ -132,14 +118,10 @@ Note: The api will work in the following two ways
 HTTP/1.1 200
 Content-Type: application/json
 
-{
-  "orgId":"1",
-  "message":"Organization created"
-}
+{"message":"User removed from organization"}
 ```
 
-
-## Update current Organisation
+### Update current Organisation
 
 `PUT /api/org`
 
@@ -165,17 +147,24 @@ Content-Type: application/json
 {"message":"Organization updated"}
 ```
 
-## Get all users within the actual organisation
+### Add a new user to the current organisation
 
-`GET /api/org/users`
+`POST /api/org/users`
+
+Adds a global user to the current organisation.
 
 **Example Request**:
 
 ```http
-GET /api/org/users HTTP/1.1
+POST /api/org/users HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+  "role": "Admin",
+  "loginOrEmail": "admin"
+}
 ```
 
 **Example Response**:
@@ -184,35 +173,29 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 HTTP/1.1 200
 Content-Type: application/json
 
-[
-  {
-    "orgId":1,
-    "userId":1,
-    "email":"admin@mygraf.com",
-    "login":"admin",
-    "role":"Admin"
-  }
-]
+{"message":"User added to organization"}
 ```
 
-## Add a new user to the actual organisation
+## Admin Organisations API
 
-`POST /api/org/users`
+The Admin Organisations HTTP API does not currently work with an API Token. API Tokens are currently
+only linked to an organization and an organization role. They cannot be given the permission of server
+admin, only users can be given that permission. So in order to use these API calls you will have to
+use Basic Auth and the Grafana user must have the Grafana Admin permission (The default admin user
+is called `admin` and has permission to use this API).
+
+### Get Organisation by Id
+
+`GET /api/orgs/:orgId`
 
-Adds a global user to the actual organisation.
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
 
 **Example Request**:
 
 ```http
-POST /api/org/users HTTP/1.1
+GET /api/orgs/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
-  "role": "Admin",
-  "loginOrEmail": "admin"
-}
 ```
 
 **Example Response**:
@@ -221,24 +204,31 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 HTTP/1.1 200
 Content-Type: application/json
 
-{"message":"User added to organization"}
+{
+  "id":1,
+  "name":"Main Org.",
+  "address":{
+    "address1":"",
+    "address2":"",
+    "city":"",
+    "zipCode":"",
+    "state":"",
+    "country":""
+  }
+}
 ```
+### Get Organisation by Name
 
-## Updates the given user
+`GET /api/orgs/name/:orgName`
 
-`PATCH /api/org/users/:userId`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
 
 **Example Request**:
 
 ```http
-PATCH /api/org/users/1 HTTP/1.1
+GET /api/orgs/name/Main%20Org%2E HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
-  "role": "Viewer",
-}
 ```
 
 **Example Response**:
@@ -247,21 +237,40 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 HTTP/1.1 200
 Content-Type: application/json
 
-{"message":"Organization user updated"}
+{
+  "id":1,
+  "name":"Main Org.",
+  "address":{
+    "address1":"",
+    "address2":"",
+    "city":"",
+    "zipCode":"",
+    "state":"",
+    "country":""
+  }
+}
 ```
 
-## Delete user in actual organisation
+### Create Organisation
 
-`DELETE /api/org/users/:userId`
+`POST /api/orgs`
+
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
 
 **Example Request**:
 
 ```http
-DELETE /api/org/users/1 HTTP/1.1
+POST /api/orgs HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+  "name":"New Org."
+}
 ```
+Note: The api will work in the following two ways
+1) Need to set GF_USERS_ALLOW_ORG_CREATE=true
+2) Set the config users.allow_org_create to true in ini file
 
 **Example Response**:
 
@@ -269,22 +278,24 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 HTTP/1.1 200
 Content-Type: application/json
 
-{"message":"User removed from organization"}
+{
+  "orgId":"1",
+  "message":"Organization created"
+}
 ```
 
-# Organisations
-
-## Search all Organisations
+### Search all Organisations
 
 `GET /api/orgs`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 GET /api/orgs HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```
 Note: The api will only work when you pass the admin name and password
 to the request http url, like http://admin:admin@localhost:3000/api/orgs
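
Since this admin-level call only accepts basic authentication, the same request can be made explicitly with curl; a minimal sketch using the default credentials from the note above (change them for a real instance):

```bash
# Hedged example: list all organisations with explicit basic auth instead of URL-embedded credentials.
curl -u admin:admin http://localhost:3000/api/orgs
```
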
@@ -303,11 +314,12 @@ Content-Type: application/json
 ]
 ```
 
-## Update Organisation
+### Update Organisation
 
 `PUT /api/orgs/:orgId`
 
 Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
 
 **Example Request**:
 
@@ -315,7 +327,6 @@ Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented
 PUT /api/orgs/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 
 {
   "name":"Main Org 2."
@@ -331,16 +342,17 @@ Content-Type: application/json
 {"message":"Organization updated"}
 ```
 
-## Delete Organisation
+### Delete Organisation
 
 `DELETE /api/orgs/:orgId`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 DELETE /api/orgs/1 HTTP/1.1
 Accept: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```
 
 **Example Response**:
@@ -352,17 +364,18 @@ Content-Type: application/json
 {"message":"Organization deleted"}
 ```
 
-## Get Users in Organisation
+### Get Users in Organisation
 
 `GET /api/orgs/:orgId/users`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 GET /api/orgs/1/users HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```
 Note: The api will only work when you pass the admin name and password
 to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users
@@ -384,25 +397,24 @@ Content-Type: application/json
 ]
 ```
 
-## Add User in Organisation
+### Add User in Organisation
 
 `POST /api/orgs/:orgId/users`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 POST /api/orgs/1/users HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 
 {
   "loginOrEmail":"user",
   "role":"Viewer"
 }
 ```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users
 
 **Example Response**:
 
@@ -413,17 +425,18 @@ Content-Type: application/json
 {"message":"User added to organization"}
 ```
 
-## Update Users in Organisation
+### Update Users in Organisation
 
 `PATCH /api/orgs/:orgId/users/:userId`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 PATCH /api/orgs/1/users/2 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 
 {
   "role":"Admin"
@@ -439,17 +452,18 @@ Content-Type: application/json
 {"message":"Organization user updated"}
 ```
 
-## Delete User in Organisation
+### Delete User in Organisation
 
 `DELETE /api/orgs/:orgId/users/:userId`
 
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:
 
 ```http
 DELETE /api/orgs/1/users/2 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```
 
 **Example Response**:

+ 3 - 3
docs/sources/index.md

@@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]
         <h4>Provisioning</h4>
         <p>A guide to help you automate your Grafana setup & configuration.</p>
     </a>
-    <a href="{{< relref "guides/whats-new-in-v5.md" >}}" class="nav-cards__item nav-cards__item--guide">
-        <h4>What's new in v5.0</h4>
-        <p>Article on all the new cool features and enhancements in v5.0</p>
+    <a href="{{< relref "guides/whats-new-in-v5-2.md" >}}" class="nav-cards__item nav-cards__item--guide">
+        <h4>What's new in v5.2</h4>
+        <p>Article on all the new cool features and enhancements in v5.2</p>
     </a>
     <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
         <h4>Screencasts</h4>

+ 2 - 2
docs/sources/installation/behind_proxy.md

@@ -26,7 +26,7 @@ Otherwise Grafana will not behave correctly. See example below.
 ## Examples
 Here are some example configurations for running Grafana behind a reverse proxy.
 
-### Grafana configuration (ex http://foo.bar.com)
+### Grafana configuration (ex http://foo.bar)
 
 ```bash
 [server]
@@ -47,7 +47,7 @@ server {
 }
 ```
 
-### Examples with **sub path** (ex http://foo.bar.com/grafana)
+### Examples with **sub path** (ex http://foo.bar/grafana)
 
 #### Grafana configuration with sub path
 ```bash

+ 2 - 0
docs/sources/installation/windows.md

@@ -19,6 +19,8 @@ installation.
 
 ## Configure
 
+**Important:** After you've downloaded the zip file and before extracting it, make sure to open properties for that file (right-click Properties) and check the `unblock` checkbox and `Ok`.
+
 The zip file contains a folder with the current Grafana version. Extract
 this folder to anywhere you want Grafana to run from.  Go into the
 `conf` directory and copy `sample.ini` to `custom.ini`. You should edit

+ 20 - 30
docs/sources/reference/scripting.md

@@ -21,42 +21,32 @@ If you open scripted.js you can see how it reads url parameters from ARGS variab
 ## Example
 
 ```javascript
-var rows = 1;
 var seriesName = 'argName';
 
-if(!_.isUndefined(ARGS.rows)) {
-  rows = parseInt(ARGS.rows, 10);
-}
-
 if(!_.isUndefined(ARGS.name)) {
   seriesName = ARGS.name;
 }
 
-for (var i = 0; i < rows; i++) {
-
-  dashboard.rows.push({
-    title: 'Scripted Graph ' + i,
-    height: '300px',
-    panels: [
-      {
-        title: 'Events',
-        type: 'graph',
-        span: 12,
-        fill: 1,
-        linewidth: 2,
-        targets: [
-          {
-            'target': "randomWalk('" + seriesName + "')"
-          },
-          {
-            'target': "randomWalk('random walk2')"
-          }
-        ],
-      }
-    ]
-  });
-
-}
+dashboard.panels.push({
+  title: 'Events',
+  type: 'graph',
+  fill: 1,
+  linewidth: 2,
+  gridPos: {
+    h: 10,
+    w: 24,
+    x: 0,
+    y: 10,
+  },
+  targets: [
+    {
+      'target': "randomWalk('" + seriesName + "')"
+    },
+    {
+      'target': "randomWalk('random walk2')"
+    }
+  ]
+});
 
 return dashboard;
 ```
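
For context, a scripted dashboard like the one above is loaded through a URL whose query parameters are exposed to the script via `ARGS`; a hedged sketch, with host, port and parameter value chosen only for illustration:

```bash
# Hedged example: open the scripted dashboard; the "name" query parameter becomes ARGS.name in the script above.
xdg-open "http://localhost:3000/dashboard/script/scripted.js?name=my_series"
```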

+ 2 - 1
docs/versions.json

@@ -1,5 +1,6 @@
 [
-  { "version": "v5.1", "path": "/", "archived": false, "current": true },
+  { "version": "v5.2", "path": "/", "archived": false, "current": true },
+  { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },
   { "version": "v4.6", "path": "/v4.6", "archived": true },
   { "version": "v4.5", "path": "/v4.5", "archived": true },

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
-  "stable": "5.1.3",
-  "testing": "5.1.3"
+  "stable": "5.2.0",
+  "testing": "5.2.0"
 }

+ 1 - 1
package.json

@@ -4,7 +4,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "5.2.0-pre1",
+  "version": "5.3.0-pre1",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"

+ 1 - 1
pkg/api/alerting_test.go

@@ -135,7 +135,7 @@ func postAlertScenario(desc string, url string, routePattern string, role m.Role
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.UserId = TestUserID
 			sc.context.OrgId = TestOrgID

+ 3 - 3
pkg/api/annotations_test.go

@@ -223,7 +223,7 @@ func postAnnotationScenario(desc string, url string, routePattern string, role m
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.UserId = TestUserID
 			sc.context.OrgId = TestOrgID
@@ -246,7 +246,7 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m.
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.UserId = TestUserID
 			sc.context.OrgId = TestOrgID
@@ -269,7 +269,7 @@ func deleteAnnotationsScenario(desc string, url string, routePattern string, rol
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.UserId = TestUserID
 			sc.context.OrgId = TestOrgID

+ 136 - 147
pkg/api/api.go

@@ -9,9 +9,7 @@ import (
 	m "github.com/grafana/grafana/pkg/models"
 )
 
-// Register adds http routes
 func (hs *HTTPServer) registerRoutes() {
-	macaronR := hs.macaron
 	reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true})
 	reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
 	reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
@@ -21,15 +19,12 @@ func (hs *HTTPServer) registerRoutes() {
 	quota := middleware.Quota
 	bind := binding.Bind
 
-	// automatically set HEAD for every GET
-	macaronR.SetAutoHead(true)
-
 	r := hs.RouteRegister
 
 	// not logged in views
 	r.Get("/", reqSignedIn, Index)
 	r.Get("/logout", Logout)
-	r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), wrap(LoginPost))
+	r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost))
 	r.Get("/login/:name", quota("session"), OAuthLogin)
 	r.Get("/login", LoginView)
 	r.Get("/invite/:code", Index)
@@ -88,20 +83,20 @@ func (hs *HTTPServer) registerRoutes() {
 
 	// sign up
 	r.Get("/signup", Index)
-	r.Get("/api/user/signup/options", wrap(GetSignUpOptions))
-	r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), wrap(SignUp))
-	r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), wrap(SignUpStep2))
+	r.Get("/api/user/signup/options", Wrap(GetSignUpOptions))
+	r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), Wrap(SignUp))
+	r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2))
 
 	// invited
-	r.Get("/api/user/invite/:code", wrap(GetInviteInfoByCode))
-	r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), wrap(CompleteInvite))
+	r.Get("/api/user/invite/:code", Wrap(GetInviteInfoByCode))
+	r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite))
 
 	// reset password
 	r.Get("/user/password/send-reset-email", Index)
 	r.Get("/user/password/reset", Index)
 
-	r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), wrap(SendResetPasswordEmail))
-	r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), wrap(ResetPassword))
+	r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail))
+	r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword))
 
 	// dashboard snapshots
 	r.Get("/dashboard/snapshot/*", Index)
@@ -111,8 +106,8 @@ func (hs *HTTPServer) registerRoutes() {
 	r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
 	r.Get("/api/snapshot/shared-options/", GetSharingOptions)
 	r.Get("/api/snapshots/:key", GetDashboardSnapshot)
-	r.Get("/api/snapshots-delete/:deleteKey", wrap(DeleteDashboardSnapshotByDeleteKey))
-	r.Delete("/api/snapshots/:key", reqEditorRole, wrap(DeleteDashboardSnapshot))
+	r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
+	r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
 
 	// api renew session based on remember cookie
 	r.Get("/api/login/ping", quota("session"), LoginAPIPing)
@@ -122,138 +117,138 @@ func (hs *HTTPServer) registerRoutes() {
 
 		// user (signed in)
 		apiRoute.Group("/user", func(userRoute routing.RouteRegister) {
-			userRoute.Get("/", wrap(GetSignedInUser))
-			userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser))
-			userRoute.Post("/using/:id", wrap(UserSetUsingOrg))
-			userRoute.Get("/orgs", wrap(GetSignedInUserOrgList))
+			userRoute.Get("/", Wrap(GetSignedInUser))
+			userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
+			userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
+			userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
 
-			userRoute.Post("/stars/dashboard/:id", wrap(StarDashboard))
-			userRoute.Delete("/stars/dashboard/:id", wrap(UnstarDashboard))
+			userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
+			userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))
 
-			userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword))
-			userRoute.Get("/quotas", wrap(GetUserQuotas))
-			userRoute.Put("/helpflags/:id", wrap(SetHelpFlag))
+			userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), Wrap(ChangeUserPassword))
+			userRoute.Get("/quotas", Wrap(GetUserQuotas))
+			userRoute.Put("/helpflags/:id", Wrap(SetHelpFlag))
 			// For dev purpose
-			userRoute.Get("/helpflags/clear", wrap(ClearHelpFlags))
+			userRoute.Get("/helpflags/clear", Wrap(ClearHelpFlags))
 
-			userRoute.Get("/preferences", wrap(GetUserPreferences))
-			userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences))
+			userRoute.Get("/preferences", Wrap(GetUserPreferences))
+			userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateUserPreferences))
 		})
 
 		// users (admin permission required)
 		apiRoute.Group("/users", func(usersRoute routing.RouteRegister) {
-			usersRoute.Get("/", wrap(SearchUsers))
-			usersRoute.Get("/search", wrap(SearchUsersWithPaging))
-			usersRoute.Get("/:id", wrap(GetUserByID))
-			usersRoute.Get("/:id/orgs", wrap(GetUserOrgList))
+			usersRoute.Get("/", Wrap(SearchUsers))
+			usersRoute.Get("/search", Wrap(SearchUsersWithPaging))
+			usersRoute.Get("/:id", Wrap(GetUserByID))
+			usersRoute.Get("/:id/orgs", Wrap(GetUserOrgList))
 			// query parameters /users/lookup?loginOrEmail=admin@example.com
-			usersRoute.Get("/lookup", wrap(GetUserByLoginOrEmail))
-			usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser))
-			usersRoute.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg))
+			usersRoute.Get("/lookup", Wrap(GetUserByLoginOrEmail))
+			usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), Wrap(UpdateUser))
+			usersRoute.Post("/:id/using/:orgId", Wrap(UpdateUserActiveOrg))
 		}, reqGrafanaAdmin)
 
 		// team (admin permission required)
 		apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
-			teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam))
-			teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
-			teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID))
-			teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers))
-			teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember))
-			teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember))
+			teamsRoute.Post("/", bind(m.CreateTeamCommand{}), Wrap(CreateTeam))
+			teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), Wrap(UpdateTeam))
+			teamsRoute.Delete("/:teamId", Wrap(DeleteTeamByID))
+			teamsRoute.Get("/:teamId/members", Wrap(GetTeamMembers))
+			teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), Wrap(AddTeamMember))
+			teamsRoute.Delete("/:teamId/members/:userId", Wrap(RemoveTeamMember))
 		}, reqOrgAdmin)
 
 		// team without requirement of user to be org admin
 		apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
-			teamsRoute.Get("/:teamId", wrap(GetTeamByID))
-			teamsRoute.Get("/search", wrap(SearchTeams))
+			teamsRoute.Get("/:teamId", Wrap(GetTeamByID))
+			teamsRoute.Get("/search", Wrap(SearchTeams))
 		})
 
 		// org information available to all users.
 		apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
-			orgRoute.Get("/", wrap(GetOrgCurrent))
-			orgRoute.Get("/quotas", wrap(GetOrgQuotas))
+			orgRoute.Get("/", Wrap(GetOrgCurrent))
+			orgRoute.Get("/quotas", Wrap(GetOrgQuotas))
 		})
 
 		// current org
 		apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
-			orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent))
-			orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent))
-			orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg))
-			orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg))
-			orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg))
+			orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrgCurrent))
+			orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddressCurrent))
+			orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), Wrap(AddOrgUserToCurrentOrg))
+			orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUserForCurrentOrg))
+			orgRoute.Delete("/users/:userId", Wrap(RemoveOrgUserForCurrentOrg))
 
 			// invites
-			orgRoute.Get("/invites", wrap(GetPendingOrgInvites))
-			orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite))
-			orgRoute.Patch("/invites/:code/revoke", wrap(RevokeInvite))
+			orgRoute.Get("/invites", Wrap(GetPendingOrgInvites))
+			orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), Wrap(AddOrgInvite))
+			orgRoute.Patch("/invites/:code/revoke", Wrap(RevokeInvite))
 
 			// prefs
-			orgRoute.Get("/preferences", wrap(GetOrgPreferences))
-			orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences))
+			orgRoute.Get("/preferences", Wrap(GetOrgPreferences))
+			orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateOrgPreferences))
 		}, reqOrgAdmin)
 
 		// current org without requirement of user to be org admin
 		apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
-			orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg))
+			orgRoute.Get("/users", Wrap(GetOrgUsersForCurrentOrg))
 		})
 
 		// create new org
-		apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg))
+		apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), Wrap(CreateOrg))
 
 		// search all orgs
-		apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs))
+		apiRoute.Get("/orgs", reqGrafanaAdmin, Wrap(SearchOrgs))
 
 		// orgs (admin routes)
 		apiRoute.Group("/orgs/:orgId", func(orgsRoute routing.RouteRegister) {
-			orgsRoute.Get("/", wrap(GetOrgByID))
-			orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg))
-			orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress))
-			orgsRoute.Delete("/", wrap(DeleteOrgByID))
-			orgsRoute.Get("/users", wrap(GetOrgUsers))
-			orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser))
-			orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser))
-			orgsRoute.Delete("/users/:userId", wrap(RemoveOrgUser))
-			orgsRoute.Get("/quotas", wrap(GetOrgQuotas))
-			orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota))
+			orgsRoute.Get("/", Wrap(GetOrgByID))
+			orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrg))
+			orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddress))
+			orgsRoute.Delete("/", Wrap(DeleteOrgByID))
+			orgsRoute.Get("/users", Wrap(GetOrgUsers))
+			orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), Wrap(AddOrgUser))
+			orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUser))
+			orgsRoute.Delete("/users/:userId", Wrap(RemoveOrgUser))
+			orgsRoute.Get("/quotas", Wrap(GetOrgQuotas))
+			orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), Wrap(UpdateOrgQuota))
 		}, reqGrafanaAdmin)
 
 		// orgs (admin routes)
 		apiRoute.Group("/orgs/name/:name", func(orgsRoute routing.RouteRegister) {
-			orgsRoute.Get("/", wrap(GetOrgByName))
+			orgsRoute.Get("/", Wrap(GetOrgByName))
 		}, reqGrafanaAdmin)
 
 		// auth api keys
 		apiRoute.Group("/auth/keys", func(keysRoute routing.RouteRegister) {
-			keysRoute.Get("/", wrap(GetAPIKeys))
-			keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey))
-			keysRoute.Delete("/:id", wrap(DeleteAPIKey))
+			keysRoute.Get("/", Wrap(GetAPIKeys))
+			keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), Wrap(AddAPIKey))
+			keysRoute.Delete("/:id", Wrap(DeleteAPIKey))
 		}, reqOrgAdmin)
 
 		// Preferences
 		apiRoute.Group("/preferences", func(prefRoute routing.RouteRegister) {
-			prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard))
+			prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), Wrap(SetHomeDashboard))
 		})
 
 		// Data sources
 		apiRoute.Group("/datasources", func(datasourceRoute routing.RouteRegister) {
-			datasourceRoute.Get("/", wrap(GetDataSources))
-			datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource))
-			datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
-			datasourceRoute.Delete("/:id", wrap(DeleteDataSourceByID))
-			datasourceRoute.Delete("/name/:name", wrap(DeleteDataSourceByName))
-			datasourceRoute.Get("/:id", wrap(GetDataSourceByID))
-			datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName))
+			datasourceRoute.Get("/", Wrap(GetDataSources))
+			datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), Wrap(AddDataSource))
+			datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), Wrap(UpdateDataSource))
+			datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceByID))
+			datasourceRoute.Delete("/name/:name", Wrap(DeleteDataSourceByName))
+			datasourceRoute.Get("/:id", Wrap(GetDataSourceByID))
+			datasourceRoute.Get("/name/:name", Wrap(GetDataSourceByName))
 		}, reqOrgAdmin)
 
-		apiRoute.Get("/datasources/id/:name", wrap(GetDataSourceIDByName), reqSignedIn)
+		apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIDByName), reqSignedIn)
 
-		apiRoute.Get("/plugins", wrap(GetPluginList))
-		apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID))
-		apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown))
+		apiRoute.Get("/plugins", Wrap(GetPluginList))
+		apiRoute.Get("/plugins/:pluginId/settings", Wrap(GetPluginSettingByID))
+		apiRoute.Get("/plugins/:pluginId/markdown/:name", Wrap(GetPluginMarkdown))
 
 		apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
-			pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
-			pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
+			pluginRoute.Get("/:pluginId/dashboards/", Wrap(GetPluginDashboards))
+			pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), Wrap(UpdatePluginSetting))
 		}, reqOrgAdmin)
 
 		apiRoute.Get("/frontend/settings/", GetFrontendSettings)
@@ -262,106 +257,106 @@ func (hs *HTTPServer) registerRoutes() {
 
 		// Folders
 		apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) {
-			folderRoute.Get("/", wrap(GetFolders))
-			folderRoute.Get("/id/:id", wrap(GetFolderByID))
-			folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder))
+			folderRoute.Get("/", Wrap(GetFolders))
+			folderRoute.Get("/id/:id", Wrap(GetFolderByID))
+			folderRoute.Post("/", bind(m.CreateFolderCommand{}), Wrap(CreateFolder))
 
 			folderRoute.Group("/:uid", func(folderUidRoute routing.RouteRegister) {
-				folderUidRoute.Get("/", wrap(GetFolderByUID))
-				folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder))
-				folderUidRoute.Delete("/", wrap(DeleteFolder))
+				folderUidRoute.Get("/", Wrap(GetFolderByUID))
+				folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), Wrap(UpdateFolder))
+				folderUidRoute.Delete("/", Wrap(DeleteFolder))
 
 				folderUidRoute.Group("/permissions", func(folderPermissionRoute routing.RouteRegister) {
-					folderPermissionRoute.Get("/", wrap(GetFolderPermissionList))
-					folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions))
+					folderPermissionRoute.Get("/", Wrap(GetFolderPermissionList))
+					folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateFolderPermissions))
 				})
 			})
 		})
 
 		// Dashboard
 		apiRoute.Group("/dashboards", func(dashboardRoute routing.RouteRegister) {
-			dashboardRoute.Get("/uid/:uid", wrap(GetDashboard))
-			dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID))
+			dashboardRoute.Get("/uid/:uid", Wrap(GetDashboard))
+			dashboardRoute.Delete("/uid/:uid", Wrap(DeleteDashboardByUID))
 
-			dashboardRoute.Get("/db/:slug", wrap(GetDashboard))
-			dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard))
+			dashboardRoute.Get("/db/:slug", Wrap(GetDashboard))
+			dashboardRoute.Delete("/db/:slug", Wrap(DeleteDashboard))
 
-			dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff))
+			dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), Wrap(CalculateDashboardDiff))
 
-			dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard))
-			dashboardRoute.Get("/home", wrap(GetHomeDashboard))
+			dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), Wrap(PostDashboard))
+			dashboardRoute.Get("/home", Wrap(GetHomeDashboard))
 			dashboardRoute.Get("/tags", GetDashboardTags)
-			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard))
+			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), Wrap(ImportDashboard))
 
 			dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) {
-				dashIdRoute.Get("/versions", wrap(GetDashboardVersions))
-				dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion))
-				dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))
+				dashIdRoute.Get("/versions", Wrap(GetDashboardVersions))
+				dashIdRoute.Get("/versions/:id", Wrap(GetDashboardVersion))
+				dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), Wrap(RestoreDashboardVersion))
 
 				dashIdRoute.Group("/permissions", func(dashboardPermissionRoute routing.RouteRegister) {
-					dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList))
-					dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions))
+					dashboardPermissionRoute.Get("/", Wrap(GetDashboardPermissionList))
+					dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateDashboardPermissions))
 				})
 			})
 		})
 
 		// Dashboard snapshots
 		apiRoute.Group("/dashboard/snapshots", func(dashboardRoute routing.RouteRegister) {
-			dashboardRoute.Get("/", wrap(SearchDashboardSnapshots))
+			dashboardRoute.Get("/", Wrap(SearchDashboardSnapshots))
 		})
 
 		// Playlist
 		apiRoute.Group("/playlists", func(playlistRoute routing.RouteRegister) {
-			playlistRoute.Get("/", wrap(SearchPlaylists))
-			playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist))
-			playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems))
-			playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards))
-			playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist))
-			playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist))
-			playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist))
+			playlistRoute.Get("/", Wrap(SearchPlaylists))
+			playlistRoute.Get("/:id", ValidateOrgPlaylist, Wrap(GetPlaylist))
+			playlistRoute.Get("/:id/items", ValidateOrgPlaylist, Wrap(GetPlaylistItems))
+			playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, Wrap(GetPlaylistDashboards))
+			playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, Wrap(DeletePlaylist))
+			playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, Wrap(UpdatePlaylist))
+			playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), Wrap(CreatePlaylist))
 		})
 
 		// Search
 		apiRoute.Get("/search/", Search)
 
 		// metrics
-		apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics))
-		apiRoute.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios))
-		apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData))
-		apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk))
+		apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics))
+		apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios))
+		apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData))
+		apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk))
 
 		apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
-			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
-			alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
-			alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
-			alertsRoute.Get("/", wrap(GetAlerts))
-			alertsRoute.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))
+			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), Wrap(AlertTest))
+			alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), Wrap(PauseAlert))
+			alertsRoute.Get("/:alertId", ValidateOrgAlert, Wrap(GetAlert))
+			alertsRoute.Get("/", Wrap(GetAlerts))
+			alertsRoute.Get("/states-for-dashboard", Wrap(GetAlertStatesForDashboard))
 		})
 
-		apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications))
-		apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers))
+		apiRoute.Get("/alert-notifications", Wrap(GetAlertNotifications))
+		apiRoute.Get("/alert-notifiers", Wrap(GetAlertNotifiers))
 
 		apiRoute.Group("/alert-notifications", func(alertNotifications routing.RouteRegister) {
-			alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest))
-			alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification))
-			alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
-			alertNotifications.Get("/:notificationId", wrap(GetAlertNotificationByID))
-			alertNotifications.Delete("/:notificationId", wrap(DeleteAlertNotification))
+			alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), Wrap(NotificationTest))
+			alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), Wrap(CreateAlertNotification))
+			alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), Wrap(UpdateAlertNotification))
+			alertNotifications.Get("/:notificationId", Wrap(GetAlertNotificationByID))
+			alertNotifications.Delete("/:notificationId", Wrap(DeleteAlertNotification))
 		}, reqEditorRole)
 
-		apiRoute.Get("/annotations", wrap(GetAnnotations))
-		apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations))
+		apiRoute.Get("/annotations", Wrap(GetAnnotations))
+		apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), Wrap(DeleteAnnotations))
 
 		apiRoute.Group("/annotations", func(annotationsRoute routing.RouteRegister) {
-			annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation))
-			annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID))
-			annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation))
-			annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion))
-			annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation))
+			annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), Wrap(PostAnnotation))
+			annotationsRoute.Delete("/:annotationId", Wrap(DeleteAnnotationByID))
+			annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), Wrap(UpdateAnnotation))
+			annotationsRoute.Delete("/region/:regionId", Wrap(DeleteAnnotationRegion))
+			annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), Wrap(PostGraphiteAnnotation))
 		})
 
 		// error test
-		r.Get("/metrics/error", wrap(GenerateError))
+		r.Get("/metrics/error", Wrap(GenerateError))
 
 	}, reqSignedIn)
 
@@ -372,10 +367,10 @@ func (hs *HTTPServer) registerRoutes() {
 		adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword)
 		adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions)
 		adminRoute.Delete("/users/:id", AdminDeleteUser)
-		adminRoute.Get("/users/:id/quotas", wrap(GetUserQuotas))
-		adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota))
+		adminRoute.Get("/users/:id/quotas", Wrap(GetUserQuotas))
+		adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), Wrap(UpdateUserQuota))
 		adminRoute.Get("/stats", AdminGetStats)
-		adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts))
+		adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), Wrap(PauseAllAlerts))
 	}, reqGrafanaAdmin)
 
 	// rendering
@@ -393,10 +388,4 @@ func (hs *HTTPServer) registerRoutes() {
 
 	// streams
 	//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
-
-	r.Register(macaronR)
-
-	InitAppPluginRoutes(macaronR)
-
-	macaronR.NotFound(NotFoundHandler)
 }
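
An illustrative sketch, not part of this changeset: every registration above now goes through the exported Wrap (defined in pkg/api/common.go further down), which adapts a func(*m.ReqContext) Response into a macaron.Handler. Handler and constructor names here are hypothetical.

// Sketch only: exampleHandler and exampleResponse are hypothetical.
func exampleHandler(c *m.ReqContext) Response {
	return exampleResponse() // any value implementing the Response interface
}

// e.g. inside registerRoutes:
//   apiRoute.Get("/example", Wrap(exampleHandler))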

+ 1 - 1
pkg/api/app_routes.go

@@ -18,7 +18,7 @@ import (
 
 var pluginProxyTransport *http.Transport
 
-func InitAppPluginRoutes(r *macaron.Macaron) {
+func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) {
 	pluginProxyTransport = &http.Transport{
 		TLSClientConfig: &tls.Config{
 			InsecureSkipVerify: setting.PluginAppsSkipVerifyTLS,

+ 1 - 1
pkg/api/common.go

@@ -30,7 +30,7 @@ type NormalResponse struct {
 	err        error
 }
 
-func wrap(action interface{}) macaron.Handler {
+func Wrap(action interface{}) macaron.Handler {
 
 	return func(c *m.ReqContext) {
 		var res Response

+ 2 - 2
pkg/api/common_test.go

@@ -23,7 +23,7 @@ func loggedInUserScenarioWithRole(desc string, method string, url string, routeP
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.UserId = TestUserID
 			sc.context.OrgId = TestOrgID
@@ -51,7 +51,7 @@ func anonymousUserScenario(desc string, method string, url string, routePattern
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			if sc.handlerFunc != nil {
 				return sc.handlerFunc(sc.context)

+ 1 - 1
pkg/api/dashboard_permission_test.go

@@ -194,7 +194,7 @@ func updateDashboardPermissionScenario(desc string, url string, routePattern str
 
 		sc := setupScenarioContext(url)
 
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.OrgId = TestOrgID
 			sc.context.UserId = TestUserID

+ 2 - 2
pkg/api/dashboard_test.go

@@ -882,7 +882,7 @@ func postDashboardScenario(desc string, url string, routePattern string, mock *d
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{OrgId: cmd.OrgId, UserId: cmd.UserId}
 
@@ -907,7 +907,7 @@ func postDiffScenario(desc string, url string, routePattern string, cmd dtos.Cal
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{
 				OrgId:  TestOrgID,

+ 1 - 0
pkg/api/dtos/index.go

@@ -13,6 +13,7 @@ type IndexViewData struct {
 	Theme                   string
 	NewGrafanaVersionExists bool
 	NewGrafanaVersion       string
+	AppName                 string
 }
 
 type PluginCss struct {

+ 1 - 1
pkg/api/folder_permission_test.go

@@ -226,7 +226,7 @@ func updateFolderPermissionScenario(desc string, url string, routePattern string
 
 		sc := setupScenarioContext(url)
 
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.OrgId = TestOrgID
 			sc.context.UserId = TestUserID

+ 2 - 2
pkg/api/folder_test.go

@@ -152,7 +152,7 @@ func createFolderScenario(desc string, url string, routePattern string, mock *fa
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID}
 
@@ -181,7 +181,7 @@ func updateFolderScenario(desc string, url string, routePattern string, mock *fa
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID}
 

+ 1 - 0
pkg/api/frontendsettings.go

@@ -153,6 +153,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) {
 			"latestVersion": plugins.GrafanaLatestVersion,
 			"hasUpdate":     plugins.GrafanaHasUpdate,
 			"env":           setting.Env,
+			"isEnterprise":  setting.IsEnterprise,
 		},
 	}
 

+ 31 - 7
pkg/api/http_server.go

@@ -33,7 +33,11 @@ import (
 )
 
 func init() {
-	registry.RegisterService(&HTTPServer{})
+	registry.Register(&registry.Descriptor{
+		Name:         "HTTPServer",
+		Instance:     &HTTPServer{},
+		InitPriority: registry.High,
+	})
 }
 
 type HTTPServer struct {
@@ -54,6 +58,10 @@ func (hs *HTTPServer) Init() error {
 	hs.log = log.New("http.server")
 	hs.cache = gocache.New(5*time.Minute, 10*time.Minute)
 
+	hs.streamManager = live.NewStreamManager()
+	hs.macaron = hs.newMacaron()
+	hs.registerRoutes()
+
 	return nil
 }
 
@@ -61,10 +69,8 @@ func (hs *HTTPServer) Run(ctx context.Context) error {
 	var err error
 
 	hs.context = ctx
-	hs.streamManager = live.NewStreamManager()
-	hs.macaron = hs.newMacaron()
-	hs.registerRoutes()
 
+	hs.applyRoutes()
 	hs.streamManager.Run(ctx)
 
 	listenAddr := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
@@ -164,6 +170,26 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
 	macaron.Env = setting.Env
 	m := macaron.New()
 
+	// automatically set HEAD for every GET
+	m.SetAutoHead(true)
+
+	return m
+}
+
+func (hs *HTTPServer) applyRoutes() {
+	// start with middlewares & static routes
+	hs.addMiddlewaresAndStaticRoutes()
+	// then add view routes & api routes
+	hs.RouteRegister.Register(hs.macaron)
+	// then custom app proxy routes
+	hs.initAppPluginRoutes(hs.macaron)
+	// lastly not found route
+	hs.macaron.NotFound(NotFoundHandler)
+}
+
+func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
+	m := hs.macaron
+
 	m.Use(middleware.Logger())
 
 	if setting.EnableGzip {
@@ -175,7 +201,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
 	for _, route := range plugins.StaticRoutes {
 		pluginRoute := path.Join("/public/plugins/", route.PluginId)
 		hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)
-		hs.mapStatic(m, route.Directory, "", pluginRoute)
+		hs.mapStatic(hs.macaron, route.Directory, "", pluginRoute)
 	}
 
 	hs.mapStatic(m, setting.StaticRootPath, "build", "public/build")
@@ -204,8 +230,6 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
 	}
 
 	m.Use(middleware.AddDefaultResponseHeaders())
-
-	return m
 }
 
 func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {

+ 1 - 0
pkg/api/index.go

@@ -76,6 +76,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
 		BuildCommit:             setting.BuildCommit,
 		NewGrafanaVersion:       plugins.GrafanaLatestVersion,
 		NewGrafanaVersionExists: plugins.GrafanaHasUpdate,
+		AppName:                 setting.ApplicationName,
 	}
 
 	if setting.DisableGravatar {

+ 4 - 4
pkg/cmd/grafana-server/main.go

@@ -18,7 +18,7 @@ import (
 	"github.com/grafana/grafana/pkg/metrics"
 	"github.com/grafana/grafana/pkg/setting"
 
-	_ "github.com/grafana/grafana/pkg/extensions"
+	extensions "github.com/grafana/grafana/pkg/extensions"
 	_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
 	_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
 	_ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
@@ -35,7 +35,6 @@ import (
 var version = "5.0.0"
 var commit = "NA"
 var buildstamp string
-var enterprise string
 
 var configFile = flag.String("config", "", "path to config file")
 var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory")
@@ -78,7 +77,7 @@ func main() {
 	setting.BuildVersion = version
 	setting.BuildCommit = commit
 	setting.BuildStamp = buildstampInt64
-	setting.Enterprise, _ = strconv.ParseBool(enterprise)
+	setting.IsEnterprise = extensions.IsEnterprise
 
 	metrics.M_Grafana_Version.WithLabelValues(version).Set(1)
 
@@ -88,10 +87,11 @@ func main() {
 
 	err := server.Run()
 
+	code := server.Exit(err)
 	trace.Stop()
 	log.Close()
 
-	server.Exit(err)
+	os.Exit(code)
 }
 
 func listenToSystemSignals(server *GrafanaServerImpl) {

+ 2 - 2
pkg/cmd/grafana-server/server.go

@@ -175,7 +175,7 @@ func (g *GrafanaServerImpl) Shutdown(reason string) {
 	g.childRoutines.Wait()
 }
 
-func (g *GrafanaServerImpl) Exit(reason error) {
+func (g *GrafanaServerImpl) Exit(reason error) int {
 	// default exit code is 1
 	code := 1
 
@@ -185,7 +185,7 @@ func (g *GrafanaServerImpl) Exit(reason error) {
 	}
 
 	g.log.Error("Server shutdown", "reason", reason)
-	os.Exit(code)
+	return code
 }
 
 func (g *GrafanaServerImpl) writePIDFile() {

+ 1 - 1
pkg/extensions/main.go

@@ -1,3 +1,3 @@
 package extensions
 
-import _ "github.com/pkg/errors"
+var IsEnterprise bool = false
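
A hedged aside, not part of this changeset: main.go above now reads extensions.IsEnterprise instead of the ldflags-injected enterprise string it removes. Presumably an enterprise build ships its own pkg/extensions that flips the flag; a hypothetical sketch of such an override:

// Hypothetical enterprise override of pkg/extensions (not in this repository).
package extensions

var IsEnterprise bool = true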

+ 17 - 1
pkg/login/ext_user.go

@@ -21,6 +21,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error {
 		Email:      extUser.Email,
 		Login:      extUser.Login,
 	}
+
 	err := bus.Dispatch(userQuery)
 	if err != m.ErrUserNotFound && err != nil {
 		return err
@@ -66,7 +67,21 @@ func UpsertUser(cmd *m.UpsertUserCommand) error {
 		}
 	}
 
-	return syncOrgRoles(cmd.Result, extUser)
+	err = syncOrgRoles(cmd.Result, extUser)
+	if err != nil {
+		return err
+	}
+
+	err = bus.Dispatch(&m.SyncTeamsCommand{
+		User:         cmd.Result,
+		ExternalUser: extUser,
+	})
+
+	if err == bus.ErrHandlerNotFound {
+		return nil
+	}
+
+	return err
 }
 
 func createUser(extUser *m.ExternalUserInfo) (*m.User, error) {
@@ -76,6 +91,7 @@ func createUser(extUser *m.ExternalUserInfo) (*m.User, error) {
 		Name:         extUser.Name,
 		SkipOrgSetup: len(extUser.OrgRoles) > 0,
 	}
+
 	if err := bus.Dispatch(cmd); err != nil {
 		return nil, err
 	}

+ 2 - 0
pkg/login/ldap.go

@@ -163,6 +163,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
 		Name:       fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName),
 		Login:      ldapUser.Username,
 		Email:      ldapUser.Email,
+		Groups:     ldapUser.MemberOf,
 		OrgRoles:   map[int64]m.RoleType{},
 	}
 
@@ -194,6 +195,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
 		ExternalUser:  extUser,
 		SignupAllowed: setting.LdapAllowSignup,
 	}
+
 	err := bus.Dispatch(userQuery)
 	if err != nil {
 		return nil, err

+ 13 - 3
pkg/login/ldap_test.go

@@ -1,6 +1,7 @@
 package login
 
 import (
+	"context"
 	"crypto/tls"
 	"testing"
 
@@ -14,6 +15,14 @@ func TestLdapAuther(t *testing.T) {
 
 	Convey("When translating ldap user to grafana user", t, func() {
 
+		var user1 = &m.User{}
+
+		bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpsertUserCommand) error {
+			cmd.Result = user1
+			cmd.Result.Login = "torkelo"
+			return nil
+		})
+
 		Convey("Given no ldap group map match", func() {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{{}},
@@ -23,8 +32,6 @@ func TestLdapAuther(t *testing.T) {
 			So(err, ShouldEqual, ErrInvalidCredentials)
 		})
 
-		var user1 = &m.User{}
-
 		ldapAutherScenario("Given wildcard group match", func(sc *scenarioContext) {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{
@@ -96,7 +103,6 @@ func TestLdapAuther(t *testing.T) {
 	})
 
 	Convey("When syncing ldap groups to grafana org roles", t, func() {
-
 		ldapAutherScenario("given no current user orgs", func(sc *scenarioContext) {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{
@@ -322,6 +328,10 @@ func ldapAutherScenario(desc string, fn scenarioFunc) {
 
 		bus.AddHandler("test", UpsertUser)
 
+		bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.SyncTeamsCommand) error {
+			return nil
+		})
+
 		bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error {
 			sc.getUserByAuthInfoQuery = cmd
 			sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login}

+ 9 - 0
pkg/metrics/metrics.go

@@ -334,6 +334,14 @@ func updateTotalStats() {
 
 var usageStatsURL = "https://stats.grafana.org/grafana-usage-report"
 
+func getEdition() string {
+	if setting.IsEnterprise {
+		return "enterprise"
+	} else {
+		return "oss"
+	}
+}
+
 func sendUsageStats() {
 	if !setting.ReportingEnabled {
 		return
@@ -349,6 +357,7 @@ func sendUsageStats() {
 		"metrics": metrics,
 		"os":      runtime.GOOS,
 		"arch":    runtime.GOARCH,
+		"edition": getEdition(),
 	}
 
 	statsQuery := models.GetSystemStatsQuery{}

+ 6 - 0
pkg/middleware/auth.go

@@ -9,6 +9,7 @@ import (
 	m "github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/session"
 	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/util"
 )
 
 type AuthOptions struct {
@@ -34,6 +35,11 @@ func getApiKey(c *m.ReqContext) string {
 		return key
 	}
 
+	username, password, err := util.DecodeBasicAuthHeader(header)
+	if err == nil && username == "api_key" {
+		return password
+	}
+
 	return ""
 }
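
For context (illustrative sketch, not part of this changeset): getApiKey now also accepts the key via HTTP basic auth with the fixed username "api_key", which the new middleware test further below exercises. A minimal client-side sketch using the standard library; the URL and key value are illustrative.

import "net/http"

// newAPIKeyRequest builds a request authenticated with a Grafana API key
// sent as basic auth instead of a Bearer header.
func newAPIKeyRequest(key string) (*http.Request, error) {
	req, err := http.NewRequest("GET", "https://grafana.example.com/api/org", nil)
	if err != nil {
		return nil, err
	}
	// Equivalent to sending "Authorization: Bearer <key>".
	req.SetBasicAuth("api_key", key)
	return req, nil
}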
 

+ 8 - 12
pkg/middleware/auth_proxy.go

@@ -2,6 +2,7 @@ package middleware
 
 import (
 	"fmt"
+	"net"
 	"net/mail"
 	"reflect"
 	"strings"
@@ -28,7 +29,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool {
 	}
 
 	// if auth proxy ip(s) defined, check if request comes from one of those
-	if err := checkAuthenticationProxy(ctx.RemoteAddr(), proxyHeaderValue); err != nil {
+	if err := checkAuthenticationProxy(ctx.Req.RemoteAddr, proxyHeaderValue); err != nil {
 		ctx.Handle(407, "Proxy authentication required", err)
 		return true
 	}
@@ -196,23 +197,18 @@ func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error
 		return nil
 	}
 
-	// Multiple ip addresses? Right-most IP address is the IP address of the most recent proxy
-	if strings.Contains(remoteAddr, ",") {
-		sourceIPs := strings.Split(remoteAddr, ",")
-		remoteAddr = strings.TrimSpace(sourceIPs[len(sourceIPs)-1])
-	}
-
-	remoteAddr = strings.TrimPrefix(remoteAddr, "[")
-	remoteAddr = strings.TrimSuffix(remoteAddr, "]")
-
 	proxies := strings.Split(setting.AuthProxyWhitelist, ",")
+	sourceIP, _, err := net.SplitHostPort(remoteAddr)
+	if err != nil {
+		return err
+	}
 
 	// Compare allowed IP addresses to actual address
 	for _, proxyIP := range proxies {
-		if remoteAddr == strings.TrimSpace(proxyIP) {
+		if sourceIP == strings.TrimSpace(proxyIP) {
 			return nil
 		}
 	}
 
-	return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, remoteAddr)
+	return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP)
 }
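
A small aside on the switch to net.SplitHostPort (sketch, not part of this changeset): besides splitting off the port, it also strips IPv6 brackets, which the removed TrimPrefix/TrimSuffix code handled manually.

package main

import (
	"fmt"
	"net"
)

func main() {
	// The direct peer address from req.RemoteAddr is host:port.
	host, port, err := net.SplitHostPort("[2001::23]:3000")
	fmt.Println(host, port, err) // prints: 2001::23 3000 <nil>
}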

+ 24 - 57
pkg/middleware/middleware_test.go

@@ -82,7 +82,7 @@ func TestMiddlewareContext(t *testing.T) {
 
 			setting.BasicAuthEnabled = true
 			authHeader := util.GetBasicAuthHeader("myUser", "myPass")
-			sc.fakeReq("GET", "/").withAuthoriziationHeader(authHeader).exec()
+			sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec()
 
 			Convey("Should init middleware context with user", func() {
 				So(sc.context.IsSignedIn, ShouldEqual, true)
@@ -128,6 +128,28 @@ func TestMiddlewareContext(t *testing.T) {
 			})
 		})
 
+		middlewareScenario("Valid api key via Basic auth", func(sc *scenarioContext) {
+			keyhash := util.EncodePassword("v5nAwpMafFP6znaS4urhdWDLS5511M42", "asd")
+
+			bus.AddHandler("test", func(query *m.GetApiKeyByNameQuery) error {
+				query.Result = &m.ApiKey{OrgId: 12, Role: m.ROLE_EDITOR, Key: keyhash}
+				return nil
+			})
+
+			authHeader := util.GetBasicAuthHeader("api_key", "eyJrIjoidjVuQXdwTWFmRlA2em5hUzR1cmhkV0RMUzU1MTFNNDIiLCJuIjoiYXNkIiwiaWQiOjF9")
+			sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec()
+
+			Convey("Should return 200", func() {
+				So(sc.resp.Code, ShouldEqual, 200)
+			})
+
+			Convey("Should init middleware context", func() {
+				So(sc.context.IsSignedIn, ShouldEqual, true)
+				So(sc.context.OrgId, ShouldEqual, 12)
+				So(sc.context.OrgRole, ShouldEqual, m.ROLE_EDITOR)
+			})
+		})
+
 		middlewareScenario("UserId in session", func(sc *scenarioContext) {
 
 			sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) {
@@ -293,61 +315,6 @@ func TestMiddlewareContext(t *testing.T) {
 			})
 		})
 
-		middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is not trusted", func(sc *scenarioContext) {
-			setting.AuthProxyEnabled = true
-			setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
-			setting.AuthProxyHeaderProperty = "username"
-			setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
-
-			bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
-				query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
-				return nil
-			})
-
-			bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
-				cmd.Result = &m.User{Id: 33}
-				return nil
-			})
-
-			sc.fakeReq("GET", "/")
-			sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
-			sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.1, 192.168.1.2")
-			sc.exec()
-
-			Convey("should return 407 status code", func() {
-				So(sc.resp.Code, ShouldEqual, 407)
-				So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.1.2 is not from the authentication proxy")
-			})
-		})
-
-		middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is trusted", func(sc *scenarioContext) {
-			setting.AuthProxyEnabled = true
-			setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
-			setting.AuthProxyHeaderProperty = "username"
-			setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
-
-			bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
-				query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
-				return nil
-			})
-
-			bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
-				cmd.Result = &m.User{Id: 33}
-				return nil
-			})
-
-			sc.fakeReq("GET", "/")
-			sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
-			sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.2, 192.168.1.1")
-			sc.exec()
-
-			Convey("Should init context with user info", func() {
-				So(sc.context.IsSignedIn, ShouldBeTrue)
-				So(sc.context.UserId, ShouldEqual, 33)
-				So(sc.context.OrgId, ShouldEqual, 4)
-			})
-		})
-
 		middlewareScenario("When session exists for previous user, create a new session", func(sc *scenarioContext) {
 			setting.AuthProxyEnabled = true
 			setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
@@ -473,7 +440,7 @@ func (sc *scenarioContext) withInvalidApiKey() *scenarioContext {
 	return sc
 }
 
-func (sc *scenarioContext) withAuthoriziationHeader(authHeader string) *scenarioContext {
+func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext {
 	sc.authHeader = authHeader
 	return sc
 }

+ 1 - 0
pkg/models/team_member.go

@@ -42,6 +42,7 @@ type RemoveTeamMemberCommand struct {
 type GetTeamMembersQuery struct {
 	OrgId  int64
 	TeamId int64
+	UserId int64
 	Result []*TeamMemberDTO
 }
 

+ 6 - 0
pkg/models/user_auth.go

@@ -19,6 +19,7 @@ type ExternalUserInfo struct {
 	Email      string
 	Login      string
 	Name       string
+	Groups     []string
 	OrgRoles   map[int64]RoleType
 }
 
@@ -70,3 +71,8 @@ type GetAuthInfoQuery struct {
 
 	Result *UserAuth
 }
+
+type SyncTeamsCommand struct {
+	ExternalUser *ExternalUserInfo
+	User         *User
+}
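
An illustrative sketch, not part of this changeset: login.UpsertUser (above) dispatches SyncTeamsCommand and treats bus.ErrHandlerNotFound as "nothing to do", so team syncing is opt-in. A hypothetical handler registration might look like this.

import (
	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

// Sketch only: registers an optional handler for SyncTeamsCommand.
func registerTeamSyncHandler() {
	bus.AddHandler("teamSync", func(cmd *m.SyncTeamsCommand) error {
		// cmd.ExternalUser.Groups carries e.g. the LDAP memberOf values,
		// cmd.User is the user that was just upserted.
		return nil
	})
}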

+ 27 - 3
pkg/registry/registry.go

@@ -4,6 +4,8 @@ import (
 	"context"
 	"reflect"
 	"sort"
+
+	"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
 )
 
 type Descriptor struct {
@@ -34,23 +36,45 @@ func GetServices() []*Descriptor {
 	return services
 }
 
+// Service interface is the lowest common shape that services
+// are expected to fulfill in order to be started within Grafana.
 type Service interface {
+
+	// Init is called by the Grafana main process, which gives the service
+	// the possibility to do some initial work before it is started. Things
+	// like adding routes and bus handlers should be done in the Init function.
 	Init() error
 }
 
-// Useful for alerting service
+// CanBeDisabled allows a service to decide by itself whether it should
+// be started or not. This is useful for services that might not always
+// be started, e.g. alerting.
+// This will be called after `Init()`.
 type CanBeDisabled interface {
+
+	// IsDisabled should return true if the service should not be started.
 	IsDisabled() bool
 }
 
+// BackgroundService should be implemented for services that have
+// long running tasks in the background.
 type BackgroundService interface {
+	// Run starts the background process of the service after `Init` has been called
+	// on all services. The `context.Context` passed into the function should be used
+	// to subscribe to ctx.Done() so the service can be notified when Grafana shuts down.
 	Run(ctx context.Context) error
 }
 
-type HasInitPriority interface {
-	GetInitPriority() Priority
+// DatabaseMigrator allows the caller to add migrations to
+// the migrator passed as an argument.
+type DatabaseMigrator interface {
+
+	// AddMigration allows the service to add migrations to
+	// the database migrator.
+	AddMigration(mg *migrator.Migrator)
 }
 
+// IsDisabled takes a service and returns true if it is disabled.
 func IsDisabled(srv Service) bool {
 	canBeDisabled, ok := srv.(CanBeDisabled)
 	return ok && canBeDisabled.IsDisabled()

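A hedged sketch, not part of this changeset: combined with the Descriptor-based registration shown in pkg/api/http_server.go above, a service built against these interfaces might look roughly like the following. MyService is hypothetical; registry.High is the only priority value visible in this diff.

import (
	"context"

	"github.com/grafana/grafana/pkg/registry"
)

func init() {
	registry.Register(&registry.Descriptor{
		Name:         "MyService",
		Instance:     &MyService{},
		InitPriority: registry.High,
	})
}

type MyService struct{}

// Init runs during startup, before any BackgroundService is started.
func (s *MyService) Init() error { return nil }

// Run makes MyService a BackgroundService; it should return once ctx is done.
func (s *MyService) Run(ctx context.Context) error {
	<-ctx.Done()
	return ctx.Err()
}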
+ 4 - 4
pkg/services/alerting/extractor_test.go

@@ -50,7 +50,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 		So(err, ShouldBeNil)
 
 		Convey("Extractor should not modify the original json", func() {
-			dashJson, err := simplejson.NewJson([]byte(json))
+			dashJson, err := simplejson.NewJson(json)
 			So(err, ShouldBeNil)
 
 			dash := m.NewDashboardFromJson(dashJson)
@@ -79,7 +79,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 
 		Convey("Parsing and validating dashboard containing graphite alerts", func() {
 
-			dashJson, err := simplejson.NewJson([]byte(json))
+			dashJson, err := simplejson.NewJson(json)
 			So(err, ShouldBeNil)
 
 			dash := m.NewDashboardFromJson(dashJson)
@@ -143,7 +143,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			panelWithoutId, err := ioutil.ReadFile("./test-data/panels-missing-id.json")
 			So(err, ShouldBeNil)
 
-			dashJson, err := simplejson.NewJson([]byte(panelWithoutId))
+			dashJson, err := simplejson.NewJson(panelWithoutId)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
 			extractor := NewDashAlertExtractor(dash, 1)
@@ -159,7 +159,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 			panelWithIdZero, err := ioutil.ReadFile("./test-data/panel-with-id-0.json")
 			So(err, ShouldBeNil)
 
-			dashJson, err := simplejson.NewJson([]byte(panelWithIdZero))
+			dashJson, err := simplejson.NewJson(panelWithIdZero)
 			So(err, ShouldBeNil)
 			dash := m.NewDashboardFromJson(dashJson)
 			extractor := NewDashAlertExtractor(dash, 1)

+ 4 - 1
pkg/services/alerting/notifier.go

@@ -104,7 +104,10 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
 		return err
 	}
 
-	n.log.Info("uploaded", "url", context.ImagePublicUrl)
+	if context.ImagePublicUrl != "" {
+		n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl)
+	}
+
 	return nil
 }
 

+ 1 - 0
pkg/services/sqlstore/migrations/team_mig.go

@@ -50,4 +50,5 @@ func addTeamMigrations(mg *Migrator) {
 	mg.AddMigration("Add column email to team table", NewAddColumnMigration(teamV1, &Column{
 		Name: "email", Type: DB_NVarchar, Nullable: true, Length: 190,
 	}))
+
 }

+ 7 - 0
pkg/services/sqlstore/sqlstore.go

@@ -132,6 +132,13 @@ func (ss *SqlStore) Init() error {
 	migrator := migrator.NewMigrator(x)
 	migrations.AddMigrations(migrator)
 
+	for _, descriptor := range registry.GetServices() {
+		sc, ok := descriptor.Instance.(registry.DatabaseMigrator)
+		if ok {
+			sc.AddMigration(migrator)
+		}
+	}
+
 	if err := migrator.Start(); err != nil {
 		return fmt.Errorf("Migration failed err: %v", err)
 	}

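Continuing the hypothetical MyService from the registry note above (sketch, not part of this changeset): the loop added here lets any registered service contribute schema changes by implementing registry.DatabaseMigrator. The table and columns below are illustrative.

import "github.com/grafana/grafana/pkg/services/sqlstore/migrator"

// AddMigration is called before migrator.Start() runs, so the service's
// tables exist by the time it is initialized.
func (s *MyService) AddMigration(mg *migrator.Migrator) {
	table := migrator.Table{
		Name: "my_service_data",
		Columns: []*migrator.Column{
			{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
			{Name: "name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
		},
	}
	mg.AddMigration("create my_service_data table", migrator.NewAddTableMigration(table))
}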
+ 9 - 1
pkg/services/sqlstore/team.go

@@ -268,7 +268,15 @@ func GetTeamMembers(query *m.GetTeamMembersQuery) error {
 	query.Result = make([]*m.TeamMemberDTO, 0)
 	sess := x.Table("team_member")
 	sess.Join("INNER", "user", fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
-	sess.Where("team_member.org_id=? and team_member.team_id=?", query.OrgId, query.TeamId)
+	if query.OrgId != 0 {
+		sess.Where("team_member.org_id=?", query.OrgId)
+	}
+	if query.TeamId != 0 {
+		sess.Where("team_member.team_id=?", query.TeamId)
+	}
+	if query.UserId != 0 {
+		sess.Where("team_member.user_id=?", query.UserId)
+	}
 	sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login")
 	sess.Asc("user.login", "user.email")
 
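With the filters above made optional, GetTeamMembersQuery can now list a user's team memberships without knowing the team id up front. A minimal sketch, not part of this changeset; the ids are illustrative and the DTO field names follow the columns selected above.

import (
	"fmt"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

func printTeamsForUser(orgID, userID int64) error {
	query := &m.GetTeamMembersQuery{OrgId: orgID, UserId: userID}
	if err := bus.Dispatch(query); err != nil {
		return err
	}
	for _, member := range query.Result {
		fmt.Println(member.TeamId, member.Login)
	}
	return nil
}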

+ 4 - 3
pkg/setting/setting.go

@@ -18,9 +18,10 @@ import (
 
 	"github.com/go-macaron/session"
 
+	"time"
+
 	"github.com/grafana/grafana/pkg/log"
 	"github.com/grafana/grafana/pkg/util"
-	"time"
 )
 
 type Scheme string
@@ -49,7 +50,7 @@ var (
 	BuildVersion    string
 	BuildCommit     string
 	BuildStamp      int64
-	Enterprise      bool
+	IsEnterprise    bool
 	ApplicationName string
 
 	// Paths
@@ -517,7 +518,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
 	Raw = cfg.Raw
 
 	ApplicationName = "Grafana"
-	if Enterprise {
+	if IsEnterprise {
 		ApplicationName += " Enterprise"
 	}
 

+ 1 - 0
pkg/social/github_oauth.go

@@ -213,6 +213,7 @@ func (s *SocialGithub) UserInfo(client *http.Client, token *oauth2.Token) (*Basi
 	userInfo := &BasicUserInfo{
 		Name:  data.Login,
 		Login: data.Login,
+		Id:    fmt.Sprintf("%d", data.Id),
 		Email: data.Email,
 	}
 

+ 9 - 9
pkg/tsdb/elasticsearch/client/search_request_test.go

@@ -32,7 +32,7 @@ func TestSearchRequest(t *testing.T) {
 				Convey("When marshal to JSON should generate correct json", func() {
 					body, err := json.Marshal(sr)
 					So(err, ShouldBeNil)
-					json, err := simplejson.NewJson([]byte(body))
+					json, err := simplejson.NewJson(body)
 					So(err, ShouldBeNil)
 					So(json.Get("size").MustInt(500), ShouldEqual, 0)
 					So(json.Get("sort").Interface(), ShouldBeNil)
@@ -81,7 +81,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 						So(json.Get("size").MustInt(0), ShouldEqual, 200)
 
@@ -124,7 +124,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						scriptFields, err := json.Get("script_fields").Map()
@@ -163,7 +163,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						So(json.Get("aggs").MustMap(), ShouldHaveLength, 2)
@@ -200,7 +200,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						So(json.Get("aggs").MustMap(), ShouldHaveLength, 1)
@@ -251,7 +251,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						topAggOne := json.GetPath("aggs", "1")
@@ -300,7 +300,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						topAgg := json.GetPath("aggs", "1")
@@ -364,7 +364,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						termsAgg := json.GetPath("aggs", "1")
@@ -419,7 +419,7 @@ func TestSearchRequest(t *testing.T) {
 					Convey("When marshal to JSON should generate correct json", func() {
 						body, err := json.Marshal(sr)
 						So(err, ShouldBeNil)
-						json, err := simplejson.NewJson([]byte(body))
+						json, err := simplejson.NewJson(body)
 						So(err, ShouldBeNil)
 
 						scriptFields, err := json.Get("script_fields").Map()

+ 5 - 4
pkg/tsdb/mssql/macros.go

@@ -82,11 +82,12 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -108,7 +109,7 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 				m.Query.Model.Set("fillValue", floatVal)
 			}
 		}
-		return fmt.Sprintf("CAST(ROUND(DATEDIFF(second, '1970-01-01', %s)/%.1f, 0) as bigint)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
+		return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 11 - 11
pkg/tsdb/mssql/macros_test.go

@@ -49,21 +49,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeGroup function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
+				So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
 			})
 
 			Convey("interpolate __timeGroup function with spaces around arguments", func() {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
+				So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
 			})
 
 			Convey("interpolate __timeGroup function with fill (value = NULL)", func() {
@@ -96,14 +96,14 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -137,21 +137,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -185,21 +185,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {

+ 19 - 13
pkg/tsdb/mssql/mssql_test.go

@@ -210,11 +210,12 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -222,9 +223,9 @@ func TestMSSQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -260,7 +261,7 @@ func TestMSSQL(t *testing.T) {
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -268,17 +269,22 @@ func TestMSSQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}
 
+				// check for NULL values inserted by fill
+				So(points[2][0].Valid, ShouldBeFalse)
 				So(points[3][0].Valid, ShouldBeFalse)
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 4; i < 7; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 4; i < 6; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
 					So(aTime, ShouldEqual, dt)
 					dt = dt.Add(5 * time.Minute)
 				}
+
+				So(points[6][0].Valid, ShouldBeFalse)
+
 			})
 
 			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -525,7 +531,7 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -547,7 +553,7 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -924,7 +930,7 @@ func TestMSSQL(t *testing.T) {
 				columns := queryResult.Tables[0].Rows[0]
 
 				//Should be in milliseconds
-				So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 			})
 
 			Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -954,7 +960,7 @@ func TestMSSQL(t *testing.T) {
 				columns := queryResult.Tables[0].Rows[0]
 
 				//Should be in milliseconds
-				So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 			})
 
 			Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {

+ 5 - 4
pkg/tsdb/mysql/macros.go

@@ -77,11 +77,12 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -103,7 +104,7 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 				m.Query.Model.Set("fillValue", floatVal)
 			}
 		}
-		return fmt.Sprintf("cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)", args[0], interval.Seconds(), interval.Seconds()), nil
+		return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 11 - 11
pkg/tsdb/mysql/macros_test.go

@@ -39,7 +39,7 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+				So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
 			})
 
 			Convey("interpolate __timeGroup function with spaces around arguments", func() {
@@ -47,28 +47,28 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+				So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
 			})
 
 			Convey("interpolate __timeFilter function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {

+ 21 - 14
pkg/tsdb/mysql/mysql_test.go

@@ -132,8 +132,8 @@ func TestMySQL(t *testing.T) {
 				So(column[7].(float64), ShouldEqual, 1.11)
 				So(column[8].(float64), ShouldEqual, 2.22)
 				So(*column[9].(*float32), ShouldEqual, 3.33)
-				So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
-				So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
+				So(column[10].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
+				So(column[11].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
 				So(column[12].(string), ShouldEqual, "11:11:11")
 				So(column[13].(int64), ShouldEqual, 2018)
 				So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
@@ -209,11 +209,12 @@ func TestMySQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -221,9 +222,9 @@ func TestMySQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -259,7 +260,7 @@ func TestMySQL(t *testing.T) {
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -267,17 +268,23 @@ func TestMySQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}
 
+				// check for NULL values inserted by fill
+				So(points[2][0].Valid, ShouldBeFalse)
 				So(points[3][0].Valid, ShouldBeFalse)
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 4; i < 7; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 4; i < 6; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
 					So(aTime, ShouldEqual, dt)
 					dt = dt.Add(5 * time.Minute)
 				}
+
+				// check for NULL values inserted by fill
+				So(points[6][0].Valid, ShouldBeFalse)
+
 			})
 
 			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -571,7 +578,7 @@ func TestMySQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -593,7 +600,7 @@ func TestMySQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -810,7 +817,7 @@ func TestMySQL(t *testing.T) {
 				columns := queryResult.Tables[0].Rows[0]
 
 				//Should be in milliseconds
-				So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 			})
 
 			Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {

+ 1 - 1
pkg/tsdb/postgres/macros.go

@@ -109,7 +109,7 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 				m.Query.Model.Set("fillValue", floatVal)
 			}
 		}
-		return fmt.Sprintf("(extract(epoch from %s)/%v)::bigint*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
+		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)

+ 2 - 2
pkg/tsdb/postgres/macros_test.go

@@ -53,7 +53,7 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+				So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
 			})
 
 			Convey("interpolate __timeGroup function with spaces between args", func() {
@@ -61,7 +61,7 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+				So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
 			})
 
 			Convey("interpolate __timeTo function", func() {

+ 21 - 13
pkg/tsdb/postgres/postgres_test.go

@@ -189,21 +189,23 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
 					So(aTime, ShouldEqual, dt)
+					So(aTime.Unix()%300, ShouldEqual, 0)
 					dt = dt.Add(5 * time.Minute)
 				}
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -239,7 +241,7 @@ func TestPostgres(t *testing.T) {
 
 				dt := fromStart
 
-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -247,17 +249,23 @@ func TestPostgres(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}
 
+				// check for NULL values inserted by fill
+				So(points[2][0].Valid, ShouldBeFalse)
 				So(points[3][0].Valid, ShouldBeFalse)
 
-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 4; i < 7; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 4; i < 6; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
 					So(aTime, ShouldEqual, dt)
 					dt = dt.Add(5 * time.Minute)
 				}
+
+				// check for NULL values inserted by fill
+				So(points[6][0].Valid, ShouldBeFalse)
+
 			})
 
 			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -504,7 +512,7 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -526,7 +534,7 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)
 
 				So(len(queryResult.Series), ShouldEqual, 1)
-				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 			})
 
 			Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -713,7 +721,7 @@ func TestPostgres(t *testing.T) {
 				columns := queryResult.Tables[0].Rows[0]
 
 				//Should be in milliseconds
-				So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 			})
 
 			Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -743,7 +751,7 @@ func TestPostgres(t *testing.T) {
 				columns := queryResult.Tables[0].Rows[0]
 
 				//Should be in milliseconds
-				So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 			})
 
 			Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {

+ 16 - 2
public/app/core/config.ts

@@ -1,11 +1,18 @@
 import _ from 'lodash';
 
-class Settings {
+export interface BuildInfo {
+  version: string;
+  commit: string;
+  isEnterprise: boolean;
+  env: string;
+}
+
+export class Settings {
   datasources: any;
   panels: any;
   appSubUrl: string;
   window_title_prefix: string;
-  buildInfo: any;
+  buildInfo: BuildInfo;
   new_panel_title: string;
   bootData: any;
   externalUserMngLinkUrl: string;
@@ -32,7 +39,14 @@ class Settings {
       playlist_timespan: '1m',
       unsaved_changes_warning: true,
       appSubUrl: '',
+      buildInfo: {
+        version: 'v1.0',
+        commit: '1',
+        env: 'production',
+        isEnterprise: false,
+      },
     };
+
     _.extend(this, defaults, options);
   }
 }

+ 2 - 2
public/app/core/directives/value_select_dropdown.ts

@@ -93,7 +93,7 @@ export class ValueSelectDropdownCtrl {
       tagValuesPromise = this.$q.when(tag.values);
     }
 
-    tagValuesPromise.then(values => {
+    return tagValuesPromise.then(values => {
       tag.values = values;
       tag.valuesText = values.join(' + ');
       _.each(this.options, option => {
@@ -132,7 +132,7 @@ export class ValueSelectDropdownCtrl {
     this.highlightIndex = (this.highlightIndex + direction) % this.search.options.length;
   }
 
-  selectValue(option, event, commitChange, excludeOthers) {
+  selectValue(option, event, commitChange?, excludeOthers?) {
     if (!option) {
       return;
     }

+ 0 - 4
public/app/core/services/context_srv.ts

@@ -34,14 +34,10 @@ export class ContextSrv {
   constructor() {
     this.sidemenu = store.getBool('grafana.sidemenu', true);
 
-    if (!config.buildInfo) {
-      config.buildInfo = {};
-    }
     if (!config.bootData) {
       config.bootData = { user: {}, settings: {} };
     }
 
-    this.version = config.buildInfo.version;
     this.user = new User();
     this.isSignedIn = this.user.isSignedIn;
     this.isGrafanaAdmin = this.user.isGrafanaAdmin;

+ 35 - 0
public/app/core/specs/table_model.jest.ts

@@ -44,3 +44,38 @@ describe('when sorting table asc', () => {
     expect(table.rows[2][1]).toBe(15);
   });
 });
+
+describe('when sorting with nulls', () => {
+  var table;
+  var values;
+
+  beforeEach(() => {
+    table = new TableModel();
+    table.columns = [{}, {}];
+    table.rows = [[42, ''], [19, 'a'], [null, 'b'], [0, 'd'], [null, null], [2, 'c'], [0, null], [-8, '']];
+  });
+
+  it('numbers with nulls at end with asc sort', () => {
+    table.sort({ col: 0, desc: false });
+    values = table.rows.map(row => row[0]);
+    expect(values).toEqual([-8, 0, 0, 2, 19, 42, null, null]);
+  });
+
+  it('numbers with nulls at start with desc sort', () => {
+    table.sort({ col: 0, desc: true });
+    values = table.rows.map(row => row[0]);
+    expect(values).toEqual([null, null, 42, 19, 2, 0, 0, -8]);
+  });
+
+  it('strings with nulls at end with asc sort', () => {
+    table.sort({ col: 1, desc: false });
+    values = table.rows.map(row => row[1]);
+    expect(values).toEqual(['', '', 'a', 'b', 'c', 'd', null, null]);
+  });
+
+  it('strings with nulls at start with desc sort', () => {
+    table.sort({ col: 1, desc: true });
+    values = table.rows.map(row => row[1]);
+    expect(values).toEqual([null, null, 'd', 'c', 'b', 'a', '', '']);
+  });
+});

+ 14 - 0
public/app/core/specs/time_series.jest.ts

@@ -119,6 +119,20 @@ describe('TimeSeries', function() {
       series.getFlotPairs('null');
       expect(series.stats.avg).toBe(null);
     });
+
+    it('calculates timeStep', function() {
+      series = new TimeSeries({
+        datapoints: [[null, 1], [null, 2], [null, 3]],
+      });
+      series.getFlotPairs('null');
+      expect(series.stats.timeStep).toBe(1);
+
+      series = new TimeSeries({
+        datapoints: [[0, 1530529290], [0, 1530529305], [0, 1530529320]],
+      });
+      series.getFlotPairs('null');
+      expect(series.stats.timeStep).toBe(15);
+    });
   });
 
   describe('When checking if ms resolution is needed', function() {

+ 159 - 0
public/app/core/specs/value_select_dropdown.jest.ts

@@ -0,0 +1,159 @@
+import 'app/core/directives/value_select_dropdown';
+import { ValueSelectDropdownCtrl } from '../directives/value_select_dropdown';
+import q from 'q';
+
+describe('SelectDropdownCtrl', () => {
+  let tagValuesMap: any = {};
+
+  ValueSelectDropdownCtrl.prototype.onUpdated = jest.fn();
+  let ctrl;
+
+  describe('Given simple variable', () => {
+    beforeEach(() => {
+      ctrl = new ValueSelectDropdownCtrl(q);
+      ctrl.variable = {
+        current: { text: 'hej', value: 'hej' },
+        getValuesForTag: key => {
+          return Promise.resolve(tagValuesMap[key]);
+        },
+      };
+      ctrl.init();
+    });
+
+    it('Should init labelText and linkText', () => {
+      expect(ctrl.linkText).toBe('hej');
+    });
+  });
+
+  describe('Given variable with tags and dropdown is opened', () => {
+    beforeEach(() => {
+      ctrl = new ValueSelectDropdownCtrl(q);
+      ctrl.variable = {
+        current: { text: 'server-1', value: 'server-1' },
+        options: [
+          { text: 'server-1', value: 'server-1', selected: true },
+          { text: 'server-2', value: 'server-2' },
+          { text: 'server-3', value: 'server-3' },
+        ],
+        tags: ['key1', 'key2', 'key3'],
+        getValuesForTag: key => {
+          return Promise.resolve(tagValuesMap[key]);
+        },
+        multi: true,
+      };
+      tagValuesMap.key1 = ['server-1', 'server-3'];
+      tagValuesMap.key2 = ['server-2', 'server-3'];
+      tagValuesMap.key3 = ['server-1', 'server-2', 'server-3'];
+      ctrl.init();
+      ctrl.show();
+    });
+
+    it('should init tags model', () => {
+      expect(ctrl.tags.length).toBe(3);
+      expect(ctrl.tags[0].text).toBe('key1');
+    });
+
+    it('should init options model', () => {
+      expect(ctrl.options.length).toBe(3);
+    });
+
+    it('should init selected values array', () => {
+      expect(ctrl.selectedValues.length).toBe(1);
+    });
+
+    it('should set linkText', () => {
+      expect(ctrl.linkText).toBe('server-1');
+    });
+
+    describe('after additional value is selected', () => {
+      beforeEach(() => {
+        ctrl.selectValue(ctrl.options[2], {});
+        ctrl.commitChanges();
+      });
+
+      it('should update link text', () => {
+        expect(ctrl.linkText).toBe('server-1 + server-3');
+      });
+    });
+
+    describe('When tag is selected', () => {
+      beforeEach(async () => {
+        await ctrl.selectTag(ctrl.tags[0]);
+        ctrl.commitChanges();
+      });
+
+      it('should select tag', () => {
+        expect(ctrl.selectedTags.length).toBe(1);
+      });
+
+      it('should select values', () => {
+        expect(ctrl.options[0].selected).toBe(true);
+        expect(ctrl.options[2].selected).toBe(true);
+      });
+
+      it('link text should not include tag values', () => {
+        expect(ctrl.linkText).toBe('');
+      });
+
+      describe('and then dropdown is opened and closed without changes', () => {
+        beforeEach(() => {
+          ctrl.show();
+          ctrl.commitChanges();
+        });
+
+        it('should still have selected tag', () => {
+          expect(ctrl.selectedTags.length).toBe(1);
+        });
+      });
+
+      describe('and then unselected', () => {
+        beforeEach(async () => {
+          await ctrl.selectTag(ctrl.tags[0]);
+        });
+
+        it('should deselect tag', () => {
+          expect(ctrl.selectedTags.length).toBe(0);
+        });
+      });
+
+      describe('and then value is unselected', () => {
+        beforeEach(() => {
+          ctrl.selectValue(ctrl.options[0], {});
+        });
+
+        it('should deselect tag', () => {
+          expect(ctrl.selectedTags.length).toBe(0);
+        });
+      });
+    });
+  });
+
+  describe('Given variable with selected tags', () => {
+    beforeEach(() => {
+      ctrl = new ValueSelectDropdownCtrl(q);
+      ctrl.variable = {
+        current: {
+          text: 'server-1',
+          value: 'server-1',
+          tags: [{ text: 'key1', selected: true }],
+        },
+        options: [
+          { text: 'server-1', value: 'server-1' },
+          { text: 'server-2', value: 'server-2' },
+          { text: 'server-3', value: 'server-3' },
+        ],
+        tags: ['key1', 'key2', 'key3'],
+        getValuesForTag: key => {
+          return Promise.resolve(tagValuesMap[key]);
+        },
+        multi: true,
+      };
+      ctrl.init();
+      ctrl.show();
+    });
+
+    it('should set tag as selected', () => {
+      expect(ctrl.tags[0].selected).toBe(true);
+    });
+  });
+});

+ 0 - 171
public/app/core/specs/value_select_dropdown_specs.ts

@@ -1,171 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks, sinon } from 'test/lib/common';
-import 'app/core/directives/value_select_dropdown';
-
-describe('SelectDropdownCtrl', function() {
-  var scope;
-  var ctrl;
-  var tagValuesMap: any = {};
-  var rootScope;
-  var q;
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(
-    angularMocks.inject(function($controller, $rootScope, $q, $httpBackend) {
-      rootScope = $rootScope;
-      q = $q;
-      scope = $rootScope.$new();
-      ctrl = $controller('ValueSelectDropdownCtrl', { $scope: scope });
-      ctrl.onUpdated = sinon.spy();
-      $httpBackend.when('GET', /\.html$/).respond('');
-    })
-  );
-
-  describe('Given simple variable', function() {
-    beforeEach(function() {
-      ctrl.variable = {
-        current: { text: 'hej', value: 'hej' },
-        getValuesForTag: function(key) {
-          return q.when(tagValuesMap[key]);
-        },
-      };
-      ctrl.init();
-    });
-
-    it('Should init labelText and linkText', function() {
-      expect(ctrl.linkText).to.be('hej');
-    });
-  });
-
-  describe('Given variable with tags and dropdown is opened', function() {
-    beforeEach(function() {
-      ctrl.variable = {
-        current: { text: 'server-1', value: 'server-1' },
-        options: [
-          { text: 'server-1', value: 'server-1', selected: true },
-          { text: 'server-2', value: 'server-2' },
-          { text: 'server-3', value: 'server-3' },
-        ],
-        tags: ['key1', 'key2', 'key3'],
-        getValuesForTag: function(key) {
-          return q.when(tagValuesMap[key]);
-        },
-        multi: true,
-      };
-      tagValuesMap.key1 = ['server-1', 'server-3'];
-      tagValuesMap.key2 = ['server-2', 'server-3'];
-      tagValuesMap.key3 = ['server-1', 'server-2', 'server-3'];
-      ctrl.init();
-      ctrl.show();
-    });
-
-    it('should init tags model', function() {
-      expect(ctrl.tags.length).to.be(3);
-      expect(ctrl.tags[0].text).to.be('key1');
-    });
-
-    it('should init options model', function() {
-      expect(ctrl.options.length).to.be(3);
-    });
-
-    it('should init selected values array', function() {
-      expect(ctrl.selectedValues.length).to.be(1);
-    });
-
-    it('should set linkText', function() {
-      expect(ctrl.linkText).to.be('server-1');
-    });
-
-    describe('after adititional value is selected', function() {
-      beforeEach(function() {
-        ctrl.selectValue(ctrl.options[2], {});
-        ctrl.commitChanges();
-      });
-
-      it('should update link text', function() {
-        expect(ctrl.linkText).to.be('server-1 + server-3');
-      });
-    });
-
-    describe('When tag is selected', function() {
-      beforeEach(function() {
-        ctrl.selectTag(ctrl.tags[0]);
-        rootScope.$digest();
-        ctrl.commitChanges();
-      });
-
-      it('should select tag', function() {
-        expect(ctrl.selectedTags.length).to.be(1);
-      });
-
-      it('should select values', function() {
-        expect(ctrl.options[0].selected).to.be(true);
-        expect(ctrl.options[2].selected).to.be(true);
-      });
-
-      it('link text should not include tag values', function() {
-        expect(ctrl.linkText).to.be('');
-      });
-
-      describe('and then dropdown is opened and closed without changes', function() {
-        beforeEach(function() {
-          ctrl.show();
-          ctrl.commitChanges();
-          rootScope.$digest();
-        });
-
-        it('should still have selected tag', function() {
-          expect(ctrl.selectedTags.length).to.be(1);
-        });
-      });
-
-      describe('and then unselected', function() {
-        beforeEach(function() {
-          ctrl.selectTag(ctrl.tags[0]);
-          rootScope.$digest();
-        });
-
-        it('should deselect tag', function() {
-          expect(ctrl.selectedTags.length).to.be(0);
-        });
-      });
-
-      describe('and then value is unselected', function() {
-        beforeEach(function() {
-          ctrl.selectValue(ctrl.options[0], {});
-        });
-
-        it('should deselect tag', function() {
-          expect(ctrl.selectedTags.length).to.be(0);
-        });
-      });
-    });
-  });
-
-  describe('Given variable with selected tags', function() {
-    beforeEach(function() {
-      ctrl.variable = {
-        current: {
-          text: 'server-1',
-          value: 'server-1',
-          tags: [{ text: 'key1', selected: true }],
-        },
-        options: [
-          { text: 'server-1', value: 'server-1' },
-          { text: 'server-2', value: 'server-2' },
-          { text: 'server-3', value: 'server-3' },
-        ],
-        tags: ['key1', 'key2', 'key3'],
-        getValuesForTag: function(key) {
-          return q.when(tagValuesMap[key]);
-        },
-        multi: true,
-      };
-      ctrl.init();
-      ctrl.show();
-    });
-
-    it('should set tag as selected', function() {
-      expect(ctrl.tags[0].selected).to.be(true);
-    });
-  });
-});

+ 5 - 12
public/app/core/table_model.ts

@@ -19,23 +19,16 @@ export default class TableModel {
     this.rows.sort(function(a, b) {
       a = a[options.col];
       b = b[options.col];
-      if (a < b) {
-        return -1;
-      }
-      if (a > b) {
-        return 1;
-      }
-      return 0;
+      // Sort null or undefined separately from comparable values
+      return +(a == null) - +(b == null) || +(a > b) || -(a < b);
     });
 
-    this.columns[options.col].sort = true;
-
     if (options.desc) {
       this.rows.reverse();
-      this.columns[options.col].desc = true;
-    } else {
-      this.columns[options.col].desc = false;
     }
+
+    this.columns[options.col].sort = true;
+    this.columns[options.col].desc = options.desc;
   }
 
   addColumn(col) {

+ 11 - 11
public/app/features/annotations/specs/annotations_srv_specs.ts → public/app/features/annotations/specs/annotations_srv.jest.ts

@@ -1,17 +1,17 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
 import '../annotations_srv';
-import helpers from 'test/specs/helpers';
 import 'app/features/dashboard/time_srv';
+import { AnnotationsSrv } from '../annotations_srv';
 
 describe('AnnotationsSrv', function() {
-  var ctx = new helpers.ServiceTestContext();
+  let $rootScope = {
+    onAppEvent: jest.fn(),
+  };
+  let $q;
+  let datasourceSrv;
+  let backendSrv;
+  let timeSrv;
 
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(ctx.createService('timeSrv'));
-  beforeEach(() => {
-    ctx.createService('annotationsSrv');
-  });
+  let annotationsSrv = new AnnotationsSrv($rootScope, $q, datasourceSrv, backendSrv, timeSrv);
 
   describe('When translating the query result', () => {
     const annotationSource = {
@@ -30,11 +30,11 @@ describe('AnnotationsSrv', function() {
     let translatedAnnotations;
 
     beforeEach(() => {
-      translatedAnnotations = ctx.service.translateQueryResult(annotationSource, annotations);
+      translatedAnnotations = annotationsSrv.translateQueryResult(annotationSource, annotations);
     });
 
     it('should set defaults', () => {
-      expect(translatedAnnotations[0].source).to.eql(annotationSource);
+      expect(translatedAnnotations[0].source).toEqual(annotationSource);
     });
   });
 });

+ 1 - 3
public/app/features/dashboard/specs/exporter.jest.ts

@@ -86,9 +86,7 @@ describe('given dashboard with repeated panels', () => {
       ],
     };
 
-    config.buildInfo = {
-      version: '3.0.2',
-    };
+    config.buildInfo.version = '3.0.2';
 
     //Stubs test function calls
     var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) };

+ 67 - 0
public/app/features/dashboard/specs/viewstate_srv.jest.ts

@@ -0,0 +1,67 @@
+//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
+import 'app/features/dashboard/view_state_srv';
+import config from 'app/core/config';
+import { DashboardViewState } from '../view_state_srv';
+
+describe('when updating view state', () => {
+  let location = {
+    replace: jest.fn(),
+    search: jest.fn(),
+  };
+
+  let $scope = {
+    onAppEvent: jest.fn(() => {}),
+    dashboard: {
+      meta: {},
+      panels: [],
+    },
+  };
+
+  let $rootScope = {};
+  let viewState;
+
+  beforeEach(() => {
+    config.bootData = {
+      user: {
+        orgId: 1,
+      },
+    };
+  });
+
+  describe('to fullscreen true and edit true', () => {
+    beforeEach(() => {
+      location.search = jest.fn(() => {
+        return { fullscreen: true, edit: true, panelId: 1 };
+      });
+      viewState = new DashboardViewState($scope, location, {}, $rootScope);
+    });
+
+    it('should update querystring and view state', () => {
+      var updateState = { fullscreen: true, edit: true, panelId: 1 };
+
+      viewState.update(updateState);
+
+      expect(location.search).toHaveBeenCalledWith({
+        edit: true,
+        editview: null,
+        fullscreen: true,
+        orgId: 1,
+        panelId: 1,
+      });
+      expect(viewState.dashboard.meta.fullscreen).toBe(true);
+      expect(viewState.state.fullscreen).toBe(true);
+    });
+  });
+
+  describe('to fullscreen false', () => {
+    beforeEach(() => {
+      viewState = new DashboardViewState($scope, location, {}, $rootScope);
+    });
+    it('should remove params from query string', () => {
+      viewState.update({ fullscreen: true, panelId: 1, edit: true });
+      viewState.update({ fullscreen: false });
+      expect(viewState.dashboard.meta.fullscreen).toBe(false);
+      expect(viewState.state.fullscreen).toBe(null);
+    });
+  });
+});

+ 0 - 65
public/app/features/dashboard/specs/viewstate_srv_specs.ts

@@ -1,65 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import 'app/features/dashboard/view_state_srv';
-import config from 'app/core/config';
-
-describe('when updating view state', function() {
-  var viewState, location;
-  var timeSrv = {};
-  var templateSrv = {};
-  var contextSrv = {
-    user: {
-      orgId: 19,
-    },
-  };
-  beforeEach(function() {
-    config.bootData = {
-      user: {
-        orgId: 1,
-      },
-    };
-  });
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.module(function($provide) {
-      $provide.value('timeSrv', timeSrv);
-      $provide.value('templateSrv', templateSrv);
-      $provide.value('contextSrv', contextSrv);
-    })
-  );
-
-  beforeEach(
-    angularMocks.inject(function(dashboardViewStateSrv, $location, $rootScope) {
-      $rootScope.onAppEvent = function() {};
-      $rootScope.dashboard = {
-        meta: {},
-        panels: [],
-      };
-      viewState = dashboardViewStateSrv.create($rootScope);
-      location = $location;
-    })
-  );
-
-  describe('to fullscreen true and edit true', function() {
-    it('should update querystring and view state', function() {
-      var updateState = { fullscreen: true, edit: true, panelId: 1 };
-      viewState.update(updateState);
-      expect(location.search()).to.eql({
-        fullscreen: true,
-        edit: true,
-        panelId: 1,
-        orgId: 1,
-      });
-      expect(viewState.dashboard.meta.fullscreen).to.be(true);
-      expect(viewState.state.fullscreen).to.be(true);
-    });
-  });
-
-  describe('to fullscreen false', function() {
-    it('should remove params from query string', function() {
-      viewState.update({ fullscreen: true, panelId: 1, edit: true });
-      viewState.update({ fullscreen: false });
-      expect(viewState.dashboard.meta.fullscreen).to.be(false);
-      expect(viewState.state.fullscreen).to.be(null);
-    });
-  });
-});

+ 80 - 42
public/app/features/org/partials/team_details.html

@@ -1,22 +1,22 @@
 <page-header model="ctrl.navModel"></page-header>
 
 <div class="page-container page-body">
-	<h3 class="page-sub-heading">Team Details</h3>
+  <h3 class="page-sub-heading">Team Details</h3>
 
   <form name="teamDetailsForm" class="gf-form-group">
     <div class="gf-form max-width-30">
       <span class="gf-form-label width-10">Name</span>
       <input type="text" required ng-model="ctrl.team.name" class="gf-form-input max-width-22">
-		</div>
-		<div class="gf-form max-width-30">
-			<span class="gf-form-label width-10">
-				Email
-				<info-popover mode="right-normal">
-						This is optional and is primarily used for allowing custom team avatars.
-				</info-popover>
-			</span>
-			<input class="gf-form-input max-width-22" type="email" ng-model="ctrl.team.email" placeholder="email@test.com">
-		</div>
+    </div>
+    <div class="gf-form max-width-30">
+      <span class="gf-form-label width-10">
+        Email
+        <info-popover mode="right-normal">
+          This is optional and is primarily used for allowing custom team avatars.
+        </info-popover>
+      </span>
+      <input class="gf-form-input max-width-22" type="email" ng-model="ctrl.team.email" placeholder="email@test.com">
+    </div>
 
     <div class="gf-form-button-row">
       <button type="submit" class="btn btn-success" ng-click="ctrl.update()">Update</button>
@@ -26,42 +26,80 @@
   <div class="gf-form-group">
 
     <h3 class="page-heading">Team Members</h3>
-		<form name="ctrl.addMemberForm" class="gf-form-group">
+    <form name="ctrl.addMemberForm" class="gf-form-group">
       <div class="gf-form">
         <span class="gf-form-label width-10">Add member</span>
-				<!--
-				Old picker
-				<user-picker user-picked="ctrl.userPicked($user)"></user-picker>
-				-->
-				<select-user-picker  class="width-7" handlePicked="ctrl.userPicked" backendSrv="ctrl.backendSrv"></select-user-picker>
+        <!--
+        Old picker
+        <user-picker user-picked="ctrl.userPicked($user)"></user-picker>
+        -->
+        <select-user-picker class="width-7" handlePicked="ctrl.userPicked" backendSrv="ctrl.backendSrv"></select-user-picker>
       </div>
     </form>
 
     <table class="filter-table" ng-show="ctrl.teamMembers.length > 0">
-			<thead>
-				<tr>
-					<th></th>
-					<th>Username</th>
-					<th>Email</th>
-					<th></th>
-				</tr>
-			</thead>
-			<tr ng-repeat="member in ctrl.teamMembers">
-				<td class="width-4 text-center link-td">
-					<img class="filter-table__avatar" ng-src="{{member.avatarUrl}}"></img>
-				</td>
-				<td>{{member.login}}</td>
-				<td>{{member.email}}</td>
-				<td style="width: 1%">
-					<a ng-click="ctrl.removeTeamMember(member)" class="btn btn-danger btn-mini">
-						<i class="fa fa-remove"></i>
-					</a>
-				</td>
-			</tr>
-		</table>
-		<div>
-			<em class="muted" ng-hide="ctrl.teamMembers.length > 0">
-				This team has no members yet.
-			</em>
+      <thead>
+        <tr>
+          <th></th>
+          <th>Username</th>
+          <th>Email</th>
+          <th></th>
+        </tr>
+      </thead>
+      <tr ng-repeat="member in ctrl.teamMembers">
+        <td class="width-4 text-center link-td">
+          <img class="filter-table__avatar" ng-src="{{member.avatarUrl}}"></img>
+        </td>
+        <td>{{member.login}}</td>
+        <td>{{member.email}}</td>
+        <td style="width: 1%">
+          <a ng-click="ctrl.removeTeamMember(member)" class="btn btn-danger btn-mini">
+            <i class="fa fa-remove"></i>
+          </a>
+        </td>
+      </tr>
+    </table>
+    <div>
+      <em class="muted" ng-hide="ctrl.teamMembers.length > 0">
+        This team has no members yet.
+      </em>
+    </div>
+
+  </div>
+
+  <div class="gf-form-group" ng-if="ctrl.isMappingsEnabled">
+
+	<h3 class="page-heading">Mappings to external groups</h3>
+	<form name="ctrl.addGroupForm" class="gf-form-group">
+		<div class="gf-form">
+			<span class="gf-form-label width-10">Add group</span>
+			<input class="gf-form-input max-width-22" type="text" ng-model="ctrl.newGroupId">
 		</div>
+		<div class="gf-form-button-row">
+			<button type="submit" class="btn btn-success" ng-click="ctrl.addGroup()">Add</button>
+		</div>
+	</form>
+
+	<table class="filter-table" ng-show="ctrl.teamGroups.length > 0">
+		<thead>
+			<tr>
+				<th>Group</th>
+				<th></th>
+			</tr>
+		</thead>
+		<tr ng-repeat="group in ctrl.teamGroups">
+			<td>{{group.groupId}}</td>
+			<td style="width: 1%">
+				<a ng-click="ctrl.removeGroup(group)" class="btn btn-danger btn-mini">
+					<i class="fa fa-remove"></i>
+				</a>
+			</td>
+		</tr>
+	</table>
+	<div>
+		<em class="muted" ng-hide="ctrl.teamGroups.length > 0">
+			This team has no associated groups yet.
+		</em>
+	</div>
 
+	</div>

+ 27 - 0
public/app/features/org/team_details_ctrl.ts

@@ -1,15 +1,21 @@
 import coreModule from 'app/core/core_module';
+import config from 'app/core/config';
 
 export default class TeamDetailsCtrl {
   team: Team;
   teamMembers: User[] = [];
   navModel: any;
+  teamGroups: TeamGroup[] = [];
+  newGroupId: string;
+  isMappingsEnabled: boolean;
 
   /** @ngInject **/
   constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) {
     this.navModel = navModelSrv.getNav('cfg', 'teams', 0);
     this.userPicked = this.userPicked.bind(this);
     this.get = this.get.bind(this);
+    this.newGroupId = '';
+    this.isMappingsEnabled = config.buildInfo.isEnterprise;
     this.get();
   }
 
@@ -18,9 +24,16 @@ export default class TeamDetailsCtrl {
       this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => {
         this.team = result;
       });
+
       this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => {
         this.teamMembers = result;
       });
+
+      if (this.isMappingsEnabled) {
+        this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => {
+          this.teamGroups = result;
+        });
+      }
     }
   }
 
@@ -57,6 +70,20 @@ export default class TeamDetailsCtrl {
       this.get();
     });
   }
+
+  addGroup() {
+    this.backendSrv.post(`/api/teams/${this.$routeParams.id}/groups`, { groupId: this.newGroupId }).then(() => {
+      this.get();
+    });
+  }
+
+  removeGroup(group: TeamGroup) {
+    this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/groups/${group.groupId}`).then(this.get);
+  }
+}
+
+export interface TeamGroup {
+  groupId: string;
 }
 
 export interface Team {

+ 14 - 11
public/app/features/plugins/datasource_srv.ts

@@ -91,10 +91,20 @@ export class DatasourceSrv {
 
     _.each(config.datasources, function(value, key) {
       if (value.meta && value.meta.metrics) {
-        metricSources.push({ value: key, name: key, meta: value.meta });
+        let metricSource = { value: key, name: key, meta: value.meta, sort: key };
+
+        //Make sure grafana and mixed are sorted at the bottom
+        if (value.meta.id === 'grafana') {
+          metricSource.sort = String.fromCharCode(253);
+        } else if (value.meta.id === 'mixed') {
+          metricSource.sort = String.fromCharCode(254);
+        }
+
+        metricSources.push(metricSource);
 
         if (key === config.defaultDatasource) {
-          metricSources.push({ value: null, name: 'default', meta: value.meta });
+          metricSource = { value: null, name: 'default', meta: value.meta, sort: key };
+          metricSources.push(metricSource);
         }
       }
     });
@@ -104,17 +114,10 @@ export class DatasourceSrv {
     }
 
     metricSources.sort(function(a, b) {
-      // these two should always be at the bottom
-      if (a.meta.id === 'mixed' || a.meta.id === 'grafana') {
-        return 1;
-      }
-      if (b.meta.id === 'mixed' || b.meta.id === 'grafana') {
-        return -1;
-      }
-      if (a.name.toLowerCase() > b.name.toLowerCase()) {
+      if (a.sort.toLowerCase() > b.sort.toLowerCase()) {
         return 1;
       }
-      if (a.name.toLowerCase() < b.name.toLowerCase()) {
+      if (a.sort.toLowerCase() < b.sort.toLowerCase()) {
         return -1;
       }
       return 0;

+ 2 - 2
public/app/features/plugins/partials/ds_http_settings.html

@@ -32,8 +32,8 @@
       <div class="gf-form">
         <label class="gf-form-label query-keyword pointer" ng-click="toggleAccessHelp()">
           Help&nbsp;
-          <i class="fa fa-caret-down" ng-show="ctrl.showAccessHelp"></i>
-          <i class="fa fa-caret-right" ng-hide="ctrl.showAccessHelp">&nbsp;</i>
+          <i class="fa fa-caret-down" ng-show="showAccessHelp"></i>
+          <i class="fa fa-caret-right" ng-hide="showAccessHelp">&nbsp;</i>
         </label>
       </div>
     </div>

+ 59 - 0
public/app/features/plugins/specs/datasource_srv.jest.ts

@@ -0,0 +1,59 @@
+import config from 'app/core/config';
+import 'app/features/plugins/datasource_srv';
+import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
+
+describe('datasource_srv', function() {
+  let _datasourceSrv = new DatasourceSrv({}, {}, {}, {});
+  let metricSources;
+
+  describe('when loading metric sources', () => {
+    let unsortedDatasources = {
+      mmm: {
+        type: 'test-db',
+        meta: { metrics: { m: 1 } },
+      },
+      '--Grafana--': {
+        type: 'grafana',
+        meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' },
+      },
+      '--Mixed--': {
+        type: 'test-db',
+        meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' },
+      },
+      ZZZ: {
+        type: 'test-db',
+        meta: { metrics: { m: 1 } },
+      },
+      aaa: {
+        type: 'test-db',
+        meta: { metrics: { m: 1 } },
+      },
+      BBB: {
+        type: 'test-db',
+        meta: { metrics: { m: 1 } },
+      },
+    };
+    beforeEach(() => {
+      config.datasources = unsortedDatasources;
+      metricSources = _datasourceSrv.getMetricSources({ skipVariables: true });
+    });
+
+    it('should return a list of sources sorted case insensitively with builtin sources last', () => {
+      expect(metricSources[0].name).toBe('aaa');
+      expect(metricSources[1].name).toBe('BBB');
+      expect(metricSources[2].name).toBe('mmm');
+      expect(metricSources[3].name).toBe('ZZZ');
+      expect(metricSources[4].name).toBe('--Grafana--');
+      expect(metricSources[5].name).toBe('--Mixed--');
+    });
+
+    beforeEach(() => {
+      config.defaultDatasource = 'BBB';
+    });
+
+    it('should set default data source', () => {
+      expect(metricSources[2].name).toBe('default');
+      expect(metricSources[2].sort).toBe('BBB');
+    });
+  });
+});

+ 0 - 64
public/app/features/plugins/specs/datasource_srv_specs.ts

@@ -1,64 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import config from 'app/core/config';
-import 'app/features/plugins/datasource_srv';
-
-describe('datasource_srv', function() {
-  var _datasourceSrv;
-  var metricSources;
-  var templateSrv = {};
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(
-    angularMocks.module(function($provide) {
-      $provide.value('templateSrv', templateSrv);
-    })
-  );
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.inject(function(datasourceSrv) {
-      _datasourceSrv = datasourceSrv;
-    })
-  );
-
-  describe('when loading metric sources', function() {
-    var unsortedDatasources = {
-      mmm: {
-        type: 'test-db',
-        meta: { metrics: { m: 1 } },
-      },
-      '--Grafana--': {
-        type: 'grafana',
-        meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' },
-      },
-      '--Mixed--': {
-        type: 'test-db',
-        meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' },
-      },
-      ZZZ: {
-        type: 'test-db',
-        meta: { metrics: { m: 1 } },
-      },
-      aaa: {
-        type: 'test-db',
-        meta: { metrics: { m: 1 } },
-      },
-      BBB: {
-        type: 'test-db',
-        meta: { metrics: { m: 1 } },
-      },
-    };
-    beforeEach(function() {
-      config.datasources = unsortedDatasources;
-      metricSources = _datasourceSrv.getMetricSources({ skipVariables: true });
-    });
-
-    it('should return a list of sources sorted case insensitively with builtin sources last', function() {
-      expect(metricSources[0].name).to.be('aaa');
-      expect(metricSources[1].name).to.be('BBB');
-      expect(metricSources[2].name).to.be('mmm');
-      expect(metricSources[3].name).to.be('ZZZ');
-      expect(metricSources[4].name).to.be('--Grafana--');
-      expect(metricSources[5].name).to.be('--Mixed--');
-    });
-  });
-});

+ 6 - 2
public/app/features/templating/variable_srv.ts

@@ -38,7 +38,11 @@ export class VariableSrv {
       });
   }
 
-  onDashboardRefresh() {
+  onDashboardRefresh(evt, payload) {
+    if (payload && payload.fromVariableValueUpdated) {
+      return Promise.resolve({});
+    }
+
     var promises = this.variables.filter(variable => variable.refresh === 2).map(variable => {
       var previousOptions = variable.options.slice();
 
@@ -130,7 +134,7 @@ export class VariableSrv {
     return this.$q.all(promises).then(() => {
       if (emitChangeEvents) {
         this.$rootScope.$emit('template-variable-value-updated');
-        this.$rootScope.$broadcast('refresh');
+        this.$rootScope.$broadcast('refresh', { fromVariableValueUpdated: true });
       }
     });
   }

+ 1 - 1
public/app/partials/login.html

@@ -89,7 +89,7 @@
             <a class="btn btn-link" ng-click="skip();">
               Skip
               <info-popover mode="no-padding">
-                If you skip you will be promted to change password next time you login.
+                If you skip you will be prompted to change password next time you login.
               </info-popover>
             </a>
             <button type="submit" class="btn btn-large p-x-2" ng-click="changePassword();" ng-class="{'btn-inverse': !loginForm.$valid, 'btn-success': loginForm.$valid}">

+ 76 - 72
public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts → public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts

@@ -1,32 +1,46 @@
 import _ from 'lodash';
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
 import moment from 'moment';
 import angular from 'angular';
-import helpers from 'test/specs/helpers';
 import { ElasticDatasource } from '../datasource';
 
+import * as dateMath from 'app/core/utils/datemath';
+
 describe('ElasticDatasource', function() {
-  var ctx = new helpers.ServiceTestContext();
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(ctx.providePhase(['templateSrv', 'backendSrv', 'timeSrv']));
-
-  beforeEach(
-    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
-      ctx.$q = $q;
-      ctx.$httpBackend = $httpBackend;
-      ctx.$rootScope = $rootScope;
-      ctx.$injector = $injector;
-      $httpBackend.when('GET', /\.html$/).respond('');
-    })
-  );
+  let backendSrv = {
+    datasourceRequest: jest.fn(),
+  };
+
+  let $rootScope = {
+    $on: jest.fn(),
+    appEvent: jest.fn(),
+  };
+
+  let templateSrv = {
+    replace: jest.fn(text => text),
+    getAdhocFilters: jest.fn(() => []),
+  };
+
+  let timeSrv = {
+    time: { from: 'now-1h', to: 'now' },
+    timeRange: jest.fn(() => {
+      return {
+        from: dateMath.parse(this.time.from, false),
+        to: dateMath.parse(this.time.to, true),
+      };
+    }),
+    setTime: jest.fn(time => {
+      this.time = time;
+    }),
+  };
+
+  let ctx = <any>{
+    $rootScope,
+    backendSrv,
+  };
 
   function createDatasource(instanceSettings) {
     instanceSettings.jsonData = instanceSettings.jsonData || {};
-    ctx.ds = ctx.$injector.instantiate(ElasticDatasource, {
-      instanceSettings: instanceSettings,
-    });
+    ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
   }
 
   describe('When testing datasource with index pattern', function() {
@@ -40,33 +54,32 @@ describe('ElasticDatasource', function() {
 
     it('should translate index pattern to current day', function() {
       var requestOptions;
-      ctx.backendSrv.datasourceRequest = function(options) {
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
-        return ctx.$q.when({ data: {} });
-      };
+        return Promise.resolve({ data: {} });
+      });
 
       ctx.ds.testDatasource();
-      ctx.$rootScope.$apply();
 
       var today = moment.utc().format('YYYY.MM.DD');
-      expect(requestOptions.url).to.be('http://es.com/asd-' + today + '/_mapping');
+      expect(requestOptions.url).toBe('http://es.com/asd-' + today + '/_mapping');
     });
   });
 
   describe('When issuing metric query with interval pattern', function() {
     var requestOptions, parts, header;
 
-    beforeEach(function() {
+    beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
         index: '[asd-]YYYY.MM.DD',
         jsonData: { interval: 'Daily', esVersion: '2' },
       });
 
-      ctx.backendSrv.datasourceRequest = function(options) {
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
-        return ctx.$q.when({ data: { responses: [] } });
-      };
+        return Promise.resolve({ data: { responses: [] } });
+      });
 
       ctx.ds.query({
         range: {
@@ -82,19 +95,17 @@ describe('ElasticDatasource', function() {
         ],
       });
 
-      ctx.$rootScope.$apply();
-
       parts = requestOptions.data.split('\n');
       header = angular.fromJson(parts[0]);
     });
 
     it('should translate index pattern to current day', function() {
-      expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
+      expect(header.index).toEqual(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
     });
 
     it('should json escape lucene query', function() {
       var body = angular.fromJson(parts[1]);
-      expect(body.query.bool.filter[1].query_string.query).to.be('escape\\:test');
+      expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test');
     });
   });
 
@@ -108,10 +119,10 @@ describe('ElasticDatasource', function() {
         jsonData: { esVersion: '2' },
       });
 
-      ctx.backendSrv.datasourceRequest = function(options) {
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
-        return ctx.$q.when({ data: { responses: [] } });
-      };
+        return Promise.resolve({ data: { responses: [] } });
+      });
 
       ctx.ds.query({
         range: {
@@ -127,27 +138,26 @@ describe('ElasticDatasource', function() {
         ],
       });
 
-      ctx.$rootScope.$apply();
       parts = requestOptions.data.split('\n');
       header = angular.fromJson(parts[0]);
     });
 
     it('should set search type to query_then_fetch', function() {
-      expect(header.search_type).to.eql('query_then_fetch');
+      expect(header.search_type).toEqual('query_then_fetch');
     });
 
     it('should set size', function() {
       var body = angular.fromJson(parts[1]);
-      expect(body.size).to.be(500);
+      expect(body.size).toBe(500);
     });
   });
 
   describe('When getting fields', function() {
-    beforeEach(function() {
+    beforeEach(() => {
       createDatasource({ url: 'http://es.com', index: 'metricbeat' });
 
-      ctx.backendSrv.datasourceRequest = function(options) {
-        return ctx.$q.when({
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
+        return Promise.resolve({
           data: {
             metricbeat: {
               mappings: {
@@ -190,7 +200,7 @@ describe('ElasticDatasource', function() {
             },
           },
         });
-      };
+      });
     });
 
     it('should return nested fields', function() {
@@ -201,7 +211,7 @@ describe('ElasticDatasource', function() {
         })
         .then(fieldObjects => {
           var fields = _.map(fieldObjects, 'text');
-          expect(fields).to.eql([
+          expect(fields).toEqual([
             '@timestamp',
             'beat.name.raw',
             'beat.name',
@@ -212,7 +222,6 @@ describe('ElasticDatasource', function() {
             'system.process.name',
           ]);
         });
-      ctx.$rootScope.$apply();
     });
 
     it('should return fields related to query type', function() {
@@ -224,7 +233,7 @@ describe('ElasticDatasource', function() {
         })
         .then(fieldObjects => {
           var fields = _.map(fieldObjects, 'text');
-          expect(fields).to.eql(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
+          expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
         });
 
       ctx.ds
@@ -235,10 +244,8 @@ describe('ElasticDatasource', function() {
         })
         .then(fieldObjects => {
           var fields = _.map(fieldObjects, 'text');
-          expect(fields).to.eql(['@timestamp']);
+          expect(fields).toEqual(['@timestamp']);
         });
-
-      ctx.$rootScope.$apply();
     });
   });
 
@@ -252,10 +259,10 @@ describe('ElasticDatasource', function() {
         jsonData: { esVersion: '5' },
       });
 
-      ctx.backendSrv.datasourceRequest = function(options) {
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
-        return ctx.$q.when({ data: { responses: [] } });
-      };
+        return Promise.resolve({ data: { responses: [] } });
+      });
 
       ctx.ds.query({
         range: {
@@ -271,34 +278,33 @@ describe('ElasticDatasource', function() {
         ],
       });
 
-      ctx.$rootScope.$apply();
       parts = requestOptions.data.split('\n');
       header = angular.fromJson(parts[0]);
     });
 
     it('should not set search type to count', function() {
-      expect(header.search_type).to.not.eql('count');
+      expect(header.search_type).not.toEqual('count');
     });
 
     it('should set size to 0', function() {
       var body = angular.fromJson(parts[1]);
-      expect(body.size).to.be(0);
+      expect(body.size).toBe(0);
     });
   });
 
   describe('When issuing metricFind query on es5.x', function() {
     var requestOptions, parts, header, body, results;
 
-    beforeEach(function() {
+    beforeEach(() => {
       createDatasource({
         url: 'http://es.com',
         index: 'test',
         jsonData: { esVersion: '5' },
       });
 
-      ctx.backendSrv.datasourceRequest = function(options) {
+      ctx.backendSrv.datasourceRequest = jest.fn(options => {
         requestOptions = options;
-        return ctx.$q.when({
+        return Promise.resolve({
           data: {
             responses: [
               {
@@ -318,38 +324,36 @@ describe('ElasticDatasource', function() {
             ],
           },
         });
-      };
+      });
 
       ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then(res => {
         results = res;
       });
 
-      ctx.$rootScope.$apply();
-
       parts = requestOptions.data.split('\n');
       header = angular.fromJson(parts[0]);
       body = angular.fromJson(parts[1]);
     });
 
-    it('should get results', function() {
-      expect(results.length).to.eql(2);
+    it('should get results', () => {
+      expect(results.length).toEqual(2);
     });
 
-    it('should use key or key_as_string', function() {
-      expect(results[0].text).to.eql('test');
-      expect(results[1].text).to.eql('test2_as_string');
+    it('should use key or key_as_string', () => {
+      expect(results[0].text).toEqual('test');
+      expect(results[1].text).toEqual('test2_as_string');
     });
 
-    it('should not set search type to count', function() {
-      expect(header.search_type).to.not.eql('count');
+    it('should not set search type to count', () => {
+      expect(header.search_type).not.toEqual('count');
     });
 
-    it('should set size to 0', function() {
-      expect(body.size).to.be(0);
+    it('should set size to 0', () => {
+      expect(body.size).toBe(0);
     });
 
-    it('should not set terms aggregation size to 0', function() {
-      expect(body['aggs']['1']['terms'].size).to.not.be(0);
+    it('should not set terms aggregation size to 0', () => {
+      expect(body['aggs']['1']['terms'].size).not.toBe(0);
     });
   });
 });
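For readers porting other Karma/Mocha specs the same way, here is a minimal sketch of the pattern the diff above applies (names are illustrative, not tied to Grafana's real module layout): Angular DI, $q and $httpBackend are replaced by plain objects, jest.fn() stubs and native Promises, and the expect.js matchers (.to.be / .to.eql) become Jest matchers (.toBe / .toEqual).

// Dependencies become plain stub objects instead of Angular-injected services.
const backendSrv = {
  datasourceRequest: jest.fn(() => Promise.resolve({ data: { responses: [] } })),
};
const templateSrv = {
  replace: jest.fn((text: string) => text),
  getAdhocFilters: jest.fn(() => []),
};

// The class under test receives its collaborators directly through its constructor,
// so no angularMocks.module / $injector setup is required:
// const ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);

// Native Promises resolve without a digest cycle, so the $rootScope.$apply()
// calls that drove $q in the old spec simply disappear.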

+ 3 - 3
public/app/plugins/datasource/mssql/partials/annotations.editor.html

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -&gt; column AS time
 - $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
-- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &lt;= DATEADD(s, 18446744066914187038, '1970-01-01')
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
 - $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
-- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt; 1492750877
 - $__unixEpochTo() -&gt; 1492750877
 		</pre>

+ 3 - 3
public/app/plugins/datasource/mssql/partials/query.editor.html

@@ -49,7 +49,7 @@ Table:
 Macros:
 - $__time(column) -&gt; column AS time
 - $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
-- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &lt;= DATEADD(s, 18446744066914187038, '1970-01-01')
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
 - $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
 - $__timeGroup(column, '5m'[, fillvalue]) -&gt; CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a <i>fillValue</i> of <i>NULL</i> or floating value will automatically fill empty series in timerange with that value.
 
@@ -62,8 +62,8 @@ GROUP BY $__timeGroup(date_time_col, '1h')
 ORDER BY 1
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
-- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt; 1492750877
 - $__unixEpochTo() -&gt; 1492750877
 		</pre>

+ 3 - 3
public/app/plugins/datasource/mysql/partials/annotations.editor.html

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -&gt;  UNIX_TIMESTAMP(time_date_time) &gt; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &lt; 1492750877
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  FROM_UNIXTIME(1492750877)
-- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>

+ 3 - 3
public/app/plugins/datasource/mysql/partials/query.editor.html

@@ -48,7 +48,7 @@ Table:
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -&gt;  UNIX_TIMESTAMP(time_date_time) &ge; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &le; 1492750877
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
 - $__unixEpochFilter(column) -&gt;  time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__timeGroup(column,'5m') -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
 
@@ -61,8 +61,8 @@ GROUP BY 1
 ORDER BY 1
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  FROM_UNIXTIME(1492750877)
-- $__timeTo() -&gt;  FROM_UNIXTIME(1492750877)
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>
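As a worked illustration of the help text above (the table "requests" and column "created_at" are made up, and the timestamps stand in for the dashboard's current time range), a panel query written with these macros and its expanded form might look roughly like this, stored as a raw SQL string:

// Hypothetical panel query using the MySQL macros described above.
const rawSql = `
  SELECT $__timeGroup(created_at, '5m') AS time_sec,
         count(*) AS value
  FROM requests
  WHERE $__timeFilter(created_at)
  GROUP BY 1
  ORDER BY 1`;

// After macro expansion, per the list above, the backend would run roughly:
const expandedSql = `
  SELECT cast(cast(UNIX_TIMESTAMP(created_at)/(300) as signed)*300 as signed) AS time_sec,
         count(*) AS value
  FROM requests
  WHERE created_at BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
  GROUP BY 1
  ORDER BY 1`;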

+ 2 - 2
public/app/plugins/datasource/prometheus/datasource.ts

@@ -162,8 +162,8 @@ export class PrometheusDatasource {
           format: activeTargets[index].format,
           step: queries[index].step,
           legendFormat: activeTargets[index].legendFormat,
-          start: start,
-          end: end,
+          start: queries[index].start,
+          end: queries[index].end,
           query: queries[index].expr,
           responseListLength: responseList.length,
           responseIndex: index,
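The Prometheus change above swaps the shared start/end for each query's own bounds when building the result-transformation options; a plausible reading (hedged, since the surrounding code is not shown here) is that per-query start/end may be step-aligned and therefore differ from the raw dashboard range. A small sketch of that kind of alignment, with a made-up helper name:

// Illustrative only: snap a range to its query step so sample timestamps stay
// stable between refreshes. alignRangeToStep is not a Grafana function.
function alignRangeToStep(start: number, end: number, stepSec: number) {
  return {
    start: Math.floor(start / stepSec) * stepSec,
    end: Math.ceil(end / stepSec) * stepSec,
  };
}

// Each query then carries its own bounds, which is what the options object above
// now forwards as queries[index].start / queries[index].end.
const query = { expr: 'up', step: 30, ...alignRangeToStep(1492750877, 1492754477, 30) };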

Some files were not shown because too many files have changed in this diff