
Merge remote-tracking branch 'upstream/master' into dashboard_permissions

Daniel Lee 8 years ago
parent
commit
a5830c6dbc
100 changed files with 8170 additions and 501 deletions
  1. .gitignore (+1 -0)
  2. CHANGELOG.md (+2 -0)
  3. Makefile (+3 -0)
  4. docs/sources/administration/cli.md (+6 -6)
  5. docs/sources/administration/metrics.md (+2 -2)
  6. docs/sources/administration/provisioning.md (+21 -21)
  7. docs/sources/alerting/metrics.md (+1 -1)
  8. docs/sources/alerting/notifications.md (+27 -27)
  9. docs/sources/installation/behind_proxy.md (+36 -0)
  10. docs/sources/tutorials/iis.md (+89 -0)
  11. package.json (+4 -1)
  12. pkg/api/login_oauth.go (+1 -1)
  13. pkg/cmd/grafana-server/server.go (+8 -3)
  14. pkg/log/log.go (+2 -2)
  15. pkg/plugins/dashboard_importer_test.go (+2 -1)
  16. pkg/plugins/dashboards_test.go (+2 -1)
  17. pkg/plugins/datasource/tsdb/datasource_plugin_wrapper.go (+150 -0)
  18. pkg/plugins/datasource/tsdb/datasource_plugin_wrapper_test.go (+108 -0)
  19. pkg/plugins/datasource/tsdb/grpc.go (+22 -0)
  20. pkg/plugins/datasource/tsdb/interface.go (+27 -0)
  21. pkg/plugins/datasource_plugin.go (+103 -0)
  22. pkg/plugins/datasource_plugin_test.go (+35 -0)
  23. pkg/plugins/hclog-wrapper.go (+49 -0)
  24. pkg/plugins/plugins.go (+46 -10)
  25. pkg/plugins/plugins_test.go (+3 -2)
  26. pkg/services/alerting/notifiers/telegram.go (+89 -24)
  27. pkg/services/provisioning/dashboards/file_reader.go (+6 -3)
  28. pkg/services/provisioning/dashboards/types.go (+1 -0)
  29. pkg/tsdb/models.go (+0 -2)
  30. pkg/tsdb/models/tsdb_plugin.pb.go (+636 -0)
  31. pkg/tsdb/models/tsdb_plugin.proto (+98 -0)
  32. pkg/tsdb/time_range.go (+5 -5)
  33. pkg/tsdb/time_range_test.go (+4 -4)
  34. public/app/containers/AlertRuleList/AlertRuleList.jest.tsx (+1 -1)
  35. public/app/containers/AlertRuleList/AlertRuleList.tsx (+59 -30)
  36. public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap (+77 -46)
  37. public/app/core/angular_wrappers.ts (+13 -11)
  38. public/app/core/components/EmptyListCTA/EmptyListCTA.jest.tsx (+10 -11)
  39. public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.jest.tsx.snap (+1 -1)
  40. public/app/core/components/Tooltip/Popover.jest.tsx (+16 -0)
  41. public/app/core/components/Tooltip/Popover.tsx (+34 -0)
  42. public/app/core/components/Tooltip/Tooltip.jest.tsx (+16 -0)
  43. public/app/core/components/Tooltip/Tooltip.tsx (+45 -0)
  44. public/app/core/components/Tooltip/__snapshots__/Popover.jest.tsx.snap (+16 -0)
  45. public/app/core/components/Tooltip/__snapshots__/Tooltip.jest.tsx.snap (+19 -0)
  46. public/app/core/components/Tooltip/withTooltip.tsx (+57 -0)
  47. public/app/core/components/UserPicker/UserPicker.jest.tsx (+20 -0)
  48. public/app/core/components/UserPicker/UserPicker.tsx (+108 -0)
  49. public/app/core/components/UserPicker/UserPickerOption.jest.tsx (+22 -0)
  50. public/app/core/components/UserPicker/UserPickerOption.tsx (+54 -0)
  51. public/app/core/components/UserPicker/__snapshots__/UserPicker.jest.tsx.snap (+98 -0)
  52. public/app/core/components/UserPicker/__snapshots__/UserPickerOption.jest.tsx.snap (+17 -0)
  53. public/app/core/components/scroll/scroll.ts (+14 -0)
  54. public/app/features/alerting/alert_def.ts (+0 -4)
  55. public/app/features/alerting/partials/alert_tab.html (+27 -30)
  56. public/app/features/dashboard/dashboard_model.ts (+42 -24)
  57. public/app/features/dashboard/dashgrid/AddPanelPanel.tsx (+13 -3)
  58. public/app/features/dashboard/dashnav/dashnav.ts (+2 -2)
  59. public/app/features/dashboard/panel_model.ts (+1 -0)
  60. public/app/features/dashboard/share_snapshot_ctrl.ts (+1 -0)
  61. public/app/features/dashboard/specs/dashboard_model.jest.ts (+17 -0)
  62. public/app/features/dashboard/specs/repeat.jest.ts (+129 -0)
  63. public/app/features/org/partials/team_details.html (+2 -3)
  64. public/app/features/org/team_details_ctrl.ts (+2 -1)
  65. public/app/plugins/datasource/cloudwatch/partials/config.html (+1 -1)
  66. public/app/plugins/panel/alertlist/module.html (+37 -44)
  67. public/app/plugins/panel/graph/graph.ts (+6 -1)
  68. public/app/plugins/panel/text/module.ts (+0 -3)
  69. public/app/stores/AlertListStore/AlertListStore.jest.ts (+65 -0)
  70. public/app/stores/AlertListStore/AlertListStore.ts (+19 -6)
  71. public/app/stores/AlertListStore/AlertRule.ts (+2 -2)
  72. public/app/stores/AlertListStore/helpers.ts (+1 -1)
  73. public/sass/_grafana.scss (+94 -91)
  74. public/sass/base/_type.scss (+9 -4)
  75. public/sass/components/_form_select_box.scss (+73 -0)
  76. public/sass/components/_gf-form.scss (+1 -0)
  77. public/sass/components/_panel_add_panel.scss (+9 -1)
  78. public/sass/components/_popper.scss (+79 -0)
  79. public/sass/components/_submenu.scss (+1 -1)
  80. public/sass/components/_user-picker.scss (+12 -0)
  81. public/sass/pages/_alerting.scss (+91 -63)
  82. vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile (+37 -0)
  83. vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go (+2215 -0)
  84. vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto (+849 -0)
  85. vendor/github.com/golang/protobuf/ptypes/any.go (+139 -0)
  86. vendor/github.com/golang/protobuf/ptypes/any/any.pb.go (+178 -0)
  87. vendor/github.com/golang/protobuf/ptypes/any/any.proto (+149 -0)
  88. vendor/github.com/golang/protobuf/ptypes/doc.go (+35 -0)
  89. vendor/github.com/golang/protobuf/ptypes/duration.go (+102 -0)
  90. vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go (+144 -0)
  91. vendor/github.com/golang/protobuf/ptypes/duration/duration.proto (+117 -0)
  92. vendor/github.com/golang/protobuf/ptypes/regen.sh (+43 -0)
  93. vendor/github.com/golang/protobuf/ptypes/timestamp.go (+134 -0)
  94. vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go (+160 -0)
  95. vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto (+133 -0)
  96. vendor/github.com/hashicorp/go-hclog/LICENSE (+21 -0)
  97. vendor/github.com/hashicorp/go-hclog/README.md (+123 -0)
  98. vendor/github.com/hashicorp/go-hclog/global.go (+34 -0)
  99. vendor/github.com/hashicorp/go-hclog/int.go (+397 -0)
  100. vendor/github.com/hashicorp/go-hclog/log.go (+138 -0)

+ 1 - 0
.gitignore

@@ -60,3 +60,4 @@ debug.test
 /vendor/**/*_test.go
 /vendor/**/.editorconfig
 /vendor/**/appengine*
+*.orig

+ 2 - 0
CHANGELOG.md

@@ -38,6 +38,7 @@ The pagerduty notifier now defaults to not auto resolve incidents. More details
 * **Dashboard as cfg**: Load dashboards from file into Grafana on startup/change [#9654](https://github.com/grafana/grafana/issues/9654) [#5269](https://github.com/grafana/grafana/issues/5269)
 * **Prometheus**: Grafana can now send alerts to Prometheus Alertmanager while firing [#7481](https://github.com/grafana/grafana/issues/7481), thx [@Thib17](https://github.com/Thib17) and [@mtanda](https://github.com/mtanda)
 * **Table**: Support multiple table formated queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal)
+
 ## Minor
 * **Alert panel**: Adds placeholder text when no alerts are within the time range [#9624](https://github.com/grafana/grafana/issues/9624), thx [@straend](https://github.com/straend)
 * **Mysql**: MySQL enable MaxOpenCon and MaxIdleCon regards how constring is configured.  [#9784](https://github.com/grafana/grafana/issues/9784), thx [@dfredell](https://github.com/dfredell)
@@ -49,6 +50,7 @@ The pagerduty notifier now defaults to not auto resolve incidents. More details
 [@adiletmaratov](https://github.com/adiletmaratov)
 * **Backend**: Fixed bug where Grafana exited before all sub routines where finished [#10131](https://github.com/grafana/grafana/issues/10131)
 * **Azure**: Adds support for Azure blob storage as external image stor [#8955](https://github.com/grafana/grafana/issues/8955), thx [@saada](https://github.com/saada)
+* **Telegram**: Add support for inline image uploads to telegram notifier plugin [#9967](https://github.com/grafana/grafana/pull/9967), thx [@rburchell](https://github.com/rburchell)
 
 ## Tech
 * **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645)

+ 3 - 0
Makefile

@@ -26,3 +26,6 @@ test: test-go test-js
 
 run:
 	./bin/grafana-server
+
+protoc:
+	protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.

+ 6 - 6
docs/sources/administration/cli.md

@@ -10,17 +10,17 @@ weight = 8
 
 # Grafana CLI
 
-Grafana cli is a small executable that is bundled with grafana server and is suppose to be executed on the same machine as grafana runs.
+Grafana cli is a small executable that is bundled with Grafana-server and is supposed to be executed on the same machine Grafana-server is running on.
 
 ## Plugins
 
-The CLI helps you install, upgrade and manage your plugins on the same machine it CLI is running.
-You can find more information about how to install and manage your plugins at the
-[plugin page]({{< relref "plugins/installation.md" >}}).
+The CLI allows you to install, upgrade and manage your plugins on the machine it is running on.
+You can find more information about how to install and manage your plugins in the
+[plugins page]({{< relref "plugins/installation.md" >}}).
 
 ## Admin
 
-> This feature is only available in grafana 4.1 and above.
+> This feature is only available in Grafana 4.1 and above.
 
 To show all admin commands:
 `grafana-cli admin`
@@ -39,7 +39,7 @@ then there are two flags that can be used to set homepath and the config file pa
 
 `grafana-cli admin reset-admin-password --homepath "/usr/share/grafana" newpass`
 
-If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example with curl using basic auth:
+If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example using curl with basic auth:
 
 ```bash
 curl -X PUT -H "Content-Type: application/json" -d '{

+ 2 - 2
docs/sources/administration/metrics.md

@@ -10,6 +10,6 @@ weight = 8
 
 # Internal metrics
 
-Grafana collects some metrics about it self internally. Currently Grafana supports pushing metrics to graphite and exposing them to be scraped by Prometheus.
+Grafana collects some metrics about itself internally. Currently, Grafana supports pushing metrics to Graphite or exposing them to be scraped by Prometheus.
 
-To enabled internal metrics you have to enable it under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file.If you want to push metrics to graphite you have also have to configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section.
+To emit internal metrics you have to enable the option under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file. If you want to push metrics to Graphite, you must also configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section.

+ 21 - 21
docs/sources/administration/provisioning.md

@@ -12,7 +12,7 @@ weight = 8
 
 ## Config file
 
-Checkout the [configuration](/installation/configuration) page for more information about what you can configure in `grafana.ini`
+Checkout the [configuration](/installation/configuration) page for more information on what you can configure in `grafana.ini`
 
 ### Config file locations
 
@@ -35,7 +35,7 @@ GF_<SectionName>_<KeyName>
 ```
 
 Where the section name is the text within the brackets. Everything
-should be upper case, `.` should be replaced by `_`. For example, given these configuration settings:
+should be upper case and `.` should be replaced by `_`. For example, given these configuration settings:
 
 ```bash
 # default section
@@ -48,7 +48,7 @@ admin_user = admin
 client_secret = 0ldS3cretKey
 ```
 
-Then you can override them using:
+Overriding will be done like so:
 
 ```bash
 export GF_DEFAULT_INSTANCE_NAME=my-instance
@@ -60,7 +60,7 @@ export GF_AUTH_GOOGLE_CLIENT_SECRET=newS3cretKey
 
 ## Configuration management tools
 
-Currently we do not provide any scripts/manifests for configuring Grafana. Rather then spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefor, we heavily relay on the expertise of he community. 
+Currently we do not provide any scripts/manifests for configuring Grafana. Rather than spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefore, we heavily relay on the expertise of the community.
 
 Tool | Project
 -----|------------
@@ -70,14 +70,14 @@ Ansible | [https://github.com/picotrading/ansible-grafana](https://github.com/pi
 Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana)
 Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana)
 
-## Datasources 
+## Datasources
 
 > This feature is available from v5.0
 
 It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `delete_datasources`. Grafana will delete datasources listed in `delete_datasources` before inserting/updating those in the `datasource` list.
 
-### Running multiple grafana instances.
-If you are running multiple instances of Grafana you might run into problems if they have different versions of the datasource.yaml configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. That way old configs cannot overwrite newer configs if they restart at the same time. 
+### Running multiple Grafana instances.
+If you are running multiple instances of Grafana you might run into problems if they have different versions of the `datasource.yaml` configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. That way, old configs cannot overwrite newer configs if they restart at the same time.
 
 ### Example datasource config file
 ```yaml
@@ -86,7 +86,7 @@ delete_datasources:
   - name: Graphite
     org_id: 1
 
-# list of datasources to insert/update depending 
+# list of datasources to insert/update depending
 # whats available in the datbase
 datasources:
   # <string, required> name of the datasource. Required
@@ -116,7 +116,7 @@ datasources:
   # <bool> mark as default datasource. Max one per org
   is_default:
   # <map> fields that will be converted to json and stored in json_data
-  json_data: 
+  json_data:
      graphiteVersion: "1.1"
      tlsAuth: true
      tlsAuthWithCACert: true
@@ -132,7 +132,7 @@ datasources:
 
 #### Json data
 
-Since all datasources dont have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use. 
+Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use.
 
 | Name | Type | Datasource |Description |
 | ----| ---- | ---- | --- |
@@ -141,37 +141,37 @@ Since all datasources dont have the same configuration settings we only have the
 | tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
 | graphiteVersion | string | Graphite |  Graphite version  |
 | timeInterval | string | Elastic, Influxdb & Prometheus | Lowest interval/step value that should be used for this data source |
-| esVersion | string | Elastic | Elasticsearch version | 
-| timeField | string | Elastic | Which field that should be used as timestamp | 
+| esVersion | string | Elastic | Elasticsearch version |
+| timeField | string | Elastic | Which field that should be used as timestamp |
 | interval | string | Elastic | Index date time format |
 | authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
-| assumeRoleArn | string | Cloudwatch | ARN of Assume Role | 
+| assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
 | defaultRegion | string | Cloudwatch | AWS region |
-| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics | 
+| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics |
 | tsdbVersion | string | OpenTsdb | Version |
 | tsdbResolution | string | OpenTsdb | Resolution |
-| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' | 
+| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
 
 
 #### Secure Json data
 
 {"authType":"keys","defaultRegion":"us-west-2","timeField":"@timestamp"}
 
-Secure json data is a map of settings that will be encrypted with [secret key](/installation/configuration/#secret-key) from the grafana config. The purpose of this is only to hide content from the users of the application. This should be used for storing TLS Cert and password that Grafana will append to request on the server side. All these settings are optional.
+Secure json data is a map of settings that will be encrypted with [secret key](/installation/configuration/#secret-key) from the Grafana config. The purpose of this is only to hide content from the users of the application. This should be used for storing TLS Cert and password that Grafana will append to the request on the server side. All of these settings are optional.
 
 | Name | Type | Datasource | Description |
 | ----| ---- | ---- | --- |
 | tlsCACert | string | *All* |CA cert for out going requests |
 | tlsClientCert | string | *All* |TLS Client cert for outgoing requests |
 | tlsClientKey | string | *All* |TLS Client key for outgoing requests |
-| password | string | Postgre | password | 
-| user | string | Postgre | user | 
+| password | string | Postgre | password |
+| user | string | Postgre | user |
 
 ### Dashboards
 
-It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into grafana. Currently we only support reading dashboards from file but we will add more providers in the future. 
+It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana. Currently we only support reading dashboards from file but we will add more providers in the future.
 
-The dashboard provider config file looks like this
+The dashboard provider config file looks somewhat like this:
 
 ```yaml
 - name: 'default'
@@ -182,4 +182,4 @@ The dashboard provider config file looks like this
     folder: /var/lib/grafana/dashboards
 ```
 
-When grafana starts it will update/insert all dashboards available in the configured folders. If you modify the file the dashboard will also be updated. 
+When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.

+ 1 - 1
docs/sources/alerting/metrics.md

@@ -13,7 +13,7 @@ weight = 2
 
 > Alerting is only available in Grafana v4.0 and above.
 
-The alert engine publishes some internal metrics about itself. You can read more about how Grafana published [internal metrics](/installation/configuration/#metrics).
+The alert engine publishes some internal metrics about itself. You can read more about how Grafana publishes [internal metrics](/installation/configuration/#metrics).
 
 Description | Type | Metric name
 ---------- | ----------- | ----------

+ 27 - 27
docs/sources/alerting/notifications.md

@@ -14,9 +14,9 @@ weight = 2
 
 > Alerting is only available in Grafana v4.0 and above.
 
-When an alert changes state it sends out notifications. Each alert rule can have
-multiple notifications. But in order to add a notification to an alert rule you first need
-to add and configure a `notification` channel (can be email, Pagerduty or other integration). This is done from the Notification Channels page.
+When an alert changes state, it sends out notifications. Each alert rule can have
+multiple notifications. In order to add a notification to an alert rule you first need
+to add and configure a `notification` channel (can be email, PagerDuty or other integration). This is done from the Notification Channels page.
 
 ## Notification Channel Setup
 
@@ -25,12 +25,12 @@ to add and configure a `notification` channel (can be email, Pagerduty or other
 On the Notification Channels page hit the `New Channel` button to go the page where you
 can configure and setup a new Notification Channel.
 
-You specify name and type, and type specific options. You can also test the notification to make
-sure it's working and setup correctly.
+You specify a name and a type, and type specific options. You can also test the notification to make
+sure it's setup correctly.
 
 ### Send on all alerts
 
-When checked this option will make this notification used for all alert rules, existing and new.
+When checked, this option will nofity for all alert rules - existing and new.
 
 ## Supported Notification Types
 
@@ -38,39 +38,39 @@ Grafana ships with the following set of notification types:
 
 ### Email
 
-To enable email notification you have to setup [SMTP settings](/installation/configuration/#smtp)
-in the Grafana config. Email notification will upload an image of the alert graph to an
-external image destination if available or fallback to attaching the image in the email.
+To enable email notifications you have to setup [SMTP settings](/installation/configuration/#smtp)
+in the Grafana config. Email notifications will upload an image of the alert graph to an
+external image destination if available or fallback to attaching the image to the email.
 
 ### Slack
 
 {{< imgbox max-width="40%" img="/img/docs/v4/slack_notification.png" caption="Alerting Slack Notification" >}}
 
-To set up slack you need to configure an incoming webhook url at slack. You can follow their guide for how
-to do that https://api.slack.com/incoming-webhooks If you want to include screenshots of the firing alerts
-in the slack messages you have to configure either the [external image destination](#external-image-store) in Grafana,
+To set up slack you need to configure an incoming webhook url at slack. You can follow their guide on how
+to do that [here](https://api.slack.com/incoming-webhooks). If you want to include screenshots of the firing alerts
+in the Slack messages you have to configure either the [external image destination](#external-image-store) in Grafana,
 or a bot integration via Slack Apps. Follow Slack's guide to set up a bot integration and use the token provided
-https://api.slack.com/bot-users, which starts with "xoxb".
+(https://api.slack.com/bot-users), which starts with "xoxb".
 
 Setting | Description
 ---------- | -----------
-Recipient | allows you to override the slack recipient.
-Mention | make it possible to include a mention in the slack notification sent by Grafana. Ex @here or @channel
+Recipient | allows you to override the Slack recipient.
+Mention | make it possible to include a mention in the Slack notification sent by Grafana. Ex @here or @channel
 Token | If provided, Grafana will upload the generated image via Slack's file.upload API method, not the external image destination.
 
 ### PagerDuty
 
-To set up PagerDuty, all you have to do is to provide an api key.
+To set up PagerDuty, all you have to do is to provide an API key.
 
 Setting | Description
 ---------- | -----------
-Integration Key | Integration key for pagerduty.
-Auto resolve incidents | Resolve incidents in pagerduty once the alert goes back to ok
+Integration Key | Integration key for PagerDuty.
+Auto resolve incidents | Resolve incidents in PagerDuty once the alert goes back to ok
 
 ### Webhook
 
-The webhook notification is a simple way to send information about an state change over HTTP to a custom endpoint.
-Using this notification you could integrate Grafana into any system you choose, by yourself.
+The webhook notification is a simple way to send information about a state change over HTTP to a custom endpoint.
+Using this notification you could integrate Grafana into a system of your choosing.
 
 Example json body:
 
@@ -117,19 +117,19 @@ Dingtalk supports the following "message type": `text`, `link` and `markdown`. O
 
 ### Kafka
 
-Notifications can be sent to a Kafka topic from Grafana using [Kafka REST Proxy](https://docs.confluent.io/1.0/kafka-rest/docs/index.html).
-There are couple of configurations options which need to be set in Grafana UI under Kafka Settings:
+Notifications can be sent to a Kafka topic from Grafana using the [Kafka REST Proxy](https://docs.confluent.io/1.0/kafka-rest/docs/index.html).
+There are a couple of configuration options which need to be set up in Grafana UI under Kafka Settings:
 
 1. Kafka REST Proxy endpoint.
 
 2. Kafka Topic.
 
-Once these two properties are set, you can send the alerts to Kafka for further processing or throttling them.
+Once these two properties are set, you can send the alerts to Kafka for further processing or throttling.
 
 ### All supported notifier
 
-Name | Type |Support images 
------|------------ | ------  
+Name | Type |Support images
+-----|------------ | ------
 Slack | `slack` | yes
 Pagerduty | `pagerduty` | yes
 Email | `email` | yes
@@ -154,9 +154,9 @@ Amazon S3, Webdav, and Azure Blob Storage for this. So to set that up you need t
 
 Currently only the Email Channels attaches images if no external image store is specified. To include images in alert notifications for other channels then you need to set up an external image store.
 
-This is an optional requirement, you can get Slack and email notifications without setting this up.
+This is an optional requirement. You can get Slack and email notifications without setting this up.
 
 # Configure the link back to Grafana from alert notifications
 
-All alert notifications contains a link back to the triggered alert in the Grafana instance.
+All alert notifications contain a link back to the triggered alert in the Grafana instance.
 This url is based on the [domain](/installation/configuration/#domain) setting in Grafana.

+ 36 - 0
docs/sources/installation/behind_proxy.md

@@ -68,5 +68,41 @@ server {
   }
 }
 ```
+### IIS URL Rewrite Rule (Windows) with Subpath
 
+IIS requires that the URL Rewrite module is installed.
 
+Given:
+
+- subpath `grafana`
+- Grafana installed on `http://localhost:3000`
+- server config:
+
+    ```bash
+    [server]
+    domain = localhost:8080
+    root_url = %(protocol)s://%(domain)s:/grafana
+    ```
+
+Create an Inbound Rule for the parent website (localhost:8080 in this example) in IIS Manager with the following settings:
+
+- pattern: `grafana(/)?(.*)`
+- check the `Ignore case` checkbox
+- rewrite url set to `http://localhost:3000/{R:2}`
+- check the `Append query string` checkbox
+- check the `Stop processing of subsequent rules` checkbox
+
+This is the rewrite rule that is generated in the `web.config`:
+
+```xml
+  <rewrite>
+      <rules>
+          <rule name="Grafana" enabled="true" stopProcessing="true">
+              <match url="grafana(/)?(.*)" />
+              <action type="Rewrite" url="http://localhost:3000/{R:2}" logRewrittenUrl="false" />
+          </rule>
+      </rules>
+  </rewrite>
+```
+
+See the [tutorial on IIS Url Rewrites](http://docs.grafana.org/tutorials/iis/) for more in-depth instructions.

+ 89 - 0
docs/sources/tutorials/iis.md

@@ -0,0 +1,89 @@
++++
+title = "Grafana with IIS Reverse Proxy on Windows"
+type = "docs"
+keywords = ["grafana", "tutorials", "proxy", "IIS", "windows"]
+[menu.docs]
+parent = "tutorials"
+weight = 10
++++
+
+# How to Use IIS with URL Rewrite as a Reverse Proxy for Grafana on Windows
+
+If you want Grafana to be a subpath or subfolder under a website in IIS then the URL Rewrite module for ISS can be used to support this.
+
+Example:
+
+- Parent site: http://localhost:8080
+- Grafana: http://localhost:3000
+
+Grafana as a subpath: http://localhost:8080/grafana 
+
+## Setup
+
+If you have not already done it, then a requirement is to install URL Rewrite module for IIS.
+
+Download and install the URL Rewrite module for IIS: https://www.iis.net/downloads/microsoft/url-rewrite
+
+## Grafana Config
+
+The Grafana config can be set by creating a file named `custom.ini` in the `conf` subdirectory of your Grafana installation. See the [installation instructions](http://docs.grafana.org/installation/windows/#configure) for more details.
+
+Given that the subpath should be `grafana` and the parent site is `localhost:8080` then add this to the `custom.ini` config file:
+
+ ```bash
+[server]
+domain = localhost:8080
+root_url = %(protocol)s://%(domain)s:/grafana
+```
+
+Restart the Grafana server after changing the config file.
+
+## IIS Config
+
+1. Open the IIS Manager and click on the parent website
+2. In the admin console for this website, double click on the Url Rewrite option:
+    {{< docs-imagebox img="/img/docs/tutorials/IIS_admin_console.png"  max-width= "800px" >}}
+
+3. Click on the `Add Rule(s)...` action
+4. Choose the Blank Rule template for an Inbound Rule
+    {{< docs-imagebox img="/img/docs/tutorials/IIS_add_inbound_rule.png"  max-width= "800px" >}}
+
+5. Create an Inbound Rule for the parent website (localhost:8080 in this example) with the following settings:
+  - pattern: `grafana(/)?(.*)`
+  - check the `Ignore case` checkbox
+  - rewrite url set to `http://localhost:3000/{R:2}`
+  - check the `Append query string` checkbox
+  - check the `Stop processing of subsequent rules` checkbox
+
+    {{< docs-imagebox img="/img/docs/tutorials/IIS_url_rewrite.png"  max-width= "800px" >}}
+
+Finally, navigate to `http://localhost:8080/grafana` (replace `http://localhost:8080` with your parent domain) and you should come to the Grafana login page.
+
+## Troubleshooting
+
+### 404 error
+
+When navigating to the grafana url (`http://localhost:8080/grafana` in the example above) and a `HTTP Error 404.0 - Not Found` error is returned then either:
+
+- the pattern for the Inbound Rule is incorrect. Edit the rule, click on the `Test pattern...` button, test the part of the url after `http://localhost:8080/` and make sure it matches. For `grafana/login` the test should return 3 capture groups: {R:0}: `grafana` {R:1}: `/` and {R:2}: `login`.
+- The `root_url` setting in the Grafana config file does not match the parent url with subpath.
+
+### Grafana Website only shows text with no images or css
+
+{{< docs-imagebox img="/img/docs/tutorials/IIS_proxy_error.png"  max-width= "800px" >}}
+
+1. The `root_url` setting in the Grafana config file does not match the parent url with subpath. This could happen if the root_url is commented out by mistake (`;` is used for commenting out a line in .ini files):
+
+    `; root_url = %(protocol)s://%(domain)s:/grafana`
+
+2. or if the subpath in the `root_url` setting does not match the subpath used in the pattern in the Inbound Rule in IIS:
+
+    `root_url = %(protocol)s://%(domain)s:/grafana`
+
+    pattern in Inbound Rule: `wrongsubpath(/)?(.*)`
+
+3. or if the Rewrite Url in the Inbound Rule is incorrect. 
+
+    The Rewrite Url should not include the subpath. 
+
+    The Rewrite Url should contain the capture group from the pattern matching that returns the part of the url after the subpath. The pattern used above returns 3 capture groups and the third one {R:2} returns the part of the url after `http://localhost:8080/grafana/`.

+ 4 - 1
package.json

@@ -92,6 +92,7 @@
     "webpack": "^3.10.0",
     "webpack-bundle-analyzer": "^2.9.0",
     "webpack-cleanup-plugin": "^0.5.1",
+    "angular-mocks": "^1.6.6",
     "webpack-merge": "^4.1.0",
     "zone.js": "^0.7.2"
   },
@@ -125,7 +126,6 @@
   "dependencies": {
     "angular": "^1.6.6",
     "angular-bindonce": "^0.3.1",
-    "angular-mocks": "^1.6.6",
     "angular-native-dragdrop": "^1.2.2",
     "angular-route": "^1.6.6",
     "angular-sanitize": "^1.6.6",
@@ -149,6 +149,9 @@
     "react": "^16.2.0",
     "react-dom": "^16.2.0",
     "react-grid-layout": "^0.16.1",
+    "react-popper": "^0.7.5",
+    "react-highlight-words": "^0.10.0",
+    "react-select": "^1.1.0",
     "react-sizeme": "^2.3.6",
     "remarkable": "^1.7.1",
     "rxjs": "^5.4.3",

+ 1 - 1
pkg/api/login_oauth.go

@@ -35,7 +35,7 @@ var (
 func GenStateString() string {
 	rnd := make([]byte, 32)
 	rand.Read(rnd)
-	return base64.StdEncoding.EncodeToString(rnd)
+	return base64.URLEncoding.EncodeToString(rnd)
 }
 
 func OAuthLogin(ctx *middleware.Context) {

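An editorial aside on the `login_oauth.go` hunk (not part of the commit): `base64.StdEncoding` can emit `+` and `/`, which need escaping when the OAuth state value travels in a query string, whereas `base64.URLEncoding` uses the URL-safe `-` and `_`. A minimal sketch of the difference:

```go
// Sketch only: why the URL-safe alphabet matters for the OAuth state parameter.
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	raw := []byte{0xfb, 0xff, 0xfe, 0x3e}
	fmt.Println(base64.StdEncoding.EncodeToString(raw)) // "+//+Pg==" contains '+' and '/'
	fmt.Println(base64.URLEncoding.EncodeToString(raw)) // "-__-Pg==" uses '-' and '_'
}
```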
+ 8 - 3
pkg/cmd/grafana-server/server.go

@@ -62,17 +62,22 @@ func (g *GrafanaServerImpl) Start() error {
 	search.Init()
 	login.Init()
 	social.NewOAuthService()
-	plugins.Init()
+
+	pluginManager, err := plugins.NewPluginManager(g.context)
+	if err != nil {
+		return fmt.Errorf("Failed to start plugins. error: %v", err)
+	}
+	g.childRoutines.Go(func() error { return pluginManager.Run(g.context) })
 
 	if err := provisioning.Init(g.context, setting.HomePath, setting.Cfg); err != nil {
 		return fmt.Errorf("Failed to provision Grafana from config. error: %v", err)
 	}
 
-	closer, err := tracing.Init(setting.Cfg)
+	tracingCloser, err := tracing.Init(setting.Cfg)
 	if err != nil {
 		return fmt.Errorf("Tracing settings is not valid. error: %v", err)
 	}
-	defer closer.Close()
+	defer tracingCloser.Close()
 
 	// init alerting
 	if setting.AlertingEnabled && setting.ExecuteAlerts {

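A note on the `server.go` hunk (mine, not from the diff): `pluginManager.Run` is registered with `g.childRoutines`, so the plugin manager stops together with the server. Assuming `childRoutines` behaves like an `errgroup.Group` tied to the server context, which the diff does not state explicitly, the pattern looks roughly like this:

```go
// Sketch: a long-running manager tied to server shutdown via errgroup + context.
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

func run(ctx context.Context) error {
	<-ctx.Done() // block until the server shuts down
	fmt.Println("plugin manager stopped:", ctx.Err())
	return ctx.Err()
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	g, ctx := errgroup.WithContext(ctx)

	g.Go(func() error { return run(ctx) }) // analogous to childRoutines.Go(...)

	cancel()     // simulate server shutdown
	_ = g.Wait() // wait for every registered routine to return
}
```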
+ 2 - 2
pkg/log/log.go

@@ -236,8 +236,8 @@ func LogFilterHandler(maxLevel log15.Lvl, filters map[string]log15.Lvl, h log15.
 
 		if len(filters) > 0 {
 			for i := 0; i < len(r.Ctx); i += 2 {
-				key := r.Ctx[i].(string)
-				if key == "logger" {
+				key, ok := r.Ctx[i].(string)
+				if ok && key == "logger" {
 					loggerName, strOk := r.Ctx[i+1].(string)
 					if strOk {
 						if filterLevel, ok := filters[loggerName]; ok {

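For context on the `log.go` hunk (an illustration, not part of the commit): a bare assertion such as `r.Ctx[i].(string)` panics when the value is not a string, while the comma-ok form returns a zero value and `false`, letting the filter loop simply skip the entry:

```go
// Sketch: direct type assertion vs the comma-ok form adopted in LogFilterHandler.
package main

import "fmt"

func main() {
	var v interface{} = 42

	// s := v.(string) // would panic: interface conversion

	s, ok := v.(string)          // comma-ok never panics
	fmt.Printf("%q %v\n", s, ok) // "" false, so the caller can skip the entry
}
```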
+ 2 - 1
pkg/plugins/dashboard_importer_test.go

@@ -1,6 +1,7 @@
 package plugins
 
 import (
+	"context"
 	"io/ioutil"
 	"testing"
 
@@ -91,7 +92,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) {
 		setting.Cfg = ini.Empty()
 		sec, _ := setting.Cfg.NewSection("plugin.test-app")
 		sec.NewKey("path", "../../tests/test-app")
-		err := Init()
+		err := initPlugins(context.Background())
 
 		So(err, ShouldBeNil)
 

+ 2 - 1
pkg/plugins/dashboards_test.go

@@ -1,6 +1,7 @@
 package plugins
 
 import (
+	"context"
 	"testing"
 
 	"github.com/grafana/grafana/pkg/bus"
@@ -17,7 +18,7 @@ func TestPluginDashboards(t *testing.T) {
 		setting.Cfg = ini.Empty()
 		sec, _ := setting.Cfg.NewSection("plugin.test-app")
 		sec.NewKey("path", "../../tests/test-app")
-		err := Init()
+		err := initPlugins(context.Background())
 
 		So(err, ShouldBeNil)
 

+ 150 - 0
pkg/plugins/datasource/tsdb/datasource_plugin_wrapper.go

@@ -0,0 +1,150 @@
+package tsdb
+
+import (
+	"fmt"
+	"github.com/grafana/grafana/pkg/components/null"
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+	proto "github.com/grafana/grafana/pkg/tsdb/models"
+	"golang.org/x/net/context"
+)
+
+func NewDatasourcePluginWrapper(log log.Logger, plugin TsdbPlugin) *DatasourcePluginWrapper {
+	return &DatasourcePluginWrapper{TsdbPlugin: plugin, logger: log}
+}
+
+type DatasourcePluginWrapper struct {
+	TsdbPlugin
+
+	logger log.Logger
+}
+
+func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
+	jsonData, err := ds.JsonData.MarshalJSON()
+	if err != nil {
+		return nil, err
+	}
+
+	pbQuery := &proto.TsdbQuery{
+		Datasource: &proto.DatasourceInfo{
+			JsonData: string(jsonData),
+			Name:     ds.Name,
+			Type:     ds.Type,
+			Url:      ds.Url,
+			Id:       ds.Id,
+			OrgId:    ds.OrgId,
+		},
+		TimeRange: &proto.TimeRange{
+			FromRaw:     query.TimeRange.From,
+			ToRaw:       query.TimeRange.To,
+			ToEpochMs:   query.TimeRange.GetToAsMsEpoch(),
+			FromEpochMs: query.TimeRange.GetFromAsMsEpoch(),
+		},
+		Queries: []*proto.Query{},
+	}
+
+	for _, q := range query.Queries {
+		modelJson, _ := q.Model.MarshalJSON()
+
+		pbQuery.Queries = append(pbQuery.Queries, &proto.Query{
+			ModelJson:     string(modelJson),
+			IntervalMs:    q.IntervalMs,
+			RefId:         q.RefId,
+			MaxDataPoints: q.MaxDataPoints,
+		})
+	}
+
+	pbres, err := tw.TsdbPlugin.Query(ctx, pbQuery)
+
+	if err != nil {
+		return nil, err
+	}
+
+	res := &tsdb.Response{
+		Results: map[string]*tsdb.QueryResult{},
+	}
+
+	for _, r := range pbres.Results {
+		res.Results[r.RefId] = &tsdb.QueryResult{
+			RefId:  r.RefId,
+			Series: []*tsdb.TimeSeries{},
+		}
+
+		for _, s := range r.GetSeries() {
+			points := tsdb.TimeSeriesPoints{}
+
+			for _, p := range s.Points {
+				po := tsdb.NewTimePoint(null.FloatFrom(p.Value), float64(p.Timestamp))
+				points = append(points, po)
+			}
+
+			res.Results[r.RefId].Series = append(res.Results[r.RefId].Series, &tsdb.TimeSeries{
+				Name:   s.Name,
+				Tags:   s.Tags,
+				Points: points,
+			})
+		}
+
+		mappedTables, err := tw.mapTables(r)
+		if err != nil {
+			return nil, err
+		}
+		res.Results[r.RefId].Tables = mappedTables
+	}
+
+	return res, nil
+}
+func (tw *DatasourcePluginWrapper) mapTables(r *proto.QueryResult) ([]*tsdb.Table, error) {
+	var tables []*tsdb.Table
+	for _, t := range r.GetTables() {
+		mappedTable, err := tw.mapTable(t)
+		if err != nil {
+			return nil, err
+		}
+		tables = append(tables, mappedTable)
+	}
+	return tables, nil
+}
+
+func (tw *DatasourcePluginWrapper) mapTable(t *proto.Table) (*tsdb.Table, error) {
+	table := &tsdb.Table{}
+	for _, c := range t.GetColumns() {
+		table.Columns = append(table.Columns, tsdb.TableColumn{
+			Text: c.Name,
+		})
+	}
+
+	for _, r := range t.GetRows() {
+		row := tsdb.RowValues{}
+		for _, rv := range r.Values {
+			mappedRw, err := tw.mapRowValue(rv)
+			if err != nil {
+				return nil, err
+			}
+
+			row = append(row, mappedRw)
+		}
+		table.Rows = append(table.Rows, row)
+	}
+
+	return table, nil
+}
+func (tw *DatasourcePluginWrapper) mapRowValue(rv *proto.RowValue) (interface{}, error) {
+	switch rv.Kind {
+	case proto.RowValue_TYPE_NULL:
+		return nil, nil
+	case proto.RowValue_TYPE_INT64:
+		return rv.Int64Value, nil
+	case proto.RowValue_TYPE_BOOL:
+		return rv.BoolValue, nil
+	case proto.RowValue_TYPE_STRING:
+		return rv.StringValue, nil
+	case proto.RowValue_TYPE_DOUBLE:
+		return rv.DoubleValue, nil
+	case proto.RowValue_TYPE_BYTES:
+		return rv.BytesValue, nil
+	default:
+		return nil, fmt.Errorf("Unsupported row value %v from plugin", rv.Kind)
+	}
+}

+ 108 - 0
pkg/plugins/datasource/tsdb/datasource_plugin_wrapper_test.go

@@ -0,0 +1,108 @@
+package tsdb
+
+import (
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/tsdb/models"
+	"testing"
+)
+
+func TestMapTables(t *testing.T) {
+	dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil)
+	var qr = &proto.QueryResult{}
+	qr.Tables = append(qr.Tables, &proto.Table{
+		Columns: []*proto.TableColumn{},
+		Rows:    nil,
+	})
+	want := []*tsdb.Table{{}}
+
+	have, err := dpw.mapTables(qr)
+	if err != nil {
+		t.Errorf("failed to map tables. error: %v", err)
+	}
+	if len(want) != len(have) {
+		t.Errorf("could not map all tables")
+	}
+}
+
+func TestMapTable(t *testing.T) {
+	dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil)
+
+	source := &proto.Table{
+		Columns: []*proto.TableColumn{{Name: "column1"}, {Name: "column2"}},
+		Rows: []*proto.TableRow{{
+			Values: []*proto.RowValue{
+				{
+					Kind:      proto.RowValue_TYPE_BOOL,
+					BoolValue: true,
+				},
+				{
+					Kind:       proto.RowValue_TYPE_INT64,
+					Int64Value: 42,
+				},
+			},
+		}},
+	}
+
+	want := &tsdb.Table{
+		Columns: []tsdb.TableColumn{{Text: "column1"}, {Text: "column2"}},
+	}
+	have, err := dpw.mapTable(source)
+	if err != nil {
+		t.Fatalf("failed to map table. error: %v", err)
+	}
+
+	for i := range have.Columns {
+		if want.Columns[i] != have.Columns[i] {
+			t.Fatalf("have column: %s, want %s", have, want)
+		}
+	}
+
+	if len(have.Rows) != 1 {
+		t.Fatalf("Expects one row but got %d", len(have.Rows))
+	}
+
+	rowValuesCount := len(have.Rows[0])
+	if rowValuesCount != 2 {
+		t.Fatalf("Expects two row values, got %d", rowValuesCount)
+	}
+}
+
+func TestMappingRowValue(t *testing.T) {
+	dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil)
+
+	boolRowValue, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_BOOL, BoolValue: true})
+	haveBool, ok := boolRowValue.(bool)
+	if !ok || haveBool != true {
+		t.Fatalf("Expected true, was %s", haveBool)
+	}
+
+	intRowValue, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_INT64, Int64Value: 42})
+	haveInt, ok := intRowValue.(int64)
+	if !ok || haveInt != 42 {
+		t.Fatalf("Expected %d, was %d", 42, haveInt)
+	}
+
+	stringRowValue, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_STRING, StringValue: "grafana"})
+	haveString, ok := stringRowValue.(string)
+	if !ok || haveString != "grafana" {
+		t.Fatalf("Expected %s, was %s", "grafana", haveString)
+	}
+
+	doubleRowValue, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_DOUBLE, DoubleValue: 1.5})
+	haveDouble, ok := doubleRowValue.(float64)
+	if !ok || haveDouble != 1.5 {
+		t.Fatalf("Expected %v, was %v", 1.5, haveDouble)
+	}
+
+	bytesRowValue, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_BYTES, BytesValue: []byte{66}})
+	haveBytes, ok := bytesRowValue.([]byte)
+	if !ok || len(haveBytes) != 1 || haveBytes[0] != 66 {
+		t.Fatalf("Expected %v, was %v", []byte{66}, haveBytes)
+	}
+
+	haveNil, _ := dpw.mapRowValue(&proto.RowValue{Kind: proto.RowValue_TYPE_NULL})
+	if haveNil != nil {
+		t.Fatalf("Expected %v, was %v", nil, haveNil)
+	}
+}

+ 22 - 0
pkg/plugins/datasource/tsdb/grpc.go

@@ -0,0 +1,22 @@
+package tsdb
+
+import (
+	proto "github.com/grafana/grafana/pkg/tsdb/models"
+	"golang.org/x/net/context"
+)
+
+type GRPCClient struct {
+	proto.TsdbPluginClient
+}
+
+func (m *GRPCClient) Query(ctx context.Context, req *proto.TsdbQuery) (*proto.Response, error) {
+	return m.TsdbPluginClient.Query(ctx, req)
+}
+
+type GRPCServer struct {
+	TsdbPlugin
+}
+
+func (m *GRPCServer) Query(ctx context.Context, req *proto.TsdbQuery) (*proto.Response, error) {
+	return m.TsdbPlugin.Query(ctx, req)
+}

+ 27 - 0
pkg/plugins/datasource/tsdb/interface.go

@@ -0,0 +1,27 @@
+package tsdb
+
+import (
+	"golang.org/x/net/context"
+
+	proto "github.com/grafana/grafana/pkg/tsdb/models"
+	plugin "github.com/hashicorp/go-plugin"
+	"google.golang.org/grpc"
+)
+
+type TsdbPlugin interface {
+	Query(ctx context.Context, req *proto.TsdbQuery) (*proto.Response, error)
+}
+
+type TsdbPluginImpl struct {
+	plugin.NetRPCUnsupportedPlugin
+	Plugin TsdbPlugin
+}
+
+func (p *TsdbPluginImpl) GRPCServer(s *grpc.Server) error {
+	proto.RegisterTsdbPluginServer(s, &GRPCServer{p.Plugin})
+	return nil
+}
+
+func (p *TsdbPluginImpl) GRPCClient(c *grpc.ClientConn) (interface{}, error) {
+	return &GRPCClient{proto.NewTsdbPluginClient(c)}, nil
+}

+ 103 - 0
pkg/plugins/datasource_plugin.go

@@ -1,9 +1,22 @@
 package plugins
 
 import (
+	"context"
 	"encoding/json"
+	"fmt"
 	"os"
+	"os/exec"
+	"path"
 	"path/filepath"
+	"runtime"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
+	shared "github.com/grafana/grafana/pkg/plugins/datasource/tsdb"
+	"github.com/grafana/grafana/pkg/tsdb"
+	plugin "github.com/hashicorp/go-plugin"
 )
 
 type DataSourcePlugin struct {
@@ -16,6 +29,12 @@ type DataSourcePlugin struct {
 	Mixed        bool              `json:"mixed,omitempty"`
 	HasQueryHelp bool              `json:"hasQueryHelp,omitempty"`
 	Routes       []*AppPluginRoute `json:"routes"`
+
+	Backend    bool   `json:"backend,omitempty"`
+	Executable string `json:"executable,omitempty"`
+
+	log    log.Logger
+	client *plugin.Client
 }
 
 func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error {
@@ -39,3 +58,87 @@ func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error {
 	DataSources[p.Id] = p
 	return nil
 }
+
+var handshakeConfig = plugin.HandshakeConfig{
+	ProtocolVersion:  1,
+	MagicCookieKey:   "grafana_plugin_type",
+	MagicCookieValue: "datasource",
+}
+
+func composeBinaryName(executable, os, arch string) string {
+	var extension string
+	os = strings.ToLower(os)
+	if os == "windows" {
+		extension = ".exe"
+	}
+
+	return fmt.Sprintf("%s_%s_%s%s", executable, os, strings.ToLower(arch), extension)
+}
+
+func (p *DataSourcePlugin) initBackendPlugin(ctx context.Context, log log.Logger) error {
+	p.log = log.New("plugin-id", p.Id)
+
+	err := p.spawnSubProcess()
+	if err == nil {
+		go p.restartKilledProcess(ctx)
+	}
+
+	return err
+}
+
+func (p *DataSourcePlugin) spawnSubProcess() error {
+	cmd := composeBinaryName(p.Executable, runtime.GOOS, runtime.GOARCH)
+	fullpath := path.Join(p.PluginDir, cmd)
+
+	p.client = plugin.NewClient(&plugin.ClientConfig{
+		HandshakeConfig:  handshakeConfig,
+		Plugins:          map[string]plugin.Plugin{p.Id: &shared.TsdbPluginImpl{}},
+		Cmd:              exec.Command(fullpath),
+		AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},
+		Logger:           LogWrapper{Logger: p.log},
+	})
+
+	rpcClient, err := p.client.Client()
+	if err != nil {
+		return err
+	}
+
+	raw, err := rpcClient.Dispense(p.Id)
+	if err != nil {
+		return err
+	}
+
+	plugin := raw.(shared.TsdbPlugin)
+
+	tsdb.RegisterTsdbQueryEndpoint(p.Id, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+		return shared.NewDatasourcePluginWrapper(p.log, plugin), nil
+	})
+
+	return nil
+}
+
+func (p *DataSourcePlugin) restartKilledProcess(ctx context.Context) error {
+	ticker := time.NewTicker(time.Second * 1)
+
+	for {
+		select {
+		case <-ctx.Done():
+			return ctx.Err()
+		case <-ticker.C:
+			if p.client.Exited() {
+				err := p.spawnSubProcess()
+				p.log.Debug("Spawning new sub process", "name", p.Name, "id", p.Id)
+				if err != nil {
+					p.log.Error("Failed to spawn subprocess")
+				}
+			}
+		}
+	}
+}
+
+func (p *DataSourcePlugin) Kill() {
+	if p.client != nil {
+		p.log.Debug("Killing subprocess ", "name", p.Name)
+		p.client.Kill()
+	}
+}

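To complement the host-side code in `datasource_plugin.go`: a backend datasource binary serves the same interface over go-plugin. The sketch below is illustrative only; `MyDatasource` and the `"my-datasource"` key are hypothetical, and the handshake values must match `handshakeConfig` above.

```go
// Illustrative plugin-side main; not part of the commit.
package main

import (
	"golang.org/x/net/context"

	shared "github.com/grafana/grafana/pkg/plugins/datasource/tsdb"
	proto "github.com/grafana/grafana/pkg/tsdb/models"
	plugin "github.com/hashicorp/go-plugin"
)

// MyDatasource is a hypothetical implementation of the TsdbPlugin interface.
type MyDatasource struct{}

func (MyDatasource) Query(ctx context.Context, req *proto.TsdbQuery) (*proto.Response, error) {
	// A real plugin would execute the queries here.
	return &proto.Response{}, nil
}

func main() {
	plugin.Serve(&plugin.ServeConfig{
		HandshakeConfig: plugin.HandshakeConfig{
			ProtocolVersion:  1,
			MagicCookieKey:   "grafana_plugin_type",
			MagicCookieValue: "datasource",
		},
		Plugins: map[string]plugin.Plugin{
			// The key must match the plugin id the server dispenses.
			"my-datasource": &shared.TsdbPluginImpl{Plugin: MyDatasource{}},
		},
		GRPCServer: plugin.DefaultGRPCServer,
	})
}
```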
+ 35 - 0
pkg/plugins/datasource_plugin_test.go

@@ -0,0 +1,35 @@
+package plugins
+
+import (
+	"testing"
+)
+
+func TestComposeBinaryName(t *testing.T) {
+	tests := []struct {
+		name string
+		os   string
+		arch string
+
+		expectedPath string
+	}{
+		{
+			name:         "simple-json",
+			os:           "linux",
+			arch:         "amd64",
+			expectedPath: `simple-json_linux_amd64`,
+		},
+		{
+			name:         "simple-json",
+			os:           "windows",
+			arch:         "amd64",
+			expectedPath: `simple-json_windows_amd64.exe`,
+		},
+	}
+
+	for _, v := range tests {
+		have := composeBinaryName(v.name, v.os, v.arch)
+		if have != v.expectedPath {
+			t.Errorf("expected %s got %s", v.expectedPath, have)
+		}
+	}
+}

+ 49 - 0
pkg/plugins/hclog-wrapper.go

@@ -0,0 +1,49 @@
+package plugins
+
+import (
+	"log"
+
+	glog "github.com/grafana/grafana/pkg/log"
+	hclog "github.com/hashicorp/go-hclog"
+)
+
+type LogWrapper struct {
+	Logger glog.Logger
+}
+
+func (lw LogWrapper) Trace(msg string, args ...interface{}) {
+	lw.Logger.Debug(msg, args...)
+}
+func (lw LogWrapper) Debug(msg string, args ...interface{}) {
+	lw.Logger.Debug(msg, args...)
+}
+func (lw LogWrapper) Info(msg string, args ...interface{}) {
+	lw.Logger.Info(msg, args...)
+}
+func (lw LogWrapper) Warn(msg string, args ...interface{}) {
+	lw.Logger.Warn(msg, args...)
+}
+func (lw LogWrapper) Error(msg string, args ...interface{}) {
+	lw.Logger.Error(msg, args...)
+}
+
+func (lw LogWrapper) IsTrace() bool { return true }
+func (lw LogWrapper) IsDebug() bool { return true }
+func (lw LogWrapper) IsInfo() bool  { return true }
+func (lw LogWrapper) IsWarn() bool  { return true }
+func (lw LogWrapper) IsError() bool { return true }
+
+func (lw LogWrapper) With(args ...interface{}) hclog.Logger {
+	return LogWrapper{Logger: lw.Logger.New(args...)}
+}
+func (lw LogWrapper) Named(name string) hclog.Logger {
+	return LogWrapper{Logger: lw.Logger.New()}
+}
+
+func (lw LogWrapper) ResetNamed(name string) hclog.Logger {
+	return LogWrapper{Logger: lw.Logger.New()}
+}
+
+func (lw LogWrapper) StandardLogger(ops *hclog.StandardLoggerOptions) *log.Logger {
+	return nil
+}

+ 46 - 10
pkg/plugins/plugins.go

@@ -1,6 +1,7 @@
 package plugins

 import (
+	"context"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -34,14 +35,41 @@ type PluginScanner struct {
 	errors     []error
 }

-func Init() error {
+type PluginManager struct {
+	log log.Logger
+}
+
+func NewPluginManager(ctx context.Context) (*PluginManager, error) {
+	err := initPlugins(ctx)
+
+	if err != nil {
+		return nil, err
+	}
+
+	return &PluginManager{
+		log: log.New("plugins"),
+	}, nil
+}
+
+func (p *PluginManager) Run(ctx context.Context) error {
+	<-ctx.Done()
+
+	for _, p := range DataSources {
+		p.Kill()
+	}
+
+	p.log.Info("Stopped Plugins", "error", ctx.Err())
+	return ctx.Err()
+}
+
+func initPlugins(ctx context.Context) error {
 	plog = log.New("plugins")

-	DataSources = make(map[string]*DataSourcePlugin)
-	StaticRoutes = make([]*PluginStaticRoute, 0)
-	Panels = make(map[string]*PanelPlugin)
-	Apps = make(map[string]*AppPlugin)
-	Plugins = make(map[string]*PluginBase)
+	DataSources = map[string]*DataSourcePlugin{}
+	StaticRoutes = []*PluginStaticRoute{}
+	Panels = map[string]*PanelPlugin{}
+	Apps = map[string]*AppPlugin{}
+	Plugins = map[string]*PluginBase{}
 	PluginTypes = map[string]interface{}{
 		"panel":      PanelPlugin{},
 		"datasource": DataSourcePlugin{},
@@ -53,9 +81,8 @@ func Init() error {

 	// check if plugins dir exists
 	if _, err := os.Stat(setting.PluginsPath); os.IsNotExist(err) {
-		plog.Warn("Plugin dir does not exist", "dir", setting.PluginsPath)
 		if err = os.MkdirAll(setting.PluginsPath, os.ModePerm); err != nil {
-			plog.Warn("Failed to create plugin dir", "dir", setting.PluginsPath, "error", err)
+			plog.Error("Failed to create plugin dir", "dir", setting.PluginsPath, "error", err)
 		} else {
 			plog.Info("Plugin dir created", "dir", setting.PluginsPath)
 			scan(setting.PluginsPath)
@@ -70,9 +97,18 @@ func Init() error {
 	for _, panel := range Panels {
 		panel.initFrontendPlugin()
 	}
-	for _, panel := range DataSources {
-		panel.initFrontendPlugin()
+
+	for _, ds := range DataSources {
+		if ds.Backend {
+			err := ds.initBackendPlugin(ctx, plog)
+			if err != nil {
+				plog.Error("Failed to init plugin.", "error", err, "plugin", ds.Id)
+			}
+		}
+
+		ds.initFrontendPlugin()
 	}
+
 	for _, app := range Apps {
 		app.initApp()
 	}

+ 3 - 2
pkg/plugins/plugins_test.go

@@ -1,6 +1,7 @@
 package plugins

 import (
+	"context"
 	"path/filepath"
 	"testing"

@@ -14,7 +15,7 @@ func TestPluginScans(t *testing.T) {
 	Convey("When scaning for plugins", t, func() {
 		setting.StaticRootPath, _ = filepath.Abs("../../public/")
 		setting.Cfg = ini.Empty()
-		err := Init()
+		err := initPlugins(context.Background())

 		So(err, ShouldBeNil)
 		So(len(DataSources), ShouldBeGreaterThan, 1)
@@ -29,7 +30,7 @@ func TestPluginScans(t *testing.T) {
 		setting.Cfg = ini.Empty()
 		sec, _ := setting.Cfg.NewSection("plugin.nginx-app")
 		sec.NewKey("path", "../../tests/test-app")
-		err := Init()
+		err := initPlugins(context.Background())

 		So(err, ShouldBeNil)
 		So(len(Apps), ShouldBeGreaterThan, 0)

+ 89 - 24
pkg/services/alerting/notifiers/telegram.go

@@ -1,17 +1,19 @@
 package notifiers

 import (
+	"bytes"
 	"fmt"
-
 	"github.com/grafana/grafana/pkg/bus"
-	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/log"
 	m "github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/alerting"
+	"io"
+	"mime/multipart"
+	"os"
 )

 var (
-	telegeramApiUrl string = "https://api.telegram.org/bot%s/%s"
+	telegramApiUrl string = "https://api.telegram.org/bot%s/%s"
 )

 func init() {
@@ -47,9 +49,10 @@ func init() {

 type TelegramNotifier struct {
 	NotifierBase
-	BotToken string
-	ChatID   string
-	log      log.Logger
+	BotToken    string
+	ChatID      string
+	UploadImage bool
+	log         log.Logger
 }

 func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
@@ -59,6 +62,7 @@ func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error)

 	botToken := model.Settings.Get("bottoken").MustString()
 	chatId := model.Settings.Get("chatid").MustString()
+	uploadImage := model.Settings.Get("uploadImage").MustBool()

 	if botToken == "" {
 		return nil, alerting.ValidationError{Reason: "Could not find Bot Token in settings"}
@@ -72,31 +76,42 @@ func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error)
 		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
 		BotToken:     botToken,
 		ChatID:       chatId,
+		UploadImage:  uploadImage,
 		log:          log.New("alerting.notifier.telegram"),
 	}, nil
 }

-func (this *TelegramNotifier) ShouldNotify(context *alerting.EvalContext) bool {
-	return defaultShouldNotify(context)
-}
+func (this *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, sendImageInline bool) *m.SendWebhookSync {
+	var imageFile *os.File
+	var err error
 
 
-func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error {
-	this.log.Info("Sending alert notification to", "bot_token", this.BotToken)
-	this.log.Info("Sending alert notification to", "chat_id", this.ChatID)
-
-	bodyJSON := simplejson.New()
+	if sendImageInline {
+		imageFile, err = os.Open(evalContext.ImageOnDiskPath)
+		if err != nil {
+			sendImageInline = false // fall back to a text-only message if the image cannot be opened
+		} else {
+			// defer the Close only after a successful Open, otherwise imageFile is nil
+			defer imageFile.Close()
+		}
+	}
 
 
-	bodyJSON.Set("chat_id", this.ChatID)
-	bodyJSON.Set("parse_mode", "html")
+	message := ""
 
 
-	message := fmt.Sprintf("<b>%s</b>\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message)
+	if sendImageInline {
+		// Telegram's API does not allow HTML formatting for image captions.
+		message = fmt.Sprintf("%s\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message)
+	} else {
+		message = fmt.Sprintf("<b>%s</b>\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message)
+	}
 
 
 	ruleUrl, err := evalContext.GetRuleUrl()
 	if err == nil {
 		message = message + fmt.Sprintf("URL: %s\n", ruleUrl)
 	}
-	if evalContext.ImagePublicUrl != "" {
-		message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl)
+
+	if !sendImageInline {
+		// only attach this if we are not sending it inline.
+		if evalContext.ImagePublicUrl != "" {
+			message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl)
+		}
 	}

 	metrics := ""
@@ -107,19 +122,69 @@ func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error {
 			break
 		}
 	}
+
 	if metrics != "" {
-		message = message + fmt.Sprintf("\n<i>Metrics:</i>%s", metrics)
+		if sendImageInline {
+			// Telegram's API does not allow HTML formatting for image captions.
+			message = message + fmt.Sprintf("\nMetrics:%s", metrics)
+		} else {
+			message = message + fmt.Sprintf("\n<i>Metrics:</i>%s", metrics)
+		}
+	}
+
+	var body bytes.Buffer
+
+	w := multipart.NewWriter(&body)
+	fw, _ := w.CreateFormField("chat_id")
+	fw.Write([]byte(this.ChatID))
+
+	if sendImageInline {
+		fw, _ = w.CreateFormField("caption")
+		fw.Write([]byte(message))
+
+		fw, _ = w.CreateFormFile("photo", evalContext.ImageOnDiskPath)
+		io.Copy(fw, imageFile)
+	} else {
+		fw, _ = w.CreateFormField("text")
+		fw.Write([]byte(message))
+
+		fw, _ = w.CreateFormField("parse_mode")
+		fw.Write([]byte("html"))
 	}

-	bodyJSON.Set("text", message)
+	w.Close()

-	url := fmt.Sprintf(telegeramApiUrl, this.BotToken, "sendMessage")
-	body, _ := bodyJSON.MarshalJSON()
+	apiMethod := ""
+	if sendImageInline {
+		this.log.Info("Sending telegram image notification", "photo", evalContext.ImageOnDiskPath, "chat_id", this.ChatID, "bot_token", this.BotToken)
+		apiMethod = "sendPhoto"
+	} else {
+		this.log.Info("Sending telegram text notification", "chat_id", this.ChatID, "bot_token", this.BotToken)
+		apiMethod = "sendMessage"
+	}

+	url := fmt.Sprintf(telegramApiUrl, this.BotToken, apiMethod)
 	cmd := &m.SendWebhookSync{
 		Url:        url,
-		Body:       string(body),
+		Body:       body.String(),
 		HttpMethod: "POST",
+		HttpHeader: map[string]string{
+			"Content-Type": w.FormDataContentType(),
+		},
+	}
+	return cmd
+}
+
+func (this *TelegramNotifier) ShouldNotify(context *alerting.EvalContext) bool {
+	return defaultShouldNotify(context)
+}
+
+func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error {
+	var cmd *m.SendWebhookSync
+	if evalContext.ImagePublicUrl == "" && this.UploadImage == true {
+		cmd = this.buildMessage(evalContext, true)
+	} else {
+		cmd = this.buildMessage(evalContext, false)
 	}

 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {

+ 6 - 3
pkg/services/provisioning/dashboards/file_reader.go

@@ -156,13 +156,15 @@ func createWalkFn(fr *fileReader, folderId int64) filepath.WalkFunc {
 			return nil
 		}

-		// id = 0 indicates ID validation should be avoided before writing to the db.
-		dash.Dashboard.Id = 0
+		if dash.Dashboard.Id != 0 {
+			fr.log.Error("Cannot provision dashboard. Please remove the id property from the json file")
+			return nil
+		}

 		cmd := &models.GetDashboardQuery{Slug: dash.Dashboard.Slug}
 		err = bus.Dispatch(cmd)

-		// if we dont have the dashboard in the db, save it!
+		// if we don't have the dashboard in the db, save it!
 		if err == models.ErrDashboardNotFound {
 			fr.log.Debug("saving new dashboard", "file", path)
 			_, err = fr.dashboardRepo.SaveDashboard(dash)
@@ -181,6 +183,7 @@ func createWalkFn(fr *fileReader, folderId int64) filepath.WalkFunc {

 		fr.log.Debug("loading dashboard from disk into database.", "file", path)
 		_, err = fr.dashboardRepo.SaveDashboard(dash)
+
 		return err
 	}
 }

+ 1 - 0
pkg/services/provisioning/dashboards/types.go

@@ -24,6 +24,7 @@ func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *Das
 	dash.UpdatedAt = lastModified
 	dash.Overwrite = true
 	dash.OrgId = cfg.OrgId
+	dash.Dashboard.OrgId = cfg.OrgId
 	dash.Dashboard.FolderId = folderId
 	if !cfg.Editable {
 		dash.Dashboard.Data.Set("editable", cfg.Editable)

+ 0 - 2
pkg/tsdb/models.go

@@ -14,9 +14,7 @@ type TsdbQuery struct {
 type Query struct {
 	RefId         string
 	Model         *simplejson.Json
-	Depends       []string
 	DataSource    *models.DataSource
-	Results       []*TimeSeries
 	MaxDataPoints int64
 	IntervalMs    int64
 }

+ 636 - 0
pkg/tsdb/models/tsdb_plugin.pb.go

@@ -0,0 +1,636 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: tsdb_plugin.proto
+
+/*
+Package proto is a generated protocol buffer package.
+
+It is generated from these files:
+	tsdb_plugin.proto
+
+It has these top-level messages:
+	TsdbQuery
+	Query
+	TimeRange
+	Response
+	QueryResult
+	Table
+	TableColumn
+	TableRow
+	RowValue
+	DatasourceInfo
+	TimeSeries
+	Point
+*/
+package proto
+
+import proto1 "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+import (
+	context "golang.org/x/net/context"
+	grpc "google.golang.org/grpc"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto1.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto1.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type RowValue_Kind int32
+
+const (
+	// Field type null.
+	RowValue_TYPE_NULL RowValue_Kind = 0
+	// Field type double.
+	RowValue_TYPE_DOUBLE RowValue_Kind = 1
+	// Field type int64.
+	RowValue_TYPE_INT64 RowValue_Kind = 2
+	// Field type bool.
+	RowValue_TYPE_BOOL RowValue_Kind = 3
+	// Field type string.
+	RowValue_TYPE_STRING RowValue_Kind = 4
+	// Field type bytes.
+	RowValue_TYPE_BYTES RowValue_Kind = 5
+)
+
+var RowValue_Kind_name = map[int32]string{
+	0: "TYPE_NULL",
+	1: "TYPE_DOUBLE",
+	2: "TYPE_INT64",
+	3: "TYPE_BOOL",
+	4: "TYPE_STRING",
+	5: "TYPE_BYTES",
+}
+var RowValue_Kind_value = map[string]int32{
+	"TYPE_NULL":   0,
+	"TYPE_DOUBLE": 1,
+	"TYPE_INT64":  2,
+	"TYPE_BOOL":   3,
+	"TYPE_STRING": 4,
+	"TYPE_BYTES":  5,
+}
+
+func (x RowValue_Kind) String() string {
+	return proto1.EnumName(RowValue_Kind_name, int32(x))
+}
+func (RowValue_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{8, 0} }
+
+type TsdbQuery struct {
+	TimeRange  *TimeRange      `protobuf:"bytes,1,opt,name=timeRange" json:"timeRange,omitempty"`
+	Datasource *DatasourceInfo `protobuf:"bytes,2,opt,name=datasource" json:"datasource,omitempty"`
+	Queries    []*Query        `protobuf:"bytes,3,rep,name=queries" json:"queries,omitempty"`
+}
+
+func (m *TsdbQuery) Reset()                    { *m = TsdbQuery{} }
+func (m *TsdbQuery) String() string            { return proto1.CompactTextString(m) }
+func (*TsdbQuery) ProtoMessage()               {}
+func (*TsdbQuery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+func (m *TsdbQuery) GetTimeRange() *TimeRange {
+	if m != nil {
+		return m.TimeRange
+	}
+	return nil
+}
+
+func (m *TsdbQuery) GetDatasource() *DatasourceInfo {
+	if m != nil {
+		return m.Datasource
+	}
+	return nil
+}
+
+func (m *TsdbQuery) GetQueries() []*Query {
+	if m != nil {
+		return m.Queries
+	}
+	return nil
+}
+
+type Query struct {
+	RefId         string `protobuf:"bytes,1,opt,name=refId" json:"refId,omitempty"`
+	MaxDataPoints int64  `protobuf:"varint,2,opt,name=maxDataPoints" json:"maxDataPoints,omitempty"`
+	IntervalMs    int64  `protobuf:"varint,3,opt,name=intervalMs" json:"intervalMs,omitempty"`
+	ModelJson     string `protobuf:"bytes,4,opt,name=modelJson" json:"modelJson,omitempty"`
+}
+
+func (m *Query) Reset()                    { *m = Query{} }
+func (m *Query) String() string            { return proto1.CompactTextString(m) }
+func (*Query) ProtoMessage()               {}
+func (*Query) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+func (m *Query) GetRefId() string {
+	if m != nil {
+		return m.RefId
+	}
+	return ""
+}
+
+func (m *Query) GetMaxDataPoints() int64 {
+	if m != nil {
+		return m.MaxDataPoints
+	}
+	return 0
+}
+
+func (m *Query) GetIntervalMs() int64 {
+	if m != nil {
+		return m.IntervalMs
+	}
+	return 0
+}
+
+func (m *Query) GetModelJson() string {
+	if m != nil {
+		return m.ModelJson
+	}
+	return ""
+}
+
+type TimeRange struct {
+	FromRaw     string `protobuf:"bytes,1,opt,name=fromRaw" json:"fromRaw,omitempty"`
+	ToRaw       string `protobuf:"bytes,2,opt,name=toRaw" json:"toRaw,omitempty"`
+	FromEpochMs int64  `protobuf:"varint,3,opt,name=fromEpochMs" json:"fromEpochMs,omitempty"`
+	ToEpochMs   int64  `protobuf:"varint,4,opt,name=toEpochMs" json:"toEpochMs,omitempty"`
+}
+
+func (m *TimeRange) Reset()                    { *m = TimeRange{} }
+func (m *TimeRange) String() string            { return proto1.CompactTextString(m) }
+func (*TimeRange) ProtoMessage()               {}
+func (*TimeRange) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+func (m *TimeRange) GetFromRaw() string {
+	if m != nil {
+		return m.FromRaw
+	}
+	return ""
+}
+
+func (m *TimeRange) GetToRaw() string {
+	if m != nil {
+		return m.ToRaw
+	}
+	return ""
+}
+
+func (m *TimeRange) GetFromEpochMs() int64 {
+	if m != nil {
+		return m.FromEpochMs
+	}
+	return 0
+}
+
+func (m *TimeRange) GetToEpochMs() int64 {
+	if m != nil {
+		return m.ToEpochMs
+	}
+	return 0
+}
+
+type Response struct {
+	Results []*QueryResult `protobuf:"bytes,1,rep,name=results" json:"results,omitempty"`
+}
+
+func (m *Response) Reset()                    { *m = Response{} }
+func (m *Response) String() string            { return proto1.CompactTextString(m) }
+func (*Response) ProtoMessage()               {}
+func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
+
+func (m *Response) GetResults() []*QueryResult {
+	if m != nil {
+		return m.Results
+	}
+	return nil
+}
+
+type QueryResult struct {
+	Error    string        `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"`
+	RefId    string        `protobuf:"bytes,2,opt,name=refId" json:"refId,omitempty"`
+	MetaJson string        `protobuf:"bytes,3,opt,name=metaJson" json:"metaJson,omitempty"`
+	Series   []*TimeSeries `protobuf:"bytes,4,rep,name=series" json:"series,omitempty"`
+	Tables   []*Table      `protobuf:"bytes,5,rep,name=tables" json:"tables,omitempty"`
+}
+
+func (m *QueryResult) Reset()                    { *m = QueryResult{} }
+func (m *QueryResult) String() string            { return proto1.CompactTextString(m) }
+func (*QueryResult) ProtoMessage()               {}
+func (*QueryResult) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
+
+func (m *QueryResult) GetError() string {
+	if m != nil {
+		return m.Error
+	}
+	return ""
+}
+
+func (m *QueryResult) GetRefId() string {
+	if m != nil {
+		return m.RefId
+	}
+	return ""
+}
+
+func (m *QueryResult) GetMetaJson() string {
+	if m != nil {
+		return m.MetaJson
+	}
+	return ""
+}
+
+func (m *QueryResult) GetSeries() []*TimeSeries {
+	if m != nil {
+		return m.Series
+	}
+	return nil
+}
+
+func (m *QueryResult) GetTables() []*Table {
+	if m != nil {
+		return m.Tables
+	}
+	return nil
+}
+
+type Table struct {
+	Columns []*TableColumn `protobuf:"bytes,1,rep,name=columns" json:"columns,omitempty"`
+	Rows    []*TableRow    `protobuf:"bytes,2,rep,name=rows" json:"rows,omitempty"`
+}
+
+func (m *Table) Reset()                    { *m = Table{} }
+func (m *Table) String() string            { return proto1.CompactTextString(m) }
+func (*Table) ProtoMessage()               {}
+func (*Table) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+func (m *Table) GetColumns() []*TableColumn {
+	if m != nil {
+		return m.Columns
+	}
+	return nil
+}
+
+func (m *Table) GetRows() []*TableRow {
+	if m != nil {
+		return m.Rows
+	}
+	return nil
+}
+
+type TableColumn struct {
+	Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+}
+
+func (m *TableColumn) Reset()                    { *m = TableColumn{} }
+func (m *TableColumn) String() string            { return proto1.CompactTextString(m) }
+func (*TableColumn) ProtoMessage()               {}
+func (*TableColumn) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
+
+func (m *TableColumn) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+type TableRow struct {
+	Values []*RowValue `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"`
+}
+
+func (m *TableRow) Reset()                    { *m = TableRow{} }
+func (m *TableRow) String() string            { return proto1.CompactTextString(m) }
+func (*TableRow) ProtoMessage()               {}
+func (*TableRow) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
+
+func (m *TableRow) GetValues() []*RowValue {
+	if m != nil {
+		return m.Values
+	}
+	return nil
+}
+
+type RowValue struct {
+	Kind        RowValue_Kind `protobuf:"varint,1,opt,name=kind,enum=plugins.RowValue_Kind" json:"kind,omitempty"`
+	DoubleValue float64       `protobuf:"fixed64,2,opt,name=doubleValue" json:"doubleValue,omitempty"`
+	Int64Value  int64         `protobuf:"varint,3,opt,name=int64Value" json:"int64Value,omitempty"`
+	BoolValue   bool          `protobuf:"varint,4,opt,name=boolValue" json:"boolValue,omitempty"`
+	StringValue string        `protobuf:"bytes,5,opt,name=stringValue" json:"stringValue,omitempty"`
+	BytesValue  []byte        `protobuf:"bytes,6,opt,name=bytesValue,proto3" json:"bytesValue,omitempty"`
+}
+
+func (m *RowValue) Reset()                    { *m = RowValue{} }
+func (m *RowValue) String() string            { return proto1.CompactTextString(m) }
+func (*RowValue) ProtoMessage()               {}
+func (*RowValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
+
+func (m *RowValue) GetKind() RowValue_Kind {
+	if m != nil {
+		return m.Kind
+	}
+	return RowValue_TYPE_NULL
+}
+
+func (m *RowValue) GetDoubleValue() float64 {
+	if m != nil {
+		return m.DoubleValue
+	}
+	return 0
+}
+
+func (m *RowValue) GetInt64Value() int64 {
+	if m != nil {
+		return m.Int64Value
+	}
+	return 0
+}
+
+func (m *RowValue) GetBoolValue() bool {
+	if m != nil {
+		return m.BoolValue
+	}
+	return false
+}
+
+func (m *RowValue) GetStringValue() string {
+	if m != nil {
+		return m.StringValue
+	}
+	return ""
+}
+
+func (m *RowValue) GetBytesValue() []byte {
+	if m != nil {
+		return m.BytesValue
+	}
+	return nil
+}
+
+type DatasourceInfo struct {
+	Id             int64  `protobuf:"varint,1,opt,name=id" json:"id,omitempty"`
+	OrgId          int64  `protobuf:"varint,2,opt,name=orgId" json:"orgId,omitempty"`
+	Name           string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"`
+	Type           string `protobuf:"bytes,4,opt,name=type" json:"type,omitempty"`
+	Url            string `protobuf:"bytes,5,opt,name=url" json:"url,omitempty"`
+	JsonData       string `protobuf:"bytes,6,opt,name=jsonData" json:"jsonData,omitempty"`
+	SecureJsonData string `protobuf:"bytes,7,opt,name=secureJsonData" json:"secureJsonData,omitempty"`
+}
+
+func (m *DatasourceInfo) Reset()                    { *m = DatasourceInfo{} }
+func (m *DatasourceInfo) String() string            { return proto1.CompactTextString(m) }
+func (*DatasourceInfo) ProtoMessage()               {}
+func (*DatasourceInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
+
+func (m *DatasourceInfo) GetId() int64 {
+	if m != nil {
+		return m.Id
+	}
+	return 0
+}
+
+func (m *DatasourceInfo) GetOrgId() int64 {
+	if m != nil {
+		return m.OrgId
+	}
+	return 0
+}
+
+func (m *DatasourceInfo) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *DatasourceInfo) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
+func (m *DatasourceInfo) GetUrl() string {
+	if m != nil {
+		return m.Url
+	}
+	return ""
+}
+
+func (m *DatasourceInfo) GetJsonData() string {
+	if m != nil {
+		return m.JsonData
+	}
+	return ""
+}
+
+func (m *DatasourceInfo) GetSecureJsonData() string {
+	if m != nil {
+		return m.SecureJsonData
+	}
+	return ""
+}
+
+type TimeSeries struct {
+	Name   string            `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Tags   map[string]string `protobuf:"bytes,2,rep,name=tags" json:"tags,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+	Points []*Point          `protobuf:"bytes,3,rep,name=points" json:"points,omitempty"`
+}
+
+func (m *TimeSeries) Reset()                    { *m = TimeSeries{} }
+func (m *TimeSeries) String() string            { return proto1.CompactTextString(m) }
+func (*TimeSeries) ProtoMessage()               {}
+func (*TimeSeries) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
+
+func (m *TimeSeries) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *TimeSeries) GetTags() map[string]string {
+	if m != nil {
+		return m.Tags
+	}
+	return nil
+}
+
+func (m *TimeSeries) GetPoints() []*Point {
+	if m != nil {
+		return m.Points
+	}
+	return nil
+}
+
+type Point struct {
+	Timestamp int64   `protobuf:"varint,1,opt,name=timestamp" json:"timestamp,omitempty"`
+	Value     float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"`
+}
+
+func (m *Point) Reset()                    { *m = Point{} }
+func (m *Point) String() string            { return proto1.CompactTextString(m) }
+func (*Point) ProtoMessage()               {}
+func (*Point) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }
+
+func (m *Point) GetTimestamp() int64 {
+	if m != nil {
+		return m.Timestamp
+	}
+	return 0
+}
+
+func (m *Point) GetValue() float64 {
+	if m != nil {
+		return m.Value
+	}
+	return 0
+}
+
+func init() {
+	proto1.RegisterType((*TsdbQuery)(nil), "plugins.TsdbQuery")
+	proto1.RegisterType((*Query)(nil), "plugins.Query")
+	proto1.RegisterType((*TimeRange)(nil), "plugins.TimeRange")
+	proto1.RegisterType((*Response)(nil), "plugins.Response")
+	proto1.RegisterType((*QueryResult)(nil), "plugins.QueryResult")
+	proto1.RegisterType((*Table)(nil), "plugins.Table")
+	proto1.RegisterType((*TableColumn)(nil), "plugins.TableColumn")
+	proto1.RegisterType((*TableRow)(nil), "plugins.TableRow")
+	proto1.RegisterType((*RowValue)(nil), "plugins.RowValue")
+	proto1.RegisterType((*DatasourceInfo)(nil), "plugins.DatasourceInfo")
+	proto1.RegisterType((*TimeSeries)(nil), "plugins.TimeSeries")
+	proto1.RegisterType((*Point)(nil), "plugins.Point")
+	proto1.RegisterEnum("plugins.RowValue_Kind", RowValue_Kind_name, RowValue_Kind_value)
+}
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ context.Context
+var _ grpc.ClientConn
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+const _ = grpc.SupportPackageIsVersion4
+
+// Client API for TsdbPlugin service
+
+type TsdbPluginClient interface {
+	Query(ctx context.Context, in *TsdbQuery, opts ...grpc.CallOption) (*Response, error)
+}
+
+type tsdbPluginClient struct {
+	cc *grpc.ClientConn
+}
+
+func NewTsdbPluginClient(cc *grpc.ClientConn) TsdbPluginClient {
+	return &tsdbPluginClient{cc}
+}
+
+func (c *tsdbPluginClient) Query(ctx context.Context, in *TsdbQuery, opts ...grpc.CallOption) (*Response, error) {
+	out := new(Response)
+	err := grpc.Invoke(ctx, "/plugins.TsdbPlugin/Query", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+// Server API for TsdbPlugin service
+
+type TsdbPluginServer interface {
+	Query(context.Context, *TsdbQuery) (*Response, error)
+}
+
+func RegisterTsdbPluginServer(s *grpc.Server, srv TsdbPluginServer) {
+	s.RegisterService(&_TsdbPlugin_serviceDesc, srv)
+}
+
+func _TsdbPlugin_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(TsdbQuery)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(TsdbPluginServer).Query(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/plugins.TsdbPlugin/Query",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(TsdbPluginServer).Query(ctx, req.(*TsdbQuery))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+var _TsdbPlugin_serviceDesc = grpc.ServiceDesc{
+	ServiceName: "plugins.TsdbPlugin",
+	HandlerType: (*TsdbPluginServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "Query",
+			Handler:    _TsdbPlugin_Query_Handler,
+		},
+	},
+	Streams:  []grpc.StreamDesc{},
+	Metadata: "tsdb_plugin.proto",
+}
+
+func init() { proto1.RegisterFile("tsdb_plugin.proto", fileDescriptor0) }
+
+var fileDescriptor0 = []byte{
+	// 811 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x55, 0x5d, 0x8f, 0x1b, 0x35,
+	0x14, 0x65, 0xbe, 0x92, 0xcc, 0x0d, 0x0d, 0xa9, 0xa9, 0x20, 0x5a, 0x01, 0x5a, 0x46, 0x50, 0x05,
+	0x90, 0x22, 0x08, 0xa5, 0x45, 0x85, 0xa7, 0xd0, 0x08, 0xa5, 0x84, 0xdd, 0xc5, 0x3b, 0x45, 0x2a,
+	0x0f, 0x54, 0x93, 0x8c, 0x13, 0x86, 0xce, 0x8c, 0x83, 0xed, 0xd9, 0x10, 0xf1, 0xc4, 0x3f, 0xe1,
+	0x99, 0x67, 0x7e, 0x00, 0x3f, 0xad, 0xf2, 0x1d, 0xcf, 0x47, 0xd2, 0x7d, 0x9a, 0xb9, 0xe7, 0x1c,
+	0x5f, 0x5f, 0x5f, 0x1f, 0xdb, 0x70, 0x57, 0xc9, 0x78, 0xf5, 0x62, 0x97, 0x16, 0xdb, 0x24, 0x9f,
+	0xec, 0x04, 0x57, 0x9c, 0x74, 0xcb, 0x48, 0x06, 0xff, 0x58, 0xe0, 0x87, 0x32, 0x5e, 0xfd, 0x54,
+	0x30, 0x71, 0x20, 0x9f, 0x83, 0xaf, 0x92, 0x8c, 0xd1, 0x28, 0xdf, 0xb2, 0x91, 0x75, 0x6e, 0x8d,
+	0xfb, 0x53, 0x32, 0x31, 0xd2, 0x49, 0x58, 0x31, 0xb4, 0x11, 0x91, 0x47, 0x00, 0x71, 0xa4, 0x22,
+	0xc9, 0x0b, 0xb1, 0x66, 0x23, 0x1b, 0x87, 0xbc, 0x5b, 0x0f, 0x79, 0x52, 0x53, 0x8b, 0x7c, 0xc3,
+	0x69, 0x4b, 0x4a, 0xc6, 0xd0, 0xfd, 0xa3, 0x60, 0x22, 0x61, 0x72, 0xe4, 0x9c, 0x3b, 0xe3, 0xfe,
+	0x74, 0x50, 0x8f, 0xc2, 0x5a, 0x68, 0x45, 0x07, 0x7f, 0x5b, 0xe0, 0x95, 0xe5, 0xdd, 0x03, 0x4f,
+	0xb0, 0xcd, 0x22, 0xc6, 0xd2, 0x7c, 0x5a, 0x06, 0xe4, 0x23, 0xb8, 0x93, 0x45, 0x7f, 0xea, 0xa9,
+	0xae, 0x78, 0x92, 0x2b, 0x89, 0x55, 0x38, 0xf4, 0x18, 0x24, 0x1f, 0x00, 0x24, 0xb9, 0x62, 0xe2,
+	0x26, 0x4a, 0x7f, 0xd4, 0x53, 0x6a, 0x49, 0x0b, 0x21, 0xef, 0x81, 0x9f, 0xf1, 0x98, 0xa5, 0x4f,
+	0x25, 0xcf, 0x47, 0x2e, 0xe6, 0x6f, 0x80, 0xe0, 0x2f, 0xf0, 0xeb, 0xe5, 0x93, 0x11, 0x74, 0x37,
+	0x82, 0x67, 0x34, 0xda, 0x9b, 0x42, 0xaa, 0x50, 0x17, 0xa8, 0xb8, 0xc6, 0xed, 0xb2, 0x40, 0x0c,
+	0xc8, 0x39, 0xf4, 0xb5, 0x60, 0xbe, 0xe3, 0xeb, 0xdf, 0xea, 0xb9, 0xdb, 0x90, 0x9e, 0x5c, 0xf1,
+	0x8a, 0x77, 0x91, 0x6f, 0x80, 0xe0, 0x31, 0xf4, 0x28, 0x93, 0x3b, 0x9e, 0x4b, 0x46, 0x26, 0xd0,
+	0x15, 0x4c, 0x16, 0xa9, 0x92, 0x23, 0x0b, 0xdb, 0x76, 0xef, 0xa4, 0x6d, 0x48, 0xd2, 0x4a, 0x14,
+	0xfc, 0x6b, 0x41, 0xbf, 0x45, 0xe8, 0x0a, 0x99, 0x10, 0x5c, 0x54, 0x2d, 0xc4, 0xa0, 0x69, 0xac,
+	0xdd, 0x6e, 0xec, 0x19, 0xf4, 0x32, 0xa6, 0x22, 0xec, 0x88, 0x83, 0x44, 0x1d, 0x93, 0xcf, 0xa0,
+	0x23, 0xcb, 0xdd, 0x73, 0xb1, 0x8c, 0xb7, 0x8f, 0x6c, 0x72, 0x8d, 0x14, 0x35, 0x12, 0x72, 0x1f,
+	0x3a, 0x2a, 0x5a, 0xa5, 0x4c, 0x8e, 0xbc, 0x93, 0xad, 0x0e, 0x35, 0x4c, 0x0d, 0x1b, 0xfc, 0x0a,
+	0x1e, 0x02, 0x7a, 0x95, 0x6b, 0x9e, 0x16, 0x59, 0xfe, 0xfa, 0x2a, 0x51, 0xf0, 0x1d, 0x92, 0xb4,
+	0x12, 0x91, 0x8f, 0xc1, 0x15, 0x7c, 0xaf, 0x77, 0x5e, 0x8b, 0xef, 0x9e, 0xa4, 0xe7, 0x7b, 0x8a,
+	0x74, 0xf0, 0x21, 0xf4, 0x5b, 0xc3, 0x09, 0x01, 0x37, 0x8f, 0x32, 0x66, 0x5a, 0x81, 0xff, 0xc1,
+	0x57, 0xd0, 0xab, 0x06, 0x91, 0x4f, 0xa0, 0x73, 0x13, 0xa5, 0x05, 0xab, 0x8a, 0x68, 0xf2, 0x52,
+	0xbe, 0xff, 0x59, 0x33, 0xd4, 0x08, 0x82, 0xff, 0x6d, 0xe8, 0x55, 0x20, 0xf9, 0x14, 0xdc, 0x97,
+	0x49, 0x5e, 0xba, 0x74, 0x30, 0x7d, 0xe7, 0xb5, 0x51, 0x93, 0x1f, 0x92, 0x3c, 0xa6, 0xa8, 0xd1,
+	0xde, 0x88, 0x79, 0xb1, 0x4a, 0x19, 0x32, 0xd8, 0x7f, 0x8b, 0xb6, 0x21, 0x63, 0xdc, 0x87, 0x0f,
+	0x4a, 0x41, 0x63, 0x5c, 0x83, 0x68, 0xef, 0xac, 0x38, 0x4f, 0x4b, 0x5a, 0x7b, 0xa7, 0x47, 0x1b,
+	0x40, 0xe7, 0x97, 0x4a, 0x24, 0xf9, 0xb6, 0xe4, 0x3d, 0x5c, 0x6a, 0x1b, 0xd2, 0xf9, 0x57, 0x07,
+	0xc5, 0x64, 0x29, 0xe8, 0x9c, 0x5b, 0xe3, 0x37, 0x69, 0x0b, 0x09, 0x36, 0xe0, 0xea, 0x7a, 0xc9,
+	0x1d, 0xf0, 0xc3, 0xe7, 0x57, 0xf3, 0x17, 0x17, 0xcf, 0x96, 0xcb, 0xe1, 0x1b, 0xe4, 0x2d, 0xe8,
+	0x63, 0xf8, 0xe4, 0xf2, 0xd9, 0x6c, 0x39, 0x1f, 0x5a, 0x64, 0x00, 0x80, 0xc0, 0xe2, 0x22, 0x7c,
+	0xf8, 0x60, 0x68, 0xd7, 0xfa, 0xd9, 0xe5, 0xe5, 0x72, 0xe8, 0xd4, 0xfa, 0xeb, 0x90, 0x2e, 0x2e,
+	0xbe, 0x1f, 0xba, 0xb5, 0x7e, 0xf6, 0x3c, 0x9c, 0x5f, 0x0f, 0xbd, 0xe0, 0x3f, 0x0b, 0x06, 0xc7,
+	0xf7, 0x05, 0x19, 0x80, 0x9d, 0x94, 0x6d, 0x74, 0xa8, 0x9d, 0xc4, 0xda, 0xa6, 0x5c, 0x6c, 0x8d,
+	0x4d, 0x1d, 0x5a, 0x06, 0xf5, 0x36, 0x3a, 0xcd, 0x36, 0x6a, 0x4c, 0x1d, 0x76, 0xcc, 0x1c, 0x64,
+	0xfc, 0x27, 0x43, 0x70, 0x0a, 0x91, 0x9a, 0x16, 0xe8, 0x5f, 0x6d, 0xf0, 0xdf, 0x25, 0xcf, 0xf5,
+	0xac, 0xb8, 0x70, 0x9f, 0xd6, 0x31, 0xb9, 0x0f, 0x03, 0xc9, 0xd6, 0x85, 0x60, 0x4f, 0x2b, 0x45,
+	0x17, 0x15, 0x27, 0xa8, 0x2e, 0x1b, 0x1a, 0xcb, 0xdf, 0xe6, 0x29, 0xf2, 0x05, 0xb8, 0x2a, 0xda,
+	0x56, 0xee, 0x7c, 0xff, 0x96, 0x93, 0x32, 0x09, 0xa3, 0xad, 0x9c, 0xe7, 0x4a, 0x1c, 0x28, 0x4a,
+	0xf5, 0x89, 0xd9, 0x95, 0x97, 0xd9, 0xe9, 0xe5, 0x88, 0xd7, 0x19, 0x35, 0xec, 0xd9, 0x23, 0xf0,
+	0xeb, 0xa1, 0x7a, 0x81, 0x2f, 0xd9, 0xc1, 0x4c, 0xad, 0x7f, 0x75, 0xc3, 0x6e, 0x6a, 0x5f, 0xf9,
+	0xb4, 0x0c, 0x1e, 0xdb, 0x5f, 0x5b, 0xc1, 0x37, 0xe0, 0x61, 0x26, 0xbc, 0x7a, 0x92, 0x8c, 0x49,
+	0x15, 0x65, 0x3b, 0xd3, 0xea, 0x06, 0x38, 0x4e, 0x60, 0x99, 0x04, 0xd3, 0x6f, 0x01, 0xf4, 0x9b,
+	0x71, 0x85, 0x25, 0x91, 0x49, 0x75, 0x3d, 0xb7, 0x9e, 0x8a, 0xea, 0x45, 0x39, 0x6b, 0x9d, 0x19,
+	0x73, 0x85, 0xcd, 0xba, 0xbf, 0x78, 0xf8, 0x08, 0xad, 0x3a, 0xf8, 0xf9, 0xf2, 0x55, 0x00, 0x00,
+	0x00, 0xff, 0xff, 0x58, 0xae, 0x00, 0xd6, 0xa0, 0x06, 0x00, 0x00,
+}

+ 98 - 0
pkg/tsdb/models/tsdb_plugin.proto

@@ -0,0 +1,98 @@
+syntax = "proto3";
+option go_package = "proto";
+
+package plugins;
+
+message TsdbQuery {
+  TimeRange timeRange = 1;
+  DatasourceInfo datasource = 2;
+  repeated Query queries = 3;
+}
+
+message Query {
+  string refId = 1;
+  int64 maxDataPoints = 2;
+  int64 intervalMs = 3;
+  string modelJson = 4;
+}
+
+message TimeRange {
+  string fromRaw = 1;
+  string toRaw = 2;
+  int64 fromEpochMs = 3;
+  int64 toEpochMs = 4;
+}
+
+message Response {
+  repeated QueryResult results = 1;
+}
+
+message QueryResult {
+  string error = 1;
+  string refId = 2;
+  string metaJson = 3;
+  repeated TimeSeries series = 4;
+  repeated Table tables = 5;
+}
+
+message Table {
+  repeated TableColumn columns = 1;
+  repeated TableRow rows = 2;
+}
+
+message TableColumn {
+  string name = 1;
+}
+
+message TableRow {
+  repeated RowValue values = 1;
+}
+
+message RowValue {
+  enum Kind {
+    // Field type null.
+    TYPE_NULL           = 0;
+    // Field type double.
+    TYPE_DOUBLE          = 1;
+    // Field type int64.
+    TYPE_INT64          = 2;
+    // Field type bool.
+    TYPE_BOOL           = 3;
+    // Field type string.
+    TYPE_STRING         = 4;
+    // Field type bytes.
+    TYPE_BYTES          = 5;
+  };
+
+  Kind kind = 1;
+  double doubleValue = 2;
+  int64 int64Value = 3;
+  bool boolValue = 4;
+  string stringValue = 5;
+  bytes bytesValue = 6;
+}
+
+message DatasourceInfo {
+  int64 id = 1;
+  int64 orgId = 2;
+  string name = 3;
+  string type = 4;
+  string url = 5;
+  string jsonData = 6;
+  string secureJsonData = 7;
+}
+
+message TimeSeries {
+  string name = 1;
+  map<string, string> tags = 2;
+  repeated Point points = 3;
+}
+
+message Point {
+  int64 timestamp = 1;
+  double value = 2;
+}
+
+service TsdbPlugin {
+    rpc Query(TsdbQuery) returns (Response);
+}
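
As a rough sketch of the other side of this contract (not part of this change; the import path of the gRPC wrapper and the plugin id are assumptions), a standalone backend datasource binary could serve the TsdbPlugin service above with hashicorp/go-plugin, using the same handshake values as handshakeConfig in datasource_plugin.go:

    package main

    import (
    	// Assumed location of the TsdbPluginImpl gRPC wrapper added in pkg/plugins/datasource/tsdb.
    	shared "github.com/grafana/grafana/pkg/plugins/datasource/tsdb"
    	plugin "github.com/hashicorp/go-plugin"
    )

    func main() {
    	plugin.Serve(&plugin.ServeConfig{
    		// Must match the host's handshakeConfig exactly, or Grafana rejects the subprocess.
    		HandshakeConfig: plugin.HandshakeConfig{
    			ProtocolVersion:  1,
    			MagicCookieKey:   "grafana_plugin_type",
    			MagicCookieValue: "datasource",
    		},
    		Plugins: map[string]plugin.Plugin{
    			// The key must equal the plugin id, since the host dispenses the plugin by p.Id.
    			// A real plugin would wire its query implementation into the wrapper here.
    			"example-backend-datasource": &shared.TsdbPluginImpl{},
    		},
    		// The host only allows gRPC (AllowedProtocols), so serve over gRPC as well.
    		GRPCServer: plugin.DefaultGRPCServer,
    	})
    }

Grafana then drives the Query RPC through the DatasourcePluginWrapper registered via RegisterTsdbQueryEndpoint, restarting the subprocess if it exits.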

+ 5 - 5
pkg/tsdb/time_range.go

@@ -11,14 +11,14 @@ func NewTimeRange(from, to string) *TimeRange {
 	return &TimeRange{
 		From: from,
 		To:   to,
-		Now:  time.Now(),
+		now:  time.Now(),
 	}
 }

 type TimeRange struct {
 	From string
 	To   string
-	Now  time.Time
+	now  time.Time
 }

 func (tr *TimeRange) GetFromAsMsEpoch() int64 {
@@ -65,12 +65,12 @@ func (tr *TimeRange) ParseFrom() (time.Time, error) {
 		return time.Time{}, err
 	}

-	return tr.Now.Add(diff), nil
+	return tr.now.Add(diff), nil
 }

 func (tr *TimeRange) ParseTo() (time.Time, error) {
 	if tr.To == "now" {
-		return tr.Now, nil
+		return tr.now, nil
 	} else if strings.HasPrefix(tr.To, "now-") {
 		withoutNow := strings.Replace(tr.To, "now-", "", 1)

@@ -79,7 +79,7 @@ func (tr *TimeRange) ParseTo() (time.Time, error) {
 			return time.Time{}, nil
 		}

-		return tr.Now.Add(diff), nil
+		return tr.now.Add(diff), nil
 	}

 	if res, ok := tryParseUnixMsEpoch(tr.To); ok {

+ 4 - 4
pkg/tsdb/time_range_test.go

@@ -16,7 +16,7 @@ func TestTimeRange(t *testing.T) {
 			tr := TimeRange{
 				From: "5m",
 				To:   "now",
-				Now:  now,
+				now:  now,
 			}

 			Convey("5m ago ", func() {
@@ -39,7 +39,7 @@ func TestTimeRange(t *testing.T) {
 			tr := TimeRange{
 				From: "5h",
 				To:   "now-10m",
-				Now:  now,
+				now:  now,
 			}

 			Convey("5h ago ", func() {
@@ -65,7 +65,7 @@ func TestTimeRange(t *testing.T) {
 			tr := TimeRange{
 				From: "1474973725473",
 				To:   "1474975757930",
-				Now:  now,
+				now:  now,
 			}

 			res, err := tr.ParseFrom()
@@ -82,7 +82,7 @@ func TestTimeRange(t *testing.T) {
 			tr := TimeRange{
 				From: "asdf",
 				To:   "asdf",
-				Now:  now,
+				now:  now,
 			}

 			_, err = tr.ParseFrom()

+ 1 - 1
public/app/containers/AlertRuleList/AlertRuleList.jest.tsx

@@ -45,7 +45,7 @@ describe('AlertRuleList', () => {

   it('should render 1 rule', () => {
     page.update();
-    let ruleNode = page.find('.card-item-wrapper');
+    let ruleNode = page.find('.alert-rule-item');
     expect(toJson(ruleNode)).toMatchSnapshot();
   });


+ 59 - 30
public/app/containers/AlertRuleList/AlertRuleList.tsx

@@ -5,6 +5,7 @@ import PageHeader from 'app/core/components/PageHeader/PageHeader';
 import { IAlertRule } from 'app/stores/AlertListStore/AlertListStore';
 import appEvents from 'app/core/app_events';
 import IContainerProps from 'app/containers/IContainerProps';
+import Highlighter from 'react-highlight-words';

 @inject('view', 'nav', 'alertList')
 @observer
@@ -44,6 +45,10 @@ export class AlertRuleList extends React.Component<IContainerProps, any> {
     });
   };

+  onSearchQueryChange = evt => {
+    this.props.alertList.setSearchQuery(evt.target.value);
+  };
+
   render() {
     const { nav, alertList } = this.props;

@@ -52,8 +57,20 @@ export class AlertRuleList extends React.Component<IContainerProps, any> {
         <PageHeader model={nav as any} />
         <div className="page-container page-body">
           <div className="page-action-bar">
+            <div className="gf-form gf-form--grow">
+              <label className="gf-form--has-input-icon gf-form--grow">
+                <input
+                  type="text"
+                  className="gf-form-input"
+                  placeholder="Search alerts"
+                  value={alertList.search}
+                  onChange={this.onSearchQueryChange}
+                />
+                <i className="gf-form-input-icon fa fa-search" />
+              </label>
+            </div>
             <div className="gf-form">
-              <label className="gf-form-label">Filter by state</label>
+              <label className="gf-form-label">States</label>

               <div className="gf-form-select-wrapper width-13">
                 <select className="gf-form-input" onChange={this.onStateFilterChanged} value={alertList.stateFilter}>
@@ -69,8 +86,12 @@ export class AlertRuleList extends React.Component<IContainerProps, any> {
             </a>
           </div>

-          <section className="card-section card-list-layout-list">
-            <ol className="card-list">{alertList.rules.map(rule => <AlertRuleItem rule={rule} key={rule.id} />)}</ol>
+          <section>
+            <ol className="alert-rule-list">
+              {alertList.filteredRules.map(rule => (
+                <AlertRuleItem rule={rule} key={rule.id} search={alertList.search} />
+              ))}
+            </ol>
           </section>
         </div>
       </div>
@@ -88,6 +109,7 @@ function AlertStateFilterOption({ text, value }) {

 export interface AlertRuleItemProps {
   rule: IAlertRule;
+  search: string;
 }

 @observer
@@ -96,6 +118,16 @@ export class AlertRuleItem extends React.Component<AlertRuleItemProps, any> {
     this.props.rule.togglePaused();
   };

+  renderText(text: string) {
+    return (
+      <Highlighter
+        highlightClassName="highlight-search-match"
+        textToHighlight={text}
+        searchWords={[this.props.search]}
+      />
+    );
+  }
+
   render() {
     const { rule } = this.props;

@@ -108,36 +140,33 @@ export class AlertRuleItem extends React.Component<AlertRuleItemProps, any> {
     let ruleUrl = `dashboard/${rule.dashboardUri}?panelId=${rule.panelId}&fullscreen&edit&tab=alert`;

     return (
-      <li className="card-item-wrapper">
-        <div className="card-item card-item--alert">
-          <div className="card-item-header">
-            <div className="card-item-type">
-              <a
-                className="card-item-cog"
-                title="Pausing an alert rule prevents it from executing"
-                onClick={this.toggleState}
-              >
-                <i className={stateClass} />
-              </a>
-              <a className="card-item-cog" href={ruleUrl} title="Edit alert rule">
-                <i className="icon-gf icon-gf-settings" />
-              </a>
+      <li className="alert-rule-item">
+        <span className={`alert-rule-item__icon ${rule.stateClass}`}>
+          <i className={rule.stateIcon} />
+        </span>
+        <div className="alert-rule-item__body">
+          <div className="alert-rule-item__header">
+            <div className="alert-rule-item__name">
+              <a href={ruleUrl}>{this.renderText(rule.name)}</a>
             </div>
-          </div>
-          <div className="card-item-body">
-            <div className="card-item-details">
-              <div className="card-item-name">
-                <a href={ruleUrl}>{rule.name}</a>
-              </div>
-              <div className="card-item-sub-name">
-                <span className={`alert-list-item-state ${rule.stateClass}`}>
-                  <i className={rule.stateIcon} /> {rule.stateText}
-                </span>
-                <span> for {rule.stateAge}</span>
-              </div>
-              {rule.info && <div className="small muted">{rule.info}</div>}
+            <div className="alert-rule-item__text">
+              <span className={`${rule.stateClass}`}>{this.renderText(rule.stateText)}</span>
+              <span className="alert-rule-item__time"> for {rule.stateAge}</span>
             </div>
           </div>
+          {rule.info && <div className="small muted alert-rule-item__info">{this.renderText(rule.info)}</div>}
+        </div>
+        <div className="alert-rule-item__actions">
+          <a
+            className="btn btn-small btn-inverse alert-list__btn width-2"
+            title="Pausing an alert rule prevents it from executing"
+            onClick={this.toggleState}
+          >
+            <i className={stateClass} />
+          </a>
+          <a className="btn btn-small btn-inverse alert-list__btn width-2" href={ruleUrl} title="Edit alert rule">
+            <i className="icon-gf icon-gf-settings" />
+          </a>
         </div>
       </li>
     );

+ 77 - 46
public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap

@@ -2,71 +2,102 @@
 
 
 exports[`AlertRuleList should render 1 rule 1`] = `
 <li
-  className="card-item-wrapper"
+  className="alert-rule-item"
 >
+  <span
+    className="alert-rule-item__icon alert-state-ok"
+  >
+    <i
+      className="icon-gf icon-gf-online"
+    />
+  </span>
   <div
-    className="card-item card-item--alert"
+    className="alert-rule-item__body"
   >
     <div
-      className="card-item-header"
+      className="alert-rule-item__header"
     >
       <div
-        className="card-item-type"
+        className="alert-rule-item__name"
       >
         <a
-          className="card-item-cog"
-          onClick={[Function]}
-          title="Pausing an alert rule prevents it from executing"
-        >
-          <i
-            className="fa fa-pause"
-          />
-        </a>
-        <a
-          className="card-item-cog"
           href="dashboard/db/mygool?panelId=3&fullscreen&edit&tab=alert"
-          title="Edit alert rule"
         >
-          <i
-            className="icon-gf icon-gf-settings"
-          />
+          <Highlighter
+            highlightClassName="highlight-search-match"
+            searchWords={
+              Array [
+                "",
+              ]
+            }
+            textToHighlight="Panel Title alert"
+          >
+            <span>
+              <span
+                className=""
+                key="0"
+              >
+                Panel Title alert
+              </span>
+            </span>
+          </Highlighter>
         </a>
       </div>
-    </div>
-    <div
-      className="card-item-body"
-    >
       <div
-        className="card-item-details"
+        className="alert-rule-item__text"
       >
-        <div
-          className="card-item-name"
+        <span
+          className="alert-state-ok"
         >
-          <a
-            href="dashboard/db/mygool?panelId=3&fullscreen&edit&tab=alert"
+          <Highlighter
+            highlightClassName="highlight-search-match"
+            searchWords={
+              Array [
+                "",
+              ]
+            }
+            textToHighlight="OK"
           >
-            Panel Title alert
-          </a>
-        </div>
-        <div
-          className="card-item-sub-name"
+            <span>
+              <span
+                className=""
+                key="0"
+              >
+                OK
+              </span>
+            </span>
+          </Highlighter>
+        </span>
+        <span
+          className="alert-rule-item__time"
         >
-          <span
-            className="alert-list-item-state alert-state-ok"
-          >
-            <i
-              className="icon-gf icon-gf-online"
-            />
-             
-            OK
-          </span>
-          <span>
-             for 
-            5 minutes
-          </span>
-        </div>
+           for 
+          5 minutes
+        </span>
       </div>
     </div>
   </div>
+  <div
+    className="alert-rule-item__actions"
+  >
+    <a
+      className="btn btn-small btn-inverse alert-list__btn width-2"
+      onClick={[Function]}
+      title="Pausing an alert rule prevents it from executing"
+    >
+      <i
+        className="fa fa-pause"
+      />
+    </a>
+    <a
+      className="btn btn-small btn-inverse alert-list__btn width-2"
+      href="dashboard/db/mygool?panelId=3&fullscreen&edit&tab=alert"
+      title="Edit alert rule"
+    >
+      <i
+        className="icon-gf icon-gf-settings"
+      />
+    </a>
+  </div>
 </li>
 `;

+ 13 - 11
public/app/core/angular_wrappers.ts

@@ -1,14 +1,16 @@
-import { react2AngularDirective } from "app/core/utils/react2angular";
-import { PasswordStrength } from "./components/PasswordStrength";
-import PageHeader from "./components/PageHeader/PageHeader";
-import EmptyListCTA from "./components/EmptyListCTA/EmptyListCTA";
-import LoginBackground from "./components/Login/LoginBackground";
-import { SearchResult } from "./components/search/SearchResult";
+import { react2AngularDirective } from 'app/core/utils/react2angular';
+import { PasswordStrength } from './components/PasswordStrength';
+import PageHeader from './components/PageHeader/PageHeader';
+import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA';
+import LoginBackground from './components/Login/LoginBackground';
+import { SearchResult } from './components/search/SearchResult';
+import UserPicker from './components/UserPicker/UserPicker';

 export function registerAngularDirectives() {
-  react2AngularDirective("passwordStrength", PasswordStrength, ["password"]);
-  react2AngularDirective("pageHeader", PageHeader, ["model", "noTabs"]);
-  react2AngularDirective("emptyListCta", EmptyListCTA, ["model"]);
-  react2AngularDirective("loginBackground", LoginBackground, []);
-  react2AngularDirective("searchResult", SearchResult, []);
+  react2AngularDirective('passwordStrength', PasswordStrength, ['password']);
+  react2AngularDirective('pageHeader', PageHeader, ['model', 'noTabs']);
+  react2AngularDirective('emptyListCta', EmptyListCTA, ['model']);
+  react2AngularDirective('loginBackground', LoginBackground, []);
+  react2AngularDirective('searchResult', SearchResult, []);
+  react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'teamId', 'refreshList']);
 }

+ 10 - 11
public/app/core/components/EmptyListCTA/EmptyListCTA.jest.tsx

@@ -3,19 +3,18 @@ import renderer from 'react-test-renderer';
 import EmptyListCTA from './EmptyListCTA';

 const model = {
-    title: 'Title',
-    buttonIcon: 'ga css class',
-    buttonLink: 'http://url/to/destination',
-    buttonTitle: 'Click me',
-    proTip: 'This is a tip',
-    proTipLink: 'http://url/to/tip/destination',
-    proTipLinkTitle: 'Learn more',
-    proTipTarget: '_blank'
+  title: 'Title',
+  buttonIcon: 'ga css class',
+  buttonLink: 'http://url/to/destination',
+  buttonTitle: 'Click me',
+  proTip: 'This is a tip',
+  proTipLink: 'http://url/to/tip/destination',
+  proTipLinkTitle: 'Learn more',
+  proTipTarget: '_blank',
 };

-describe('CollorPalette', () => {
-
-    it('renders correctly', () => {
+describe('EmptyListCTA', () => {
+  it('renders correctly', () => {
     const tree = renderer.create(<EmptyListCTA model={model} />).toJSON();
     expect(tree).toMatchSnapshot();
   });

+ 1 - 1
public/app/core/components/EmptyListCTA/__snapshots__/EmptyListCTA.jest.tsx.snap

@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`CollorPalette renders correctly 1`] = `
+exports[`EmptyListCTA renders correctly 1`] = `
 <div
   className="empty-list-cta"
 >

+ 16 - 0
public/app/core/components/Tooltip/Popover.jest.tsx

@@ -0,0 +1,16 @@
+import React from 'react';
+import renderer from 'react-test-renderer';
+import Popover from './Popover';
+
+describe('Popover', () => {
+  it('renders correctly', () => {
+    const tree = renderer
+      .create(
+        <Popover placement="auto" content="Popover text">
+          <button>Button with Popover</button>
+        </Popover>
+      )
+      .toJSON();
+    expect(tree).toMatchSnapshot();
+  });
+});

+ 34 - 0
public/app/core/components/Tooltip/Popover.tsx

@@ -0,0 +1,34 @@
+import React from 'react';
+import withTooltip from './withTooltip';
+import { Target } from 'react-popper';
+
+interface IPopoverProps {
+  tooltipSetState: (prevState: object) => void;
+}
+
+class Popover extends React.Component<IPopoverProps, any> {
+  constructor(props) {
+    super(props);
+    this.toggleTooltip = this.toggleTooltip.bind(this);
+  }
+
+  toggleTooltip() {
+    const { tooltipSetState } = this.props;
+    tooltipSetState(prevState => {
+      return {
+        ...prevState,
+        show: !prevState.show,
+      };
+    });
+  }
+
+  render() {
+    return (
+      <Target className="popper__target" onClick={this.toggleTooltip}>
+        {this.props.children}
+      </Target>
+    );
+  }
+}
+
+export default withTooltip(Popover);

+ 16 - 0
public/app/core/components/Tooltip/Tooltip.jest.tsx

@@ -0,0 +1,16 @@
+import React from 'react';
+import renderer from 'react-test-renderer';
+import Tooltip from './Tooltip';
+
+describe('Tooltip', () => {
+  it('renders correctly', () => {
+    const tree = renderer
+      .create(
+        <Tooltip placement="auto" content="Tooltip text">
+          <a href="http://www.grafana.com">Link with tooltip</a>
+        </Tooltip>
+      )
+      .toJSON();
+    expect(tree).toMatchSnapshot();
+  });
+});

+ 45 - 0
public/app/core/components/Tooltip/Tooltip.tsx

@@ -0,0 +1,45 @@
+import React from 'react';
+import withTooltip from './withTooltip';
+import { Target } from 'react-popper';
+
+interface ITooltipProps {
+  tooltipSetState: (prevState: object) => void;
+}
+
+class Tooltip extends React.Component<ITooltipProps, any> {
+  constructor(props) {
+    super(props);
+    this.showTooltip = this.showTooltip.bind(this);
+    this.hideTooltip = this.hideTooltip.bind(this);
+  }
+
+  showTooltip() {
+    const { tooltipSetState } = this.props;
+    tooltipSetState(prevState => {
+      return {
+        ...prevState,
+        show: true,
+      };
+    });
+  }
+
+  hideTooltip() {
+    const { tooltipSetState } = this.props;
+    tooltipSetState(prevState => {
+      return {
+        ...prevState,
+        show: false,
+      };
+    });
+  }
+
+  render() {
+    return (
+      <Target className="popper__target" onMouseOver={this.showTooltip} onMouseOut={this.hideTooltip}>
+        {this.props.children}
+      </Target>
+    );
+  }
+}
+
+export default withTooltip(Tooltip);

+ 16 - 0
public/app/core/components/Tooltip/__snapshots__/Popover.jest.tsx.snap

@@ -0,0 +1,16 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Popover renders correctly 1`] = `
+<div
+  className="popper__manager"
+>
+  <div
+    className="popper__target"
+    onClick={[Function]}
+  >
+    <button>
+      Button with Popover
+    </button>
+  </div>
+</div>
+`;

+ 19 - 0
public/app/core/components/Tooltip/__snapshots__/Tooltip.jest.tsx.snap

@@ -0,0 +1,19 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Tooltip renders correctly 1`] = `
+<div
+  className="popper__manager"
+>
+  <div
+    className="popper__target"
+    onMouseOut={[Function]}
+    onMouseOver={[Function]}
+  >
+    <a
+      href="http://www.grafana.com"
+    >
+      Link with tooltip
+    </a>
+  </div>
+</div>
+`;

+ 57 - 0
public/app/core/components/Tooltip/withTooltip.tsx

@@ -0,0 +1,57 @@
+import React from 'react';
+import { Manager, Popper, Arrow } from 'react-popper';
+
+interface IwithTooltipProps {
+  placement?: string;
+  content: string | ((props: any) => JSX.Element);
+}
+
+export default function withTooltip(WrappedComponent) {
+  return class extends React.Component<IwithTooltipProps, any> {
+    constructor(props) {
+      super(props);
+
+      this.setState = this.setState.bind(this);
+      this.state = {
+        placement: this.props.placement || 'auto',
+        show: false,
+      };
+    }
+
+    componentWillReceiveProps(nextProps) {
+      if (nextProps.placement && nextProps.placement !== this.state.placement) {
+        this.setState(prevState => {
+          return {
+            ...prevState,
+            placement: nextProps.placement,
+          };
+        });
+      }
+    }
+
+    renderContent(content) {
+      if (typeof content === 'function') {
+        // If it's a function we assume it's a React component
+        const ReactComponent = content;
+        return <ReactComponent />;
+      }
+      return content;
+    }
+
+    render() {
+      const { content } = this.props;
+
+      return (
+        <Manager className="popper__manager">
+          <WrappedComponent {...this.props} tooltipSetState={this.setState} />
+          {this.state.show ? (
+            <Popper placement={this.state.placement} className="popper">
+              {this.renderContent(content)}
+              <Arrow className="popper__arrow" />
+            </Popper>
+          ) : null}
+        </Manager>
+      );
+    }
+  };
+}
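Note: withTooltip is a higher-order component, and renderContent above treats a function-valued content prop as a React component, so callers can pass either a string or a component. A minimal sketch, assuming the Tooltip wrapper added in this commit (QueryHelp and HelpIcon are illustrative names):

  import React from 'react';
  import Tooltip from './Tooltip';

  // `content` may be a component because withTooltip's renderContent handles functions.
  const QueryHelp = () => <span>Escape regex characters in the query</span>;

  export const HelpIcon = () => (
    <Tooltip placement="bottom" content={QueryHelp}>
      <i className="fa fa-question-circle" />
    </Tooltip>
  );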

+ 20 - 0
public/app/core/components/UserPicker/UserPicker.jest.tsx

@@ -0,0 +1,20 @@
+import React from 'react';
+import renderer from 'react-test-renderer';
+import UserPicker from './UserPicker';
+
+const model = {
+  backendSrv: {
+    get: () => {
+      return new Promise((resolve, reject) => {});
+    },
+  },
+  refreshList: () => {},
+  teamId: '1',
+};
+
+describe('UserPicker', () => {
+  it('renders correctly', () => {
+    const tree = renderer.create(<UserPicker {...model} />).toJSON();
+    expect(tree).toMatchSnapshot();
+  });
+});

+ 108 - 0
public/app/core/components/UserPicker/UserPicker.tsx

@@ -0,0 +1,108 @@
+import React, { Component } from 'react';
+import { debounce } from 'lodash';
+import Select from 'react-select';
+import UserPickerOption from './UserPickerOption';
+
+export interface IProps {
+  backendSrv: any;
+  teamId: string;
+  refreshList: any;
+}
+
+export interface User {
+  id: number;
+  name: string;
+  login: string;
+  email: string;
+}
+
+class UserPicker extends Component<IProps, any> {
+  debouncedSearchUsers: any;
+  backendSrv: any;
+  teamId: string;
+  refreshList: any;
+
+  constructor(props) {
+    super(props);
+    this.backendSrv = this.props.backendSrv;
+    this.teamId = this.props.teamId;
+    this.refreshList = this.props.refreshList;
+
+    this.searchUsers = this.searchUsers.bind(this);
+    this.handleChange = this.handleChange.bind(this);
+    this.addUser = this.addUser.bind(this);
+    this.toggleLoading = this.toggleLoading.bind(this);
+
+    this.debouncedSearchUsers = debounce(this.searchUsers, 300, {
+      leading: true,
+      trailing: false,
+    });
+
+    this.state = {
+      multi: false,
+      isLoading: false,
+    };
+  }
+
+  handleChange(user) {
+    this.addUser(user.id);
+  }
+
+  toggleLoading(isLoading) {
+    this.setState(prevState => {
+      return {
+        ...prevState,
+        isLoading: isLoading,
+      };
+    });
+  }
+
+  addUser(userId) {
+    this.toggleLoading(true);
+    this.backendSrv.post(`/api/teams/${this.teamId}/members`, { userId: userId }).then(() => {
+      this.refreshList();
+      this.toggleLoading(false);
+    });
+  }
+
+  searchUsers(query) {
+    this.toggleLoading(true);
+
+    return this.backendSrv.get(`/api/users/search?perpage=10&page=1&query=${query}`).then(result => {
+      const users = result.users.map(user => {
+        return {
+          id: user.id,
+          label: `${user.login} - ${user.email}`,
+          avatarUrl: user.avatarUrl,
+        };
+      });
+      this.toggleLoading(false);
+      return { options: users };
+    });
+  }
+
+  render() {
+    const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async;
+
+    return (
+      <div className="user-picker">
+        <AsyncComponent
+          valueKey="id"
+          multi={this.state.multi}
+          labelKey="label"
+          cache={false}
+          isLoading={this.state.isLoading}
+          loadOptions={this.debouncedSearchUsers}
+          loadingPlaceholder="Loading..."
+          noResultsText="No users found"
+          onChange={this.handleChange}
+          className="width-8 gf-form-input gf-form-input--form-dropdown"
+          optionComponent={UserPickerOption}
+          placeholder="Choose"
+        />
+      </div>
+    );
+  }
+}
+
+export default UserPicker;
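Note: UserPicker expects backendSrv, teamId and refreshList as props (see IProps above); searches are debounced by 300 ms with a leading call, and a selected user is posted to /api/teams/{teamId}/members before refreshList is invoked. A minimal mounting sketch (renderTeamMemberPicker and onMembersChanged are illustrative names):

  import React from 'react';
  import UserPicker from './UserPicker';

  // Illustrative only: how an owning component might render the picker for team 1.
  export function renderTeamMemberPicker(backendSrv: any, onMembersChanged: () => void) {
    return <UserPicker backendSrv={backendSrv} teamId="1" refreshList={onMembersChanged} />;
  }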

+ 22 - 0
public/app/core/components/UserPicker/UserPickerOption.jest.tsx

@@ -0,0 +1,22 @@
+import React from 'react';
+import renderer from 'react-test-renderer';
+import UserPickerOption from './UserPickerOption';
+
+const model = {
+  onSelect: () => {},
+  onFocus: () => {},
+  isFocused: () => {},
+  option: {
+    title: 'Model title',
+    avatarUrl: 'url/to/avatar',
+    label: 'User picker label',
+  },
+  className: 'class-for-user-picker',
+};
+
+describe('UserPickerOption', () => {
+  it('renders correctly', () => {
+    const tree = renderer.create(<UserPickerOption {...model} />).toJSON();
+    expect(tree).toMatchSnapshot();
+  });
+});

+ 54 - 0
public/app/core/components/UserPicker/UserPickerOption.tsx

@@ -0,0 +1,54 @@
+import React, { Component } from 'react';
+
+export interface IProps {
+  onSelect: any;
+  onFocus: any;
+  option: any;
+  isFocused: any;
+  className: any;
+}
+
+class UserPickerOption extends Component<IProps, any> {
+  constructor(props) {
+    super(props);
+    this.handleMouseDown = this.handleMouseDown.bind(this);
+    this.handleMouseEnter = this.handleMouseEnter.bind(this);
+    this.handleMouseMove = this.handleMouseMove.bind(this);
+  }
+
+  handleMouseDown(event) {
+    event.preventDefault();
+    event.stopPropagation();
+    this.props.onSelect(this.props.option, event);
+  }
+
+  handleMouseEnter(event) {
+    this.props.onFocus(this.props.option, event);
+  }
+
+  handleMouseMove(event) {
+    if (this.props.isFocused) {
+      return;
+    }
+    this.props.onFocus(this.props.option, event);
+  }
+
+  render() {
+    const { option, children, className } = this.props;
+
+    return (
+      <button
+        onMouseDown={this.handleMouseDown}
+        onMouseEnter={this.handleMouseEnter}
+        onMouseMove={this.handleMouseMove}
+        title={option.title}
+        className={`user-picker-option__button btn btn-link ${className}`}
+      >
+        <img src={option.avatarUrl} alt={option.label} className="user-picker-option__avatar" />
+        {children}
+      </button>
+    );
+  }
+}
+
+export default UserPickerOption;

+ 98 - 0
public/app/core/components/UserPicker/__snapshots__/UserPicker.jest.tsx.snap

@@ -0,0 +1,98 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`UserPicker renders correctly 1`] = `
+<div
+  className="user-picker"
+>
+  <div
+    className="Select width-8 gf-form-input gf-form-input--form-dropdown is-clearable is-loading is-searchable Select--single"
+    style={undefined}
+  >
+    <div
+      className="Select-control"
+      onKeyDown={[Function]}
+      onMouseDown={[Function]}
+      onTouchEnd={[Function]}
+      onTouchMove={[Function]}
+      onTouchStart={[Function]}
+      style={undefined}
+    >
+      <span
+        className="Select-multi-value-wrapper"
+        id="react-select-2--value"
+      >
+        <div
+          className="Select-placeholder"
+        >
+          Loading...
+        </div>
+        <div
+          className="Select-input"
+          style={
+            Object {
+              "display": "inline-block",
+            }
+          }
+        >
+          <input
+            aria-activedescendant="react-select-2--value"
+            aria-describedby={undefined}
+            aria-expanded="false"
+            aria-haspopup="false"
+            aria-label={undefined}
+            aria-labelledby={undefined}
+            aria-owns=""
+            className={undefined}
+            id={undefined}
+            onBlur={[Function]}
+            onChange={[Function]}
+            onFocus={[Function]}
+            required={false}
+            role="combobox"
+            style={
+              Object {
+                "boxSizing": "content-box",
+                "width": "5px",
+              }
+            }
+            tabIndex={undefined}
+            value=""
+          />
+          <div
+            style={
+              Object {
+                "height": 0,
+                "left": 0,
+                "overflow": "scroll",
+                "position": "absolute",
+                "top": 0,
+                "visibility": "hidden",
+                "whiteSpace": "pre",
+              }
+            }
+          >
+            
+          </div>
+        </div>
+      </span>
+      <span
+        aria-hidden="true"
+        className="Select-loading-zone"
+      >
+        <span
+          className="Select-loading"
+        />
+      </span>
+      <span
+        className="Select-arrow-zone"
+        onMouseDown={[Function]}
+      >
+        <span
+          className="Select-arrow"
+          onMouseDown={[Function]}
+        />
+      </span>
+    </div>
+  </div>
+</div>
+`;

+ 17 - 0
public/app/core/components/UserPicker/__snapshots__/UserPickerOption.jest.tsx.snap

@@ -0,0 +1,17 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`UserPickerOption renders correctly 1`] = `
+<button
+  className="user-picker-option__button btn btn-link class-for-user-picker"
+  onMouseDown={[Function]}
+  onMouseEnter={[Function]}
+  onMouseMove={[Function]}
+  title="Model title"
+>
+  <img
+    alt="User picker label"
+    className="user-picker-option__avatar"
+    src="url/to/avatar"
+  />
+</button>
+`;

+ 14 - 0
public/app/core/components/scroll/scroll.ts

@@ -1,5 +1,6 @@
 import PerfectScrollbar from 'perfect-scrollbar';
 import coreModule from 'app/core/core_module';
+import appEvents from 'app/core/app_events';
 
 
 export function geminiScrollbar() {
   return {
@@ -7,6 +8,19 @@ export function geminiScrollbar() {
     link: function(scope, elem, attrs) {
       let scrollbar = new PerfectScrollbar(elem[0]);
 
 
+      appEvents.on(
+        'smooth-scroll-top',
+        () => {
+          elem.animate(
+            {
+              scrollTop: 0,
+            },
+            500
+          );
+        },
+        scope
+      );
+
       scope.$on('$routeChangeSuccess', () => {
         elem[0].scrollTop = 0;
       });
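Note: the 'smooth-scroll-top' listener above pairs with an emitter; dashnav.ts in this same commit emits the event when a panel is added. A minimal sketch of triggering the scroll from any other module:

  import appEvents from 'app/core/app_events';

  // Any code with access to appEvents can request the smooth scroll handled above.
  export function scrollDashboardToTop() {
    appEvents.emit('smooth-scroll-top');
  }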

+ 0 - 4
public/app/features/alerting/alert_def.ts

@@ -138,10 +138,6 @@ function getAlertAnnotationInfo(ah) {
     return 'Error: ' + ah.data.error;
   }
 
 
-  if (ah.data.noData || ah.data.no_data) {
-    return 'No Data';
-  }
-
   return '';
 }
 
 

+ 27 - 30
public/app/features/alerting/partials/alert_tab.html

@@ -12,7 +12,7 @@
 			<li ng-class="{active: ctrl.subTabIndex === 2}">
 				<a ng-click="ctrl.changeTabIndex(2)">State history</a>
 			</li>
-      			<li>
+      <li>
 				<a ng-click="ctrl.delete()">Delete</a>
 			</li>
 		</ul>
@@ -143,36 +143,33 @@
 				<i>No state changes recorded</i>
 			</div>
 
 
-			<section class="card-section card-list-layout-list">
-				<ol class="card-list" >
-					<li class="card-item-wrapper" ng-repeat="ah in ctrl.alertHistory">
-						<div class="alert-list card-item card-item--alert">
-							<div class="alert-list-body">
-								<div class="alert-list-icon alert-list-item-state {{ah.stateModel.stateClass}}">
-									<i class="{{ah.stateModel.iconClass}}"></i>
-								</div>
-								<div class="alert-list-main alert-list-text">
-									<span class="alert-list-state {{ah.stateModel.stateClass}}">{{ah.stateModel.text}}</span>
-									<span class="alert-list-info">{{ah.info}}</span>
-								</div>
-							</div>
-							<div class="alert-list-footer alert-list-text">
-								<span>{{ah.time}}</span>
-								<span><!--Img Link--></span>
-							</div>
-						</div>
-					</li>
-				</ol>
-			</section>
-		</div>
-	</div>
+      <ol class="alert-rule-list" >
+        <li class="alert-rule-item" ng-repeat="al in ctrl.alertHistory">
+          <div class="alert-rule-item__icon {{al.stateModel.stateClass}}">
+            <i class="{{al.stateModel.iconClass}}"></i>
+          </div>
+          <div class="alert-rule-item__body">
+            <div class="alert-rule-item__header">
+              <div class="alert-rule-item__text">
+                <span class="{{al.stateModel.stateClass}}">{{al.stateModel.text}}</span>
+              </div>
+            </div>
+            <span class="alert-list-info">{{al.info}}</span>
+          </div>
+          <div class="alert-rule-item__time">
+            <span>{{al.time}}</span>
+          </div>
+        </li>
+      </ol>
+    </div>
+  </div>
 </div>
 
 
 <div class="gf-form-group" ng-if="!ctrl.alert">
-	<div class="gf-form-button-row">
-		<button class="btn btn-inverse" ng-click="ctrl.enable()">
-			<i class="icon-gf icon-gf-alert"></i>
-			Create Alert
-		</button>
-	</div>
+  <div class="gf-form-button-row">
+    <button class="btn btn-inverse" ng-click="ctrl.enable()">
+      <i class="icon-gf icon-gf-alert"></i>
+      Create Alert
+    </button>
+  </div>
 </div>

+ 42 - 24
public/app/features/dashboard/dashboard_model.ts

@@ -145,7 +145,10 @@ export class DashboardModel {
     };
 
 
     // get panel save models
-    copy.panels = _.map(this.panels, panel => panel.getSaveModel());
+    copy.panels = _.chain(this.panels)
+      .filter(panel => panel.type !== 'add-panel')
+      .map(panel => panel.getSaveModel())
+      .value();
 
 
     //  sort by keys
     copy = sortByKeys(copy);
@@ -230,10 +233,6 @@ export class DashboardModel {
   }
 
 
   cleanUpRepeats() {
-    this.processRepeats(true);
-  }
-
-  processRepeats(cleanUpOnly?: boolean) {
     if (this.snapshot || this.templating.list.length === 0) {
       return;
     }
@@ -248,11 +247,7 @@ export class DashboardModel {
 
 
     for (let i = 0; i < this.panels.length; i++) {
       let panel = this.panels[i];
-      if (panel.repeat) {
-        if (!cleanUpOnly) {
-          this.repeatPanel(panel, i);
-        }
-      } else if (panel.repeatPanelId && panel.repeatIteration !== this.iteration) {
+      if ((!panel.repeat || panel.repeatedByRow) && panel.repeatPanelId && panel.repeatIteration !== this.iteration) {
         panelsToRemove.push(panel);
       }
     }
@@ -264,6 +259,26 @@ export class DashboardModel {
     this.events.emit('repeats-processed');
   }
 
 
+  processRepeats() {
+    if (this.snapshot || this.templating.list.length === 0) {
+      return;
+    }
+
+    this.cleanUpRepeats();
+
+    this.iteration = (this.iteration || new Date().getTime()) + 1;
+
+    for (let i = 0; i < this.panels.length; i++) {
+      let panel = this.panels[i];
+      if (panel.repeat) {
+        this.repeatPanel(panel, i);
+      }
+    }
+
+    this.sortPanelsByGridPos();
+    this.events.emit('repeats-processed');
+  }
+
   getPanelRepeatClone(sourcePanel, valueIndex, sourcePanelIndex) {
     // if first clone return source
     if (valueIndex === 0) {
@@ -282,21 +297,21 @@ export class DashboardModel {
     return clone;
   }
 
 
-  getRowRepeatClone(sourcePanel, valueIndex, sourcePanelIndex) {
+  getRowRepeatClone(sourceRowPanel, valueIndex, sourcePanelIndex) {
     // if first clone return source
     if (valueIndex === 0) {
-      if (!sourcePanel.collapsed) {
+      if (!sourceRowPanel.collapsed) {
         let rowPanels = this.getRowPanels(sourcePanelIndex);
-        sourcePanel.panels = rowPanels;
+        sourceRowPanel.panels = rowPanels;
       }
-      return sourcePanel;
+      return sourceRowPanel;
     }
 
 
-    let clone = new PanelModel(sourcePanel.getSaveModel());
+    let clone = new PanelModel(sourceRowPanel.getSaveModel());
     // for row clones we need to figure out panels under row to clone and where to insert clone
     let rowPanels, insertPos;
-    if (sourcePanel.collapsed) {
-      rowPanels = _.cloneDeep(sourcePanel.panels);
+    if (sourceRowPanel.collapsed) {
+      rowPanels = _.cloneDeep(sourceRowPanel.panels);
       clone.panels = rowPanels;
       // insert copied row after preceding row
       insertPos = sourcePanelIndex + valueIndex;
@@ -333,7 +348,7 @@ export class DashboardModel {
       let copy;
 
 
       copy = this.getPanelRepeatClone(panel, index, panelIndex);
-      copy.scopedVars = {};
+      copy.scopedVars = copy.scopedVars || {};
       copy.scopedVars[variable.name] = option;
 
 
       if (panel.repeatDirection === REPEAT_DIR_VERTICAL) {
@@ -342,7 +357,6 @@ export class DashboardModel {
       } else {
         // set width based on how many are selected
         // assumed the repeated panels should take up full row width
-
         copy.gridPos.w = Math.max(GRID_COLUMN_COUNT / selectedOptions.length, minWidth);
         copy.gridPos.x = xPos;
         copy.gridPos.y = yPos;
@@ -363,7 +377,7 @@ export class DashboardModel {
     let yPos = panel.gridPos.y;
 
 
     function setScopedVars(panel, variableOption) {
-      panel.scopedVars = {};
+      panel.scopedVars = panel.scopedVars || {};
       panel.scopedVars[variable.name] = variableOption;
     }
 
 
@@ -381,7 +395,7 @@ export class DashboardModel {
         _.each(rowPanels, (rowPanel, i) => {
           setScopedVars(rowPanel, option);
           if (optionIndex > 0) {
-            this.updateRepeatedPanelIds(rowPanel);
+            this.updateRepeatedPanelIds(rowPanel, true);
           }
         });
         rowCopy.gridPos.y += optionIndex;
@@ -394,7 +408,7 @@ export class DashboardModel {
           setScopedVars(rowPanel, option);
           if (optionIndex > 0) {
             let cloneRowPanel = new PanelModel(rowPanel);
-            this.updateRepeatedPanelIds(cloneRowPanel);
+            this.updateRepeatedPanelIds(cloneRowPanel, true);
             // For exposed row additionally set proper Y grid position and add it to dashboard panels
             cloneRowPanel.gridPos.y += rowHeight * optionIndex;
             this.panels.splice(insertPos + i, 0, cloneRowPanel);
@@ -413,11 +427,15 @@ export class DashboardModel {
     }
   }
 
 
-  updateRepeatedPanelIds(panel: PanelModel) {
+  updateRepeatedPanelIds(panel: PanelModel, repeatedByRow?: boolean) {
     panel.repeatPanelId = panel.id;
     panel.id = this.getNextPanelId();
     panel.repeatIteration = this.iteration;
-    panel.repeat = null;
+    if (repeatedByRow) {
+      panel.repeatedByRow = true;
+    } else {
+      panel.repeat = null;
+    }
     return panel;
   }
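Note: switching to copy.scopedVars = copy.scopedVars || {} (and the same pattern in setScopedVars) lets a panel keep the variable set by a row repeat when the panel itself is repeated afterwards, so row and panel variables combine instead of overwriting each other. Roughly, per the repeat.jest.ts cases added below, a repeated clone inside the second row ends up with both keys:

  // Illustrative shape only; the exact values come from the tests further down.
  const cloneScopedVars = {
    region: { text: 'reg2', value: 'reg2' }, // set while repeating the row
    app: { text: 'se1', value: 'se1' }, // set while repeating the panel inside that row
  };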
 
 

+ 13 - 3
public/app/features/dashboard/dashgrid/AddPanelPanel.tsx

@@ -21,6 +21,8 @@ export interface AddPanelPanelState {
 export class AddPanelPanel extends React.Component<AddPanelPanelProps, AddPanelPanelState> {
   constructor(props) {
     super(props);
+    this.handleCloseAddPanel = this.handleCloseAddPanel.bind(this);
+    this.renderPanelItem = this.renderPanelItem.bind(this);
 
 
     this.state = {
       panelPlugins: this.getPanelPlugins(),
@@ -83,6 +85,13 @@ export class AddPanelPanel extends React.Component<AddPanelPanelProps, AddPanelP
     dashboard.removePanel(this.props.panel);
   };
 
 
+  handleCloseAddPanel(evt) {
+    evt.preventDefault();
+    const panelContainer = this.props.getPanelContainer();
+    const dashboard = panelContainer.getDashboard();
+    dashboard.removePanel(dashboard.panels[0]);
+  }
+
   renderPanelItem(panel, index) {
     console.log('render panel', index);
     return (
@@ -101,10 +110,11 @@ export class AddPanelPanel extends React.Component<AddPanelPanelProps, AddPanelP
             <i className="gicon gicon-add-panel" />
             <span className="add-panel__title">New Panel</span>
             <span className="add-panel__sub-title">Select a visualization</span>
+            <button className="add-panel__close" onClick={this.handleCloseAddPanel}>
+              <i className="fa fa-close" />
+            </button>
           </div>
-          <ScrollBar className="add-panel__items">
-            {this.state.panelPlugins.map(this.renderPanelItem.bind(this))}
-          </ScrollBar>
+          <ScrollBar className="add-panel__items">{this.state.panelPlugins.map(this.renderPanelItem)}</ScrollBar>
         </div>
       </div>
     );
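Note: binding renderPanelItem and handleCloseAddPanel once in the constructor gives render a stable function reference, instead of calling .bind(this) on every render. A minimal sketch of the same pattern (ItemList and renderItem are illustrative names):

  import React from 'react';

  class ItemList extends React.Component<{ items: string[] }, {}> {
    constructor(props) {
      super(props);
      // Bind once so render can pass the same reference on every call.
      this.renderItem = this.renderItem.bind(this);
    }

    renderItem(item: string, index: number) {
      return <li key={index}>{item}</li>;
    }

    render() {
      return <ul>{this.props.items.map(this.renderItem)}</ul>;
    }
  }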

+ 2 - 2
public/app/features/dashboard/dashnav/dashnav.ts

@@ -73,9 +73,9 @@ export class DashNavCtrl {
   }
 
 
   addPanel() {
+    appEvents.emit('smooth-scroll-top');
     if (this.dashboard.panels.length > 0 && this.dashboard.panels[0].type === 'add-panel') {
-      this.dashboard.removePanel(this.dashboard.panels[0]);
-      return;
+      return; // Return if the "Add panel" exists already
     }
 
 
     this.dashboard.addPanel({

+ 1 - 0
public/app/features/dashboard/panel_model.ts

@@ -26,6 +26,7 @@ export class PanelModel {
   repeatIteration?: number;
   repeatPanelId?: number;
   repeatDirection?: string;
+  repeatedByRow?: boolean;
   minSpan?: number;
   collapsed?: boolean;
   panels?: any;

+ 1 - 0
public/app/features/dashboard/share_snapshot_ctrl.ts

@@ -2,6 +2,7 @@ import angular from 'angular';
 import _ from 'lodash';
 
 
 export class ShareSnapshotCtrl {
+  /** @ngInject **/
   constructor($scope, $rootScope, $location, backendSrv, $timeout, timeSrv) {
     $scope.snapshot = {
       name: $scope.dashboard.title,

+ 17 - 0
public/app/features/dashboard/specs/dashboard_model.jest.ts

@@ -49,6 +49,23 @@ describe('DashboardModel', function() {
       expect(keys[0]).toBe('annotations');
       expect(keys[0]).toBe('annotations');
       expect(keys[1]).toBe('autoUpdate');
       expect(keys[1]).toBe('autoUpdate');
     });
     });
+
+    it('should remove add panel panels', () => {
+      var model = new DashboardModel({});
+      model.addPanel({
+        type: 'add-panel',
+      });
+      model.addPanel({
+        type: 'graph',
+      });
+      model.addPanel({
+        type: 'add-panel',
+      });
+      var saveModel = model.getSaveModelClone();
+      var panels = saveModel.panels;
+
+      expect(panels.length).toBe(1);
+    });
   });
   });
 
 
   describe('row and panel manipulation', function() {

+ 129 - 0
public/app/features/dashboard/specs/repeat.jest.ts

@@ -382,3 +382,132 @@ describe('given dashboard with row repeat', function() {
     expect(panel_ids.length).toEqual(_.uniq(panel_ids).length);
   });
 });
+
+describe('given dashboard with row and panel repeat', () => {
+  let dashboard, dashboardJSON;
+
+  beforeEach(() => {
+    dashboardJSON = {
+      panels: [
+        {
+          id: 1,
+          type: 'row',
+          repeat: 'region',
+          gridPos: { x: 0, y: 0, h: 1, w: 24 },
+        },
+        { id: 2, type: 'graph', repeat: 'app', gridPos: { x: 0, y: 1, h: 1, w: 6 } },
+      ],
+      templating: {
+        list: [
+          {
+            name: 'region',
+            current: {
+              text: 'reg1, reg2',
+              value: ['reg1', 'reg2'],
+            },
+            options: [
+              { text: 'reg1', value: 'reg1', selected: true },
+              { text: 'reg2', value: 'reg2', selected: true },
+              { text: 'reg3', value: 'reg3', selected: false },
+            ],
+          },
+          {
+            name: 'app',
+            current: {
+              text: 'se1, se2',
+              value: ['se1', 'se2'],
+            },
+            options: [
+              { text: 'se1', value: 'se1', selected: true },
+              { text: 'se2', value: 'se2', selected: true },
+              { text: 'se3', value: 'se3', selected: false },
+            ],
+          },
+        ],
+      },
+    };
+    dashboard = new DashboardModel(dashboardJSON);
+    dashboard.processRepeats();
+  });
+
+  it('should repeat row and panels for each row', () => {
+    const panel_types = _.map(dashboard.panels, 'type');
+    expect(panel_types).toEqual(['row', 'graph', 'graph', 'row', 'graph', 'graph']);
+  });
+
+  it('should clean up old repeated panels', () => {
+    dashboardJSON.panels = [
+      {
+        id: 1,
+        type: 'row',
+        repeat: 'region',
+        gridPos: { x: 0, y: 0, h: 1, w: 24 },
+      },
+      { id: 2, type: 'graph', repeat: 'app', gridPos: { x: 0, y: 1, h: 1, w: 6 } },
+      { id: 3, type: 'graph', repeatPanelId: 2, repeatIteration: 101, gridPos: { x: 7, y: 1, h: 1, w: 6 } },
+      {
+        id: 11,
+        type: 'row',
+        repeatPanelId: 1,
+        repeatIteration: 101,
+        gridPos: { x: 0, y: 2, h: 1, w: 24 },
+      },
+      { id: 12, type: 'graph', repeatPanelId: 2, repeatIteration: 101, gridPos: { x: 0, y: 3, h: 1, w: 6 } },
+    ];
+    dashboard = new DashboardModel(dashboardJSON);
+    dashboard.processRepeats();
+
+    const panel_types = _.map(dashboard.panels, 'type');
+    expect(panel_types).toEqual(['row', 'graph', 'graph', 'row', 'graph', 'graph']);
+  });
+
+  it('should set scopedVars for each row', () => {
+    dashboard = new DashboardModel(dashboardJSON);
+    dashboard.processRepeats();
+
+    expect(dashboard.panels[0].scopedVars).toMatchObject({
+      region: { text: 'reg1', value: 'reg1' },
+    });
+    expect(dashboard.panels[3].scopedVars).toMatchObject({
+      region: { text: 'reg2', value: 'reg2' },
+    });
+  });
+
+  it('should set panel-repeat variable for each panel', () => {
+    dashboard = new DashboardModel(dashboardJSON);
+    dashboard.processRepeats();
+
+    expect(dashboard.panels[1].scopedVars).toMatchObject({
+      app: { text: 'se1', value: 'se1' },
+    });
+    expect(dashboard.panels[2].scopedVars).toMatchObject({
+      app: { text: 'se2', value: 'se2' },
+    });
+
+    expect(dashboard.panels[4].scopedVars).toMatchObject({
+      app: { text: 'se1', value: 'se1' },
+    });
+    expect(dashboard.panels[5].scopedVars).toMatchObject({
+      app: { text: 'se2', value: 'se2' },
+    });
+  });
+
+  it('should set row-repeat variable for each panel', () => {
+    dashboard = new DashboardModel(dashboardJSON);
+    dashboard.processRepeats();
+
+    expect(dashboard.panels[1].scopedVars).toMatchObject({
+      region: { text: 'reg1', value: 'reg1' },
+    });
+    expect(dashboard.panels[2].scopedVars).toMatchObject({
+      region: { text: 'reg1', value: 'reg1' },
+    });
+
+    expect(dashboard.panels[4].scopedVars).toMatchObject({
+      region: { text: 'reg2', value: 'reg2' },
+    });
+    expect(dashboard.panels[5].scopedVars).toMatchObject({
+      region: { text: 'reg2', value: 'reg2' },
+    });
+  });
+});

+ 2 - 3
public/app/features/org/partials/team_details.html

@@ -26,11 +26,10 @@
   <div class="gf-form-group">
 
 
     <h3 class="page-heading">Team Members</h3>
-
-    <form name="ctrl.addMemberForm" class="gf-form-group">
+		<form name="ctrl.addMemberForm" class="gf-form-group">
       <div class="gf-form">
         <span class="gf-form-label width-10">Add member</span>
-        <user-picker user-picked="ctrl.userPicked($user)"></user-picker>
+				<select-user-picker backendSrv="ctrl.backendSrv" teamId="ctrl.$routeParams.id" refreshList="ctrl.get" teamMembers="ctrl.teamMembers"></select-user-picker>
       </div>
     </form>
 
 

+ 2 - 1
public/app/features/org/team_details_ctrl.ts

@@ -8,6 +8,7 @@ export default class TeamDetailsCtrl {
   /** @ngInject **/
   constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) {
     this.navModel = navModelSrv.getNav('cfg', 'teams', 0);
+    this.get = this.get.bind(this);
     this.get();
   }
 
 
@@ -35,7 +36,7 @@ export default class TeamDetailsCtrl {
   }
 
 
   removeMemberConfirmed(teamMember: TeamMember) {
-    this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get.bind(this));
+    this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get);
   }
 
 
   update() {

+ 1 - 1
public/app/plugins/datasource/cloudwatch/partials/config.html

@@ -39,7 +39,7 @@
   <div class="gf-form">
     <label class="gf-form-label width-13">Default Region</label>
     <div class="gf-form-select-wrapper max-width-18 gf-form-select-wrapper--has-help-icon">
-      <select class="gf-form-input" ng-model="ctrl.current.jsonData.defaultRegion" ng-options="region for region in ['ap-northeast-1', 'ap-northeast-2', 'ap-southeast-1', 'ap-southeast-2', 'ap-south-1', 'ca-central-1', 'cn-north-1', 'cn-northwest-1', 'eu-central-1', 'eu-west-1', 'eu-west-2', 'sa-east-1', 'us-east-1', 'us-east-2', 'us-gov-west-1', 'us-west-1', 'us-west-2']"></select>
+      <select class="gf-form-input" ng-model="ctrl.current.jsonData.defaultRegion" ng-options="region for region in ['ap-northeast-1', 'ap-northeast-2', 'ap-southeast-1', 'ap-southeast-2', 'ap-south-1', 'ca-central-1', 'cn-north-1', 'cn-northwest-1', 'eu-central-1', 'eu-west-1', 'eu-west-2', 'eu-west-3', 'sa-east-1', 'us-east-1', 'us-east-2', 'us-gov-west-1', 'us-west-1', 'us-west-2']"></select>
       <info-popover mode="right-absolute">
         Specify the region, such as for US West (Oregon) use ` us-west-2 ` as the region.
       </info-popover>

+ 37 - 44
public/app/plugins/panel/alertlist/module.html

@@ -3,24 +3,22 @@
 		{{ctrl.noAlertsMessage}}
 	</div>
 
 
-  <section class="card-section card-list-layout-list" ng-if="ctrl.panel.show === 'current'">
-    <ol class="card-list">
-			<li class="card-item-wrapper" ng-repeat="alert in ctrl.currentAlerts">
-        <div class="alert-list card-item card-item--alert">
-          <div class="alert-list-body">
-            <div class="alert-list-icon alert-list-item-state {{alert.stateModel.stateClass}}">
-              <i class="{{alert.stateModel.iconClass}}"></i>
-            </div>
-            <div class="alert-list-main">
-              <p class="alert-list-title">
-                <a href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert">
-                  {{alert.name}}
-                </a>
-              </p>
-              <p class="alert-list-text">
-                <span class="alert-list-state {{alert.stateModel.stateClass}}">{{alert.stateModel.text}}</span>
-                for {{alert.newStateDateAgo}}
-              </p>
+  <section ng-if="ctrl.panel.show === 'current'">
+    <ol class="alert-rule-list">
+      <li class="alert-rule-item" ng-repeat="alert in ctrl.currentAlerts">
+        <div class="alert-rule-item__body">
+          <div class="alert-rule-item__icon  {{alert.stateModel.stateClass}}">
+            <i class="{{alert.stateModel.iconClass}}"></i>
+          </div>
+          <div class="alert-rule-item__header">
+            <p class="alert-rule-item__name">
+              <a href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert">
+                {{alert.name}}
+              </a>
+            </p>
+            <div class="alert-rule-item__text">
+              <span class="{{alert.stateModel.stateClass}}">{{alert.stateModel.text}}</span>
+              <span class="alert-rule-item__time">for {{alert.newStateDateAgo}}</span>
             </div>
           </div>
         </div>
@@ -28,30 +26,25 @@
     </ol>
   </section>
 
 
-  <section class="card-section card-list-layout-list" ng-if="ctrl.panel.show === 'changes'">
-		<ol class="card-list">
-			<li class="card-item-wrapper" ng-repeat="al in ctrl.alertHistory">
-				<div class="alert-list card-item card-item--alert">
-					<div class="alert-list-body">
-						<div class="alert-list-icon alert-list-item-state {{al.stateModel.stateClass}}">
-							<i class="{{al.stateModel.iconClass}}"></i>
-						</div>
-						<div class="alert-list-main">
-							<p class="alert-list-title">{{al.alertName}}</p>
-							<div class="alert-list-text">
-								<span class="alert-list-state {{al.stateModel.stateClass}}">{{al.stateModel.text}}</span>
-								<span class="alert-list-info alert-list-info-left">{{al.info}}</span>
-							</div>
-						</div>
-					</div>
-					<div class="alert-list-footer">
-						<span class="alert-list-text">{{al.time}}</span>
-						<span class="alert-list-text">
-							<!--Img Link-->
-						</span>
-					</div>
-				</div>
-			</li>
-		</ol>
-	</section>
+  <section ng-if="ctrl.panel.show === 'changes'">
+    <ol class="alert-rule-list">
+      <li class="alert-rule-item" ng-repeat="al in ctrl.alertHistory">
+        <div class="alert-rule-item__icon {{al.stateModel.stateClass}}">
+          <i class="{{al.stateModel.iconClass}}"></i>
+        </div>
+        <div class="alert-rule-item__body">
+          <div class="alert-rule-item__header">
+            <p class="alert-rule-item__name">{{al.alertName}}</p>
+            <div class="alert-rule-item__text">
+              <span class="{{al.stateModel.stateClass}}">{{al.stateModel.text}}</span>
+            </div>
+          </div>
+          <span class="alert-rule-item__info">{{al.info}}</span>
+        </div>
+        <div class="alert-rule-item__time">
+          <span>{{al.time}}</span>
+        </div>
+      </li>
+    </ol>
+  </section>
 </div>

+ 6 - 1
public/app/plugins/panel/graph/graph.ts

@@ -18,6 +18,7 @@ import GraphTooltip from './graph_tooltip';
 import { ThresholdManager } from './threshold_manager';
 import { EventManager } from 'app/features/annotations/all';
 import { convertValuesToHistogram, getSeriesValues } from './histogram';
+import config from 'app/core/config';
 
 
 /** @ngInject **/
 function graphDirective(timeSrv, popoverSrv, contextSrv) {
@@ -286,6 +287,10 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) {
       }
 
 
       function buildFlotOptions(panel) {
+        let gridColor = '#c8c8c8';
+        if (config.bootData.user.lightTheme === true) {
+          gridColor = '#a1a1a1';
+        }
         const stack = panel.stack ? true : null;
         let options = {
           hooks: {
@@ -332,7 +337,7 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) {
             borderWidth: 0,
             hoverable: true,
             clickable: true,
-            color: '#c8c8c8',
+            color: gridColor,
             margin: { left: 0, right: 0 },
             labelMarginX: 0,
           },

+ 0 - 3
public/app/plugins/panel/text/module.ts

@@ -66,9 +66,6 @@ export class TextPanelCtrl extends PanelCtrl {
   renderMarkdown(content) {
     if (!this.remarkable) {
       this.remarkable = new Remarkable();
-      this.$scope.$apply(() => {
-        this.updateContent(this.remarkable.render(content));
-      });
     }
 
 
     this.$scope.$applyAsync(() => {

+ 65 - 0
public/app/stores/AlertListStore/AlertListStore.jest.ts

@@ -0,0 +1,65 @@
+import { AlertListStore } from './AlertListStore';
+import { backendSrv } from 'test/mocks/common';
+import moment from 'moment';
+
+function getRule(name, state, info) {
+  return {
+    id: 11,
+    dashboardId: 58,
+    panelId: 3,
+    name: name,
+    state: state,
+    newStateDate: moment()
+      .subtract(5, 'minutes')
+      .format(),
+    evalData: {},
+    executionError: '',
+    dashboardUri: 'db/mygool',
+    stateText: state,
+    stateIcon: 'fa',
+    stateClass: 'asd',
+    stateAge: '10m',
+    info: info,
+  };
+}
+
+describe('AlertListStore', () => {
+  let store;
+
+  beforeAll(() => {
+    store = AlertListStore.create(
+      {
+        rules: [
+          getRule('Europe', 'OK', 'backend-01'),
+          getRule('Google', 'ALERTING', 'backend-02'),
+          getRule('Amazon', 'PAUSED', 'backend-03'),
+          getRule('West-Europe', 'PAUSED', 'backend-03'),
+        ],
+        search: '',
+      },
+      {
+        backendSrv: backendSrv,
+      }
+    );
+  });
+
+  it('search should filter list on name', () => {
+    store.setSearchQuery('urope');
+    expect(store.filteredRules).toHaveLength(2);
+  });
+
+  it('search should filter list on state', () => {
+    store.setSearchQuery('ale');
+    expect(store.filteredRules).toHaveLength(1);
+  });
+
+  it('search should filter list on info', () => {
+    store.setSearchQuery('-0');
+    expect(store.filteredRules).toHaveLength(4);
+  });
+
+  it('search should be equal', () => {
+    store.setSearchQuery('alert');
+    expect(store.search).toBe('alert');
+  });
+});

+ 19 - 6
public/app/stores/AlertListStore/AlertListStore.ts

@@ -9,7 +9,16 @@ export const AlertListStore = types
   .model('AlertListStore', {
     rules: types.array(AlertRule),
     stateFilter: types.optional(types.string, 'all'),
+    search: types.optional(types.string, ''),
   })
+  .views(self => ({
+    get filteredRules() {
+      let regex = new RegExp(self.search, 'i');
+      return self.rules.filter(alert => {
+        return regex.test(alert.name) || regex.test(alert.stateText) || regex.test(alert.info);
+      });
+    },
+  }))
   .actions(self => ({
     loadRules: flow(function* load(filters) {
       const backendSrv = getEnv(self).backendSrv;
@@ -20,15 +29,19 @@ export const AlertListStore = types
       for (let rule of apiRules) {
         setStateFields(rule, rule.state);
 
 
-        if (rule.executionError) {
-          rule.info = 'Execution Error: ' + rule.executionError;
-        }
-
-        if (rule.evalData && rule.evalData.noData) {
-          rule.info = 'Query returned no data';
+        if (rule.state !== 'paused') {
+          if (rule.executionError) {
+            rule.info = 'Execution Error: ' + rule.executionError;
+          }
+          if (rule.evalData && rule.evalData.noData) {
+            rule.info = 'Query returned no data';
+          }
         }
 
 
         self.rules.push(AlertRule.create(rule));
       }
     }),
+    setSearchQuery(query: string) {
+      self.search = query;
+    },
   }));
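Note: filteredRules is a case-insensitive regex filter over name, stateText and info, and setSearchQuery drives it; AlertListStore.jest.ts below exercises both. A minimal usage sketch (the empty rule list and backendSrv stub are illustrative):

  // Sketch: create a store, set a query, read the filtered view.
  const store = AlertListStore.create({ rules: [], search: '' }, { backendSrv: {} });
  store.setSearchQuery('alerting');
  const visible = store.filteredRules; // rules whose name, stateText or info match /alerting/i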

+ 2 - 2
public/app/stores/AlertListStore/AlertRule.ts

@@ -1,4 +1,4 @@
-import { types, getEnv, flow } from 'mobx-state-tree';
+import { types, getEnv, flow } from 'mobx-state-tree';
 import { setStateFields } from './helpers';
 
 
 export const AlertRule = types
@@ -26,7 +26,7 @@ export const AlertRule = types
      */
     togglePaused: flow(function* togglePaused() {
       const backendSrv = getEnv(self).backendSrv;
-      const payload = { paused: self.isPaused };
+      const payload = { paused: !self.isPaused };
       const res = yield backendSrv.post(`/api/alerts/${self.id}/pause`, payload);
       setStateFields(self, res.state);
       self.info = '';

+ 1 - 1
public/app/stores/AlertListStore/helpers.ts

@@ -1,4 +1,4 @@
-import moment from 'moment';
+import moment from 'moment';
 import alertDef from 'app/features/alerting/alert_def';
 
 
 export function setStateFields(rule, state) {

+ 94 - 91
public/sass/_grafana.scss

@@ -1,104 +1,107 @@
 // vendor
-@import "../vendor/css/timepicker.css";
-@import "../vendor/css/spectrum.css";
+@import '../vendor/css/timepicker.css';
+@import '../vendor/css/spectrum.css';
 
 
 // MIXINS
-@import "mixins/mixins";
-@import "mixins/animations";
-@import "mixins/buttons";
-@import "mixins/breakpoints";
-@import "mixins/grid";
-@import "mixins/grid-framework";
-@import "mixins/hover";
-@import "mixins/forms";
-@import "mixins/drop_element";
+@import 'mixins/mixins';
+@import 'mixins/animations';
+@import 'mixins/buttons';
+@import 'mixins/breakpoints';
+@import 'mixins/grid';
+@import 'mixins/grid-framework';
+@import 'mixins/hover';
+@import 'mixins/forms';
+@import 'mixins/drop_element';
 
 
 // BASE
-@import "base/normalize";
-@import "base/reboot";
-@import "base/type";
-@import "base/forms";
-@import "base/grid";
-@import "base/fonts";
-@import "base/code";
-@import "base/icons";
+@import 'base/normalize';
+@import 'base/reboot';
+@import 'base/type';
+@import 'base/forms';
+@import 'base/grid';
+@import 'base/fonts';
+@import 'base/code';
+@import 'base/icons';
 
 
 // UTILS
-@import "utils/utils";
-@import "utils/validation";
-@import "utils/angular";
-@import "utils/spacings";
-@import "utils/widths";
+@import 'utils/utils';
+@import 'utils/validation';
+@import 'utils/angular';
+@import 'utils/spacings';
+@import 'utils/widths';
 
 
 // LAYOUTS
-@import "layout/lists";
-@import "layout/page";
+@import 'layout/lists';
+@import 'layout/page';
 
 
 // COMPONENTS
-@import "components/scrollbar";
-@import "components/cards";
-@import "components/buttons";
-@import "components/navs";
-@import "components/tabs";
-@import "components/alerts";
-@import "components/switch";
-@import "components/tooltip";
-@import "components/tags";
-@import "components/panel_graph";
-@import "components/submenu";
-@import "components/panel_alertlist";
-@import "components/panel_dashlist";
-@import "components/panel_gettingstarted";
-@import "components/panel_pluginlist";
-@import "components/panel_singlestat";
-@import "components/panel_table";
-@import "components/panel_text";
-@import "components/panel_heatmap";
-@import "components/panel_add_panel";
-@import "components/settings_permissions";
-@import "components/tagsinput";
-@import "components/tables_lists";
-@import "components/search";
-@import "components/gf-form";
-@import "components/sidemenu";
-@import "components/navbar";
-@import "components/timepicker";
-@import "components/filter-controls";
-@import "components/filter-list";
-@import "components/filter-table";
-@import "components/old_stuff";
-@import "components/typeahead";
-@import "components/modals";
-@import "components/dropdown";
-@import "components/color_picker";
-@import "components/footer";
-@import "components/infobox";
-@import "components/shortcuts";
-@import "components/drop";
-@import "components/query_editor";
-@import "components/tabbed_view";
-@import "components/query_part";
-@import "components/jsontree";
-@import "components/edit_sidemenu";
-@import "components/row.scss";
-@import "components/json_explorer";
-@import "components/code_editor";
-@import "components/dashboard_grid";
-@import "components/dashboard_list";
-@import "components/page_header";
-@import "components/dashboard_settings";
-@import "components/empty_list_cta";
+@import 'components/scrollbar';
+@import 'components/cards';
+@import 'components/buttons';
+@import 'components/navs';
+@import 'components/tabs';
+@import 'components/alerts';
+@import 'components/switch';
+@import 'components/tooltip';
+@import 'components/tags';
+@import 'components/panel_graph';
+@import 'components/submenu';
+@import 'components/panel_alertlist';
+@import 'components/panel_dashlist';
+@import 'components/panel_gettingstarted';
+@import 'components/panel_pluginlist';
+@import 'components/panel_singlestat';
+@import 'components/panel_table';
+@import 'components/panel_text';
+@import 'components/panel_heatmap';
+@import 'components/panel_add_panel';
+@import 'components/settings_permissions';
+@import 'components/tagsinput';
+@import 'components/tables_lists';
+@import 'components/search';
+@import 'components/gf-form';
+@import 'components/sidemenu';
+@import 'components/navbar';
+@import 'components/timepicker';
+@import 'components/filter-controls';
+@import 'components/filter-list';
+@import 'components/filter-table';
+@import 'components/old_stuff';
+@import 'components/typeahead';
+@import 'components/modals';
+@import 'components/dropdown';
+@import 'components/color_picker';
+@import 'components/footer';
+@import 'components/infobox';
+@import 'components/shortcuts';
+@import 'components/drop';
+@import 'components/query_editor';
+@import 'components/tabbed_view';
+@import 'components/query_part';
+@import 'components/jsontree';
+@import 'components/edit_sidemenu';
+@import 'components/row.scss';
+@import 'components/json_explorer';
+@import 'components/code_editor';
+@import 'components/dashboard_grid';
+@import 'components/dashboard_list';
+@import 'components/page_header';
+@import 'components/dashboard_settings';
+@import 'components/empty_list_cta';
+@import 'components/popper';
+@import 'components/form_select_box';
+@import 'components/user-picker';
 
 
 // PAGES
-@import "pages/login";
-@import "pages/dashboard";
-@import "pages/playlist";
-@import "pages/admin";
-@import "pages/alerting";
-@import "pages/history";
-@import "pages/plugins";
-@import "pages/signup";
-@import "pages/styleguide";
-@import "pages/errorpage";
-@import "old_responsive";
-@import "components/view_states.scss";
+@import 'pages/login';
+@import 'pages/dashboard';
+@import 'pages/playlist';
+@import 'pages/admin';
+@import 'pages/alerting';
+@import 'pages/history';
+@import 'pages/plugins';
+@import 'pages/signup';
+@import 'pages/styleguide';
+@import 'pages/errorpage';
+@import 'old_responsive';
+@import 'components/view_states.scss';

+ 9 - 4
public/sass/base/_type.scss

@@ -299,7 +299,7 @@ blockquote {
     line-height: $line-height-base;
     color: $gray-2;
     &:before {
-      content: "\2014 \00A0";
+      content: '\2014 \00A0';
     }
   }
 
 
@@ -316,10 +316,10 @@ blockquote {
     }
     small {
       &:before {
-        content: "";
+        content: '';
       }
       &:after {
-        content: "\00A0 \2014";
+        content: '\00A0 \2014';
       }
     }
   }
@@ -330,7 +330,7 @@ q:before,
 q:after,
 blockquote:before,
 blockquote:after {
-  content: "";
+  content: '';
 }
 
 
 // Addresses
@@ -409,3 +409,8 @@ a.external-link {
 .no-wrap {
   white-space: nowrap;
 }
+
+.highlight-search-match {
+  background: transparent;
+  color: $yellow;
+}

+ 73 - 0
public/sass/components/_form_select_box.scss

@@ -0,0 +1,73 @@
+$select-input-height: 35px;
+$select-menu-max-height: 300px;
+$select-item-font-size: $font-size-base;
+$select-item-bg: $dropdownBackground;
+$select-item-fg: $input-color;
+$select-option-bg: $dropdownBackground;
+$select-option-color: $input-color;
+$select-noresults-color: $text-color;
+$select-input-bg: $input-bg;
+$select-input-border-color: $input-border-color;
+$select-menu-box-shadow: $menu-dropdown-shadow;
+
+@import '../../../node_modules/react-select/scss/default.scss';
+
+@mixin select-control() {
+  width: 100%;
+  margin-right: $gf-form-margin;
+  @include border-radius($input-border-radius-sm);
+  background-color: $input-bg;
+}
+
+@mixin select-control-focus() {
+  border-color: $input-border-focus;
+  outline: none;
+  $shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px $input-box-shadow-focus;
+  @include box-shadow($shadow);
+}
+
+.gf-form-input--form-dropdown {
+  padding: 0;
+  border: 0;
+  overflow: visible;
+
+  .Select-placeholder {
+    color: $gray-4;
+  }
+
+  > .Select-control {
+    @include select-control();
+    border-color: $dark-3;
+  }
+
+  &.is-open > .Select-control {
+    background: transparent;
+    border-color: $dark-3;
+  }
+
+  &.is-focused > .Select-control {
+    background-color: $input-bg;
+    @include select-control-focus();
+  }
+
+  .Select-menu-outer {
+    border: 0;
+    width: auto;
+  }
+
+  .Select-option.is-focused {
+    background-color: $dropdownLinkBackgroundHover;
+    color: $dropdownLinkColorHover;
+
+    &::before {
+      position: absolute;
+      left: 0;
+      top: 0;
+      height: 100%;
+      width: 2px;
+      display: block;
+      content: '';
+      background-image: linear-gradient(to bottom, #ffd500 0%, #ff4400 99%, #ff4400 100%);
+    }
+  }
+}

+ 1 - 0
public/sass/components/_gf-form.scss

@@ -33,6 +33,7 @@ $input-border: 1px solid $input-border-color;
 
 
 .gf-form--has-input-icon {
   position: relative;
+  margin-right: $gf-form-margin;

   .gf-form-input-icon {
     position: absolute;

+ 9 - 1
public/sass/components/_panel_add_panel.scss

@@ -7,12 +7,20 @@
   display: flex;
   align-items: center;

-  i {
+  .gicon {
     font-size: 30px;
     margin-right: $spacer;
   }
 }

+.add-panel__close {
+  margin-left: auto;
+  background-color: transparent;
+  border: 0;
+  font-size: 16px;
+  margin-right: -10px;
+}
+
 .add-panel__title {
   font-size: $font-size-md;
   margin-right: $spacer/2;

+ 79 - 0
public/sass/components/_popper.scss

@@ -0,0 +1,79 @@
+.popper {
+  position: absolute;
+  background: $tooltipBackground;
+  color: $tooltipColor;
+  width: 150px;
+  border-radius: 3px;
+  box-shadow: 0 0 2px rgba(0, 0, 0, 0.5);
+  padding: 10px;
+  text-align: center;
+}
+.popper .popper__arrow {
+  width: 0;
+  height: 0;
+  border-style: solid;
+  position: absolute;
+  margin: 5px;
+}
+
+.popper .popper__arrow {
+  border-color: $tooltipBackground;
+}
+
+.popper[data-placement^='top'] {
+  margin-bottom: 5px;
+}
+.popper[data-placement^='top'] .popper__arrow {
+  border-width: 5px 5px 0 5px;
+  border-left-color: transparent;
+  border-right-color: transparent;
+  border-bottom-color: transparent;
+  bottom: -5px;
+  left: calc(50% - 5px);
+  margin-top: 0;
+  margin-bottom: 0;
+}
+.popper[data-placement^='bottom'] {
+  margin-top: 5px;
+}
+.popper[data-placement^='bottom'] .popper__arrow {
+  border-width: 0 5px 5px 5px;
+  border-left-color: transparent;
+  border-right-color: transparent;
+  border-top-color: transparent;
+  top: -5px;
+  left: calc(50% - 5px);
+  margin-top: 0;
+  margin-bottom: 0;
+}
+.popper[data-placement^='right'] {
+  margin-left: 5px;
+}
+.popper[data-placement^='right'] .popper__arrow {
+  border-width: 5px 5px 5px 0;
+  border-left-color: transparent;
+  border-top-color: transparent;
+  border-bottom-color: transparent;
+  left: -5px;
+  top: calc(50% - 5px);
+  margin-left: 0;
+  margin-right: 0;
+}
+.popper[data-placement^='left'] {
+  margin-right: 5px;
+}
+.popper[data-placement^='left'] .popper__arrow {
+  border-width: 5px 0 5px 5px;
+  border-top-color: transparent;
+  border-right-color: transparent;
+  border-bottom-color: transparent;
+  right: -5px;
+  top: calc(50% - 5px);
+  margin-left: 0;
+  margin-right: 0;
+}
+
+.popper__target,
+.popper__manager {
+  display: inline-block;
+}

+ 1 - 1
public/sass/components/_submenu.scss

@@ -68,7 +68,7 @@
   overflow-x: hidden;
   background-color: $dropdownBackground;
   box-shadow: 0 0 25px 0 rgba(0, 0, 0, 0.4);
-  z-index: 1000;
+  z-index: $zindex-typeahead;
   font-size: $font-size-base;
   border-radius: 3px 3px 0 0;
   border: 1px solid $tight-form-func-bg;

+ 12 - 0
public/sass/components/_user-picker.scss

@@ -0,0 +1,12 @@
+.user-picker-option__button {
+  position: relative;
+  text-align: left;
+  width: 100%;
+  display: block;
+  border-radius: 0;
+}
+.user-picker-option__avatar {
+  width: 20px;
+  display: inline-block;
+  margin-right: 10px;
+}

+ 91 - 63
public/sass/pages/_alerting.scss

@@ -28,69 +28,6 @@
   border: 0;
 }
 
 
-// Alert List
-.alert-list {
-  display: flex;
-  flex-direction: row;
-  justify-content: space-between;
-}
-
-.alert-list-icon {
-  font-weight: bold;
-  display: flex;
-  justify-content: center;
-  align-items: center;
-  .icon-gf,
-  .fa {
-    font-size: 200%;
-    position: relative;
-    top: 2px;
-  }
-}
-
-.alert-list-body {
-  display: flex;
-}
-
-.alert-list-main {
-  padding: 0 2rem;
-  display: flex;
-  flex-direction: column;
-  justify-content: center;
-}
-
-.alert-list-title {
-  font-size: $font-size-base;
-  margin: 0;
-  font-weight: 600;
-}
-
-.alert-list-state {
-  font-weight: bold;
-}
-
-.alert-list-text {
-  font-size: $font-size-sm;
-  margin: 0;
-  line-height: 1.5rem;
-  color: $text-color-weak;
-}
-
-.alert-list-info {
-  color: $text-color;
-}
-
-.alert-list-info-left {
-  padding-left: 2rem;
-}
-
-.alert-list-footer {
-  display: flex;
-  justify-content: space-between;
-  flex-direction: column;
-  align-items: flex-end;
-}
-
 .panel-has-alert {
   .panel-alert-icon:before {
     content: '\e611';
@@ -136,3 +73,94 @@
     opacity: 1;
   }
 }
+
+// Alert List
+
+.alert-rule-list {
+  display: flex;
+  flex-direction: row;
+  flex-wrap: wrap;
+  justify-content: space-between;
+  list-style-type: none;
+}
+
+.alert-rule-item {
+  display: flex;
+  width: 100%;
+  height: 100%;
+  background: $card-background;
+  box-shadow: $card-shadow;
+  padding: 4px 8px;
+  border-radius: 4px;
+  margin-bottom: 4px;
+}
+
+.alert-rule-item__body {
+  display: flex;
+  flex-direction: column;
+  flex-grow: 1;
+  overflow: hidden;
+}
+
+.alert-rule-item__icon {
+  display: flex;
+  justify-content: center;
+  align-items: center;
+  width: 40px;
+  padding: 0 28px 0 16px;
+  .icon-gf,
+  .fa {
+    font-size: 200%;
+    position: relative;
+    top: 2px;
+  }
+}
+
+.alert-rule-item__header {
+  display: flex;
+  flex-direction: column;
+  justify-content: space-between;
+}
+
+.alert-rule-item__name {
+  font-size: $font-size-base;
+  margin: 0;
+  font-weight: $font-weight-semi-bold;
+}
+
+.alert-list__btn {
+  margin: 0 2px;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+}
+
+.alert-rule-item__text {
+  font-weight: bold;
+  font-size: $font-size-sm;
+  margin: 0;
+}
+
+.alert-rule-item__time {
+  color: $text-color-weak;
+  font-weight: normal;
+  white-space: nowrap;
+}
+
+.alert-rule-item__info {
+  //color: $text-color;
+  font-weight: normal;
+  flex-grow: 2;
+  display: flex;
+  align-items: flex-end;
+}
+
+.alert-rule-item__actions {
+  display: flex;
+  align-items: center;
+}
+
+.alert-tesint {
+  display: flex;
+}

+ 37 - 0
vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile

@@ -0,0 +1,37 @@
+# Go support for Protocol Buffers - Google's data interchange format
+#
+# Copyright 2010 The Go Authors.  All rights reserved.
+# https://github.com/golang/protobuf
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Not stored here, but descriptor.proto is in https://github.com/google/protobuf/
+# at src/google/protobuf/descriptor.proto
+regenerate:
+	@echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION
+	cp $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto .
+	protoc --go_out=../../../../.. -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto

+ 2215 - 0
vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go

@@ -0,0 +1,2215 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/protobuf/descriptor.proto
+
+/*
+Package descriptor is a generated protocol buffer package.
+
+It is generated from these files:
+	google/protobuf/descriptor.proto
+
+It has these top-level messages:
+	FileDescriptorSet
+	FileDescriptorProto
+	DescriptorProto
+	ExtensionRangeOptions
+	FieldDescriptorProto
+	OneofDescriptorProto
+	EnumDescriptorProto
+	EnumValueDescriptorProto
+	ServiceDescriptorProto
+	MethodDescriptorProto
+	FileOptions
+	MessageOptions
+	FieldOptions
+	OneofOptions
+	EnumOptions
+	EnumValueOptions
+	ServiceOptions
+	MethodOptions
+	UninterpretedOption
+	SourceCodeInfo
+	GeneratedCodeInfo
+*/
+package descriptor
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type FieldDescriptorProto_Type int32
+
+const (
+	// 0 is reserved for errors.
+	// Order is weird for historical reasons.
+	FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1
+	FieldDescriptorProto_TYPE_FLOAT  FieldDescriptorProto_Type = 2
+	// Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+	// negative values are likely.
+	FieldDescriptorProto_TYPE_INT64  FieldDescriptorProto_Type = 3
+	FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4
+	// Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+	// negative values are likely.
+	FieldDescriptorProto_TYPE_INT32   FieldDescriptorProto_Type = 5
+	FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6
+	FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7
+	FieldDescriptorProto_TYPE_BOOL    FieldDescriptorProto_Type = 8
+	FieldDescriptorProto_TYPE_STRING  FieldDescriptorProto_Type = 9
+	// Tag-delimited aggregate.
+	// Group type is deprecated and not supported in proto3. However, Proto3
+	// implementations should still be able to parse the group wire format and
+	// treat group fields as unknown fields.
+	FieldDescriptorProto_TYPE_GROUP   FieldDescriptorProto_Type = 10
+	FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11
+	// New in version 2.
+	FieldDescriptorProto_TYPE_BYTES    FieldDescriptorProto_Type = 12
+	FieldDescriptorProto_TYPE_UINT32   FieldDescriptorProto_Type = 13
+	FieldDescriptorProto_TYPE_ENUM     FieldDescriptorProto_Type = 14
+	FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15
+	FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16
+	FieldDescriptorProto_TYPE_SINT32   FieldDescriptorProto_Type = 17
+	FieldDescriptorProto_TYPE_SINT64   FieldDescriptorProto_Type = 18
+)
+
+var FieldDescriptorProto_Type_name = map[int32]string{
+	1:  "TYPE_DOUBLE",
+	2:  "TYPE_FLOAT",
+	3:  "TYPE_INT64",
+	4:  "TYPE_UINT64",
+	5:  "TYPE_INT32",
+	6:  "TYPE_FIXED64",
+	7:  "TYPE_FIXED32",
+	8:  "TYPE_BOOL",
+	9:  "TYPE_STRING",
+	10: "TYPE_GROUP",
+	11: "TYPE_MESSAGE",
+	12: "TYPE_BYTES",
+	13: "TYPE_UINT32",
+	14: "TYPE_ENUM",
+	15: "TYPE_SFIXED32",
+	16: "TYPE_SFIXED64",
+	17: "TYPE_SINT32",
+	18: "TYPE_SINT64",
+}
+var FieldDescriptorProto_Type_value = map[string]int32{
+	"TYPE_DOUBLE":   1,
+	"TYPE_FLOAT":    2,
+	"TYPE_INT64":    3,
+	"TYPE_UINT64":   4,
+	"TYPE_INT32":    5,
+	"TYPE_FIXED64":  6,
+	"TYPE_FIXED32":  7,
+	"TYPE_BOOL":     8,
+	"TYPE_STRING":   9,
+	"TYPE_GROUP":    10,
+	"TYPE_MESSAGE":  11,
+	"TYPE_BYTES":    12,
+	"TYPE_UINT32":   13,
+	"TYPE_ENUM":     14,
+	"TYPE_SFIXED32": 15,
+	"TYPE_SFIXED64": 16,
+	"TYPE_SINT32":   17,
+	"TYPE_SINT64":   18,
+}
+
+func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type {
+	p := new(FieldDescriptorProto_Type)
+	*p = x
+	return p
+}
+func (x FieldDescriptorProto_Type) String() string {
+	return proto.EnumName(FieldDescriptorProto_Type_name, int32(x))
+}
+func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type")
+	if err != nil {
+		return err
+	}
+	*x = FieldDescriptorProto_Type(value)
+	return nil
+}
+func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} }
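[Editor's sketch, not part of this commit: a minimal example of how the generated enum helpers above are typically used, assuming the package is imported by its path github.com/golang/protobuf/protoc-gen-go/descriptor.]

package main

import (
	"fmt"

	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	// String() resolves through the generated name map above.
	t := descriptor.FieldDescriptorProto_TYPE_INT64
	fmt.Println(t) // TYPE_INT64

	// UnmarshalJSON accepts the symbolic name (via proto.UnmarshalJSONEnum).
	var u descriptor.FieldDescriptorProto_Type
	if err := u.UnmarshalJSON([]byte(`"TYPE_BOOL"`)); err == nil {
		fmt.Println(u == descriptor.FieldDescriptorProto_TYPE_BOOL) // true
	}

	// Enum() returns a pointer, as required by the optional (pointer-typed)
	// fields of the descriptor messages defined later in this file.
	_ = descriptor.FieldDescriptorProto_TYPE_STRING.Enum()
}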
+
+type FieldDescriptorProto_Label int32
+
+const (
+	// 0 is reserved for errors
+	FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1
+	FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2
+	FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3
+)
+
+var FieldDescriptorProto_Label_name = map[int32]string{
+	1: "LABEL_OPTIONAL",
+	2: "LABEL_REQUIRED",
+	3: "LABEL_REPEATED",
+}
+var FieldDescriptorProto_Label_value = map[string]int32{
+	"LABEL_OPTIONAL": 1,
+	"LABEL_REQUIRED": 2,
+	"LABEL_REPEATED": 3,
+}
+
+func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label {
+	p := new(FieldDescriptorProto_Label)
+	*p = x
+	return p
+}
+func (x FieldDescriptorProto_Label) String() string {
+	return proto.EnumName(FieldDescriptorProto_Label_name, int32(x))
+}
+func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label")
+	if err != nil {
+		return err
+	}
+	*x = FieldDescriptorProto_Label(value)
+	return nil
+}
+func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{4, 1}
+}
+
+// Generated classes can be optimized for speed or code size.
+type FileOptions_OptimizeMode int32
+
+const (
+	FileOptions_SPEED FileOptions_OptimizeMode = 1
+	// etc.
+	FileOptions_CODE_SIZE    FileOptions_OptimizeMode = 2
+	FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3
+)
+
+var FileOptions_OptimizeMode_name = map[int32]string{
+	1: "SPEED",
+	2: "CODE_SIZE",
+	3: "LITE_RUNTIME",
+}
+var FileOptions_OptimizeMode_value = map[string]int32{
+	"SPEED":        1,
+	"CODE_SIZE":    2,
+	"LITE_RUNTIME": 3,
+}
+
+func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode {
+	p := new(FileOptions_OptimizeMode)
+	*p = x
+	return p
+}
+func (x FileOptions_OptimizeMode) String() string {
+	return proto.EnumName(FileOptions_OptimizeMode_name, int32(x))
+}
+func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode")
+	if err != nil {
+		return err
+	}
+	*x = FileOptions_OptimizeMode(value)
+	return nil
+}
+func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{10, 0} }
+
+type FieldOptions_CType int32
+
+const (
+	// Default mode.
+	FieldOptions_STRING       FieldOptions_CType = 0
+	FieldOptions_CORD         FieldOptions_CType = 1
+	FieldOptions_STRING_PIECE FieldOptions_CType = 2
+)
+
+var FieldOptions_CType_name = map[int32]string{
+	0: "STRING",
+	1: "CORD",
+	2: "STRING_PIECE",
+}
+var FieldOptions_CType_value = map[string]int32{
+	"STRING":       0,
+	"CORD":         1,
+	"STRING_PIECE": 2,
+}
+
+func (x FieldOptions_CType) Enum() *FieldOptions_CType {
+	p := new(FieldOptions_CType)
+	*p = x
+	return p
+}
+func (x FieldOptions_CType) String() string {
+	return proto.EnumName(FieldOptions_CType_name, int32(x))
+}
+func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType")
+	if err != nil {
+		return err
+	}
+	*x = FieldOptions_CType(value)
+	return nil
+}
+func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 0} }
+
+type FieldOptions_JSType int32
+
+const (
+	// Use the default type.
+	FieldOptions_JS_NORMAL FieldOptions_JSType = 0
+	// Use JavaScript strings.
+	FieldOptions_JS_STRING FieldOptions_JSType = 1
+	// Use JavaScript numbers.
+	FieldOptions_JS_NUMBER FieldOptions_JSType = 2
+)
+
+var FieldOptions_JSType_name = map[int32]string{
+	0: "JS_NORMAL",
+	1: "JS_STRING",
+	2: "JS_NUMBER",
+}
+var FieldOptions_JSType_value = map[string]int32{
+	"JS_NORMAL": 0,
+	"JS_STRING": 1,
+	"JS_NUMBER": 2,
+}
+
+func (x FieldOptions_JSType) Enum() *FieldOptions_JSType {
+	p := new(FieldOptions_JSType)
+	*p = x
+	return p
+}
+func (x FieldOptions_JSType) String() string {
+	return proto.EnumName(FieldOptions_JSType_name, int32(x))
+}
+func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType")
+	if err != nil {
+		return err
+	}
+	*x = FieldOptions_JSType(value)
+	return nil
+}
+func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 1} }
+
+// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+// or neither? HTTP based RPC implementation may choose GET verb for safe
+// methods, and PUT verb for idempotent methods instead of the default POST.
+type MethodOptions_IdempotencyLevel int32
+
+const (
+	MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0
+	MethodOptions_NO_SIDE_EFFECTS     MethodOptions_IdempotencyLevel = 1
+	MethodOptions_IDEMPOTENT          MethodOptions_IdempotencyLevel = 2
+)
+
+var MethodOptions_IdempotencyLevel_name = map[int32]string{
+	0: "IDEMPOTENCY_UNKNOWN",
+	1: "NO_SIDE_EFFECTS",
+	2: "IDEMPOTENT",
+}
+var MethodOptions_IdempotencyLevel_value = map[string]int32{
+	"IDEMPOTENCY_UNKNOWN": 0,
+	"NO_SIDE_EFFECTS":     1,
+	"IDEMPOTENT":          2,
+}
+
+func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel {
+	p := new(MethodOptions_IdempotencyLevel)
+	*p = x
+	return p
+}
+func (x MethodOptions_IdempotencyLevel) String() string {
+	return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x))
+}
+func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel")
+	if err != nil {
+		return err
+	}
+	*x = MethodOptions_IdempotencyLevel(value)
+	return nil
+}
+func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{17, 0}
+}
+
+// The protocol compiler can output a FileDescriptorSet containing the .proto
+// files it parses.
+type FileDescriptorSet struct {
+	File             []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"`
+	XXX_unrecognized []byte                 `json:"-"`
+}
+
+func (m *FileDescriptorSet) Reset()                    { *m = FileDescriptorSet{} }
+func (m *FileDescriptorSet) String() string            { return proto.CompactTextString(m) }
+func (*FileDescriptorSet) ProtoMessage()               {}
+func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto {
+	if m != nil {
+		return m.File
+	}
+	return nil
+}
+
+// Describes a complete .proto file.
+type FileDescriptorProto struct {
+	Name    *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"`
+	// Names of files imported by this file.
+	Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"`
+	// Indexes of the public imported files in the dependency list above.
+	PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"`
+	// Indexes of the weak imported files in the dependency list.
+	// For Google-internal migration only. Do not use.
+	WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"`
+	// All top-level definitions in this file.
+	MessageType []*DescriptorProto        `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"`
+	EnumType    []*EnumDescriptorProto    `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"`
+	Service     []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"`
+	Extension   []*FieldDescriptorProto   `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"`
+	Options     *FileOptions              `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"`
+	// This field contains optional information about the original source code.
+	// You may safely remove this entire field without harming runtime
+	// functionality of the descriptors -- the information is needed only by
+	// development tools.
+	SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"`
+	// The syntax of the proto file.
+	// The supported values are "proto2" and "proto3".
+	Syntax           *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"`
+	XXX_unrecognized []byte  `json:"-"`
+}
+
+func (m *FileDescriptorProto) Reset()                    { *m = FileDescriptorProto{} }
+func (m *FileDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*FileDescriptorProto) ProtoMessage()               {}
+func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+func (m *FileDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *FileDescriptorProto) GetPackage() string {
+	if m != nil && m.Package != nil {
+		return *m.Package
+	}
+	return ""
+}
+
+func (m *FileDescriptorProto) GetDependency() []string {
+	if m != nil {
+		return m.Dependency
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetPublicDependency() []int32 {
+	if m != nil {
+		return m.PublicDependency
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetWeakDependency() []int32 {
+	if m != nil {
+		return m.WeakDependency
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetMessageType() []*DescriptorProto {
+	if m != nil {
+		return m.MessageType
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto {
+	if m != nil {
+		return m.EnumType
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetService() []*ServiceDescriptorProto {
+	if m != nil {
+		return m.Service
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetExtension() []*FieldDescriptorProto {
+	if m != nil {
+		return m.Extension
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetOptions() *FileOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo {
+	if m != nil {
+		return m.SourceCodeInfo
+	}
+	return nil
+}
+
+func (m *FileDescriptorProto) GetSyntax() string {
+	if m != nil && m.Syntax != nil {
+		return *m.Syntax
+	}
+	return ""
+}
+
+// Describes a message type.
+type DescriptorProto struct {
+	Name           *string                           `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Field          []*FieldDescriptorProto           `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"`
+	Extension      []*FieldDescriptorProto           `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"`
+	NestedType     []*DescriptorProto                `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"`
+	EnumType       []*EnumDescriptorProto            `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"`
+	ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"`
+	OneofDecl      []*OneofDescriptorProto           `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"`
+	Options        *MessageOptions                   `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"`
+	ReservedRange  []*DescriptorProto_ReservedRange  `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"`
+	// Reserved field names, which may not be used by fields in the same message.
+	// A given name may only be reserved once.
+	ReservedName     []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"`
+	XXX_unrecognized []byte   `json:"-"`
+}
+
+func (m *DescriptorProto) Reset()                    { *m = DescriptorProto{} }
+func (m *DescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*DescriptorProto) ProtoMessage()               {}
+func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+func (m *DescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *DescriptorProto) GetField() []*FieldDescriptorProto {
+	if m != nil {
+		return m.Field
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetExtension() []*FieldDescriptorProto {
+	if m != nil {
+		return m.Extension
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetNestedType() []*DescriptorProto {
+	if m != nil {
+		return m.NestedType
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetEnumType() []*EnumDescriptorProto {
+	if m != nil {
+		return m.EnumType
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange {
+	if m != nil {
+		return m.ExtensionRange
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto {
+	if m != nil {
+		return m.OneofDecl
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetOptions() *MessageOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange {
+	if m != nil {
+		return m.ReservedRange
+	}
+	return nil
+}
+
+func (m *DescriptorProto) GetReservedName() []string {
+	if m != nil {
+		return m.ReservedName
+	}
+	return nil
+}
+
+type DescriptorProto_ExtensionRange struct {
+	Start            *int32                 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"`
+	End              *int32                 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"`
+	Options          *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte                 `json:"-"`
+}
+
+func (m *DescriptorProto_ExtensionRange) Reset()         { *m = DescriptorProto_ExtensionRange{} }
+func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) }
+func (*DescriptorProto_ExtensionRange) ProtoMessage()    {}
+func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{2, 0}
+}
+
+func (m *DescriptorProto_ExtensionRange) GetStart() int32 {
+	if m != nil && m.Start != nil {
+		return *m.Start
+	}
+	return 0
+}
+
+func (m *DescriptorProto_ExtensionRange) GetEnd() int32 {
+	if m != nil && m.End != nil {
+		return *m.End
+	}
+	return 0
+}
+
+func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+// Range of reserved tag numbers. Reserved tag numbers may not be used by
+// fields or extension ranges in the same message. Reserved ranges may
+// not overlap.
+type DescriptorProto_ReservedRange struct {
+	Start            *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"`
+	End              *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *DescriptorProto_ReservedRange) Reset()         { *m = DescriptorProto_ReservedRange{} }
+func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) }
+func (*DescriptorProto_ReservedRange) ProtoMessage()    {}
+func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{2, 1}
+}
+
+func (m *DescriptorProto_ReservedRange) GetStart() int32 {
+	if m != nil && m.Start != nil {
+		return *m.Start
+	}
+	return 0
+}
+
+func (m *DescriptorProto_ReservedRange) GetEnd() int32 {
+	if m != nil && m.End != nil {
+		return *m.End
+	}
+	return 0
+}
+
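[Editor's sketch, not part of this commit: a hypothetical illustration of how a reserved-tag declaration in a .proto file surfaces in the ReservedRange message above; in descriptor.proto the range start is inclusive and the end exclusive, and the message/field names below are invented for the example.]

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	msg := &descriptor.DescriptorProto{
		Name: proto.String("Sample"), // hypothetical message name
		ReservedRange: []*descriptor.DescriptorProto_ReservedRange{
			{Start: proto.Int32(5), End: proto.Int32(8)}, // covers tags 5, 6 and 7
		},
		ReservedName: []string{"legacy_field"},
	}
	fmt.Println(msg.GetReservedRange()[0].GetStart()) // 5
	fmt.Println(msg.GetReservedName())                // [legacy_field]
}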
+type ExtensionRangeOptions struct {
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *ExtensionRangeOptions) Reset()                    { *m = ExtensionRangeOptions{} }
+func (m *ExtensionRangeOptions) String() string            { return proto.CompactTextString(m) }
+func (*ExtensionRangeOptions) ProtoMessage()               {}
+func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
+
+var extRange_ExtensionRangeOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_ExtensionRangeOptions
+}
+
+func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+// Describes a field within a message.
+type FieldDescriptorProto struct {
+	Name   *string                     `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Number *int32                      `protobuf:"varint,3,opt,name=number" json:"number,omitempty"`
+	Label  *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"`
+	// If type_name is set, this need not be set.  If both this and type_name
+	// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+	Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"`
+	// For message and enum types, this is the name of the type.  If the name
+	// starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
+	// rules are used to find the type (i.e. first the nested types within this
+	// message are searched, then within the parent, on up to the root
+	// namespace).
+	TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"`
+	// For extensions, this is the name of the type being extended.  It is
+	// resolved in the same manner as type_name.
+	Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"`
+	// For numeric types, contains the original text representation of the value.
+	// For booleans, "true" or "false".
+	// For strings, contains the default text contents (not escaped in any way).
+	// For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
+	// TODO(kenton):  Base-64 encode?
+	DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"`
+	// If set, gives the index of a oneof in the containing type's oneof_decl
+	// list.  This field is a member of that oneof.
+	OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"`
+	// JSON name of this field. The value is set by protocol compiler. If the
+	// user has set a "json_name" option on this field, that option's value
+	// will be used. Otherwise, it's deduced from the field's name by converting
+	// it to camelCase.
+	JsonName         *string       `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"`
+	Options          *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte        `json:"-"`
+}
+
+func (m *FieldDescriptorProto) Reset()                    { *m = FieldDescriptorProto{} }
+func (m *FieldDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*FieldDescriptorProto) ProtoMessage()               {}
+func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
+
+func (m *FieldDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *FieldDescriptorProto) GetNumber() int32 {
+	if m != nil && m.Number != nil {
+		return *m.Number
+	}
+	return 0
+}
+
+func (m *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label {
+	if m != nil && m.Label != nil {
+		return *m.Label
+	}
+	return FieldDescriptorProto_LABEL_OPTIONAL
+}
+
+func (m *FieldDescriptorProto) GetType() FieldDescriptorProto_Type {
+	if m != nil && m.Type != nil {
+		return *m.Type
+	}
+	return FieldDescriptorProto_TYPE_DOUBLE
+}
+
+func (m *FieldDescriptorProto) GetTypeName() string {
+	if m != nil && m.TypeName != nil {
+		return *m.TypeName
+	}
+	return ""
+}
+
+func (m *FieldDescriptorProto) GetExtendee() string {
+	if m != nil && m.Extendee != nil {
+		return *m.Extendee
+	}
+	return ""
+}
+
+func (m *FieldDescriptorProto) GetDefaultValue() string {
+	if m != nil && m.DefaultValue != nil {
+		return *m.DefaultValue
+	}
+	return ""
+}
+
+func (m *FieldDescriptorProto) GetOneofIndex() int32 {
+	if m != nil && m.OneofIndex != nil {
+		return *m.OneofIndex
+	}
+	return 0
+}
+
+func (m *FieldDescriptorProto) GetJsonName() string {
+	if m != nil && m.JsonName != nil {
+		return *m.JsonName
+	}
+	return ""
+}
+
+func (m *FieldDescriptorProto) GetOptions() *FieldOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
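[Editor's sketch, not part of this commit: the construction pattern implied by the pointer-typed fields and nil-safe getters above, using the proto.String/proto.Int32 helpers and the generated Enum() methods; the field name "title" is only an example.]

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	f := &descriptor.FieldDescriptorProto{
		Name:   proto.String("title"),
		Number: proto.Int32(1),
		Label:  descriptor.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
		Type:   descriptor.FieldDescriptorProto_TYPE_STRING.Enum(),
	}
	fmt.Println(f.GetName(), f.GetType()) // title TYPE_STRING
	fmt.Println(f.GetTypeName() == "")    // true — unset optional fields yield zero values
}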
+// Describes a oneof.
+type OneofDescriptorProto struct {
+	Name             *string       `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Options          *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte        `json:"-"`
+}
+
+func (m *OneofDescriptorProto) Reset()                    { *m = OneofDescriptorProto{} }
+func (m *OneofDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*OneofDescriptorProto) ProtoMessage()               {}
+func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+func (m *OneofDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *OneofDescriptorProto) GetOptions() *OneofOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+// Describes an enum type.
+type EnumDescriptorProto struct {
+	Name             *string                     `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Value            []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"`
+	Options          *EnumOptions                `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte                      `json:"-"`
+}
+
+func (m *EnumDescriptorProto) Reset()                    { *m = EnumDescriptorProto{} }
+func (m *EnumDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*EnumDescriptorProto) ProtoMessage()               {}
+func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
+
+func (m *EnumDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *EnumDescriptorProto) GetOptions() *EnumOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+// Describes a value within an enum.
+type EnumValueDescriptorProto struct {
+	Name             *string           `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Number           *int32            `protobuf:"varint,2,opt,name=number" json:"number,omitempty"`
+	Options          *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte            `json:"-"`
+}
+
+func (m *EnumValueDescriptorProto) Reset()                    { *m = EnumValueDescriptorProto{} }
+func (m *EnumValueDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*EnumValueDescriptorProto) ProtoMessage()               {}
+func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
+
+func (m *EnumValueDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *EnumValueDescriptorProto) GetNumber() int32 {
+	if m != nil && m.Number != nil {
+		return *m.Number
+	}
+	return 0
+}
+
+func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+// Describes a service.
+type ServiceDescriptorProto struct {
+	Name             *string                  `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Method           []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"`
+	Options          *ServiceOptions          `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"`
+	XXX_unrecognized []byte                   `json:"-"`
+}
+
+func (m *ServiceDescriptorProto) Reset()                    { *m = ServiceDescriptorProto{} }
+func (m *ServiceDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*ServiceDescriptorProto) ProtoMessage()               {}
+func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
+
+func (m *ServiceDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto {
+	if m != nil {
+		return m.Method
+	}
+	return nil
+}
+
+func (m *ServiceDescriptorProto) GetOptions() *ServiceOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+// Describes a method of a service.
+type MethodDescriptorProto struct {
+	Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	// Input and output type names.  These are resolved in the same way as
+	// FieldDescriptorProto.type_name, but must refer to a message type.
+	InputType  *string        `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"`
+	OutputType *string        `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"`
+	Options    *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"`
+	// Identifies if client streams multiple client messages
+	ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"`
+	// Identifies if server streams multiple server messages
+	ServerStreaming  *bool  `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *MethodDescriptorProto) Reset()                    { *m = MethodDescriptorProto{} }
+func (m *MethodDescriptorProto) String() string            { return proto.CompactTextString(m) }
+func (*MethodDescriptorProto) ProtoMessage()               {}
+func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
+
+const Default_MethodDescriptorProto_ClientStreaming bool = false
+const Default_MethodDescriptorProto_ServerStreaming bool = false
+
+func (m *MethodDescriptorProto) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *MethodDescriptorProto) GetInputType() string {
+	if m != nil && m.InputType != nil {
+		return *m.InputType
+	}
+	return ""
+}
+
+func (m *MethodDescriptorProto) GetOutputType() string {
+	if m != nil && m.OutputType != nil {
+		return *m.OutputType
+	}
+	return ""
+}
+
+func (m *MethodDescriptorProto) GetOptions() *MethodOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+func (m *MethodDescriptorProto) GetClientStreaming() bool {
+	if m != nil && m.ClientStreaming != nil {
+		return *m.ClientStreaming
+	}
+	return Default_MethodDescriptorProto_ClientStreaming
+}
+
+func (m *MethodDescriptorProto) GetServerStreaming() bool {
+	if m != nil && m.ServerStreaming != nil {
+		return *m.ServerStreaming
+	}
+	return Default_MethodDescriptorProto_ServerStreaming
+}
+
+type FileOptions struct {
+	// Sets the Java package where classes generated from this .proto will be
+	// placed.  By default, the proto package is used, but this is often
+	// inappropriate because proto packages do not normally start with backwards
+	// domain names.
+	JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"`
+	// If set, all the classes from the .proto file are wrapped in a single
+	// outer class with the given name.  This applies to both Proto1
+	// (equivalent to the old "--one_java_file" option) and Proto2 (where
+	// a .proto always translates to a single class, but you may want to
+	// explicitly choose the class name).
+	JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"`
+	// If set true, then the Java code generator will generate a separate .java
+	// file for each top-level message, enum, and service defined in the .proto
+	// file.  Thus, these types will *not* be nested inside the outer class
+	// named by java_outer_classname.  However, the outer class will still be
+	// generated to contain the file's getDescriptor() method as well as any
+	// top-level extensions defined in the file.
+	JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"`
+	// This option does nothing.
+	JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"`
+	// If set true, then the Java2 code generator will generate code that
+	// throws an exception whenever an attempt is made to assign a non-UTF-8
+	// byte sequence to a string field.
+	// Message reflection will do the same.
+	// However, an extension field still accepts non-UTF-8 byte sequences.
+	// This option has no effect on when used with the lite runtime.
+	JavaStringCheckUtf8 *bool                     `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"`
+	OptimizeFor         *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"`
+	// Sets the Go package where structs generated from this .proto will be
+	// placed. If omitted, the Go package will be derived from the following:
+	//   - The basename of the package import path, if provided.
+	//   - Otherwise, the package statement in the .proto file, if present.
+	//   - Otherwise, the basename of the .proto file, without extension.
+	GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"`
+	// Should generic services be generated in each language?  "Generic" services
+	// are not specific to any particular RPC system.  They are generated by the
+	// main code generators in each language (without additional plugins).
+	// Generic services were the only kind of service generation supported by
+	// early versions of google.protobuf.
+	//
+	// Generic services are now considered deprecated in favor of using plugins
+	// that generate code specific to your particular RPC system.  Therefore,
+	// these default to false.  Old code which depends on generic services should
+	// explicitly set them to true.
+	CcGenericServices   *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"`
+	JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"`
+	PyGenericServices   *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"`
+	PhpGenericServices  *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"`
+	// Is this file deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for everything in the file, or it will be completely ignored; in the very
+	// least, this is a formalization for deprecating files.
+	Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// Enables the use of arenas for the proto messages in this file. This applies
+	// only to generated classes for C++.
+	CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=0" json:"cc_enable_arenas,omitempty"`
+	// Sets the objective c class prefix which is prepended to all objective c
+	// generated classes from this .proto. There is no default.
+	ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"`
+	// Namespace for generated classes; defaults to the package.
+	CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"`
+	// By default Swift generators will take the proto package and CamelCase it
+	// replacing '.' with underscore and use that to prefix the types/symbols
+	// defined. When this options is provided, they will use this value instead
+	// to prefix the types/symbols defined.
+	SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"`
+	// Sets the php class prefix which is prepended to all php generated classes
+	// from this .proto. Default is empty.
+	PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"`
+	// Use this option to change the namespace of php generated classes. Default
+	// is empty. When this option is empty, the package name will be used for
+	// determining the namespace.
+	PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *FileOptions) Reset()                    { *m = FileOptions{} }
+func (m *FileOptions) String() string            { return proto.CompactTextString(m) }
+func (*FileOptions) ProtoMessage()               {}
+func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
+
+var extRange_FileOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_FileOptions
+}
+
+const Default_FileOptions_JavaMultipleFiles bool = false
+const Default_FileOptions_JavaStringCheckUtf8 bool = false
+const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED
+const Default_FileOptions_CcGenericServices bool = false
+const Default_FileOptions_JavaGenericServices bool = false
+const Default_FileOptions_PyGenericServices bool = false
+const Default_FileOptions_PhpGenericServices bool = false
+const Default_FileOptions_Deprecated bool = false
+const Default_FileOptions_CcEnableArenas bool = false
+
+func (m *FileOptions) GetJavaPackage() string {
+	if m != nil && m.JavaPackage != nil {
+		return *m.JavaPackage
+	}
+	return ""
+}
+
+func (m *FileOptions) GetJavaOuterClassname() string {
+	if m != nil && m.JavaOuterClassname != nil {
+		return *m.JavaOuterClassname
+	}
+	return ""
+}
+
+func (m *FileOptions) GetJavaMultipleFiles() bool {
+	if m != nil && m.JavaMultipleFiles != nil {
+		return *m.JavaMultipleFiles
+	}
+	return Default_FileOptions_JavaMultipleFiles
+}
+
+func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool {
+	if m != nil && m.JavaGenerateEqualsAndHash != nil {
+		return *m.JavaGenerateEqualsAndHash
+	}
+	return false
+}
+
+func (m *FileOptions) GetJavaStringCheckUtf8() bool {
+	if m != nil && m.JavaStringCheckUtf8 != nil {
+		return *m.JavaStringCheckUtf8
+	}
+	return Default_FileOptions_JavaStringCheckUtf8
+}
+
+func (m *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode {
+	if m != nil && m.OptimizeFor != nil {
+		return *m.OptimizeFor
+	}
+	return Default_FileOptions_OptimizeFor
+}
+
+func (m *FileOptions) GetGoPackage() string {
+	if m != nil && m.GoPackage != nil {
+		return *m.GoPackage
+	}
+	return ""
+}
+
+func (m *FileOptions) GetCcGenericServices() bool {
+	if m != nil && m.CcGenericServices != nil {
+		return *m.CcGenericServices
+	}
+	return Default_FileOptions_CcGenericServices
+}
+
+func (m *FileOptions) GetJavaGenericServices() bool {
+	if m != nil && m.JavaGenericServices != nil {
+		return *m.JavaGenericServices
+	}
+	return Default_FileOptions_JavaGenericServices
+}
+
+func (m *FileOptions) GetPyGenericServices() bool {
+	if m != nil && m.PyGenericServices != nil {
+		return *m.PyGenericServices
+	}
+	return Default_FileOptions_PyGenericServices
+}
+
+func (m *FileOptions) GetPhpGenericServices() bool {
+	if m != nil && m.PhpGenericServices != nil {
+		return *m.PhpGenericServices
+	}
+	return Default_FileOptions_PhpGenericServices
+}
+
+func (m *FileOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_FileOptions_Deprecated
+}
+
+func (m *FileOptions) GetCcEnableArenas() bool {
+	if m != nil && m.CcEnableArenas != nil {
+		return *m.CcEnableArenas
+	}
+	return Default_FileOptions_CcEnableArenas
+}
+
+func (m *FileOptions) GetObjcClassPrefix() string {
+	if m != nil && m.ObjcClassPrefix != nil {
+		return *m.ObjcClassPrefix
+	}
+	return ""
+}
+
+func (m *FileOptions) GetCsharpNamespace() string {
+	if m != nil && m.CsharpNamespace != nil {
+		return *m.CsharpNamespace
+	}
+	return ""
+}
+
+func (m *FileOptions) GetSwiftPrefix() string {
+	if m != nil && m.SwiftPrefix != nil {
+		return *m.SwiftPrefix
+	}
+	return ""
+}
+
+func (m *FileOptions) GetPhpClassPrefix() string {
+	if m != nil && m.PhpClassPrefix != nil {
+		return *m.PhpClassPrefix
+	}
+	return ""
+}
+
+func (m *FileOptions) GetPhpNamespace() string {
+	if m != nil && m.PhpNamespace != nil {
+		return *m.PhpNamespace
+	}
+	return ""
+}
+
+func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
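[Editor's sketch, not part of this commit: how the Default_FileOptions_* constants above interact with the nil-safe getters — the defaults are returned even when the whole options message is nil; the Go package path below is a made-up example.]

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	var opts *descriptor.FileOptions // nil options message
	fmt.Println(opts.GetOptimizeFor())       // SPEED (Default_FileOptions_OptimizeFor)
	fmt.Println(opts.GetJavaMultipleFiles()) // false

	opts = &descriptor.FileOptions{GoPackage: proto.String("github.com/example/gen")}
	fmt.Println(opts.GetGoPackage()) // github.com/example/gen
}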
+type MessageOptions struct {
+	// Set true to use the old proto1 MessageSet wire format for extensions.
+	// This is provided for backwards-compatibility with the MessageSet wire
+	// format.  You should not use this for any other reason:  It's less
+	// efficient, has fewer features, and is more complicated.
+	//
+	// The message must be defined exactly as follows:
+	//   message Foo {
+	//     option message_set_wire_format = true;
+	//     extensions 4 to max;
+	//   }
+	// Note that the message cannot have any defined fields; MessageSets only
+	// have extensions.
+	//
+	// All extensions of your type must be singular messages; e.g. they cannot
+	// be int32s, enums, or repeated messages.
+	//
+	// Because this is an option, the above two restrictions are not enforced by
+	// the protocol compiler.
+	MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"`
+	// Disables the generation of the standard "descriptor()" accessor, which can
+	// conflict with a field of the same name.  This is meant to make migration
+	// from proto1 easier; new code should avoid fields named "descriptor".
+	NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"`
+	// Is this message deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for the message, or it will be completely ignored; in the very least,
+	// this is a formalization for deprecating messages.
+	Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// Whether the message is an automatically generated map entry type for the
+	// maps field.
+	//
+	// For maps fields:
+	//     map<KeyType, ValueType> map_field = 1;
+	// The parsed descriptor looks like:
+	//     message MapFieldEntry {
+	//         option map_entry = true;
+	//         optional KeyType key = 1;
+	//         optional ValueType value = 2;
+	//     }
+	//     repeated MapFieldEntry map_field = 1;
+	//
+	// Implementations may choose not to generate the map_entry=true message, but
+	// use a native map in the target language to hold the keys and values.
+	// The reflection APIs in such implementations still need to work as
+	// if the field is a repeated message field.
+	//
+	// NOTE: Do not set the option in .proto files. Always use the maps syntax
+	// instead. The option should only be implicitly set by the proto compiler
+	// parser.
+	MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *MessageOptions) Reset()                    { *m = MessageOptions{} }
+func (m *MessageOptions) String() string            { return proto.CompactTextString(m) }
+func (*MessageOptions) ProtoMessage()               {}
+func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }
+
+var extRange_MessageOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_MessageOptions
+}
+
+const Default_MessageOptions_MessageSetWireFormat bool = false
+const Default_MessageOptions_NoStandardDescriptorAccessor bool = false
+const Default_MessageOptions_Deprecated bool = false
+
+func (m *MessageOptions) GetMessageSetWireFormat() bool {
+	if m != nil && m.MessageSetWireFormat != nil {
+		return *m.MessageSetWireFormat
+	}
+	return Default_MessageOptions_MessageSetWireFormat
+}
+
+func (m *MessageOptions) GetNoStandardDescriptorAccessor() bool {
+	if m != nil && m.NoStandardDescriptorAccessor != nil {
+		return *m.NoStandardDescriptorAccessor
+	}
+	return Default_MessageOptions_NoStandardDescriptorAccessor
+}
+
+func (m *MessageOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_MessageOptions_Deprecated
+}
+
+func (m *MessageOptions) GetMapEntry() bool {
+	if m != nil && m.MapEntry != nil {
+		return *m.MapEntry
+	}
+	return false
+}
+
+func (m *MessageOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
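
// Editor's note (illustrative sketch, not part of the vendored file): the
// generated getters above are nil-safe, so a caller can read option values
// and their declared defaults without first checking whether an options
// block was present. The function name is hypothetical and the sketch is
// assumed to live in this package.
func exampleMessageOptionDefaults() {
	var opts *MessageOptions // nil: the message declared no options block
	_ = opts.GetDeprecated()           // false, via Default_MessageOptions_Deprecated
	_ = opts.GetMessageSetWireFormat() // false, via Default_MessageOptions_MessageSetWireFormat
	_ = opts.GetMapEntry()             // false, the plain zero value (no declared default)
}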
+
+type FieldOptions struct {
+	// The ctype option instructs the C++ code generator to use a different
+	// representation of the field than it normally would.  See the specific
+	// options below.  This option is not yet implemented in the open source
+	// release -- sorry, we'll try to include it in a future version!
+	Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"`
+	// The packed option can be enabled for repeated primitive fields to enable
+	// a more efficient representation on the wire. Rather than repeatedly
+	// writing the tag and type for each element, the entire array is encoded as
+	// a single length-delimited blob. In proto3, only explicitly setting it to
+	// false will avoid using packed encoding.
+	Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"`
+	// The jstype option determines the JavaScript type used for values of the
+	// field.  The option is permitted only for 64 bit integral and fixed types
+	// (int64, uint64, sint64, fixed64, sfixed64).  A field with jstype JS_STRING
+	// is represented as a JavaScript string, which avoids loss of precision that
+	// can happen when a large value is converted to a floating point JavaScript number.
+	// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+	// use the JavaScript "number" type.  The behavior of the default option
+	// JS_NORMAL is implementation dependent.
+	//
+	// This option is an enum to permit additional types to be added, e.g.
+	// goog.math.Integer.
+	Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"`
+	// Should this field be parsed lazily?  Lazy applies only to message-type
+	// fields.  It means that when the outer message is initially parsed, the
+	// inner message's contents will not be parsed but instead stored in encoded
+	// form.  The inner message will actually be parsed when it is first accessed.
+	//
+	// This is only a hint.  Implementations are free to choose whether to use
+	// eager or lazy parsing regardless of the value of this option.  However,
+	// setting this option true suggests that the protocol author believes that
+	// using lazy parsing on this field is worth the additional bookkeeping
+	// overhead typically needed to implement it.
+	//
+	// This option does not affect the public interface of any generated code;
+	// all method signatures remain the same.  Furthermore, thread-safety of the
+	// interface is not affected by this option; const methods remain safe to
+	// call from multiple threads concurrently, while non-const methods continue
+	// to require exclusive access.
+	//
+	//
+	// Note that implementations may choose not to check required fields within
+	// a lazy sub-message.  That is, calling IsInitialized() on the outer message
+	// may return true even if the inner message has missing required fields.
+	// This is necessary because otherwise the inner message would have to be
+	// parsed in order to perform the check, defeating the purpose of lazy
+	// parsing.  An implementation which chooses not to check required fields
+	// must be consistent about it.  That is, for any particular sub-message, the
+	// implementation must either *always* check its required fields, or *never*
+	// check its required fields, regardless of whether or not the message has
+	// been parsed.
+	Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"`
+	// Is this field deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for accessors, or it will be completely ignored; in the very least, this
+	// is a formalization for deprecating fields.
+	Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// For Google-internal migration only. Do not use.
+	Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *FieldOptions) Reset()                    { *m = FieldOptions{} }
+func (m *FieldOptions) String() string            { return proto.CompactTextString(m) }
+func (*FieldOptions) ProtoMessage()               {}
+func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
+
+var extRange_FieldOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_FieldOptions
+}
+
+const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING
+const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL
+const Default_FieldOptions_Lazy bool = false
+const Default_FieldOptions_Deprecated bool = false
+const Default_FieldOptions_Weak bool = false
+
+func (m *FieldOptions) GetCtype() FieldOptions_CType {
+	if m != nil && m.Ctype != nil {
+		return *m.Ctype
+	}
+	return Default_FieldOptions_Ctype
+}
+
+func (m *FieldOptions) GetPacked() bool {
+	if m != nil && m.Packed != nil {
+		return *m.Packed
+	}
+	return false
+}
+
+func (m *FieldOptions) GetJstype() FieldOptions_JSType {
+	if m != nil && m.Jstype != nil {
+		return *m.Jstype
+	}
+	return Default_FieldOptions_Jstype
+}
+
+func (m *FieldOptions) GetLazy() bool {
+	if m != nil && m.Lazy != nil {
+		return *m.Lazy
+	}
+	return Default_FieldOptions_Lazy
+}
+
+func (m *FieldOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_FieldOptions_Deprecated
+}
+
+func (m *FieldOptions) GetWeak() bool {
+	if m != nil && m.Weak != nil {
+		return *m.Weak
+	}
+	return Default_FieldOptions_Weak
+}
+
+func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+type OneofOptions struct {
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *OneofOptions) Reset()                    { *m = OneofOptions{} }
+func (m *OneofOptions) String() string            { return proto.CompactTextString(m) }
+func (*OneofOptions) ProtoMessage()               {}
+func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
+
+var extRange_OneofOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_OneofOptions
+}
+
+func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+type EnumOptions struct {
+	// Set this option to true to allow mapping different tag names to the same
+	// value.
+	AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"`
+	// Is this enum deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for the enum, or it will be completely ignored; in the very least, this
+	// is a formalization for deprecating enums.
+	Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *EnumOptions) Reset()                    { *m = EnumOptions{} }
+func (m *EnumOptions) String() string            { return proto.CompactTextString(m) }
+func (*EnumOptions) ProtoMessage()               {}
+func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
+
+var extRange_EnumOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_EnumOptions
+}
+
+const Default_EnumOptions_Deprecated bool = false
+
+func (m *EnumOptions) GetAllowAlias() bool {
+	if m != nil && m.AllowAlias != nil {
+		return *m.AllowAlias
+	}
+	return false
+}
+
+func (m *EnumOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_EnumOptions_Deprecated
+}
+
+func (m *EnumOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+type EnumValueOptions struct {
+	// Is this enum value deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for the enum value, or it will be completely ignored; in the very least,
+	// this is a formalization for deprecating enum values.
+	Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *EnumValueOptions) Reset()                    { *m = EnumValueOptions{} }
+func (m *EnumValueOptions) String() string            { return proto.CompactTextString(m) }
+func (*EnumValueOptions) ProtoMessage()               {}
+func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
+
+var extRange_EnumValueOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_EnumValueOptions
+}
+
+const Default_EnumValueOptions_Deprecated bool = false
+
+func (m *EnumValueOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_EnumValueOptions_Deprecated
+}
+
+func (m *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+type ServiceOptions struct {
+	// Is this service deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for the service, or it will be completely ignored; in the very least,
+	// this is a formalization for deprecating services.
+	Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *ServiceOptions) Reset()                    { *m = ServiceOptions{} }
+func (m *ServiceOptions) String() string            { return proto.CompactTextString(m) }
+func (*ServiceOptions) ProtoMessage()               {}
+func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
+
+var extRange_ServiceOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_ServiceOptions
+}
+
+const Default_ServiceOptions_Deprecated bool = false
+
+func (m *ServiceOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_ServiceOptions_Deprecated
+}
+
+func (m *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+type MethodOptions struct {
+	// Is this method deprecated?
+	// Depending on the target platform, this can emit Deprecated annotations
+	// for the method, or it will be completely ignored; in the very least,
+	// this is a formalization for deprecating methods.
+	Deprecated       *bool                           `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
+	IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"`
+	// The parser stores options it doesn't recognize here. See above.
+	UninterpretedOption          []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
+	proto.XXX_InternalExtensions `json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+}
+
+func (m *MethodOptions) Reset()                    { *m = MethodOptions{} }
+func (m *MethodOptions) String() string            { return proto.CompactTextString(m) }
+func (*MethodOptions) ProtoMessage()               {}
+func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} }
+
+var extRange_MethodOptions = []proto.ExtensionRange{
+	{1000, 536870911},
+}
+
+func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_MethodOptions
+}
+
+const Default_MethodOptions_Deprecated bool = false
+const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN
+
+func (m *MethodOptions) GetDeprecated() bool {
+	if m != nil && m.Deprecated != nil {
+		return *m.Deprecated
+	}
+	return Default_MethodOptions_Deprecated
+}
+
+func (m *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel {
+	if m != nil && m.IdempotencyLevel != nil {
+		return *m.IdempotencyLevel
+	}
+	return Default_MethodOptions_IdempotencyLevel
+}
+
+func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption {
+	if m != nil {
+		return m.UninterpretedOption
+	}
+	return nil
+}
+
+// A message representing an option the parser does not recognize. This only
+// appears in options protos created by the compiler::Parser class.
+// DescriptorPool resolves these when building Descriptor objects. Therefore,
+// options protos in descriptor objects (e.g. returned by Descriptor::options(),
+// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+// in them.
+type UninterpretedOption struct {
+	Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"`
+	// The value of the uninterpreted option, in whatever type the tokenizer
+	// identified it as during parsing. Exactly one of these should be set.
+	IdentifierValue  *string  `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"`
+	PositiveIntValue *uint64  `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"`
+	NegativeIntValue *int64   `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"`
+	DoubleValue      *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"`
+	StringValue      []byte   `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"`
+	AggregateValue   *string  `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"`
+	XXX_unrecognized []byte   `json:"-"`
+}
+
+func (m *UninterpretedOption) Reset()                    { *m = UninterpretedOption{} }
+func (m *UninterpretedOption) String() string            { return proto.CompactTextString(m) }
+func (*UninterpretedOption) ProtoMessage()               {}
+func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} }
+
+func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart {
+	if m != nil {
+		return m.Name
+	}
+	return nil
+}
+
+func (m *UninterpretedOption) GetIdentifierValue() string {
+	if m != nil && m.IdentifierValue != nil {
+		return *m.IdentifierValue
+	}
+	return ""
+}
+
+func (m *UninterpretedOption) GetPositiveIntValue() uint64 {
+	if m != nil && m.PositiveIntValue != nil {
+		return *m.PositiveIntValue
+	}
+	return 0
+}
+
+func (m *UninterpretedOption) GetNegativeIntValue() int64 {
+	if m != nil && m.NegativeIntValue != nil {
+		return *m.NegativeIntValue
+	}
+	return 0
+}
+
+func (m *UninterpretedOption) GetDoubleValue() float64 {
+	if m != nil && m.DoubleValue != nil {
+		return *m.DoubleValue
+	}
+	return 0
+}
+
+func (m *UninterpretedOption) GetStringValue() []byte {
+	if m != nil {
+		return m.StringValue
+	}
+	return nil
+}
+
+func (m *UninterpretedOption) GetAggregateValue() string {
+	if m != nil && m.AggregateValue != nil {
+		return *m.AggregateValue
+	}
+	return ""
+}
+
+// The name of the uninterpreted option.  Each string represents a segment in
+// a dot-separated name.  is_extension is true iff a segment represents an
+// extension (denoted with parentheses in options specs in .proto files).
+// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
+// "foo.(bar.baz).qux".
+type UninterpretedOption_NamePart struct {
+	NamePart         *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"`
+	IsExtension      *bool   `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"`
+	XXX_unrecognized []byte  `json:"-"`
+}
+
+func (m *UninterpretedOption_NamePart) Reset()         { *m = UninterpretedOption_NamePart{} }
+func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) }
+func (*UninterpretedOption_NamePart) ProtoMessage()    {}
+func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{18, 0}
+}
+
+func (m *UninterpretedOption_NamePart) GetNamePart() string {
+	if m != nil && m.NamePart != nil {
+		return *m.NamePart
+	}
+	return ""
+}
+
+func (m *UninterpretedOption_NamePart) GetIsExtension() bool {
+	if m != nil && m.IsExtension != nil {
+		return *m.IsExtension
+	}
+	return false
+}
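
// Editor's note (illustrative sketch, not part of the vendored file): one way
// to use the NamePart convention documented above is to reassemble the
// dot-separated option name, wrapping extension segments in parentheses,
// e.g. "foo.(bar.baz).qux". The helper name is hypothetical; the sketch is
// assumed to live in this package with "strings" imported.
func uninterpretedOptionName(parts []*UninterpretedOption_NamePart) string {
	segs := make([]string, 0, len(parts))
	for _, p := range parts {
		if p.GetIsExtension() {
			segs = append(segs, "("+p.GetNamePart()+")")
		} else {
			segs = append(segs, p.GetNamePart())
		}
	}
	return strings.Join(segs, ".")
}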
+
+// Encapsulates information about the original source file from which a
+// FileDescriptorProto was generated.
+type SourceCodeInfo struct {
+	// A Location identifies a piece of source code in a .proto file which
+	// corresponds to a particular definition.  This information is intended
+	// to be useful to IDEs, code indexers, documentation generators, and similar
+	// tools.
+	//
+	// For example, say we have a file like:
+	//   message Foo {
+	//     optional string foo = 1;
+	//   }
+	// Let's look at just the field definition:
+	//   optional string foo = 1;
+	//   ^       ^^     ^^  ^  ^^^
+	//   a       bc     de  f  ghi
+	// We have the following locations:
+	//   span   path               represents
+	//   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
+	//   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
+	//   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
+	//   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
+	//   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
+	//
+	// Notes:
+	// - A location may refer to a repeated field itself (i.e. not to any
+	//   particular index within it).  This is used whenever a set of elements are
+	//   logically enclosed in a single code segment.  For example, an entire
+	//   extend block (possibly containing multiple extension definitions) will
+	//   have an outer location whose path refers to the "extensions" repeated
+	//   field without an index.
+	// - Multiple locations may have the same path.  This happens when a single
+	//   logical declaration is spread out across multiple places.  The most
+	//   obvious example is the "extend" block again -- there may be multiple
+	//   extend blocks in the same scope, each of which will have the same path.
+	// - A location's span is not always a subset of its parent's span.  For
+	//   example, the "extendee" of an extension declaration appears at the
+	//   beginning of the "extend" block and is shared by all extensions within
+	//   the block.
+	// - Just because a location's span is a subset of some other location's span
+//   does not mean that it is a descendant.  For example, a "group" defines
+	//   both a type and a field in a single declaration.  Thus, the locations
+	//   corresponding to the type and field and their components will overlap.
+	// - Code which tries to interpret locations should probably be designed to
+	//   ignore those that it doesn't understand, as more types of locations could
+	//   be recorded in the future.
+	Location         []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"`
+	XXX_unrecognized []byte                     `json:"-"`
+}
+
+func (m *SourceCodeInfo) Reset()                    { *m = SourceCodeInfo{} }
+func (m *SourceCodeInfo) String() string            { return proto.CompactTextString(m) }
+func (*SourceCodeInfo) ProtoMessage()               {}
+func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} }
+
+func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location {
+	if m != nil {
+		return m.Location
+	}
+	return nil
+}
+
+type SourceCodeInfo_Location struct {
+	// Identifies which part of the FileDescriptorProto was defined at this
+	// location.
+	//
+	// Each element is a field number or an index.  They form a path from
+	// the root FileDescriptorProto to the place where the definition occurs.  For
+	// example, this path:
+	//   [ 4, 3, 2, 7, 1 ]
+	// refers to:
+	//   file.message_type(3)  // 4, 3
+	//       .field(7)         // 2, 7
+	//       .name()           // 1
+	// This is because FileDescriptorProto.message_type has field number 4:
+	//   repeated DescriptorProto message_type = 4;
+	// and DescriptorProto.field has field number 2:
+	//   repeated FieldDescriptorProto field = 2;
+	// and FieldDescriptorProto.name has field number 1:
+	//   optional string name = 1;
+	//
+	// Thus, the above path gives the location of a field name.  If we removed
+	// the last element:
+	//   [ 4, 3, 2, 7 ]
+	// this path refers to the whole field declaration (from the beginning
+	// of the label to the terminating semicolon).
+	Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"`
+	// Always has exactly three or four elements: start line, start column,
+	// end line (optional, otherwise assumed same as start line), end column.
+	// These are packed into a single field for efficiency.  Note that line
+	// and column numbers are zero-based -- typically you will want to add
+	// 1 to each before displaying to a user.
+	Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"`
+	// If this SourceCodeInfo represents a complete declaration, these are any
+	// comments appearing before and after the declaration which appear to be
+	// attached to the declaration.
+	//
+	// A series of line comments appearing on consecutive lines, with no other
+	// tokens appearing on those lines, will be treated as a single comment.
+	//
+	// leading_detached_comments will keep paragraphs of comments that appear
+	// before (but not connected to) the current element. Each paragraph,
+	// separated by empty lines, will be one comment element in the repeated
+	// field.
+	//
+	// Only the comment content is provided; comment markers (e.g. //) are
+	// stripped out.  For block comments, leading whitespace and an asterisk
+	// will be stripped from the beginning of each line other than the first.
+	// Newlines are included in the output.
+	//
+	// Examples:
+	//
+	//   optional int32 foo = 1;  // Comment attached to foo.
+	//   // Comment attached to bar.
+	//   optional int32 bar = 2;
+	//
+	//   optional string baz = 3;
+	//   // Comment attached to baz.
+	//   // Another line attached to baz.
+	//
+	//   // Comment attached to qux.
+	//   //
+	//   // Another line attached to qux.
+	//   optional double qux = 4;
+	//
+	//   // Detached comment for corge. This is not leading or trailing comments
+	//   // to qux or corge because there are blank lines separating it from
+	//   // both.
+	//
+	//   // Detached comment for corge paragraph 2.
+	//
+	//   optional string corge = 5;
+	//   /* Block comment attached
+	//    * to corge.  Leading asterisks
+	//    * will be removed. */
+	//   /* Block comment attached to
+	//    * grault. */
+	//   optional int32 grault = 6;
+	//
+	//   // ignored detached comments.
+	LeadingComments         *string  `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"`
+	TrailingComments        *string  `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"`
+	LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"`
+	XXX_unrecognized        []byte   `json:"-"`
+}
+
+func (m *SourceCodeInfo_Location) Reset()                    { *m = SourceCodeInfo_Location{} }
+func (m *SourceCodeInfo_Location) String() string            { return proto.CompactTextString(m) }
+func (*SourceCodeInfo_Location) ProtoMessage()               {}
+func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} }
+
+func (m *SourceCodeInfo_Location) GetPath() []int32 {
+	if m != nil {
+		return m.Path
+	}
+	return nil
+}
+
+func (m *SourceCodeInfo_Location) GetSpan() []int32 {
+	if m != nil {
+		return m.Span
+	}
+	return nil
+}
+
+func (m *SourceCodeInfo_Location) GetLeadingComments() string {
+	if m != nil && m.LeadingComments != nil {
+		return *m.LeadingComments
+	}
+	return ""
+}
+
+func (m *SourceCodeInfo_Location) GetTrailingComments() string {
+	if m != nil && m.TrailingComments != nil {
+		return *m.TrailingComments
+	}
+	return ""
+}
+
+func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string {
+	if m != nil {
+		return m.LeadingDetachedComments
+	}
+	return nil
+}
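
// Editor's note (illustrative sketch, not part of the vendored file): a common
// use of the path/span convention documented above is to look up the leading
// comment attached to a definition by matching its path, e.g. [4, 0, 2, 1]
// for the second field of the first message in a file. The helper name is
// hypothetical; the sketch is assumed to live in this package.
func leadingCommentFor(info *SourceCodeInfo, path []int32) string {
	for _, loc := range info.GetLocation() {
		candidate := loc.GetPath()
		if len(candidate) != len(path) {
			continue
		}
		match := true
		for i := range path {
			if candidate[i] != path[i] {
				match = false
				break
			}
		}
		if match {
			return loc.GetLeadingComments()
		}
	}
	return ""
}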
+
+// Describes the relationship between generated code and its original source
+// file. A GeneratedCodeInfo message is associated with only one generated
+// source file, but may contain references to different source .proto files.
+type GeneratedCodeInfo struct {
+	// An Annotation connects some span of text in generated code to an element
+	// of its generating .proto file.
+	Annotation       []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"`
+	XXX_unrecognized []byte                          `json:"-"`
+}
+
+func (m *GeneratedCodeInfo) Reset()                    { *m = GeneratedCodeInfo{} }
+func (m *GeneratedCodeInfo) String() string            { return proto.CompactTextString(m) }
+func (*GeneratedCodeInfo) ProtoMessage()               {}
+func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} }
+
+func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation {
+	if m != nil {
+		return m.Annotation
+	}
+	return nil
+}
+
+type GeneratedCodeInfo_Annotation struct {
+	// Identifies the element in the original source .proto file. This field
+	// is formatted the same as SourceCodeInfo.Location.path.
+	Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"`
+	// Identifies the filesystem path to the original source .proto.
+	SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"`
+	// Identifies the starting offset in bytes in the generated code
+	// that relates to the identified object.
+	Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"`
+	// Identifies the ending offset in bytes in the generated code that
+	// relates to the identified offset. The end offset should be one past
+	// the last relevant byte (so the length of the text = end - begin).
+	End              *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *GeneratedCodeInfo_Annotation) Reset()         { *m = GeneratedCodeInfo_Annotation{} }
+func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) }
+func (*GeneratedCodeInfo_Annotation) ProtoMessage()    {}
+func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{20, 0}
+}
+
+func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 {
+	if m != nil {
+		return m.Path
+	}
+	return nil
+}
+
+func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string {
+	if m != nil && m.SourceFile != nil {
+		return *m.SourceFile
+	}
+	return ""
+}
+
+func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 {
+	if m != nil && m.Begin != nil {
+		return *m.Begin
+	}
+	return 0
+}
+
+func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 {
+	if m != nil && m.End != nil {
+		return *m.End
+	}
+	return 0
+}
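
// Editor's note (illustrative sketch, not part of the vendored file): as the
// comments above describe, Begin and End are byte offsets into the generated
// file, so the annotated span can be recovered by slicing the generated
// source. The function and parameter names are hypothetical.
func annotatedSpan(generatedSource []byte, ann *GeneratedCodeInfo_Annotation) []byte {
	return generatedSource[ann.GetBegin():ann.GetEnd()]
}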
+
+func init() {
+	proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet")
+	proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto")
+	proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto")
+	proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange")
+	proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange")
+	proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions")
+	proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto")
+	proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto")
+	proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto")
+	proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto")
+	proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto")
+	proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto")
+	proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions")
+	proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions")
+	proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions")
+	proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions")
+	proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions")
+	proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions")
+	proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions")
+	proto.RegisterType((*MethodOptions)(nil), "google.protobuf.MethodOptions")
+	proto.RegisterType((*UninterpretedOption)(nil), "google.protobuf.UninterpretedOption")
+	proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart")
+	proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo")
+	proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location")
+	proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo")
+	proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation")
+	proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value)
+	proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, FieldDescriptorProto_Label_value)
+	proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value)
+	proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value)
+	proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value)
+	proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value)
+}
+
+func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) }
+
+var fileDescriptor0 = []byte{
+	// 2519 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xdd, 0x6e, 0x1b, 0xc7,
+	0x15, 0x0e, 0x7f, 0x45, 0x1e, 0x52, 0xd4, 0x68, 0xa4, 0xd8, 0x6b, 0xe5, 0xc7, 0x32, 0xf3, 0x63,
+	0xd9, 0x69, 0xa8, 0x40, 0xb1, 0x1d, 0x47, 0x29, 0xd2, 0x52, 0xe4, 0x5a, 0xa1, 0x4a, 0x91, 0xec,
+	0x92, 0x6a, 0x7e, 0x6e, 0x16, 0xa3, 0xdd, 0x21, 0xb9, 0xf6, 0x72, 0x77, 0xb3, 0xbb, 0xb4, 0xad,
+	0xa0, 0x17, 0x06, 0x7a, 0x55, 0xa0, 0x0f, 0x50, 0x14, 0x45, 0x2f, 0x72, 0x13, 0xa0, 0x0f, 0x50,
+	0x20, 0x77, 0x7d, 0x82, 0x02, 0x79, 0x83, 0xa2, 0x28, 0xd0, 0x3e, 0x46, 0x31, 0x33, 0xbb, 0xcb,
+	0x5d, 0xfe, 0xc4, 0x6a, 0x80, 0x38, 0x57, 0xe4, 0x7c, 0xe7, 0x3b, 0x67, 0xce, 0x9c, 0x39, 0x33,
+	0x73, 0x66, 0x16, 0x76, 0x47, 0xb6, 0x3d, 0x32, 0xe9, 0xbe, 0xe3, 0xda, 0xbe, 0x7d, 0x3e, 0x1d,
+	0xee, 0xeb, 0xd4, 0xd3, 0x5c, 0xc3, 0xf1, 0x6d, 0xb7, 0xc6, 0x31, 0xbc, 0x21, 0x18, 0xb5, 0x90,
+	0x51, 0x3d, 0x85, 0xcd, 0x07, 0x86, 0x49, 0x9b, 0x11, 0xb1, 0x4f, 0x7d, 0x7c, 0x1f, 0xb2, 0x43,
+	0xc3, 0xa4, 0x52, 0x6a, 0x37, 0xb3, 0x57, 0x3a, 0x78, 0xb3, 0x36, 0xa7, 0x54, 0x4b, 0x6a, 0xf4,
+	0x18, 0xac, 0x70, 0x8d, 0xea, 0xbf, 0xb3, 0xb0, 0xb5, 0x44, 0x8a, 0x31, 0x64, 0x2d, 0x32, 0x61,
+	0x16, 0x53, 0x7b, 0x45, 0x85, 0xff, 0xc7, 0x12, 0xac, 0x39, 0x44, 0x7b, 0x44, 0x46, 0x54, 0x4a,
+	0x73, 0x38, 0x6c, 0xe2, 0xd7, 0x01, 0x74, 0xea, 0x50, 0x4b, 0xa7, 0x96, 0x76, 0x21, 0x65, 0x76,
+	0x33, 0x7b, 0x45, 0x25, 0x86, 0xe0, 0x77, 0x60, 0xd3, 0x99, 0x9e, 0x9b, 0x86, 0xa6, 0xc6, 0x68,
+	0xb0, 0x9b, 0xd9, 0xcb, 0x29, 0x48, 0x08, 0x9a, 0x33, 0xf2, 0x4d, 0xd8, 0x78, 0x42, 0xc9, 0xa3,
+	0x38, 0xb5, 0xc4, 0xa9, 0x15, 0x06, 0xc7, 0x88, 0x0d, 0x28, 0x4f, 0xa8, 0xe7, 0x91, 0x11, 0x55,
+	0xfd, 0x0b, 0x87, 0x4a, 0x59, 0x3e, 0xfa, 0xdd, 0x85, 0xd1, 0xcf, 0x8f, 0xbc, 0x14, 0x68, 0x0d,
+	0x2e, 0x1c, 0x8a, 0xeb, 0x50, 0xa4, 0xd6, 0x74, 0x22, 0x2c, 0xe4, 0x56, 0xc4, 0x4f, 0xb6, 0xa6,
+	0x93, 0x79, 0x2b, 0x05, 0xa6, 0x16, 0x98, 0x58, 0xf3, 0xa8, 0xfb, 0xd8, 0xd0, 0xa8, 0x94, 0xe7,
+	0x06, 0x6e, 0x2e, 0x18, 0xe8, 0x0b, 0xf9, 0xbc, 0x8d, 0x50, 0x0f, 0x37, 0xa0, 0x48, 0x9f, 0xfa,
+	0xd4, 0xf2, 0x0c, 0xdb, 0x92, 0xd6, 0xb8, 0x91, 0xb7, 0x96, 0xcc, 0x22, 0x35, 0xf5, 0x79, 0x13,
+	0x33, 0x3d, 0x7c, 0x0f, 0xd6, 0x6c, 0xc7, 0x37, 0x6c, 0xcb, 0x93, 0x0a, 0xbb, 0xa9, 0xbd, 0xd2,
+	0xc1, 0xab, 0x4b, 0x13, 0xa1, 0x2b, 0x38, 0x4a, 0x48, 0xc6, 0x2d, 0x40, 0x9e, 0x3d, 0x75, 0x35,
+	0xaa, 0x6a, 0xb6, 0x4e, 0x55, 0xc3, 0x1a, 0xda, 0x52, 0x91, 0x1b, 0xb8, 0xbe, 0x38, 0x10, 0x4e,
+	0x6c, 0xd8, 0x3a, 0x6d, 0x59, 0x43, 0x5b, 0xa9, 0x78, 0x89, 0x36, 0xbe, 0x02, 0x79, 0xef, 0xc2,
+	0xf2, 0xc9, 0x53, 0xa9, 0xcc, 0x33, 0x24, 0x68, 0x55, 0xbf, 0xcd, 0xc3, 0xc6, 0x65, 0x52, 0xec,
+	0x23, 0xc8, 0x0d, 0xd9, 0x28, 0xa5, 0xf4, 0xff, 0x13, 0x03, 0xa1, 0x93, 0x0c, 0x62, 0xfe, 0x07,
+	0x06, 0xb1, 0x0e, 0x25, 0x8b, 0x7a, 0x3e, 0xd5, 0x45, 0x46, 0x64, 0x2e, 0x99, 0x53, 0x20, 0x94,
+	0x16, 0x53, 0x2a, 0xfb, 0x83, 0x52, 0xea, 0x33, 0xd8, 0x88, 0x5c, 0x52, 0x5d, 0x62, 0x8d, 0xc2,
+	0xdc, 0xdc, 0x7f, 0x9e, 0x27, 0x35, 0x39, 0xd4, 0x53, 0x98, 0x9a, 0x52, 0xa1, 0x89, 0x36, 0x6e,
+	0x02, 0xd8, 0x16, 0xb5, 0x87, 0xaa, 0x4e, 0x35, 0x53, 0x2a, 0xac, 0x88, 0x52, 0x97, 0x51, 0x16,
+	0xa2, 0x64, 0x0b, 0x54, 0x33, 0xf1, 0x87, 0xb3, 0x54, 0x5b, 0x5b, 0x91, 0x29, 0xa7, 0x62, 0x91,
+	0x2d, 0x64, 0xdb, 0x19, 0x54, 0x5c, 0xca, 0xf2, 0x9e, 0xea, 0xc1, 0xc8, 0x8a, 0xdc, 0x89, 0xda,
+	0x73, 0x47, 0xa6, 0x04, 0x6a, 0x62, 0x60, 0xeb, 0x6e, 0xbc, 0x89, 0xdf, 0x80, 0x08, 0x50, 0x79,
+	0x5a, 0x01, 0xdf, 0x85, 0xca, 0x21, 0xd8, 0x21, 0x13, 0xba, 0xf3, 0x15, 0x54, 0x92, 0xe1, 0xc1,
+	0xdb, 0x90, 0xf3, 0x7c, 0xe2, 0xfa, 0x3c, 0x0b, 0x73, 0x8a, 0x68, 0x60, 0x04, 0x19, 0x6a, 0xe9,
+	0x7c, 0x97, 0xcb, 0x29, 0xec, 0x2f, 0xfe, 0xe5, 0x6c, 0xc0, 0x19, 0x3e, 0xe0, 0xb7, 0x17, 0x67,
+	0x34, 0x61, 0x79, 0x7e, 0xdc, 0x3b, 0x1f, 0xc0, 0x7a, 0x62, 0x00, 0x97, 0xed, 0xba, 0xfa, 0x5b,
+	0x78, 0x79, 0xa9, 0x69, 0xfc, 0x19, 0x6c, 0x4f, 0x2d, 0xc3, 0xf2, 0xa9, 0xeb, 0xb8, 0x94, 0x65,
+	0xac, 0xe8, 0x4a, 0xfa, 0xcf, 0xda, 0x8a, 0x9c, 0x3b, 0x8b, 0xb3, 0x85, 0x15, 0x65, 0x6b, 0xba,
+	0x08, 0xde, 0x2e, 0x16, 0xfe, 0xbb, 0x86, 0x9e, 0x3d, 0x7b, 0xf6, 0x2c, 0x5d, 0xfd, 0x63, 0x1e,
+	0xb6, 0x97, 0xad, 0x99, 0xa5, 0xcb, 0xf7, 0x0a, 0xe4, 0xad, 0xe9, 0xe4, 0x9c, 0xba, 0x3c, 0x48,
+	0x39, 0x25, 0x68, 0xe1, 0x3a, 0xe4, 0x4c, 0x72, 0x4e, 0x4d, 0x29, 0xbb, 0x9b, 0xda, 0xab, 0x1c,
+	0xbc, 0x73, 0xa9, 0x55, 0x59, 0x6b, 0x33, 0x15, 0x45, 0x68, 0xe2, 0x8f, 0x21, 0x1b, 0x6c, 0xd1,
+	0xcc, 0xc2, 0xed, 0xcb, 0x59, 0x60, 0x6b, 0x49, 0xe1, 0x7a, 0xf8, 0x15, 0x28, 0xb2, 0x5f, 0x91,
+	0x1b, 0x79, 0xee, 0x73, 0x81, 0x01, 0x2c, 0x2f, 0xf0, 0x0e, 0x14, 0xf8, 0x32, 0xd1, 0x69, 0x78,
+	0xb4, 0x45, 0x6d, 0x96, 0x58, 0x3a, 0x1d, 0x92, 0xa9, 0xe9, 0xab, 0x8f, 0x89, 0x39, 0xa5, 0x3c,
+	0xe1, 0x8b, 0x4a, 0x39, 0x00, 0x7f, 0xc3, 0x30, 0x7c, 0x1d, 0x4a, 0x62, 0x55, 0x19, 0x96, 0x4e,
+	0x9f, 0xf2, 0xdd, 0x33, 0xa7, 0x88, 0x85, 0xd6, 0x62, 0x08, 0xeb, 0xfe, 0xa1, 0x67, 0x5b, 0x61,
+	0x6a, 0xf2, 0x2e, 0x18, 0xc0, 0xbb, 0xff, 0x60, 0x7e, 0xe3, 0x7e, 0x6d, 0xf9, 0xf0, 0xe6, 0x73,
+	0xaa, 0xfa, 0xb7, 0x34, 0x64, 0xf9, 0x7e, 0xb1, 0x01, 0xa5, 0xc1, 0xe7, 0x3d, 0x59, 0x6d, 0x76,
+	0xcf, 0x8e, 0xda, 0x32, 0x4a, 0xe1, 0x0a, 0x00, 0x07, 0x1e, 0xb4, 0xbb, 0xf5, 0x01, 0x4a, 0x47,
+	0xed, 0x56, 0x67, 0x70, 0xef, 0x0e, 0xca, 0x44, 0x0a, 0x67, 0x02, 0xc8, 0xc6, 0x09, 0xef, 0x1f,
+	0xa0, 0x1c, 0x46, 0x50, 0x16, 0x06, 0x5a, 0x9f, 0xc9, 0xcd, 0x7b, 0x77, 0x50, 0x3e, 0x89, 0xbc,
+	0x7f, 0x80, 0xd6, 0xf0, 0x3a, 0x14, 0x39, 0x72, 0xd4, 0xed, 0xb6, 0x51, 0x21, 0xb2, 0xd9, 0x1f,
+	0x28, 0xad, 0xce, 0x31, 0x2a, 0x46, 0x36, 0x8f, 0x95, 0xee, 0x59, 0x0f, 0x41, 0x64, 0xe1, 0x54,
+	0xee, 0xf7, 0xeb, 0xc7, 0x32, 0x2a, 0x45, 0x8c, 0xa3, 0xcf, 0x07, 0x72, 0x1f, 0x95, 0x13, 0x6e,
+	0xbd, 0x7f, 0x80, 0xd6, 0xa3, 0x2e, 0xe4, 0xce, 0xd9, 0x29, 0xaa, 0xe0, 0x4d, 0x58, 0x17, 0x5d,
+	0x84, 0x4e, 0x6c, 0xcc, 0x41, 0xf7, 0xee, 0x20, 0x34, 0x73, 0x44, 0x58, 0xd9, 0x4c, 0x00, 0xf7,
+	0xee, 0x20, 0x5c, 0x6d, 0x40, 0x8e, 0x67, 0x17, 0xc6, 0x50, 0x69, 0xd7, 0x8f, 0xe4, 0xb6, 0xda,
+	0xed, 0x0d, 0x5a, 0xdd, 0x4e, 0xbd, 0x8d, 0x52, 0x33, 0x4c, 0x91, 0x7f, 0x7d, 0xd6, 0x52, 0xe4,
+	0x26, 0x4a, 0xc7, 0xb1, 0x9e, 0x5c, 0x1f, 0xc8, 0x4d, 0x94, 0xa9, 0x6a, 0xb0, 0xbd, 0x6c, 0x9f,
+	0x5c, 0xba, 0x32, 0x62, 0x53, 0x9c, 0x5e, 0x31, 0xc5, 0xdc, 0xd6, 0xc2, 0x14, 0x7f, 0x9d, 0x82,
+	0xad, 0x25, 0x67, 0xc5, 0xd2, 0x4e, 0x7e, 0x01, 0x39, 0x91, 0xa2, 0xe2, 0xf4, 0xbc, 0xb5, 0xf4,
+	0xd0, 0xe1, 0x09, 0xbb, 0x70, 0x82, 0x72, 0xbd, 0x78, 0x05, 0x91, 0x59, 0x51, 0x41, 0x30, 0x13,
+	0x0b, 0x4e, 0xfe, 0x2e, 0x05, 0xd2, 0x2a, 0xdb, 0xcf, 0xd9, 0x28, 0xd2, 0x89, 0x8d, 0xe2, 0xa3,
+	0x79, 0x07, 0x6e, 0xac, 0x1e, 0xc3, 0x82, 0x17, 0xdf, 0xa4, 0xe0, 0xca, 0xf2, 0x42, 0x6b, 0xa9,
+	0x0f, 0x1f, 0x43, 0x7e, 0x42, 0xfd, 0xb1, 0x1d, 0x16, 0x1b, 0x6f, 0x2f, 0x39, 0xc2, 0x98, 0x78,
+	0x3e, 0x56, 0x81, 0x56, 0xfc, 0x0c, 0xcc, 0xac, 0xaa, 0x96, 0x84, 0x37, 0x0b, 0x9e, 0xfe, 0x3e,
+	0x0d, 0x2f, 0x2f, 0x35, 0xbe, 0xd4, 0xd1, 0xd7, 0x00, 0x0c, 0xcb, 0x99, 0xfa, 0xa2, 0xa0, 0x10,
+	0xfb, 0x53, 0x91, 0x23, 0x7c, 0xed, 0xb3, 0xbd, 0x67, 0xea, 0x47, 0xf2, 0x0c, 0x97, 0x83, 0x80,
+	0x38, 0xe1, 0xfe, 0xcc, 0xd1, 0x2c, 0x77, 0xf4, 0xf5, 0x15, 0x23, 0x5d, 0x38, 0xab, 0xdf, 0x03,
+	0xa4, 0x99, 0x06, 0xb5, 0x7c, 0xd5, 0xf3, 0x5d, 0x4a, 0x26, 0x86, 0x35, 0xe2, 0x1b, 0x70, 0xe1,
+	0x30, 0x37, 0x24, 0xa6, 0x47, 0x95, 0x0d, 0x21, 0xee, 0x87, 0x52, 0xa6, 0xc1, 0xcf, 0x38, 0x37,
+	0xa6, 0x91, 0x4f, 0x68, 0x08, 0x71, 0xa4, 0x51, 0xfd, 0xb6, 0x00, 0xa5, 0x58, 0x59, 0x8a, 0x6f,
+	0x40, 0xf9, 0x21, 0x79, 0x4c, 0xd4, 0xf0, 0xaa, 0x21, 0x22, 0x51, 0x62, 0x58, 0x2f, 0xb8, 0x6e,
+	0xbc, 0x07, 0xdb, 0x9c, 0x62, 0x4f, 0x7d, 0xea, 0xaa, 0x9a, 0x49, 0x3c, 0x8f, 0x07, 0xad, 0xc0,
+	0xa9, 0x98, 0xc9, 0xba, 0x4c, 0xd4, 0x08, 0x25, 0xf8, 0x2e, 0x6c, 0x71, 0x8d, 0xc9, 0xd4, 0xf4,
+	0x0d, 0xc7, 0xa4, 0x2a, 0xbb, 0xfc, 0x78, 0x7c, 0x23, 0x8e, 0x3c, 0xdb, 0x64, 0x8c, 0xd3, 0x80,
+	0xc0, 0x3c, 0xf2, 0x70, 0x13, 0x5e, 0xe3, 0x6a, 0x23, 0x6a, 0x51, 0x97, 0xf8, 0x54, 0xa5, 0x5f,
+	0x4e, 0x89, 0xe9, 0xa9, 0xc4, 0xd2, 0xd5, 0x31, 0xf1, 0xc6, 0xd2, 0x36, 0x33, 0x70, 0x94, 0x96,
+	0x52, 0xca, 0x35, 0x46, 0x3c, 0x0e, 0x78, 0x32, 0xa7, 0xd5, 0x2d, 0xfd, 0x13, 0xe2, 0x8d, 0xf1,
+	0x21, 0x5c, 0xe1, 0x56, 0x3c, 0xdf, 0x35, 0xac, 0x91, 0xaa, 0x8d, 0xa9, 0xf6, 0x48, 0x9d, 0xfa,
+	0xc3, 0xfb, 0xd2, 0x2b, 0xf1, 0xfe, 0xb9, 0x87, 0x7d, 0xce, 0x69, 0x30, 0xca, 0x99, 0x3f, 0xbc,
+	0x8f, 0xfb, 0x50, 0x66, 0x93, 0x31, 0x31, 0xbe, 0xa2, 0xea, 0xd0, 0x76, 0xf9, 0xc9, 0x52, 0x59,
+	0xb2, 0xb2, 0x63, 0x11, 0xac, 0x75, 0x03, 0x85, 0x53, 0x5b, 0xa7, 0x87, 0xb9, 0x7e, 0x4f, 0x96,
+	0x9b, 0x4a, 0x29, 0xb4, 0xf2, 0xc0, 0x76, 0x59, 0x42, 0x8d, 0xec, 0x28, 0xc0, 0x25, 0x91, 0x50,
+	0x23, 0x3b, 0x0c, 0xef, 0x5d, 0xd8, 0xd2, 0x34, 0x31, 0x66, 0x43, 0x53, 0x83, 0x2b, 0x8a, 0x27,
+	0xa1, 0x44, 0xb0, 0x34, 0xed, 0x58, 0x10, 0x82, 0x1c, 0xf7, 0xf0, 0x87, 0xf0, 0xf2, 0x2c, 0x58,
+	0x71, 0xc5, 0xcd, 0x85, 0x51, 0xce, 0xab, 0xde, 0x85, 0x2d, 0xe7, 0x62, 0x51, 0x11, 0x27, 0x7a,
+	0x74, 0x2e, 0xe6, 0xd5, 0x3e, 0x80, 0x6d, 0x67, 0xec, 0x2c, 0xea, 0xdd, 0x8e, 0xeb, 0x61, 0x67,
+	0xec, 0xcc, 0x2b, 0xbe, 0xc5, 0xef, 0xab, 0x2e, 0xd5, 0x88, 0x4f, 0x75, 0xe9, 0x6a, 0x9c, 0x1e,
+	0x13, 0xe0, 0x7d, 0x40, 0x9a, 0xa6, 0x52, 0x8b, 0x9c, 0x9b, 0x54, 0x25, 0x2e, 0xb5, 0x88, 0x27,
+	0x5d, 0x8f, 0x93, 0x2b, 0x9a, 0x26, 0x73, 0x69, 0x9d, 0x0b, 0xf1, 0x6d, 0xd8, 0xb4, 0xcf, 0x1f,
+	0x6a, 0x22, 0x25, 0x55, 0xc7, 0xa5, 0x43, 0xe3, 0xa9, 0xf4, 0x26, 0x8f, 0xef, 0x06, 0x13, 0xf0,
+	0x84, 0xec, 0x71, 0x18, 0xdf, 0x02, 0xa4, 0x79, 0x63, 0xe2, 0x3a, 0xbc, 0x26, 0xf0, 0x1c, 0xa2,
+	0x51, 0xe9, 0x2d, 0x41, 0x15, 0x78, 0x27, 0x84, 0xd9, 0x92, 0xf0, 0x9e, 0x18, 0x43, 0x3f, 0xb4,
+	0x78, 0x53, 0x2c, 0x09, 0x8e, 0x05, 0xd6, 0xf6, 0x00, 0xb1, 0x50, 0x24, 0x3a, 0xde, 0xe3, 0xb4,
+	0x8a, 0x33, 0x76, 0xe2, 0xfd, 0xbe, 0x01, 0xeb, 0x8c, 0x39, 0xeb, 0xf4, 0x96, 0xa8, 0x67, 0x9c,
+	0x71, 0xac, 0xc7, 0x1f, 0xad, 0xb4, 0xac, 0x1e, 0x42, 0x39, 0x9e, 0x9f, 0xb8, 0x08, 0x22, 0x43,
+	0x51, 0x8a, 0x9d, 0xf5, 0x8d, 0x6e, 0x93, 0x9d, 0xd2, 0x5f, 0xc8, 0x28, 0xcd, 0xaa, 0x85, 0x76,
+	0x6b, 0x20, 0xab, 0xca, 0x59, 0x67, 0xd0, 0x3a, 0x95, 0x51, 0x26, 0x56, 0x96, 0x9e, 0x64, 0x0b,
+	0x6f, 0xa3, 0x9b, 0xd5, 0xef, 0xd2, 0x50, 0x49, 0xde, 0x33, 0xf0, 0xcf, 0xe1, 0x6a, 0xf8, 0x28,
+	0xe0, 0x51, 0x5f, 0x7d, 0x62, 0xb8, 0x7c, 0xe1, 0x4c, 0x88, 0xa8, 0xb3, 0xa3, 0xa9, 0xdb, 0x0e,
+	0x58, 0x7d, 0xea, 0x7f, 0x6a, 0xb8, 0x6c, 0x59, 0x4c, 0x88, 0x8f, 0xdb, 0x70, 0xdd, 0xb2, 0x55,
+	0xcf, 0x27, 0x96, 0x4e, 0x5c, 0x5d, 0x9d, 0x3d, 0xc7, 0xa8, 0x44, 0xd3, 0xa8, 0xe7, 0xd9, 0xe2,
+	0xc0, 0x8a, 0xac, 0xbc, 0x6a, 0xd9, 0xfd, 0x80, 0x3c, 0xdb, 0xc9, 0xeb, 0x01, 0x75, 0x2e, 0xcd,
+	0x32, 0xab, 0xd2, 0xec, 0x15, 0x28, 0x4e, 0x88, 0xa3, 0x52, 0xcb, 0x77, 0x2f, 0x78, 0x75, 0x59,
+	0x50, 0x0a, 0x13, 0xe2, 0xc8, 0xac, 0xfd, 0x42, 0x8a, 0xfc, 0x93, 0x6c, 0xa1, 0x80, 0x8a, 0x27,
+	0xd9, 0x42, 0x11, 0x41, 0xf5, 0x5f, 0x19, 0x28, 0xc7, 0xab, 0x4d, 0x56, 0xbc, 0x6b, 0xfc, 0x64,
+	0x49, 0xf1, 0xbd, 0xe7, 0x8d, 0xef, 0xad, 0x4d, 0x6b, 0x0d, 0x76, 0xe4, 0x1c, 0xe6, 0x45, 0x0d,
+	0xa8, 0x08, 0x4d, 0x76, 0xdc, 0xb3, 0xdd, 0x86, 0x8a, 0x7b, 0x4d, 0x41, 0x09, 0x5a, 0xf8, 0x18,
+	0xf2, 0x0f, 0x3d, 0x6e, 0x3b, 0xcf, 0x6d, 0xbf, 0xf9, 0xfd, 0xb6, 0x4f, 0xfa, 0xdc, 0x78, 0xf1,
+	0xa4, 0xaf, 0x76, 0xba, 0xca, 0x69, 0xbd, 0xad, 0x04, 0xea, 0xf8, 0x1a, 0x64, 0x4d, 0xf2, 0xd5,
+	0x45, 0xf2, 0x70, 0xe2, 0xd0, 0x65, 0x27, 0xe1, 0x1a, 0x64, 0x9f, 0x50, 0xf2, 0x28, 0x79, 0x24,
+	0x70, 0xe8, 0x47, 0x5c, 0x0c, 0xfb, 0x90, 0xe3, 0xf1, 0xc2, 0x00, 0x41, 0xc4, 0xd0, 0x4b, 0xb8,
+	0x00, 0xd9, 0x46, 0x57, 0x61, 0x0b, 0x02, 0x41, 0x59, 0xa0, 0x6a, 0xaf, 0x25, 0x37, 0x64, 0x94,
+	0xae, 0xde, 0x85, 0xbc, 0x08, 0x02, 0x5b, 0x2c, 0x51, 0x18, 0xd0, 0x4b, 0x41, 0x33, 0xb0, 0x91,
+	0x0a, 0xa5, 0x67, 0xa7, 0x47, 0xb2, 0x82, 0xd2, 0xc9, 0xa9, 0xce, 0xa2, 0x5c, 0xd5, 0x83, 0x72,
+	0xbc, 0xdc, 0x7c, 0x31, 0x57, 0xc9, 0xbf, 0xa7, 0xa0, 0x14, 0x2b, 0x1f, 0x59, 0xe1, 0x42, 0x4c,
+	0xd3, 0x7e, 0xa2, 0x12, 0xd3, 0x20, 0x5e, 0x90, 0x1a, 0xc0, 0xa1, 0x3a, 0x43, 0x2e, 0x3b, 0x75,
+	0x2f, 0x68, 0x89, 0xe4, 0x50, 0xbe, 0xfa, 0x97, 0x14, 0xa0, 0xf9, 0x02, 0x74, 0xce, 0xcd, 0xd4,
+	0x4f, 0xe9, 0x66, 0xf5, 0xcf, 0x29, 0xa8, 0x24, 0xab, 0xce, 0x39, 0xf7, 0x6e, 0xfc, 0xa4, 0xee,
+	0xfd, 0x33, 0x0d, 0xeb, 0x89, 0x5a, 0xf3, 0xb2, 0xde, 0x7d, 0x09, 0x9b, 0x86, 0x4e, 0x27, 0x8e,
+	0xed, 0x53, 0x4b, 0xbb, 0x50, 0x4d, 0xfa, 0x98, 0x9a, 0x52, 0x95, 0x6f, 0x1a, 0xfb, 0xdf, 0x5f,
+	0xcd, 0xd6, 0x5a, 0x33, 0xbd, 0x36, 0x53, 0x3b, 0xdc, 0x6a, 0x35, 0xe5, 0xd3, 0x5e, 0x77, 0x20,
+	0x77, 0x1a, 0x9f, 0xab, 0x67, 0x9d, 0x5f, 0x75, 0xba, 0x9f, 0x76, 0x14, 0x64, 0xcc, 0xd1, 0x7e,
+	0xc4, 0x65, 0xdf, 0x03, 0x34, 0xef, 0x14, 0xbe, 0x0a, 0xcb, 0xdc, 0x42, 0x2f, 0xe1, 0x2d, 0xd8,
+	0xe8, 0x74, 0xd5, 0x7e, 0xab, 0x29, 0xab, 0xf2, 0x83, 0x07, 0x72, 0x63, 0xd0, 0x17, 0xd7, 0xfb,
+	0x88, 0x3d, 0x48, 0x2c, 0xf0, 0xea, 0x9f, 0x32, 0xb0, 0xb5, 0xc4, 0x13, 0x5c, 0x0f, 0x6e, 0x16,
+	0xe2, 0xb2, 0xf3, 0xee, 0x65, 0xbc, 0xaf, 0xb1, 0x82, 0xa0, 0x47, 0x5c, 0x3f, 0xb8, 0x88, 0xdc,
+	0x02, 0x16, 0x25, 0xcb, 0x37, 0x86, 0x06, 0x75, 0x83, 0xd7, 0x10, 0x71, 0xdd, 0xd8, 0x98, 0xe1,
+	0xe2, 0x41, 0xe4, 0x67, 0x80, 0x1d, 0xdb, 0x33, 0x7c, 0xe3, 0x31, 0x55, 0x0d, 0x2b, 0x7c, 0x3a,
+	0x61, 0xd7, 0x8f, 0xac, 0x82, 0x42, 0x49, 0xcb, 0xf2, 0x23, 0xb6, 0x45, 0x47, 0x64, 0x8e, 0xcd,
+	0x36, 0xf3, 0x8c, 0x82, 0x42, 0x49, 0xc4, 0xbe, 0x01, 0x65, 0xdd, 0x9e, 0xb2, 0x9a, 0x4c, 0xf0,
+	0xd8, 0xd9, 0x91, 0x52, 0x4a, 0x02, 0x8b, 0x28, 0x41, 0xb5, 0x3d, 0x7b, 0xb3, 0x29, 0x2b, 0x25,
+	0x81, 0x09, 0xca, 0x4d, 0xd8, 0x20, 0xa3, 0x91, 0xcb, 0x8c, 0x87, 0x86, 0xc4, 0xfd, 0xa1, 0x12,
+	0xc1, 0x9c, 0xb8, 0x73, 0x02, 0x85, 0x30, 0x0e, 0xec, 0xa8, 0x66, 0x91, 0x50, 0x1d, 0xf1, 0x6e,
+	0x97, 0xde, 0x2b, 0x2a, 0x05, 0x2b, 0x14, 0xde, 0x80, 0xb2, 0xe1, 0xa9, 0xb3, 0x27, 0xe8, 0xf4,
+	0x6e, 0x7a, 0xaf, 0xa0, 0x94, 0x0c, 0x2f, 0x7a, 0xbe, 0xab, 0x7e, 0x93, 0x86, 0x4a, 0xf2, 0x09,
+	0x1d, 0x37, 0xa1, 0x60, 0xda, 0x1a, 0xe1, 0xa9, 0x25, 0xbe, 0xdf, 0xec, 0x3d, 0xe7, 0xd5, 0xbd,
+	0xd6, 0x0e, 0xf8, 0x4a, 0xa4, 0xb9, 0xf3, 0x8f, 0x14, 0x14, 0x42, 0x18, 0x5f, 0x81, 0xac, 0x43,
+	0xfc, 0x31, 0x37, 0x97, 0x3b, 0x4a, 0xa3, 0x94, 0xc2, 0xdb, 0x0c, 0xf7, 0x1c, 0x62, 0xf1, 0x14,
+	0x08, 0x70, 0xd6, 0x66, 0xf3, 0x6a, 0x52, 0xa2, 0xf3, 0xcb, 0x89, 0x3d, 0x99, 0x50, 0xcb, 0xf7,
+	0xc2, 0x79, 0x0d, 0xf0, 0x46, 0x00, 0xe3, 0x77, 0x60, 0xd3, 0x77, 0x89, 0x61, 0x26, 0xb8, 0x59,
+	0xce, 0x45, 0xa1, 0x20, 0x22, 0x1f, 0xc2, 0xb5, 0xd0, 0xae, 0x4e, 0x7d, 0xa2, 0x8d, 0xa9, 0x3e,
+	0x53, 0xca, 0xf3, 0xf7, 0xd9, 0xab, 0x01, 0xa1, 0x19, 0xc8, 0x43, 0xdd, 0xea, 0x77, 0x29, 0xd8,
+	0x0c, 0xaf, 0x53, 0x7a, 0x14, 0xac, 0x53, 0x00, 0x62, 0x59, 0xb6, 0x1f, 0x0f, 0xd7, 0x62, 0x2a,
+	0x2f, 0xe8, 0xd5, 0xea, 0x91, 0x92, 0x12, 0x33, 0xb0, 0x33, 0x01, 0x98, 0x49, 0x56, 0x86, 0xed,
+	0x3a, 0x94, 0x82, 0xef, 0x23, 0xfc, 0x23, 0x9b, 0xb8, 0x80, 0x83, 0x80, 0xd8, 0xbd, 0x0b, 0x6f,
+	0x43, 0xee, 0x9c, 0x8e, 0x0c, 0x2b, 0x78, 0xf5, 0x14, 0x8d, 0xf0, 0x25, 0x37, 0x1b, 0xbd, 0xe4,
+	0x1e, 0xfd, 0x21, 0x05, 0x5b, 0x9a, 0x3d, 0x99, 0xf7, 0xf7, 0x08, 0xcd, 0xbd, 0x02, 0x78, 0x9f,
+	0xa4, 0xbe, 0xf8, 0x78, 0x64, 0xf8, 0xe3, 0xe9, 0x79, 0x4d, 0xb3, 0x27, 0xfb, 0x23, 0xdb, 0x24,
+	0xd6, 0x68, 0xf6, 0x95, 0x90, 0xff, 0xd1, 0xde, 0x1d, 0x51, 0xeb, 0xdd, 0x91, 0x1d, 0xfb, 0x66,
+	0xf8, 0xd1, 0xec, 0xef, 0xd7, 0xe9, 0xcc, 0x71, 0xef, 0xe8, 0xaf, 0xe9, 0x9d, 0x63, 0xd1, 0x57,
+	0x2f, 0x8c, 0x8d, 0x42, 0x87, 0x26, 0xd5, 0xd8, 0x78, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x0c,
+	0xab, 0xb6, 0x37, 0x7e, 0x1c, 0x00, 0x00,
+}

+ 849 - 0
vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto

@@ -0,0 +1,849 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+//  Based on original Protocol Buffers design by
+//  Sanjay Ghemawat, Jeff Dean, and others.
+//
+// The messages in this file describe the definitions found in .proto files.
+// A valid .proto file can be translated directly to a FileDescriptorProto
+// without any other information (e.g. without reading its imports).
+
+
+syntax = "proto2";
+
+package google.protobuf;
+option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "DescriptorProtos";
+option csharp_namespace = "Google.Protobuf.Reflection";
+option objc_class_prefix = "GPB";
+
+// descriptor.proto must be optimized for speed because reflection-based
+// algorithms don't work during bootstrapping.
+option optimize_for = SPEED;
+
+// The protocol compiler can output a FileDescriptorSet containing the .proto
+// files it parses.
+message FileDescriptorSet {
+  repeated FileDescriptorProto file = 1;
+}
+
+// Describes a complete .proto file.
+message FileDescriptorProto {
+  optional string name = 1;       // file name, relative to root of source tree
+  optional string package = 2;    // e.g. "foo", "foo.bar", etc.
+
+  // Names of files imported by this file.
+  repeated string dependency = 3;
+  // Indexes of the public imported files in the dependency list above.
+  repeated int32 public_dependency = 10;
+  // Indexes of the weak imported files in the dependency list.
+  // For Google-internal migration only. Do not use.
+  repeated int32 weak_dependency = 11;
+
+  // All top-level definitions in this file.
+  repeated DescriptorProto message_type = 4;
+  repeated EnumDescriptorProto enum_type = 5;
+  repeated ServiceDescriptorProto service = 6;
+  repeated FieldDescriptorProto extension = 7;
+
+  optional FileOptions options = 8;
+
+  // This field contains optional information about the original source code.
+  // You may safely remove this entire field without harming runtime
+  // functionality of the descriptors -- the information is needed only by
+  // development tools.
+  optional SourceCodeInfo source_code_info = 9;
+
+  // The syntax of the proto file.
+  // The supported values are "proto2" and "proto3".
+  optional string syntax = 12;
+}
+
+// Describes a message type.
+message DescriptorProto {
+  optional string name = 1;
+
+  repeated FieldDescriptorProto field = 2;
+  repeated FieldDescriptorProto extension = 6;
+
+  repeated DescriptorProto nested_type = 3;
+  repeated EnumDescriptorProto enum_type = 4;
+
+  message ExtensionRange {
+    optional int32 start = 1;
+    optional int32 end = 2;
+
+    optional ExtensionRangeOptions options = 3;
+  }
+  repeated ExtensionRange extension_range = 5;
+
+  repeated OneofDescriptorProto oneof_decl = 8;
+
+  optional MessageOptions options = 7;
+
+  // Range of reserved tag numbers. Reserved tag numbers may not be used by
+  // fields or extension ranges in the same message. Reserved ranges may
+  // not overlap.
+  message ReservedRange {
+    optional int32 start = 1; // Inclusive.
+    optional int32 end = 2;   // Exclusive.
+  }
+  repeated ReservedRange reserved_range = 9;
+  // Reserved field names, which may not be used by fields in the same message.
+  // A given name may only be reserved once.
+  repeated string reserved_name = 10;
+}
+
+message ExtensionRangeOptions {
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+// Describes a field within a message.
+message FieldDescriptorProto {
+  enum Type {
+    // 0 is reserved for errors.
+    // Order is weird for historical reasons.
+    TYPE_DOUBLE         = 1;
+    TYPE_FLOAT          = 2;
+    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+    // negative values are likely.
+    TYPE_INT64          = 3;
+    TYPE_UINT64         = 4;
+    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+    // negative values are likely.
+    TYPE_INT32          = 5;
+    TYPE_FIXED64        = 6;
+    TYPE_FIXED32        = 7;
+    TYPE_BOOL           = 8;
+    TYPE_STRING         = 9;
+    // Tag-delimited aggregate.
+    // Group type is deprecated and not supported in proto3. However, Proto3
+    // implementations should still be able to parse the group wire format and
+    // treat group fields as unknown fields.
+    TYPE_GROUP          = 10;
+    TYPE_MESSAGE        = 11;  // Length-delimited aggregate.
+
+    // New in version 2.
+    TYPE_BYTES          = 12;
+    TYPE_UINT32         = 13;
+    TYPE_ENUM           = 14;
+    TYPE_SFIXED32       = 15;
+    TYPE_SFIXED64       = 16;
+    TYPE_SINT32         = 17;  // Uses ZigZag encoding.
+    TYPE_SINT64         = 18;  // Uses ZigZag encoding.
+  };
+
+  enum Label {
+    // 0 is reserved for errors
+    LABEL_OPTIONAL      = 1;
+    LABEL_REQUIRED      = 2;
+    LABEL_REPEATED      = 3;
+  };
+
+  optional string name = 1;
+  optional int32 number = 3;
+  optional Label label = 4;
+
+  // If type_name is set, this need not be set.  If both this and type_name
+  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+  optional Type type = 5;
+
+  // For message and enum types, this is the name of the type.  If the name
+  // starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
+  // rules are used to find the type (i.e. first the nested types within this
+  // message are searched, then within the parent, on up to the root
+  // namespace).
+  optional string type_name = 6;
+
+  // For extensions, this is the name of the type being extended.  It is
+  // resolved in the same manner as type_name.
+  optional string extendee = 2;
+
+  // For numeric types, contains the original text representation of the value.
+  // For booleans, "true" or "false".
+  // For strings, contains the default text contents (not escaped in any way).
+  // For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
+  // TODO(kenton):  Base-64 encode?
+  optional string default_value = 7;
+
+  // If set, gives the index of a oneof in the containing type's oneof_decl
+  // list.  This field is a member of that oneof.
+  optional int32 oneof_index = 9;
+
+  // JSON name of this field. The value is set by protocol compiler. If the
+  // user has set a "json_name" option on this field, that option's value
+  // will be used. Otherwise, it's deduced from the field's name by converting
+  // it to camelCase.
+  optional string json_name = 10;
+
+  optional FieldOptions options = 8;
+}
+
+// Describes a oneof.
+message OneofDescriptorProto {
+  optional string name = 1;
+  optional OneofOptions options = 2;
+}
+
+// Describes an enum type.
+message EnumDescriptorProto {
+  optional string name = 1;
+
+  repeated EnumValueDescriptorProto value = 2;
+
+  optional EnumOptions options = 3;
+}
+
+// Describes a value within an enum.
+message EnumValueDescriptorProto {
+  optional string name = 1;
+  optional int32 number = 2;
+
+  optional EnumValueOptions options = 3;
+}
+
+// Describes a service.
+message ServiceDescriptorProto {
+  optional string name = 1;
+  repeated MethodDescriptorProto method = 2;
+
+  optional ServiceOptions options = 3;
+}
+
+// Describes a method of a service.
+message MethodDescriptorProto {
+  optional string name = 1;
+
+  // Input and output type names.  These are resolved in the same way as
+  // FieldDescriptorProto.type_name, but must refer to a message type.
+  optional string input_type = 2;
+  optional string output_type = 3;
+
+  optional MethodOptions options = 4;
+
+  // Identifies if client streams multiple client messages
+  optional bool client_streaming = 5 [default=false];
+  // Identifies if server streams multiple server messages
+  optional bool server_streaming = 6 [default=false];
+}
+
+
+// ===================================================================
+// Options
+
+// Each of the definitions above may have "options" attached.  These are
+// just annotations which may cause code to be generated slightly differently
+// or may contain hints for code that manipulates protocol messages.
+//
+// Clients may define custom options as extensions of the *Options messages.
+// These extensions may not yet be known at parsing time, so the parser cannot
+// store the values in them.  Instead it stores them in a field in the *Options
+// message called uninterpreted_option. This field must have the same name
+// across all *Options messages. We then use this field to populate the
+// extensions when we build a descriptor, at which point all protos have been
+// parsed and so all extensions are known.
+//
+// Extension numbers for custom options may be chosen as follows:
+// * For options which will only be used within a single application or
+//   organization, or for experimental options, use field numbers 50000
+//   through 99999.  It is up to you to ensure that you do not use the
+//   same number for multiple options.
+// * For options which will be published and used publicly by multiple
+//   independent entities, e-mail protobuf-global-extension-registry@google.com
+//   to reserve extension numbers. Simply provide your project name (e.g.
+//   Objective-C plugin) and your project website (if available) -- there's no
+//   need to explain how you intend to use them. Usually you only need one
+//   extension number. You can declare multiple options with only one extension
+//   number by putting them in a sub-message. See the Custom Options section of
+//   the docs for examples:
+//   https://developers.google.com/protocol-buffers/docs/proto#options
+//   If this turns out to be popular, a web service will be set up
+//   to automatically assign option numbers.
+
+
+message FileOptions {
+
+  // Sets the Java package where classes generated from this .proto will be
+  // placed.  By default, the proto package is used, but this is often
+  // inappropriate because proto packages do not normally start with backwards
+  // domain names.
+  optional string java_package = 1;
+
+
+  // If set, all the classes from the .proto file are wrapped in a single
+  // outer class with the given name.  This applies to both Proto1
+  // (equivalent to the old "--one_java_file" option) and Proto2 (where
+  // a .proto always translates to a single class, but you may want to
+  // explicitly choose the class name).
+  optional string java_outer_classname = 8;
+
+  // If set true, then the Java code generator will generate a separate .java
+  // file for each top-level message, enum, and service defined in the .proto
+  // file.  Thus, these types will *not* be nested inside the outer class
+  // named by java_outer_classname.  However, the outer class will still be
+  // generated to contain the file's getDescriptor() method as well as any
+  // top-level extensions defined in the file.
+  optional bool java_multiple_files = 10 [default=false];
+
+  // This option does nothing.
+  optional bool java_generate_equals_and_hash = 20 [deprecated=true];
+
+  // If set true, then the Java2 code generator will generate code that
+  // throws an exception whenever an attempt is made to assign a non-UTF-8
+  // byte sequence to a string field.
+  // Message reflection will do the same.
+  // However, an extension field still accepts non-UTF-8 byte sequences.
+  // This option has no effect when used with the lite runtime.
+  optional bool java_string_check_utf8 = 27 [default=false];
+
+
+  // Generated classes can be optimized for speed or code size.
+  enum OptimizeMode {
+    SPEED = 1;        // Generate complete code for parsing, serialization,
+                      // etc.
+    CODE_SIZE = 2;    // Use ReflectionOps to implement these methods.
+    LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
+  }
+  optional OptimizeMode optimize_for = 9 [default=SPEED];
+
+  // Sets the Go package where structs generated from this .proto will be
+  // placed. If omitted, the Go package will be derived from the following:
+  //   - The basename of the package import path, if provided.
+  //   - Otherwise, the package statement in the .proto file, if present.
+  //   - Otherwise, the basename of the .proto file, without extension.
+  optional string go_package = 11;
+
+
+
+  // Should generic services be generated in each language?  "Generic" services
+  // are not specific to any particular RPC system.  They are generated by the
+  // main code generators in each language (without additional plugins).
+  // Generic services were the only kind of service generation supported by
+  // early versions of google.protobuf.
+  //
+  // Generic services are now considered deprecated in favor of using plugins
+  // that generate code specific to your particular RPC system.  Therefore,
+  // these default to false.  Old code which depends on generic services should
+  // explicitly set them to true.
+  optional bool cc_generic_services = 16 [default=false];
+  optional bool java_generic_services = 17 [default=false];
+  optional bool py_generic_services = 18 [default=false];
+  optional bool php_generic_services = 42 [default=false];
+
+  // Is this file deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for everything in the file, or it will be completely ignored; in the very
+  // least, this is a formalization for deprecating files.
+  optional bool deprecated = 23 [default=false];
+
+  // Enables the use of arenas for the proto messages in this file. This applies
+  // only to generated classes for C++.
+  optional bool cc_enable_arenas = 31 [default=false];
+
+
+  // Sets the objective c class prefix which is prepended to all objective c
+  // generated classes from this .proto. There is no default.
+  optional string objc_class_prefix = 36;
+
+  // Namespace for generated classes; defaults to the package.
+  optional string csharp_namespace = 37;
+
+  // By default Swift generators will take the proto package and CamelCase it
+  // replacing '.' with underscore and use that to prefix the types/symbols
+  // defined. When this option is provided, they will use this value instead
+  // to prefix the types/symbols defined.
+  optional string swift_prefix = 39;
+
+  // Sets the php class prefix which is prepended to all php generated classes
+  // from this .proto. Default is empty.
+  optional string php_class_prefix = 40;
+
+  // Use this option to change the namespace of php generated classes. Default
+  // is empty. When this option is empty, the package name will be used for
+  // determining the namespace.
+  optional string php_namespace = 41;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+
+  reserved 38;
+}
+
+message MessageOptions {
+  // Set true to use the old proto1 MessageSet wire format for extensions.
+  // This is provided for backwards-compatibility with the MessageSet wire
+  // format.  You should not use this for any other reason:  It's less
+  // efficient, has fewer features, and is more complicated.
+  //
+  // The message must be defined exactly as follows:
+  //   message Foo {
+  //     option message_set_wire_format = true;
+  //     extensions 4 to max;
+  //   }
+  // Note that the message cannot have any defined fields; MessageSets only
+  // have extensions.
+  //
+  // All extensions of your type must be singular messages; e.g. they cannot
+  // be int32s, enums, or repeated messages.
+  //
+  // Because this is an option, the above two restrictions are not enforced by
+  // the protocol compiler.
+  optional bool message_set_wire_format = 1 [default=false];
+
+  // Disables the generation of the standard "descriptor()" accessor, which can
+  // conflict with a field of the same name.  This is meant to make migration
+  // from proto1 easier; new code should avoid fields named "descriptor".
+  optional bool no_standard_descriptor_accessor = 2 [default=false];
+
+  // Is this message deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the message, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating messages.
+  optional bool deprecated = 3 [default=false];
+
+  // Whether the message is an automatically generated map entry type for the
+  // maps field.
+  //
+  // For maps fields:
+  //     map<KeyType, ValueType> map_field = 1;
+  // The parsed descriptor looks like:
+  //     message MapFieldEntry {
+  //         option map_entry = true;
+  //         optional KeyType key = 1;
+  //         optional ValueType value = 2;
+  //     }
+  //     repeated MapFieldEntry map_field = 1;
+  //
+  // Implementations may choose not to generate the map_entry=true message, but
+  // use a native map in the target language to hold the keys and values.
+  // The reflection APIs in such implementations still need to work as
+  // if the field is a repeated message field.
+  //
+  // NOTE: Do not set the option in .proto files. Always use the maps syntax
+  // instead. The option should only be implicitly set by the proto compiler
+  // parser.
+  optional bool map_entry = 7;
+
+  reserved 8;  // javalite_serializable
+  reserved 9;  // javanano_as_lite
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message FieldOptions {
+  // The ctype option instructs the C++ code generator to use a different
+  // representation of the field than it normally would.  See the specific
+  // options below.  This option is not yet implemented in the open source
+  // release -- sorry, we'll try to include it in a future version!
+  optional CType ctype = 1 [default = STRING];
+  enum CType {
+    // Default mode.
+    STRING = 0;
+
+    CORD = 1;
+
+    STRING_PIECE = 2;
+  }
+  // The packed option can be enabled for repeated primitive fields to enable
+  // a more efficient representation on the wire. Rather than repeatedly
+  // writing the tag and type for each element, the entire array is encoded as
+  // a single length-delimited blob. In proto3, only explicitly setting it to
+  // false will avoid using packed encoding.
+  optional bool packed = 2;
+
+  // The jstype option determines the JavaScript type used for values of the
+  // field.  The option is permitted only for 64 bit integral and fixed types
+  // (int64, uint64, sint64, fixed64, sfixed64).  A field with jstype JS_STRING
+  // is represented as JavaScript string, which avoids loss of precision that
+  // can happen when a large value is converted to a floating point JavaScript number.
+  // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+  // use the JavaScript "number" type.  The behavior of the default option
+  // JS_NORMAL is implementation dependent.
+  //
+  // This option is an enum to permit additional types to be added, e.g.
+  // goog.math.Integer.
+  optional JSType jstype = 6 [default = JS_NORMAL];
+  enum JSType {
+    // Use the default type.
+    JS_NORMAL = 0;
+
+    // Use JavaScript strings.
+    JS_STRING = 1;
+
+    // Use JavaScript numbers.
+    JS_NUMBER = 2;
+  }
+
+  // Should this field be parsed lazily?  Lazy applies only to message-type
+  // fields.  It means that when the outer message is initially parsed, the
+  // inner message's contents will not be parsed but instead stored in encoded
+  // form.  The inner message will actually be parsed when it is first accessed.
+  //
+  // This is only a hint.  Implementations are free to choose whether to use
+  // eager or lazy parsing regardless of the value of this option.  However,
+  // setting this option true suggests that the protocol author believes that
+  // using lazy parsing on this field is worth the additional bookkeeping
+  // overhead typically needed to implement it.
+  //
+  // This option does not affect the public interface of any generated code;
+  // all method signatures remain the same.  Furthermore, thread-safety of the
+  // interface is not affected by this option; const methods remain safe to
+  // call from multiple threads concurrently, while non-const methods continue
+  // to require exclusive access.
+  //
+  //
+  // Note that implementations may choose not to check required fields within
+  // a lazy sub-message.  That is, calling IsInitialized() on the outer message
+  // may return true even if the inner message has missing required fields.
+  // This is necessary because otherwise the inner message would have to be
+  // parsed in order to perform the check, defeating the purpose of lazy
+  // parsing.  An implementation which chooses not to check required fields
+  // must be consistent about it.  That is, for any particular sub-message, the
+  // implementation must either *always* check its required fields, or *never*
+  // check its required fields, regardless of whether or not the message has
+  // been parsed.
+  optional bool lazy = 5 [default=false];
+
+  // Is this field deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for accessors, or it will be completely ignored; in the very least, this
+  // is a formalization for deprecating fields.
+  optional bool deprecated = 3 [default=false];
+
+  // For Google-internal migration only. Do not use.
+  optional bool weak = 10 [default=false];
+
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+
+  reserved 4;  // removed jtype
+}
+
+message OneofOptions {
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumOptions {
+
+  // Set this option to true to allow mapping different tag names to the same
+  // value.
+  optional bool allow_alias = 2;
+
+  // Is this enum deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum, or it will be completely ignored; in the very least, this
+  // is a formalization for deprecating enums.
+  optional bool deprecated = 3 [default=false];
+
+  reserved 5;  // javanano_as_lite
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumValueOptions {
+  // Is this enum value deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum value, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating enum values.
+  optional bool deprecated = 1 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message ServiceOptions {
+
+  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
+  //   framework.  We apologize for hoarding these numbers to ourselves, but
+  //   we were already using them long before we decided to release Protocol
+  //   Buffers.
+
+  // Is this service deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the service, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating services.
+  optional bool deprecated = 33 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message MethodOptions {
+
+  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
+  //   framework.  We apologize for hoarding these numbers to ourselves, but
+  //   we were already using them long before we decided to release Protocol
+  //   Buffers.
+
+  // Is this method deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the method, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating methods.
+  optional bool deprecated = 33 [default=false];
+
+  // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+  // or neither? HTTP-based RPC implementations may choose the GET verb for safe
+  // methods, and the PUT verb for idempotent methods instead of the default POST.
+  enum IdempotencyLevel {
+    IDEMPOTENCY_UNKNOWN = 0;
+    NO_SIDE_EFFECTS     = 1; // implies idempotent
+    IDEMPOTENT          = 2; // idempotent, but may have side effects
+  }
+  optional IdempotencyLevel idempotency_level =
+      34 [default=IDEMPOTENCY_UNKNOWN];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+
+// A message representing an option the parser does not recognize. This only
+// appears in options protos created by the compiler::Parser class.
+// DescriptorPool resolves these when building Descriptor objects. Therefore,
+// options protos in descriptor objects (e.g. returned by Descriptor::options(),
+// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+// in them.
+message UninterpretedOption {
+  // The name of the uninterpreted option.  Each string represents a segment in
+  // a dot-separated name.  is_extension is true iff a segment represents an
+  // extension (denoted with parentheses in options specs in .proto files).
+  // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
+  // "foo.(bar.baz).qux".
+  message NamePart {
+    required string name_part = 1;
+    required bool is_extension = 2;
+  }
+  repeated NamePart name = 2;
+
+  // The value of the uninterpreted option, in whatever type the tokenizer
+  // identified it as during parsing. Exactly one of these should be set.
+  optional string identifier_value = 3;
+  optional uint64 positive_int_value = 4;
+  optional int64 negative_int_value = 5;
+  optional double double_value = 6;
+  optional bytes string_value = 7;
+  optional string aggregate_value = 8;
+}
+
+// ===================================================================
+// Optional source code info
+
+// Encapsulates information about the original source file from which a
+// FileDescriptorProto was generated.
+message SourceCodeInfo {
+  // A Location identifies a piece of source code in a .proto file which
+  // corresponds to a particular definition.  This information is intended
+  // to be useful to IDEs, code indexers, documentation generators, and similar
+  // tools.
+  //
+  // For example, say we have a file like:
+  //   message Foo {
+  //     optional string foo = 1;
+  //   }
+  // Let's look at just the field definition:
+  //   optional string foo = 1;
+  //   ^       ^^     ^^  ^  ^^^
+  //   a       bc     de  f  ghi
+  // We have the following locations:
+  //   span   path               represents
+  //   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
+  //   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
+  //   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
+  //   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
+  //   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
+  //
+  // Notes:
+  // - A location may refer to a repeated field itself (i.e. not to any
+  //   particular index within it).  This is used whenever a set of elements are
+  //   logically enclosed in a single code segment.  For example, an entire
+  //   extend block (possibly containing multiple extension definitions) will
+  //   have an outer location whose path refers to the "extensions" repeated
+  //   field without an index.
+  // - Multiple locations may have the same path.  This happens when a single
+  //   logical declaration is spread out across multiple places.  The most
+  //   obvious example is the "extend" block again -- there may be multiple
+  //   extend blocks in the same scope, each of which will have the same path.
+  // - A location's span is not always a subset of its parent's span.  For
+  //   example, the "extendee" of an extension declaration appears at the
+  //   beginning of the "extend" block and is shared by all extensions within
+  //   the block.
+  // - Just because a location's span is a subset of some other location's span
+//   does not mean that it is a descendant.  For example, a "group" defines
+  //   both a type and a field in a single declaration.  Thus, the locations
+  //   corresponding to the type and field and their components will overlap.
+  // - Code which tries to interpret locations should probably be designed to
+  //   ignore those that it doesn't understand, as more types of locations could
+  //   be recorded in the future.
+  repeated Location location = 1;
+  message Location {
+    // Identifies which part of the FileDescriptorProto was defined at this
+    // location.
+    //
+    // Each element is a field number or an index.  They form a path from
+    // the root FileDescriptorProto to the place where the definition appears.  For
+    // example, this path:
+    //   [ 4, 3, 2, 7, 1 ]
+    // refers to:
+    //   file.message_type(3)  // 4, 3
+    //       .field(7)         // 2, 7
+    //       .name()           // 1
+    // This is because FileDescriptorProto.message_type has field number 4:
+    //   repeated DescriptorProto message_type = 4;
+    // and DescriptorProto.field has field number 2:
+    //   repeated FieldDescriptorProto field = 2;
+    // and FieldDescriptorProto.name has field number 1:
+    //   optional string name = 1;
+    //
+    // Thus, the above path gives the location of a field name.  If we removed
+    // the last element:
+    //   [ 4, 3, 2, 7 ]
+    // this path refers to the whole field declaration (from the beginning
+    // of the label to the terminating semicolon).
+    repeated int32 path = 1 [packed=true];
+
+    // Always has exactly three or four elements: start line, start column,
+    // end line (optional, otherwise assumed same as start line), end column.
+    // These are packed into a single field for efficiency.  Note that line
+    // and column numbers are zero-based -- typically you will want to add
+    // 1 to each before displaying to a user.
+    repeated int32 span = 2 [packed=true];
+
+    // If this SourceCodeInfo represents a complete declaration, these are any
+    // comments appearing before and after the declaration which appear to be
+    // attached to the declaration.
+    //
+    // A series of line comments appearing on consecutive lines, with no other
+    // tokens appearing on those lines, will be treated as a single comment.
+    //
+    // leading_detached_comments will keep paragraphs of comments that appear
+    // before (but not connected to) the current element. Each paragraph,
+    // separated by empty lines, will be one comment element in the repeated
+    // field.
+    //
+    // Only the comment content is provided; comment markers (e.g. //) are
+    // stripped out.  For block comments, leading whitespace and an asterisk
+    // will be stripped from the beginning of each line other than the first.
+    // Newlines are included in the output.
+    //
+    // Examples:
+    //
+    //   optional int32 foo = 1;  // Comment attached to foo.
+    //   // Comment attached to bar.
+    //   optional int32 bar = 2;
+    //
+    //   optional string baz = 3;
+    //   // Comment attached to baz.
+    //   // Another line attached to baz.
+    //
+    //   // Comment attached to qux.
+    //   //
+    //   // Another line attached to qux.
+    //   optional double qux = 4;
+    //
+    //   // Detached comment for corge. This is not leading or trailing comments
+    //   // to qux or corge because there are blank lines separating it from
+    //   // both.
+    //
+    //   // Detached comment for corge paragraph 2.
+    //
+    //   optional string corge = 5;
+    //   /* Block comment attached
+    //    * to corge.  Leading asterisks
+    //    * will be removed. */
+    //   /* Block comment attached to
+    //    * grault. */
+    //   optional int32 grault = 6;
+    //
+    //   // ignored detached comments.
+    optional string leading_comments = 3;
+    optional string trailing_comments = 4;
+    repeated string leading_detached_comments = 6;
+  }
+}
+
+// Describes the relationship between generated code and its original source
+// file. A GeneratedCodeInfo message is associated with only one generated
+// source file, but may contain references to different source .proto files.
+message GeneratedCodeInfo {
+  // An Annotation connects some span of text in generated code to an element
+  // of its generating .proto file.
+  repeated Annotation annotation = 1;
+  message Annotation {
+    // Identifies the element in the original source .proto file. This field
+    // is formatted the same as SourceCodeInfo.Location.path.
+    repeated int32 path = 1 [packed=true];
+
+    // Identifies the filesystem path to the original source .proto.
+    optional string source_file = 2;
+
+    // Identifies the starting offset in bytes in the generated code
+    // that relates to the identified object.
+    optional int32 begin = 3;
+
+    // Identifies the ending offset in bytes in the generated code that
+    // relates to the identified object. The end offset should be one past
+    // the last relevant byte (so the length of the text = end - begin).
+    optional int32 end = 4;
+  }
+}
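
The descriptor messages above (FileDescriptorProto, DescriptorProto, FieldDescriptorProto and the *Options types) compose into a complete description of a .proto file. A minimal sketch of that composition in Go, assuming the generated bindings for descriptor.proto are vendored as github.com/golang/protobuf/protoc-gen-go/descriptor; this is illustrative only and not part of the diff:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	// A FileDescriptorProto describing, by hand, what protoc would emit for:
	//   message Person { optional string name = 1; }
	fd := &descriptor.FileDescriptorProto{
		Name:    proto.String("person.proto"),
		Package: proto.String("example"),
		MessageType: []*descriptor.DescriptorProto{{
			Name: proto.String("Person"),
			Field: []*descriptor.FieldDescriptorProto{{
				Name:   proto.String("name"),
				Number: proto.Int32(1),
				Label:  descriptor.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
				Type:   descriptor.FieldDescriptorProto_TYPE_STRING.Enum(),
			}},
		}},
	}
	fmt.Println(proto.MarshalTextString(fd))
}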

+ 139 - 0
vendor/github.com/golang/protobuf/ptypes/any.go

@@ -0,0 +1,139 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2016 The Go Authors.  All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package ptypes
+
+// This file implements functions to marshal proto.Message to/from
+// google.protobuf.Any message.
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	"github.com/golang/protobuf/ptypes/any"
+)
+
+const googleApis = "type.googleapis.com/"
+
+// AnyMessageName returns the name of the message contained in a google.protobuf.Any message.
+//
+// Note that regular type assertions should be done using the Is
+// function. AnyMessageName is provided for less common use cases like filtering a
+// sequence of Any messages based on a set of allowed message type names.
+func AnyMessageName(any *any.Any) (string, error) {
+	if any == nil {
+		return "", fmt.Errorf("message is nil")
+	}
+	slash := strings.LastIndex(any.TypeUrl, "/")
+	if slash < 0 {
+		return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl)
+	}
+	return any.TypeUrl[slash+1:], nil
+}
+
+// MarshalAny takes the protocol buffer and encodes it into google.protobuf.Any.
+func MarshalAny(pb proto.Message) (*any.Any, error) {
+	value, err := proto.Marshal(pb)
+	if err != nil {
+		return nil, err
+	}
+	return &any.Any{TypeUrl: googleApis + proto.MessageName(pb), Value: value}, nil
+}
+
+// DynamicAny is a value that can be passed to UnmarshalAny to automatically
+// allocate a proto.Message for the type specified in a google.protobuf.Any
+// message. The allocated message is stored in the embedded proto.Message.
+//
+// Example:
+//
+//   var x ptypes.DynamicAny
+//   if err := ptypes.UnmarshalAny(a, &x); err != nil { ... }
+//   fmt.Printf("unmarshaled message: %v", x.Message)
+type DynamicAny struct {
+	proto.Message
+}
+
+// Empty returns a new proto.Message of the type specified in a
+// google.protobuf.Any message. It returns an error if the corresponding message
+// type isn't linked in.
+func Empty(any *any.Any) (proto.Message, error) {
+	aname, err := AnyMessageName(any)
+	if err != nil {
+		return nil, err
+	}
+
+	t := proto.MessageType(aname)
+	if t == nil {
+		return nil, fmt.Errorf("any: message type %q isn't linked in", aname)
+	}
+	return reflect.New(t.Elem()).Interface().(proto.Message), nil
+}
+
+// UnmarshalAny parses the protocol buffer representation in a google.protobuf.Any
+// message and places the decoded result in pb. It returns an error if the type of
+// the contents of the Any message does not match the type of the pb message.
+//
+// pb can be a proto.Message, or a *DynamicAny.
+func UnmarshalAny(any *any.Any, pb proto.Message) error {
+	if d, ok := pb.(*DynamicAny); ok {
+		if d.Message == nil {
+			var err error
+			d.Message, err = Empty(any)
+			if err != nil {
+				return err
+			}
+		}
+		return UnmarshalAny(any, d.Message)
+	}
+
+	aname, err := AnyMessageName(any)
+	if err != nil {
+		return err
+	}
+
+	mname := proto.MessageName(pb)
+	if aname != mname {
+		return fmt.Errorf("mismatched message type: got %q want %q", aname, mname)
+	}
+	return proto.Unmarshal(any.Value, pb)
+}
+
+// Is returns true if the given Any value contains a message of the given type.
+func Is(any *any.Any, pb proto.Message) bool {
+	aname, err := AnyMessageName(any)
+	if err != nil {
+		return false
+	}
+
+	return aname == proto.MessageName(pb)
+}
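
The helpers above (MarshalAny, UnmarshalAny, Is) cover the common pack/unpack round trip. A minimal usage sketch, assuming the vendored ptypes and ptypes/duration packages are importable; illustrative only, not part of the diff:

package main

import (
	"fmt"

	"github.com/golang/protobuf/ptypes"
	durpb "github.com/golang/protobuf/ptypes/duration"
)

func main() {
	d := &durpb.Duration{Seconds: 3, Nanos: 500000000}

	// MarshalAny serializes d and records its type under the
	// "type.googleapis.com/" prefix in TypeUrl.
	a, err := ptypes.MarshalAny(d)
	if err != nil {
		panic(err)
	}

	// Is checks the embedded type without unmarshaling the payload.
	if ptypes.Is(a, &durpb.Duration{}) {
		out := &durpb.Duration{}
		if err := ptypes.UnmarshalAny(a, out); err != nil {
			panic(err)
		}
		fmt.Println(out.Seconds, out.Nanos) // 3 500000000
	}
}

For payloads whose concrete type is only known at runtime, UnmarshalAny also accepts a *DynamicAny, which allocates the message via Empty.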

+ 178 - 0
vendor/github.com/golang/protobuf/ptypes/any/any.pb.go

@@ -0,0 +1,178 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/protobuf/any.proto
+
+/*
+Package any is a generated protocol buffer package.
+
+It is generated from these files:
+	google/protobuf/any.proto
+
+It has these top-level messages:
+	Any
+*/
+package any
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// `Any` contains an arbitrary serialized protocol buffer message along with a
+// URL that describes the type of the serialized message.
+//
+// The protobuf library provides support to pack/unpack Any values in the form
+// of utility functions or additional generated methods of the Any type.
+//
+// Example 1: Pack and unpack a message in C++.
+//
+//     Foo foo = ...;
+//     Any any;
+//     any.PackFrom(foo);
+//     ...
+//     if (any.UnpackTo(&foo)) {
+//       ...
+//     }
+//
+// Example 2: Pack and unpack a message in Java.
+//
+//     Foo foo = ...;
+//     Any any = Any.pack(foo);
+//     ...
+//     if (any.is(Foo.class)) {
+//       foo = any.unpack(Foo.class);
+//     }
+//
+//  Example 3: Pack and unpack a message in Python.
+//
+//     foo = Foo(...)
+//     any = Any()
+//     any.Pack(foo)
+//     ...
+//     if any.Is(Foo.DESCRIPTOR):
+//       any.Unpack(foo)
+//       ...
+//
+//  Example 4: Pack and unpack a message in Go
+//
+//      foo := &pb.Foo{...}
+//      any, err := ptypes.MarshalAny(foo)
+//      ...
+//      foo := &pb.Foo{}
+//      if err := ptypes.UnmarshalAny(any, foo); err != nil {
+//        ...
+//      }
+//
+// The pack methods provided by protobuf library will by default use
+// 'type.googleapis.com/full.type.name' as the type URL and the unpack
+// methods only use the fully qualified type name after the last '/'
+// in the type URL, for example "foo.bar.com/x/y.z" will yield type
+// name "y.z".
+//
+//
+// JSON
+// ====
+// The JSON representation of an `Any` value uses the regular
+// representation of the deserialized, embedded message, with an
+// additional field `@type` which contains the type URL. Example:
+//
+//     package google.profile;
+//     message Person {
+//       string first_name = 1;
+//       string last_name = 2;
+//     }
+//
+//     {
+//       "@type": "type.googleapis.com/google.profile.Person",
+//       "firstName": <string>,
+//       "lastName": <string>
+//     }
+//
+// If the embedded message type is well-known and has a custom JSON
+// representation, that representation will be embedded adding a field
+// `value` which holds the custom JSON in addition to the `@type`
+// field. Example (for message [google.protobuf.Duration][]):
+//
+//     {
+//       "@type": "type.googleapis.com/google.protobuf.Duration",
+//       "value": "1.212s"
+//     }
+//
+type Any struct {
+	// A URL/resource name whose content describes the type of the
+	// serialized protocol buffer message.
+	//
+	// For URLs which use the scheme `http`, `https`, or no scheme, the
+	// following restrictions and interpretations apply:
+	//
+	// * If no scheme is provided, `https` is assumed.
+	// * The last segment of the URL's path must represent the fully
+	//   qualified name of the type (as in `path/google.protobuf.Duration`).
+	//   The name should be in a canonical form (e.g., leading "." is
+	//   not accepted).
+	// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+	//   value in binary format, or produce an error.
+	// * Applications are allowed to cache lookup results based on the
+	//   URL, or have them precompiled into a binary to avoid any
+	//   lookup. Therefore, binary compatibility needs to be preserved
+	//   on changes to types. (Use versioned type names to manage
+	//   breaking changes.)
+	//
+	// Schemes other than `http`, `https` (or the empty scheme) might be
+	// used with implementation specific semantics.
+	//
+	TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl" json:"type_url,omitempty"`
+	// Must be a valid serialized protocol buffer of the above specified type.
+	Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
+}
+
+func (m *Any) Reset()                    { *m = Any{} }
+func (m *Any) String() string            { return proto.CompactTextString(m) }
+func (*Any) ProtoMessage()               {}
+func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+func (*Any) XXX_WellKnownType() string   { return "Any" }
+
+func (m *Any) GetTypeUrl() string {
+	if m != nil {
+		return m.TypeUrl
+	}
+	return ""
+}
+
+func (m *Any) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*Any)(nil), "google.protobuf.Any")
+}
+
+func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor0) }
+
+var fileDescriptor0 = []byte{
+	// 185 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f,
+	0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4,
+	0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a,
+	0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46,
+	0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7,
+	0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xca, 0xe7, 0x12, 0x4e, 0xce,
+	0xcf, 0xd5, 0x43, 0x33, 0xce, 0x89, 0xc3, 0x31, 0xaf, 0x32, 0x00, 0xc4, 0x09, 0x60, 0x8c, 0x52,
+	0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc,
+	0x4b, 0x47, 0xb8, 0xa8, 0x00, 0x64, 0x7a, 0x31, 0xc8, 0x61, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c,
+	0x56, 0x31, 0xc9, 0xb9, 0x43, 0x8c, 0x0a, 0x80, 0x2a, 0xd1, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce,
+	0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0x29, 0x4d, 0x62, 0x03, 0xeb, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff,
+	0xff, 0x13, 0xf8, 0xe8, 0x42, 0xdd, 0x00, 0x00, 0x00,
+}

+ 149 - 0
vendor/github.com/golang/protobuf/ptypes/any/any.proto

@@ -0,0 +1,149 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package google.protobuf;
+
+option csharp_namespace = "Google.Protobuf.WellKnownTypes";
+option go_package = "github.com/golang/protobuf/ptypes/any";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "AnyProto";
+option java_multiple_files = true;
+option objc_class_prefix = "GPB";
+
+// `Any` contains an arbitrary serialized protocol buffer message along with a
+// URL that describes the type of the serialized message.
+//
+// The protobuf library provides support to pack/unpack Any values in the form
+// of utility functions or additional generated methods of the Any type.
+//
+// Example 1: Pack and unpack a message in C++.
+//
+//     Foo foo = ...;
+//     Any any;
+//     any.PackFrom(foo);
+//     ...
+//     if (any.UnpackTo(&foo)) {
+//       ...
+//     }
+//
+// Example 2: Pack and unpack a message in Java.
+//
+//     Foo foo = ...;
+//     Any any = Any.pack(foo);
+//     ...
+//     if (any.is(Foo.class)) {
+//       foo = any.unpack(Foo.class);
+//     }
+//
+//  Example 3: Pack and unpack a message in Python.
+//
+//     foo = Foo(...)
+//     any = Any()
+//     any.Pack(foo)
+//     ...
+//     if any.Is(Foo.DESCRIPTOR):
+//       any.Unpack(foo)
+//       ...
+//
+//  Example 4: Pack and unpack a message in Go
+//
+//      foo := &pb.Foo{...}
+//      any, err := ptypes.MarshalAny(foo)
+//      ...
+//      foo := &pb.Foo{}
+//      if err := ptypes.UnmarshalAny(any, foo); err != nil {
+//        ...
+//      }
+//
+// The pack methods provided by protobuf library will by default use
+// 'type.googleapis.com/full.type.name' as the type URL and the unpack
+// methods only use the fully qualified type name after the last '/'
+// in the type URL, for example "foo.bar.com/x/y.z" will yield type
+// name "y.z".
+//
+//
+// JSON
+// ====
+// The JSON representation of an `Any` value uses the regular
+// representation of the deserialized, embedded message, with an
+// additional field `@type` which contains the type URL. Example:
+//
+//     package google.profile;
+//     message Person {
+//       string first_name = 1;
+//       string last_name = 2;
+//     }
+//
+//     {
+//       "@type": "type.googleapis.com/google.profile.Person",
+//       "firstName": <string>,
+//       "lastName": <string>
+//     }
+//
+// If the embedded message type is well-known and has a custom JSON
+// representation, that representation will be embedded adding a field
+// `value` which holds the custom JSON in addition to the `@type`
+// field. Example (for message [google.protobuf.Duration][]):
+//
+//     {
+//       "@type": "type.googleapis.com/google.protobuf.Duration",
+//       "value": "1.212s"
+//     }
+//
+message Any {
+  // A URL/resource name whose content describes the type of the
+  // serialized protocol buffer message.
+  //
+  // For URLs which use the scheme `http`, `https`, or no scheme, the
+  // following restrictions and interpretations apply:
+  //
+  // * If no scheme is provided, `https` is assumed.
+  // * The last segment of the URL's path must represent the fully
+  //   qualified name of the type (as in `path/google.protobuf.Duration`).
+  //   The name should be in a canonical form (e.g., leading "." is
+  //   not accepted).
+  // * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+  //   value in binary format, or produce an error.
+  // * Applications are allowed to cache lookup results based on the
+  //   URL, or have them precompiled into a binary to avoid any
+  //   lookup. Therefore, binary compatibility needs to be preserved
+  //   on changes to types. (Use versioned type names to manage
+  //   breaking changes.)
+  //
+  // Schemes other than `http`, `https` (or the empty scheme) might be
+  // used with implementation specific semantics.
+  //
+  string type_url = 1;
+
+  // Must be a valid serialized protocol buffer of the above specified type.
+  bytes value = 2;
+}

+ 35 - 0
vendor/github.com/golang/protobuf/ptypes/doc.go

@@ -0,0 +1,35 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2016 The Go Authors.  All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/*
+Package ptypes contains code for interacting with well-known types.
+*/
+package ptypes

+ 102 - 0
vendor/github.com/golang/protobuf/ptypes/duration.go

@@ -0,0 +1,102 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2016 The Go Authors.  All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package ptypes
+
+// This file implements conversions between google.protobuf.Duration
+// and time.Duration.
+
+import (
+	"errors"
+	"fmt"
+	"time"
+
+	durpb "github.com/golang/protobuf/ptypes/duration"
+)
+
+const (
+	// Range of a durpb.Duration in seconds, as specified in
+	// google/protobuf/duration.proto. This is about 10,000 years in seconds.
+	maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60)
+	minSeconds = -maxSeconds
+)
+
+// validateDuration determines whether the durpb.Duration is valid according to the
+// definition in google/protobuf/duration.proto. A valid durpb.Duration
+// may still be too large to fit into a time.Duration (the range of durpb.Duration
+// is about 10,000 years, and the range of time.Duration is about 290 years).
+func validateDuration(d *durpb.Duration) error {
+	if d == nil {
+		return errors.New("duration: nil Duration")
+	}
+	if d.Seconds < minSeconds || d.Seconds > maxSeconds {
+		return fmt.Errorf("duration: %v: seconds out of range", d)
+	}
+	if d.Nanos <= -1e9 || d.Nanos >= 1e9 {
+		return fmt.Errorf("duration: %v: nanos out of range", d)
+	}
+	// Seconds and Nanos must have the same sign, unless d.Nanos is zero.
+	if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) {
+		return fmt.Errorf("duration: %v: seconds and nanos have different signs", d)
+	}
+	return nil
+}
+
+// Duration converts a durpb.Duration to a time.Duration. Duration
+// returns an error if the durpb.Duration is invalid or is too large to be
+// represented in a time.Duration.
+func Duration(p *durpb.Duration) (time.Duration, error) {
+	if err := validateDuration(p); err != nil {
+		return 0, err
+	}
+	d := time.Duration(p.Seconds) * time.Second
+	if int64(d/time.Second) != p.Seconds {
+		return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p)
+	}
+	if p.Nanos != 0 {
+		d += time.Duration(p.Nanos)
+		if (d < 0) != (p.Nanos < 0) {
+			return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p)
+		}
+	}
+	return d, nil
+}
+
+// DurationProto converts a time.Duration to a durpb.Duration.
+func DurationProto(d time.Duration) *durpb.Duration {
+	nanos := d.Nanoseconds()
+	secs := nanos / 1e9
+	nanos -= secs * 1e9
+	return &durpb.Duration{
+		Seconds: secs,
+		Nanos:   int32(nanos),
+	}
+}
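
Duration and DurationProto are inverse conversions between time.Duration and the durpb.Duration message, with Duration enforcing the range and sign rules from validateDuration. A short usage sketch, assuming the vendored ptypes package; illustrative only, not part of the diff:

package main

import (
	"fmt"
	"time"

	"github.com/golang/protobuf/ptypes"
)

func main() {
	// time.Duration -> durpb.Duration: 90.5s becomes Seconds: 90, Nanos: 500000000.
	pb := ptypes.DurationProto(90*time.Second + 500*time.Millisecond)

	// durpb.Duration -> time.Duration, validating range and sign first.
	d, err := ptypes.Duration(pb)
	if err != nil {
		panic(err)
	}
	fmt.Println(d) // 1m30.5s
}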

+ 144 - 0
vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go

@@ -0,0 +1,144 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/protobuf/duration.proto
+
+/*
+Package duration is a generated protocol buffer package.
+
+It is generated from these files:
+	google/protobuf/duration.proto
+
+It has these top-level messages:
+	Duration
+*/
+package duration
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// A Duration represents a signed, fixed-length span of time represented
+// as a count of seconds and fractions of seconds at nanosecond
+// resolution. It is independent of any calendar and concepts like "day"
+// or "month". It is related to Timestamp in that the difference between
+// two Timestamp values is a Duration and it can be added or subtracted
+// from a Timestamp. Range is approximately +-10,000 years.
+//
+// # Examples
+//
+// Example 1: Compute Duration from two Timestamps in pseudo code.
+//
+//     Timestamp start = ...;
+//     Timestamp end = ...;
+//     Duration duration = ...;
+//
+//     duration.seconds = end.seconds - start.seconds;
+//     duration.nanos = end.nanos - start.nanos;
+//
+//     if (duration.seconds < 0 && duration.nanos > 0) {
+//       duration.seconds += 1;
+//       duration.nanos -= 1000000000;
+//     } else if (duration.seconds > 0 && duration.nanos < 0) {
+//       duration.seconds -= 1;
+//       duration.nanos += 1000000000;
+//     }
+//
+// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+//
+//     Timestamp start = ...;
+//     Duration duration = ...;
+//     Timestamp end = ...;
+//
+//     end.seconds = start.seconds + duration.seconds;
+//     end.nanos = start.nanos + duration.nanos;
+//
+//     if (end.nanos < 0) {
+//       end.seconds -= 1;
+//       end.nanos += 1000000000;
+//     } else if (end.nanos >= 1000000000) {
+//       end.seconds += 1;
+//       end.nanos -= 1000000000;
+//     }
+//
+// Example 3: Compute Duration from datetime.timedelta in Python.
+//
+//     td = datetime.timedelta(days=3, minutes=10)
+//     duration = Duration()
+//     duration.FromTimedelta(td)
+//
+// # JSON Mapping
+//
+// In JSON format, the Duration type is encoded as a string rather than an
+// object, where the string ends in the suffix "s" (indicating seconds) and
+// is preceded by the number of seconds, with nanoseconds expressed as
+// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+// microsecond should be expressed in JSON format as "3.000001s".
+//
+//
+type Duration struct {
+	// Signed seconds of the span of time. Must be from -315,576,000,000
+	// to +315,576,000,000 inclusive. Note: these bounds are computed from:
+	// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+	Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"`
+	// Signed fractions of a second at nanosecond resolution of the span
+	// of time. Durations less than one second are represented with a 0
+	// `seconds` field and a positive or negative `nanos` field. For durations
+	// of one second or more, a non-zero value for the `nanos` field must be
+	// of the same sign as the `seconds` field. Must be from -999,999,999
+	// to +999,999,999 inclusive.
+	Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"`
+}
+
+func (m *Duration) Reset()                    { *m = Duration{} }
+func (m *Duration) String() string            { return proto.CompactTextString(m) }
+func (*Duration) ProtoMessage()               {}
+func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+func (*Duration) XXX_WellKnownType() string   { return "Duration" }
+
+func (m *Duration) GetSeconds() int64 {
+	if m != nil {
+		return m.Seconds
+	}
+	return 0
+}
+
+func (m *Duration) GetNanos() int32 {
+	if m != nil {
+		return m.Nanos
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*Duration)(nil), "google.protobuf.Duration")
+}
+
+func init() { proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor0) }
+
+var fileDescriptor0 = []byte{
+	// 190 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
+	0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a,
+	0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56,
+	0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5,
+	0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e,
+	0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0xc3, 0x25, 0x9c, 0x9c,
+	0x9f, 0xab, 0x87, 0x66, 0xa4, 0x13, 0x2f, 0xcc, 0xc0, 0x00, 0x90, 0x48, 0x00, 0x63, 0x94, 0x56,
+	0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e,
+	0x3a, 0xc2, 0x7d, 0x05, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x70, 0x67, 0xfe, 0x60, 0x64, 0x5c, 0xc4,
+	0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, 0x00, 0x54, 0xa9, 0x5e, 0x78,
+	0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, 0x12, 0x1b, 0xd8, 0x0c, 0x63,
+	0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x30, 0xff, 0xf3, 0x00, 0x00, 0x00,
+}
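
The pseudo code in the Duration comments above relies on seconds and nanos carrying the same sign. A small Go sketch of that normalization step, mirroring Example 1; illustrative only, not part of the generated file:

package main

import "fmt"

// normalize applies the same-sign rule from the Duration comments: when the raw
// seconds/nanos difference disagrees in sign, borrow one second from the seconds part.
func normalize(secs int64, nanos int32) (int64, int32) {
	if secs < 0 && nanos > 0 {
		secs++
		nanos -= 1000000000
	} else if secs > 0 && nanos < 0 {
		secs--
		nanos += 1000000000
	}
	return secs, nanos
}

func main() {
	// end - start for end = (5s, 200000000ns) and start = (3s, 700000000ns):
	// the raw difference (2s, -500000000ns) normalizes to (1s, 500000000ns).
	fmt.Println(normalize(5-3, 200000000-700000000))
}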

+ 117 - 0
vendor/github.com/golang/protobuf/ptypes/duration/duration.proto

@@ -0,0 +1,117 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package google.protobuf;
+
+option csharp_namespace = "Google.Protobuf.WellKnownTypes";
+option cc_enable_arenas = true;
+option go_package = "github.com/golang/protobuf/ptypes/duration";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "DurationProto";
+option java_multiple_files = true;
+option objc_class_prefix = "GPB";
+
+// A Duration represents a signed, fixed-length span of time represented
+// as a count of seconds and fractions of seconds at nanosecond
+// resolution. It is independent of any calendar and concepts like "day"
+// or "month". It is related to Timestamp in that the difference between
+// two Timestamp values is a Duration and it can be added or subtracted
+// from a Timestamp. Range is approximately +-10,000 years.
+//
+// # Examples
+//
+// Example 1: Compute Duration from two Timestamps in pseudo code.
+//
+//     Timestamp start = ...;
+//     Timestamp end = ...;
+//     Duration duration = ...;
+//
+//     duration.seconds = end.seconds - start.seconds;
+//     duration.nanos = end.nanos - start.nanos;
+//
+//     if (duration.seconds < 0 && duration.nanos > 0) {
+//       duration.seconds += 1;
+//       duration.nanos -= 1000000000;
+//     } else if (duration.seconds > 0 && duration.nanos < 0) {
+//       duration.seconds -= 1;
+//       duration.nanos += 1000000000;
+//     }
+//
+// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+//
+//     Timestamp start = ...;
+//     Duration duration = ...;
+//     Timestamp end = ...;
+//
+//     end.seconds = start.seconds + duration.seconds;
+//     end.nanos = start.nanos + duration.nanos;
+//
+//     if (end.nanos < 0) {
+//       end.seconds -= 1;
+//       end.nanos += 1000000000;
+//     } else if (end.nanos >= 1000000000) {
+//       end.seconds += 1;
+//       end.nanos -= 1000000000;
+//     }
+//
+// Example 3: Compute Duration from datetime.timedelta in Python.
+//
+//     td = datetime.timedelta(days=3, minutes=10)
+//     duration = Duration()
+//     duration.FromTimedelta(td)
+//
+// # JSON Mapping
+//
+// In JSON format, the Duration type is encoded as a string rather than an
+// object, where the string ends in the suffix "s" (indicating seconds) and
+// is preceded by the number of seconds, with nanoseconds expressed as
+// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+// microsecond should be expressed in JSON format as "3.000001s".
+//
+//
+message Duration {
+
+  // Signed seconds of the span of time. Must be from -315,576,000,000
+  // to +315,576,000,000 inclusive. Note: these bounds are computed from:
+  // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+  int64 seconds = 1;
+
+  // Signed fractions of a second at nanosecond resolution of the span
+  // of time. Durations less than one second are represented with a 0
+  // `seconds` field and a positive or negative `nanos` field. For durations
+  // of one second or more, a non-zero value for the `nanos` field must be
+  // of the same sign as the `seconds` field. Must be from -999,999,999
+  // to +999,999,999 inclusive.
+  int32 nanos = 2;
+}
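
The generated Duration message above is normally not filled in by hand; the ptypes helpers earlier in this diff convert to and from Go's native time.Duration. A minimal sketch of that round trip, assuming the vendored packages are importable under their canonical paths:

```go
package main

import (
	"fmt"
	"time"

	"github.com/golang/protobuf/ptypes"
)

func main() {
	// Convert a native time.Duration into the proto message. 3s + 1ns
	// corresponds to Seconds: 3, Nanos: 1, i.e. "3.000000001s" in JSON.
	d := 3*time.Second + 1*time.Nanosecond
	pb := ptypes.DurationProto(d)
	fmt.Println(pb.Seconds, pb.Nanos) // 3 1

	// Convert back, subject to the seconds/nanos ranges described above.
	back, err := ptypes.Duration(pb)
	if err != nil {
		panic(err)
	}
	fmt.Println(back) // 3.000000001s
}
```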

+ 43 - 0
vendor/github.com/golang/protobuf/ptypes/regen.sh

@@ -0,0 +1,43 @@
+#!/bin/bash -e
+#
+# This script fetches and rebuilds the "well-known types" protocol buffers.
+# To run this you will need protoc and goprotobuf installed;
+# see https://github.com/golang/protobuf for instructions.
+# You also need Go and Git installed.
+
+PKG=github.com/golang/protobuf/ptypes
+UPSTREAM=https://github.com/google/protobuf
+UPSTREAM_SUBDIR=src/google/protobuf
+PROTO_FILES=(any duration empty struct timestamp wrappers)
+
+function die() {
+  echo 1>&2 $*
+  exit 1
+}
+
+# Sanity check that the right tools are accessible.
+for tool in go git protoc protoc-gen-go; do
+  q=$(which $tool) || die "didn't find $tool"
+  echo 1>&2 "$tool: $q"
+done
+
+tmpdir=$(mktemp -d -t regen-wkt.XXXXXX)
+trap 'rm -rf $tmpdir' EXIT
+
+echo -n 1>&2 "finding package dir... "
+pkgdir=$(go list -f '{{.Dir}}' $PKG)
+echo 1>&2 $pkgdir
+base=$(echo $pkgdir | sed "s,/$PKG\$,,")
+echo 1>&2 "base: $base"
+cd "$base"
+
+echo 1>&2 "fetching latest protos... "
+git clone -q $UPSTREAM $tmpdir
+
+for file in ${PROTO_FILES[@]}; do
+  echo 1>&2 "* $file"
+  protoc --go_out=. -I$tmpdir/src $tmpdir/src/google/protobuf/$file.proto || die
+  cp $tmpdir/src/google/protobuf/$file.proto $PKG/$file
+done
+
+echo 1>&2 "All OK"

+ 134 - 0
vendor/github.com/golang/protobuf/ptypes/timestamp.go

@@ -0,0 +1,134 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2016 The Go Authors.  All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package ptypes
+
+// This file implements operations on google.protobuf.Timestamp.
+
+import (
+	"errors"
+	"fmt"
+	"time"
+
+	tspb "github.com/golang/protobuf/ptypes/timestamp"
+)
+
+const (
+	// Seconds field of the earliest valid Timestamp.
+	// This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
+	minValidSeconds = -62135596800
+	// Seconds field just after the latest valid Timestamp.
+	// This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
+	maxValidSeconds = 253402300800
+)
+
+// validateTimestamp determines whether a Timestamp is valid.
+// A valid timestamp represents a time in the range
+// [0001-01-01, 10000-01-01) and has a Nanos field
+// in the range [0, 1e9).
+//
+// If the Timestamp is valid, validateTimestamp returns nil.
+// Otherwise, it returns an error that describes
+// the problem.
+//
+// Every valid Timestamp can be represented by a time.Time, but the converse is not true.
+func validateTimestamp(ts *tspb.Timestamp) error {
+	if ts == nil {
+		return errors.New("timestamp: nil Timestamp")
+	}
+	if ts.Seconds < minValidSeconds {
+		return fmt.Errorf("timestamp: %v before 0001-01-01", ts)
+	}
+	if ts.Seconds >= maxValidSeconds {
+		return fmt.Errorf("timestamp: %v after 10000-01-01", ts)
+	}
+	if ts.Nanos < 0 || ts.Nanos >= 1e9 {
+		return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts)
+	}
+	return nil
+}
+
+// Timestamp converts a google.protobuf.Timestamp proto to a time.Time.
+// It returns an error if the argument is invalid.
+//
+// Unlike most Go functions, if Timestamp returns an error, the first return value
+// is not the zero time.Time. Instead, it is the value obtained from the
+// time.Unix function when passed the contents of the Timestamp, in the UTC
+// locale. This may or may not be a meaningful time; many invalid Timestamps
+// do map to valid time.Times.
+//
+// A nil Timestamp returns an error. The first return value in that case is
+// undefined.
+func Timestamp(ts *tspb.Timestamp) (time.Time, error) {
+	// Don't return the zero value on error, because it corresponds to a valid
+	// timestamp. Instead return whatever time.Unix gives us.
+	var t time.Time
+	if ts == nil {
+		t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp
+	} else {
+		t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC()
+	}
+	return t, validateTimestamp(ts)
+}
+
+// TimestampNow returns a google.protobuf.Timestamp for the current time.
+func TimestampNow() *tspb.Timestamp {
+	ts, err := TimestampProto(time.Now())
+	if err != nil {
+		panic("ptypes: time.Now() out of Timestamp range")
+	}
+	return ts
+}
+
+// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto.
+// It returns an error if the resulting Timestamp is invalid.
+func TimestampProto(t time.Time) (*tspb.Timestamp, error) {
+	seconds := t.Unix()
+	nanos := int32(t.Sub(time.Unix(seconds, 0)))
+	ts := &tspb.Timestamp{
+		Seconds: seconds,
+		Nanos:   nanos,
+	}
+	if err := validateTimestamp(ts); err != nil {
+		return nil, err
+	}
+	return ts, nil
+}
+
+// TimestampString returns the RFC 3339 string for valid Timestamps. For invalid
+// Timestamps, it returns an error message in parentheses.
+func TimestampString(ts *tspb.Timestamp) string {
+	t, err := Timestamp(ts)
+	if err != nil {
+		return fmt.Sprintf("(%v)", err)
+	}
+	return t.Format(time.RFC3339Nano)
+}
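
A minimal sketch of how the helpers above are typically used together, assuming both vendored packages are importable under their canonical paths:

```go
package main

import (
	"fmt"
	"time"

	"github.com/golang/protobuf/ptypes"
)

func main() {
	// time.Time -> google.protobuf.Timestamp, with range/nanos validation.
	now := time.Now()
	ts, err := ptypes.TimestampProto(now)
	if err != nil {
		panic(err)
	}

	// Timestamp -> time.Time (always in UTC), validated the same way.
	t, err := ptypes.Timestamp(ts)
	if err != nil {
		panic(err)
	}

	fmt.Println(ptypes.TimestampString(ts)) // RFC 3339, e.g. 2017-01-15T01:30:15.01Z
	fmt.Println(t.Equal(now))               // true: same instant, UTC location
}
```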

+ 160 - 0
vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go

@@ -0,0 +1,160 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/protobuf/timestamp.proto
+
+/*
+Package timestamp is a generated protocol buffer package.
+
+It is generated from these files:
+	google/protobuf/timestamp.proto
+
+It has these top-level messages:
+	Timestamp
+*/
+package timestamp
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// A Timestamp represents a point in time independent of any time zone
+// or calendar, represented as seconds and fractions of seconds at
+// nanosecond resolution in UTC Epoch time. It is encoded using the
+// Proleptic Gregorian Calendar which extends the Gregorian calendar
+// backwards to year one. It is encoded assuming all minutes are 60
+// seconds long, i.e. leap seconds are "smeared" so that no leap second
+// table is needed for interpretation. Range is from
+// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
+// By restricting to that range, we ensure that we can convert to
+// and from  RFC 3339 date strings.
+// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
+//
+// # Examples
+//
+// Example 1: Compute Timestamp from POSIX `time()`.
+//
+//     Timestamp timestamp;
+//     timestamp.set_seconds(time(NULL));
+//     timestamp.set_nanos(0);
+//
+// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+//
+//     struct timeval tv;
+//     gettimeofday(&tv, NULL);
+//
+//     Timestamp timestamp;
+//     timestamp.set_seconds(tv.tv_sec);
+//     timestamp.set_nanos(tv.tv_usec * 1000);
+//
+// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+//
+//     FILETIME ft;
+//     GetSystemTimeAsFileTime(&ft);
+//     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+//
+//     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+//     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+//     Timestamp timestamp;
+//     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+//     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+//
+// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+//
+//     long millis = System.currentTimeMillis();
+//
+//     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+//         .setNanos((int) ((millis % 1000) * 1000000)).build();
+//
+//
+// Example 5: Compute Timestamp from current time in Python.
+//
+//     timestamp = Timestamp()
+//     timestamp.GetCurrentTime()
+//
+// # JSON Mapping
+//
+// In JSON format, the Timestamp type is encoded as a string in the
+// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+// where {year} is always expressed using four digits while {month}, {day},
+// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+// is required, though only UTC (as indicated by "Z") is presently supported.
+//
+// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+// 01:30 UTC on January 15, 2017.
+//
+// In JavaScript, one can convert a Date object to this format using the
+// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+// method. In Python, a standard `datetime.datetime` object can be converted
+// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
+// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
+// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
+// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime())
+// to obtain a formatter capable of generating timestamps in this format.
+//
+//
+type Timestamp struct {
+	// Represents seconds of UTC time since Unix epoch
+	// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+	// 9999-12-31T23:59:59Z inclusive.
+	Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"`
+	// Non-negative fractions of a second at nanosecond resolution. Negative
+	// second values with fractions must still have non-negative nanos values
+	// that count forward in time. Must be from 0 to 999,999,999
+	// inclusive.
+	Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"`
+}
+
+func (m *Timestamp) Reset()                    { *m = Timestamp{} }
+func (m *Timestamp) String() string            { return proto.CompactTextString(m) }
+func (*Timestamp) ProtoMessage()               {}
+func (*Timestamp) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+func (*Timestamp) XXX_WellKnownType() string   { return "Timestamp" }
+
+func (m *Timestamp) GetSeconds() int64 {
+	if m != nil {
+		return m.Seconds
+	}
+	return 0
+}
+
+func (m *Timestamp) GetNanos() int32 {
+	if m != nil {
+		return m.Nanos
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp")
+}
+
+func init() { proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor0) }
+
+var fileDescriptor0 = []byte{
+	// 191 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f,
+	0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d,
+	0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28,
+	0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5,
+	0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89,
+	0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x1d, 0x97, 0x70,
+	0x72, 0x7e, 0xae, 0x1e, 0x9a, 0x99, 0x4e, 0x7c, 0x70, 0x13, 0x03, 0x40, 0x42, 0x01, 0x8c, 0x51,
+	0xda, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89,
+	0x79, 0xe9, 0x08, 0x27, 0x16, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x23, 0x5c, 0xfa, 0x83, 0x91, 0x71,
+	0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a,
+	0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43,
+	0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x77, 0x4a, 0x07, 0xf7, 0x00, 0x00, 0x00,
+}

+ 133 - 0
vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto

@@ -0,0 +1,133 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package google.protobuf;
+
+option csharp_namespace = "Google.Protobuf.WellKnownTypes";
+option cc_enable_arenas = true;
+option go_package = "github.com/golang/protobuf/ptypes/timestamp";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "TimestampProto";
+option java_multiple_files = true;
+option objc_class_prefix = "GPB";
+
+// A Timestamp represents a point in time independent of any time zone
+// or calendar, represented as seconds and fractions of seconds at
+// nanosecond resolution in UTC Epoch time. It is encoded using the
+// Proleptic Gregorian Calendar which extends the Gregorian calendar
+// backwards to year one. It is encoded assuming all minutes are 60
+// seconds long, i.e. leap seconds are "smeared" so that no leap second
+// table is needed for interpretation. Range is from
+// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
+// By restricting to that range, we ensure that we can convert to
+// and from  RFC 3339 date strings.
+// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
+//
+// # Examples
+//
+// Example 1: Compute Timestamp from POSIX `time()`.
+//
+//     Timestamp timestamp;
+//     timestamp.set_seconds(time(NULL));
+//     timestamp.set_nanos(0);
+//
+// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+//
+//     struct timeval tv;
+//     gettimeofday(&tv, NULL);
+//
+//     Timestamp timestamp;
+//     timestamp.set_seconds(tv.tv_sec);
+//     timestamp.set_nanos(tv.tv_usec * 1000);
+//
+// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+//
+//     FILETIME ft;
+//     GetSystemTimeAsFileTime(&ft);
+//     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+//
+//     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+//     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+//     Timestamp timestamp;
+//     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+//     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+//
+// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+//
+//     long millis = System.currentTimeMillis();
+//
+//     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+//         .setNanos((int) ((millis % 1000) * 1000000)).build();
+//
+//
+// Example 5: Compute Timestamp from current time in Python.
+//
+//     timestamp = Timestamp()
+//     timestamp.GetCurrentTime()
+//
+// # JSON Mapping
+//
+// In JSON format, the Timestamp type is encoded as a string in the
+// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+// where {year} is always expressed using four digits while {month}, {day},
+// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+// is required, though only UTC (as indicated by "Z") is presently supported.
+//
+// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+// 01:30 UTC on January 15, 2017.
+//
+// In JavaScript, one can convert a Date object to this format using the
+// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+// method. In Python, a standard `datetime.datetime` object can be converted
+// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
+// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
+// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
+// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime())
+// to obtain a formatter capable of generating timestamps in this format.
+//
+//
+message Timestamp {
+
+  // Represents seconds of UTC time since Unix epoch
+  // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+  // 9999-12-31T23:59:59Z inclusive.
+  int64 seconds = 1;
+
+  // Non-negative fractions of a second at nanosecond resolution. Negative
+  // second values with fractions must still have non-negative nanos values
+  // that count forward in time. Must be from 0 to 999,999,999
+  // inclusive.
+  int32 nanos = 2;
+}
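
The JSON mapping described above is what the jsonpb marshaler from the same repository produces for this well-known type. The following is a hedged sketch; it assumes jsonpb is also available in the vendor tree:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/jsonpb"
	tspb "github.com/golang/protobuf/ptypes/timestamp"
)

func main() {
	// 15.01 seconds past 01:30 UTC on January 15, 2017 (the example above).
	ts := &tspb.Timestamp{Seconds: 1484443815, Nanos: 10000000}

	m := &jsonpb.Marshaler{}
	s, err := m.MarshalToString(ts)
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // "2017-01-15T01:30:15.010Z"
}
```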

+ 21 - 0
vendor/github.com/hashicorp/go-hclog/LICENSE

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 HashiCorp
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 123 - 0
vendor/github.com/hashicorp/go-hclog/README.md

@@ -0,0 +1,123 @@
+# go-hclog
+
+[![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs]
+
+[godocs]: https://godoc.org/github.com/hashicorp/go-hclog
+
+`go-hclog` is a package for Go that provides a simple key/value logging
+interface for use in development and production environments.
+
+Unlike the standard library `log` package, it provides log levels so that
+output can be filtered down to the desired amount of detail.
+
+It does not provide `Printf` style logging, only key/value logging that is
+exposed as arguments to the logging functions for simplicity.
+
+It provides a human readable output mode for use in development as well as
+JSON output mode for production.
+
+## Stability Note
+
+While this library is fully open source and HashiCorp will be maintaining it
+(since we are and will be making extensive use of it), the API and output
+format is subject to minor changes as we fully bake and vet it in our projects.
+This notice will be removed once it's fully integrated into our major projects
+and no further changes are anticipated.
+
+## Installation and Docs
+
+Install using `go get github.com/hashicorp/go-hclog`.
+
+Full documentation is available at
+http://godoc.org/github.com/hashicorp/go-hclog
+
+## Usage
+
+### Use the global logger
+
+```go
+hclog.Default().Info("hello world")
+```
+
+```text
+2017-07-05T16:15:55.167-0700 [INFO ] hello world
+```
+
+(Note timestamps are removed in future examples for brevity.)
+
+### Create a new logger
+
+```go
+appLogger := hclog.New(&hclog.LoggerOptions{
+	Name:  "my-app",
+	Level: hclog.LevelFromString("DEBUG"),
+})
+```
+
+### Emit an Info level message with 2 key/value pairs
+
+```go
+input := "5.5"
+_, err := strconv.ParseInt(input, 10, 32)
+if err != nil {
+	appLogger.Info("Invalid input for ParseInt", "input", input, "error", err)
+}
+```
+
+```text
+... [INFO ] my-app: Invalid input for ParseInt: input=5.5 error="strconv.ParseInt: parsing "5.5": invalid syntax"
+```
+
+### Create a new Logger for a major subsystem
+
+```go
+subsystemLogger := appLogger.Named("transport")
+subsystemLogger.Info("we are transporting something")
+```
+
+```text
+... [INFO ] my-app.transport: we are transporting something
+```
+
+Notice that logs emitted by `subsystemLogger` contain `my-app.transport`,
+reflecting both the application and subsystem names.
+
+### Create a new Logger with fixed key/value pairs
+
+Using `With()` will include a specific key-value pair in all messages emitted
+by that logger.
+
+```go
+requestID := "5fb446b6-6eba-821d-df1b-cd7501b6a363"
+requestLogger := subsystemLogger.With("request", requestID)
+requestLogger.Info("we are transporting a request")
+```
+
+```text
+... [INFO ] my-app.transport: we are transporting a request: request=5fb446b6-6eba-821d-df1b-cd7501b6a363
+```
+
+This allows sub-loggers to carry context-specific data without having to
+thread it through every caller.
+
+### Use this with code that uses the standard library logger
+
+If you want to use the standard library's `log.Logger` interface you can wrap
+`hclog.Logger` by calling the `StandardLogger()` method. This allows you to use
+it with the familiar `Println()`, `Printf()`, etc. For example:
+
+```go
+stdLogger := appLogger.StandardLogger(&hclog.StandardLoggerOptions{
+	InferLevels: true,
+})
+// Printf() is provided by stdlib log.Logger interface, not hclog.Logger
+stdLogger.Printf("[DEBUG] %+v", stdLogger)
+```
+
+```text
+... [DEBUG] my-app: &{mu:{state:0 sema:0} prefix: flag:0 out:0xc42000a0a0 buf:[]}
+```
+
+Notice that if `appLogger` is initialized with the `INFO` log level _and_ you
+specify `InferLevels: true`, you will not see any output here. You must change
+`appLogger` to `DEBUG` to see output. See the docs for more information.

+ 34 - 0
vendor/github.com/hashicorp/go-hclog/global.go

@@ -0,0 +1,34 @@
+package hclog
+
+import (
+	"sync"
+)
+
+var (
+	protect sync.Once
+	def     Logger
+
+	// The options used to create the Default logger. These are
+	// read only when the Default logger is created, so set them
+	// as soon as the process starts.
+	DefaultOptions = &LoggerOptions{
+		Level:  DefaultLevel,
+		Output: DefaultOutput,
+	}
+)
+
+// Return a logger that is held globally. This can be a good starting
+// place, and then you can use .With() and .Named() to create sub-loggers
+// to be used in more specific contexts.
+func Default() Logger {
+	protect.Do(func() {
+		def = New(DefaultOptions)
+	})
+
+	return def
+}
+
+// A short alias for Default()
+func L() Logger {
+	return Default()
+}
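
Because DefaultOptions is read exactly once, inside the sync.Once above, any customization of the global logger has to happen before the first call that touches it. A small sketch of that ordering (the option values are illustrative):

```go
package main

import (
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

func main() {
	// DefaultOptions is consulted only when Default() first runs, so adjust
	// it before anything logs through hclog.L() or hclog.Default().
	hclog.DefaultOptions.Level = hclog.Debug
	hclog.DefaultOptions.Output = os.Stdout

	hclog.L().Debug("global logger configured", "pid", os.Getpid())
}
```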

+ 397 - 0
vendor/github.com/hashicorp/go-hclog/int.go

@@ -0,0 +1,397 @@
+package hclog
+
+import (
+	"bufio"
+	"encoding"
+	"encoding/json"
+	"fmt"
+	"log"
+	"os"
+	"runtime"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+)
+
+var (
+	_levelToBracket = map[Level]string{
+		Debug: "[DEBUG]",
+		Trace: "[TRACE]",
+		Info:  "[INFO ]",
+		Warn:  "[WARN ]",
+		Error: "[ERROR]",
+	}
+)
+
+// Given the options (nil for defaults), create a new Logger
+func New(opts *LoggerOptions) Logger {
+	if opts == nil {
+		opts = &LoggerOptions{}
+	}
+
+	output := opts.Output
+	if output == nil {
+		output = os.Stderr
+	}
+
+	level := opts.Level
+	if level == NoLevel {
+		level = DefaultLevel
+	}
+
+	return &intLogger{
+		m:      new(sync.Mutex),
+		json:   opts.JSONFormat,
+		caller: opts.IncludeLocation,
+		name:   opts.Name,
+		w:      bufio.NewWriter(output),
+		level:  level,
+	}
+}
+
+// The internal logger implementation. Internal in that it is defined entirely
+// by this package.
+type intLogger struct {
+	json   bool
+	caller bool
+	name   string
+
+	// this is a pointer so that it's shared by any derived loggers, since
+	// those derived loggers share the bufio.Writer as well.
+	m     *sync.Mutex
+	w     *bufio.Writer
+	level Level
+
+	implied []interface{}
+}
+
+// Make sure that intLogger is a Logger
+var _ Logger = &intLogger{}
+
+// The time format to use for logging. This is a version of RFC3339 that
+// contains millisecond precision
+const TimeFormat = "2006-01-02T15:04:05.000Z0700"
+
+// Log a message and a set of key/value pairs if the given level is at
+// or more severe that the threshold configured in the Logger.
+func (z *intLogger) Log(level Level, msg string, args ...interface{}) {
+	if level < z.level {
+		return
+	}
+
+	t := time.Now()
+
+	z.m.Lock()
+	defer z.m.Unlock()
+
+	if z.json {
+		z.logJson(t, level, msg, args...)
+	} else {
+		z.log(t, level, msg, args...)
+	}
+
+	z.w.Flush()
+}
+
+// Cleanup a path by returning the last 2 segments of the path only.
+func trimCallerPath(path string) string {
+	// lovely borrowed from zap
+	// nb. To make sure we trim the path correctly on Windows too, we
+	// counter-intuitively need to use '/' and *not* os.PathSeparator here,
+	// because the path given originates from Go stdlib, specifically
+	// runtime.Caller() which (as of Mar/17) returns forward slashes even on
+	// Windows.
+	//
+	// See https://github.com/golang/go/issues/3335
+	// and https://github.com/golang/go/issues/18151
+	//
+	// for discussion on the issue on Go side.
+	//
+
+	// Find the last separator.
+	//
+	idx := strings.LastIndexByte(path, '/')
+	if idx == -1 {
+		return path
+	}
+
+	// Find the penultimate separator.
+	idx = strings.LastIndexByte(path[:idx], '/')
+	if idx == -1 {
+		return path
+	}
+
+	return path[idx+1:]
+}
+
+// Non-JSON logging format function
+func (z *intLogger) log(t time.Time, level Level, msg string, args ...interface{}) {
+	z.w.WriteString(t.Format(TimeFormat))
+	z.w.WriteByte(' ')
+
+	s, ok := _levelToBracket[level]
+	if ok {
+		z.w.WriteString(s)
+	} else {
+		z.w.WriteString("[UNKN ]")
+	}
+
+	if z.caller {
+		if _, file, line, ok := runtime.Caller(3); ok {
+			z.w.WriteByte(' ')
+			z.w.WriteString(trimCallerPath(file))
+			z.w.WriteByte(':')
+			z.w.WriteString(strconv.Itoa(line))
+			z.w.WriteByte(':')
+		}
+	}
+
+	z.w.WriteByte(' ')
+
+	if z.name != "" {
+		z.w.WriteString(z.name)
+		z.w.WriteString(": ")
+	}
+
+	z.w.WriteString(msg)
+
+	args = append(z.implied, args...)
+
+	var stacktrace CapturedStacktrace
+
+	if args != nil && len(args) > 0 {
+		if len(args)%2 != 0 {
+			cs, ok := args[len(args)-1].(CapturedStacktrace)
+			if ok {
+				args = args[:len(args)-1]
+				stacktrace = cs
+			} else {
+				args = append(args, "<unknown>")
+			}
+		}
+
+		z.w.WriteByte(':')
+
+	FOR:
+		for i := 0; i < len(args); i = i + 2 {
+			var val string
+
+			switch st := args[i+1].(type) {
+			case string:
+				val = st
+			case int:
+				val = strconv.FormatInt(int64(st), 10)
+			case int64:
+				val = strconv.FormatInt(int64(st), 10)
+			case int32:
+				val = strconv.FormatInt(int64(st), 10)
+			case int16:
+				val = strconv.FormatInt(int64(st), 10)
+			case int8:
+				val = strconv.FormatInt(int64(st), 10)
+			case uint:
+				val = strconv.FormatUint(uint64(st), 10)
+			case uint64:
+				val = strconv.FormatUint(uint64(st), 10)
+			case uint32:
+				val = strconv.FormatUint(uint64(st), 10)
+			case uint16:
+				val = strconv.FormatUint(uint64(st), 10)
+			case uint8:
+				val = strconv.FormatUint(uint64(st), 10)
+			case CapturedStacktrace:
+				stacktrace = st
+				continue FOR
+			default:
+				val = fmt.Sprintf("%v", st)
+			}
+
+			z.w.WriteByte(' ')
+			z.w.WriteString(args[i].(string))
+			z.w.WriteByte('=')
+
+			if strings.ContainsAny(val, " \t\n\r") {
+				z.w.WriteByte('"')
+				z.w.WriteString(val)
+				z.w.WriteByte('"')
+			} else {
+				z.w.WriteString(val)
+			}
+		}
+	}
+
+	z.w.WriteString("\n")
+
+	if stacktrace != "" {
+		z.w.WriteString(string(stacktrace))
+	}
+}
+
+// JSON logging function
+func (z *intLogger) logJson(t time.Time, level Level, msg string, args ...interface{}) {
+	vals := map[string]interface{}{
+		"@message":   msg,
+		"@timestamp": t.Format("2006-01-02T15:04:05.000000Z07:00"),
+	}
+
+	var levelStr string
+	switch level {
+	case Error:
+		levelStr = "error"
+	case Warn:
+		levelStr = "warn"
+	case Info:
+		levelStr = "info"
+	case Debug:
+		levelStr = "debug"
+	case Trace:
+		levelStr = "trace"
+	default:
+		levelStr = "all"
+	}
+
+	vals["@level"] = levelStr
+
+	if z.name != "" {
+		vals["@module"] = z.name
+	}
+
+	if z.caller {
+		if _, file, line, ok := runtime.Caller(3); ok {
+			vals["@caller"] = fmt.Sprintf("%s:%d", file, line)
+		}
+	}
+
+	if args != nil && len(args) > 0 {
+		if len(args)%2 != 0 {
+			cs, ok := args[len(args)-1].(CapturedStacktrace)
+			if ok {
+				args = args[:len(args)-1]
+				vals["stacktrace"] = cs
+			} else {
+				args = append(args, "<unknown>")
+			}
+		}
+
+		for i := 0; i < len(args); i = i + 2 {
+			if _, ok := args[i].(string); !ok {
+				// As this is the logging function not much we can do here
+				// without injecting into logs...
+				continue
+			}
+			val := args[i+1]
+			// Check if val is of type error. If error type doesn't
+			// implement json.Marshaler or encoding.TextMarshaler
+			// then set val to err.Error() so that it gets marshaled
+			if err, ok := val.(error); ok {
+				switch err.(type) {
+				case json.Marshaler, encoding.TextMarshaler:
+				default:
+					val = err.Error()
+				}
+			}
+			vals[args[i].(string)] = val
+		}
+	}
+
+	err := json.NewEncoder(z.w).Encode(vals)
+	if err != nil {
+		panic(err)
+	}
+}
+
+// Emit the message and args at DEBUG level
+func (z *intLogger) Debug(msg string, args ...interface{}) {
+	z.Log(Debug, msg, args...)
+}
+
+// Emit the message and args at TRACE level
+func (z *intLogger) Trace(msg string, args ...interface{}) {
+	z.Log(Trace, msg, args...)
+}
+
+// Emit the message and args at INFO level
+func (z *intLogger) Info(msg string, args ...interface{}) {
+	z.Log(Info, msg, args...)
+}
+
+// Emit the message and args at WARN level
+func (z *intLogger) Warn(msg string, args ...interface{}) {
+	z.Log(Warn, msg, args...)
+}
+
+// Emit the message and args at ERROR level
+func (z *intLogger) Error(msg string, args ...interface{}) {
+	z.Log(Error, msg, args...)
+}
+
+// Indicate that the logger would emit TRACE level logs
+func (z *intLogger) IsTrace() bool {
+	return z.level == Trace
+}
+
+// Indicate that the logger would emit DEBUG level logs
+func (z *intLogger) IsDebug() bool {
+	return z.level <= Debug
+}
+
+// Indicate that the logger would emit INFO level logs
+func (z *intLogger) IsInfo() bool {
+	return z.level <= Info
+}
+
+// Indicate that the logger would emit WARN level logs
+func (z *intLogger) IsWarn() bool {
+	return z.level <= Warn
+}
+
+// Indicate that the logger would emit ERROR level logs
+func (z *intLogger) IsError() bool {
+	return z.level <= Error
+}
+
+// Return a sub-Logger for which every emitted log message will contain
+// the given key/value pairs. This is used to create a context specific
+// Logger.
+func (z *intLogger) With(args ...interface{}) Logger {
+	var nz intLogger = *z
+
+	nz.implied = append(nz.implied, args...)
+
+	return &nz
+}
+
+// Create a new sub-Logger with a name descending from the current name.
+// This is used to create a subsystem specific Logger.
+func (z *intLogger) Named(name string) Logger {
+	var nz intLogger = *z
+
+	if nz.name != "" {
+		nz.name = nz.name + "." + name
+	}
+
+	return &nz
+}
+
+// Create a new sub-Logger with an explicit name. This ignores the current
+// name. This is used to create a standalone logger that doesn't fall
+// within the normal hierarchy.
+func (z *intLogger) ResetNamed(name string) Logger {
+	var nz intLogger = *z
+
+	nz.name = name
+
+	return &nz
+}
+
+// Create a *log.Logger that will send its data through this Logger. This
+// allows packages that expect to be using the standard library log to actually
+// use this logger.
+func (z *intLogger) StandardLogger(opts *StandardLoggerOptions) *log.Logger {
+	if opts == nil {
+		opts = &StandardLoggerOptions{}
+	}
+
+	return log.New(&stdlogAdapter{z, opts.InferLevels}, "", 0)
+}
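
A short sketch of how the two output paths above are selected through LoggerOptions; the option values here are illustrative, not prescriptive:

```go
package main

import (
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

func main() {
	// Text mode: key/value pairs are rendered as key=value, and values
	// containing whitespace are quoted (see the non-JSON log function above).
	textLogger := hclog.New(&hclog.LoggerOptions{
		Name:            "demo",
		Level:           hclog.Trace,
		IncludeLocation: true, // adds file:line via runtime.Caller
	})
	textLogger.Info("starting", "listen_addr", "0.0.0.0:3000")

	// JSON mode: the same call becomes one JSON object per line with
	// @timestamp, @level, @module, @message plus the key/value pairs.
	jsonLogger := hclog.New(&hclog.LoggerOptions{
		Name:       "demo",
		JSONFormat: true,
		Output:     os.Stderr,
	})
	jsonLogger.Info("starting", "listen_addr", "0.0.0.0:3000")
}
```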

+ 138 - 0
vendor/github.com/hashicorp/go-hclog/log.go

@@ -0,0 +1,138 @@
+package hclog
+
+import (
+	"io"
+	"log"
+	"os"
+	"strings"
+)
+
+var (
+	DefaultOutput = os.Stderr
+	DefaultLevel  = Info
+)
+
+type Level int
+
+const (
+	// This is a special level used to indicate that no level has been
+	// set and allow for a default to be used.
+	NoLevel Level = 0
+
+	// The most verbose level. Intended to be used for the tracing of actions
+	// in code, such as function enters/exits, etc.
+	Trace Level = 1
+
+	// For low-level analysis by the programmer.
+	Debug Level = 2
+
+	// For information about steady state operations.
+	Info Level = 3
+
+	// For information about rare but handled events.
+	Warn Level = 4
+
+	// For information about unrecoverable events.
+	Error Level = 5
+)
+
+// LevelFromString returns a Level type for the named log level, or "NoLevel" if
+// the level string is invalid. This facilitates setting the log level via
+// config or environment variable by name in a predictable way.
+func LevelFromString(levelStr string) Level {
+	// We don't care about case. Accept "INFO" or "info"
+	levelStr = strings.ToLower(strings.TrimSpace(levelStr))
+	switch levelStr {
+	case "trace":
+		return Trace
+	case "debug":
+		return Debug
+	case "info":
+		return Info
+	case "warn":
+		return Warn
+	case "error":
+		return Error
+	default:
+		return NoLevel
+	}
+}
+
+// The main Logger interface. All code should be written against this interface only.
+type Logger interface {
+	// Args are alternating key, val pairs
+	// keys must be strings
+	// vals can be any type, but display is implementation specific
+	// Emit a message and key/value pairs at the TRACE level
+	Trace(msg string, args ...interface{})
+
+	// Emit a message and key/value pairs at the DEBUG level
+	Debug(msg string, args ...interface{})
+
+	// Emit a message and key/value pairs at the INFO level
+	Info(msg string, args ...interface{})
+
+	// Emit a message and key/value pairs at the WARN level
+	Warn(msg string, args ...interface{})
+
+	// Emit a message and key/value pairs at the ERROR level
+	Error(msg string, args ...interface{})
+
+	// Indicate if TRACE logs would be emitted. This and the other Is* guards
+	// are used to elide expensive logging code based on the current level.
+	IsTrace() bool
+
+	// Indicate if DEBUG logs would be emitted. This and the other Is* guards
+	IsDebug() bool
+
+	// Indicate if INFO logs would be emitted. This and the other Is* guards
+	IsInfo() bool
+
+	// Indicate if WARN logs would be emitted. This and the other Is* guards
+	IsWarn() bool
+
+	// Indicate if ERROR logs would be emitted. This and the other Is* guards
+	IsError() bool
+
+	// Creates a sublogger that will always have the given key/value pairs
+	With(args ...interface{}) Logger
+
+	// Create a logger that will prepend the name string on the front of all messages.
+	// If the logger already has a name, the new value will be appended to the current
+	// name. That way, a major subsystem can use this to decorate all its own logs
+	// without losing context.
+	Named(name string) Logger
+
+	// Create a logger that will prepend the name string on the front of all messages.
+	// This sets the name of the logger to the value directly, unlike Named, which honors
+	// the current name as well.
+	ResetNamed(name string) Logger
+
+	// Return a value that conforms to the stdlib log.Logger interface
+	StandardLogger(opts *StandardLoggerOptions) *log.Logger
+}
+
+type StandardLoggerOptions struct {
+	// Indicate that some minimal parsing should be done on strings to try
+	// and detect their level and re-emit them.
+	// This supports strings like [ERROR], [ERR], [TRACE], [WARN], [INFO], and
+	// [DEBUG], stripping the level prefix before re-emitting the message.
+	InferLevels bool
+}
+
+type LoggerOptions struct {
+	// Name of the subsystem to prefix logs with
+	Name string
+
+	// The threshold for the logger. Anything less severe is suppressed
+	Level Level
+
+	// Where to write the logs to. Defaults to os.Stderr if nil
+	Output io.Writer
+
+	// Control if the output should be in JSON.
+	JSONFormat bool
+
+	// Include file and line information in each log line
+	IncludeLocation bool
+}
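
Tying the pieces of this file together, a hedged sketch of resolving a level by name and using the Is* guards to skip expensive log argument construction (LOG_LEVEL is just an illustrative environment variable):

```go
package main

import (
	"fmt"
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

func main() {
	// Resolve the level from configuration by name; an unrecognized value
	// returns NoLevel, which New treats as the package default (Info).
	level := hclog.LevelFromString(os.Getenv("LOG_LEVEL"))

	logger := hclog.New(&hclog.LoggerOptions{Name: "worker", Level: level})

	// Use the Is* guards to elide expensive argument construction when the
	// corresponding level is filtered out.
	if logger.IsDebug() {
		logger.Debug("state dump", "detail", fmt.Sprintf("%+v", os.Environ()))
	}
}
```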

Some files were not shown because too many files have changed in this diff