
Merge remote-tracking branch 'grafana/master' into alpha-react-table

* grafana/master: (24 commits)
  fix: Consistency in unit labels #15709
  Update latest.json
  Update CHANGELOG.md
  position from add panel, dimensions from copied panel
  changelog: add notes about closing #14509 #15179
  fix: Add class for input fields with help icon to avoid icon hiding the text #15771
  Update CHANGELOG.md
  restore to current folder when restoring old dashboard version
  fix(renderer): Vendor ansicolor as typescript
  log phantomjs output even if it times out and include orgId when rendering alert
  keep size from copied panel and not from add panel widget
  Added basic cherry pick helping task
  Prevent search in VizPicker from stealing focus (#15802)
  fix only users that can edit a dashboard should be able to update panel json
  Updated changelog task
  Fixed image rendering issue for dashboards with auto refresh, caused by missing reloadOnSearch flag on route, fixes #15631
  Updated to add PR author, skip PR issue references
  Added first iteration/poc of changelog task
  Enable @grafana/ui version bump based on package.json contents
  Ensure clean master only when publishing package to npm
  ...
ryan committed 6 years ago
Commit 26e3e7146e
100 files changed: 3,248 additions and 3,724 deletions
  1. CHANGELOG.md (+13, -4)
  2. Gopkg.lock (+276, -36)
  3. Gopkg.toml (+6, -2)
  4. latest.json (+2, -2)
  5. package.json (+9, -7)
  6. packages/grafana-ui/src/utils/valueFormats/categories.ts (+1, -1)
  7. pkg/api/dashboard.go (+1, -0)
  8. pkg/api/dashboard_test.go (+124, -0)
  9. pkg/log/log.go (+9, -2)
  10. pkg/services/alerting/notifier.go (+1, -1)
  11. pkg/services/rendering/phantomjs.go (+15, -4)
  12. pkg/services/sqlstore/alert.go (+3, -1)
  13. pkg/services/sqlstore/annotation.go (+6, -2)
  14. pkg/services/sqlstore/dashboard_version.go (+3, -2)
  15. pkg/services/sqlstore/org_users.go (+1, -1)
  16. pkg/services/sqlstore/team.go (+1, -1)
  17. pkg/services/sqlstore/user_test.go (+1, -1)
  18. public/app/core/controllers/json_editor_ctrl.ts (+1, -1)
  19. public/app/features/alerting/partials/alert_tab.html (+1, -1)
  20. public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx (+6, -1)
  21. public/app/features/dashboard/panel_editor/VisualizationTab.tsx (+14, -20)
  22. public/app/features/dashboard/panel_editor/VizPickerSearch.tsx (+33, -0)
  23. public/app/features/dashboard/utils/panel.ts (+1, -0)
  24. public/app/features/datasources/partials/http_settings.html (+2, -2)
  25. public/app/features/explore/LogMessageAnsi.tsx (+1, -1)
  26. public/app/plugins/datasource/cloudwatch/partials/config.html (+3, -3)
  27. public/app/plugins/datasource/elasticsearch/partials/config.html (+1, -1)
  28. public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/config.html (+10, -10)
  29. public/app/plugins/datasource/influxdb/partials/config.html (+1, -1)
  30. public/app/plugins/datasource/loki/partials/config.html (+1, -1)
  31. public/app/plugins/datasource/loki/result_transformer.ts (+1, -1)
  32. public/app/plugins/datasource/mssql/partials/config.html (+4, -4)
  33. public/app/plugins/datasource/mysql/partials/config.html (+4, -4)
  34. public/app/plugins/datasource/opentsdb/partials/query.editor.html (+2, -2)
  35. public/app/plugins/datasource/postgres/partials/config.html (+4, -4)
  36. public/app/plugins/datasource/prometheus/partials/config.html (+2, -2)
  37. public/app/plugins/datasource/prometheus/partials/query.editor.html (+2, -2)
  38. public/app/routes/routes.ts (+2, -0)
  39. public/sass/components/_gf-form.scss (+4, -0)
  40. public/sass/components/_tagsinput.scss (+4, -0)
  41. public/vendor/ansicolor/ansicolor.ts (+471, -0)
  42. scripts/cli/index.ts (+66, -26)
  43. scripts/cli/tasks/changelog.ts (+49, -0)
  44. scripts/cli/tasks/cherrypick.ts (+42, -0)
  45. scripts/cli/tasks/core.start.ts (+19, -22)
  46. scripts/cli/tasks/grafanaui.build.ts (+32, -59)
  47. scripts/cli/tasks/grafanaui.release.ts (+86, -54)
  48. scripts/cli/tasks/task.ts (+23, -0)
  49. scripts/cli/utils/execTask.ts (+13, -4)
  50. scripts/cli/utils/startSpinner.ts (+0, -7)
  51. scripts/cli/utils/useSpinner.ts (+20, -0)
  52. tsconfig.json (+1, -1)
  53. vendor/github.com/go-ini/ini/LICENSE (+0, -191)
  54. vendor/github.com/go-ini/ini/error.go (+0, -32)
  55. vendor/github.com/go-ini/ini/file.go (+0, -407)
  56. vendor/github.com/go-ini/ini/ini.go (+0, -202)
  57. vendor/github.com/go-ini/ini/key.go (+0, -751)
  58. vendor/github.com/go-ini/ini/parser.go (+0, -477)
  59. vendor/github.com/go-ini/ini/section.go (+0, -257)
  60. vendor/github.com/go-ini/ini/struct.go (+0, -512)
  61. vendor/github.com/go-xorm/builder/builder.go (+247, -43)
  62. vendor/github.com/go-xorm/builder/builder_delete.go (+9, -4)
  63. vendor/github.com/go-xorm/builder/builder_insert.go (+35, -12)
  64. vendor/github.com/go-xorm/builder/builder_limit.go (+100, -0)
  65. vendor/github.com/go-xorm/builder/builder_select.go (+99, -11)
  66. vendor/github.com/go-xorm/builder/builder_union.go (+47, -0)
  67. vendor/github.com/go-xorm/builder/builder_update.go (+10, -5)
  68. vendor/github.com/go-xorm/builder/cond.go (+4, -17)
  69. vendor/github.com/go-xorm/builder/cond_between.go (+27, -2)
  70. vendor/github.com/go-xorm/builder/cond_or.go (+3, -1)
  71. vendor/github.com/go-xorm/builder/error.go (+25, -1)
  72. vendor/github.com/go-xorm/builder/sql.go (+156, -0)
  73. vendor/github.com/go-xorm/builder/string_builder.go (+119, -0)
  74. vendor/github.com/go-xorm/core/cache.go (+8, -4)
  75. vendor/github.com/go-xorm/core/column.go (+19, -12)
  76. vendor/github.com/go-xorm/core/converstion.go (+4, -0)
  77. vendor/github.com/go-xorm/core/db.go (+88, -233)
  78. vendor/github.com/go-xorm/core/dialect.go (+10, -1)
  79. vendor/github.com/go-xorm/core/driver.go (+4, -0)
  80. vendor/github.com/go-xorm/core/error.go (+4, -0)
  81. vendor/github.com/go-xorm/core/filter.go (+7, -3)
  82. vendor/github.com/go-xorm/core/ilogger.go (+4, -0)
  83. vendor/github.com/go-xorm/core/index.go (+14, -4)
  84. vendor/github.com/go-xorm/core/mapper.go (+4, -0)
  85. vendor/github.com/go-xorm/core/pk.go (+4, -0)
  86. vendor/github.com/go-xorm/core/rows.go (+7, -61)
  87. vendor/github.com/go-xorm/core/scan.go (+14, -0)
  88. vendor/github.com/go-xorm/core/stmt.go (+165, -0)
  89. vendor/github.com/go-xorm/core/table.go (+4, -2)
  90. vendor/github.com/go-xorm/core/tx.go (+153, -0)
  91. vendor/github.com/go-xorm/core/type.go (+36, -18)
  92. vendor/github.com/go-xorm/xorm/context_cache.go (+30, -0)
  93. vendor/github.com/go-xorm/xorm/dialect_mysql.go (+77, -0)
  94. vendor/github.com/go-xorm/xorm/dialect_postgres.go (+82, -26)
  95. vendor/github.com/go-xorm/xorm/dialect_sqlite3.go (+5, -1)
  96. vendor/github.com/go-xorm/xorm/engine.go (+99, -110)
  97. vendor/github.com/go-xorm/xorm/engine_cond.go (+4, -1)
  98. vendor/github.com/go-xorm/xorm/engine_group.go (+10, -0)
  99. vendor/github.com/go-xorm/xorm/engine_maxlife.go (+0, -22)
  100. vendor/github.com/go-xorm/xorm/engine_table.go (+113, -0)

+ 13 - 4
CHANGELOG.md

@@ -6,25 +6,34 @@
 ### Minor
 * **Cloudwatch**: Add AWS RDS MaximumUsedTransactionIDs metric [#15077](https://github.com/grafana/grafana/pull/15077), thx [@activeshadow](https://github.com/activeshadow)
 
-
 ### Bug Fixes
 * **Api**: Invalid org invite code [#10506](https://github.com/grafana/grafana/issues/10506)
 * **Datasource**: Handles nil jsondata field gracefully [#14239](https://github.com/grafana/grafana/issues/14239)
 * **Gauge**: Interpolate scoped variables in repeated gauges [#15739](https://github.com/grafana/grafana/issues/15739)
 * **Datasource**: Empty user/password was not updated when updating datasources [#15608](https://github.com/grafana/grafana/pull/15608), thx [@Maddin-619](https://github.com/Maddin-619)
 
-# 6.0.1 (unreleased)
+# 6.0.1 (2019-03-06)
 
 ### Bug Fixes
 * **Metrics**: Fixes broken usagestats metrics for /metrics [#15651](https://github.com/grafana/grafana/issues/15651)
 * **Dashboard**: Fixes kiosk mode should have &kiosk appended to the url [#15765](https://github.com/grafana/grafana/issues/15765)
 * **Dashboard**: Fixes kiosk=tv mode with autofitpanels should respect header [#15650](https://github.com/grafana/grafana/issues/15650)
+* **Image rendering**: Fixed image rendering issue for dashboards with auto refresh, . [#15818](https://github.com/grafana/grafana/pull/15818), [@torkelo](https://github.com/torkelo)
+* **Dashboard**: Fix only users that can edit a dashboard should be able to update panel json. [#15805](https://github.com/grafana/grafana/pull/15805), [@marefr](https://github.com/marefr)
+* **LDAP**: fix allow anonymous initial bind for ldap search. [#15803](https://github.com/grafana/grafana/pull/15803), [@marefr](https://github.com/marefr)
+* **UX**: Fixed scrollbar not visible initially (only after manual scroll). [#15798](https://github.com/grafana/grafana/pull/15798), [@torkelo](https://github.com/torkelo)
+* **Datasource admin** TestData   [#15793](https://github.com/grafana/grafana/pull/15793), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Dashboard**: Fixed scrolling issue that caused scroll to be locked to bottom. [#15792](https://github.com/grafana/grafana/pull/15792), [@torkelo](https://github.com/torkelo)
+* **Explore**: Viewers with viewers_can_edit should be able to access /explore. [#15787](https://github.com/grafana/grafana/pull/15787), [@jschill](https://github.com/jschill)
+* **Security** fix: limit access to org admin and alerting pages. [#15761](https://github.com/grafana/grafana/pull/15761), [@marefr](https://github.com/marefr)
+* **Panel Edit** minInterval changes did not persist [#15757](https://github.com/grafana/grafana/pull/15757), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Teams**: Fixed bug when getting teams for user. [#15595](https://github.com/grafana/grafana/pull/15595), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Stackdriver**: fix for float64 bounds for distribution metrics [#14509](https://github.com/grafana/grafana/issues/14509)
+* **Stackdriver**: no reducers available for distribution type [#15179](https://github.com/grafana/grafana/issues/15179)
 
 # 6.0.0 stable (2019-02-25)
 
 ### Bug Fixes
-* **Stackdriver**: fix for float64 bounds for distribution metrics [#14509](https://github.com/grafana/grafana/issues/14509)
-* **Stackdriver**: no reducers available for distribution type [#15179](https://github.com/grafana/grafana/issues/15179)
 * **Dashboard**: fixes click after scroll in series override menu [#15621](https://github.com/grafana/grafana/issues/15621)
 * **MySQL**: fix mysql query using _interval_ms variable throws error [#14507](https://github.com/grafana/grafana/issues/14507)
 

+ 276 - 36
Gopkg.lock

@@ -2,30 +2,39 @@
 
 
 [[projects]]
+  digest = "1:f8ad8a53fa865a70efbe215b0ca34735523f50ea39e0efde319ab6fc80089b44"
   name = "cloud.google.com/go"
   packages = ["compute/metadata"]
+  pruneopts = "NUT"
   revision = "056a55f54a6cc77b440b31a56a5e7c3982d32811"
   version = "v0.22.0"
 
 [[projects]]
+  digest = "1:167b6f65a6656de568092189ae791253939f076df60231fdd64588ac703892a1"
   name = "github.com/BurntSushi/toml"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "b26d9c308763d68093482582cea63d69be07a0f0"
   version = "v0.3.0"
 
 [[projects]]
   branch = "master"
+  digest = "1:7d23e6e1889b8bb4bbb37a564708fdab4497ce232c3a99d66406c975b642a6ff"
   name = "github.com/Unknwon/com"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520"
 
 [[projects]]
   branch = "master"
+  digest = "1:1610787cd9726e29d8fecc2a80e43e4fced008a1f560fec6688fc4d946f17835"
   name = "github.com/VividCortex/mysqlerr"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "6c6b55f8796f578c870b7e19bafb16103bc40095"
 
 [[projects]]
+  digest = "1:ebe102b61c1615d2954734e3cfe1b6b06a5088c25a41055b38661d41ad7b8f27"
   name = "github.com/aws/aws-sdk-go"
   packages = [
     "aws",
@@ -69,399 +78,507 @@
     "service/resourcegroupstaggingapi",
     "service/resourcegroupstaggingapi/resourcegroupstaggingapiiface",
     "service/s3",
-    "service/sts"
+    "service/sts",
   ]
+  pruneopts = "NUT"
   revision = "62936e15518acb527a1a9cb4a39d96d94d0fd9a2"
   version = "v1.16.15"
 
 [[projects]]
   branch = "master"
+  digest = "1:79cad073c7be02632d3fa52f62486848b089f560db1e94536de83a408c0f4726"
   name = "github.com/benbjohnson/clock"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "7dc76406b6d3c05b5f71a86293cbcf3c4ea03b19"
 
 [[projects]]
   branch = "master"
+  digest = "1:707ebe952a8b3d00b343c01536c79c73771d100f63ec6babeaed5c79e2b8a8dd"
   name = "github.com/beorn7/perks"
   packages = ["quantile"]
+  pruneopts = "NUT"
   revision = "3a771d992973f24aa725d07868b467d1ddfceafb"
 
 [[projects]]
   branch = "master"
+  digest = "1:433a2ff0ef4e2f8634614aab3174783c5ff80120b487712db96cc3712f409583"
   name = "github.com/bmizerany/assert"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "b7ed37b82869576c289d7d97fb2bbd8b64a0cb28"
 
 [[projects]]
   branch = "master"
+  digest = "1:d8f9145c361920507a4f85ffb7f70b96beaedacba2ce8c00aa663adb08689d3e"
   name = "github.com/bradfitz/gomemcache"
   packages = ["memcache"]
+  pruneopts = "NUT"
   revision = "1952afaa557dc08e8e0d89eafab110fb501c1a2b"
 
 [[projects]]
   branch = "master"
+  digest = "1:8ecb89af7dfe3ac401bdb0c9390b134ef96a97e85f732d2b0604fb7b3977839f"
   name = "github.com/codahale/hdrhistogram"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "3a0bb77429bd3a61596f5e8a3172445844342120"
 
 [[projects]]
+  digest = "1:5dba68a1600a235630e208cb7196b24e58fcbb77bb7a6bec08fcd23f081b0a58"
   name = "github.com/codegangsta/cli"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1"
   version = "v1.20.0"
 
 [[projects]]
+  digest = "1:a2c1d0e43bd3baaa071d1b9ed72c27d78169b2b269f71c105ac4ba34b1be4a39"
   name = "github.com/davecgh/go-spew"
   packages = ["spew"]
+  pruneopts = "NUT"
   revision = "346938d642f2ec3594ed81d874461961cd0faa76"
   version = "v1.1.0"
 
 [[projects]]
+  digest = "1:1b318d2dd6cea8a1a8d8ec70348852303bd3e491df74e8bca6e32eb5a4d06970"
   name = "github.com/denisenkom/go-mssqldb"
   packages = [
     ".",
-    "internal/cp"
+    "internal/cp",
   ]
+  pruneopts = "NUT"
   revision = "270bc3860bb94dd3a3ffd047377d746c5e276726"
 
 [[projects]]
   branch = "master"
+  digest = "1:2da5f11ad66ff01a27a5c3dba4620b7eee2327be75b32c9ee9f87c9a8001ecbf"
   name = "github.com/facebookgo/inject"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "cc1aa653e50f6a9893bcaef89e673e5b24e1e97b"
 
 [[projects]]
   branch = "master"
+  digest = "1:1108df7f658c90db041e0d6174d55be689aaeb0585913b9c3c7aab51a3a6b2b1"
   name = "github.com/facebookgo/structtag"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "217e25fb96916cc60332e399c9aa63f5c422ceed"
 
 [[projects]]
+  digest = "1:ade392a843b2035effb4b4a2efa2c3bab3eb29b992e98bacf9c898b0ecb54e45"
   name = "github.com/fatih/color"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "5b77d2a35fb0ede96d138fc9a99f5c9b6aef11b4"
   version = "v1.7.0"
 
-[[projects]]
-  name = "github.com/go-ini/ini"
-  packages = ["."]
-  revision = "6529cf7c58879c08d927016dde4477f18a0634cb"
-  version = "v1.36.0"
-
 [[projects]]
   branch = "master"
+  digest = "1:682a0aca743a1a4a36697f3d7f86c0ed403c4e3a780db9935f633242855eac9c"
   name = "github.com/go-macaron/binding"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "ac54ee249c27dca7e76fad851a4a04b73bd1b183"
 
 [[projects]]
   branch = "master"
+  digest = "1:6326b27f8e0c8e135c8674ddbc619fae879664ac832e8e6fa6a23ce0d279ed4d"
   name = "github.com/go-macaron/gzip"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "cad1c6580a07c56f5f6bc52d66002a05985c5854"
 
 [[projects]]
   branch = "master"
+  digest = "1:fb8711b648d1ff03104fc1d9593a13cb1d5120be7ba2b01641c14ccae286a9e3"
   name = "github.com/go-macaron/inject"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "d8a0b8677191f4380287cfebd08e462217bac7ad"
 
 [[projects]]
   branch = "master"
+  digest = "1:21577aafe885f088e8086a3415f154c63c0b7ce956a6994df2ac5776bc01b7e3"
   name = "github.com/go-macaron/session"
   packages = [
     ".",
     "memcache",
     "postgres",
-    "redis"
+    "redis",
   ]
+  pruneopts = "NUT"
   revision = "068d408f9c54c7fa7fcc5e2bdd3241ab21280c9e"
 
 [[projects]]
+  digest = "1:fddd4bada6100d6fc49a9f32f18ba5718db45a58e4b00aa6377e1cfbf06af34f"
   name = "github.com/go-sql-driver/mysql"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "2cc627ac8defc45d65066ae98f898166f580f9a4"
 
 [[projects]]
+  digest = "1:a1efdbc2762667c8a41cbf02b19a0549c846bf2c1d08cad4f445e3344089f1f0"
   name = "github.com/go-stack/stack"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "259ab82a6cad3992b4e21ff5cac294ccb06474bc"
   version = "v1.7.0"
 
 [[projects]]
+  digest = "1:06d21295033f211588d0ad7ff391cc1b27e72b60cb6d4b7db0d70cffae4cf228"
   name = "github.com/go-xorm/builder"
   packages = ["."]
-  revision = "bad0a612f0d6277b953910822ab5dfb30dd18237"
-  version = "v0.2.0"
+  pruneopts = "NUT"
+  revision = "1d658d7596c25394aab557ef5b50ef35bf706384"
+  version = "v0.3.4"
 
 [[projects]]
+  digest = "1:b26928aab0fff92592e8728c5bc9d6e404fa2017d6a8e841ae5e60a42237f6fc"
   name = "github.com/go-xorm/core"
   packages = ["."]
-  revision = "da1adaf7a28ca792961721a34e6e04945200c890"
-  version = "v0.5.7"
+  pruneopts = "NUT"
+  revision = "ccc80c1adf1f6172bbc548877f50a1163041a40a"
+  version = "v0.6.2"
 
 [[projects]]
+  digest = "1:407316703b32d68ccf5d39bdae57d411b6954e253e07d0fff0988a3f39861f2f"
   name = "github.com/go-xorm/xorm"
   packages = ["."]
-  revision = "1933dd69e294c0a26c0266637067f24dbb25770c"
-  version = "v0.6.4"
+  pruneopts = "NUT"
+  revision = "1f39c590c64924f358c0d89016ac9b2bb84e9125"
+  version = "v0.7.1"
 
 [[projects]]
   branch = "master"
+  digest = "1:ffbb19fb66f140b5ea059428d1f84246a055d1bc3d9456c1e5c3d143611f03d0"
   name = "github.com/golang/protobuf"
   packages = [
     "proto",
     "ptypes",
     "ptypes/any",
     "ptypes/duration",
-    "ptypes/timestamp"
+    "ptypes/timestamp",
   ]
+  pruneopts = "NUT"
   revision = "927b65914520a8b7d44f5c9057611cfec6b2e2d0"
 
 [[projects]]
   branch = "master"
+  digest = "1:f14d1b50e0075fb00177f12a96dd7addf93d1e2883c25befd17285b779549795"
   name = "github.com/gopherjs/gopherjs"
   packages = ["js"]
+  pruneopts = "NUT"
   revision = "8dffc02ea1cb8398bb73f30424697c60fcf8d4c5"
 
 [[projects]]
+  digest = "1:3b708ebf63bfa9ba3313bedb8526bc0bb284e51474e65e958481476a9d4a12aa"
   name = "github.com/gorilla/websocket"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "ea4d1f681babbce9545c9c5f3d5194a789c89f5b"
   version = "v1.2.0"
 
 [[projects]]
+  digest = "1:4e771d1c6e15ca4516ad971c34205c822b5cff2747179679d7b321e4e1bfe431"
   name = "github.com/gosimple/slug"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "e9f42fa127660e552d0ad2b589868d403a9be7c6"
   version = "v1.1.1"
 
 [[projects]]
   branch = "master"
+  digest = "1:08e53c69cd267ef7d71eeae5d953153d0d2bc1b8e0b498731fe9acaead7001b6"
   name = "github.com/grafana/grafana-plugin-model"
   packages = [
     "go/datasource",
-    "go/renderer"
+    "go/renderer",
   ]
+  pruneopts = "NUT"
   revision = "84176c64269d8060f99e750ee8aba6f062753336"
 
 [[projects]]
   branch = "master"
+  digest = "1:58ba5285227b0f635652cd4aa82c4cfd00b590191eadd823462f0c9f64e3ae07"
   name = "github.com/hashicorp/go-hclog"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "69ff559dc25f3b435631604f573a5fa1efdb6433"
 
 [[projects]]
+  digest = "1:532090ffc3b05a7e4c0229dd2698d79149f2e0683df993224a8b202f607fb605"
   name = "github.com/hashicorp/go-plugin"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "e8d22c780116115ae5624720c9af0c97afe4f551"
 
 [[projects]]
   branch = "master"
+  digest = "1:8925116d1edcd85fc0c014e1aa69ce12892489b48ee633a605c46d893b8c151f"
   name = "github.com/hashicorp/go-version"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "23480c0665776210b5fbbac6eaaee40e3e6a96b7"
 
 [[projects]]
   branch = "master"
+  digest = "1:8deb0c5545c824dfeb0ac77ab8eb67a3d541eab76df5c85ce93064ef02d44cd0"
   name = "github.com/hashicorp/yamux"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "7221087c3d281fda5f794e28c2ea4c6e4d5c4558"
 
 [[projects]]
+  digest = "1:efbe016b6d198cf44f1db0ed2fbdf1b36ebf1f6956cc9b76d6affa96f022d368"
   name = "github.com/inconshreveable/log15"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "0decfc6c20d9ca0ad143b0e89dcaa20f810b4fb3"
   version = "v2.13"
 
 [[projects]]
+  digest = "1:1f2aebae7e7c856562355ec0198d8ca2fa222fb05e5b1b66632a1fce39631885"
   name = "github.com/jmespath/go-jmespath"
   packages = ["."]
-  revision = "0b12d6b5"
+  pruneopts = "NUT"
+  revision = "c2b33e84"
 
 [[projects]]
+  digest = "1:6ddab442e52381bab82fb6c07ef3f4b565ff7ec4b8fae96d8dd4b8573a460597"
   name = "github.com/jtolds/gls"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "77f18212c9c7edc9bd6a33d383a7b545ce62f064"
   version = "v4.2.1"
 
 [[projects]]
+  digest = "1:1da1796a71eb70f1e3e085984d044f67840bb0326816ec8276231aa87b1b9fc3"
   name = "github.com/klauspost/compress"
   packages = [
     "flate",
-    "gzip"
+    "gzip",
   ]
+  pruneopts = "NUT"
   revision = "6c8db69c4b49dd4df1fff66996cf556176d0b9bf"
   version = "v1.2.1"
 
 [[projects]]
+  digest = "1:5e55a8699c9ff7aba1e4c8952aeda209685d88d4cb63a8766c338e333b8e65d6"
   name = "github.com/klauspost/cpuid"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "ae7887de9fa5d2db4eaa8174a7eff2c1ac00f2da"
   version = "v1.1"
 
 [[projects]]
+  digest = "1:b95da1293525625ef6f07be79d537b9bf2ecd7901efcf9a92193edafbd55b9ef"
   name = "github.com/klauspost/crc32"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "cb6bfca970f6908083f26f39a79009d608efd5cd"
   version = "v1.1"
 
 [[projects]]
+  digest = "1:7b21c7fc5551b46d1308b4ffa9e9e49b66c7a8b0ba88c0130474b0e7a20d859f"
   name = "github.com/kr/pretty"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "73f6ac0b30a98e433b289500d779f50c1a6f0712"
   version = "v0.1.0"
 
 [[projects]]
+  digest = "1:c3a7836b5904db0f8b609595b619916a6831cb35b8b714aec39f96d00c6155d8"
   name = "github.com/kr/text"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "e2ffdb16a802fe2bb95e2e35ff34f0e53aeef34f"
   version = "v0.1.0"
 
 [[projects]]
   branch = "master"
+  digest = "1:7a1e592f0349d56fac8ce47f28469e4e7f4ce637cb26f40c88da9dff25db1c98"
   name = "github.com/lib/pq"
   packages = [
     ".",
-    "oid"
+    "oid",
   ]
+  pruneopts = "NUT"
   revision = "d34b9ff171c21ad295489235aec8b6626023cd04"
 
 [[projects]]
+  digest = "1:08c231ec84231a7e23d67e4b58f975e1423695a32467a362ee55a803f9de8061"
   name = "github.com/mattn/go-colorable"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
   version = "v0.0.9"
 
 [[projects]]
+  digest = "1:bc4f7eec3b7be8c6cb1f0af6c1e3333d5bb71072951aaaae2f05067b0803f287"
   name = "github.com/mattn/go-isatty"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
   version = "v0.0.3"
 
 [[projects]]
+  digest = "1:536979f1c56397dbf91c2785159b37dec37e35d3bffa3cd1cfe66d25f51f8088"
   name = "github.com/mattn/go-sqlite3"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "323a32be5a2421b8c7087225079c6c900ec397cd"
   version = "v1.7.0"
 
 [[projects]]
+  digest = "1:5985ef4caf91ece5d54817c11ea25f182697534f8ae6521eadcd628c142ac4b6"
   name = "github.com/matttproud/golang_protobuf_extensions"
   packages = ["pbutil"]
+  pruneopts = "NUT"
   revision = "3247c84500bff8d9fb6d579d800f20b3e091582c"
   version = "v1.0.0"
 
 [[projects]]
   branch = "master"
+  digest = "1:18b773b92ac82a451c1276bd2776c1e55ce057ee202691ab33c8d6690efcc048"
   name = "github.com/mitchellh/go-testing-interface"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "a61a99592b77c9ba629d254a693acffaeb4b7e28"
 
 [[projects]]
+  digest = "1:3b517122f3aad1ecce45a630ea912b3092b4729f25532a911d0cb2935a1f9352"
   name = "github.com/oklog/run"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39"
   version = "v1.0.0"
 
 [[projects]]
+  digest = "1:7da29c22bcc5c2ffb308324377dc00b5084650348c2799e573ed226d8cc9faf0"
   name = "github.com/opentracing/opentracing-go"
   packages = [
     ".",
     "ext",
-    "log"
+    "log",
   ]
+  pruneopts = "NUT"
   revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38"
   version = "v1.0.2"
 
 [[projects]]
+  digest = "1:748946761cf99c8b73cef5a3c0ee3e040859dd713a20cece0d0e0dc04e6ceca7"
   name = "github.com/patrickmn/go-cache"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "a3647f8e31d79543b2d0f0ae2fe5c379d72cedc0"
   version = "v2.1.0"
 
 [[projects]]
+  digest = "1:5cf3f025cbee5951a4ee961de067c8a89fc95a5adabead774f82822efabab121"
   name = "github.com/pkg/errors"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
   version = "v0.8.0"
 
 [[projects]]
+  digest = "1:4759bed95e3a52febc18c071db28790a5c6e9e106ee201a37add6f6a056f8f9c"
   name = "github.com/prometheus/client_golang"
   packages = [
     "api",
     "api/prometheus/v1",
     "prometheus",
-    "prometheus/promhttp"
+    "prometheus/promhttp",
   ]
+  pruneopts = "NUT"
   revision = "967789050ba94deca04a5e84cce8ad472ce313c1"
   version = "v0.9.0-pre1"
 
 [[projects]]
   branch = "master"
+  digest = "1:32d10bdfa8f09ecf13598324dba86ab891f11db3c538b6a34d1c3b5b99d7c36b"
   name = "github.com/prometheus/client_model"
   packages = ["go"]
+  pruneopts = "NUT"
   revision = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c"
 
 [[projects]]
   branch = "master"
+  digest = "1:768b555b86742de2f28beb37f1dedce9a75f91f871d75b5717c96399c1a78c08"
   name = "github.com/prometheus/common"
   packages = [
     "expfmt",
     "internal/bitbucket.org/ww/goautoneg",
-    "model"
+    "model",
   ]
+  pruneopts = "NUT"
   revision = "d811d2e9bf898806ecfb6ef6296774b13ffc314c"
 
 [[projects]]
   branch = "master"
+  digest = "1:c4a213a8d73fbb0b13f717ba7996116602ef18ecb42b91d77405877914cb0349"
   name = "github.com/prometheus/procfs"
   packages = [
     ".",
     "internal/util",
     "nfs",
-    "xfs"
+    "xfs",
   ]
+  pruneopts = "NUT"
   revision = "8b1c2da0d56deffdbb9e48d4414b4e674bd8083e"
 
 [[projects]]
   branch = "master"
+  digest = "1:16e2136a67ec44aa2d1d6b0fd65394b3c4a8b2a1b6730c77967f7b7b06b179b2"
   name = "github.com/rainycape/unidecode"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "cb7f23ec59bec0d61b19c56cd88cee3d0cc1870c"
 
 [[projects]]
+  digest = "1:d917313f309bda80d27274d53985bc65651f81a5b66b820749ac7f8ef061fd04"
   name = "github.com/sergi/go-diff"
   packages = ["diffmatchpatch"]
+  pruneopts = "NUT"
   revision = "1744e2970ca51c86172c8190fadad617561ed6e7"
   version = "v1.0.0"
 
 [[projects]]
+  digest = "1:1f0b284a6858827de4c27c66b49b2b25df3e16b031c2b57b7892273131e7dd2b"
   name = "github.com/smartystreets/assertions"
   packages = [
     ".",
     "internal/go-render/render",
-    "internal/oglematchers"
+    "internal/oglematchers",
   ]
+  pruneopts = "NUT"
   revision = "7678a5452ebea5b7090a6b163f844c133f523da2"
   version = "1.8.3"
 
 [[projects]]
+  digest = "1:7efd0b2309cdd6468029fa30c808c50a820c9344df07e1a4bbdaf18f282907aa"
   name = "github.com/smartystreets/goconvey"
   packages = [
     "convey",
     "convey/gotest",
-    "convey/reporting"
+    "convey/reporting",
   ]
+  pruneopts = "NUT"
   revision = "9e8dc3f972df6c8fcc0375ef492c24d0bb204857"
   version = "1.6.3"
 
 [[projects]]
   branch = "master"
+  digest = "1:a66add8dd963bfc72649017c1b321198f596cb4958cb1a11ff91a1be8691020b"
   name = "github.com/teris-io/shortid"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "771a37caa5cf0c81f585d7b6df4dfc77e0615b5c"
 
 [[projects]]
+  digest = "1:3d48c38e0eca8c66df62379c5ae7a83fb5cd839b94f241354c07ba077da7bc45"
   name = "github.com/uber/jaeger-client-go"
   packages = [
     ".",
@@ -479,45 +596,55 @@
     "thrift-gen/jaeger",
     "thrift-gen/sampling",
     "thrift-gen/zipkincore",
-    "utils"
+    "utils",
   ]
+  pruneopts = "NUT"
   revision = "b043381d944715b469fd6b37addfd30145ca1758"
   version = "v2.14.0"
 
 [[projects]]
+  digest = "1:0f09db8429e19d57c8346ad76fbbc679341fa86073d3b8fb5ac919f0357d8f4c"
   name = "github.com/uber/jaeger-lib"
   packages = ["metrics"]
+  pruneopts = "NUT"
   revision = "ed3a127ec5fef7ae9ea95b01b542c47fbd999ce5"
   version = "v1.5.0"
 
 [[projects]]
+  digest = "1:4c7d12ad3ef47bb03892a52e2609dc9a9cff93136ca9c7d31c00b79fcbc23c7b"
   name = "github.com/yudai/gojsondiff"
   packages = [
     ".",
-    "formatter"
+    "formatter",
   ]
+  pruneopts = "NUT"
   revision = "7b1b7adf999dab73a6eb02669c3d82dbb27a3dd6"
   version = "1.0.0"
 
 [[projects]]
   branch = "master"
+  digest = "1:e50cbf8eba568d59b71e08c22c2a77809ed4646ae06ef4abb32b3d3d3fdb1a77"
   name = "github.com/yudai/golcs"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "ecda9a501e8220fae3b4b600c3db4b0ba22cfc68"
 
 [[projects]]
   branch = "master"
+  digest = "1:758f363e0dff33cf00b234be2efb12f919d79b42d5ae3909ff9eb69ef2c3cca5"
   name = "golang.org/x/crypto"
   packages = [
     "ed25519",
     "ed25519/internal/edwards25519",
     "md4",
-    "pbkdf2"
+    "pbkdf2",
   ]
+  pruneopts = "NUT"
   revision = "1a580b3eff7814fc9b40602fd35256c63b50f491"
 
 [[projects]]
   branch = "master"
+  digest = "1:0b3fee9c4472022a0982ee0d81e08b3cc3e595f50befd7a4b358b48540d9d8c5"
   name = "golang.org/x/net"
   packages = [
     "context",
@@ -527,35 +654,43 @@
     "http2/hpack",
     "idna",
     "internal/timeseries",
-    "trace"
+    "trace",
   ]
+  pruneopts = "NUT"
   revision = "2491c5de3490fced2f6cff376127c667efeed857"
 
 [[projects]]
   branch = "master"
+  digest = "1:46bd4e66bfce5e77f08fc2e8dcacc3676e679241ce83d9c150ff0397d686dd44"
   name = "golang.org/x/oauth2"
   packages = [
     ".",
     "google",
     "internal",
     "jws",
-    "jwt"
+    "jwt",
   ]
+  pruneopts = "NUT"
   revision = "cdc340f7c179dbbfa4afd43b7614e8fcadde4269"
 
 [[projects]]
   branch = "master"
+  digest = "1:39ebcc2b11457b703ae9ee2e8cca0f68df21969c6102cb3b705f76cca0ea0239"
   name = "golang.org/x/sync"
   packages = ["errgroup"]
+  pruneopts = "NUT"
   revision = "1d60e4601c6fd243af51cc01ddf169918a5407ca"
 
 [[projects]]
   branch = "master"
+  digest = "1:ec21c5bf0572488865b93e30ffd9132afbf85bec0b20c2d6cbcf349cf2031ed5"
   name = "golang.org/x/sys"
   packages = ["unix"]
+  pruneopts = "NUT"
   revision = "7c87d13f8e835d2fb3a70a2912c811ed0c1d241b"
 
 [[projects]]
+  digest = "1:e7071ed636b5422cc51c0e3a6cebc229d6c9fffc528814b519a980641422d619"
   name = "golang.org/x/text"
   packages = [
     "collate",
@@ -571,12 +706,14 @@
     "unicode/bidi",
     "unicode/cldr",
     "unicode/norm",
-    "unicode/rangetable"
+    "unicode/rangetable",
   ]
+  pruneopts = "NUT"
   revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
   version = "v0.3.0"
 
 [[projects]]
+  digest = "1:dbd5568923513ee74aa626d027e2a8a352cf8f35df41d19f4e34491d1858c38b"
   name = "google.golang.org/appengine"
   packages = [
     ".",
@@ -589,18 +726,22 @@
     "internal/modules",
     "internal/remote_api",
     "internal/urlfetch",
-    "urlfetch"
+    "urlfetch",
   ]
+  pruneopts = "NUT"
   revision = "150dc57a1b433e64154302bdc40b6bb8aefa313a"
   version = "v1.0.0"
 
 [[projects]]
   branch = "master"
+  digest = "1:3c24554c312721e98fa6b76403e7100cf974eb46b1255ea7fc6471db9a9ce498"
   name = "google.golang.org/genproto"
   packages = ["googleapis/rpc/status"]
+  pruneopts = "NUT"
   revision = "7bb2a897381c9c5ab2aeb8614f758d7766af68ff"
 
 [[projects]]
+  digest = "1:840b77b6eb539b830bb760b6e30b688ed2ff484bd83466fce2395835ed9367fe"
   name = "google.golang.org/grpc"
   packages = [
     ".",
@@ -627,78 +768,177 @@
     "stats",
     "status",
     "tap",
-    "transport"
+    "transport",
   ]
+  pruneopts = "NUT"
   revision = "1e2570b1b19ade82d8dbb31bba4e65e9f9ef5b34"
   version = "v1.11.1"
 
 [[projects]]
   branch = "v3"
+  digest = "1:1244a9b3856f70d5ffb74bbfd780fc9d47f93f2049fa265c6fb602878f507bf8"
   name = "gopkg.in/alexcesaro/quotedprintable.v3"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "2caba252f4dc53eaf6b553000885530023f54623"
 
 [[projects]]
+  digest = "1:aea6e9483c167cc6fdf1274c442558c5dda8fd3373372be04d98c79100868da1"
   name = "gopkg.in/asn1-ber.v1"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "379148ca0225df7a432012b8df0355c2a2063ac0"
   version = "v1.2"
 
 [[projects]]
+  digest = "1:24bfc2e8bf971485cb5ba0f0e5b08a1b806cca5828134df76b32d1ea50f2ab49"
   name = "gopkg.in/bufio.v1"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "567b2bfa514e796916c4747494d6ff5132a1dfce"
   version = "v1"
 
 [[projects]]
+  digest = "1:e05711632e1515319b014e8fe4cbe1d30ab024c473403f60cf0fdeb4c586a474"
   name = "gopkg.in/ini.v1"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "6529cf7c58879c08d927016dde4477f18a0634cb"
   version = "v1.36.0"
 
 [[projects]]
+  digest = "1:c847b7fea4c7e6db5281a37dffc4620cb78c1227403a79e5aa290db517657ac1"
   name = "gopkg.in/ldap.v3"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "5c2c0f997205c29de14cb6c35996370c2c5dfab1"
   version = "v3"
 
 [[projects]]
+  digest = "1:3b0cf3a465fd07f76e5fc1a9d0783c662dac0de9fc73d713ebe162768fd87b5f"
   name = "gopkg.in/macaron.v1"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "c1be95e6d21e769e44e1ec33cec9da5837861c10"
   version = "v1.3.1"
 
 [[projects]]
   branch = "v2"
+  digest = "1:d52332f9e9f2c6343652e13aa3fd40cfd03353520c9a48d90f21215d3012d50f"
   name = "gopkg.in/mail.v2"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "5bc5c8bb07bd8d2803831fbaf8cbd630fcde2c68"
 
 [[projects]]
+  digest = "1:00126f697efdcab42f07c89ac8bf0095fb2328aef6464e070055154088cea859"
   name = "gopkg.in/redis.v2"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "e6179049628164864e6e84e973cfb56335748dea"
   version = "v2.3.2"
 
 [[projects]]
+  digest = "1:a50fabe7a46692dc7c656310add3d517abe7914df02afd151ef84da884605dc8"
   name = "gopkg.in/square/go-jose.v2"
   packages = [
     ".",
     "cipher",
-    "json"
+    "json",
   ]
+  pruneopts = "NUT"
   revision = "ef984e69dd356202fd4e4910d4d9c24468bdf0b8"
   version = "v2.1.9"
 
 [[projects]]
   branch = "v2"
+  digest = "1:7c95b35057a0ff2e19f707173cc1a947fa43a6eb5c4d300d196ece0334046082"
   name = "gopkg.in/yaml.v2"
   packages = ["."]
+  pruneopts = "NUT"
   revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
 
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "88f0eb826b9c154ba46ea3bb64767707d86db75449ec75199eb2b8cf2b337fd4"
+  input-imports = [
+    "github.com/BurntSushi/toml",
+    "github.com/Unknwon/com",
+    "github.com/VividCortex/mysqlerr",
+    "github.com/aws/aws-sdk-go/aws",
+    "github.com/aws/aws-sdk-go/aws/awserr",
+    "github.com/aws/aws-sdk-go/aws/awsutil",
+    "github.com/aws/aws-sdk-go/aws/credentials",
+    "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds",
+    "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds",
+    "github.com/aws/aws-sdk-go/aws/defaults",
+    "github.com/aws/aws-sdk-go/aws/ec2metadata",
+    "github.com/aws/aws-sdk-go/aws/endpoints",
+    "github.com/aws/aws-sdk-go/aws/request",
+    "github.com/aws/aws-sdk-go/aws/session",
+    "github.com/aws/aws-sdk-go/service/cloudwatch",
+    "github.com/aws/aws-sdk-go/service/ec2",
+    "github.com/aws/aws-sdk-go/service/ec2/ec2iface",
+    "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi",
+    "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi/resourcegroupstaggingapiiface",
+    "github.com/aws/aws-sdk-go/service/s3",
+    "github.com/aws/aws-sdk-go/service/sts",
+    "github.com/benbjohnson/clock",
+    "github.com/bmizerany/assert",
+    "github.com/codegangsta/cli",
+    "github.com/davecgh/go-spew/spew",
+    "github.com/denisenkom/go-mssqldb",
+    "github.com/facebookgo/inject",
+    "github.com/fatih/color",
+    "github.com/go-macaron/binding",
+    "github.com/go-macaron/gzip",
+    "github.com/go-macaron/session",
+    "github.com/go-macaron/session/memcache",
+    "github.com/go-macaron/session/postgres",
+    "github.com/go-macaron/session/redis",
+    "github.com/go-sql-driver/mysql",
+    "github.com/go-stack/stack",
+    "github.com/go-xorm/core",
+    "github.com/go-xorm/xorm",
+    "github.com/gorilla/websocket",
+    "github.com/gosimple/slug",
+    "github.com/grafana/grafana-plugin-model/go/datasource",
+    "github.com/grafana/grafana-plugin-model/go/renderer",
+    "github.com/hashicorp/go-hclog",
+    "github.com/hashicorp/go-plugin",
+    "github.com/hashicorp/go-version",
+    "github.com/inconshreveable/log15",
+    "github.com/lib/pq",
+    "github.com/mattn/go-isatty",
+    "github.com/mattn/go-sqlite3",
+    "github.com/opentracing/opentracing-go",
+    "github.com/opentracing/opentracing-go/ext",
+    "github.com/opentracing/opentracing-go/log",
+    "github.com/patrickmn/go-cache",
+    "github.com/pkg/errors",
+    "github.com/prometheus/client_golang/api",
+    "github.com/prometheus/client_golang/api/prometheus/v1",
+    "github.com/prometheus/client_golang/prometheus",
+    "github.com/prometheus/client_golang/prometheus/promhttp",
+    "github.com/prometheus/client_model/go",
+    "github.com/prometheus/common/expfmt",
+    "github.com/prometheus/common/model",
+    "github.com/smartystreets/goconvey/convey",
+    "github.com/teris-io/shortid",
+    "github.com/uber/jaeger-client-go/config",
+    "github.com/yudai/gojsondiff",
+    "github.com/yudai/gojsondiff/formatter",
+    "golang.org/x/net/context/ctxhttp",
+    "golang.org/x/oauth2",
+    "golang.org/x/oauth2/google",
+    "golang.org/x/oauth2/jwt",
+    "golang.org/x/sync/errgroup",
+    "gopkg.in/ini.v1",
+    "gopkg.in/ldap.v3",
+    "gopkg.in/macaron.v1",
+    "gopkg.in/mail.v2",
+    "gopkg.in/square/go-jose.v2",
+    "gopkg.in/yaml.v2",
+  ]
   solver-name = "gps-cdcl"
   solver-version = 1

+ 6 - 2
Gopkg.toml

@@ -81,11 +81,15 @@ ignored = [
 
 [[constraint]]
   name = "github.com/go-xorm/core"
-  version = "=0.5.7"
+  version = "=0.6.2"
+
+[[override]]
+  name = "github.com/go-xorm/builder"
+  version = "=0.3.4"
 
 [[constraint]]
   name = "github.com/go-xorm/xorm"
-  version = "=0.6.4"
+  version = "=0.7.1"
 
 [[constraint]]
   name = "github.com/gorilla/websocket"

+ 2 - 2
latest.json

@@ -1,4 +1,4 @@
 {
-  "stable": "6.0.0",
-  "testing": "6.0.0"
+  "stable": "6.0.1",
+  "testing": "6.0.1"
 }

+ 9 - 7
package.json

@@ -123,10 +123,10 @@
   },
   "scripts": {
     "dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js",
-    "start": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --theme",
-    "start:hot": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --hot --theme",
-    "start:ignoreTheme": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --hot",
-    "watch": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --theme -d watch,start",
+    "start": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --watchTheme",
+    "start:hot": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot --watchTheme",
+    "start:ignoreTheme": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot",
+    "watch": "yarn start -d watch,start core:start --watchTheme ",
     "build": "grunt build",
     "test": "grunt test",
     "tslint": "tslint -c tslint.json --project tsconfig.json",
@@ -136,8 +136,11 @@
     "storybook": "cd packages/grafana-ui && yarn storybook",
     "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts",
     "prettier:check": "prettier --list-different \"**/*.{ts,tsx,scss}\"",
-    "gui:build": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --build",
-    "gui:release": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts --release"
+    "gui:build": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:build",
+    "gui:releasePrepare": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release",
+    "gui:publish": "cd packages/grafana-ui/dist && npm publish --access public",
+    "gui:release": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release -p",
+    "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts"
   },
   "husky": {
     "hooks": {
@@ -169,7 +172,6 @@
     "angular-native-dragdrop": "1.2.2",
     "angular-route": "1.6.6",
     "angular-sanitize": "1.6.6",
-    "ansicolor": "1.1.78",
     "baron": "^3.0.3",
     "brace": "^0.10.0",
     "classnames": "^2.2.6",

+ 1 - 1
packages/grafana-ui/src/utils/valueFormats/categories.ts

@@ -137,7 +137,7 @@ export const getCategories = (): ValueFormatCategory[] => [
     formats: [
       { name: 'packets/sec', id: 'pps', fn: decimalSIPrefix('pps') },
       { name: 'bits/sec', id: 'bps', fn: decimalSIPrefix('bps') },
-      { name: 'bytes/sec', id: 'Bps', fn: decimalSIPrefix('B/s') },
+      { name: 'bytes/sec', id: 'Bps', fn: decimalSIPrefix('Bs') },
       { name: 'kilobytes/sec', id: 'KBs', fn: decimalSIPrefix('Bs', 1) },
       { name: 'kilobits/sec', id: 'Kbits', fn: decimalSIPrefix('bps', 1) },
       { name: 'megabytes/sec', id: 'MBs', fn: decimalSIPrefix('Bs', 2) },

+ 1 - 0
pkg/api/dashboard.go

@@ -488,6 +488,7 @@ func (hs *HTTPServer) RestoreDashboardVersion(c *m.ReqContext, apiCmd dtos.Resto
 	saveCmd.Dashboard.Set("version", dash.Version)
 	saveCmd.Dashboard.Set("uid", dash.Uid)
 	saveCmd.Message = fmt.Sprintf("Restored from version %d", version.Version)
+	saveCmd.FolderId = dash.FolderId
 
 	return hs.PostDashboard(c, saveCmd)
 }

+ 124 - 0
pkg/api/dashboard_test.go

@@ -810,6 +810,93 @@ func TestDashboardApiEndpoint(t *testing.T) {
 			})
 		})
 	})
+
+	Convey("Given dashboard in folder being restored should restore to folder", t, func() {
+		fakeDash := m.NewDashboard("Child dash")
+		fakeDash.Id = 2
+		fakeDash.FolderId = 1
+		fakeDash.HasAcl = false
+
+		bus.AddHandler("test", func(query *m.GetDashboardQuery) error {
+			query.Result = fakeDash
+			return nil
+		})
+
+		bus.AddHandler("test", func(query *m.GetDashboardVersionQuery) error {
+			query.Result = &m.DashboardVersion{
+				DashboardId: 2,
+				Version:     1,
+				Data:        fakeDash.Data,
+			}
+			return nil
+		})
+
+		mock := &dashboards.FakeDashboardService{
+			SaveDashboardResult: &m.Dashboard{
+				Id:      2,
+				Uid:     "uid",
+				Title:   "Dash",
+				Slug:    "dash",
+				Version: 1,
+			},
+		}
+
+		cmd := dtos.RestoreDashboardVersionCommand{
+			Version: 1,
+		}
+
+		restoreDashboardVersionScenario("When calling POST on", "/api/dashboards/id/1/restore", "/api/dashboards/id/:dashboardId/restore", mock, cmd, func(sc *scenarioContext) {
+			CallRestoreDashboardVersion(sc)
+			So(sc.resp.Code, ShouldEqual, 200)
+			dto := mock.SavedDashboards[0]
+			So(dto.Dashboard.FolderId, ShouldEqual, 1)
+			So(dto.Dashboard.Title, ShouldEqual, "Child dash")
+			So(dto.Message, ShouldEqual, "Restored from version 1")
+		})
+	})
+
+	Convey("Given dashboard in general folder being restored should restore to general folder", t, func() {
+		fakeDash := m.NewDashboard("Child dash")
+		fakeDash.Id = 2
+		fakeDash.HasAcl = false
+
+		bus.AddHandler("test", func(query *m.GetDashboardQuery) error {
+			query.Result = fakeDash
+			return nil
+		})
+
+		bus.AddHandler("test", func(query *m.GetDashboardVersionQuery) error {
+			query.Result = &m.DashboardVersion{
+				DashboardId: 2,
+				Version:     1,
+				Data:        fakeDash.Data,
+			}
+			return nil
+		})
+
+		mock := &dashboards.FakeDashboardService{
+			SaveDashboardResult: &m.Dashboard{
+				Id:      2,
+				Uid:     "uid",
+				Title:   "Dash",
+				Slug:    "dash",
+				Version: 1,
+			},
+		}
+
+		cmd := dtos.RestoreDashboardVersionCommand{
+			Version: 1,
+		}
+
+		restoreDashboardVersionScenario("When calling POST on", "/api/dashboards/id/1/restore", "/api/dashboards/id/:dashboardId/restore", mock, cmd, func(sc *scenarioContext) {
+			CallRestoreDashboardVersion(sc)
+			So(sc.resp.Code, ShouldEqual, 200)
+			dto := mock.SavedDashboards[0]
+			So(dto.Dashboard.FolderId, ShouldEqual, 0)
+			So(dto.Dashboard.Title, ShouldEqual, "Child dash")
+			So(dto.Message, ShouldEqual, "Restored from version 1")
+		})
+	})
 }
 
 func GetDashboardShouldReturn200(sc *scenarioContext) dtos.DashboardFullWithMeta {
@@ -871,6 +958,10 @@ func CallPostDashboard(sc *scenarioContext) {
 	sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
 }
 
+func CallRestoreDashboardVersion(sc *scenarioContext) {
+	sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
+}
+
 func CallPostDashboardShouldReturnSuccess(sc *scenarioContext) {
 	CallPostDashboard(sc)
 
@@ -928,6 +1019,39 @@ func postDiffScenario(desc string, url string, routePattern string, cmd dtos.Cal
 	})
 }
 
+func restoreDashboardVersionScenario(desc string, url string, routePattern string, mock *dashboards.FakeDashboardService, cmd dtos.RestoreDashboardVersionCommand, fn scenarioFunc) {
+	Convey(desc+" "+url, func() {
+		defer bus.ClearBusHandlers()
+
+		hs := HTTPServer{
+			Bus: bus.GetBus(),
+		}
+
+		sc := setupScenarioContext(url)
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
+			sc.context = c
+			sc.context.SignedInUser = &m.SignedInUser{
+				OrgId:  TestOrgID,
+				UserId: TestUserID,
+			}
+			sc.context.OrgRole = m.ROLE_ADMIN
+
+			return hs.RestoreDashboardVersion(c, cmd)
+		})
+
+		origNewDashboardService := dashboards.NewService
+		dashboards.MockDashboardService(mock)
+
+		sc.m.Post(routePattern, sc.defaultHandler)
+
+		defer func() {
+			dashboards.NewService = origNewDashboardService
+		}()
+
+		fn(sc)
+	})
+}
+
 func (sc *scenarioContext) ToJSON() *simplejson.Json {
 	var result *simplejson.Json
 	err := json.NewDecoder(sc.resp.Body).Decode(&result)

+ 9 - 2
pkg/log/log.go

@@ -25,6 +25,7 @@ var filters map[string]log15.Lvl
 func init() {
 	loggersToClose = make([]DisposableHandler, 0)
 	loggersToReload = make([]ReloadableHandler, 0)
+	filters = map[string]log15.Lvl{}
 	Root = log15.Root()
 	Root.SetHandler(log15.DiscardHandler())
 }
@@ -197,7 +198,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) {
 
 		// Log level.
 		_, level := getLogLevelFromConfig("log."+mode, defaultLevelName, cfg)
-		filters := getFilters(util.SplitString(sec.Key("filters").String()))
+		modeFilters := getFilters(util.SplitString(sec.Key("filters").String()))
 		format := getLogFormat(sec.Key("format").MustString(""))
 
 		var handler log15.Handler
@@ -230,12 +231,18 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) {
 		}
 
 		for key, value := range defaultFilters {
+			if _, exist := modeFilters[key]; !exist {
+				modeFilters[key] = value
+			}
+		}
+
+		for key, value := range modeFilters {
 			if _, exist := filters[key]; !exist {
 				filters[key] = value
 			}
 		}
 
-		handler = LogFilterHandler(level, filters, handler)
+		handler = LogFilterHandler(level, modeFilters, handler)
 		handlers = append(handlers, handler)
 	}
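
The `filters` → `modeFilters` rename in this hunk is more than cosmetic: the old `filters := getFilters(...)` declared a local variable that shadowed the package-level `filters` map (declared at the top of the file and now initialized in `init()`), so per-logger level overrides never reached whatever consults that package-level map, such as `GetLogLevelFor` used by the renderer below. A self-contained toy example of the shadowing effect (simplified types, not Grafana code):

```go
package main

import "fmt"

// Package-level map, analogous to pkg/log's `filters`.
var filters = map[string]int{}

func readConfig() {
	// `:=` declares a NEW local variable that shadows the package-level map,
	// so this assignment never reaches it.
	filters := map[string]int{"rendering": 1}
	_ = filters
}

func main() {
	readConfig()
	fmt.Println(len(filters)) // 0 – the package-level map is still empty
}
```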
 

+ 1 - 1
pkg/services/alerting/notifier.go

@@ -138,7 +138,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
 		return err
 	}
 
-	renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?panelId=%d", ref.Uid, ref.Slug, context.Rule.PanelId)
+	renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgId, context.Rule.PanelId)
 
 	result, err := n.renderService.Render(context.Ctx, renderOpts)
 	if err != nil {

+ 15 - 4
pkg/services/rendering/phantomjs.go

@@ -36,7 +36,7 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
 	defer middleware.RemoveRenderAuthKey(renderKey)
 
 	phantomDebugArg := "--debug=false"
-	if log.GetLogLevelFor("renderer") >= log.LvlDebug {
+	if log.GetLogLevelFor("rendering") >= log.LvlDebug {
 		phantomDebugArg = "--debug=true"
 	}
 
@@ -64,13 +64,26 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
 	cmd := exec.CommandContext(commandCtx, binPath, cmdArgs...)
 	cmd.Stderr = cmd.Stdout
 
+	timezone := ""
+
 	if opts.Timezone != "" {
+		timezone = isoTimeOffsetToPosixTz(opts.Timezone)
 		baseEnviron := os.Environ()
-		cmd.Env = appendEnviron(baseEnviron, "TZ", isoTimeOffsetToPosixTz(opts.Timezone))
+		cmd.Env = appendEnviron(baseEnviron, "TZ", timezone)
 	}
 
+	rs.log.Debug("executing Phantomjs", "binPath", binPath, "cmdArgs", cmdArgs, "timezone", timezone)
+
 	out, err := cmd.Output()
 
+	if out != nil {
+		rs.log.Debug("Phantomjs output", "out", string(out))
+	}
+
+	if err != nil {
+		rs.log.Debug("Phantomjs error", "error", err)
+	}
+
 	// check for timeout first
 	if commandCtx.Err() == context.DeadlineExceeded {
 		rs.log.Info("Rendering timed out")
@@ -82,8 +95,6 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
 		return nil, err
 	}
 
-	rs.log.Debug("Phantomjs output", "out", string(out))
-
 	rs.log.Debug("Image rendered", "path", pngPath)
 	return &RenderResult{FilePath: pngPath}, nil
 }
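
Two things change here: PhantomJS output and errors are now logged even when the render hits the deadline (the debug logging moved above the timeout check), and the logger name checked for debug level changes from "renderer" to "rendering", presumably matching the logger's registered name. The hunk also logs the POSIX `TZ` value produced by `isoTimeOffsetToPosixTz` from `opts.Timezone`. As background, POSIX `TZ` strings use the opposite sign convention from ISO offsets; a rough, hypothetical sketch of such a conversion (not Grafana's actual helper):

```go
// isoOffsetToPosixTZ is an illustrative sketch only. It assumes an input like
// "+05:30" and flips the sign, because a POSIX TZ value such as "UTC-05:30"
// means "local time is 5h30m ahead of UTC".
func isoOffsetToPosixTZ(isoOffset string) string {
	if len(isoOffset) == 0 || (isoOffset[0] != '+' && isoOffset[0] != '-') {
		return "UTC" // e.g. "Z" or an empty offset
	}
	sign := "-"
	if isoOffset[0] == '-' {
		sign = "+"
	}
	return "UTC" + sign + isoOffset[1:]
}
```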

+ 3 - 1
pkg/services/sqlstore/alert.go

@@ -309,7 +309,9 @@ func PauseAlert(cmd *m.PauseAlertCommand) error {
 			params = append(params, v)
 		}
 
-		res, err := sess.Exec(buffer.String(), params...)
+		sqlOrArgs := append([]interface{}{buffer.String()}, params...)
+
+		res, err := sess.Exec(sqlOrArgs...)
 		if err != nil {
 			return err
 		}
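
The same `sqlOrArgs` pattern reappears in `annotation.go` and `dashboard_version.go` below. It follows from the go-xorm upgrade in this merge (v0.6.4 → v0.7.1): the upgraded `Session.Exec` appears to take a single variadic `...interface{}` list rather than a SQL string plus separate parameters, so the query and its arguments have to be packed into one slice before expansion. A minimal sketch of the idea, using a hypothetical `execWithParams` helper (not part of Grafana):

```go
package sqlstore

import (
	"database/sql"

	"github.com/go-xorm/xorm"
)

// execWithParams shows the pattern used in the hunks above and below:
// prepend the SQL string to the parameter slice, then expand the combined
// slice into the variadic Exec call.
func execWithParams(sess *xorm.Session, query string, params ...interface{}) (sql.Result, error) {
	sqlOrArgs := append([]interface{}{query}, params...)
	return sess.Exec(sqlOrArgs...) // instead of sess.Exec(query, params...)
}
```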

+ 6 - 2
pkg/services/sqlstore/annotation.go

@@ -258,11 +258,15 @@ func (r *SqlAnnotationRepo) Delete(params *annotations.DeleteParams) error {
 			queryParams = []interface{}{params.DashboardId, params.PanelId, params.OrgId}
 		}
 
-		if _, err := sess.Exec(annoTagSql, queryParams...); err != nil {
+		sqlOrArgs := append([]interface{}{annoTagSql}, queryParams...)
+
+		if _, err := sess.Exec(sqlOrArgs...); err != nil {
 			return err
 		}
 
-		if _, err := sess.Exec(sql, queryParams...); err != nil {
+		sqlOrArgs = append([]interface{}{sql}, queryParams...)
+
+		if _, err := sess.Exec(sqlOrArgs...); err != nil {
 			return err
 		}
 

+ 3 - 2
pkg/services/sqlstore/dashboard_version.go

@@ -51,7 +51,7 @@ func GetDashboardVersions(query *m.GetDashboardVersionsQuery) error {
 				dashboard_version.message,
 				dashboard_version.data,`+
 			dialect.Quote("user")+`.login as created_by`).
-		Join("LEFT", "user", `dashboard_version.created_by = `+dialect.Quote("user")+`.id`).
+		Join("LEFT", dialect.Quote("user"), `dashboard_version.created_by = `+dialect.Quote("user")+`.id`).
 		Join("LEFT", "dashboard", `dashboard.id = dashboard_version.dashboard_id`).
 		Where("dashboard_version.dashboard_id=? AND dashboard.org_id=?", query.DashboardId, query.OrgId).
 		OrderBy("dashboard_version.version DESC").
@@ -102,7 +102,8 @@ func DeleteExpiredVersions(cmd *m.DeleteExpiredVersionsCommand) error {
 
 		if len(versionIdsToDelete) > 0 {
 			deleteExpiredSql := `DELETE FROM dashboard_version WHERE id IN (?` + strings.Repeat(",?", len(versionIdsToDelete)-1) + `)`
-			expiredResponse, err := sess.Exec(deleteExpiredSql, versionIdsToDelete...)
+			sqlOrArgs := append([]interface{}{deleteExpiredSql}, versionIdsToDelete...)
+			expiredResponse, err := sess.Exec(sqlOrArgs...)
 			if err != nil {
 				return err
 			}

+ 1 - 1
pkg/services/sqlstore/org_users.go

@@ -98,7 +98,7 @@ func GetOrgUsers(query *m.GetOrgUsersQuery) error {
 	query.Result = make([]*m.OrgUserDTO, 0)
 
 	sess := x.Table("org_user")
-	sess.Join("INNER", "user", fmt.Sprintf("org_user.user_id=%s.id", x.Dialect().Quote("user")))
+	sess.Join("INNER", x.Dialect().Quote("user"), fmt.Sprintf("org_user.user_id=%s.id", x.Dialect().Quote("user")))
 
 	whereConditions := make([]string, 0)
 	whereParams := make([]interface{}, 0)

+ 1 - 1
pkg/services/sqlstore/team.go

@@ -280,7 +280,7 @@ func RemoveTeamMember(cmd *m.RemoveTeamMemberCommand) error {
 func GetTeamMembers(query *m.GetTeamMembersQuery) error {
 	query.Result = make([]*m.TeamMemberDTO, 0)
 	sess := x.Table("team_member")
-	sess.Join("INNER", "user", fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
+	sess.Join("INNER", x.Dialect().Quote("user"), fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
 	if query.OrgId != 0 {
 		sess.Where("team_member.org_id=?", query.OrgId)
 	}

+ 1 - 1
pkg/services/sqlstore/user_test.go

@@ -208,7 +208,7 @@ func TestUserDataAccess(t *testing.T) {
 func GetOrgUsersForTest(query *m.GetOrgUsersQuery) error {
 	query.Result = make([]*m.OrgUserDTO, 0)
 	sess := x.Table("org_user")
-	sess.Join("LEFT ", "user", fmt.Sprintf("org_user.user_id=%s.id", x.Dialect().Quote("user")))
+	sess.Join("LEFT ", x.Dialect().Quote("user"), fmt.Sprintf("org_user.user_id=%s.id", x.Dialect().Quote("user")))
 	sess.Where("org_user.org_id=?", query.OrgId)
 	sess.Cols("org_user.org_id", "org_user.user_id", "user.email", "user.login", "org_user.role")
 

+ 1 - 1
public/app/core/controllers/json_editor_ctrl.ts

@@ -5,7 +5,7 @@ export class JsonEditorCtrl {
   /** @ngInject */
   constructor($scope) {
     $scope.json = angular.toJson($scope.model.object, true);
-    $scope.canUpdate = $scope.model.updateHandler !== void 0 && $scope.contextSrv.isEditor;
+    $scope.canUpdate = $scope.model.updateHandler !== void 0 && $scope.model.canUpdate;
     $scope.canCopy = $scope.model.enableCopy;
 
     $scope.update = () => {

+ 1 - 1
public/app/features/alerting/partials/alert_tab.html

@@ -17,7 +17,7 @@
           </div>
           <div class="gf-form max-width-11">
             <label class="gf-form-label width-5">For</label>
-            <input type="text" class="gf-form-input max-width-6" ng-model="ctrl.alert.for"
+            <input type="text" class="gf-form-input max-width-6 gf-form-input--has-help-icon" ng-model="ctrl.alert.for"
                   spellcheck='false' placeholder="5m">
             <info-popover mode="right-absolute">
               If an alert rule has a configured For and the query violates the configured

+ 6 - 1
public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx

@@ -100,7 +100,12 @@ export class AddPanelWidget extends React.Component<Props, State> {
     const newPanel: any = {
       type: panelPluginInfo.id,
       title: 'Panel Title',
-      gridPos: { x: gridPos.x, y: gridPos.y, w: gridPos.w, h: gridPos.h },
+      gridPos: {
+        x: gridPos.x,
+        y: gridPos.y,
+        w: panelPluginInfo.defaults.gridPos.w,
+        h: panelPluginInfo.defaults.gridPos.h,
+      },
     };
 
     // apply panel template / defaults

+ 14 - 20
public/app/features/dashboard/panel_editor/VisualizationTab.tsx

@@ -14,10 +14,10 @@ import { PluginHelp } from 'app/core/components/PluginHelp/PluginHelp';
 import { FadeIn } from 'app/core/components/Animations/FadeIn';
 
 // Types
-import { PanelModel } from '../state/PanelModel';
-import { DashboardModel } from '../state/DashboardModel';
+import { PanelModel } from '../state';
+import { DashboardModel } from '../state';
 import { PanelPlugin } from 'app/types/plugins';
-import { FilterInput } from 'app/core/components/FilterInput/FilterInput';
+import { VizPickerSearch } from './VizPickerSearch';
 
 interface Props {
   panel: PanelModel;
@@ -33,18 +33,19 @@ interface State {
   isVizPickerOpen: boolean;
   searchQuery: string;
   scrollTop: number;
+  hasBeenFocused: boolean;
 }
 
 export class VisualizationTab extends PureComponent<Props, State> {
   element: HTMLElement;
   angularOptions: AngularComponent;
-  searchInput: HTMLElement;
 
   constructor(props) {
     super(props);
 
     this.state = {
       isVizPickerOpen: this.props.urlOpenVizPicker,
+      hasBeenFocused: false,
       searchQuery: '',
       scrollTop: 0,
     };
@@ -162,7 +163,7 @@ export class VisualizationTab extends PureComponent<Props, State> {
       this.props.updateLocation({ query: { openVizPicker: null }, partial: true });
     }
 
-    this.setState({ isVizPickerOpen: false });
+    this.setState({ isVizPickerOpen: false, hasBeenFocused: false });
   };
 
   onSearchQueryChange = (value: string) => {
@@ -173,23 +174,16 @@ export class VisualizationTab extends PureComponent<Props, State> {
 
   renderToolbar = (): JSX.Element => {
     const { plugin } = this.props;
-    const { searchQuery } = this.state;
+    const { isVizPickerOpen, searchQuery } = this.state;
 
-    if (this.state.isVizPickerOpen) {
+    if (isVizPickerOpen) {
       return (
-        <>
-          <FilterInput
-            labelClassName="gf-form--has-input-icon"
-            inputClassName="gf-form-input width-13"
-            placeholder=""
-            onChange={this.onSearchQueryChange}
-            value={searchQuery}
-            ref={elem => elem && elem.focus()}
-          />
-          <button className="btn btn-link toolbar__close" onClick={this.onCloseVizPicker}>
-            <i className="fa fa-chevron-up" />
-          </button>
-        </>
+        <VizPickerSearch
+          plugin={plugin}
+          searchQuery={searchQuery}
+          onChange={this.onSearchQueryChange}
+          onClose={this.onCloseVizPicker}
+        />
       );
     } else {
       return (

+ 33 - 0
public/app/features/dashboard/panel_editor/VizPickerSearch.tsx

@@ -0,0 +1,33 @@
+import React, { PureComponent } from 'react';
+
+import { FilterInput } from 'app/core/components/FilterInput/FilterInput';
+
+import { PanelPlugin } from 'app/types';
+
+interface Props {
+  plugin: PanelPlugin;
+  searchQuery: string;
+  onChange: (query: string) => void;
+  onClose: () => void;
+}
+
+export class VizPickerSearch extends PureComponent<Props> {
+  render() {
+    const { searchQuery, onChange, onClose } = this.props;
+    return (
+      <>
+        <FilterInput
+          labelClassName="gf-form--has-input-icon"
+          inputClassName="gf-form-input width-13"
+          placeholder=""
+          onChange={onChange}
+          value={searchQuery}
+          ref={element => element && element.focus()}
+        />
+        <button className="btn btn-link toolbar__close" onClick={onClose}>
+          <i className="fa fa-chevron-up" />
+        </button>
+      </>
+    );
+  }
+}

+ 1 - 0
public/app/features/dashboard/utils/panel.ts

@@ -70,6 +70,7 @@ export const editPanelJson = (dashboard: DashboardModel, panel: PanelModel) => {
     updateHandler: (newPanel: PanelModel, oldPanel: PanelModel) => {
       replacePanel(dashboard, newPanel, oldPanel);
     },
+    canUpdate: dashboard.meta.canEdit,
     enableCopy: true,
   };
 

+ 2 - 2
public/app/features/datasources/partials/http_settings.html

@@ -4,7 +4,7 @@
     <div class="gf-form-inline">
       <div class="gf-form max-width-30">
         <span class="gf-form-label width-10">URL</span>
-        <input class="gf-form-input" type="text"
+        <input class="gf-form-input gf-form-input--has-help-icon" type="text"
               ng-model='current.url' placeholder="{{suggestUrl}}"
               bs-typeahead="getSuggestUrls"  min-length="0"
               ng-pattern="/^(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?$/" required></input>
@@ -59,7 +59,7 @@
 		<div class="gf-form-inline" ng-if="current.access=='proxy'">
 			<div class="gf-form">
 				<span class="gf-form-label width-10">Whitelisted Cookies</span>
-				<bootstrap-tagsinput ng-model="current.jsonData.keepCookies" width-class="width-20" tagclass="label label-tag" placeholder="Add Name">
+				<bootstrap-tagsinput ng-model="current.jsonData.keepCookies" width-class="width-20 gf-form-input--has-help-icon" tagclass="label label-tag" placeholder="Add Name">
 				</bootstrap-tagsinput>
 				<info-popover mode="right-absolute">
 					Grafana Proxy deletes forwarded cookies by default. Specify cookies by name that should be forwarded to the data source.

+ 1 - 1
public/app/features/explore/LogMessageAnsi.tsx

@@ -1,5 +1,5 @@
 import React, { PureComponent } from 'react';
-import ansicolor from 'ansicolor';
+import ansicolor from 'vendor/ansicolor/ansicolor';
 
 interface Style {
   [key: string]: string;

+ 3 - 3
public/app/plugins/datasource/cloudwatch/partials/config.html

@@ -8,7 +8,7 @@
 
   <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "credentials"'>
     <label class="gf-form-label width-13">Credentials profile name</label>
-    <input type="text" class="gf-form-input max-width-18" ng-model='ctrl.current.database' placeholder="default"></input>
+    <input type="text" class="gf-form-input max-width-18 gf-form-input--has-help-icon" ng-model='ctrl.current.database' placeholder="default"></input>
     <info-popover mode="right-absolute">
       Credentials profile name, as specified in ~/.aws/credentials, leave blank for default
     </info-popover>
@@ -30,7 +30,7 @@
 
   <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "arn"'>
     <label class="gf-form-label width-13">Assume Role ARN</label>
-    <input type="text" class="gf-form-input max-width-18" ng-model='ctrl.current.jsonData.assumeRoleArn' placeholder="arn:aws:iam:*"></input>
+    <input type="text" class="gf-form-input max-width-18 gf-form-input--has-help-icon" ng-model='ctrl.current.jsonData.assumeRoleArn' placeholder="arn:aws:iam:*"></input>
     <info-popover mode="right-absolute">
       ARN of Assume Role
     </info-popover>
@@ -47,7 +47,7 @@
   </div>
   <div class="gf-form">
     <label class="gf-form-label width-13">Custom Metrics</label>
-    <input type="text" class="gf-form-input max-width-18" ng-model='ctrl.current.jsonData.customMetricsNamespaces' placeholder="Namespace1,Namespace2"></input>
+    <input type="text" class="gf-form-input max-width-18 gf-form-input--has-help-icon" ng-model='ctrl.current.jsonData.customMetricsNamespaces' placeholder="Namespace1,Namespace2"></input>
     <info-popover mode="right-absolute">
       Namespaces of Custom Metrics
     </info-popover>

+ 1 - 1
public/app/plugins/datasource/elasticsearch/partials/config.html

@@ -36,7 +36,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form">
 			<span class="gf-form-label width-9">Min time interval</span>
-			<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="10s"></input>
+			<input type="text" class="gf-form-input width-6 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="10s"></input>
 			<info-popover mode="right-absolute">
 				A lower limit for the auto group by time interval. Recommended to be set to write frequency,
 				for example <code>1m</code> if your data is written every minute.

+ 10 - 10
public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/config.html

@@ -16,7 +16,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Subscription Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.subscriptionId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.subscriptionId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Subscriptions -> Choose subscription -> Overview -> Subscription ID.</p>
         <a target="_blank" href="https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-create-service-principal-portal">**Click
@@ -27,7 +27,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Tenant Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.tenantId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.tenantId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Azure Active Directory -> Properties -> Directory ID.</p>
         <a target="_blank" href="https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-create-service-principal-portal">**Click
@@ -38,7 +38,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Client Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.clientId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.clientId" placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Azure Active Directory -> App Registrations -> Choose your app ->
           Application ID.</p>
@@ -50,7 +50,7 @@
   <div class="gf-form-inline" ng-if="!ctrl.current.secureJsonFields.clientSecret">
     <div class="gf-form">
       <span class="gf-form-label width-9">Client Secret</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.secureJsonData.clientSecret" placeholder=""></input>
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.secureJsonData.clientSecret" placeholder=""></input>
       <info-popover mode="right-absolute">
         <p>To create a new key, log in to Azure Portal, navigate to Azure Active Directory -> App Registrations ->
           Choose your
@@ -83,7 +83,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Subscription Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.logAnalyticsSubscriptionId"
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.logAnalyticsSubscriptionId"
         placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" />
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Subscriptions -> Choose subscription -> Overview -> Subscription ID.</p>
@@ -95,7 +95,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Tenant Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.logAnalyticsTenantId"
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.logAnalyticsTenantId"
         placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" />
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Azure Active Directory -> Properties -> Directory ID.</p>
@@ -107,7 +107,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Client Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.logAnalyticsClientId"
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.logAnalyticsClientId"
         placeholder="XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"></input>
       <info-popover mode="right-absolute">
         <p>In the Azure Portal, navigate to Azure Active Directory -> App Registrations -> Choose your app ->
@@ -121,7 +121,7 @@
   <div class="gf-form-inline" ng-if="!ctrl.current.secureJsonFields.logAnalyticsClientSecret">
     <div class="gf-form">
       <span class="gf-form-label width-9">Client Secret</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.secureJsonData.logAnalyticsClientSecret"
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.secureJsonData.logAnalyticsClientSecret"
         placeholder="" />
       <info-popover mode="right-absolute">
         <p>To create a new key, log in to Azure Portal, navigate to Azure Active Directory -> App Registrations ->
@@ -168,7 +168,7 @@
   <div class="gf-form-inline" ng-if="!ctrl.current.secureJsonFields.appInsightsApiKey">
     <div class="gf-form">
       <span class="gf-form-label width-9">API Key</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.secureJsonData.appInsightsApiKey"
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.secureJsonData.appInsightsApiKey"
         placeholder="" />
       <info-popover mode="right-absolute">
         <p>Section 2 of the Quickstart guide shows where to find/create the API Key:</p>
@@ -185,7 +185,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-9">Application Id</span>
-      <input class="gf-form-input width-30" type="text" ng-model="ctrl.current.jsonData.appInsightsAppId" placeholder="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"></input>
+      <input class="gf-form-input width-30 gf-form-input--has-help-icon" type="text" ng-model="ctrl.current.jsonData.appInsightsAppId" placeholder="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"></input>
       <info-popover mode="right-absolute">
         <p>Section 2 of the Quickstart guide shows where to find the Application ID:</p>
         <a target="_blank" href="https://dev.applicationinsights.io/quickstart/">**Click here to open the Application

+ 1 - 1
public/app/plugins/datasource/influxdb/partials/config.html

@@ -41,7 +41,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form">
 			<span class="gf-form-label">Min time interval</span>
-			<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="10s"></input>
+			<input type="text" class="gf-form-input width-6 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="10s"></input>
 			<info-popover mode="right-absolute">
 				A lower limit for the auto group by time interval. Recommended to be set to write frequency,
 				for example <code>1m</code> if your data is written every minute.

+ 1 - 1
public/app/plugins/datasource/loki/partials/config.html

@@ -5,7 +5,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-8">Maximum lines</span>
-      <input type="text" class="gf-form-input width-8" ng-model="ctrl.current.jsonData.maxLines" spellcheck='false' placeholder="1000"></input>
+      <input type="text" class="gf-form-input width-8 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxLines" spellcheck='false' placeholder="1000"></input>
       <info-popover mode="right-absolute">
         Loki queries must contain a limit of the maximum number of lines returned (default: 1000).
         Increase this limit to have a bigger result set for ad-hoc analysis.

+ 1 - 1
public/app/plugins/datasource/loki/result_transformer.ts

@@ -1,4 +1,4 @@
-import ansicolor from 'ansicolor';
+import ansicolor from 'vendor/ansicolor/ansicolor';
 import _ from 'lodash';
 import moment from 'moment';
 

+ 4 - 4
public/app/plugins/datasource/mssql/partials/config.html

@@ -50,7 +50,7 @@
 <div class="gf-form-group">
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max open</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of open connections to the database. If <i>Max idle connections</i> is greater than 0 and the
 			<i>Max open connections</i> is less than <i>Max idle connections</i>, then <i>Max idle connections</i> will be
@@ -60,7 +60,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max idle</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of connections in the idle connection pool. If <i>Max open connections</i> is greater than 0 but
 			less than the <i>Max idle connections</i>, then the <i>Max idle connections</i> will be reduced to match the
@@ -69,7 +69,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max lifetime</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
 		<info-popover mode="right-absolute">
 			The maximum amount of time in seconds a connection may be reused. If set to 0, connections are reused forever.
 		</info-popover>
@@ -82,7 +82,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form">
 			<span class="gf-form-label width-9">Min time interval</span>
-			<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
+			<input type="text" class="gf-form-input width-6 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
 			<info-popover mode="right-absolute">
 				A lower limit for the auto group by time interval. Recommended to be set to write frequency,
 				for example <code>1m</code> if your data is written every minute.

+ 4 - 4
public/app/plugins/datasource/mysql/partials/config.html

@@ -44,7 +44,7 @@
 <div class="gf-form-group">
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max open</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of open connections to the database. If <i>Max idle connections</i> is greater than 0 and the
 			<i>Max open connections</i> is less than <i>Max idle connections</i>, then <i>Max idle connections</i> will be
@@ -54,7 +54,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max idle</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of connections in the idle connection pool. If <i>Max open connections</i> is greater than 0 but
 			less than the <i>Max idle connections</i>, then the <i>Max idle connections</i> will be reduced to match the
@@ -63,7 +63,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max lifetime</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
 		<info-popover mode="right-absolute">
 			The maximum amount of time in seconds a connection may be reused. If set to 0, connections are reused forever.<br/><br/>
 			This should always be lower than configured <a href="https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_wait_timeout" target="_blank">wait_timeout</a> in MySQL.
@@ -77,7 +77,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form">
 			<span class="gf-form-label width-9">Min time interval</span>
-			<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
+			<input type="text" class="gf-form-input width-6 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
 			<info-popover mode="right-absolute">
 				A lower limit for the auto group by time interval. Recommended to be set to write frequency,
 				for example <code>1m</code> if your data is written every minute.

+ 2 - 2
public/app/plugins/datasource/opentsdb/partials/query.editor.html

@@ -49,7 +49,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form max-width-25">
 			<label class="gf-form-label query-keyword width-8">Down sample</label>
-			<input type="text" class="gf-form-input"
+			<input type="text" class="gf-form-input gf-form-input--has-help-icon"
 						 ng-model="ctrl.target.downsampleInterval"
 						 ng-model-onblur
 			       ng-change="ctrl.targetBlur()"
@@ -139,7 +139,7 @@
 			</div>
 
       <gf-form-switch class="gf-form"
-                    label="Group by" 
+                    label="Group by"
                     label-class="query-keyword"
                     checked="ctrl.target.currentFilterGroupBy"
                     on-change="ctrl.targetBlur()">

+ 4 - 4
public/app/plugins/datasource/postgres/partials/config.html

@@ -43,7 +43,7 @@
 <div class="gf-form-group">
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max open</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxOpenConns" placeholder="unlimited"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of open connections to the database. If <i>Max idle connections</i> is greater than 0 and the
 			<i>Max open connections</i> is less than <i>Max idle connections</i>, then <i>Max idle connections</i> will be
@@ -53,7 +53,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max idle</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.maxIdleConns" placeholder="2"></input>
 		<info-popover mode="right-absolute">
 			The maximum number of connections in the idle connection pool. If <i>Max open connections</i> is greater than 0 but
 			less than the <i>Max idle connections</i>, then the <i>Max idle connections</i> will be reduced to match the
@@ -62,7 +62,7 @@
 	</div>
 	<div class="gf-form max-width-15">
 		<span class="gf-form-label width-7">Max lifetime</span>
-		<input type="number" min="0" class="gf-form-input" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
+		<input type="number" min="0" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.connMaxLifetime" placeholder="14400"></input>
 		<info-popover mode="right-absolute">
 			The maximum amount of time in seconds a connection may be reused. If set to 0, connections are reused forever.
 		</info-popover>
@@ -95,7 +95,7 @@
 	<div class="gf-form-inline">
 		<div class="gf-form">
 			<span class="gf-form-label width-9">Min time interval</span>
-			<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
+			<input type="text" class="gf-form-input width-6 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="1m"></input>
 			<info-popover mode="right-absolute">
 				A lower limit for the auto group by time interval. Recommended to be set to write frequency,
 				for example <code>1m</code> if your data is written every minute.

+ 2 - 2
public/app/plugins/datasource/prometheus/partials/config.html

@@ -5,7 +5,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-8">Scrape interval</span>
-      <input type="text" class="gf-form-input width-8" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="15s"></input>
+      <input type="text" class="gf-form-input width-8 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="15s"></input>
       <info-popover mode="right-absolute">
         Set this to your global scrape interval defined in your Prometheus config file. This will be used as a lower limit for the
         Prometheus step query parameter.
@@ -16,7 +16,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <span class="gf-form-label width-8">Query timeout</span>
-      <input type="text" class="gf-form-input width-8" ng-model="ctrl.current.jsonData.queryTimeout" spellcheck='false' placeholder="60s"></input>
+      <input type="text" class="gf-form-input width-8 gf-form-input--has-help-icon" ng-model="ctrl.current.jsonData.queryTimeout" spellcheck='false' placeholder="60s"></input>
       <info-popover mode="right-absolute">
         Set the Prometheus query timeout.
       </info-popover>

+ 2 - 2
public/app/plugins/datasource/prometheus/partials/query.editor.html

@@ -10,7 +10,7 @@
   <div class="gf-form-inline">
     <div class="gf-form">
       <label class="gf-form-label">Legend</label>
-      <input type="text" class="gf-form-input" ng-model="ctrl.target.legendFormat" spellcheck='false' placeholder="legend format"
+      <input type="text" class="gf-form-input gf-form-input--has-help-icon" ng-model="ctrl.target.legendFormat" spellcheck='false' placeholder="legend format"
         data-min-length=0 data-items=1000 ng-model-onblur ng-change="ctrl.refreshMetricData()">
       </input>
       <info-popover mode="right-absolute">
@@ -21,7 +21,7 @@
 
     <div class="gf-form">
       <label class="gf-form-label width-6">Min step</label>
-      <input type="text" class="gf-form-input width-8" ng-model="ctrl.target.interval" data-placement="right" spellcheck='false'
+      <input type="text" class="gf-form-input width-8 gf-form-input--has-help-icon" ng-model="ctrl.target.interval" data-placement="right" spellcheck='false'
         placeholder="{{ctrl.panelCtrl.interval}}" data-min-length=0 data-items=100 ng-model-onblur ng-change="ctrl.refreshMetricData()"
       />
       <info-popover mode="right-absolute">

+ 2 - 0
public/app/routes/routes.ts

@@ -81,6 +81,7 @@ export function setupAngularRoutes($routeProvider, $locationProvider) {
       template: '<react-container />',
       pageClass: 'dashboard-solo',
       routeInfo: DashboardRouteInfo.Normal,
+      reloadOnSearch: false,
       resolve: {
         component: () => SoloPanelPage,
       },
@@ -89,6 +90,7 @@ export function setupAngularRoutes($routeProvider, $locationProvider) {
       template: '<react-container />',
       pageClass: 'dashboard-solo',
       routeInfo: DashboardRouteInfo.Normal,
+      reloadOnSearch: false,
       resolve: {
         component: () => SoloPanelPage,
       },
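
For context: reloadOnSearch is a standard AngularJS $routeProvider option. With it set to false, a URL change that only touches the query string (for example an auto-refresh bumping from/to) broadcasts $routeUpdate instead of destroying and re-creating the route's component. A minimal sketch of the resulting route definition; the path is illustrative, since the hunks above do not show it:

$routeProvider.when('/d-solo/:uid/:slug', {
  template: '<react-container />',
  pageClass: 'dashboard-solo',
  routeInfo: DashboardRouteInfo.Normal,
  // Query-only changes no longer re-instantiate SoloPanelPage.
  reloadOnSearch: false,
  resolve: {
    component: () => SoloPanelPage,
  },
});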

+ 4 - 0
public/sass/components/_gf-form.scss

@@ -250,6 +250,10 @@ $input-border: 1px solid $input-border-color;
   &--plaintext {
     white-space: unset;
   }
+
+  &--has-help-icon {
+    padding-right: $input-padding-x * 3;
+  }
 }
 
 .gf-form-hint {

+ 4 - 0
public/sass/components/_tagsinput.scss

@@ -15,6 +15,10 @@
     height: 100%;
     width: 5rem;
     box-sizing: border-box;
+
+    &.gf-form-input--has-help-icon {
+      padding-right: $input-padding-x * 3;
+    }
   }
 
   .tag {

+ 471 - 0
public/vendor/ansicolor/ansicolor.ts

@@ -0,0 +1,471 @@
+// Vendored and converted to TS, source: https://github.com/xpl/ansicolor/blob/b82360563ed29de444dc7618b9236191e0a77096/ansicolor.js
+// License: Unlicense, author: https://github.com/xpl
+
+const O = Object;
+
+/*  See https://misc.flogisoft.com/bash/tip_colors_and_formatting
+    ------------------------------------------------------------------------ */
+
+const colorCodes = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'lightGray', '', 'default'],
+  colorCodesLight = [
+    'darkGray',
+    'lightRed',
+    'lightGreen',
+    'lightYellow',
+    'lightBlue',
+    'lightMagenta',
+    'lightCyan',
+    'white',
+    '',
+  ],
+  styleCodes = ['', 'bright', 'dim', 'italic', 'underline', '', '', 'inverse'],
+  asBright = {
+    red: 'lightRed',
+    green: 'lightGreen',
+    yellow: 'lightYellow',
+    blue: 'lightBlue',
+    magenta: 'lightMagenta',
+    cyan: 'lightCyan',
+    black: 'darkGray',
+    lightGray: 'white',
+  },
+  types = {
+    0: 'style',
+    2: 'unstyle',
+    3: 'color',
+    9: 'colorLight',
+    4: 'bgColor',
+    10: 'bgColorLight',
+  },
+  subtypes = {
+    color: colorCodes,
+    colorLight: colorCodesLight,
+    bgColor: colorCodes,
+    bgColorLight: colorCodesLight,
+    style: styleCodes,
+    unstyle: styleCodes,
+  };
+
+/*  ------------------------------------------------------------------------ */
+
+const clean = obj => {
+  for (const k in obj) {
+    if (!obj[k]) {
+      delete obj[k];
+    }
+  }
+  return O.keys(obj).length === 0 ? undefined : obj;
+};
+
+/*  ------------------------------------------------------------------------ */
+
+class Color {
+  background: string;
+  name: string;
+  brightness: number;
+
+  constructor(background?, name?, brightness?) {
+    this.background = background;
+    this.name = name;
+    this.brightness = brightness;
+  }
+
+  get inverse() {
+    return new Color(!this.background, this.name || (this.background ? 'black' : 'white'), this.brightness);
+  }
+
+  get clean() {
+    return clean({
+      name: this.name === 'default' ? '' : this.name,
+      bright: this.brightness === Code.bright,
+      dim: this.brightness === Code.dim,
+    });
+  }
+
+  defaultBrightness(value) {
+    return new Color(this.background, this.name, this.brightness || value);
+  }
+
+  css(inverted) {
+    const color = inverted ? this.inverse : this;
+
+    const rgbName = (color.brightness === Code.bright && asBright[color.name]) || color.name;
+
+    const prop = color.background ? 'background:' : 'color:',
+      rgb = Colors.rgb[rgbName],
+      alpha = this.brightness === Code.dim ? 0.5 : 1;
+
+    return rgb
+      ? prop + 'rgba(' + [...rgb, alpha].join(',') + ');'
+      : !color.background && alpha < 1 ? 'color:rgba(0,0,0,0.5);' : ''; // Chrome does not support 'opacity' property...
+  }
+}
+
+/*  ------------------------------------------------------------------------ */
+
+class Code {
+  static reset = 0;
+  static bright = 1;
+  static dim = 2;
+  static inverse = 7;
+  static noBrightness = 22;
+  static noItalic = 23;
+  static noUnderline = 24;
+  static noInverse = 27;
+  static noColor = 39;
+  static noBgColor = 49;
+
+  value: number;
+
+  constructor(n?) {
+    if (n !== undefined) {
+      this.value = Number(n);
+    }
+  }
+
+  get type() {
+    return types[Math.floor(this.value / 10)];
+  }
+
+  get subtype() {
+    return subtypes[this.type][this.value % 10];
+  }
+
+  get str() {
+    return this.value ? '\u001b[' + this.value + 'm' : '';
+  }
+
+  static str(x) {
+    return new Code(x).str;
+  }
+
+  get isBrightness() {
+    return this.value === Code.noBrightness || this.value === Code.bright || this.value === Code.dim;
+  }
+}
+
+/*  ------------------------------------------------------------------------ */
+
+const replaceAll = (str, a, b) => str.split(a).join(b);
+
+/*  ANSI brightness codes do not overlap, e.g. "{bright}{dim}foo" will be rendered bright (not dim).
+    So we fix it by adding brightness canceling before each brightness code, so the former example gets
+    converted to "{noBrightness}{bright}{noBrightness}{dim}foo" – this way it gets rendered as expected.
+ */
+
+const denormalizeBrightness = s => s.replace(/(\u001b\[(1|2)m)/g, '\u001b[22m$1');
+const normalizeBrightness = s => s.replace(/\u001b\[22m(\u001b\[(1|2)m)/g, '$1');
+
+const wrap = (x, openCode, closeCode) => {
+  const open = Code.str(openCode),
+    close = Code.str(closeCode);
+
+  return String(x)
+    .split('\n')
+    .map(line => denormalizeBrightness(open + replaceAll(normalizeBrightness(line), close, open) + close))
+    .join('\n');
+};
+
+/*  ------------------------------------------------------------------------ */
+
+const camel = (a, b) => a + b.charAt(0).toUpperCase() + b.slice(1);
+
+const stringWrappingMethods = (() =>
+  [
+    ...colorCodes.map(
+      (k, i) =>
+        !k
+          ? []
+          : [
+              // color methods
+
+              [k, 30 + i, Code.noColor],
+              [camel('bg', k), 40 + i, Code.noBgColor],
+            ]
+    ),
+
+    ...colorCodesLight.map(
+      (k, i) =>
+        !k
+          ? []
+          : [
+              // light color methods
+
+              [k, 90 + i, Code.noColor],
+              [camel('bg', k), 100 + i, Code.noBgColor],
+            ]
+    ),
+
+    /* THIS ONE IS FOR BACKWARDS COMPATIBILITY WITH PREVIOUS VERSIONS (had 'bright' instead of 'light' for backgrounds)
+         */
+    ...['', 'BrightRed', 'BrightGreen', 'BrightYellow', 'BrightBlue', 'BrightMagenta', 'BrightCyan'].map(
+      (k, i) => (!k ? [] : [['bg' + k, 100 + i, Code.noBgColor]])
+    ),
+
+    ...styleCodes.map(
+      (k, i) =>
+        !k
+          ? []
+          : [
+              // style methods
+
+              [k, i, k === 'bright' || k === 'dim' ? Code.noBrightness : 20 + i],
+            ]
+    ),
+  ].reduce((a, b) => a.concat(b)))();
+
+/*  ------------------------------------------------------------------------ */
+
+const assignStringWrappingAPI = (target, wrapBefore = target) =>
+  stringWrappingMethods.reduce(
+    (memo, [k, open, close]) =>
+      O.defineProperty(memo, k, {
+        get: () => assignStringWrappingAPI(str => wrapBefore(wrap(str, open, close))),
+      }),
+
+    target
+  );
+
+/*  ------------------------------------------------------------------------ */
+
+const TEXT = 0,
+  BRACKET = 1,
+  CODE = 2;
+
+function rawParse(s) {
+  let state = TEXT,
+    buffer = '',
+    text = '',
+    code = '',
+    codes = [];
+  const spans = [];
+
+  for (let i = 0, n = s.length; i < n; i++) {
+    const c = s[i];
+
+    buffer += c;
+
+    switch (state) {
+      case TEXT: {
+        if (c === '\u001b') {
+          state = BRACKET;
+          buffer = c;
+        } else {
+          text += c;
+        }
+        break;
+      }
+      case BRACKET:
+        if (c === '[') {
+          state = CODE;
+          code = '';
+          codes = [];
+        } else {
+          state = TEXT;
+          text += buffer;
+        }
+        break;
+
+      case CODE:
+        if (c >= '0' && c <= '9') {
+          code += c;
+        } else if (c === ';') {
+          codes.push(new Code(code));
+          code = '';
+        } else if (c === 'm' && code.length) {
+          codes.push(new Code(code));
+          for (const code of codes) {
+            spans.push({ text, code });
+            text = '';
+          }
+          state = TEXT;
+        } else {
+          state = TEXT;
+          text += buffer;
+        }
+    }
+  }
+
+  if (state !== TEXT) {
+    text += buffer;
+  }
+
+  if (text) {
+    spans.push({ text, code: new Code() });
+  }
+
+  return spans;
+}
+
+/*  ------------------------------------------------------------------------ */
+
+/**
+ * Represents an ANSI-escaped string.
+ */
+export default class Colors {
+  spans: any[];
+  static names = stringWrappingMethods.map(([k]) => k);
+  static rgb = {
+    black: [0, 0, 0],
+    darkGray: [100, 100, 100],
+    lightGray: [200, 200, 200],
+    white: [255, 255, 255],
+
+    red: [204, 0, 0],
+    lightRed: [255, 51, 0],
+
+    green: [0, 204, 0],
+    lightGreen: [51, 204, 51],
+
+    yellow: [204, 102, 0],
+    lightYellow: [255, 153, 51],
+
+    blue: [0, 0, 255],
+    lightBlue: [26, 140, 255],
+
+    magenta: [204, 0, 204],
+    lightMagenta: [255, 0, 255],
+
+    cyan: [0, 153, 255],
+    lightCyan: [0, 204, 255],
+  };
+
+  /**
+   * @param {string} s a string containing ANSI escape codes.
+   */
+  constructor(s?) {
+    this.spans = s ? rawParse(s) : [];
+  }
+
+  get str() {
+    return this.spans.reduce((str, p) => str + p.text + p.code.str, '');
+  }
+
+  get parsed() {
+    let color, bgColor, brightness, styles;
+
+    function reset() {
+      (color = new Color()),
+        (bgColor = new Color(true /* background */)),
+        (brightness = undefined),
+        (styles = new Set());
+    }
+
+    reset();
+
+    return O.assign(new Colors(), {
+      spans: this.spans
+        .map(span => {
+          const c = span.code;
+
+          const inverted = styles.has('inverse'),
+            underline = styles.has('underline') ? 'text-decoration: underline;' : '',
+            italic = styles.has('italic') ? 'font-style: italic;' : '',
+            bold = brightness === Code.bright ? 'font-weight: bold;' : '';
+
+          const foreColor = color.defaultBrightness(brightness);
+
+          const styledSpan = O.assign(
+            { css: bold + italic + underline + foreColor.css(inverted) + bgColor.css(inverted) },
+            clean({ bold: !!bold, color: foreColor.clean, bgColor: bgColor.clean }),
+            span
+          );
+
+          for (const k of styles) {
+            styledSpan[k] = true;
+          }
+
+          if (c.isBrightness) {
+            brightness = c.value;
+          } else if (span.code.value !== undefined) {
+            if (span.code.value === Code.reset) {
+              reset();
+            } else {
+              switch (span.code.type) {
+                case 'color':
+                case 'colorLight':
+                  color = new Color(false, c.subtype);
+                  break;
+
+                case 'bgColor':
+                case 'bgColorLight':
+                  bgColor = new Color(true, c.subtype);
+                  break;
+
+                case 'style':
+                  styles.add(c.subtype);
+                  break;
+                case 'unstyle':
+                  styles.delete(c.subtype);
+                  break;
+              }
+            }
+          }
+
+          return styledSpan;
+        })
+        .filter(s => s.text.length > 0),
+    });
+  }
+
+  /*  Outputs with Chrome DevTools-compatible format     */
+
+  get asChromeConsoleLogArguments() {
+    const spans = this.parsed.spans;
+
+    return [spans.map(s => '%c' + s.text).join(''), ...spans.map(s => s.css)];
+  }
+
+  get browserConsoleArguments() /* LEGACY, DEPRECATED */ {
+    return this.asChromeConsoleLogArguments;
+  }
+
+  /**
+   * @desc installs String prototype extensions
+   * @example
+   * require ('ansicolor').nice
+   * console.log ('foo'.bright.red)
+   */
+  static get nice() {
+    Colors.names.forEach(k => {
+      if (!(k in String.prototype)) {
+        O.defineProperty(String.prototype, k, {
+          get: function() {
+            return Colors[k](this);
+          },
+        });
+      }
+    });
+
+    return Colors;
+  }
+
+  /**
+   * @desc parses a string containing ANSI escape codes
+   * @return {Colors} parsed representation.
+   */
+  static parse(s) {
+    return new Colors(s).parsed;
+  }
+
+  /**
+   * @desc strips ANSI codes from a string
+   * @param {string} s a string containing ANSI escape codes.
+   * @return {string} clean string.
+   */
+  static strip(s) {
+    return s.replace(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g, ''); // hope V8 caches the regexp
+  }
+
+  /**
+   * @example
+   * const spans = [...ansi.parse ('\u001b[7m\u001b[7mfoo\u001b[7mbar\u001b[27m')]
+   */
+  [Symbol.iterator]() {
+    return this.spans[Symbol.iterator]();
+  }
+}
+
+/*  ------------------------------------------------------------------------ */
+
+assignStringWrappingAPI(Colors, str => str);
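
A minimal sketch of consuming the vendored module, matching the import-path change in LogMessageAnsi.tsx and result_transformer.ts above (the log line is made up):

import ansicolor from 'vendor/ansicolor/ansicolor';

// parse() returns a Colors instance whose spans pair each styled segment
// with a DevTools-compatible css string.
const parsed = ansicolor.parse('\u001b[31merror\u001b[0m while rendering');

for (const span of parsed.spans) {
  console.log(span.text, span.css); // e.g. 'error', 'color:rgba(204,0,0,1);'
}

// strip() removes the escape codes entirely.
const plain = ansicolor.strip('\u001b[31merror\u001b[0m while rendering'); // 'error while rendering'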

+ 66 - 26
scripts/cli/index.ts

@@ -1,33 +1,73 @@
 import program from 'commander';
-import chalk from 'chalk';
 import { execTask } from './utils/execTask';
+import chalk from 'chalk';
+import { startTask } from './tasks/core.start';
+import { buildTask } from './tasks/grafanaui.build';
+import { releaseTask } from './tasks/grafanaui.release';
+import { changelogTask } from './tasks/changelog';
+import { cherryPickTask } from './tasks/cherrypick';
+
+program.option('-d, --depreciate <scripts>', 'Inform about npm script deprecation', v => v.split(','));
 
-export type Task<T> = (options: T) => Promise<void>;
+program
+  .command('core:start')
+  .option('-h, --hot', 'Run front-end with HMR enabled')
+  .option('-t, --watchTheme', 'Watch for theme changes and regenerate variables.scss files')
+  .description('Starts Grafana front-end in development mode with watch enabled')
+  .action(async cmd => {
+    await execTask(startTask)({
+      watchThemes: cmd.theme,
+      hot: cmd.hot,
+    });
+  });
 
-// TODO: Refactor to commander commands
-// This will enable us to have command scoped options and limit the ifs below
 program
-  .option('-h, --hot', 'Runs front-end with hot reload enabled')
-  .option('-t, --theme', 'Watches for theme changes and regenerates variables.scss files')
-  .option('-d, --depreciate <scripts>', 'Inform about npm script deprecation', v => v.split(','))
-  .option('-b, --build', 'Created @grafana/ui build')
-  .option('-r, --release', 'Releases @grafana/ui to npm')
-  .parse(process.argv);
-
-if (program.build) {
-  execTask('grafanaui.build');
-} else if (program.release) {
-  execTask('grafanaui.release');
-} else {
-  if (program.depreciate && program.depreciate.length === 2) {
-    console.log(
-      chalk.yellow.bold(
-        `[NPM script depreciation] ${program.depreciate[0]} is deprecated! Use ${program.depreciate[1]} instead!`
-      )
-    );
-  }
-  execTask('core.start', {
-    watchThemes: !!program.theme,
-    hot: !!program.hot,
+  .command('gui:build')
+  .description('Builds @grafana/ui package to packages/grafana-ui/dist')
+  .action(async cmd => {
+    await execTask(buildTask)();
   });
+
+program
+  .command('gui:release')
+  .description('Prepares @grafana/ui release (and publishes to npm on demand)')
+  .option('-p, --publish', 'Publish @grafana/ui to npm registry')
+  .option('-u, --usePackageJsonVersion', 'Use version specified in package.json')
+  .action(async cmd => {
+    await execTask(releaseTask)({
+      publishToNpm: !!cmd.publish,
+      usePackageJsonVersion: !!cmd.usePackageJsonVersion,
+    });
+  });
+
+program
+  .command('changelog')
+  .option('-m, --milestone <milestone>', 'Specify milestone')
+  .description('Builds changelog markdown')
+  .action(async cmd => {
+    if (!cmd.milestone) {
+      console.log('Please specify milestone, example: --m 6.0.1');
+      return;
+    }
+
+    await execTask(changelogTask)({
+      milestone: cmd.milestone,
+    });
+  });
+
+program
+  .command('cherrypick')
+  .description('Helps find commits to cherry pick')
+  .action(async cmd => {
+    await execTask(cherryPickTask)({});
+  });
+
+program.parse(process.argv);
+
+if (program.depreciate && program.depreciate.length === 2) {
+  console.log(
+    chalk.yellow.bold(
+      `[NPM script depreciation] ${program.depreciate[0]} is deprecated! Use ${program.depreciate[1]} instead!`
+    )
+  );
 }
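
With commander, each .command() call above registers a named subcommand and program.parse(process.argv) dispatches to the matching .action() handler. Roughly, and assuming the entry point is wired up through package.json scripts not shown in this diff:

// <entry> changelog --milestone 6.0.1
//   -> execTask(changelogTask)({ milestone: '6.0.1' })
// <entry> gui:release --publish --usePackageJsonVersion
//   -> execTask(releaseTask)({ publishToNpm: true, usePackageJsonVersion: true })
// <entry> cherrypick
//   -> execTask(cherryPickTask)({})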

+ 49 - 0
scripts/cli/tasks/changelog.ts

@@ -0,0 +1,49 @@
+import { Task, TaskRunner } from './task';
+import axios from 'axios';
+
+const githubGrafanaUrl = 'https://github.com/grafana/grafana';
+
+interface ChangelogOptions {
+  milestone: string;
+}
+
+const changelogTaskRunner: TaskRunner<ChangelogOptions> = async ({ milestone }) => {
+  let client = axios.create({
+    baseURL: 'https://api.github.com/repos/grafana/grafana',
+    timeout: 10000,
+  });
+
+  const res = await client.get('/issues', {
+    params: {
+      state: 'closed',
+      labels: 'add to changelog',
+    },
+  });
+
+  let markdown = '';
+
+  for (const item of res.data) {
+    if (!item.milestone) {
+      console.log('Item missing milestone', item.number);
+      continue;
+    }
+
+    // For some reason I could not get the github api to filter on milestone and label
+    // So doing this filter here
+    if (item.milestone.title !== milestone) {
+      continue;
+    }
+
+    markdown += '* ' + item.title + '.';
+    markdown += ` [#${item.number}](${githubGrafanaUrl}/pull/${item.number})`;
+    markdown += `, [@${item.user.login}](${item.user.html_url})`;
+
+    markdown += '\n';
+  }
+
+  console.log(markdown);
+};
+
+export const changelogTask = new Task<ChangelogOptions>();
+changelogTask.setName('Changelog generator task');
+changelogTask.setRunner(changelogTaskRunner);
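
Given the string building above, each qualifying closed issue yields one markdown bullet of this shape (issue number, title, and author below are placeholders):

* Some fixed issue title. [#12345](https://github.com/grafana/grafana/pull/12345), [@some-contributor](https://github.com/some-contributor)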

+ 42 - 0
scripts/cli/tasks/cherrypick.ts

@@ -0,0 +1,42 @@
+import { Task, TaskRunner } from './task';
+import axios from 'axios';
+
+interface CherryPickOptions {}
+
+const cherryPickRunner: TaskRunner<CherryPickOptions> = async () => {
+  let client = axios.create({
+    baseURL: 'https://api.github.com/repos/grafana/grafana',
+    timeout: 10000,
+  });
+
+  const res = await client.get('/issues', {
+    params: {
+      state: 'closed',
+      labels: 'cherry-pick needed',
+    },
+  });
+
+  // sort by closed date
+  res.data.sort(function(a, b) {
+    return new Date(b.closed_at).getTime() - new Date(a.closed_at).getTime();
+  });
+
+  for (const item of res.data) {
+    if (!item.milestone) {
+      console.log(item.number + ' missing milestone!');
+      continue;
+    }
+
+    console.log(item.number + ' closed_at ' + item.closed_at + ' ' + item.html_url);
+    const issueDetails = await client.get(item.pull_request.url);
+    const commits = await client.get(issueDetails.data.commits_url);
+
+    for (const commit of commits.data) {
+      console.log(commit.commit.message + ' sha: ' + commit.sha);
+    }
+  }
+};
+
+export const cherryPickTask = new Task<CherryPickOptions>();
+cherryPickTask.setName('Cherry pick task');
+cherryPickTask.setRunner(cherryPickRunner);

+ 19 - 22
scripts/cli/tasks/core.start.ts

@@ -1,35 +1,30 @@
 import concurrently from 'concurrently';
-import { Task } from '..';
+import { Task, TaskRunner } from './task';
 
 interface StartTaskOptions {
   watchThemes: boolean;
   hot: boolean;
 }
 
-const startTask: Task<StartTaskOptions> = async ({ watchThemes, hot }) => {
-  const jobs = [];
-
-  if (watchThemes) {
-    jobs.push({
+const startTaskRunner: TaskRunner<StartTaskOptions> = async ({ watchThemes, hot }) => {
+  const jobs = [
+    watchThemes && {
       command: 'nodemon -e ts -w ./packages/grafana-ui/src/themes -x yarn run themes:generate',
       name: 'SASS variables generator',
-    });
-  }
-
-  if (!hot) {
-    jobs.push({
-      command: 'webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js',
-      name: 'Webpack',
-    });
-  } else {
-    jobs.push({
-      command: 'webpack-dev-server --progress --colors --mode development --config scripts/webpack/webpack.hot.js',
-      name: 'Dev server',
-    });
-  }
+    },
+    hot
+      ? {
+          command: 'webpack-dev-server --progress --colors --mode development --config scripts/webpack/webpack.hot.js',
+          name: 'Dev server',
+        }
+      : {
+          command: 'webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js',
+          name: 'Webpack',
+        },
+  ];
 
   try {
-    await concurrently(jobs, {
+    await concurrently(jobs.filter(job => !!job), {
       killOthers: ['failure', 'failure'],
     });
   } catch (e) {
@@ -38,4 +33,6 @@ const startTask: Task<StartTaskOptions> = async ({ watchThemes, hot }) => {
   }
 };
 
-export default startTask;
+export const startTask = new Task<StartTaskOptions>();
+startTask.setName('Core startTask');
+startTask.setRunner(startTaskRunner);

+ 32 - 59
scripts/cli/tasks/grafanaui.build.ts

@@ -1,95 +1,66 @@
 import execa from 'execa';
 import fs from 'fs';
-import { Task } from '..';
 import { changeCwdToGrafanaUi, restoreCwd } from '../utils/cwd';
 import chalk from 'chalk';
-import { startSpinner } from '../utils/startSpinner';
+import { useSpinner } from '../utils/useSpinner';
+import { Task, TaskRunner } from './task';
 
 let distDir, cwd;
 
-const clean = async () => {
-  const spinner = startSpinner('Cleaning');
-  try {
-    await execa('npm', ['run', 'clean']);
-    spinner.succeed();
-  } catch (e) {
-    spinner.fail();
-    throw e;
-  }
-};
-
-const compile = async () => {
-  const spinner = startSpinner('Compiling sources');
-  try {
-    await execa('tsc', ['-p', './tsconfig.build.json']);
-    spinner.succeed();
-  } catch (e) {
-    console.log(e);
-    spinner.fail();
-  }
-};
+const clean = useSpinner<void>('Cleaning', async () => await execa('npm', ['run', 'clean']));
 
-const rollup = async () => {
-  const spinner = startSpinner('Bundling');
+const compile = useSpinner<void>('Compiling sources', () => execa('tsc', ['-p', './tsconfig.build.json']));
 
-  try {
-    await execa('npm', ['run', 'build']);
-    spinner.succeed();
-  } catch (e) {
-    spinner.fail();
-  }
-};
-
-export const savePackage = async (path, pkg) => {
-  const spinner = startSpinner('Updating package.json');
+const rollup = useSpinner<void>('Bundling', () => execa('npm', ['run', 'build']));
 
+export const savePackage = useSpinner<{
+  path: string;
+  pkg: {};
+}>('Updating package.json', async ({ path, pkg }) => {
   return new Promise((resolve, reject) => {
     fs.writeFile(path, JSON.stringify(pkg, null, 2), err => {
       if (err) {
-        spinner.fail();
-        console.error(err);
         reject(err);
         return;
       }
-      spinner.succeed();
       resolve();
     });
   });
-};
+});
 
 const preparePackage = async pkg => {
   pkg.main = 'index.js';
   pkg.types = 'index.d.ts';
-  await savePackage(`${cwd}/dist/package.json`, pkg);
+  await savePackage({
+    path: `${cwd}/dist/package.json`,
+    pkg,
+  });
 };
 
-const moveFiles = async () => {
+const moveFiles = () => {
   const files = ['README.md', 'CHANGELOG.md', 'index.js'];
-  const spinner = startSpinner(`Moving ${files.join(', ')} files`);
-
-  const promises = files.map(file => {
-    return fs.copyFile(`${cwd}/${file}`, `${distDir}/${file}`, err => {
-      if (err) {
-        console.error(err);
-        return;
-      }
+  return useSpinner<void>(`Moving ${files.join(', ')} files`, async () => {
+    const promises = files.map(file => {
+      return new Promise((resolve, reject) => {
+        fs.copyFile(`${cwd}/${file}`, `${distDir}/${file}`, err => {
+          if (err) {
+            reject(err);
+            return;
+          }
+          resolve();
+        });
+      });
     });
-  });
 
-  try {
     await Promise.all(promises);
-    spinner.succeed();
-  } catch (e) {
-    spinner.fail();
-  }
+  })();
 };
 
-const buildTask: Task<void> = async () => {
+const buildTaskRunner: TaskRunner<void> = async () => {
   cwd = changeCwdToGrafanaUi();
   distDir = `${cwd}/dist`;
   const pkg = require(`${cwd}/package.json`);
-
-  console.log(chalk.yellow(`Building ${pkg.name} @ ${pkg.version}`));
+  console.log(chalk.yellow(`Building ${pkg.name} (package.json version: ${pkg.version})`));
 
   await clean();
   await compile();
@@ -100,4 +71,6 @@ const buildTask: Task<void> = async () => {
   restoreCwd();
 };
 
-export default buildTask;
+export const buildTask = new Task<void>();
+buildTask.setName('@grafana/ui build');
+buildTask.setRunner(buildTaskRunner);
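
The useSpinner helper (its new source appears further down, replacing startSpinner) wraps an async function with an ora spinner. Judging from its signature and the call sites above, useSpinner(label, fn) returns a function, so the wrapped work only runs when that function is invoked. A minimal sketch; the lint task is hypothetical:

import execa from 'execa';
import { useSpinner } from '../utils/useSpinner';

// Wrapping alone starts nothing; it just produces a callable task.
const lint = useSpinner<void>('Linting', () => execa('npm', ['run', 'lint']));

const run = async () => {
  await lint(); // spinner labelled 'Linting' runs while `npm run lint` executes
};

run();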

+ 86 - 54
scripts/cli/tasks/grafanaui.release.ts

@@ -1,14 +1,19 @@
 import execa from 'execa';
-import { Task } from '..';
 import { execTask } from '../utils/execTask';
 import { changeCwdToGrafanaUiDist, changeCwdToGrafanaUi } from '../utils/cwd';
 import semver from 'semver';
 import inquirer from 'inquirer';
 import chalk from 'chalk';
-import { startSpinner } from '../utils/startSpinner';
-import { savePackage } from './grafanaui.build';
+import { useSpinner } from '../utils/useSpinner';
+import { savePackage, buildTask } from './grafanaui.build';
+import { TaskRunner, Task } from './task';
 
-type VersionBumpType = 'patch' | 'minor' | 'major';
+type VersionBumpType = 'prerelease' | 'patch' | 'minor' | 'major';
+
+interface ReleaseTaskOptions {
+  publishToNpm: boolean;
+  usePackageJsonVersion: boolean;
+}
 
 const promptBumpType = async () => {
   return inquirer.prompt<{ type: VersionBumpType }>([
@@ -16,7 +21,7 @@ const promptBumpType = async () => {
       type: 'list',
       message: 'Select version bump',
       name: 'type',
-      choices: ['patch', 'minor', 'major'],
+      choices: ['prerelease', 'patch', 'minor', 'major'],
       validate: answer => {
         if (answer.length < 1) {
           return 'You must choose something';
@@ -28,13 +33,13 @@ const promptBumpType = async () => {
   ]);
 };
 
-const promptPrereleaseId = async () => {
+const promptPrereleaseId = async (message = 'Is this a prerelease?', allowNo = true) => {
   return inquirer.prompt<{ id: string }>([
     {
       type: 'list',
-      message: 'Is this a prerelease?',
+      message: message,
       name: 'id',
-      choices: ['no', 'alpha', 'beta'],
+      choices: allowNo ? ['no', 'alpha', 'beta'] : ['alpha', 'beta'],
       validate: answer => {
         if (answer.length < 1) {
           return 'You must choose something';
@@ -57,47 +62,45 @@ const promptConfirm = async (message?: string) => {
   ]);
 };
 
-const bumpVersion = async (version: string) => {
-  const spinner = startSpinner(`Saving version ${version} to package.json`);
-  changeCwdToGrafanaUi();
-
-  try {
+const bumpVersion = (version: string) =>
+  useSpinner<void>(`Saving version ${version} to package.json`, async () => {
+    changeCwdToGrafanaUi();
     await execa('npm', ['version', version]);
-    spinner.succeed();
-  } catch (e) {
-    console.log(e);
-    spinner.fail();
-  }
-
-  changeCwdToGrafanaUiDist();
-  const pkg = require(`${process.cwd()}/package.json`);
-  pkg.version = version;
-  await savePackage(`${process.cwd()}/package.json`, pkg);
-};
-
-const publishPackage = async (name: string, version: string) => {
-  changeCwdToGrafanaUiDist();
-  console.log(chalk.yellowBright.bold(`\nReview dist package.json before proceeding!\n`));
-  const { confirmed } = await promptConfirm('Are you ready to publish to npm?');
-
-  if (!confirmed) {
-    process.exit();
-  }
+    changeCwdToGrafanaUiDist();
+    const pkg = require(`${process.cwd()}/package.json`);
+    pkg.version = version;
+    await savePackage({ path: `${process.cwd()}/package.json`, pkg });
+  })();
+
+const publishPackage = (name: string, version: string) =>
+  useSpinner<void>(`Publishing ${name} @ ${version} to npm registry...`, async () => {
+    changeCwdToGrafanaUiDist();
+    console.log(chalk.yellowBright.bold(`\nReview dist package.json before proceeding!\n`));
+    const { confirmed } = await promptConfirm('Are you ready to publish to npm?');
+
+    if (!confirmed) {
+      process.exit();
+    }
+    await execa('npm', ['publish', '--access', 'public']);
+  })();
 
-  const spinner = startSpinner(`Publishing ${name} @ ${version} to npm registry...`);
+const ensureMasterBranch = async () => {
+  const currentBranch = await execa.stdout('git', ['symbolic-ref', '--short', 'HEAD']);
+  const status = await execa.stdout('git', ['status', '--porcelain']);
 
-  try {
-    await execa('npm', ['publish', '--access', 'public']);
-    spinner.succeed();
-  } catch (e) {
-    console.log(e);
-    spinner.fail();
+  if (currentBranch !== 'master' && status !== '') {
+    console.error(chalk.red.bold('You need to be on clean master branch to release @grafana/ui'));
     process.exit(1);
   }
 };
 
-const releaseTask: Task<void> = async () => {
-  await execTask('grafanaui.build');
+const releaseTaskRunner: TaskRunner<ReleaseTaskOptions> = async ({ publishToNpm, usePackageJsonVersion }) => {
+  if (publishToNpm) {
+    await ensureMasterBranch();
+  }
+
+  await execTask(buildTask)();
+
   let releaseConfirmed = false;
   let nextVersion;
   changeCwdToGrafanaUiDist();
@@ -107,27 +110,56 @@ const releaseTask: Task<void> = async () => {
   console.log(`Current version: ${pkg.version}`);
 
   do {
-    const { type } = await promptBumpType();
-    const { id } = await promptPrereleaseId();
-
-    if (id !== 'no') {
-      nextVersion = semver.inc(pkg.version, `pre${type}`, id);
+    if (!usePackageJsonVersion) {
+      const { type } = await promptBumpType();
+      console.log(type);
+      if (type === 'prerelease') {
+        const { id } = await promptPrereleaseId('What kind of prerelease?', false);
+        nextVersion = semver.inc(pkg.version, type, id);
+      } else {
+        const { id } = await promptPrereleaseId();
+        if (id !== 'no') {
+          nextVersion = semver.inc(pkg.version, `pre${type}`, id);
+        } else {
+          nextVersion = semver.inc(pkg.version, type);
+        }
+      }
     } else {
-      nextVersion = semver.inc(pkg.version, type);
+      nextVersion = pkg.version;
     }
 
     console.log(chalk.yellowBright.bold(`You are going to release a new version of ${pkg.name}`));
-    console.log(chalk.green(`Version bump: ${pkg.version} ->`), chalk.bold.yellowBright(`${nextVersion}`));
+
+    if (usePackageJsonVersion) {
+      console.log(chalk.green(`Version based on package.json: `), chalk.bold.yellowBright(`${nextVersion}`));
+    } else {
+      console.log(chalk.green(`Version bump: ${pkg.version} ->`), chalk.bold.yellowBright(`${nextVersion}`));
+    }
+
     const { confirmed } = await promptConfirm();
 
     releaseConfirmed = confirmed;
   } while (!releaseConfirmed);
 
-  await bumpVersion(nextVersion);
-  await publishPackage(pkg.name, nextVersion);
+  if (!usePackageJsonVersion) {
+    await bumpVersion(nextVersion);
+  }
 
-  console.log(chalk.green(`\nVersion ${nextVersion} of ${pkg.name} succesfully released!`));
-  console.log(chalk.yellow(`\nUpdated @grafana/ui/package.json with version bump created - COMMIT THIS FILE!`));
+  if (publishToNpm) {
+    await publishPackage(pkg.name, nextVersion);
+    console.log(chalk.green(`\nVersion ${nextVersion} of ${pkg.name} successfully released!`));
+    console.log(chalk.yellow(`\nUpdated @grafana/ui/package.json with the version bump - COMMIT THIS FILE!`));
+    process.exit();
+  } else {
+    console.log(
+      chalk.green(
+        `\nVersion ${nextVersion} of ${pkg.name} successfully prepared for release. See packages/grafana-ui/dist`
+      )
+    );
+    console.log(chalk.green(`\nTo publish to npm registry run`), chalk.bold.blue(`npm run gui:publish`));
+  }
 };
 
-export default releaseTask;
+export const releaseTask = new Task<ReleaseTaskOptions>();
+releaseTask.setName('@grafana/ui release');
+releaseTask.setRunner(releaseTaskRunner);
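
For reference, the nextVersion computed in the loop above comes straight from semver.inc. A minimal sketch of the three bump paths the runner can take (plain bump, pre-bump with an id, and a bare prerelease), assuming the usual import of the same semver package the task already depends on:

import * as semver from 'semver';

// plain bump (the `id === 'no'` branch): 6.0.0 -> 6.1.0
console.log(semver.inc('6.0.0', 'minor'));
// pre-bump with an id (the `pre${type}` branch): 6.0.0 -> 6.1.0-alpha.0
console.log(semver.inc('6.0.0', 'preminor', 'alpha'));
// bare prerelease (the `type === 'prerelease'` branch): 6.1.0-alpha.0 -> 6.1.0-alpha.1
console.log(semver.inc('6.1.0-alpha.0', 'prerelease', 'alpha'));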

+ 23 - 0
scripts/cli/tasks/task.ts

@@ -0,0 +1,23 @@
+export type TaskRunner<T> = (options: T) => Promise<void>;
+
+export class Task<TOptions> {
+  name: string;
+  runner: (options: TOptions) => Promise<void>;
+  options: TOptions;
+
+  setName = name => {
+    this.name = name;
+  };
+
+  setRunner = (runner: TaskRunner<TOptions>) => {
+    this.runner = runner;
+  };
+
+  setOptions = options => {
+    this.options = options;
+  };
+
+  exec = () => {
+    return this.runner(this.options);
+  };
+}
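
The Task class above is only a small holder for a name, a runner and its options; exec() forwards the stored options to the runner. A minimal sketch of defining another task against this API (the cleanTask name and its option shape are hypothetical, only the imports from task.ts are taken from the diff):

import { Task, TaskRunner } from './task';

interface CleanTaskOptions {
  dryRun: boolean;
}

// the runner receives the options object stored on the task via setOptions
const cleanTaskRunner: TaskRunner<CleanTaskOptions> = async ({ dryRun }) => {
  console.log(dryRun ? 'Would remove dist/' : 'Removing dist/');
};

export const cleanTask = new Task<CleanTaskOptions>();
cleanTask.setName('clean');
cleanTask.setRunner(cleanTaskRunner);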

+ 13 - 4
scripts/cli/utils/execTask.ts

@@ -1,6 +1,15 @@
-import { Task } from '..';
+import { Task } from '../tasks/task';
+import chalk from 'chalk';
 
-export const execTask = async <T>(taskName, options?: T) => {
-  const task = await import(`${__dirname}/../tasks/${taskName}.ts`);
-  return task.default(options) as Task<T>;
+export const execTask = <TOptions>(task: Task<TOptions>) => async (options: TOptions) => {
+  console.log(chalk.yellow(`Running ${chalk.bold(task.name)} task`));
+  task.setOptions(options);
+  try {
+    console.group();
+    await task.exec();
+    console.groupEnd();
+  } catch (e) {
+    console.log(e);
+    process.exit(1);
+  }
 };
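
execTask is now a higher-order function: it takes a Task and returns an async (options) => Promise<void> that stores the options on the task, logs its name, runs it inside a console.group and exits the process on failure. A usage sketch, reusing the hypothetical cleanTask from the task.ts example above (the import paths are assumptions):

import { execTask } from './utils/execTask';
import { cleanTask } from './tasks/clean';

// setOptions + exec happen inside execTask; the caller only supplies the options
execTask(cleanTask)({ dryRun: false }).then(() => console.log('clean task finished'));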

+ 0 - 7
scripts/cli/utils/startSpinner.ts

@@ -1,7 +0,0 @@
-import ora from 'ora';
-
-export const startSpinner = (label: string) => {
-  const spinner = new ora(label);
-  spinner.start();
-  return spinner;
-};

+ 20 - 0
scripts/cli/utils/useSpinner.ts

@@ -0,0 +1,20 @@
+import ora from 'ora';
+
+type FnToSpin<T> = (options: T) => Promise<void>;
+
+export const useSpinner = <T>(spinnerLabel: string, fn: FnToSpin<T>, killProcess = true) => {
+  return async (options: T) => {
+    const spinner = new ora(spinnerLabel);
+    spinner.start();
+    try {
+      await fn(options);
+      spinner.succeed();
+    } catch (e) {
+      spinner.fail();
+      console.log(e);
+      if (killProcess) {
+        process.exit(1);
+      }
+    }
+  };
+};
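
useSpinner replaces the removed startSpinner helper: instead of handing back an ora instance, it wraps an async function so the spinner is started, succeeded or failed around that single call, optionally killing the process on error. A small usage sketch in the style of bumpVersion above (the rimraf invocation is a made-up placeholder):

import execa from 'execa';
import { useSpinner } from './useSpinner';

// returns () => Promise<void>; the spinner only lives for the wrapped call
const cleanDist = useSpinner<void>('Cleaning dist folder', async () => {
  await execa('rimraf', ['./dist']);
});

cleanDist(); // spinner succeeds, or fails and exits, based on the promise outcome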

+ 1 - 1
tsconfig.json

@@ -32,5 +32,5 @@
       "sass": ["sass"]
     }
   },
-  "include": ["public/app/**/*.ts", "public/app/**/*.tsx", "public/test/**/*.ts"]
+  "include": ["public/app/**/*.ts", "public/app/**/*.tsx", "public/test/**/*.ts", "public/vendor/**/*.ts"]
 }

+ 0 - 191
vendor/github.com/go-ini/ini/LICENSE

@@ -1,191 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and
-distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright
-owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities
-that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, "control" means (i) the power, direct or
-indirect, to cause the direction or management of such entity, whether by
-contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
-outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising
-permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including
-but not limited to software source code, documentation source, and configuration
-files.
-
-"Object" form shall mean any form resulting from mechanical transformation or
-translation of a Source form, including but not limited to compiled object code,
-generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made
-available under the License, as indicated by a copyright notice that is included
-in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that
-is based on (or derived from) the Work and for which the editorial revisions,
-annotations, elaborations, or other modifications represent, as a whole, an
-original work of authorship. For the purposes of this License, Derivative Works
-shall not include works that remain separable from, or merely link (or bind by
-name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version
-of the Work and any modifications or additions to that Work or Derivative Works
-thereof, that is intentionally submitted to Licensor for inclusion in the Work
-by the copyright owner or by an individual or Legal Entity authorized to submit
-on behalf of the copyright owner. For the purposes of this definition,
-"submitted" means any form of electronic, verbal, or written communication sent
-to the Licensor or its representatives, including but not limited to
-communication on electronic mailing lists, source code control systems, and
-issue tracking systems that are managed by, or on behalf of, the Licensor for
-the purpose of discussing and improving the Work, but excluding communication
-that is conspicuously marked or otherwise designated in writing by the copyright
-owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
-of whom a Contribution has been received by Licensor and subsequently
-incorporated within the Work.
-
-2. Grant of Copyright License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable copyright license to reproduce, prepare Derivative Works of,
-publicly display, publicly perform, sublicense, and distribute the Work and such
-Derivative Works in Source or Object form.
-
-3. Grant of Patent License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable (except as stated in this section) patent license to make, have
-made, use, offer to sell, sell, import, and otherwise transfer the Work, where
-such license applies only to those patent claims licensable by such Contributor
-that are necessarily infringed by their Contribution(s) alone or by combination
-of their Contribution(s) with the Work to which such Contribution(s) was
-submitted. If You institute patent litigation against any entity (including a
-cross-claim or counterclaim in a lawsuit) alleging that the Work or a
-Contribution incorporated within the Work constitutes direct or contributory
-patent infringement, then any patent licenses granted to You under this License
-for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution.
-
-You may reproduce and distribute copies of the Work or Derivative Works thereof
-in any medium, with or without modifications, and in Source or Object form,
-provided that You meet the following conditions:
-
-You must give any other recipients of the Work or Derivative Works a copy of
-this License; and
-You must cause any modified files to carry prominent notices stating that You
-changed the files; and
-You must retain, in the Source form of any Derivative Works that You distribute,
-all copyright, patent, trademark, and attribution notices from the Source form
-of the Work, excluding those notices that do not pertain to any part of the
-Derivative Works; and
-If the Work includes a "NOTICE" text file as part of its distribution, then any
-Derivative Works that You distribute must include a readable copy of the
-attribution notices contained within such NOTICE file, excluding those notices
-that do not pertain to any part of the Derivative Works, in at least one of the
-following places: within a NOTICE text file distributed as part of the
-Derivative Works; within the Source form or documentation, if provided along
-with the Derivative Works; or, within a display generated by the Derivative
-Works, if and wherever such third-party notices normally appear. The contents of
-the NOTICE file are for informational purposes only and do not modify the
-License. You may add Your own attribution notices within Derivative Works that
-You distribute, alongside or as an addendum to the NOTICE text from the Work,
-provided that such additional attribution notices cannot be construed as
-modifying the License.
-You may add Your own copyright statement to Your modifications and may provide
-additional or different license terms and conditions for use, reproduction, or
-distribution of Your modifications, or for any such Derivative Works as a whole,
-provided Your use, reproduction, and distribution of the Work otherwise complies
-with the conditions stated in this License.
-
-5. Submission of Contributions.
-
-Unless You explicitly state otherwise, any Contribution intentionally submitted
-for inclusion in the Work by You to the Licensor shall be under the terms and
-conditions of this License, without any additional terms or conditions.
-Notwithstanding the above, nothing herein shall supersede or modify the terms of
-any separate license agreement you may have executed with Licensor regarding
-such Contributions.
-
-6. Trademarks.
-
-This License does not grant permission to use the trade names, trademarks,
-service marks, or product names of the Licensor, except as required for
-reasonable and customary use in describing the origin of the Work and
-reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty.
-
-Unless required by applicable law or agreed to in writing, Licensor provides the
-Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
-including, without limitation, any warranties or conditions of TITLE,
-NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
-solely responsible for determining the appropriateness of using or
-redistributing the Work and assume any risks associated with Your exercise of
-permissions under this License.
-
-8. Limitation of Liability.
-
-In no event and under no legal theory, whether in tort (including negligence),
-contract, or otherwise, unless required by applicable law (such as deliberate
-and grossly negligent acts) or agreed to in writing, shall any Contributor be
-liable to You for damages, including any direct, indirect, special, incidental,
-or consequential damages of any character arising as a result of this License or
-out of the use or inability to use the Work (including but not limited to
-damages for loss of goodwill, work stoppage, computer failure or malfunction, or
-any and all other commercial damages or losses), even if such Contributor has
-been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability.
-
-While redistributing the Work or Derivative Works thereof, You may choose to
-offer, and charge a fee for, acceptance of support, warranty, indemnity, or
-other liability obligations and/or rights consistent with this License. However,
-in accepting such obligations, You may act only on Your own behalf and on Your
-sole responsibility, not on behalf of any other Contributor, and only if You
-agree to indemnify, defend, and hold each Contributor harmless for any liability
-incurred by, or claims asserted against, such Contributor by reason of your
-accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work
-
-To apply the Apache License to your work, attach the following boilerplate
-notice, with the fields enclosed by brackets "[]" replaced with your own
-identifying information. (Don't include the brackets!) The text should be
-enclosed in the appropriate comment syntax for the file format. We also
-recommend that a file or class name and description of purpose be included on
-the same "printed page" as the copyright notice for easier identification within
-third-party archives.
-
-   Copyright 2014 Unknwon
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.

+ 0 - 32
vendor/github.com/go-ini/ini/error.go

@@ -1,32 +0,0 @@
-// Copyright 2016 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"fmt"
-)
-
-type ErrDelimiterNotFound struct {
-	Line string
-}
-
-func IsErrDelimiterNotFound(err error) bool {
-	_, ok := err.(ErrDelimiterNotFound)
-	return ok
-}
-
-func (err ErrDelimiterNotFound) Error() string {
-	return fmt.Sprintf("key-value delimiter not found: %s", err.Line)
-}

+ 0 - 407
vendor/github.com/go-ini/ini/file.go

@@ -1,407 +0,0 @@
-// Copyright 2017 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"os"
-	"strings"
-	"sync"
-)
-
-// File represents a combination of a or more INI file(s) in memory.
-type File struct {
-	options     LoadOptions
-	dataSources []dataSource
-
-	// Should make things safe, but sometimes doesn't matter.
-	BlockMode bool
-	lock      sync.RWMutex
-
-	// To keep data in order.
-	sectionList []string
-	// Actual data is stored here.
-	sections map[string]*Section
-
-	NameMapper
-	ValueMapper
-}
-
-// newFile initializes File object with given data sources.
-func newFile(dataSources []dataSource, opts LoadOptions) *File {
-	return &File{
-		BlockMode:   true,
-		dataSources: dataSources,
-		sections:    make(map[string]*Section),
-		sectionList: make([]string, 0, 10),
-		options:     opts,
-	}
-}
-
-// Empty returns an empty file object.
-func Empty() *File {
-	// Ignore error here, we sure our data is good.
-	f, _ := Load([]byte(""))
-	return f
-}
-
-// NewSection creates a new section.
-func (f *File) NewSection(name string) (*Section, error) {
-	if len(name) == 0 {
-		return nil, errors.New("error creating new section: empty section name")
-	} else if f.options.Insensitive && name != DEFAULT_SECTION {
-		name = strings.ToLower(name)
-	}
-
-	if f.BlockMode {
-		f.lock.Lock()
-		defer f.lock.Unlock()
-	}
-
-	if inSlice(name, f.sectionList) {
-		return f.sections[name], nil
-	}
-
-	f.sectionList = append(f.sectionList, name)
-	f.sections[name] = newSection(f, name)
-	return f.sections[name], nil
-}
-
-// NewRawSection creates a new section with an unparseable body.
-func (f *File) NewRawSection(name, body string) (*Section, error) {
-	section, err := f.NewSection(name)
-	if err != nil {
-		return nil, err
-	}
-
-	section.isRawSection = true
-	section.rawBody = body
-	return section, nil
-}
-
-// NewSections creates a list of sections.
-func (f *File) NewSections(names ...string) (err error) {
-	for _, name := range names {
-		if _, err = f.NewSection(name); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-// GetSection returns section by given name.
-func (f *File) GetSection(name string) (*Section, error) {
-	if len(name) == 0 {
-		name = DEFAULT_SECTION
-	}
-	if f.options.Insensitive {
-		name = strings.ToLower(name)
-	}
-
-	if f.BlockMode {
-		f.lock.RLock()
-		defer f.lock.RUnlock()
-	}
-
-	sec := f.sections[name]
-	if sec == nil {
-		return nil, fmt.Errorf("section '%s' does not exist", name)
-	}
-	return sec, nil
-}
-
-// Section assumes named section exists and returns a zero-value when not.
-func (f *File) Section(name string) *Section {
-	sec, err := f.GetSection(name)
-	if err != nil {
-		// Note: It's OK here because the only possible error is empty section name,
-		// but if it's empty, this piece of code won't be executed.
-		sec, _ = f.NewSection(name)
-		return sec
-	}
-	return sec
-}
-
-// Section returns list of Section.
-func (f *File) Sections() []*Section {
-	if f.BlockMode {
-		f.lock.RLock()
-		defer f.lock.RUnlock()
-	}
-
-	sections := make([]*Section, len(f.sectionList))
-	for i, name := range f.sectionList {
-		sections[i] = f.sections[name]
-	}
-	return sections
-}
-
-// ChildSections returns a list of child sections of given section name.
-func (f *File) ChildSections(name string) []*Section {
-	return f.Section(name).ChildSections()
-}
-
-// SectionStrings returns list of section names.
-func (f *File) SectionStrings() []string {
-	list := make([]string, len(f.sectionList))
-	copy(list, f.sectionList)
-	return list
-}
-
-// DeleteSection deletes a section.
-func (f *File) DeleteSection(name string) {
-	if f.BlockMode {
-		f.lock.Lock()
-		defer f.lock.Unlock()
-	}
-
-	if len(name) == 0 {
-		name = DEFAULT_SECTION
-	}
-
-	for i, s := range f.sectionList {
-		if s == name {
-			f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...)
-			delete(f.sections, name)
-			return
-		}
-	}
-}
-
-func (f *File) reload(s dataSource) error {
-	r, err := s.ReadCloser()
-	if err != nil {
-		return err
-	}
-	defer r.Close()
-
-	return f.parse(r)
-}
-
-// Reload reloads and parses all data sources.
-func (f *File) Reload() (err error) {
-	for _, s := range f.dataSources {
-		if err = f.reload(s); err != nil {
-			// In loose mode, we create an empty default section for nonexistent files.
-			if os.IsNotExist(err) && f.options.Loose {
-				f.parse(bytes.NewBuffer(nil))
-				continue
-			}
-			return err
-		}
-	}
-	return nil
-}
-
-// Append appends one or more data sources and reloads automatically.
-func (f *File) Append(source interface{}, others ...interface{}) error {
-	ds, err := parseDataSource(source)
-	if err != nil {
-		return err
-	}
-	f.dataSources = append(f.dataSources, ds)
-	for _, s := range others {
-		ds, err = parseDataSource(s)
-		if err != nil {
-			return err
-		}
-		f.dataSources = append(f.dataSources, ds)
-	}
-	return f.Reload()
-}
-
-func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
-	equalSign := "="
-	if PrettyFormat || PrettyEqual {
-		equalSign = " = "
-	}
-
-	// Use buffer to make sure target is safe until finish encoding.
-	buf := bytes.NewBuffer(nil)
-	for i, sname := range f.sectionList {
-		sec := f.Section(sname)
-		if len(sec.Comment) > 0 {
-			if sec.Comment[0] != '#' && sec.Comment[0] != ';' {
-				sec.Comment = "; " + sec.Comment
-			} else {
-				sec.Comment = sec.Comment[:1] + " " + strings.TrimSpace(sec.Comment[1:])
-			}
-			if _, err := buf.WriteString(sec.Comment + LineBreak); err != nil {
-				return nil, err
-			}
-		}
-
-		if i > 0 || DefaultHeader {
-			if _, err := buf.WriteString("[" + sname + "]" + LineBreak); err != nil {
-				return nil, err
-			}
-		} else {
-			// Write nothing if default section is empty
-			if len(sec.keyList) == 0 {
-				continue
-			}
-		}
-
-		if sec.isRawSection {
-			if _, err := buf.WriteString(sec.rawBody); err != nil {
-				return nil, err
-			}
-
-			if PrettySection {
-				// Put a line between sections
-				if _, err := buf.WriteString(LineBreak); err != nil {
-					return nil, err
-				}
-			}
-			continue
-		}
-
-		// Count and generate alignment length and buffer spaces using the
-		// longest key. Keys may be modifed if they contain certain characters so
-		// we need to take that into account in our calculation.
-		alignLength := 0
-		if PrettyFormat {
-			for _, kname := range sec.keyList {
-				keyLength := len(kname)
-				// First case will surround key by ` and second by """
-				if strings.ContainsAny(kname, "\"=:") {
-					keyLength += 2
-				} else if strings.Contains(kname, "`") {
-					keyLength += 6
-				}
-
-				if keyLength > alignLength {
-					alignLength = keyLength
-				}
-			}
-		}
-		alignSpaces := bytes.Repeat([]byte(" "), alignLength)
-
-	KEY_LIST:
-		for _, kname := range sec.keyList {
-			key := sec.Key(kname)
-			if len(key.Comment) > 0 {
-				if len(indent) > 0 && sname != DEFAULT_SECTION {
-					buf.WriteString(indent)
-				}
-				if key.Comment[0] != '#' && key.Comment[0] != ';' {
-					key.Comment = "; " + key.Comment
-				} else {
-					key.Comment = key.Comment[:1] + " " + strings.TrimSpace(key.Comment[1:])
-				}
-
-				// Support multiline comments
-				key.Comment = strings.Replace(key.Comment, "\n", "\n; ", -1)
-
-				if _, err := buf.WriteString(key.Comment + LineBreak); err != nil {
-					return nil, err
-				}
-			}
-
-			if len(indent) > 0 && sname != DEFAULT_SECTION {
-				buf.WriteString(indent)
-			}
-
-			switch {
-			case key.isAutoIncrement:
-				kname = "-"
-			case strings.ContainsAny(kname, "\"=:"):
-				kname = "`" + kname + "`"
-			case strings.Contains(kname, "`"):
-				kname = `"""` + kname + `"""`
-			}
-
-			for _, val := range key.ValueWithShadows() {
-				if _, err := buf.WriteString(kname); err != nil {
-					return nil, err
-				}
-
-				if key.isBooleanType {
-					if kname != sec.keyList[len(sec.keyList)-1] {
-						buf.WriteString(LineBreak)
-					}
-					continue KEY_LIST
-				}
-
-				// Write out alignment spaces before "=" sign
-				if PrettyFormat {
-					buf.Write(alignSpaces[:alignLength-len(kname)])
-				}
-
-				// In case key value contains "\n", "`", "\"", "#" or ";"
-				if strings.ContainsAny(val, "\n`") {
-					val = `"""` + val + `"""`
-				} else if !f.options.IgnoreInlineComment && strings.ContainsAny(val, "#;") {
-					val = "`" + val + "`"
-				}
-				if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil {
-					return nil, err
-				}
-			}
-
-			for _, val := range key.nestedValues {
-				if _, err := buf.WriteString(indent + "  " + val + LineBreak); err != nil {
-					return nil, err
-				}
-			}
-		}
-
-		if PrettySection {
-			// Put a line between sections
-			if _, err := buf.WriteString(LineBreak); err != nil {
-				return nil, err
-			}
-		}
-	}
-
-	return buf, nil
-}
-
-// WriteToIndent writes content into io.Writer with given indention.
-// If PrettyFormat has been set to be true,
-// it will align "=" sign with spaces under each section.
-func (f *File) WriteToIndent(w io.Writer, indent string) (int64, error) {
-	buf, err := f.writeToBuffer(indent)
-	if err != nil {
-		return 0, err
-	}
-	return buf.WriteTo(w)
-}
-
-// WriteTo writes file content into io.Writer.
-func (f *File) WriteTo(w io.Writer) (int64, error) {
-	return f.WriteToIndent(w, "")
-}
-
-// SaveToIndent writes content to file system with given value indention.
-func (f *File) SaveToIndent(filename, indent string) error {
-	// Note: Because we are truncating with os.Create,
-	// 	so it's safer to save to a temporary file location and rename afte done.
-	buf, err := f.writeToBuffer(indent)
-	if err != nil {
-		return err
-	}
-
-	return ioutil.WriteFile(filename, buf.Bytes(), 0666)
-}
-
-// SaveTo writes content to file system.
-func (f *File) SaveTo(filename string) error {
-	return f.SaveToIndent(filename, "")
-}

+ 0 - 202
vendor/github.com/go-ini/ini/ini.go

@@ -1,202 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-// Package ini provides INI file read and write functionality in Go.
-package ini
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"os"
-	"regexp"
-	"runtime"
-)
-
-const (
-	// Name for default section. You can use this constant or the string literal.
-	// In most of cases, an empty string is all you need to access the section.
-	DEFAULT_SECTION = "DEFAULT"
-
-	// Maximum allowed depth when recursively substituing variable names.
-	_DEPTH_VALUES = 99
-	_VERSION      = "1.36.0"
-)
-
-// Version returns current package version literal.
-func Version() string {
-	return _VERSION
-}
-
-var (
-	// Delimiter to determine or compose a new line.
-	// This variable will be changed to "\r\n" automatically on Windows
-	// at package init time.
-	LineBreak = "\n"
-
-	// Variable regexp pattern: %(variable)s
-	varPattern = regexp.MustCompile(`%\(([^\)]+)\)s`)
-
-	// Indicate whether to align "=" sign with spaces to produce pretty output
-	// or reduce all possible spaces for compact format.
-	PrettyFormat = true
-
-	// Place spaces around "=" sign even when PrettyFormat is false
-	PrettyEqual = false
-
-	// Explicitly write DEFAULT section header
-	DefaultHeader = false
-
-	// Indicate whether to put a line between sections
-	PrettySection = true
-)
-
-func init() {
-	if runtime.GOOS == "windows" {
-		LineBreak = "\r\n"
-	}
-}
-
-func inSlice(str string, s []string) bool {
-	for _, v := range s {
-		if str == v {
-			return true
-		}
-	}
-	return false
-}
-
-// dataSource is an interface that returns object which can be read and closed.
-type dataSource interface {
-	ReadCloser() (io.ReadCloser, error)
-}
-
-// sourceFile represents an object that contains content on the local file system.
-type sourceFile struct {
-	name string
-}
-
-func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) {
-	return os.Open(s.name)
-}
-
-// sourceData represents an object that contains content in memory.
-type sourceData struct {
-	data []byte
-}
-
-func (s *sourceData) ReadCloser() (io.ReadCloser, error) {
-	return ioutil.NopCloser(bytes.NewReader(s.data)), nil
-}
-
-// sourceReadCloser represents an input stream with Close method.
-type sourceReadCloser struct {
-	reader io.ReadCloser
-}
-
-func (s *sourceReadCloser) ReadCloser() (io.ReadCloser, error) {
-	return s.reader, nil
-}
-
-func parseDataSource(source interface{}) (dataSource, error) {
-	switch s := source.(type) {
-	case string:
-		return sourceFile{s}, nil
-	case []byte:
-		return &sourceData{s}, nil
-	case io.ReadCloser:
-		return &sourceReadCloser{s}, nil
-	default:
-		return nil, fmt.Errorf("error parsing data source: unknown type '%s'", s)
-	}
-}
-
-type LoadOptions struct {
-	// Loose indicates whether the parser should ignore nonexistent files or return error.
-	Loose bool
-	// Insensitive indicates whether the parser forces all section and key names to lowercase.
-	Insensitive bool
-	// IgnoreContinuation indicates whether to ignore continuation lines while parsing.
-	IgnoreContinuation bool
-	// IgnoreInlineComment indicates whether to ignore comments at the end of value and treat it as part of value.
-	IgnoreInlineComment bool
-	// AllowBooleanKeys indicates whether to allow boolean type keys or treat as value is missing.
-	// This type of keys are mostly used in my.cnf.
-	AllowBooleanKeys bool
-	// AllowShadows indicates whether to keep track of keys with same name under same section.
-	AllowShadows bool
-	// AllowNestedValues indicates whether to allow AWS-like nested values.
-	// Docs: http://docs.aws.amazon.com/cli/latest/topic/config-vars.html#nested-values
-	AllowNestedValues bool
-	// AllowPythonMultilineValues indicates whether to allow Python-like multi-line values.
-	// Docs: https://docs.python.org/3/library/configparser.html#supported-ini-file-structure
-	// Relevant quote:  Values can also span multiple lines, as long as they are indented deeper
-	// than the first line of the value.
-	AllowPythonMultilineValues bool
-	// UnescapeValueDoubleQuotes indicates whether to unescape double quotes inside value to regular format
-	// when value is surrounded by double quotes, e.g. key="a \"value\"" => key=a "value"
-	UnescapeValueDoubleQuotes bool
-	// UnescapeValueCommentSymbols indicates to unescape comment symbols (\# and \;) inside value to regular format
-	// when value is NOT surrounded by any quotes.
-	// Note: UNSTABLE, behavior might change to only unescape inside double quotes but may noy necessary at all.
-	UnescapeValueCommentSymbols bool
-	// Some INI formats allow group blocks that store a block of raw content that doesn't otherwise
-	// conform to key/value pairs. Specify the names of those blocks here.
-	UnparseableSections []string
-}
-
-func LoadSources(opts LoadOptions, source interface{}, others ...interface{}) (_ *File, err error) {
-	sources := make([]dataSource, len(others)+1)
-	sources[0], err = parseDataSource(source)
-	if err != nil {
-		return nil, err
-	}
-	for i := range others {
-		sources[i+1], err = parseDataSource(others[i])
-		if err != nil {
-			return nil, err
-		}
-	}
-	f := newFile(sources, opts)
-	if err = f.Reload(); err != nil {
-		return nil, err
-	}
-	return f, nil
-}
-
-// Load loads and parses from INI data sources.
-// Arguments can be mixed of file name with string type, or raw data in []byte.
-// It will return error if list contains nonexistent files.
-func Load(source interface{}, others ...interface{}) (*File, error) {
-	return LoadSources(LoadOptions{}, source, others...)
-}
-
-// LooseLoad has exactly same functionality as Load function
-// except it ignores nonexistent files instead of returning error.
-func LooseLoad(source interface{}, others ...interface{}) (*File, error) {
-	return LoadSources(LoadOptions{Loose: true}, source, others...)
-}
-
-// InsensitiveLoad has exactly same functionality as Load function
-// except it forces all section and key names to be lowercased.
-func InsensitiveLoad(source interface{}, others ...interface{}) (*File, error) {
-	return LoadSources(LoadOptions{Insensitive: true}, source, others...)
-}
-
-// InsensitiveLoad has exactly same functionality as Load function
-// except it allows have shadow keys.
-func ShadowLoad(source interface{}, others ...interface{}) (*File, error) {
-	return LoadSources(LoadOptions{AllowShadows: true}, source, others...)
-}

+ 0 - 751
vendor/github.com/go-ini/ini/key.go

@@ -1,751 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"strconv"
-	"strings"
-	"time"
-)
-
-// Key represents a key under a section.
-type Key struct {
-	s               *Section
-	Comment         string
-	name            string
-	value           string
-	isAutoIncrement bool
-	isBooleanType   bool
-
-	isShadow bool
-	shadows  []*Key
-
-	nestedValues []string
-}
-
-// newKey simply return a key object with given values.
-func newKey(s *Section, name, val string) *Key {
-	return &Key{
-		s:     s,
-		name:  name,
-		value: val,
-	}
-}
-
-func (k *Key) addShadow(val string) error {
-	if k.isShadow {
-		return errors.New("cannot add shadow to another shadow key")
-	} else if k.isAutoIncrement || k.isBooleanType {
-		return errors.New("cannot add shadow to auto-increment or boolean key")
-	}
-
-	shadow := newKey(k.s, k.name, val)
-	shadow.isShadow = true
-	k.shadows = append(k.shadows, shadow)
-	return nil
-}
-
-// AddShadow adds a new shadow key to itself.
-func (k *Key) AddShadow(val string) error {
-	if !k.s.f.options.AllowShadows {
-		return errors.New("shadow key is not allowed")
-	}
-	return k.addShadow(val)
-}
-
-func (k *Key) addNestedValue(val string) error {
-	if k.isAutoIncrement || k.isBooleanType {
-		return errors.New("cannot add nested value to auto-increment or boolean key")
-	}
-
-	k.nestedValues = append(k.nestedValues, val)
-	return nil
-}
-
-func (k *Key) AddNestedValue(val string) error {
-	if !k.s.f.options.AllowNestedValues {
-		return errors.New("nested value is not allowed")
-	}
-	return k.addNestedValue(val)
-}
-
-// ValueMapper represents a mapping function for values, e.g. os.ExpandEnv
-type ValueMapper func(string) string
-
-// Name returns name of key.
-func (k *Key) Name() string {
-	return k.name
-}
-
-// Value returns raw value of key for performance purpose.
-func (k *Key) Value() string {
-	return k.value
-}
-
-// ValueWithShadows returns raw values of key and its shadows if any.
-func (k *Key) ValueWithShadows() []string {
-	if len(k.shadows) == 0 {
-		return []string{k.value}
-	}
-	vals := make([]string, len(k.shadows)+1)
-	vals[0] = k.value
-	for i := range k.shadows {
-		vals[i+1] = k.shadows[i].value
-	}
-	return vals
-}
-
-// NestedValues returns nested values stored in the key.
-// It is possible returned value is nil if no nested values stored in the key.
-func (k *Key) NestedValues() []string {
-	return k.nestedValues
-}
-
-// transformValue takes a raw value and transforms to its final string.
-func (k *Key) transformValue(val string) string {
-	if k.s.f.ValueMapper != nil {
-		val = k.s.f.ValueMapper(val)
-	}
-
-	// Fail-fast if no indicate char found for recursive value
-	if !strings.Contains(val, "%") {
-		return val
-	}
-	for i := 0; i < _DEPTH_VALUES; i++ {
-		vr := varPattern.FindString(val)
-		if len(vr) == 0 {
-			break
-		}
-
-		// Take off leading '%(' and trailing ')s'.
-		noption := strings.TrimLeft(vr, "%(")
-		noption = strings.TrimRight(noption, ")s")
-
-		// Search in the same section.
-		nk, err := k.s.GetKey(noption)
-		if err != nil || k == nk {
-			// Search again in default section.
-			nk, _ = k.s.f.Section("").GetKey(noption)
-		}
-
-		// Substitute by new value and take off leading '%(' and trailing ')s'.
-		val = strings.Replace(val, vr, nk.value, -1)
-	}
-	return val
-}
-
-// String returns string representation of value.
-func (k *Key) String() string {
-	return k.transformValue(k.value)
-}
-
-// Validate accepts a validate function which can
-// return modifed result as key value.
-func (k *Key) Validate(fn func(string) string) string {
-	return fn(k.String())
-}
-
-// parseBool returns the boolean value represented by the string.
-//
-// It accepts 1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On,
-// 0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off.
-// Any other value returns an error.
-func parseBool(str string) (value bool, err error) {
-	switch str {
-	case "1", "t", "T", "true", "TRUE", "True", "YES", "yes", "Yes", "y", "ON", "on", "On":
-		return true, nil
-	case "0", "f", "F", "false", "FALSE", "False", "NO", "no", "No", "n", "OFF", "off", "Off":
-		return false, nil
-	}
-	return false, fmt.Errorf("parsing \"%s\": invalid syntax", str)
-}
-
-// Bool returns bool type value.
-func (k *Key) Bool() (bool, error) {
-	return parseBool(k.String())
-}
-
-// Float64 returns float64 type value.
-func (k *Key) Float64() (float64, error) {
-	return strconv.ParseFloat(k.String(), 64)
-}
-
-// Int returns int type value.
-func (k *Key) Int() (int, error) {
-	return strconv.Atoi(k.String())
-}
-
-// Int64 returns int64 type value.
-func (k *Key) Int64() (int64, error) {
-	return strconv.ParseInt(k.String(), 10, 64)
-}
-
-// Uint returns uint type valued.
-func (k *Key) Uint() (uint, error) {
-	u, e := strconv.ParseUint(k.String(), 10, 64)
-	return uint(u), e
-}
-
-// Uint64 returns uint64 type value.
-func (k *Key) Uint64() (uint64, error) {
-	return strconv.ParseUint(k.String(), 10, 64)
-}
-
-// Duration returns time.Duration type value.
-func (k *Key) Duration() (time.Duration, error) {
-	return time.ParseDuration(k.String())
-}
-
-// TimeFormat parses with given format and returns time.Time type value.
-func (k *Key) TimeFormat(format string) (time.Time, error) {
-	return time.Parse(format, k.String())
-}
-
-// Time parses with RFC3339 format and returns time.Time type value.
-func (k *Key) Time() (time.Time, error) {
-	return k.TimeFormat(time.RFC3339)
-}
-
-// MustString returns default value if key value is empty.
-func (k *Key) MustString(defaultVal string) string {
-	val := k.String()
-	if len(val) == 0 {
-		k.value = defaultVal
-		return defaultVal
-	}
-	return val
-}
-
-// MustBool always returns value without error,
-// it returns false if error occurs.
-func (k *Key) MustBool(defaultVal ...bool) bool {
-	val, err := k.Bool()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatBool(defaultVal[0])
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustFloat64 always returns value without error,
-// it returns 0.0 if error occurs.
-func (k *Key) MustFloat64(defaultVal ...float64) float64 {
-	val, err := k.Float64()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatFloat(defaultVal[0], 'f', -1, 64)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustInt always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustInt(defaultVal ...int) int {
-	val, err := k.Int()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatInt(int64(defaultVal[0]), 10)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustInt64 always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustInt64(defaultVal ...int64) int64 {
-	val, err := k.Int64()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatInt(defaultVal[0], 10)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustUint always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustUint(defaultVal ...uint) uint {
-	val, err := k.Uint()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatUint(uint64(defaultVal[0]), 10)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustUint64 always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustUint64(defaultVal ...uint64) uint64 {
-	val, err := k.Uint64()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = strconv.FormatUint(defaultVal[0], 10)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustDuration always returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustDuration(defaultVal ...time.Duration) time.Duration {
-	val, err := k.Duration()
-	if len(defaultVal) > 0 && err != nil {
-		k.value = defaultVal[0].String()
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustTimeFormat always parses with given format and returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustTimeFormat(format string, defaultVal ...time.Time) time.Time {
-	val, err := k.TimeFormat(format)
-	if len(defaultVal) > 0 && err != nil {
-		k.value = defaultVal[0].Format(format)
-		return defaultVal[0]
-	}
-	return val
-}
-
-// MustTime always parses with RFC3339 format and returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustTime(defaultVal ...time.Time) time.Time {
-	return k.MustTimeFormat(time.RFC3339, defaultVal...)
-}
-
-// In always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) In(defaultVal string, candidates []string) string {
-	val := k.String()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InFloat64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InFloat64(defaultVal float64, candidates []float64) float64 {
-	val := k.MustFloat64()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InInt always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InInt(defaultVal int, candidates []int) int {
-	val := k.MustInt()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InInt64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InInt64(defaultVal int64, candidates []int64) int64 {
-	val := k.MustInt64()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InUint always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InUint(defaultVal uint, candidates []uint) uint {
-	val := k.MustUint()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InUint64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InUint64(defaultVal uint64, candidates []uint64) uint64 {
-	val := k.MustUint64()
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InTimeFormat always parses with given format and returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InTimeFormat(format string, defaultVal time.Time, candidates []time.Time) time.Time {
-	val := k.MustTimeFormat(format)
-	for _, cand := range candidates {
-		if val == cand {
-			return val
-		}
-	}
-	return defaultVal
-}
-
-// InTime always parses with RFC3339 format and returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InTime(defaultVal time.Time, candidates []time.Time) time.Time {
-	return k.InTimeFormat(time.RFC3339, defaultVal, candidates)
-}
-
-// RangeFloat64 checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeFloat64(defaultVal, min, max float64) float64 {
-	val := k.MustFloat64()
-	if val < min || val > max {
-		return defaultVal
-	}
-	return val
-}
-
-// RangeInt checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeInt(defaultVal, min, max int) int {
-	val := k.MustInt()
-	if val < min || val > max {
-		return defaultVal
-	}
-	return val
-}
-
-// RangeInt64 checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeInt64(defaultVal, min, max int64) int64 {
-	val := k.MustInt64()
-	if val < min || val > max {
-		return defaultVal
-	}
-	return val
-}
-
-// RangeTimeFormat checks if value with given format is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeTimeFormat(format string, defaultVal, min, max time.Time) time.Time {
-	val := k.MustTimeFormat(format)
-	if val.Unix() < min.Unix() || val.Unix() > max.Unix() {
-		return defaultVal
-	}
-	return val
-}
-
-// RangeTime checks if value with RFC3339 format is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeTime(defaultVal, min, max time.Time) time.Time {
-	return k.RangeTimeFormat(time.RFC3339, defaultVal, min, max)
-}
-
-// Strings returns list of string divided by given delimiter.
-func (k *Key) Strings(delim string) []string {
-	str := k.String()
-	if len(str) == 0 {
-		return []string{}
-	}
-
-	runes := []rune(str)
-	vals := make([]string, 0, 2)
-	var buf bytes.Buffer
-	escape := false
-	idx := 0
-	for {
-		if escape {
-			escape = false
-			if runes[idx] != '\\' && !strings.HasPrefix(string(runes[idx:]), delim) {
-				buf.WriteRune('\\')
-			}
-			buf.WriteRune(runes[idx])
-		} else {
-			if runes[idx] == '\\' {
-				escape = true
-			} else if strings.HasPrefix(string(runes[idx:]), delim) {
-				idx += len(delim) - 1
-				vals = append(vals, strings.TrimSpace(buf.String()))
-				buf.Reset()
-			} else {
-				buf.WriteRune(runes[idx])
-			}
-		}
-		idx += 1
-		if idx == len(runes) {
-			break
-		}
-	}
-
-	if buf.Len() > 0 {
-		vals = append(vals, strings.TrimSpace(buf.String()))
-	}
-
-	return vals
-}
-
-// StringsWithShadows returns list of string divided by given delimiter.
-// Shadows will also be appended if any.
-func (k *Key) StringsWithShadows(delim string) []string {
-	vals := k.ValueWithShadows()
-	results := make([]string, 0, len(vals)*2)
-	for i := range vals {
-		if len(vals) == 0 {
-			continue
-		}
-
-		results = append(results, strings.Split(vals[i], delim)...)
-	}
-
-	for i := range results {
-		results[i] = k.transformValue(strings.TrimSpace(results[i]))
-	}
-	return results
-}
-
-// Float64s returns list of float64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Float64s(delim string) []float64 {
-	vals, _ := k.parseFloat64s(k.Strings(delim), true, false)
-	return vals
-}
-
-// Ints returns list of int divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Ints(delim string) []int {
-	vals, _ := k.parseInts(k.Strings(delim), true, false)
-	return vals
-}
-
-// Int64s returns list of int64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Int64s(delim string) []int64 {
-	vals, _ := k.parseInt64s(k.Strings(delim), true, false)
-	return vals
-}
-
-// Uints returns list of uint divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Uints(delim string) []uint {
-	vals, _ := k.parseUints(k.Strings(delim), true, false)
-	return vals
-}
-
-// Uint64s returns list of uint64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Uint64s(delim string) []uint64 {
-	vals, _ := k.parseUint64s(k.Strings(delim), true, false)
-	return vals
-}
-
-// TimesFormat parses with given format and returns list of time.Time divided by given delimiter.
-// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC).
-func (k *Key) TimesFormat(format, delim string) []time.Time {
-	vals, _ := k.parseTimesFormat(format, k.Strings(delim), true, false)
-	return vals
-}
-
-// Times parses with RFC3339 format and returns list of time.Time divided by given delimiter.
-// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC).
-func (k *Key) Times(delim string) []time.Time {
-	return k.TimesFormat(time.RFC3339, delim)
-}
-
-// ValidFloat64s returns list of float64 divided by given delimiter. If some value is not float, then
-// it will not be included to result list.
-func (k *Key) ValidFloat64s(delim string) []float64 {
-	vals, _ := k.parseFloat64s(k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidInts returns list of int divided by given delimiter. If some value is not integer, then it will
-// not be included to result list.
-func (k *Key) ValidInts(delim string) []int {
-	vals, _ := k.parseInts(k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidInt64s returns list of int64 divided by given delimiter. If some value is not 64-bit integer,
-// then it will not be included to result list.
-func (k *Key) ValidInt64s(delim string) []int64 {
-	vals, _ := k.parseInt64s(k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidUints returns list of uint divided by given delimiter. If some value is not unsigned integer,
-// then it will not be included to result list.
-func (k *Key) ValidUints(delim string) []uint {
-	vals, _ := k.parseUints(k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidUint64s returns list of uint64 divided by given delimiter. If some value is not 64-bit unsigned
-// integer, then it will not be included to result list.
-func (k *Key) ValidUint64s(delim string) []uint64 {
-	vals, _ := k.parseUint64s(k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidTimesFormat parses with given format and returns list of time.Time divided by given delimiter.
-func (k *Key) ValidTimesFormat(format, delim string) []time.Time {
-	vals, _ := k.parseTimesFormat(format, k.Strings(delim), false, false)
-	return vals
-}
-
-// ValidTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter.
-func (k *Key) ValidTimes(delim string) []time.Time {
-	return k.ValidTimesFormat(time.RFC3339, delim)
-}
-
-// StrictFloat64s returns list of float64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictFloat64s(delim string) ([]float64, error) {
-	return k.parseFloat64s(k.Strings(delim), false, true)
-}
-
-// StrictInts returns list of int divided by given delimiter or error on first invalid input.
-func (k *Key) StrictInts(delim string) ([]int, error) {
-	return k.parseInts(k.Strings(delim), false, true)
-}
-
-// StrictInt64s returns list of int64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictInt64s(delim string) ([]int64, error) {
-	return k.parseInt64s(k.Strings(delim), false, true)
-}
-
-// StrictUints returns list of uint divided by given delimiter or error on first invalid input.
-func (k *Key) StrictUints(delim string) ([]uint, error) {
-	return k.parseUints(k.Strings(delim), false, true)
-}
-
-// StrictUint64s returns list of uint64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictUint64s(delim string) ([]uint64, error) {
-	return k.parseUint64s(k.Strings(delim), false, true)
-}
-
-// StrictTimesFormat parses with given format and returns list of time.Time divided by given delimiter
-// or error on first invalid input.
-func (k *Key) StrictTimesFormat(format, delim string) ([]time.Time, error) {
-	return k.parseTimesFormat(format, k.Strings(delim), false, true)
-}
-
-// StrictTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter
-// or error on first invalid input.
-func (k *Key) StrictTimes(delim string) ([]time.Time, error) {
-	return k.StrictTimesFormat(time.RFC3339, delim)
-}
-
-// parseFloat64s transforms strings to float64s.
-func (k *Key) parseFloat64s(strs []string, addInvalid, returnOnInvalid bool) ([]float64, error) {
-	vals := make([]float64, 0, len(strs))
-	for _, str := range strs {
-		val, err := strconv.ParseFloat(str, 64)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, val)
-		}
-	}
-	return vals, nil
-}
-
-// parseInts transforms strings to ints.
-func (k *Key) parseInts(strs []string, addInvalid, returnOnInvalid bool) ([]int, error) {
-	vals := make([]int, 0, len(strs))
-	for _, str := range strs {
-		val, err := strconv.Atoi(str)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, val)
-		}
-	}
-	return vals, nil
-}
-
-// parseInt64s transforms strings to int64s.
-func (k *Key) parseInt64s(strs []string, addInvalid, returnOnInvalid bool) ([]int64, error) {
-	vals := make([]int64, 0, len(strs))
-	for _, str := range strs {
-		val, err := strconv.ParseInt(str, 10, 64)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, val)
-		}
-	}
-	return vals, nil
-}
-
-// parseUints transforms strings to uints.
-func (k *Key) parseUints(strs []string, addInvalid, returnOnInvalid bool) ([]uint, error) {
-	vals := make([]uint, 0, len(strs))
-	for _, str := range strs {
-		val, err := strconv.ParseUint(str, 10, 0)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, uint(val))
-		}
-	}
-	return vals, nil
-}
-
-// parseUint64s transforms strings to uint64s.
-func (k *Key) parseUint64s(strs []string, addInvalid, returnOnInvalid bool) ([]uint64, error) {
-	vals := make([]uint64, 0, len(strs))
-	for _, str := range strs {
-		val, err := strconv.ParseUint(str, 10, 64)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, val)
-		}
-	}
-	return vals, nil
-}
-
-// parseTimesFormat transforms strings to times in given format.
-func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) {
-	vals := make([]time.Time, 0, len(strs))
-	for _, str := range strs {
-		val, err := time.Parse(format, str)
-		if err != nil && returnOnInvalid {
-			return nil, err
-		}
-		if err == nil || addInvalid {
-			vals = append(vals, val)
-		}
-	}
-	return vals, nil
-}
-
-// SetValue changes key value.
-func (k *Key) SetValue(v string) {
-	if k.s.f.BlockMode {
-		k.s.f.lock.Lock()
-		defer k.s.f.lock.Unlock()
-	}
-
-	k.value = v
-	k.s.keysHash[k.name] = v
-}

+ 0 - 477
vendor/github.com/go-ini/ini/parser.go

@@ -1,477 +0,0 @@
-// Copyright 2015 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"bufio"
-	"bytes"
-	"fmt"
-	"io"
-	"regexp"
-	"strconv"
-	"strings"
-	"unicode"
-)
-
-var pythonMultiline = regexp.MustCompile("^(\\s+)([^\n]+)")
-
-type tokenType int
-
-const (
-	_TOKEN_INVALID tokenType = iota
-	_TOKEN_COMMENT
-	_TOKEN_SECTION
-	_TOKEN_KEY
-)
-
-type parser struct {
-	buf     *bufio.Reader
-	isEOF   bool
-	count   int
-	comment *bytes.Buffer
-}
-
-func newParser(r io.Reader) *parser {
-	return &parser{
-		buf:     bufio.NewReader(r),
-		count:   1,
-		comment: &bytes.Buffer{},
-	}
-}
-
-// BOM handles header of UTF-8, UTF-16 LE and UTF-16 BE's BOM format.
-// http://en.wikipedia.org/wiki/Byte_order_mark#Representations_of_byte_order_marks_by_encoding
-func (p *parser) BOM() error {
-	mask, err := p.buf.Peek(2)
-	if err != nil && err != io.EOF {
-		return err
-	} else if len(mask) < 2 {
-		return nil
-	}
-
-	switch {
-	case mask[0] == 254 && mask[1] == 255:
-		fallthrough
-	case mask[0] == 255 && mask[1] == 254:
-		p.buf.Read(mask)
-	case mask[0] == 239 && mask[1] == 187:
-		mask, err := p.buf.Peek(3)
-		if err != nil && err != io.EOF {
-			return err
-		} else if len(mask) < 3 {
-			return nil
-		}
-		if mask[2] == 191 {
-			p.buf.Read(mask)
-		}
-	}
-	return nil
-}
-
-func (p *parser) readUntil(delim byte) ([]byte, error) {
-	data, err := p.buf.ReadBytes(delim)
-	if err != nil {
-		if err == io.EOF {
-			p.isEOF = true
-		} else {
-			return nil, err
-		}
-	}
-	return data, nil
-}
-
-func cleanComment(in []byte) ([]byte, bool) {
-	i := bytes.IndexAny(in, "#;")
-	if i == -1 {
-		return nil, false
-	}
-	return in[i:], true
-}
-
-func readKeyName(in []byte) (string, int, error) {
-	line := string(in)
-
-	// Check if key name surrounded by quotes.
-	var keyQuote string
-	if line[0] == '"' {
-		if len(line) > 6 && string(line[0:3]) == `"""` {
-			keyQuote = `"""`
-		} else {
-			keyQuote = `"`
-		}
-	} else if line[0] == '`' {
-		keyQuote = "`"
-	}
-
-	// Get out key name
-	endIdx := -1
-	if len(keyQuote) > 0 {
-		startIdx := len(keyQuote)
-		// FIXME: fail case -> """"""name"""=value
-		pos := strings.Index(line[startIdx:], keyQuote)
-		if pos == -1 {
-			return "", -1, fmt.Errorf("missing closing key quote: %s", line)
-		}
-		pos += startIdx
-
-		// Find key-value delimiter
-		i := strings.IndexAny(line[pos+startIdx:], "=:")
-		if i < 0 {
-			return "", -1, ErrDelimiterNotFound{line}
-		}
-		endIdx = pos + i
-		return strings.TrimSpace(line[startIdx:pos]), endIdx + startIdx + 1, nil
-	}
-
-	endIdx = strings.IndexAny(line, "=:")
-	if endIdx < 0 {
-		return "", -1, ErrDelimiterNotFound{line}
-	}
-	return strings.TrimSpace(line[0:endIdx]), endIdx + 1, nil
-}
-
-func (p *parser) readMultilines(line, val, valQuote string) (string, error) {
-	for {
-		data, err := p.readUntil('\n')
-		if err != nil {
-			return "", err
-		}
-		next := string(data)
-
-		pos := strings.LastIndex(next, valQuote)
-		if pos > -1 {
-			val += next[:pos]
-
-			comment, has := cleanComment([]byte(next[pos:]))
-			if has {
-				p.comment.Write(bytes.TrimSpace(comment))
-			}
-			break
-		}
-		val += next
-		if p.isEOF {
-			return "", fmt.Errorf("missing closing key quote from '%s' to '%s'", line, next)
-		}
-	}
-	return val, nil
-}
-
-func (p *parser) readContinuationLines(val string) (string, error) {
-	for {
-		data, err := p.readUntil('\n')
-		if err != nil {
-			return "", err
-		}
-		next := strings.TrimSpace(string(data))
-
-		if len(next) == 0 {
-			break
-		}
-		val += next
-		if val[len(val)-1] != '\\' {
-			break
-		}
-		val = val[:len(val)-1]
-	}
-	return val, nil
-}
-
-// hasSurroundedQuote check if and only if the first and last characters
-// are quotes \" or \'.
-// It returns false if any other parts also contain same kind of quotes.
-func hasSurroundedQuote(in string, quote byte) bool {
-	return len(in) >= 2 && in[0] == quote && in[len(in)-1] == quote &&
-		strings.IndexByte(in[1:], quote) == len(in)-2
-}
-
-func (p *parser) readValue(in []byte,
-	parserBufferSize int,
-	ignoreContinuation, ignoreInlineComment, unescapeValueDoubleQuotes, unescapeValueCommentSymbols, allowPythonMultilines bool) (string, error) {
-
-	line := strings.TrimLeftFunc(string(in), unicode.IsSpace)
-	if len(line) == 0 {
-		return "", nil
-	}
-
-	var valQuote string
-	if len(line) > 3 && string(line[0:3]) == `"""` {
-		valQuote = `"""`
-	} else if line[0] == '`' {
-		valQuote = "`"
-	} else if unescapeValueDoubleQuotes && line[0] == '"' {
-		valQuote = `"`
-	}
-
-	if len(valQuote) > 0 {
-		startIdx := len(valQuote)
-		pos := strings.LastIndex(line[startIdx:], valQuote)
-		// Check for multi-line value
-		if pos == -1 {
-			return p.readMultilines(line, line[startIdx:], valQuote)
-		}
-
-		if unescapeValueDoubleQuotes && valQuote == `"` {
-			return strings.Replace(line[startIdx:pos+startIdx], `\"`, `"`, -1), nil
-		}
-		return line[startIdx : pos+startIdx], nil
-	}
-
-	lastChar := line[len(line)-1]
-	// Won't be able to reach here if value only contains whitespace
-	line = strings.TrimSpace(line)
-	trimmedLastChar := line[len(line)-1]
-
-	// Check continuation lines when desired
-	if !ignoreContinuation && trimmedLastChar == '\\' {
-		return p.readContinuationLines(line[:len(line)-1])
-	}
-
-	// Check if ignore inline comment
-	if !ignoreInlineComment {
-		i := strings.IndexAny(line, "#;")
-		if i > -1 {
-			p.comment.WriteString(line[i:])
-			line = strings.TrimSpace(line[:i])
-		}
-	}
-
-	// Trim single and double quotes
-	if hasSurroundedQuote(line, '\'') ||
-		hasSurroundedQuote(line, '"') {
-		line = line[1 : len(line)-1]
-	} else if len(valQuote) == 0 && unescapeValueCommentSymbols {
-		if strings.Contains(line, `\;`) {
-			line = strings.Replace(line, `\;`, ";", -1)
-		}
-		if strings.Contains(line, `\#`) {
-			line = strings.Replace(line, `\#`, "#", -1)
-		}
-	} else if allowPythonMultilines && lastChar == '\n' {
-		parserBufferPeekResult, _ := p.buf.Peek(parserBufferSize)
-		peekBuffer := bytes.NewBuffer(parserBufferPeekResult)
-
-		identSize := -1
-		val := line
-
-		for {
-			peekData, peekErr := peekBuffer.ReadBytes('\n')
-			if peekErr != nil {
-				if peekErr == io.EOF {
-					return val, nil
-				}
-				return "", peekErr
-			}
-
-			peekMatches := pythonMultiline.FindStringSubmatch(string(peekData))
-			if len(peekMatches) != 3 {
-				return val, nil
-			}
-
-			currentIdentSize := len(peekMatches[1])
-			// NOTE: Return if not a python-ini multi-line value.
-			if currentIdentSize < 0 {
-				return val, nil
-			}
-			identSize = currentIdentSize
-
-			// NOTE: Just advance the parser reader (buffer) in-sync with the peek buffer.
-			_, err := p.readUntil('\n')
-			if err != nil {
-				return "", err
-			}
-
-			val += fmt.Sprintf("\n%s", peekMatches[2])
-		}
-
-		// NOTE: If it was a Python multi-line value,
-		// return the appended value.
-		if identSize > 0 {
-			return val, nil
-		}
-	}
-
-	return line, nil
-}
-
-// parse parses data through an io.Reader.
-func (f *File) parse(reader io.Reader) (err error) {
-	p := newParser(reader)
-	if err = p.BOM(); err != nil {
-		return fmt.Errorf("BOM: %v", err)
-	}
-
-	// Ignore error because default section name is never empty string.
-	name := DEFAULT_SECTION
-	if f.options.Insensitive {
-		name = strings.ToLower(DEFAULT_SECTION)
-	}
-	section, _ := f.NewSection(name)
-
-	// This "last" is not strictly equivalent to "previous one" if current key is not the first nested key
-	var isLastValueEmpty bool
-	var lastRegularKey *Key
-
-	var line []byte
-	var inUnparseableSection bool
-
-	// NOTE: Iterate and increase `currentPeekSize` until
-	// the size of the parser buffer is found.
-	// TODO: When Golang 1.10 is the lowest version supported,
-	// replace with `parserBufferSize := p.buf.Size()`.
-	parserBufferSize := 0
-	// NOTE: Peek 1kb at a time.
-	currentPeekSize := 1024
-
-	if f.options.AllowPythonMultilineValues {
-		for {
-			peekBytes, _ := p.buf.Peek(currentPeekSize)
-			peekBytesLength := len(peekBytes)
-
-			if parserBufferSize >= peekBytesLength {
-				break
-			}
-
-			currentPeekSize *= 2
-			parserBufferSize = peekBytesLength
-		}
-	}
-
-	for !p.isEOF {
-		line, err = p.readUntil('\n')
-		if err != nil {
-			return err
-		}
-
-		if f.options.AllowNestedValues &&
-			isLastValueEmpty && len(line) > 0 {
-			if line[0] == ' ' || line[0] == '\t' {
-				lastRegularKey.addNestedValue(string(bytes.TrimSpace(line)))
-				continue
-			}
-		}
-
-		line = bytes.TrimLeftFunc(line, unicode.IsSpace)
-		if len(line) == 0 {
-			continue
-		}
-
-		// Comments
-		if line[0] == '#' || line[0] == ';' {
-			// Note: we do not care ending line break,
-			// it is needed for adding second line,
-			// so just clean it once at the end when set to value.
-			p.comment.Write(line)
-			continue
-		}
-
-		// Section
-		if line[0] == '[' {
-			// Read to the next ']' (TODO: support quoted strings)
-			// TODO(unknwon): use LastIndexByte when stop supporting Go1.4
-			closeIdx := bytes.LastIndex(line, []byte("]"))
-			if closeIdx == -1 {
-				return fmt.Errorf("unclosed section: %s", line)
-			}
-
-			name := string(line[1:closeIdx])
-			section, err = f.NewSection(name)
-			if err != nil {
-				return err
-			}
-
-			comment, has := cleanComment(line[closeIdx+1:])
-			if has {
-				p.comment.Write(comment)
-			}
-
-			section.Comment = strings.TrimSpace(p.comment.String())
-
-			// Reset auto-counter and comments
-			p.comment.Reset()
-			p.count = 1
-
-			inUnparseableSection = false
-			for i := range f.options.UnparseableSections {
-				if f.options.UnparseableSections[i] == name ||
-					(f.options.Insensitive && strings.ToLower(f.options.UnparseableSections[i]) == strings.ToLower(name)) {
-					inUnparseableSection = true
-					continue
-				}
-			}
-			continue
-		}
-
-		if inUnparseableSection {
-			section.isRawSection = true
-			section.rawBody += string(line)
-			continue
-		}
-
-		kname, offset, err := readKeyName(line)
-		if err != nil {
-			// Treat as boolean key when desired, and whole line is key name.
-			if IsErrDelimiterNotFound(err) && f.options.AllowBooleanKeys {
-				kname, err := p.readValue(line,
-					parserBufferSize,
-					f.options.IgnoreContinuation,
-					f.options.IgnoreInlineComment,
-					f.options.UnescapeValueDoubleQuotes,
-					f.options.UnescapeValueCommentSymbols,
-					f.options.AllowPythonMultilineValues)
-				if err != nil {
-					return err
-				}
-				key, err := section.NewBooleanKey(kname)
-				if err != nil {
-					return err
-				}
-				key.Comment = strings.TrimSpace(p.comment.String())
-				p.comment.Reset()
-				continue
-			}
-			return err
-		}
-
-		// Auto increment.
-		isAutoIncr := false
-		if kname == "-" {
-			isAutoIncr = true
-			kname = "#" + strconv.Itoa(p.count)
-			p.count++
-		}
-
-		value, err := p.readValue(line[offset:],
-			parserBufferSize,
-			f.options.IgnoreContinuation,
-			f.options.IgnoreInlineComment,
-			f.options.UnescapeValueDoubleQuotes,
-			f.options.UnescapeValueCommentSymbols,
-			f.options.AllowPythonMultilineValues)
-		if err != nil {
-			return err
-		}
-		isLastValueEmpty = len(value) == 0
-
-		key, err := section.NewKey(kname, value)
-		if err != nil {
-			return err
-		}
-		key.isAutoIncrement = isAutoIncr
-		key.Comment = strings.TrimSpace(p.comment.String())
-		p.comment.Reset()
-		lastRegularKey = key
-	}
-	return nil
-}
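
Each branch of the deleted parser above is gated by a field on f.options. A hedged sketch of how a caller switches those behaviours on; it assumes the package is imported as "github.com/go-ini/ini" and that the usual go-ini entry point ini.LoadSources with these LoadOptions field names exists outside this hunk.

func loadDefaults() (*ini.File, error) {
	// Each option maps onto one of the f.options checks in parse().
	return ini.LoadSources(ini.LoadOptions{
		AllowBooleanKeys:           true, // bare lines become boolean keys instead of errors
		AllowNestedValues:          true, // indented lines attach to the previous key
		AllowPythonMultilineValues: true, // indentation-continued values, handled in readValue
		UnparseableSections:        []string{"COMMENTS"},
	}, "conf/defaults.ini")
}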

+ 0 - 257
vendor/github.com/go-ini/ini/section.go

@@ -1,257 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"errors"
-	"fmt"
-	"strings"
-)
-
-// Section represents a config section.
-type Section struct {
-	f        *File
-	Comment  string
-	name     string
-	keys     map[string]*Key
-	keyList  []string
-	keysHash map[string]string
-
-	isRawSection bool
-	rawBody      string
-}
-
-func newSection(f *File, name string) *Section {
-	return &Section{
-		f:        f,
-		name:     name,
-		keys:     make(map[string]*Key),
-		keyList:  make([]string, 0, 10),
-		keysHash: make(map[string]string),
-	}
-}
-
-// Name returns name of Section.
-func (s *Section) Name() string {
-	return s.name
-}
-
-// Body returns rawBody of Section if the section was marked as unparseable.
-// It still follows the other rules of the INI format surrounding leading/trailing whitespace.
-func (s *Section) Body() string {
-	return strings.TrimSpace(s.rawBody)
-}
-
-// SetBody updates body content only if section is raw.
-func (s *Section) SetBody(body string) {
-	if !s.isRawSection {
-		return
-	}
-	s.rawBody = body
-}
-
-// NewKey creates a new key to given section.
-func (s *Section) NewKey(name, val string) (*Key, error) {
-	if len(name) == 0 {
-		return nil, errors.New("error creating new key: empty key name")
-	} else if s.f.options.Insensitive {
-		name = strings.ToLower(name)
-	}
-
-	if s.f.BlockMode {
-		s.f.lock.Lock()
-		defer s.f.lock.Unlock()
-	}
-
-	if inSlice(name, s.keyList) {
-		if s.f.options.AllowShadows {
-			if err := s.keys[name].addShadow(val); err != nil {
-				return nil, err
-			}
-		} else {
-			s.keys[name].value = val
-		}
-		return s.keys[name], nil
-	}
-
-	s.keyList = append(s.keyList, name)
-	s.keys[name] = newKey(s, name, val)
-	s.keysHash[name] = val
-	return s.keys[name], nil
-}
-
-// NewBooleanKey creates a new boolean type key to given section.
-func (s *Section) NewBooleanKey(name string) (*Key, error) {
-	key, err := s.NewKey(name, "true")
-	if err != nil {
-		return nil, err
-	}
-
-	key.isBooleanType = true
-	return key, nil
-}
-
-// GetKey returns key in section by given name.
-func (s *Section) GetKey(name string) (*Key, error) {
-	// FIXME: change to section level lock?
-	if s.f.BlockMode {
-		s.f.lock.RLock()
-	}
-	if s.f.options.Insensitive {
-		name = strings.ToLower(name)
-	}
-	key := s.keys[name]
-	if s.f.BlockMode {
-		s.f.lock.RUnlock()
-	}
-
-	if key == nil {
-		// Check if it is a child-section.
-		sname := s.name
-		for {
-			if i := strings.LastIndex(sname, "."); i > -1 {
-				sname = sname[:i]
-				sec, err := s.f.GetSection(sname)
-				if err != nil {
-					continue
-				}
-				return sec.GetKey(name)
-			} else {
-				break
-			}
-		}
-		return nil, fmt.Errorf("error when getting key of section '%s': key '%s' not exists", s.name, name)
-	}
-	return key, nil
-}
-
-// HasKey returns true if section contains a key with given name.
-func (s *Section) HasKey(name string) bool {
-	key, _ := s.GetKey(name)
-	return key != nil
-}
-
-// Haskey is a backwards-compatible name for HasKey.
-// TODO: delete me in v2
-func (s *Section) Haskey(name string) bool {
-	return s.HasKey(name)
-}
-
-// HasValue returns true if section contains given raw value.
-func (s *Section) HasValue(value string) bool {
-	if s.f.BlockMode {
-		s.f.lock.RLock()
-		defer s.f.lock.RUnlock()
-	}
-
-	for _, k := range s.keys {
-		if value == k.value {
-			return true
-		}
-	}
-	return false
-}
-
-// Key assumes named Key exists in section and returns a zero-value when not.
-func (s *Section) Key(name string) *Key {
-	key, err := s.GetKey(name)
-	if err != nil {
-		// It's OK here because the only possible error is empty key name,
-		// but if it's empty, this piece of code won't be executed.
-		key, _ = s.NewKey(name, "")
-		return key
-	}
-	return key
-}
-
-// Keys returns list of keys of section.
-func (s *Section) Keys() []*Key {
-	keys := make([]*Key, len(s.keyList))
-	for i := range s.keyList {
-		keys[i] = s.Key(s.keyList[i])
-	}
-	return keys
-}
-
-// ParentKeys returns list of keys of parent section.
-func (s *Section) ParentKeys() []*Key {
-	var parentKeys []*Key
-	sname := s.name
-	for {
-		if i := strings.LastIndex(sname, "."); i > -1 {
-			sname = sname[:i]
-			sec, err := s.f.GetSection(sname)
-			if err != nil {
-				continue
-			}
-			parentKeys = append(parentKeys, sec.Keys()...)
-		} else {
-			break
-		}
-
-	}
-	return parentKeys
-}
-
-// KeyStrings returns list of key names of section.
-func (s *Section) KeyStrings() []string {
-	list := make([]string, len(s.keyList))
-	copy(list, s.keyList)
-	return list
-}
-
-// KeysHash returns keys hash consisting of names and values.
-func (s *Section) KeysHash() map[string]string {
-	if s.f.BlockMode {
-		s.f.lock.RLock()
-		defer s.f.lock.RUnlock()
-	}
-
-	hash := map[string]string{}
-	for key, value := range s.keysHash {
-		hash[key] = value
-	}
-	return hash
-}
-
-// DeleteKey deletes a key from section.
-func (s *Section) DeleteKey(name string) {
-	if s.f.BlockMode {
-		s.f.lock.Lock()
-		defer s.f.lock.Unlock()
-	}
-
-	for i, k := range s.keyList {
-		if k == name {
-			s.keyList = append(s.keyList[:i], s.keyList[i+1:]...)
-			delete(s.keys, name)
-			return
-		}
-	}
-}
-
-// ChildSections returns a list of child sections of current section.
-// For example, "[parent.child1]" and "[parent.child12]" are child sections
-// of section "[parent]".
-func (s *Section) ChildSections() []*Section {
-	prefix := s.name + "."
-	children := make([]*Section, 0, 3)
-	for _, name := range s.f.sectionList {
-		if strings.HasPrefix(name, prefix) {
-			children = append(children, s.f.sections[name])
-		}
-	}
-	return children
-}
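
Section is the accessor layer most callers touch: NewKey writes (overwriting on duplicate names unless shadows are allowed), GetKey falls back through dotted parent sections, Key never returns nil, and KeysHash snapshots the section as a plain map. A minimal sketch, assuming the package is imported as "github.com/go-ini/ini" and that its ini.Empty constructor (not shown in this hunk) is available.

func sectionExample() map[string]string {
	cfg := ini.Empty()
	sec, _ := cfg.NewSection("server")
	sec.NewKey("http_addr", "0.0.0.0")
	sec.NewKey("http_port", "3000") // NewKey on an existing name overwrites the value

	_ = sec.Key("http_port").String() // "3000"; Key creates an empty key instead of returning nil
	return sec.KeysHash()             // map[http_addr:0.0.0.0 http_port:3000]
}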

+ 0 - 512
vendor/github.com/go-ini/ini/struct.go

@@ -1,512 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"reflect"
-	"strings"
-	"time"
-	"unicode"
-)
-
-// NameMapper represents a ini tag name mapper.
-type NameMapper func(string) string
-
-// Built-in name getters.
-var (
-	// AllCapsUnderscore converts to format ALL_CAPS_UNDERSCORE.
-	AllCapsUnderscore NameMapper = func(raw string) string {
-		newstr := make([]rune, 0, len(raw))
-		for i, chr := range raw {
-			if isUpper := 'A' <= chr && chr <= 'Z'; isUpper {
-				if i > 0 {
-					newstr = append(newstr, '_')
-				}
-			}
-			newstr = append(newstr, unicode.ToUpper(chr))
-		}
-		return string(newstr)
-	}
-	// TitleUnderscore converts to format title_underscore.
-	TitleUnderscore NameMapper = func(raw string) string {
-		newstr := make([]rune, 0, len(raw))
-		for i, chr := range raw {
-			if isUpper := 'A' <= chr && chr <= 'Z'; isUpper {
-				if i > 0 {
-					newstr = append(newstr, '_')
-				}
-				chr -= ('A' - 'a')
-			}
-			newstr = append(newstr, chr)
-		}
-		return string(newstr)
-	}
-)
-
-func (s *Section) parseFieldName(raw, actual string) string {
-	if len(actual) > 0 {
-		return actual
-	}
-	if s.f.NameMapper != nil {
-		return s.f.NameMapper(raw)
-	}
-	return raw
-}
-
-func parseDelim(actual string) string {
-	if len(actual) > 0 {
-		return actual
-	}
-	return ","
-}
-
-var reflectTime = reflect.TypeOf(time.Now()).Kind()
-
-// setSliceWithProperType sets proper values to slice based on its type.
-func setSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error {
-	var strs []string
-	if allowShadow {
-		strs = key.StringsWithShadows(delim)
-	} else {
-		strs = key.Strings(delim)
-	}
-
-	numVals := len(strs)
-	if numVals == 0 {
-		return nil
-	}
-
-	var vals interface{}
-	var err error
-
-	sliceOf := field.Type().Elem().Kind()
-	switch sliceOf {
-	case reflect.String:
-		vals = strs
-	case reflect.Int:
-		vals, err = key.parseInts(strs, true, false)
-	case reflect.Int64:
-		vals, err = key.parseInt64s(strs, true, false)
-	case reflect.Uint:
-		vals, err = key.parseUints(strs, true, false)
-	case reflect.Uint64:
-		vals, err = key.parseUint64s(strs, true, false)
-	case reflect.Float64:
-		vals, err = key.parseFloat64s(strs, true, false)
-	case reflectTime:
-		vals, err = key.parseTimesFormat(time.RFC3339, strs, true, false)
-	default:
-		return fmt.Errorf("unsupported type '[]%s'", sliceOf)
-	}
-	if err != nil && isStrict {
-		return err
-	}
-
-	slice := reflect.MakeSlice(field.Type(), numVals, numVals)
-	for i := 0; i < numVals; i++ {
-		switch sliceOf {
-		case reflect.String:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]string)[i]))
-		case reflect.Int:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]int)[i]))
-		case reflect.Int64:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]int64)[i]))
-		case reflect.Uint:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]uint)[i]))
-		case reflect.Uint64:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]uint64)[i]))
-		case reflect.Float64:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]float64)[i]))
-		case reflectTime:
-			slice.Index(i).Set(reflect.ValueOf(vals.([]time.Time)[i]))
-		}
-	}
-	field.Set(slice)
-	return nil
-}
-
-func wrapStrictError(err error, isStrict bool) error {
-	if isStrict {
-		return err
-	}
-	return nil
-}
-
-// setWithProperType sets proper value to field based on its type,
-// but it does not return error for failing parsing,
-// because we want to use the default value that is already assigned to the struct.
-func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error {
-	switch t.Kind() {
-	case reflect.String:
-		if len(key.String()) == 0 {
-			return nil
-		}
-		field.SetString(key.String())
-	case reflect.Bool:
-		boolVal, err := key.Bool()
-		if err != nil {
-			return wrapStrictError(err, isStrict)
-		}
-		field.SetBool(boolVal)
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		durationVal, err := key.Duration()
-		// Skip zero value
-		if err == nil && int64(durationVal) > 0 {
-			field.Set(reflect.ValueOf(durationVal))
-			return nil
-		}
-
-		intVal, err := key.Int64()
-		if err != nil {
-			return wrapStrictError(err, isStrict)
-		}
-		field.SetInt(intVal)
-	//	byte is an alias for uint8, so supporting uint8 breaks support for byte
-	case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
-		durationVal, err := key.Duration()
-		// Skip zero value
-		if err == nil && int(durationVal) > 0 {
-			field.Set(reflect.ValueOf(durationVal))
-			return nil
-		}
-
-		uintVal, err := key.Uint64()
-		if err != nil {
-			return wrapStrictError(err, isStrict)
-		}
-		field.SetUint(uintVal)
-
-	case reflect.Float32, reflect.Float64:
-		floatVal, err := key.Float64()
-		if err != nil {
-			return wrapStrictError(err, isStrict)
-		}
-		field.SetFloat(floatVal)
-	case reflectTime:
-		timeVal, err := key.Time()
-		if err != nil {
-			return wrapStrictError(err, isStrict)
-		}
-		field.Set(reflect.ValueOf(timeVal))
-	case reflect.Slice:
-		return setSliceWithProperType(key, field, delim, allowShadow, isStrict)
-	default:
-		return fmt.Errorf("unsupported type '%s'", t)
-	}
-	return nil
-}
-
-func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool) {
-	opts := strings.SplitN(tag, ",", 3)
-	rawName = opts[0]
-	if len(opts) > 1 {
-		omitEmpty = opts[1] == "omitempty"
-	}
-	if len(opts) > 2 {
-		allowShadow = opts[2] == "allowshadow"
-	}
-	return rawName, omitEmpty, allowShadow
-}
-
-func (s *Section) mapTo(val reflect.Value, isStrict bool) error {
-	if val.Kind() == reflect.Ptr {
-		val = val.Elem()
-	}
-	typ := val.Type()
-
-	for i := 0; i < typ.NumField(); i++ {
-		field := val.Field(i)
-		tpField := typ.Field(i)
-
-		tag := tpField.Tag.Get("ini")
-		if tag == "-" {
-			continue
-		}
-
-		rawName, _, allowShadow := parseTagOptions(tag)
-		fieldName := s.parseFieldName(tpField.Name, rawName)
-		if len(fieldName) == 0 || !field.CanSet() {
-			continue
-		}
-
-		isAnonymous := tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous
-		isStruct := tpField.Type.Kind() == reflect.Struct
-		if isAnonymous {
-			field.Set(reflect.New(tpField.Type.Elem()))
-		}
-
-		if isAnonymous || isStruct {
-			if sec, err := s.f.GetSection(fieldName); err == nil {
-				if err = sec.mapTo(field, isStrict); err != nil {
-					return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
-				}
-				continue
-			}
-		}
-
-		if key, err := s.GetKey(fieldName); err == nil {
-			delim := parseDelim(tpField.Tag.Get("delim"))
-			if err = setWithProperType(tpField.Type, key, field, delim, allowShadow, isStrict); err != nil {
-				return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
-			}
-		}
-	}
-	return nil
-}
-
-// MapTo maps section to given struct.
-func (s *Section) MapTo(v interface{}) error {
-	typ := reflect.TypeOf(v)
-	val := reflect.ValueOf(v)
-	if typ.Kind() == reflect.Ptr {
-		typ = typ.Elem()
-		val = val.Elem()
-	} else {
-		return errors.New("cannot map to non-pointer struct")
-	}
-
-	return s.mapTo(val, false)
-}
-
-// MapTo maps section to given struct in strict mode,
-// which returns all possible error including value parsing error.
-func (s *Section) StrictMapTo(v interface{}) error {
-	typ := reflect.TypeOf(v)
-	val := reflect.ValueOf(v)
-	if typ.Kind() == reflect.Ptr {
-		typ = typ.Elem()
-		val = val.Elem()
-	} else {
-		return errors.New("cannot map to non-pointer struct")
-	}
-
-	return s.mapTo(val, true)
-}
-
-// MapTo maps file to given struct.
-func (f *File) MapTo(v interface{}) error {
-	return f.Section("").MapTo(v)
-}
-
-// MapTo maps file to given struct in strict mode,
-// which returns all possible error including value parsing error.
-func (f *File) StrictMapTo(v interface{}) error {
-	return f.Section("").StrictMapTo(v)
-}
-
-// MapTo maps data sources to given struct with name mapper.
-func MapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error {
-	cfg, err := Load(source, others...)
-	if err != nil {
-		return err
-	}
-	cfg.NameMapper = mapper
-	return cfg.MapTo(v)
-}
-
-// StrictMapToWithMapper maps data sources to given struct with name mapper in strict mode,
-// which returns all possible error including value parsing error.
-func StrictMapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error {
-	cfg, err := Load(source, others...)
-	if err != nil {
-		return err
-	}
-	cfg.NameMapper = mapper
-	return cfg.StrictMapTo(v)
-}
-
-// MapTo maps data sources to given struct.
-func MapTo(v, source interface{}, others ...interface{}) error {
-	return MapToWithMapper(v, nil, source, others...)
-}
-
-// StrictMapTo maps data sources to given struct in strict mode,
-// which returns all possible error including value parsing error.
-func StrictMapTo(v, source interface{}, others ...interface{}) error {
-	return StrictMapToWithMapper(v, nil, source, others...)
-}
-
-// reflectSliceWithProperType does the opposite thing as setSliceWithProperType.
-func reflectSliceWithProperType(key *Key, field reflect.Value, delim string) error {
-	slice := field.Slice(0, field.Len())
-	if field.Len() == 0 {
-		return nil
-	}
-
-	var buf bytes.Buffer
-	sliceOf := field.Type().Elem().Kind()
-	for i := 0; i < field.Len(); i++ {
-		switch sliceOf {
-		case reflect.String:
-			buf.WriteString(slice.Index(i).String())
-		case reflect.Int, reflect.Int64:
-			buf.WriteString(fmt.Sprint(slice.Index(i).Int()))
-		case reflect.Uint, reflect.Uint64:
-			buf.WriteString(fmt.Sprint(slice.Index(i).Uint()))
-		case reflect.Float64:
-			buf.WriteString(fmt.Sprint(slice.Index(i).Float()))
-		case reflectTime:
-			buf.WriteString(slice.Index(i).Interface().(time.Time).Format(time.RFC3339))
-		default:
-			return fmt.Errorf("unsupported type '[]%s'", sliceOf)
-		}
-		buf.WriteString(delim)
-	}
-	key.SetValue(buf.String()[:buf.Len()-1])
-	return nil
-}
-
-// reflectWithProperType does the opposite thing as setWithProperType.
-func reflectWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string) error {
-	switch t.Kind() {
-	case reflect.String:
-		key.SetValue(field.String())
-	case reflect.Bool:
-		key.SetValue(fmt.Sprint(field.Bool()))
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		key.SetValue(fmt.Sprint(field.Int()))
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
-		key.SetValue(fmt.Sprint(field.Uint()))
-	case reflect.Float32, reflect.Float64:
-		key.SetValue(fmt.Sprint(field.Float()))
-	case reflectTime:
-		key.SetValue(fmt.Sprint(field.Interface().(time.Time).Format(time.RFC3339)))
-	case reflect.Slice:
-		return reflectSliceWithProperType(key, field, delim)
-	default:
-		return fmt.Errorf("unsupported type '%s'", t)
-	}
-	return nil
-}
-
-// CR: copied from encoding/json/encode.go with modifications of time.Time support.
-// TODO: add more test coverage.
-func isEmptyValue(v reflect.Value) bool {
-	switch v.Kind() {
-	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
-		return v.Len() == 0
-	case reflect.Bool:
-		return !v.Bool()
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		return v.Int() == 0
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		return v.Uint() == 0
-	case reflect.Float32, reflect.Float64:
-		return v.Float() == 0
-	case reflect.Interface, reflect.Ptr:
-		return v.IsNil()
-	case reflectTime:
-		t, ok := v.Interface().(time.Time)
-		return ok && t.IsZero()
-	}
-	return false
-}
-
-func (s *Section) reflectFrom(val reflect.Value) error {
-	if val.Kind() == reflect.Ptr {
-		val = val.Elem()
-	}
-	typ := val.Type()
-
-	for i := 0; i < typ.NumField(); i++ {
-		field := val.Field(i)
-		tpField := typ.Field(i)
-
-		tag := tpField.Tag.Get("ini")
-		if tag == "-" {
-			continue
-		}
-
-		opts := strings.SplitN(tag, ",", 2)
-		if len(opts) == 2 && opts[1] == "omitempty" && isEmptyValue(field) {
-			continue
-		}
-
-		fieldName := s.parseFieldName(tpField.Name, opts[0])
-		if len(fieldName) == 0 || !field.CanSet() {
-			continue
-		}
-
-		if (tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous) ||
-			(tpField.Type.Kind() == reflect.Struct && tpField.Type.Name() != "Time") {
-			// Note: The only error here is section doesn't exist.
-			sec, err := s.f.GetSection(fieldName)
-			if err != nil {
-				// Note: fieldName can never be empty here, ignore error.
-				sec, _ = s.f.NewSection(fieldName)
-			}
-
-			// Add comment from comment tag
-			if len(sec.Comment) == 0 {
-				sec.Comment = tpField.Tag.Get("comment")
-			}
-
-			if err = sec.reflectFrom(field); err != nil {
-				return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
-			}
-			continue
-		}
-
-		// Note: Same reason as section.
-		key, err := s.GetKey(fieldName)
-		if err != nil {
-			key, _ = s.NewKey(fieldName, "")
-		}
-
-		// Add comment from comment tag
-		if len(key.Comment) == 0 {
-			key.Comment = tpField.Tag.Get("comment")
-		}
-
-		if err = reflectWithProperType(tpField.Type, key, field, parseDelim(tpField.Tag.Get("delim"))); err != nil {
-			return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
-		}
-
-	}
-	return nil
-}
-
-// ReflectFrom reflects section from given struct.
-func (s *Section) ReflectFrom(v interface{}) error {
-	typ := reflect.TypeOf(v)
-	val := reflect.ValueOf(v)
-	if typ.Kind() == reflect.Ptr {
-		typ = typ.Elem()
-		val = val.Elem()
-	} else {
-		return errors.New("cannot reflect from non-pointer struct")
-	}
-
-	return s.reflectFrom(val)
-}
-
-// ReflectFrom reflects file from given struct.
-func (f *File) ReflectFrom(v interface{}) error {
-	return f.Section("").ReflectFrom(v)
-}
-
-// ReflectFrom reflects data sources from given struct with name mapper.
-func ReflectFromWithMapper(cfg *File, v interface{}, mapper NameMapper) error {
-	cfg.NameMapper = mapper
-	return cfg.ReflectFrom(v)
-}
-
-// ReflectFrom reflects data sources from given struct.
-func ReflectFrom(cfg *File, v interface{}) error {
-	return ReflectFromWithMapper(cfg, v, nil)
-}
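
The struct mapping above is driven entirely by tags: ini names the key, delim splits slices, "-" skips a field, and integer fields are first tried as a time.Duration before falling back to a plain integer. A sketch of the read direction, assuming imports of time and "github.com/go-ini/ini"; the section and field names are invented.

type ServerSettings struct {
	Addr    string        `ini:"http_addr"`
	Port    int           `ini:"http_port"`
	Timeout time.Duration `ini:"timeout"`          // e.g. "30s"; Duration is tried before Int64
	Admins  []string      `ini:"admins" delim:","` // split on the delim tag
	Secret  string        `ini:"-"`                // skipped by mapTo
}

func mapServer(cfg *ini.File) (ServerSettings, error) {
	var s ServerSettings
	// StrictMapTo would additionally surface value-parsing errors.
	err := cfg.Section("server").MapTo(&s)
	return s, err
}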

+ 247 - 43
vendor/github.com/go-xorm/builder/builder.go

@@ -4,6 +4,12 @@
 
 package builder
 
+import (
+	sql2 "database/sql"
+	"fmt"
+	"sort"
+)
+
 type optype byte
 
 const (
@@ -12,6 +18,15 @@ const (
 	insertType               // insert
 	updateType               // update
 	deleteType               // delete
+	unionType                // union
+)
+
+const (
+	POSTGRES = "postgres"
+	SQLITE   = "sqlite3"
+	MYSQL    = "mysql"
+	MSSQL    = "mssql"
+	ORACLE   = "oracle"
 )
 
 type join struct {
@@ -20,60 +35,115 @@ type join struct {
 	joinCond  Cond
 }
 
+type union struct {
+	unionType string
+	builder   *Builder
+}
+
+type limit struct {
+	limitN int
+	offset int
+}
+
 // Builder describes a SQL statement
 type Builder struct {
 	optype
-	tableName string
-	cond      Cond
-	selects   []string
-	joins     []join
-	inserts   Eq
-	updates   []Eq
+	dialect    string
+	isNested   bool
+	into       string
+	from       string
+	subQuery   *Builder
+	cond       Cond
+	selects    []string
+	joins      []join
+	unions     []union
+	limitation *limit
+	insertCols []string
+	insertVals []interface{}
+	updates    []Eq
+	orderBy    string
+	groupBy    string
+	having     string
+}
+
+// Dialect sets the db dialect of Builder.
+func Dialect(dialect string) *Builder {
+	builder := &Builder{cond: NewCond(), dialect: dialect}
+	return builder
+}
+
+// MySQL is shortcut of Dialect(MySQL)
+func MySQL() *Builder {
+	return Dialect(MYSQL)
 }
 
-// Select creates a select Builder
-func Select(cols ...string) *Builder {
-	builder := &Builder{cond: NewCond()}
-	return builder.Select(cols...)
+// MsSQL is shortcut of Dialect(MsSQL)
+func MsSQL() *Builder {
+	return Dialect(MSSQL)
 }
 
-// Insert creates an insert Builder
-func Insert(eq Eq) *Builder {
-	builder := &Builder{cond: NewCond()}
-	return builder.Insert(eq)
+// Oracle is shortcut of Dialect(Oracle)
+func Oracle() *Builder {
+	return Dialect(ORACLE)
 }
 
-// Update creates an update Builder
-func Update(updates ...Eq) *Builder {
-	builder := &Builder{cond: NewCond()}
-	return builder.Update(updates...)
+// Postgres is shortcut of Dialect(Postgres)
+func Postgres() *Builder {
+	return Dialect(POSTGRES)
 }
 
-// Delete creates a delete Builder
-func Delete(conds ...Cond) *Builder {
-	builder := &Builder{cond: NewCond()}
-	return builder.Delete(conds...)
+// SQLite is shortcut of Dialect(SQLITE)
+func SQLite() *Builder {
+	return Dialect(SQLITE)
 }
 
 // Where sets where SQL
 func (b *Builder) Where(cond Cond) *Builder {
-	b.cond = b.cond.And(cond)
+	if b.cond.IsValid() {
+		b.cond = b.cond.And(cond)
+	} else {
+		b.cond = cond
+	}
 	return b
 }
 
-// From sets the table name
-func (b *Builder) From(tableName string) *Builder {
-	b.tableName = tableName
+// From sets the from subject (a table name string or a *Builder sub-query) and an optional alias
+func (b *Builder) From(subject interface{}, alias ...string) *Builder {
+	switch subject.(type) {
+	case *Builder:
+		b.subQuery = subject.(*Builder)
+
+		if len(alias) > 0 {
+			b.from = alias[0]
+		} else {
+			b.isNested = true
+		}
+	case string:
+		b.from = subject.(string)
+
+		if len(alias) > 0 {
+			b.from = b.from + " " + alias[0]
+		}
+	}
+
 	return b
 }
 
+// TableName returns the table name
+func (b *Builder) TableName() string {
+	if b.optype == insertType {
+		return b.into
+	}
+	return b.from
+}
+
 // Into sets insert table name
 func (b *Builder) Into(tableName string) *Builder {
-	b.tableName = tableName
+	b.into = tableName
 	return b
 }
 
-// Join sets join table and contions
+// Join sets join table and conditions
 func (b *Builder) Join(joinType, joinTable string, joinCond interface{}) *Builder {
 	switch joinCond.(type) {
 	case Cond:
@@ -85,6 +155,50 @@ func (b *Builder) Join(joinType, joinTable string, joinCond interface{}) *Builde
 	return b
 }
 
+// Union sets union conditions
+func (b *Builder) Union(unionTp string, unionCond *Builder) *Builder {
+	var builder *Builder
+	if b.optype != unionType {
+		builder = &Builder{cond: NewCond()}
+		builder.optype = unionType
+		builder.dialect = b.dialect
+		builder.selects = b.selects
+
+		currentUnions := b.unions
+		// erase sub unions (actually append to new Builder.unions)
+		b.unions = nil
+
+		for e := range currentUnions {
+			currentUnions[e].builder.dialect = b.dialect
+		}
+
+		builder.unions = append(append(builder.unions, union{"", b}), currentUnions...)
+	} else {
+		builder = b
+	}
+
+	if unionCond != nil {
+		if unionCond.dialect == "" && builder.dialect != "" {
+			unionCond.dialect = builder.dialect
+		}
+
+		builder.unions = append(builder.unions, union{unionTp, unionCond})
+	}
+
+	return builder
+}
+
+// Limit sets limitN condition
+func (b *Builder) Limit(limitN int, offset ...int) *Builder {
+	b.limitation = &limit{limitN: limitN}
+
+	if len(offset) > 0 {
+		b.limitation.offset = offset[0]
+	}
+
+	return b
+}
+
 // InnerJoin sets inner join
 func (b *Builder) InnerJoin(joinTable string, joinCond interface{}) *Builder {
 	return b.Join("INNER", joinTable, joinCond)
@@ -113,7 +227,9 @@ func (b *Builder) FullJoin(joinTable string, joinCond interface{}) *Builder {
 // Select sets select SQL
 func (b *Builder) Select(cols ...string) *Builder {
 	b.selects = cols
-	b.optype = selectType
+	if b.optype == condType {
+		b.optype = selectType
+	}
 	return b
 }
 
@@ -129,16 +245,70 @@ func (b *Builder) Or(cond Cond) *Builder {
 	return b
 }
 
+type insertColsSorter struct {
+	cols []string
+	vals []interface{}
+}
+
+func (s insertColsSorter) Len() int {
+	return len(s.cols)
+}
+func (s insertColsSorter) Swap(i, j int) {
+	s.cols[i], s.cols[j] = s.cols[j], s.cols[i]
+	s.vals[i], s.vals[j] = s.vals[j], s.vals[i]
+}
+
+func (s insertColsSorter) Less(i, j int) bool {
+	return s.cols[i] < s.cols[j]
+}
+
 // Insert sets insert SQL
-func (b *Builder) Insert(eq Eq) *Builder {
-	b.inserts = eq
+func (b *Builder) Insert(eq ...interface{}) *Builder {
+	if len(eq) > 0 {
+		var paramType = -1
+		for _, e := range eq {
+			switch t := e.(type) {
+			case Eq:
+				if paramType == -1 {
+					paramType = 0
+				}
+				if paramType != 0 {
+					break
+				}
+				for k, v := range t {
+					b.insertCols = append(b.insertCols, k)
+					b.insertVals = append(b.insertVals, v)
+				}
+			case string:
+				if paramType == -1 {
+					paramType = 1
+				}
+				if paramType != 1 {
+					break
+				}
+				b.insertCols = append(b.insertCols, t)
+			}
+		}
+	}
+
+	if len(b.insertCols) == len(b.insertVals) {
+		sort.Sort(insertColsSorter{
+			cols: b.insertCols,
+			vals: b.insertVals,
+		})
+	}
 	b.optype = insertType
 	return b
 }
 
 // Update sets update SQL
 func (b *Builder) Update(updates ...Eq) *Builder {
-	b.updates = updates
+	b.updates = make([]Eq, 0, len(updates))
+	for _, update := range updates {
+		if update.IsValid() {
+			b.updates = append(b.updates, update)
+		}
+	}
 	b.optype = updateType
 	return b
 }
@@ -153,8 +323,8 @@ func (b *Builder) Delete(conds ...Cond) *Builder {
 // WriteTo implements Writer interface
 func (b *Builder) WriteTo(w Writer) error {
 	switch b.optype {
-	case condType:
-		return b.cond.WriteTo(w)
+	/*case condType:
+	return b.cond.WriteTo(w)*/
 	case selectType:
 		return b.selectWriteTo(w)
 	case insertType:
@@ -163,6 +333,8 @@ func (b *Builder) WriteTo(w Writer) error {
 		return b.updateWriteTo(w)
 	case deleteType:
 		return b.deleteWriteTo(w)
+	case unionType:
+		return b.unionWriteTo(w)
 	}
 
 	return ErrNotSupportType
@@ -175,16 +347,48 @@ func (b *Builder) ToSQL() (string, []interface{}, error) {
 		return "", nil, err
 	}
 
-	return w.writer.String(), w.args, nil
+	// in case of sql.NamedArg in args
+	for e := range w.args {
+		if namedArg, ok := w.args[e].(sql2.NamedArg); ok {
+			w.args[e] = namedArg.Value
+		}
+	}
+
+	var sql = w.writer.String()
+	var err error
+
+	switch b.dialect {
+	case ORACLE, MSSQL:
+		// This is for compatibility with different sql drivers
+		for e := range w.args {
+			w.args[e] = sql2.Named(fmt.Sprintf("p%d", e+1), w.args[e])
+		}
+
+		var prefix string
+		if b.dialect == ORACLE {
+			prefix = ":p"
+		} else {
+			prefix = "@p"
+		}
+
+		if sql, err = ConvertPlaceholder(sql, prefix); err != nil {
+			return "", nil, err
+		}
+	case POSTGRES:
+		if sql, err = ConvertPlaceholder(sql, "$"); err != nil {
+			return "", nil, err
+		}
+	}
+
+	return sql, w.args, nil
 }
 
-// ToSQL convert a builder or condtions to SQL and args
-func ToSQL(cond interface{}) (string, []interface{}, error) {
-	switch cond.(type) {
-	case Cond:
-		return condToSQL(cond.(Cond))
-	case *Builder:
-		return cond.(*Builder).ToSQL()
+// ToBoundSQL generates the SQL string with its arguments bound inline
+func (b *Builder) ToBoundSQL() (string, error) {
+	w := NewWriter()
+	if err := b.WriteTo(w); err != nil {
+		return "", err
 	}
-	return "", nil, ErrNotSupportType
+
+	return ConvertToBoundSQL(w.writer.String(), w.args)
 }
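
With this change the Builder is dialect-aware end to end: a second Where ANDs onto the first, Limit is stored for later, and ToSQL unwraps sql.NamedArg values and rewrites ? placeholders for Postgres, Oracle and MSSQL through ConvertPlaceholder. A sketch of a typical chain, assuming the package is imported as "github.com/go-xorm/builder"; table and column names are invented.

func activeDashboards() (string, []interface{}, error) {
	// With the POSTGRES dialect the two ? placeholders come back as $1 and $2.
	return builder.Dialect(builder.POSTGRES).
		Select("id", "title").
		From("dashboard").
		Where(builder.Eq{"org_id": 1}).
		Where(builder.Gt{"version": 5}). // a second Where ANDs onto the first
		OrderBy("id DESC").
		Limit(10, 20).
		ToSQL()
}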

+ 9 - 4
vendor/github.com/go-xorm/builder/builder_delete.go

@@ -5,16 +5,21 @@
 package builder
 
 import (
-	"errors"
 	"fmt"
 )
 
+// Delete creates a delete Builder
+func Delete(conds ...Cond) *Builder {
+	builder := &Builder{cond: NewCond()}
+	return builder.Delete(conds...)
+}
+
 func (b *Builder) deleteWriteTo(w Writer) error {
-	if len(b.tableName) <= 0 {
-		return errors.New("no table indicated")
+	if len(b.from) <= 0 {
+		return ErrNoTableName
 	}
 
-	if _, err := fmt.Fprintf(w, "DELETE FROM %s WHERE ", b.tableName); err != nil {
+	if _, err := fmt.Fprintf(w, "DELETE FROM %s WHERE ", b.from); err != nil {
 		return err
 	}
 

+ 35 - 12
vendor/github.com/go-xorm/builder/builder_insert.go

@@ -6,39 +6,63 @@ package builder
 
 import (
 	"bytes"
-	"errors"
 	"fmt"
 )
 
+// Insert creates an insert Builder
+func Insert(eq ...interface{}) *Builder {
+	builder := &Builder{cond: NewCond()}
+	return builder.Insert(eq...)
+}
+
+func (b *Builder) insertSelectWriteTo(w Writer) error {
+	if _, err := fmt.Fprintf(w, "INSERT INTO %s ", b.into); err != nil {
+		return err
+	}
+
+	if len(b.insertCols) > 0 {
+		fmt.Fprintf(w, "(")
+		for _, col := range b.insertCols {
+			fmt.Fprintf(w, col)
+		}
+		fmt.Fprintf(w, ") ")
+	}
+
+	return b.selectWriteTo(w)
+}
+
 func (b *Builder) insertWriteTo(w Writer) error {
-	if len(b.tableName) <= 0 {
-		return errors.New("no table indicated")
+	if len(b.into) <= 0 {
+		return ErrNoTableName
+	}
+	if len(b.insertCols) <= 0 && b.from == "" {
+		return ErrNoColumnToInsert
 	}
-	if len(b.inserts) <= 0 {
-		return errors.New("no column to be insert")
+
+	if b.into != "" && b.from != "" {
+		return b.insertSelectWriteTo(w)
 	}
 
-	if _, err := fmt.Fprintf(w, "INSERT INTO %s (", b.tableName); err != nil {
+	if _, err := fmt.Fprintf(w, "INSERT INTO %s (", b.into); err != nil {
 		return err
 	}
 
 	var args = make([]interface{}, 0)
 	var bs []byte
 	var valBuffer = bytes.NewBuffer(bs)
-	var i = 0
 
-	for _, col := range b.inserts.sortedKeys() {
-		value := b.inserts[col]
+	for i, col := range b.insertCols {
+		value := b.insertVals[i]
 		fmt.Fprint(w, col)
 		if e, ok := value.(expr); ok {
-			fmt.Fprint(valBuffer, e.sql)
+			fmt.Fprintf(valBuffer, "(%s)", e.sql)
 			args = append(args, e.args...)
 		} else {
 			fmt.Fprint(valBuffer, "?")
 			args = append(args, value)
 		}
 
-		if i != len(b.inserts)-1 {
+		if i != len(b.insertCols)-1 {
 			if _, err := fmt.Fprint(w, ","); err != nil {
 				return err
 			}
@@ -46,7 +70,6 @@ func (b *Builder) insertWriteTo(w Writer) error {
 				return err
 			}
 		}
-		i = i + 1
 	}
 
 	if _, err := fmt.Fprint(w, ") Values ("); err != nil {

+ 100 - 0
vendor/github.com/go-xorm/builder/builder_limit.go

@@ -0,0 +1,100 @@
+// Copyright 2018 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package builder
+
+import (
+	"fmt"
+	"strings"
+)
+
+func (b *Builder) limitWriteTo(w Writer) error {
+	if strings.TrimSpace(b.dialect) == "" {
+		return ErrDialectNotSetUp
+	}
+
+	if b.limitation != nil {
+		limit := b.limitation
+		if limit.offset < 0 || limit.limitN <= 0 {
+			return ErrInvalidLimitation
+		}
+		// erase limit condition
+		b.limitation = nil
+		ow := w.(*BytesWriter)
+
+		switch strings.ToLower(strings.TrimSpace(b.dialect)) {
+		case ORACLE:
+			if len(b.selects) == 0 {
+				b.selects = append(b.selects, "*")
+			}
+
+			var final *Builder
+			selects := b.selects
+			b.selects = append(selects, "ROWNUM RN")
+
+			var wb *Builder
+			if b.optype == unionType {
+				wb = Dialect(b.dialect).Select("at.*", "ROWNUM RN").
+					From(b, "at")
+			} else {
+				wb = b
+			}
+
+			if limit.offset == 0 {
+				final = Dialect(b.dialect).Select(selects...).From(wb, "at").
+					Where(Lte{"at.RN": limit.limitN})
+			} else {
+				sub := Dialect(b.dialect).Select("*").
+					From(b, "at").Where(Lte{"at.RN": limit.offset + limit.limitN})
+
+				final = Dialect(b.dialect).Select(selects...).From(sub, "att").
+					Where(Gt{"att.RN": limit.offset})
+			}
+
+			return final.WriteTo(ow)
+		case SQLITE, MYSQL, POSTGRES:
+			// if type UNION, we need to write previous content back to current writer
+			if b.optype == unionType {
+				if err := b.WriteTo(ow); err != nil {
+					return err
+				}
+			}
+
+			if limit.offset == 0 {
+				fmt.Fprint(ow, " LIMIT ", limit.limitN)
+			} else {
+				fmt.Fprintf(ow, " LIMIT %v OFFSET %v", limit.limitN, limit.offset)
+			}
+		case MSSQL:
+			if len(b.selects) == 0 {
+				b.selects = append(b.selects, "*")
+			}
+
+			var final *Builder
+			selects := b.selects
+			b.selects = append(append([]string{fmt.Sprintf("TOP %d %v", limit.limitN+limit.offset, b.selects[0])},
+				b.selects[1:]...), "ROW_NUMBER() OVER (ORDER BY (SELECT 1)) AS RN")
+
+			var wb *Builder
+			if b.optype == unionType {
+				wb = Dialect(b.dialect).Select("*", "ROW_NUMBER() OVER (ORDER BY (SELECT 1)) AS RN").
+					From(b, "at")
+			} else {
+				wb = b
+			}
+
+			if limit.offset == 0 {
+				final = Dialect(b.dialect).Select(selects...).From(wb, "at")
+			} else {
+				final = Dialect(b.dialect).Select(selects...).From(wb, "at").Where(Gt{"at.RN": limit.offset})
+			}
+
+			return final.WriteTo(ow)
+		default:
+			return ErrNotSupportType
+		}
+	}
+
+	return nil
+}
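
limitWriteTo makes Limit dialect-specific: MySQL, Postgres and SQLite get a plain LIMIT/OFFSET suffix, while Oracle and MSSQL have the whole query wrapped in a ROWNUM / ROW_NUMBER() sub-select, which is why a dialect must be set before Limit can render (ErrDialectNotSetUp otherwise). A sketch, assuming the package is imported as "github.com/go-xorm/builder"; the rendered SQL in the comments is approximate.

func pagedQueries() (string, string, error) {
	// Renders roughly: SELECT id FROM alert LIMIT 5 OFFSET 10
	mysqlSQL, err := builder.MySQL().Select("id").From("alert").Limit(5, 10).ToBoundSQL()
	if err != nil {
		return "", "", err
	}

	// The same query for MSSQL is rewritten around ROW_NUMBER() OVER (ORDER BY (SELECT 1)).
	mssqlSQL, err := builder.MsSQL().Select("id").From("alert").Limit(5, 10).ToBoundSQL()
	return mysqlSQL, mssqlSQL, err
}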

+ 99 - 11
vendor/github.com/go-xorm/builder/builder_select.go

@@ -5,13 +5,24 @@
 package builder
 
 import (
-	"errors"
 	"fmt"
 )
 
+// Select creates a select Builder
+func Select(cols ...string) *Builder {
+	builder := &Builder{cond: NewCond()}
+	return builder.Select(cols...)
+}
+
 func (b *Builder) selectWriteTo(w Writer) error {
-	if len(b.tableName) <= 0 {
-		return errors.New("no table indicated")
+	if len(b.from) <= 0 && !b.isNested {
+		return ErrNoTableName
+	}
+
+	// perform limit before writing to writer when b.dialect between ORACLE and MSSQL
+	// this avoid a duplicate writing problem in simple limit query
+	if b.limitation != nil && (b.dialect == ORACLE || b.dialect == MSSQL) {
+		return b.limitWriteTo(w)
 	}
 
 	if _, err := fmt.Fprint(w, "SELECT "); err != nil {
@@ -34,24 +45,101 @@ func (b *Builder) selectWriteTo(w Writer) error {
 		}
 	}
 
-	if _, err := fmt.Fprintf(w, " FROM %s", b.tableName); err != nil {
-		return err
+	if b.subQuery == nil {
+		if _, err := fmt.Fprint(w, " FROM ", b.from); err != nil {
+			return err
+		}
+	} else {
+		if b.cond.IsValid() && len(b.from) <= 0 {
+			return ErrUnnamedDerivedTable
+		}
+		if b.subQuery.dialect != "" && b.dialect != b.subQuery.dialect {
+			return ErrInconsistentDialect
+		}
+
+		// dialect of sub-query will inherit from the main one (if not set up)
+		if b.dialect != "" && b.subQuery.dialect == "" {
+			b.subQuery.dialect = b.dialect
+		}
+
+		switch b.subQuery.optype {
+		case selectType, unionType:
+			fmt.Fprint(w, " FROM (")
+			if err := b.subQuery.WriteTo(w); err != nil {
+				return err
+			}
+
+			if len(b.from) == 0 {
+				fmt.Fprintf(w, ")")
+			} else {
+				fmt.Fprintf(w, ") %v", b.from)
+			}
+		default:
+			return ErrUnexpectedSubQuery
+		}
 	}
 
 	for _, v := range b.joins {
-		fmt.Fprintf(w, " %s JOIN %s ON ", v.joinType, v.joinTable)
+		if _, err := fmt.Fprintf(w, " %s JOIN %s ON ", v.joinType, v.joinTable); err != nil {
+			return err
+		}
+
 		if err := v.joinCond.WriteTo(w); err != nil {
 			return err
 		}
 	}
 
-	if !b.cond.IsValid() {
-		return nil
+	if b.cond.IsValid() {
+		if _, err := fmt.Fprint(w, " WHERE "); err != nil {
+			return err
+		}
+
+		if err := b.cond.WriteTo(w); err != nil {
+			return err
+		}
 	}
 
-	if _, err := fmt.Fprint(w, " WHERE "); err != nil {
-		return err
+	if len(b.groupBy) > 0 {
+		if _, err := fmt.Fprint(w, " GROUP BY ", b.groupBy); err != nil {
+			return err
+		}
+	}
+
+	if len(b.having) > 0 {
+		if _, err := fmt.Fprint(w, " HAVING ", b.having); err != nil {
+			return err
+		}
 	}
 
-	return b.cond.WriteTo(w)
+	if len(b.orderBy) > 0 {
+		if _, err := fmt.Fprint(w, " ORDER BY ", b.orderBy); err != nil {
+			return err
+		}
+	}
+
+	if b.limitation != nil {
+		if err := b.limitWriteTo(w); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// OrderBy orderBy SQL
+func (b *Builder) OrderBy(orderBy string) *Builder {
+	b.orderBy = orderBy
+	return b
+}
+
+// GroupBy groupby SQL
+func (b *Builder) GroupBy(groupby string) *Builder {
+	b.groupBy = groupby
+	return b
+}
+
+// Having having SQL
+func (b *Builder) Having(having string) *Builder {
+	b.having = having
+	return b
 }
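
selectWriteTo now accepts a *Builder as the FROM subject, so derived tables compose with the new GroupBy, Having and OrderBy setters; a derived table needs an alias as soon as the outer query has a WHERE clause (ErrUnnamedDerivedTable), and the sub-builder inherits the outer dialect if it has none of its own. A sketch, assuming the package is imported as "github.com/go-xorm/builder" and invented names.

func noisyUsers() (string, []interface{}, error) {
	perUser := builder.Select("user_id", "count(*) AS n").
		From("login_attempt").
		GroupBy("user_id").
		Having("count(*) > 1")

	// perUser becomes a derived table; "t" is the alias required by the outer WHERE.
	return builder.Dialect(builder.MYSQL).
		Select("user_id").
		From(perUser, "t").
		Where(builder.Gt{"t.n": 10}).
		OrderBy("t.n DESC").
		ToSQL()
}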

+ 47 - 0
vendor/github.com/go-xorm/builder/builder_union.go

@@ -0,0 +1,47 @@
+// Copyright 2018 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package builder
+
+import (
+	"fmt"
+	"strings"
+)
+
+func (b *Builder) unionWriteTo(w Writer) error {
+	if b.limitation != nil || b.cond.IsValid() ||
+		b.orderBy != "" || b.having != "" || b.groupBy != "" {
+		return ErrNotUnexpectedUnionConditions
+	}
+
+	for idx, u := range b.unions {
+		current := u.builder
+		if current.optype != selectType {
+			return ErrUnsupportedUnionMembers
+		}
+
+		if len(b.unions) == 1 {
+			if err := current.selectWriteTo(w); err != nil {
+				return err
+			}
+		} else {
+			if b.dialect != "" && b.dialect != current.dialect {
+				return ErrInconsistentDialect
+			}
+
+			if idx != 0 {
+				fmt.Fprint(w, fmt.Sprintf(" UNION %v ", strings.ToUpper(u.unionType)))
+			}
+			fmt.Fprint(w, "(")
+
+			if err := current.selectWriteTo(w); err != nil {
+				return err
+			}
+
+			fmt.Fprint(w, ")")
+		}
+	}
+
+	return nil
+}
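
Union collects plain SELECT builders into a dedicated union-type Builder; conditions, ordering and limits belong on the members, not on the union itself, or unionWriteTo rejects them. A sketch, assuming the package is imported as "github.com/go-xorm/builder".

func allAnnotations() (string, []interface{}, error) {
	recent := builder.Select("id", "text").From("annotation").Where(builder.Gt{"epoch": 1500000000})
	archived := builder.Select("id", "text").From("annotation_archive")

	// Both members stay plain SELECT builders; Union wraps each in parentheses
	// and joins them with UNION ALL.
	return recent.Union("ALL", archived).ToSQL()
}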

+ 10 - 5
vendor/github.com/go-xorm/builder/builder_update.go

@@ -5,19 +5,24 @@
 package builder
 
 import (
-	"errors"
 	"fmt"
 )
 
+// Update creates an update Builder
+func Update(updates ...Eq) *Builder {
+	builder := &Builder{cond: NewCond()}
+	return builder.Update(updates...)
+}
+
 func (b *Builder) updateWriteTo(w Writer) error {
-	if len(b.tableName) <= 0 {
-		return errors.New("no table indicated")
+	if len(b.from) <= 0 {
+		return ErrNoTableName
 	}
 	if len(b.updates) <= 0 {
-		return errors.New("no column to be update")
+		return ErrNoColumnToUpdate
 	}
 
-	if _, err := fmt.Fprintf(w, "UPDATE %s SET ", b.tableName); err != nil {
+	if _, err := fmt.Fprintf(w, "UPDATE %s SET ", b.from); err != nil {
 		return err
 	}
 

+ 4 - 17
vendor/github.com/go-xorm/builder/cond.go

@@ -5,7 +5,6 @@
 package builder
 
 import (
-	"bytes"
 	"io"
 )
 
@@ -19,15 +18,15 @@ var _ Writer = NewWriter()
 
 // BytesWriter implments Writer and save SQL in bytes.Buffer
 type BytesWriter struct {
-	writer *bytes.Buffer
-	buffer []byte
+	writer *StringBuilder
 	args   []interface{}
 }
 
 // NewWriter creates a new string writer
 func NewWriter() *BytesWriter {
-	w := &BytesWriter{}
-	w.writer = bytes.NewBuffer(w.buffer)
+	w := &BytesWriter{
+		writer: &StringBuilder{},
+	}
 	return w
 }
 
@@ -73,15 +72,3 @@ func (condEmpty) Or(conds ...Cond) Cond {
 func (condEmpty) IsValid() bool {
 	return false
 }
-
-func condToSQL(cond Cond) (string, []interface{}, error) {
-	if cond == nil || !cond.IsValid() {
-		return "", nil, nil
-	}
-
-	w := NewWriter()
-	if err := cond.WriteTo(w); err != nil {
-		return "", nil, err
-	}
-	return w.writer.String(), w.args, nil
-}

+ 27 - 2
vendor/github.com/go-xorm/builder/cond_between.go

@@ -17,10 +17,35 @@ var _ Cond = Between{}
 
 // WriteTo write data to Writer
 func (between Between) WriteTo(w Writer) error {
-	if _, err := fmt.Fprintf(w, "%s BETWEEN ? AND ?", between.Col); err != nil {
+	if _, err := fmt.Fprintf(w, "%s BETWEEN ", between.Col); err != nil {
 		return err
 	}
-	w.Append(between.LessVal, between.MoreVal)
+	if lv, ok := between.LessVal.(expr); ok {
+		if err := lv.WriteTo(w); err != nil {
+			return err
+		}
+	} else {
+		if _, err := fmt.Fprint(w, "?"); err != nil {
+			return err
+		}
+		w.Append(between.LessVal)
+	}
+
+	if _, err := fmt.Fprint(w, " AND "); err != nil {
+		return err
+	}
+
+	if mv, ok := between.MoreVal.(expr); ok {
+		if err := mv.WriteTo(w); err != nil {
+			return err
+		}
+	} else {
+		if _, err := fmt.Fprint(w, "?"); err != nil {
+			return err
+		}
+		w.Append(between.MoreVal)
+	}
+
 	return nil
 }
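
Between now accepts expr values for either bound and writes them inline instead of binding a ? parameter. A sketch, assuming the package is imported as "github.com/go-xorm/builder" and that its Expr helper (not part of this hunk) is what produces an expr value.

func recentWindow() (string, []interface{}, error) {
	cond := builder.Between{
		Col:     "created_at",
		LessVal: builder.Expr("NOW() - INTERVAL 7 DAY"), // written inline, no placeholder
		MoreVal: builder.Expr("NOW()"),
	}
	return builder.ToSQL(cond)
}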
 

+ 3 - 1
vendor/github.com/go-xorm/builder/cond_or.go

@@ -27,10 +27,12 @@ func (o condOr) WriteTo(w Writer) error {
 	for i, cond := range o {
 		var needQuote bool
 		switch cond.(type) {
-		case condAnd:
+		case condAnd, expr:
 			needQuote = true
 		case Eq:
 			needQuote = (len(cond.(Eq)) > 1)
+		case Neq:
+			needQuote = (len(cond.(Neq)) > 1)
 		}
 
 		if needQuote {

+ 25 - 1
vendor/github.com/go-xorm/builder/error.go

@@ -8,9 +8,33 @@ import "errors"
 
 var (
 	// ErrNotSupportType not supported SQL type error
-	ErrNotSupportType = errors.New("not supported SQL type")
+	ErrNotSupportType = errors.New("Not supported SQL type")
 	// ErrNoNotInConditions no NOT IN params error
 	ErrNoNotInConditions = errors.New("No NOT IN conditions")
 	// ErrNoInConditions no IN params error
 	ErrNoInConditions = errors.New("No IN conditions")
+	// ErrNeedMoreArguments need more arguments
+	ErrNeedMoreArguments = errors.New("Need more sql arguments")
+	// ErrNoTableName no table name
+	ErrNoTableName = errors.New("No table indicated")
+	// ErrNoColumnToUpdate no column to update
+	ErrNoColumnToUpdate = errors.New("No column(s) to update")
+	// ErrNoColumnToInsert no column to insert
+	ErrNoColumnToInsert = errors.New("No column(s) to insert")
+	// ErrNotSupportDialectType not supported dialect type error
+	ErrNotSupportDialectType = errors.New("Not supported dialect type")
+	// ErrNotUnexpectedUnionConditions using union in a wrong way
+	ErrNotUnexpectedUnionConditions = errors.New("Unexpected conditional fields in UNION query")
+	// ErrUnsupportedUnionMembers unexpected members in UNION query
+	ErrUnsupportedUnionMembers = errors.New("Unexpected members in UNION query")
+	// ErrUnexpectedSubQuery Unexpected sub-query in SELECT query
+	ErrUnexpectedSubQuery = errors.New("Unexpected sub-query in SELECT query")
+	// ErrDialectNotSetUp dialect is not setup yet
+	ErrDialectNotSetUp = errors.New("Dialect is not setup yet, try to use `Dialect(dbType)` at first")
+	// ErrInvalidLimitation offset or limit is not correct
+	ErrInvalidLimitation = errors.New("Offset or limit is not correct")
+	// ErrUnnamedDerivedTable Every derived table must have its own alias
+	ErrUnnamedDerivedTable = errors.New("Every derived table must have its own alias")
+	// ErrInconsistentDialect Inconsistent dialect in same builder
+	ErrInconsistentDialect = errors.New("Inconsistent dialect in same builder")
 )

+ 156 - 0
vendor/github.com/go-xorm/builder/sql.go

@@ -0,0 +1,156 @@
+// Copyright 2018 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package builder
+
+import (
+	sql2 "database/sql"
+	"fmt"
+	"reflect"
+	"time"
+)
+
+func condToSQL(cond Cond) (string, []interface{}, error) {
+	if cond == nil || !cond.IsValid() {
+		return "", nil, nil
+	}
+
+	w := NewWriter()
+	if err := cond.WriteTo(w); err != nil {
+		return "", nil, err
+	}
+	return w.writer.String(), w.args, nil
+}
+
+func condToBoundSQL(cond Cond) (string, error) {
+	if cond == nil || !cond.IsValid() {
+		return "", nil
+	}
+
+	w := NewWriter()
+	if err := cond.WriteTo(w); err != nil {
+		return "", err
+	}
+	return ConvertToBoundSQL(w.writer.String(), w.args)
+}
+
+// ToSQL convert a builder or conditions to SQL and args
+func ToSQL(cond interface{}) (string, []interface{}, error) {
+	switch cond.(type) {
+	case Cond:
+		return condToSQL(cond.(Cond))
+	case *Builder:
+		return cond.(*Builder).ToSQL()
+	}
+	return "", nil, ErrNotSupportType
+}
+
+// ToBoundSQL convert a builder or conditions to parameters bound SQL
+func ToBoundSQL(cond interface{}) (string, error) {
+	switch cond.(type) {
+	case Cond:
+		return condToBoundSQL(cond.(Cond))
+	case *Builder:
+		return cond.(*Builder).ToBoundSQL()
+	}
+	return "", ErrNotSupportType
+}
+
+func noSQLQuoteNeeded(a interface{}) bool {
+	switch a.(type) {
+	case int, int8, int16, int32, int64:
+		return true
+	case uint, uint8, uint16, uint32, uint64:
+		return true
+	case float32, float64:
+		return true
+	case bool:
+		return true
+	case string:
+		return false
+	case time.Time, *time.Time:
+		return false
+	}
+
+	t := reflect.TypeOf(a)
+	switch t.Kind() {
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		return true
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+		return true
+	case reflect.Float32, reflect.Float64:
+		return true
+	case reflect.Bool:
+		return true
+	case reflect.String:
+		return false
+	}
+
+	return false
+}
+
+// ConvertToBoundSQL will convert SQL and args to a bound SQL
+func ConvertToBoundSQL(sql string, args []interface{}) (string, error) {
+	buf := StringBuilder{}
+	var i, j, start int
+	for ; i < len(sql); i++ {
+		if sql[i] == '?' {
+			_, err := buf.WriteString(sql[start:i])
+			if err != nil {
+				return "", err
+			}
+			start = i + 1
+
+			if len(args) == j {
+				return "", ErrNeedMoreArguments
+			}
+
+			arg := args[j]
+			if namedArg, ok := arg.(sql2.NamedArg); ok {
+				arg = namedArg.Value
+			}
+
+			if noSQLQuoteNeeded(arg) {
+				_, err = fmt.Fprint(&buf, arg)
+			} else {
+				_, err = fmt.Fprintf(&buf, "'%v'", arg)
+			}
+			if err != nil {
+				return "", err
+			}
+			j = j + 1
+		}
+	}
+	_, err := buf.WriteString(sql[start:])
+	if err != nil {
+		return "", err
+	}
+	return buf.String(), nil
+}
+
+// ConvertPlaceholder replaces ? with $1, $2 ... or :1, :2 ... according to the prefix
+func ConvertPlaceholder(sql, prefix string) (string, error) {
+	buf := StringBuilder{}
+	var i, j, start int
+	for ; i < len(sql); i++ {
+		if sql[i] == '?' {
+			if _, err := buf.WriteString(sql[start:i]); err != nil {
+				return "", err
+			}
+
+			start = i + 1
+			j = j + 1
+
+			if _, err := buf.WriteString(fmt.Sprintf("%v%d", prefix, j)); err != nil {
+				return "", err
+			}
+		}
+	}
+
+	if _, err := buf.WriteString(sql[start:]); err != nil {
+		return "", err
+	}
+
+	return buf.String(), nil
+}
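
For reference, a minimal sketch of how the new bound-SQL helpers can be exercised. `builder.Eq` is assumed from the same package, and the table/column names are purely illustrative; the exact output order of an `Eq` depends on map iteration.

```go
package main

import (
	"fmt"

	"github.com/go-xorm/builder"
)

func main() {
	// ToBoundSQL inlines the arguments into the statement; noSQLQuoteNeeded
	// decides whether each value is quoted.
	bound, err := builder.ToBoundSQL(builder.Eq{"org_id": 1, "slug": "home"})
	if err != nil {
		panic(err)
	}
	fmt.Println(bound) // e.g. org_id=1 AND slug='home'

	// ConvertPlaceholder rewrites ? markers into numbered placeholders,
	// e.g. PostgreSQL-style $1, $2 parameters.
	pg, err := builder.ConvertPlaceholder("SELECT id FROM dashboard WHERE org_id=? AND slug=?", "$")
	if err != nil {
		panic(err)
	}
	fmt.Println(pg) // SELECT id FROM dashboard WHERE org_id=$1 AND slug=$2
}
```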

+ 119 - 0
vendor/github.com/go-xorm/builder/string_builder.go

@@ -0,0 +1,119 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package builder
+
+import (
+	"unicode/utf8"
+	"unsafe"
+)
+
+// A StringBuilder is used to efficiently build a string using Write methods.
+// It minimizes memory copying. The zero value is ready to use.
+// Do not copy a non-zero Builder.
+type StringBuilder struct {
+	addr *StringBuilder // of receiver, to detect copies by value
+	buf  []byte
+}
+
+// noescape hides a pointer from escape analysis.  noescape is
+// the identity function but escape analysis doesn't think the
+// output depends on the input. noescape is inlined and currently
+// compiles down to zero instructions.
+// USE CAREFULLY!
+// This was copied from the runtime; see issues 23382 and 7921.
+//go:nosplit
+func noescape(p unsafe.Pointer) unsafe.Pointer {
+	x := uintptr(p)
+	return unsafe.Pointer(x ^ 0)
+}
+
+func (b *StringBuilder) copyCheck() {
+	if b.addr == nil {
+		// This hack works around a failing of Go's escape analysis
+		// that was causing b to escape and be heap allocated.
+		// See issue 23382.
+		// TODO: once issue 7921 is fixed, this should be reverted to
+		// just "b.addr = b".
+		b.addr = (*StringBuilder)(noescape(unsafe.Pointer(b)))
+	} else if b.addr != b {
+		panic("strings: illegal use of non-zero Builder copied by value")
+	}
+}
+
+// String returns the accumulated string.
+func (b *StringBuilder) String() string {
+	return *(*string)(unsafe.Pointer(&b.buf))
+}
+
+// Len returns the number of accumulated bytes; b.Len() == len(b.String()).
+func (b *StringBuilder) Len() int { return len(b.buf) }
+
+// Reset resets the Builder to be empty.
+func (b *StringBuilder) Reset() {
+	b.addr = nil
+	b.buf = nil
+}
+
+// grow copies the buffer to a new, larger buffer so that there are at least n
+// bytes of capacity beyond len(b.buf).
+func (b *StringBuilder) grow(n int) {
+	buf := make([]byte, len(b.buf), 2*cap(b.buf)+n)
+	copy(buf, b.buf)
+	b.buf = buf
+}
+
+// Grow grows b's capacity, if necessary, to guarantee space for
+// another n bytes. After Grow(n), at least n bytes can be written to b
+// without another allocation. If n is negative, Grow panics.
+func (b *StringBuilder) Grow(n int) {
+	b.copyCheck()
+	if n < 0 {
+		panic("strings.Builder.Grow: negative count")
+	}
+	if cap(b.buf)-len(b.buf) < n {
+		b.grow(n)
+	}
+}
+
+// Write appends the contents of p to b's buffer.
+// Write always returns len(p), nil.
+func (b *StringBuilder) Write(p []byte) (int, error) {
+	b.copyCheck()
+	b.buf = append(b.buf, p...)
+	return len(p), nil
+}
+
+// WriteByte appends the byte c to b's buffer.
+// The returned error is always nil.
+func (b *StringBuilder) WriteByte(c byte) error {
+	b.copyCheck()
+	b.buf = append(b.buf, c)
+	return nil
+}
+
+// WriteRune appends the UTF-8 encoding of Unicode code point r to b's buffer.
+// It returns the length of r and a nil error.
+func (b *StringBuilder) WriteRune(r rune) (int, error) {
+	b.copyCheck()
+	if r < utf8.RuneSelf {
+		b.buf = append(b.buf, byte(r))
+		return 1, nil
+	}
+	l := len(b.buf)
+	if cap(b.buf)-l < utf8.UTFMax {
+		b.grow(utf8.UTFMax)
+	}
+	n := utf8.EncodeRune(b.buf[l:l+utf8.UTFMax], r)
+	b.buf = b.buf[:l+n]
+	return n, nil
+}
+
+// WriteString appends the contents of s to b's buffer.
+// It returns the length of s and a nil error.
+func (b *StringBuilder) WriteString(s string) (int, error) {
+	b.copyCheck()
+	b.buf = append(b.buf, s...)
+	return len(s), nil
+}
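
The vendored StringBuilder mirrors the standard library's strings.Builder; a quick sketch of the intended use (the zero value is ready, and a non-zero builder must not be copied by value):

```go
package main

import (
	"fmt"

	"github.com/go-xorm/builder"
)

func main() {
	var b builder.StringBuilder // zero value is ready to use
	b.Grow(64)                  // reserve capacity up front to avoid re-growing
	b.WriteString("SELECT ")
	b.WriteString("1")
	fmt.Println(b.String(), b.Len()) // SELECT 1 8
}
```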

+ 8 - 4
vendor/github.com/go-xorm/core/cache.go

@@ -1,11 +1,16 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
+	"bytes"
+	"encoding/gob"
 	"errors"
 	"fmt"
+	"strings"
 	"time"
-	"bytes"
-	"encoding/gob"
 )
 
 const (
@@ -55,11 +60,10 @@ func encodeIds(ids []PK) (string, error) {
 	return buf.String(), err
 }
 
-
 func decodeIds(s string) ([]PK, error) {
 	pks := make([]PK, 0)
 
-	dec := gob.NewDecoder(bytes.NewBufferString(s))
+	dec := gob.NewDecoder(strings.NewReader(s))
 	err := dec.Decode(&pks)
 
 	return pks, err

+ 19 - 12
vendor/github.com/go-xorm/core/column.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -41,6 +45,7 @@ type Column struct {
 	Comment         string
 }
 
+// NewColumn creates a new column
 func NewColumn(name, fieldName string, sqlType SQLType, len1, len2 int, nullable bool) *Column {
 	return &Column{
 		Name:            name,
@@ -66,7 +71,7 @@ func NewColumn(name, fieldName string, sqlType SQLType, len1, len2 int, nullable
 	}
 }
 
-// generate column description string according dialect
+// String generates the column description string according to the dialect
 func (col *Column) String(d Dialect) string {
 	sql := d.QuoteStr() + col.Name + d.QuoteStr() + " "
 
@@ -79,6 +84,10 @@ func (col *Column) String(d Dialect) string {
 		}
 	}
 
+	if col.Default != "" {
+		sql += "DEFAULT " + col.Default + " "
+	}
+
 	if d.ShowCreateNull() {
 		if col.Nullable {
 			sql += "NULL "
@@ -87,18 +96,19 @@ func (col *Column) String(d Dialect) string {
 		}
 	}
 
-	if col.Default != "" {
-		sql += "DEFAULT " + col.Default + " "
-	}
-
 	return sql
 }
 
+// StringNoPk generates the column description string according to the dialect, without primary keys
 func (col *Column) StringNoPk(d Dialect) string {
 	sql := d.QuoteStr() + col.Name + d.QuoteStr() + " "
 
 	sql += d.SqlType(col) + " "
 
+	if col.Default != "" {
+		sql += "DEFAULT " + col.Default + " "
+	}
+
 	if d.ShowCreateNull() {
 		if col.Nullable {
 			sql += "NULL "
@@ -107,19 +117,16 @@ func (col *Column) StringNoPk(d Dialect) string {
 		}
 	}
 
-	if col.Default != "" {
-		sql += "DEFAULT " + col.Default + " "
-	}
-
 	return sql
 }
 
-// return col's filed of struct's value
+// ValueOf returns column's field of struct's value
 func (col *Column) ValueOf(bean interface{}) (*reflect.Value, error) {
 	dataStruct := reflect.Indirect(reflect.ValueOf(bean))
 	return col.ValueOfV(&dataStruct)
 }
 
+// ValueOfV returns column's field of struct's value, accepting a reflect value
 func (col *Column) ValueOfV(dataStruct *reflect.Value) (*reflect.Value, error) {
 	var fieldValue reflect.Value
 	fieldPath := strings.Split(col.FieldName, ".")
@@ -147,12 +154,12 @@ func (col *Column) ValueOfV(dataStruct *reflect.Value) (*reflect.Value, error) {
 			}
 			fieldValue = fieldValue.Elem().FieldByName(fieldPath[i+1])
 		} else {
-			return nil, fmt.Errorf("field  %v is not valid", col.FieldName)
+			return nil, fmt.Errorf("field %v is not valid", col.FieldName)
 		}
 	}
 
 	if !fieldValue.IsValid() {
-		return nil, fmt.Errorf("field  %v is not valid", col.FieldName)
+		return nil, fmt.Errorf("field %v is not valid", col.FieldName)
 	}
 
 	return &fieldValue, nil

+ 4 - 0
vendor/github.com/go-xorm/core/converstion.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 // Conversion is an interface. A type implements Conversion will according

+ 88 - 233
vendor/github.com/go-xorm/core/db.go

@@ -1,12 +1,21 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
+	"context"
 	"database/sql"
 	"database/sql/driver"
-	"errors"
 	"fmt"
 	"reflect"
 	"regexp"
+	"sync"
+)
+
+var (
+	DefaultCacheSize = 200
 )
 
 func MapToSlice(query string, mp interface{}) (string, []interface{}, error) {
@@ -58,189 +67,129 @@ func StructToSlice(query string, st interface{}) (string, []interface{}, error)
 	return query, args, nil
 }
 
+type cacheStruct struct {
+	value reflect.Value
+	idx   int
+}
+
+// DB is a wrap of sql.DB with extra contents
 type DB struct {
 	*sql.DB
-	Mapper IMapper
+	Mapper            IMapper
+	reflectCache      map[reflect.Type]*cacheStruct
+	reflectCacheMutex sync.RWMutex
 }
 
+// Open opens a database
 func Open(driverName, dataSourceName string) (*DB, error) {
 	db, err := sql.Open(driverName, dataSourceName)
 	if err != nil {
 		return nil, err
 	}
-	return &DB{db, NewCacheMapper(&SnakeMapper{})}, nil
+	return &DB{
+		DB:           db,
+		Mapper:       NewCacheMapper(&SnakeMapper{}),
+		reflectCache: make(map[reflect.Type]*cacheStruct),
+	}, nil
 }
 
+// FromDB creates a DB from a sql.DB
 func FromDB(db *sql.DB) *DB {
-	return &DB{db, NewCacheMapper(&SnakeMapper{})}
-}
-
-func (db *DB) Query(query string, args ...interface{}) (*Rows, error) {
-	rows, err := db.DB.Query(query, args...)
-	if err != nil {
-		if rows != nil {
-			rows.Close()
-		}
-		return nil, err
+	return &DB{
+		DB:           db,
+		Mapper:       NewCacheMapper(&SnakeMapper{}),
+		reflectCache: make(map[reflect.Type]*cacheStruct),
 	}
-	return &Rows{rows, db.Mapper}, nil
 }
 
-func (db *DB) QueryMap(query string, mp interface{}) (*Rows, error) {
-	query, args, err := MapToSlice(query, mp)
-	if err != nil {
-		return nil, err
+func (db *DB) reflectNew(typ reflect.Type) reflect.Value {
+	db.reflectCacheMutex.Lock()
+	defer db.reflectCacheMutex.Unlock()
+	cs, ok := db.reflectCache[typ]
+	if !ok || cs.idx+1 > DefaultCacheSize-1 {
+		cs = &cacheStruct{reflect.MakeSlice(reflect.SliceOf(typ), DefaultCacheSize, DefaultCacheSize), 0}
+		db.reflectCache[typ] = cs
+	} else {
+		cs.idx = cs.idx + 1
 	}
-	return db.Query(query, args...)
+	return cs.value.Index(cs.idx).Addr()
 }
 
-func (db *DB) QueryStruct(query string, st interface{}) (*Rows, error) {
-	query, args, err := StructToSlice(query, st)
+// QueryContext overwrites sql.DB.QueryContext
+func (db *DB) QueryContext(ctx context.Context, query string, args ...interface{}) (*Rows, error) {
+	rows, err := db.DB.QueryContext(ctx, query, args...)
 	if err != nil {
+		if rows != nil {
+			rows.Close()
+		}
 		return nil, err
 	}
-	return db.Query(query, args...)
+	return &Rows{rows, db}, nil
 }
 
-func (db *DB) QueryRow(query string, args ...interface{}) *Row {
-	rows, err := db.Query(query, args...)
-	if err != nil {
-		return &Row{nil, err}
-	}
-	return &Row{rows, nil}
+// Query overwrites sql.DB.Query
+func (db *DB) Query(query string, args ...interface{}) (*Rows, error) {
+	return db.QueryContext(context.Background(), query, args...)
 }
 
-func (db *DB) QueryRowMap(query string, mp interface{}) *Row {
+func (db *DB) QueryMapContext(ctx context.Context, query string, mp interface{}) (*Rows, error) {
 	query, args, err := MapToSlice(query, mp)
 	if err != nil {
-		return &Row{nil, err}
-	}
-	return db.QueryRow(query, args...)
-}
-
-func (db *DB) QueryRowStruct(query string, st interface{}) *Row {
-	query, args, err := StructToSlice(query, st)
-	if err != nil {
-		return &Row{nil, err}
+		return nil, err
 	}
-	return db.QueryRow(query, args...)
+	return db.QueryContext(ctx, query, args...)
 }
 
-type Stmt struct {
-	*sql.Stmt
-	Mapper IMapper
-	names  map[string]int
+func (db *DB) QueryMap(query string, mp interface{}) (*Rows, error) {
+	return db.QueryMapContext(context.Background(), query, mp)
 }
 
-func (db *DB) Prepare(query string) (*Stmt, error) {
-	names := make(map[string]int)
-	var i int
-	query = re.ReplaceAllStringFunc(query, func(src string) string {
-		names[src[1:]] = i
-		i += 1
-		return "?"
-	})
-
-	stmt, err := db.DB.Prepare(query)
+func (db *DB) QueryStructContext(ctx context.Context, query string, st interface{}) (*Rows, error) {
+	query, args, err := StructToSlice(query, st)
 	if err != nil {
 		return nil, err
 	}
-	return &Stmt{stmt, db.Mapper, names}, nil
-}
-
-func (s *Stmt) ExecMap(mp interface{}) (sql.Result, error) {
-	vv := reflect.ValueOf(mp)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
-		return nil, errors.New("mp should be a map's pointer")
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
-	}
-	return s.Stmt.Exec(args...)
+	return db.QueryContext(ctx, query, args...)
 }
 
-func (s *Stmt) ExecStruct(st interface{}) (sql.Result, error) {
-	vv := reflect.ValueOf(st)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
-		return nil, errors.New("mp should be a map's pointer")
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().FieldByName(k).Interface()
-	}
-	return s.Stmt.Exec(args...)
+func (db *DB) QueryStruct(query string, st interface{}) (*Rows, error) {
+	return db.QueryStructContext(context.Background(), query, st)
 }
 
-func (s *Stmt) Query(args ...interface{}) (*Rows, error) {
-	rows, err := s.Stmt.Query(args...)
+func (db *DB) QueryRowContext(ctx context.Context, query string, args ...interface{}) *Row {
+	rows, err := db.QueryContext(ctx, query, args...)
 	if err != nil {
-		return nil, err
+		return &Row{nil, err}
 	}
-	return &Rows{rows, s.Mapper}, nil
+	return &Row{rows, nil}
 }
 
-func (s *Stmt) QueryMap(mp interface{}) (*Rows, error) {
-	vv := reflect.ValueOf(mp)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
-		return nil, errors.New("mp should be a map's pointer")
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
-	}
-
-	return s.Query(args...)
+func (db *DB) QueryRow(query string, args ...interface{}) *Row {
+	return db.QueryRowContext(context.Background(), query, args...)
 }
 
-func (s *Stmt) QueryStruct(st interface{}) (*Rows, error) {
-	vv := reflect.ValueOf(st)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
-		return nil, errors.New("mp should be a map's pointer")
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().FieldByName(k).Interface()
+func (db *DB) QueryRowMapContext(ctx context.Context, query string, mp interface{}) *Row {
+	query, args, err := MapToSlice(query, mp)
+	if err != nil {
+		return &Row{nil, err}
 	}
-
-	return s.Query(args...)
+	return db.QueryRowContext(ctx, query, args...)
 }
 
-func (s *Stmt) QueryRow(args ...interface{}) *Row {
-	rows, err := s.Query(args...)
-	return &Row{rows, err}
+func (db *DB) QueryRowMap(query string, mp interface{}) *Row {
+	return db.QueryRowMapContext(context.Background(), query, mp)
 }
 
-func (s *Stmt) QueryRowMap(mp interface{}) *Row {
-	vv := reflect.ValueOf(mp)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
-		return &Row{nil, errors.New("mp should be a map's pointer")}
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
+func (db *DB) QueryRowStructContext(ctx context.Context, query string, st interface{}) *Row {
+	query, args, err := StructToSlice(query, st)
+	if err != nil {
+		return &Row{nil, err}
 	}
-
-	return s.QueryRow(args...)
+	return db.QueryRowContext(ctx, query, args...)
 }
 
-func (s *Stmt) QueryRowStruct(st interface{}) *Row {
-	vv := reflect.ValueOf(st)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
-		return &Row{nil, errors.New("st should be a struct's pointer")}
-	}
-
-	args := make([]interface{}, len(s.names))
-	for k, i := range s.names {
-		args[i] = vv.Elem().FieldByName(k).Interface()
-	}
-
-	return s.QueryRow(args...)
+func (db *DB) QueryRowStruct(query string, st interface{}) *Row {
+	return db.QueryRowStructContext(context.Background(), query, st)
 }
 
 var (
@@ -249,120 +198,26 @@ var (
 
 // insert into (name) values (?)
 // insert into (name) values (?name)
-func (db *DB) ExecMap(query string, mp interface{}) (sql.Result, error) {
-	query, args, err := MapToSlice(query, mp)
-	if err != nil {
-		return nil, err
-	}
-	return db.DB.Exec(query, args...)
-}
-
-func (db *DB) ExecStruct(query string, st interface{}) (sql.Result, error) {
-	query, args, err := StructToSlice(query, st)
-	if err != nil {
-		return nil, err
-	}
-	return db.DB.Exec(query, args...)
-}
-
-type EmptyScanner struct {
-}
-
-func (EmptyScanner) Scan(src interface{}) error {
-	return nil
-}
-
-type Tx struct {
-	*sql.Tx
-	Mapper IMapper
-}
-
-func (db *DB) Begin() (*Tx, error) {
-	tx, err := db.DB.Begin()
-	if err != nil {
-		return nil, err
-	}
-	return &Tx{tx, db.Mapper}, nil
-}
-
-func (tx *Tx) Prepare(query string) (*Stmt, error) {
-	names := make(map[string]int)
-	var i int
-	query = re.ReplaceAllStringFunc(query, func(src string) string {
-		names[src[1:]] = i
-		i += 1
-		return "?"
-	})
-
-	stmt, err := tx.Tx.Prepare(query)
-	if err != nil {
-		return nil, err
-	}
-	return &Stmt{stmt, tx.Mapper, names}, nil
-}
-
-func (tx *Tx) Stmt(stmt *Stmt) *Stmt {
-	// TODO:
-	return stmt
-}
-
-func (tx *Tx) ExecMap(query string, mp interface{}) (sql.Result, error) {
+func (db *DB) ExecMapContext(ctx context.Context, query string, mp interface{}) (sql.Result, error) {
 	query, args, err := MapToSlice(query, mp)
 	if err != nil {
 		return nil, err
 	}
-	return tx.Tx.Exec(query, args...)
+	return db.DB.ExecContext(ctx, query, args...)
 }
 
-func (tx *Tx) ExecStruct(query string, st interface{}) (sql.Result, error) {
-	query, args, err := StructToSlice(query, st)
-	if err != nil {
-		return nil, err
-	}
-	return tx.Tx.Exec(query, args...)
-}
-
-func (tx *Tx) Query(query string, args ...interface{}) (*Rows, error) {
-	rows, err := tx.Tx.Query(query, args...)
-	if err != nil {
-		return nil, err
-	}
-	return &Rows{rows, tx.Mapper}, nil
-}
-
-func (tx *Tx) QueryMap(query string, mp interface{}) (*Rows, error) {
-	query, args, err := MapToSlice(query, mp)
-	if err != nil {
-		return nil, err
-	}
-	return tx.Query(query, args...)
+func (db *DB) ExecMap(query string, mp interface{}) (sql.Result, error) {
+	return db.ExecMapContext(context.Background(), query, mp)
 }
 
-func (tx *Tx) QueryStruct(query string, st interface{}) (*Rows, error) {
+func (db *DB) ExecStructContext(ctx context.Context, query string, st interface{}) (sql.Result, error) {
 	query, args, err := StructToSlice(query, st)
 	if err != nil {
 		return nil, err
 	}
-	return tx.Query(query, args...)
-}
-
-func (tx *Tx) QueryRow(query string, args ...interface{}) *Row {
-	rows, err := tx.Query(query, args...)
-	return &Row{rows, err}
+	return db.DB.ExecContext(ctx, query, args...)
 }
 
-func (tx *Tx) QueryRowMap(query string, mp interface{}) *Row {
-	query, args, err := MapToSlice(query, mp)
-	if err != nil {
-		return &Row{nil, err}
-	}
-	return tx.QueryRow(query, args...)
-}
-
-func (tx *Tx) QueryRowStruct(query string, st interface{}) *Row {
-	query, args, err := StructToSlice(query, st)
-	if err != nil {
-		return &Row{nil, err}
-	}
-	return tx.QueryRow(query, args...)
+func (db *DB) ExecStruct(query string, st interface{}) (sql.Result, error) {
+	return db.ExecStructContext(context.Background(), query, st)
 }
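
A sketch of the reworked core.DB wrapper and its context-aware variants. The sqlite3 driver import, database file and dashboard table are assumptions for illustration only:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/go-xorm/core"
	_ "github.com/mattn/go-sqlite3" // any registered driver works; sqlite3 is an assumption
)

func main() {
	db, err := core.Open("sqlite3", "./grafana.db")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	// Named ?org_id placeholders are expanded via MapToSlice before the query runs;
	// the argument must be a pointer to a map keyed by placeholder name.
	rows, err := db.QueryMapContext(ctx, "SELECT id, title FROM dashboard WHERE org_id=?org_id",
		&map[string]interface{}{"org_id": 1})
	if err != nil {
		panic(err)
	}
	defer rows.Close()

	for rows.Next() {
		m := make(map[string]interface{})
		if err := rows.ScanMap(&m); err != nil {
			panic(err)
		}
		fmt.Println(m)
	}
}
```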

+ 10 - 1
vendor/github.com/go-xorm/core/dialect.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -74,6 +78,7 @@ type Dialect interface {
 	GetIndexes(tableName string) (map[string]*Index, error)
 
 	Filters() []Filter
+	SetParams(params map[string]string)
 }
 
 func OpenDialect(dialect Dialect) (*DB, error) {
@@ -148,7 +153,8 @@ func (db *Base) SupportDropIfExists() bool {
 }
 
 func (db *Base) DropTableSql(tableName string) string {
-	return fmt.Sprintf("DROP TABLE IF EXISTS `%s`", tableName)
+	quote := db.dialect.Quote
+	return fmt.Sprintf("DROP TABLE IF EXISTS %s", quote(tableName))
 }
 
 func (db *Base) HasRecords(query string, args ...interface{}) (bool, error) {
@@ -289,6 +295,9 @@ func (b *Base) LogSQL(sql string, args []interface{}) {
 	}
 }
 
+func (b *Base) SetParams(params map[string]string) {
+}
+
 var (
 	dialects = map[string]func() Dialect{}
 )

+ 4 - 0
vendor/github.com/go-xorm/core/driver.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 type Driver interface {

+ 4 - 0
vendor/github.com/go-xorm/core/error.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import "errors"

+ 7 - 3
vendor/github.com/go-xorm/core/filter.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -37,9 +41,9 @@ func (q *Quoter) Quote(content string) string {
 func (i *IdFilter) Do(sql string, dialect Dialect, table *Table) string {
 	quoter := NewQuoter(dialect)
 	if table != nil && len(table.PrimaryKeys) == 1 {
-		sql = strings.Replace(sql, "`(id)`", quoter.Quote(table.PrimaryKeys[0]), -1)
-		sql = strings.Replace(sql, quoter.Quote("(id)"), quoter.Quote(table.PrimaryKeys[0]), -1)
-		return strings.Replace(sql, "(id)", quoter.Quote(table.PrimaryKeys[0]), -1)
+		sql = strings.Replace(sql, " `(id)` ", " "+quoter.Quote(table.PrimaryKeys[0])+" ", -1)
+		sql = strings.Replace(sql, " "+quoter.Quote("(id)")+" ", " "+quoter.Quote(table.PrimaryKeys[0])+" ", -1)
+		return strings.Replace(sql, " (id) ", " "+quoter.Quote(table.PrimaryKeys[0])+" ", -1)
 	}
 	return sql
 }

+ 4 - 0
vendor/github.com/go-xorm/core/ilogger.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 type LogLevel int

+ 14 - 4
vendor/github.com/go-xorm/core/index.go

@@ -1,8 +1,11 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
 	"fmt"
-	"sort"
 	"strings"
 )
 
@@ -22,6 +25,8 @@ type Index struct {
 func (index *Index) XName(tableName string) string {
 	if !strings.HasPrefix(index.Name, "UQE_") &&
 		!strings.HasPrefix(index.Name, "IDX_") {
+		tableName = strings.Replace(tableName, `"`, "", -1)
+		tableName = strings.Replace(tableName, `.`, "_", -1)
 		if index.Type == UniqueType {
 			return fmt.Sprintf("UQE_%v_%v", tableName, index.Name)
 		}
@@ -44,11 +49,16 @@ func (index *Index) Equal(dst *Index) bool {
 	if len(index.Cols) != len(dst.Cols) {
 		return false
 	}
-	sort.StringSlice(index.Cols).Sort()
-	sort.StringSlice(dst.Cols).Sort()
 
 	for i := 0; i < len(index.Cols); i++ {
-		if index.Cols[i] != dst.Cols[i] {
+		var found bool
+		for j := 0; j < len(dst.Cols); j++ {
+			if index.Cols[i] == dst.Cols[j] {
+				found = true
+				break
+			}
+		}
+		if !found {
 			return false
 		}
 	}

+ 4 - 0
vendor/github.com/go-xorm/core/mapper.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (

+ 4 - 0
vendor/github.com/go-xorm/core/pk.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (

+ 7 - 61
vendor/github.com/go-xorm/core/rows.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -9,7 +13,7 @@ import (
 
 type Rows struct {
 	*sql.Rows
-	Mapper IMapper
+	db *DB
 }
 
 func (rs *Rows) ToMapString() ([]map[string]string, error) {
@@ -105,7 +109,7 @@ func (rs *Rows) ScanStructByName(dest interface{}) error {
 	newDest := make([]interface{}, len(cols))
 	var v EmptyScanner
 	for j, name := range cols {
-		f := fieldByName(vv.Elem(), rs.Mapper.Table2Obj(name))
+		f := fieldByName(vv.Elem(), rs.db.Mapper.Table2Obj(name))
 		if f.IsValid() {
 			newDest[j] = f.Addr().Interface()
 		} else {
@@ -116,36 +120,6 @@ func (rs *Rows) ScanStructByName(dest interface{}) error {
 	return rs.Rows.Scan(newDest...)
 }
 
-type cacheStruct struct {
-	value reflect.Value
-	idx   int
-}
-
-var (
-	reflectCache      = make(map[reflect.Type]*cacheStruct)
-	reflectCacheMutex sync.RWMutex
-)
-
-func ReflectNew(typ reflect.Type) reflect.Value {
-	reflectCacheMutex.RLock()
-	cs, ok := reflectCache[typ]
-	reflectCacheMutex.RUnlock()
-
-	const newSize = 200
-
-	if !ok || cs.idx+1 > newSize-1 {
-		cs = &cacheStruct{reflect.MakeSlice(reflect.SliceOf(typ), newSize, newSize), 0}
-		reflectCacheMutex.Lock()
-		reflectCache[typ] = cs
-		reflectCacheMutex.Unlock()
-	} else {
-		reflectCacheMutex.Lock()
-		cs.idx = cs.idx + 1
-		reflectCacheMutex.Unlock()
-	}
-	return cs.value.Index(cs.idx).Addr()
-}
-
 // scan data to a slice's pointer, slice's length should equal to columns' number
 func (rs *Rows) ScanSlice(dest interface{}) error {
 	vv := reflect.ValueOf(dest)
@@ -197,9 +171,7 @@ func (rs *Rows) ScanMap(dest interface{}) error {
 	vvv := vv.Elem()
 
 	for i, _ := range cols {
-		newDest[i] = ReflectNew(vvv.Type().Elem()).Interface()
-		//v := reflect.New(vvv.Type().Elem())
-		//newDest[i] = v.Interface()
+		newDest[i] = rs.db.reflectNew(vvv.Type().Elem()).Interface()
 	}
 
 	err = rs.Rows.Scan(newDest...)
@@ -215,32 +187,6 @@ func (rs *Rows) ScanMap(dest interface{}) error {
 	return nil
 }
 
-/*func (rs *Rows) ScanMap(dest interface{}) error {
-	vv := reflect.ValueOf(dest)
-	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
-		return errors.New("dest should be a map's pointer")
-	}
-
-	cols, err := rs.Columns()
-	if err != nil {
-		return err
-	}
-
-	newDest := make([]interface{}, len(cols))
-	err = rs.ScanSlice(newDest)
-	if err != nil {
-		return err
-	}
-
-	vvv := vv.Elem()
-
-	for i, name := range cols {
-		vname := reflect.ValueOf(name)
-		vvv.SetMapIndex(vname, reflect.ValueOf(newDest[i]).Elem())
-	}
-
-	return nil
-}*/
 type Row struct {
 	rows *Rows
 	// One of these two will be non-nil:

+ 14 - 0
vendor/github.com/go-xorm/core/scan.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -44,9 +48,19 @@ func convertTime(dest *NullTime, src interface{}) error {
 		}
 		*dest = NullTime(t)
 		return nil
+	case time.Time:
+		*dest = NullTime(s)
+		return nil
 	case nil:
 	default:
 		return fmt.Errorf("unsupported driver -> Scan pair: %T -> %T", src, dest)
 	}
 	return nil
 }
+
+type EmptyScanner struct {
+}
+
+func (EmptyScanner) Scan(src interface{}) error {
+	return nil
+}

+ 165 - 0
vendor/github.com/go-xorm/core/stmt.go

@@ -0,0 +1,165 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+	"context"
+	"database/sql"
+	"errors"
+	"reflect"
+)
+
+type Stmt struct {
+	*sql.Stmt
+	db    *DB
+	names map[string]int
+}
+
+func (db *DB) PrepareContext(ctx context.Context, query string) (*Stmt, error) {
+	names := make(map[string]int)
+	var i int
+	query = re.ReplaceAllStringFunc(query, func(src string) string {
+		names[src[1:]] = i
+		i += 1
+		return "?"
+	})
+
+	stmt, err := db.DB.PrepareContext(ctx, query)
+	if err != nil {
+		return nil, err
+	}
+	return &Stmt{stmt, db, names}, nil
+}
+
+func (db *DB) Prepare(query string) (*Stmt, error) {
+	return db.PrepareContext(context.Background(), query)
+}
+
+func (s *Stmt) ExecMapContext(ctx context.Context, mp interface{}) (sql.Result, error) {
+	vv := reflect.ValueOf(mp)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
+		return nil, errors.New("mp should be a map's pointer")
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
+	}
+	return s.Stmt.ExecContext(ctx, args...)
+}
+
+func (s *Stmt) ExecMap(mp interface{}) (sql.Result, error) {
+	return s.ExecMapContext(context.Background(), mp)
+}
+
+func (s *Stmt) ExecStructContext(ctx context.Context, st interface{}) (sql.Result, error) {
+	vv := reflect.ValueOf(st)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
+		return nil, errors.New("mp should be a map's pointer")
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().FieldByName(k).Interface()
+	}
+	return s.Stmt.ExecContext(ctx, args...)
+}
+
+func (s *Stmt) ExecStruct(st interface{}) (sql.Result, error) {
+	return s.ExecStructContext(context.Background(), st)
+}
+
+func (s *Stmt) QueryContext(ctx context.Context, args ...interface{}) (*Rows, error) {
+	rows, err := s.Stmt.QueryContext(ctx, args...)
+	if err != nil {
+		return nil, err
+	}
+	return &Rows{rows, s.db}, nil
+}
+
+func (s *Stmt) Query(args ...interface{}) (*Rows, error) {
+	return s.QueryContext(context.Background(), args...)
+}
+
+func (s *Stmt) QueryMapContext(ctx context.Context, mp interface{}) (*Rows, error) {
+	vv := reflect.ValueOf(mp)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
+		return nil, errors.New("mp should be a map's pointer")
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
+	}
+
+	return s.QueryContext(ctx, args...)
+}
+
+func (s *Stmt) QueryMap(mp interface{}) (*Rows, error) {
+	return s.QueryMapContext(context.Background(), mp)
+}
+
+func (s *Stmt) QueryStructContext(ctx context.Context, st interface{}) (*Rows, error) {
+	vv := reflect.ValueOf(st)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
+		return nil, errors.New("mp should be a map's pointer")
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().FieldByName(k).Interface()
+	}
+
+	return s.Query(args...)
+}
+
+func (s *Stmt) QueryStruct(st interface{}) (*Rows, error) {
+	return s.QueryStructContext(context.Background(), st)
+}
+
+func (s *Stmt) QueryRowContext(ctx context.Context, args ...interface{}) *Row {
+	rows, err := s.QueryContext(ctx, args...)
+	return &Row{rows, err}
+}
+
+func (s *Stmt) QueryRow(args ...interface{}) *Row {
+	return s.QueryRowContext(context.Background(), args...)
+}
+
+func (s *Stmt) QueryRowMapContext(ctx context.Context, mp interface{}) *Row {
+	vv := reflect.ValueOf(mp)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Map {
+		return &Row{nil, errors.New("mp should be a map's pointer")}
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().MapIndex(reflect.ValueOf(k)).Interface()
+	}
+
+	return s.QueryRowContext(ctx, args...)
+}
+
+func (s *Stmt) QueryRowMap(mp interface{}) *Row {
+	return s.QueryRowMapContext(context.Background(), mp)
+}
+
+func (s *Stmt) QueryRowStructContext(ctx context.Context, st interface{}) *Row {
+	vv := reflect.ValueOf(st)
+	if vv.Kind() != reflect.Ptr || vv.Elem().Kind() != reflect.Struct {
+		return &Row{nil, errors.New("st should be a struct's pointer")}
+	}
+
+	args := make([]interface{}, len(s.names))
+	for k, i := range s.names {
+		args[i] = vv.Elem().FieldByName(k).Interface()
+	}
+
+	return s.QueryRowContext(ctx, args...)
+}
+
+func (s *Stmt) QueryRowStruct(st interface{}) *Row {
+	return s.QueryRowStructContext(context.Background(), st)
+}
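
The extracted Stmt keeps the ?name placeholder rewriting. A sketch, assuming this runs inside a function returning error, with an open *core.DB named db and an illustrative annotation table:

```go
stmt, err := db.Prepare("INSERT INTO annotation (text, org_id) VALUES (?text, ?org_id)")
if err != nil {
	return err
}
defer stmt.Close()

// ExecMap expects a pointer to a map keyed by the placeholder names.
if _, err := stmt.ExecMap(&map[string]interface{}{"text": "deploy finished", "org_id": 1}); err != nil {
	return err
}
```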

+ 4 - 2
vendor/github.com/go-xorm/core/table.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -49,7 +53,6 @@ func NewTable(name string, t reflect.Type) *Table {
 }
 
 func (table *Table) columnsByName(name string) []*Column {
-
 	n := len(name)
 
 	for k := range table.columnsMap {
@@ -75,7 +78,6 @@ func (table *Table) GetColumn(name string) *Column {
 }
 
 func (table *Table) GetColumnIdx(name string, idx int) *Column {
-
 	cols := table.columnsByName(name)
 
 	if cols != nil && idx < len(cols) {

+ 153 - 0
vendor/github.com/go-xorm/core/tx.go

@@ -0,0 +1,153 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+	"context"
+	"database/sql"
+)
+
+type Tx struct {
+	*sql.Tx
+	db *DB
+}
+
+func (db *DB) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) {
+	tx, err := db.DB.BeginTx(ctx, opts)
+	if err != nil {
+		return nil, err
+	}
+	return &Tx{tx, db}, nil
+}
+
+func (db *DB) Begin() (*Tx, error) {
+	tx, err := db.DB.Begin()
+	if err != nil {
+		return nil, err
+	}
+	return &Tx{tx, db}, nil
+}
+
+func (tx *Tx) PrepareContext(ctx context.Context, query string) (*Stmt, error) {
+	names := make(map[string]int)
+	var i int
+	query = re.ReplaceAllStringFunc(query, func(src string) string {
+		names[src[1:]] = i
+		i += 1
+		return "?"
+	})
+
+	stmt, err := tx.Tx.PrepareContext(ctx, query)
+	if err != nil {
+		return nil, err
+	}
+	return &Stmt{stmt, tx.db, names}, nil
+}
+
+func (tx *Tx) Prepare(query string) (*Stmt, error) {
+	return tx.PrepareContext(context.Background(), query)
+}
+
+func (tx *Tx) StmtContext(ctx context.Context, stmt *Stmt) *Stmt {
+	stmt.Stmt = tx.Tx.StmtContext(ctx, stmt.Stmt)
+	return stmt
+}
+
+func (tx *Tx) Stmt(stmt *Stmt) *Stmt {
+	return tx.StmtContext(context.Background(), stmt)
+}
+
+func (tx *Tx) ExecMapContext(ctx context.Context, query string, mp interface{}) (sql.Result, error) {
+	query, args, err := MapToSlice(query, mp)
+	if err != nil {
+		return nil, err
+	}
+	return tx.Tx.ExecContext(ctx, query, args...)
+}
+
+func (tx *Tx) ExecMap(query string, mp interface{}) (sql.Result, error) {
+	return tx.ExecMapContext(context.Background(), query, mp)
+}
+
+func (tx *Tx) ExecStructContext(ctx context.Context, query string, st interface{}) (sql.Result, error) {
+	query, args, err := StructToSlice(query, st)
+	if err != nil {
+		return nil, err
+	}
+	return tx.Tx.ExecContext(ctx, query, args...)
+}
+
+func (tx *Tx) ExecStruct(query string, st interface{}) (sql.Result, error) {
+	return tx.ExecStructContext(context.Background(), query, st)
+}
+
+func (tx *Tx) QueryContext(ctx context.Context, query string, args ...interface{}) (*Rows, error) {
+	rows, err := tx.Tx.QueryContext(ctx, query, args...)
+	if err != nil {
+		return nil, err
+	}
+	return &Rows{rows, tx.db}, nil
+}
+
+func (tx *Tx) Query(query string, args ...interface{}) (*Rows, error) {
+	return tx.QueryContext(context.Background(), query, args...)
+}
+
+func (tx *Tx) QueryMapContext(ctx context.Context, query string, mp interface{}) (*Rows, error) {
+	query, args, err := MapToSlice(query, mp)
+	if err != nil {
+		return nil, err
+	}
+	return tx.QueryContext(ctx, query, args...)
+}
+
+func (tx *Tx) QueryMap(query string, mp interface{}) (*Rows, error) {
+	return tx.QueryMapContext(context.Background(), query, mp)
+}
+
+func (tx *Tx) QueryStructContext(ctx context.Context, query string, st interface{}) (*Rows, error) {
+	query, args, err := StructToSlice(query, st)
+	if err != nil {
+		return nil, err
+	}
+	return tx.QueryContext(ctx, query, args...)
+}
+
+func (tx *Tx) QueryStruct(query string, st interface{}) (*Rows, error) {
+	return tx.QueryStructContext(context.Background(), query, st)
+}
+
+func (tx *Tx) QueryRowContext(ctx context.Context, query string, args ...interface{}) *Row {
+	rows, err := tx.QueryContext(ctx, query, args...)
+	return &Row{rows, err}
+}
+
+func (tx *Tx) QueryRow(query string, args ...interface{}) *Row {
+	return tx.QueryRowContext(context.Background(), query, args...)
+}
+
+func (tx *Tx) QueryRowMapContext(ctx context.Context, query string, mp interface{}) *Row {
+	query, args, err := MapToSlice(query, mp)
+	if err != nil {
+		return &Row{nil, err}
+	}
+	return tx.QueryRowContext(ctx, query, args...)
+}
+
+func (tx *Tx) QueryRowMap(query string, mp interface{}) *Row {
+	return tx.QueryRowMapContext(context.Background(), query, mp)
+}
+
+func (tx *Tx) QueryRowStructContext(ctx context.Context, query string, st interface{}) *Row {
+	query, args, err := StructToSlice(query, st)
+	if err != nil {
+		return &Row{nil, err}
+	}
+	return tx.QueryRowContext(ctx, query, args...)
+}
+
+func (tx *Tx) QueryRowStruct(query string, st interface{}) *Row {
+	return tx.QueryRowStructContext(context.Background(), query, st)
+}
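
Transactions gain the same context-aware surface. A sketch, again assuming an open *core.DB named db inside a function returning error, with context, time and database/sql imported:

```go
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()

tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable})
if err != nil {
	return err
}
// Named placeholders work through MapToSlice here as well.
if _, err := tx.ExecMapContext(ctx, "UPDATE dashboard SET title=?title WHERE id=?id",
	&map[string]interface{}{"title": "Renamed", "id": 1}); err != nil {
	tx.Rollback()
	return err
}
return tx.Commit()
```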

+ 36 - 18
vendor/github.com/go-xorm/core/type.go

@@ -1,3 +1,7 @@
+// Copyright 2019 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
 package core
 
 import (
@@ -69,24 +73,31 @@ var (
 	Enum = "ENUM"
 	Set  = "SET"
 
-	Char       = "CHAR"
-	Varchar    = "VARCHAR"
-	NVarchar   = "NVARCHAR"
-	TinyText   = "TINYTEXT"
-	Text       = "TEXT"
-	Clob       = "CLOB"
-	MediumText = "MEDIUMTEXT"
-	LongText   = "LONGTEXT"
-	Uuid       = "UUID"
+	Char             = "CHAR"
+	Varchar          = "VARCHAR"
+	NChar            = "NCHAR"
+	NVarchar         = "NVARCHAR"
+	TinyText         = "TINYTEXT"
+	Text             = "TEXT"
+	NText            = "NTEXT"
+	Clob             = "CLOB"
+	MediumText       = "MEDIUMTEXT"
+	LongText         = "LONGTEXT"
+	Uuid             = "UUID"
+	UniqueIdentifier = "UNIQUEIDENTIFIER"
+	SysName          = "SYSNAME"
 
 	Date       = "DATE"
 	DateTime   = "DATETIME"
+	SmallDateTime   = "SMALLDATETIME"
 	Time       = "TIME"
 	TimeStamp  = "TIMESTAMP"
 	TimeStampz = "TIMESTAMPZ"
 
 	Decimal = "DECIMAL"
 	Numeric = "NUMERIC"
+	Money   = "MONEY"
+	SmallMoney = "SMALLMONEY"
 
 	Real   = "REAL"
 	Float  = "FLOAT"
@@ -124,35 +135,42 @@ var (
 		Jsonb: TEXT_TYPE,
 
 		Char:       TEXT_TYPE,
+		NChar:      TEXT_TYPE,
 		Varchar:    TEXT_TYPE,
 		NVarchar:   TEXT_TYPE,
 		TinyText:   TEXT_TYPE,
 		Text:       TEXT_TYPE,
+		NText:      TEXT_TYPE,
 		MediumText: TEXT_TYPE,
 		LongText:   TEXT_TYPE,
 		Uuid:       TEXT_TYPE,
 		Clob:       TEXT_TYPE,
+		SysName:    TEXT_TYPE,
 
 		Date:       TIME_TYPE,
 		DateTime:   TIME_TYPE,
 		Time:       TIME_TYPE,
 		TimeStamp:  TIME_TYPE,
 		TimeStampz: TIME_TYPE,
+		SmallDateTime:   TIME_TYPE,
 
 		Decimal: NUMERIC_TYPE,
 		Numeric: NUMERIC_TYPE,
 		Real:    NUMERIC_TYPE,
 		Float:   NUMERIC_TYPE,
 		Double:  NUMERIC_TYPE,
+		Money:   NUMERIC_TYPE,
+		SmallMoney: NUMERIC_TYPE,
 
 		Binary:    BLOB_TYPE,
 		VarBinary: BLOB_TYPE,
 
-		TinyBlob:   BLOB_TYPE,
-		Blob:       BLOB_TYPE,
-		MediumBlob: BLOB_TYPE,
-		LongBlob:   BLOB_TYPE,
-		Bytea:      BLOB_TYPE,
+		TinyBlob:         BLOB_TYPE,
+		Blob:             BLOB_TYPE,
+		MediumBlob:       BLOB_TYPE,
+		LongBlob:         BLOB_TYPE,
+		Bytea:            BLOB_TYPE,
+		UniqueIdentifier: BLOB_TYPE,
 
 		Bool: NUMERIC_TYPE,
 
@@ -289,15 +307,15 @@ func SQLType2Type(st SQLType) reflect.Type {
 		return reflect.TypeOf(float32(1))
 	case Double:
 		return reflect.TypeOf(float64(1))
-	case Char, Varchar, NVarchar, TinyText, Text, MediumText, LongText, Enum, Set, Uuid, Clob:
+	case Char, NChar, Varchar, NVarchar, TinyText, Text, NText, MediumText, LongText, Enum, Set, Uuid, Clob, SysName:
 		return reflect.TypeOf("")
-	case TinyBlob, Blob, LongBlob, Bytea, Binary, MediumBlob, VarBinary:
+	case TinyBlob, Blob, LongBlob, Bytea, Binary, MediumBlob, VarBinary, UniqueIdentifier:
 		return reflect.TypeOf([]byte{})
 	case Bool:
 		return reflect.TypeOf(true)
-	case DateTime, Date, Time, TimeStamp, TimeStampz:
+	case DateTime, Date, Time, TimeStamp, TimeStampz, SmallDateTime:
 		return reflect.TypeOf(c_TIME_DEFAULT)
-	case Decimal, Numeric:
+	case Decimal, Numeric, Money, SmallMoney:
 		return reflect.TypeOf("")
 	default:
 		return reflect.TypeOf("")

+ 30 - 0
vendor/github.com/go-xorm/xorm/context_cache.go

@@ -0,0 +1,30 @@
+// Copyright 2018 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xorm
+
+// ContextCache is the interface that operates the cache data.
+type ContextCache interface {
+	// Put puts value into cache with key.
+	Put(key string, val interface{})
+	// Get gets cached value by given key.
+	Get(key string) interface{}
+}
+
+type memoryContextCache map[string]interface{}
+
+// NewMemoryContextCache returns a memoryContextCache
+func NewMemoryContextCache() memoryContextCache {
+	return make(map[string]interface{})
+}
+
+// Put puts value into cache with key.
+func (m memoryContextCache) Put(key string, val interface{}) {
+	m[key] = val
+}
+
+// Get gets cached value by given key.
+func (m memoryContextCache) Get(key string) interface{} {
+	return m[key]
+}
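
A minimal sketch of the ContextCache contract using the in-memory implementation added here; the cache key is illustrative:

```go
cache := xorm.NewMemoryContextCache()
cache.Put("org:1:dashboards", []int64{1, 2, 3})
if ids, ok := cache.Get("org:1:dashboards").([]int64); ok {
	fmt.Println(len(ids)) // 3
}
```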

+ 77 - 0
vendor/github.com/go-xorm/xorm/dialect_mysql.go

@@ -172,12 +172,33 @@ type mysql struct {
 	allowAllFiles     bool
 	allowOldPasswords bool
 	clientFoundRows   bool
+	rowFormat         string
 }
 
 func (db *mysql) Init(d *core.DB, uri *core.Uri, drivername, dataSourceName string) error {
 	return db.Base.Init(d, db, uri, drivername, dataSourceName)
 }
 
+func (db *mysql) SetParams(params map[string]string) {
+	rowFormat, ok := params["rowFormat"]
+	if ok {
+		var t = strings.ToUpper(rowFormat)
+		switch t {
+		case "COMPACT":
+			fallthrough
+		case "REDUNDANT":
+			fallthrough
+		case "DYNAMIC":
+			fallthrough
+		case "COMPRESSED":
+			db.rowFormat = t
+			break
+		default:
+			break
+		}
+	}
+}
+
 func (db *mysql) SqlType(c *core.Column) string {
 	var res string
 	switch t := c.SQLType.Name; t {
@@ -487,6 +508,62 @@ func (db *mysql) GetIndexes(tableName string) (map[string]*core.Index, error) {
 	return indexes, nil
 }
 
+func (db *mysql) CreateTableSql(table *core.Table, tableName, storeEngine, charset string) string {
+	var sql string
+	sql = "CREATE TABLE IF NOT EXISTS "
+	if tableName == "" {
+		tableName = table.Name
+	}
+
+	sql += db.Quote(tableName)
+	sql += " ("
+
+	if len(table.ColumnsSeq()) > 0 {
+		pkList := table.PrimaryKeys
+
+		for _, colName := range table.ColumnsSeq() {
+			col := table.GetColumn(colName)
+			if col.IsPrimaryKey && len(pkList) == 1 {
+				sql += col.String(db)
+			} else {
+				sql += col.StringNoPk(db)
+			}
+			sql = strings.TrimSpace(sql)
+			if len(col.Comment) > 0 {
+				sql += " COMMENT '" + col.Comment + "'"
+			}
+			sql += ", "
+		}
+
+		if len(pkList) > 1 {
+			sql += "PRIMARY KEY ( "
+			sql += db.Quote(strings.Join(pkList, db.Quote(",")))
+			sql += " ), "
+		}
+
+		sql = sql[:len(sql)-2]
+	}
+	sql += ")"
+
+	if storeEngine != "" {
+		sql += " ENGINE=" + storeEngine
+	}
+
+	if len(charset) == 0 {
+		charset = db.URI().Charset
+	}
+	if len(charset) != 0 {
+		sql += " DEFAULT CHARSET " + charset
+	}
+
+	if db.rowFormat != "" {
+		sql += " ROW_FORMAT=" + db.rowFormat
+	}
+	return sql
+}
+
 func (db *mysql) Filters() []core.Filter {
 	return []core.Filter{&core.IdFilter{}}
 }
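
The new SetParams hook lets callers opt into a ROW_FORMAT for the generated CREATE TABLE statements. A sketch, assuming engine.Dialect() on an *xorm.Engine and a registered go-sql-driver/mysql driver; the DSN is illustrative:

```go
engine, err := xorm.NewEngine("mysql", "grafana:password@tcp(127.0.0.1:3306)/grafana?charset=utf8")
if err != nil {
	return err
}
// Only COMPACT, REDUNDANT, DYNAMIC and COMPRESSED are accepted; anything else is ignored.
engine.Dialect().SetParams(map[string]string{"rowFormat": "DYNAMIC"})
```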

+ 82 - 26
vendor/github.com/go-xorm/xorm/dialect_postgres.go

@@ -764,14 +764,26 @@ var (
 		"YES":                       true,
 		"ZONE":                      true,
 	}
+
+	// DefaultPostgresSchema default postgres schema
+	DefaultPostgresSchema = "public"
 )
 
+const postgresPublicSchema = "public"
+
 type postgres struct {
 	core.Base
 }
 
 func (db *postgres) Init(d *core.DB, uri *core.Uri, drivername, dataSourceName string) error {
-	return db.Base.Init(d, db, uri, drivername, dataSourceName)
+	err := db.Base.Init(d, db, uri, drivername, dataSourceName)
+	if err != nil {
+		return err
+	}
+	if db.Schema == "" {
+		db.Schema = DefaultPostgresSchema
+	}
+	return nil
 }
 
 func (db *postgres) SqlType(c *core.Column) string {
@@ -868,32 +880,42 @@ func (db *postgres) IndexOnTable() bool {
 }
 
 func (db *postgres) IndexCheckSql(tableName, idxName string) (string, []interface{}) {
-	args := []interface{}{tableName, idxName}
+	if len(db.Schema) == 0 {
+		args := []interface{}{tableName, idxName}
+		return `SELECT indexname FROM pg_indexes WHERE tablename = ? AND indexname = ?`, args
+	}
+
+	args := []interface{}{db.Schema, tableName, idxName}
 	return `SELECT indexname FROM pg_indexes ` +
-		`WHERE tablename = ? AND indexname = ?`, args
+		`WHERE schemaname = ? AND tablename = ? AND indexname = ?`, args
 }
 
 func (db *postgres) TableCheckSql(tableName string) (string, []interface{}) {
-	args := []interface{}{tableName}
-	return `SELECT tablename FROM pg_tables WHERE tablename = ?`, args
-}
+	if len(db.Schema) == 0 {
+		args := []interface{}{tableName}
+		return `SELECT tablename FROM pg_tables WHERE tablename = ?`, args
+	}
 
-/*func (db *postgres) ColumnCheckSql(tableName, colName string) (string, []interface{}) {
-	args := []interface{}{tableName, colName}
-	return "SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = ?" +
-		" AND column_name = ?", args
-}*/
+	args := []interface{}{db.Schema, tableName}
+	return `SELECT tablename FROM pg_tables WHERE schemaname = ? AND tablename = ?`, args
+}
 
 func (db *postgres) ModifyColumnSql(tableName string, col *core.Column) string {
-	return fmt.Sprintf("alter table %s ALTER COLUMN %s TYPE %s",
-		tableName, col.Name, db.SqlType(col))
+	if len(db.Schema) == 0 {
+		return fmt.Sprintf("alter table %s ALTER COLUMN %s TYPE %s",
+			tableName, col.Name, db.SqlType(col))
+	}
+	return fmt.Sprintf("alter table %s.%s ALTER COLUMN %s TYPE %s",
+		db.Schema, tableName, col.Name, db.SqlType(col))
 }
 
 func (db *postgres) DropIndexSql(tableName string, index *core.Index) string {
-	//var unique string
 	quote := db.Quote
 	idxName := index.Name
 
+	tableName = strings.Replace(tableName, `"`, "", -1)
+	tableName = strings.Replace(tableName, `.`, "_", -1)
+
 	if !strings.HasPrefix(idxName, "UQE_") &&
 		!strings.HasPrefix(idxName, "IDX_") {
 		if index.Type == core.UniqueType {
@@ -902,13 +924,21 @@ func (db *postgres) DropIndexSql(tableName string, index *core.Index) string {
 			idxName = fmt.Sprintf("IDX_%v_%v", tableName, index.Name)
 		}
 	}
+	if db.Uri.Schema != "" {
+		idxName = db.Uri.Schema + "." + idxName
+	}
 	return fmt.Sprintf("DROP INDEX %v", quote(idxName))
 }
 
 func (db *postgres) IsColumnExist(tableName, colName string) (bool, error) {
-	args := []interface{}{tableName, colName}
-	query := "SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = $1" +
-		" AND column_name = $2"
+	args := []interface{}{db.Schema, tableName, colName}
+	query := "SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = $1 AND table_name = $2" +
+		" AND column_name = $3"
+	if len(db.Schema) == 0 {
+		args = []interface{}{tableName, colName}
+		query = "SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = $1" +
+			" AND column_name = $2"
+	}
 	db.LogSQL(query, args)
 
 	rows, err := db.DB().Query(query, args...)
@@ -921,8 +951,7 @@ func (db *postgres) IsColumnExist(tableName, colName string) (bool, error) {
 }
 
 func (db *postgres) GetColumns(tableName string) ([]string, map[string]*core.Column, error) {
-	// FIXME: the schema should be replaced by user custom's
-	args := []interface{}{tableName, "public"}
+	args := []interface{}{tableName}
 	s := `SELECT column_name, column_default, is_nullable, data_type, character_maximum_length, numeric_precision, numeric_precision_radix ,
     CASE WHEN p.contype = 'p' THEN true ELSE false END AS primarykey,
     CASE WHEN p.contype = 'u' THEN true ELSE false END AS uniquekey
@@ -933,7 +962,15 @@ FROM pg_attribute f
     LEFT JOIN pg_constraint p ON p.conrelid = c.oid AND f.attnum = ANY (p.conkey)
     LEFT JOIN pg_class AS g ON p.confrelid = g.oid
     LEFT JOIN INFORMATION_SCHEMA.COLUMNS s ON s.column_name=f.attname AND c.relname=s.table_name
-WHERE c.relkind = 'r'::char AND c.relname = $1 AND s.table_schema = $2 AND f.attnum > 0 ORDER BY f.attnum;`
+WHERE c.relkind = 'r'::char AND c.relname = $1%s AND f.attnum > 0 ORDER BY f.attnum;`
+
+	var f string
+	if len(db.Schema) != 0 {
+		args = append(args, db.Schema)
+		f = " AND s.table_schema = $2"
+	}
+	s = fmt.Sprintf(s, f)
+
 	db.LogSQL(s, args)
 
 	rows, err := db.DB().Query(s, args...)
@@ -1023,9 +1060,13 @@ WHERE c.relkind = 'r'::char AND c.relname = $1 AND s.table_schema = $2 AND f.att
 }
 
 func (db *postgres) GetTables() ([]*core.Table, error) {
-	// FIXME: replace public to user customrize schema
-	args := []interface{}{"public"}
-	s := fmt.Sprintf("SELECT tablename FROM pg_tables WHERE schemaname = $1")
+	args := []interface{}{}
+	s := "SELECT tablename FROM pg_tables"
+	if len(db.Schema) != 0 {
+		args = append(args, db.Schema)
+		s = s + " WHERE schemaname = $1"
+	}
+
 	db.LogSQL(s, args)
 
 	rows, err := db.DB().Query(s, args...)
@@ -1049,9 +1090,12 @@ func (db *postgres) GetTables() ([]*core.Table, error) {
 }
 
 func (db *postgres) GetIndexes(tableName string) (map[string]*core.Index, error) {
-	// FIXME: replace the public schema to user specify schema
-	args := []interface{}{"public", tableName}
-	s := fmt.Sprintf("SELECT indexname, indexdef FROM pg_indexes WHERE schemaname=$1 AND tablename=$2")
+	args := []interface{}{tableName}
+	s := fmt.Sprintf("SELECT indexname, indexdef FROM pg_indexes WHERE tablename=$1")
+	if len(db.Schema) != 0 {
+		args = append(args, db.Schema)
+		s = s + " AND schemaname=$2"
+	}
 	db.LogSQL(s, args)
 
 	rows, err := db.DB().Query(s, args...)
@@ -1179,3 +1223,15 @@ func (p *pqDriver) Parse(driverName, dataSourceName string) (*core.Uri, error) {
 
 	return db, nil
 }
+
+type pqDriverPgx struct {
+	pqDriver
+}
+
+func (pgx *pqDriverPgx) Parse(driverName, dataSourceName string) (*core.Uri, error) {
+	// Remove the leading characters for driver to work
+	if len(dataSourceName) >= 9 && dataSourceName[0] == 0 {
+		dataSourceName = dataSourceName[9:]
+	}
+	return pgx.pqDriver.Parse(driverName, dataSourceName)
+}
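
Schema handling becomes configurable: when the DSN carries no search_path, Init now falls back to the package-level default. A sketch of overriding that fallback before opening the engine (lib/pq driver and DSN are assumptions):

```go
xorm.DefaultPostgresSchema = "grafana"
engine, err := xorm.NewEngine("postgres", "user=grafana dbname=grafana sslmode=disable")
if err != nil {
	return err
}
```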

+ 5 - 1
vendor/github.com/go-xorm/xorm/dialect_sqlite3.go

@@ -233,7 +233,7 @@ func (db *sqlite3) TableCheckSql(tableName string) (string, []interface{}) {
 }
 
 func (db *sqlite3) DropIndexSql(tableName string, index *core.Index) string {
-	//var unique string
+	// var unique string
 	quote := db.Quote
 	idxName := index.Name
 
@@ -452,5 +452,9 @@ type sqlite3Driver struct {
 }
 
 func (p *sqlite3Driver) Parse(driverName, dataSourceName string) (*core.Uri, error) {
+	if strings.Contains(dataSourceName, "?") {
+		dataSourceName = dataSourceName[:strings.Index(dataSourceName, "?")]
+	}
+
 	return &core.Uri{DbType: core.SQLITE, DbName: dataSourceName}, nil
 }

+ 99 - 110
vendor/github.com/go-xorm/xorm/engine.go

@@ -49,6 +49,35 @@ type Engine struct {
 	tagHandlers map[string]tagHandler
 
 	engineGroup *EngineGroup
+
+	cachers    map[string]core.Cacher
+	cacherLock sync.RWMutex
+}
+
+func (engine *Engine) setCacher(tableName string, cacher core.Cacher) {
+	engine.cacherLock.Lock()
+	engine.cachers[tableName] = cacher
+	engine.cacherLock.Unlock()
+}
+
+func (engine *Engine) SetCacher(tableName string, cacher core.Cacher) {
+	engine.setCacher(tableName, cacher)
+}
+
+func (engine *Engine) getCacher(tableName string) core.Cacher {
+	var cacher core.Cacher
+	var ok bool
+	engine.cacherLock.RLock()
+	cacher, ok = engine.cachers[tableName]
+	engine.cacherLock.RUnlock()
+	if !ok && !engine.disableGlobalCache {
+		cacher = engine.Cacher
+	}
+	return cacher
+}
+
+func (engine *Engine) GetCacher(tableName string) core.Cacher {
+	return engine.getCacher(tableName)
 }
 
 // BufferSize sets buffer size for iterate
@@ -148,6 +177,14 @@ func (engine *Engine) QuoteStr() string {
 	return engine.dialect.QuoteStr()
 }
 
+func (engine *Engine) quoteColumns(columnStr string) string {
+	columns := strings.Split(columnStr, ",")
+	for i := 0; i < len(columns); i++ {
+		columns[i] = engine.Quote(strings.TrimSpace(columns[i]))
+	}
+	return strings.Join(columns, ",")
+}
+
 // Quote Use QuoteStr quote the string sql
 func (engine *Engine) Quote(value string) string {
 	value = strings.TrimSpace(value)
@@ -165,7 +202,7 @@ func (engine *Engine) Quote(value string) string {
 }
 
 // QuoteTo quotes string and writes into the buffer
-func (engine *Engine) QuoteTo(buf *bytes.Buffer, value string) {
+func (engine *Engine) QuoteTo(buf *builder.StringBuilder, value string) {
 	if buf == nil {
 		return
 	}
@@ -208,6 +245,11 @@ func (engine *Engine) AutoIncrStr() string {
 	return engine.dialect.AutoIncrStr()
 }
 
+// SetConnMaxLifetime sets the maximum amount of time a connection may be reused.
+func (engine *Engine) SetConnMaxLifetime(d time.Duration) {
+	engine.db.SetConnMaxLifetime(d)
+}
+
 // SetMaxOpenConns is only available for go 1.2+
 func (engine *Engine) SetMaxOpenConns(conns int) {
 	engine.db.SetMaxOpenConns(conns)
@@ -245,13 +287,7 @@ func (engine *Engine) NoCascade() *Session {
 
 // MapCacher Set a table use a special cacher
 func (engine *Engine) MapCacher(bean interface{}, cacher core.Cacher) error {
-	v := rValue(bean)
-	tb, err := engine.autoMapType(v)
-	if err != nil {
-		return err
-	}
-
-	tb.Cacher = cacher
+	engine.setCacher(engine.TableName(bean, true), cacher)
 	return nil
 }
 
@@ -536,33 +572,6 @@ func (engine *Engine) dumpTables(tables []*core.Table, w io.Writer, tp ...core.D
 	return nil
 }
 
-func (engine *Engine) tableName(beanOrTableName interface{}) (string, error) {
-	v := rValue(beanOrTableName)
-	if v.Type().Kind() == reflect.String {
-		return beanOrTableName.(string), nil
-	} else if v.Type().Kind() == reflect.Struct {
-		return engine.tbName(v), nil
-	}
-	return "", errors.New("bean should be a struct or struct's point")
-}
-
-func (engine *Engine) tbName(v reflect.Value) string {
-	if tb, ok := v.Interface().(TableName); ok {
-		return tb.TableName()
-	}
-
-	if v.Type().Kind() == reflect.Ptr {
-		if tb, ok := reflect.Indirect(v).Interface().(TableName); ok {
-			return tb.TableName()
-		}
-	} else if v.CanAddr() {
-		if tb, ok := v.Addr().Interface().(TableName); ok {
-			return tb.TableName()
-		}
-	}
-	return engine.TableMapper.Obj2Table(reflect.Indirect(v).Type().Name())
-}
-
 // Cascade use cascade or not
 func (engine *Engine) Cascade(trueOrFalse ...bool) *Session {
 	session := engine.NewSession()
@@ -846,7 +855,7 @@ func (engine *Engine) TableInfo(bean interface{}) *Table {
 	if err != nil {
 		engine.logger.Error(err)
 	}
-	return &Table{tb, engine.tbName(v)}
+	return &Table{tb, engine.TableName(bean)}
 }
 
 func addIndex(indexName string, table *core.Table, col *core.Column, indexType int) {
@@ -861,15 +870,6 @@ func addIndex(indexName string, table *core.Table, col *core.Column, indexType i
 	}
 }
 
-func (engine *Engine) newTable() *core.Table {
-	table := core.NewEmptyTable()
-
-	if !engine.disableGlobalCache {
-		table.Cacher = engine.Cacher
-	}
-	return table
-}
-
 // TableName table name interface to define customerize table name
 type TableName interface {
 	TableName() string
@@ -881,21 +881,9 @@ var (
 
 func (engine *Engine) mapType(v reflect.Value) (*core.Table, error) {
 	t := v.Type()
-	table := engine.newTable()
-	if tb, ok := v.Interface().(TableName); ok {
-		table.Name = tb.TableName()
-	} else {
-		if v.CanAddr() {
-			if tb, ok = v.Addr().Interface().(TableName); ok {
-				table.Name = tb.TableName()
-			}
-		}
-		if table.Name == "" {
-			table.Name = engine.TableMapper.Obj2Table(t.Name())
-		}
-	}
-
+	table := core.NewEmptyTable()
 	table.Type = t
+	table.Name = engine.tbNameForMap(v)
 
 	var idFieldColName string
 	var hasCacheTag, hasNoCacheTag bool
@@ -1049,15 +1037,15 @@ func (engine *Engine) mapType(v reflect.Value) (*core.Table, error) {
 	if hasCacheTag {
 		if engine.Cacher != nil { // !nash! use engine's cacher if provided
 			engine.logger.Info("enable cache on table:", table.Name)
-			table.Cacher = engine.Cacher
+			engine.setCacher(table.Name, engine.Cacher)
 		} else {
 			engine.logger.Info("enable LRU cache on table:", table.Name)
-			table.Cacher = NewLRUCacher2(NewMemoryStore(), time.Hour, 10000) // !nashtsai! HACK use LRU cacher for now
+			engine.setCacher(table.Name, NewLRUCacher2(NewMemoryStore(), time.Hour, 10000))
 		}
 	}
 	if hasNoCacheTag {
-		engine.logger.Info("no cache on table:", table.Name)
-		table.Cacher = nil
+		engine.logger.Info("disable cache on table:", table.Name)
+		engine.setCacher(table.Name, nil)
 	}
 
 	return table, nil
@@ -1116,7 +1104,25 @@ func (engine *Engine) idOfV(rv reflect.Value) (core.PK, error) {
 	pk := make([]interface{}, len(table.PrimaryKeys))
 	for i, col := range table.PKColumns() {
 		var err error
-		pkField := v.FieldByName(col.FieldName)
+
+		fieldName := col.FieldName
+		for {
+			parts := strings.SplitN(fieldName, ".", 2)
+			if len(parts) == 1 {
+				break
+			}
+
+			v = v.FieldByName(parts[0])
+			if v.Kind() == reflect.Ptr {
+				v = v.Elem()
+			}
+			if v.Kind() != reflect.Struct {
+				return nil, ErrUnSupportedType
+			}
+			fieldName = parts[1]
+		}
+
+		pkField := v.FieldByName(fieldName)
 		switch pkField.Kind() {
 		case reflect.String:
 			pk[i], err = engine.idTypeAssertion(col, pkField.String())
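The added loop lets a primary key live inside a nested struct: col.FieldName may now be a dotted path such as "Owner.Id", and the code descends through the intermediate struct (dereferencing pointers) before reading the key. A self-contained illustration of the same resolution, not xorm code, assuming the reflect, strings, and fmt imports and hypothetical names:

```go
// fieldByPath walks a dotted path like "Owner.Id" through nested structs,
// dereferencing pointers along the way, mirroring the loop above.
func fieldByPath(v reflect.Value, path string) (reflect.Value, error) {
	for {
		parts := strings.SplitN(path, ".", 2)
		if len(parts) == 1 {
			return v.FieldByName(path), nil
		}
		v = v.FieldByName(parts[0])
		if v.Kind() == reflect.Ptr {
			v = v.Elem()
		}
		if v.Kind() != reflect.Struct {
			return reflect.Value{}, fmt.Errorf("%q is not a struct field", parts[0])
		}
		path = parts[1]
	}
}
```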
@@ -1162,26 +1168,10 @@ func (engine *Engine) CreateUniques(bean interface{}) error {
 	return session.CreateUniques(bean)
 }
 
-func (engine *Engine) getCacher2(table *core.Table) core.Cacher {
-	return table.Cacher
-}
-
 // ClearCacheBean if enabled cache, clear the cache bean
 func (engine *Engine) ClearCacheBean(bean interface{}, id string) error {
-	v := rValue(bean)
-	t := v.Type()
-	if t.Kind() != reflect.Struct {
-		return errors.New("error params")
-	}
-	tableName := engine.tbName(v)
-	table, err := engine.autoMapType(v)
-	if err != nil {
-		return err
-	}
-	cacher := table.Cacher
-	if cacher == nil {
-		cacher = engine.Cacher
-	}
+	tableName := engine.TableName(bean)
+	cacher := engine.getCacher(tableName)
 	if cacher != nil {
 		cacher.ClearIds(tableName)
 		cacher.DelBean(tableName, id)
@@ -1192,21 +1182,8 @@ func (engine *Engine) ClearCacheBean(bean interface{}, id string) error {
 // ClearCache if enabled cache, clear some tables' cache
 func (engine *Engine) ClearCache(beans ...interface{}) error {
 	for _, bean := range beans {
-		v := rValue(bean)
-		t := v.Type()
-		if t.Kind() != reflect.Struct {
-			return errors.New("error params")
-		}
-		tableName := engine.tbName(v)
-		table, err := engine.autoMapType(v)
-		if err != nil {
-			return err
-		}
-
-		cacher := table.Cacher
-		if cacher == nil {
-			cacher = engine.Cacher
-		}
+		tableName := engine.TableName(bean)
+		cacher := engine.getCacher(tableName)
 		if cacher != nil {
 			cacher.ClearIds(tableName)
 			cacher.ClearBeans(tableName)
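ClearCacheBean and ClearCache now resolve the cacher by table name instead of reaching into core.Table, so clearing cached rows only needs the bean. A tiny sketch, assuming an initialized engine and the same hypothetical User struct as above:

```go
// Drop cached ids and rows for the user table after an out-of-band change.
if err := engine.ClearCache(new(User)); err != nil {
	log.Println(err)
}
```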
@@ -1224,13 +1201,13 @@ func (engine *Engine) Sync(beans ...interface{}) error {
 
 	for _, bean := range beans {
 		v := rValue(bean)
-		tableName := engine.tbName(v)
+		tableNameNoSchema := engine.TableName(bean)
 		table, err := engine.autoMapType(v)
 		if err != nil {
 			return err
 		}
 
-		isExist, err := session.Table(bean).isTableExist(tableName)
+		isExist, err := session.Table(bean).isTableExist(tableNameNoSchema)
 		if err != nil {
 			return err
 		}
@@ -1256,12 +1233,12 @@ func (engine *Engine) Sync(beans ...interface{}) error {
 			}
 		} else {
 			for _, col := range table.Columns() {
-				isExist, err := engine.dialect.IsColumnExist(tableName, col.Name)
+				isExist, err := engine.dialect.IsColumnExist(tableNameNoSchema, col.Name)
 				if err != nil {
 					return err
 				}
 				if !isExist {
-					if err := session.statement.setRefValue(v); err != nil {
+					if err := session.statement.setRefBean(bean); err != nil {
 						return err
 					}
 					err = session.addColumn(col.Name)
@@ -1272,35 +1249,35 @@ func (engine *Engine) Sync(beans ...interface{}) error {
 			}
 
 			for name, index := range table.Indexes {
-				if err := session.statement.setRefValue(v); err != nil {
+				if err := session.statement.setRefBean(bean); err != nil {
 					return err
 				}
 				if index.Type == core.UniqueType {
-					isExist, err := session.isIndexExist2(tableName, index.Cols, true)
+					isExist, err := session.isIndexExist2(tableNameNoSchema, index.Cols, true)
 					if err != nil {
 						return err
 					}
 					if !isExist {
-						if err := session.statement.setRefValue(v); err != nil {
+						if err := session.statement.setRefBean(bean); err != nil {
 							return err
 						}
 
-						err = session.addUnique(tableName, name)
+						err = session.addUnique(tableNameNoSchema, name)
 						if err != nil {
 							return err
 						}
 					}
 				} else if index.Type == core.IndexType {
-					isExist, err := session.isIndexExist2(tableName, index.Cols, false)
+					isExist, err := session.isIndexExist2(tableNameNoSchema, index.Cols, false)
 					if err != nil {
 						return err
 					}
 					if !isExist {
-						if err := session.statement.setRefValue(v); err != nil {
+						if err := session.statement.setRefBean(bean); err != nil {
 							return err
 						}
 
-						err = session.addIndex(tableName, name)
+						err = session.addIndex(tableNameNoSchema, name)
 						if err != nil {
 							return err
 						}
@@ -1369,10 +1346,10 @@ func (engine *Engine) DropIndexes(bean interface{}) error {
 }
 
 // Exec raw sql
-func (engine *Engine) Exec(sql string, args ...interface{}) (sql.Result, error) {
+func (engine *Engine) Exec(sqlorArgs ...interface{}) (sql.Result, error) {
 	session := engine.NewSession()
 	defer session.Close()
-	return session.Exec(sql, args...)
+	return session.Exec(sqlorArgs...)
 }
 
 // Query a raw sql and return records as []map[string][]byte
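Exec now takes a single variadic sqlorArgs parameter instead of a fixed (sql string, args ...interface{}) pair, forwarding everything to Session.Exec; plain string-plus-args call sites keep compiling unchanged. A sketch with placeholder SQL and values, assuming an initialized engine:

```go
// Same call shape as before the signature change.
res, err := engine.Exec("UPDATE dashboard SET version = version + 1 WHERE id = ?", 42)
if err != nil {
	log.Fatal(err)
}
rows, _ := res.RowsAffected()
fmt.Println(rows)
```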
@@ -1453,6 +1430,13 @@ func (engine *Engine) Find(beans interface{}, condiBeans ...interface{}) error {
 	return session.Find(beans, condiBeans...)
 }
 
+// FindAndCount find the results and also return the counts
+func (engine *Engine) FindAndCount(rowsSlicePtr interface{}, condiBean ...interface{}) (int64, error) {
+	session := engine.NewSession()
+	defer session.Close()
+	return session.FindAndCount(rowsSlicePtr, condiBean...)
+}
+
 // Iterate record by record handle records from table, bean's non-empty fields
 // are conditions.
 func (engine *Engine) Iterate(bean interface{}, fun IterFunc) error {
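FindAndCount is new engine-level sugar over Session.FindAndCount: one call fills the slice and returns the total count of matching rows, which is convenient for paginated listings. A usage sketch, where User and the page size are assumptions:

```go
var users []User
total, err := engine.Limit(20, 0).FindAndCount(&users)
if err != nil {
	log.Fatal(err)
}
fmt.Printf("page holds %d of %d users\n", len(users), total)
```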
@@ -1629,6 +1613,11 @@ func (engine *Engine) SetTZDatabase(tz *time.Location) {
 	engine.DatabaseTZ = tz
 }
 
+// SetSchema sets the schema of database
+func (engine *Engine) SetSchema(schema string) {
+	engine.dialect.URI().Schema = schema
+}
+
 // Unscoped always disable struct tag "deleted"
 func (engine *Engine) Unscoped() *Session {
 	session := engine.NewSession()
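SetSchema (added above) is the public knob behind the schema handling introduced in engine_table.go further down: once a schema is set, Postgres table names get the prefix wherever TableName(bean, true) is used. A sketch with placeholder connection details and an assumed schema name:

```go
engine, err := xorm.NewEngine("postgres", "postgres://user:pass@localhost/grafana?sslmode=disable")
if err != nil {
	log.Fatal(err)
}
engine.SetSchema("grafana") // the default "public" schema is left unprefixed
```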

+ 4 - 1
vendor/github.com/go-xorm/xorm/engine_cond.go

@@ -9,6 +9,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"reflect"
+	"strings"
 	"time"
 
 	"github.com/go-xorm/builder"
@@ -51,7 +52,9 @@ func (engine *Engine) buildConds(table *core.Table, bean interface{},
 
 		fieldValuePtr, err := col.ValueOf(bean)
 		if err != nil {
-			engine.logger.Error(err)
+			if !strings.Contains(err.Error(), "is not valid") {
+				engine.logger.Warn(err)
+			}
 			continue
 		}
 

+ 10 - 0
vendor/github.com/go-xorm/xorm/engine_group.go

@@ -5,6 +5,8 @@
 package xorm
 
 import (
+	"time"
+
 	"github.com/go-xorm/core"
 )
 
@@ -99,6 +101,14 @@ func (eg *EngineGroup) SetColumnMapper(mapper core.IMapper) {
 	}
 }
 
+// SetConnMaxLifetime sets the maximum amount of time a connection may be reused.
+func (eg *EngineGroup) SetConnMaxLifetime(d time.Duration) {
+	eg.Engine.SetConnMaxLifetime(d)
+	for i := 0; i < len(eg.slaves); i++ {
+		eg.slaves[i].SetConnMaxLifetime(d)
+	}
+}
+
 // SetDefaultCacher set the default cacher
 func (eg *EngineGroup) SetDefaultCacher(cacher core.Cacher) {
 	eg.Engine.SetDefaultCacher(cacher)
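With engine_maxlife.go removed (next file), SetConnMaxLifetime lives directly on Engine and EngineGroup; the group version fans the setting out to the master engine and every slave. A sketch assuming the NewEngineGroup(driver, []dsn) constructor and placeholder DSNs:

```go
eg, err := xorm.NewEngineGroup("mysql", []string{
	"grafana:pw@tcp(db-master:3306)/grafana", // master
	"grafana:pw@tcp(db-slave1:3306)/grafana", // slave
})
if err != nil {
	log.Fatal(err)
}
eg.SetConnMaxLifetime(14 * time.Minute) // recycle pooled connections periodically
```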

+ 0 - 22
vendor/github.com/go-xorm/xorm/engine_maxlife.go

@@ -1,22 +0,0 @@
-// Copyright 2017 The Xorm Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.6
-
-package xorm
-
-import "time"
-
-// SetConnMaxLifetime sets the maximum amount of time a connection may be reused.
-func (engine *Engine) SetConnMaxLifetime(d time.Duration) {
-	engine.db.SetConnMaxLifetime(d)
-}
-
-// SetConnMaxLifetime sets the maximum amount of time a connection may be reused.
-func (eg *EngineGroup) SetConnMaxLifetime(d time.Duration) {
-	eg.Engine.SetConnMaxLifetime(d)
-	for i := 0; i < len(eg.slaves); i++ {
-		eg.slaves[i].SetConnMaxLifetime(d)
-	}
-}

+ 113 - 0
vendor/github.com/go-xorm/xorm/engine_table.go

@@ -0,0 +1,113 @@
+// Copyright 2018 The Xorm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xorm
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	"github.com/go-xorm/core"
+)
+
+// TableNameWithSchema will automatically add schema prefix on table name
+func (engine *Engine) tbNameWithSchema(v string) string {
+	// Add schema name as prefix of table name.
+	// Only for postgres database.
+	if engine.dialect.DBType() == core.POSTGRES &&
+		engine.dialect.URI().Schema != "" &&
+		engine.dialect.URI().Schema != postgresPublicSchema &&
+		strings.Index(v, ".") == -1 {
+		return engine.dialect.URI().Schema + "." + v
+	}
+	return v
+}
+
+// TableName returns table name with schema prefix if has
+func (engine *Engine) TableName(bean interface{}, includeSchema ...bool) string {
+	tbName := engine.tbNameNoSchema(bean)
+	if len(includeSchema) > 0 && includeSchema[0] {
+		tbName = engine.tbNameWithSchema(tbName)
+	}
+
+	return tbName
+}
+
+// tbName get some table's table name
+func (session *Session) tbNameNoSchema(table *core.Table) string {
+	if len(session.statement.AltTableName) > 0 {
+		return session.statement.AltTableName
+	}
+
+	return table.Name
+}
+
+func (engine *Engine) tbNameForMap(v reflect.Value) string {
+	if v.Type().Implements(tpTableName) {
+		return v.Interface().(TableName).TableName()
+	}
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+		if v.Type().Implements(tpTableName) {
+			return v.Interface().(TableName).TableName()
+		}
+	}
+
+	return engine.TableMapper.Obj2Table(v.Type().Name())
+}
+
+func (engine *Engine) tbNameNoSchema(tablename interface{}) string {
+	switch tablename.(type) {
+	case []string:
+		t := tablename.([]string)
+		if len(t) > 1 {
+			return fmt.Sprintf("%v AS %v", engine.Quote(t[0]), engine.Quote(t[1]))
+		} else if len(t) == 1 {
+			return engine.Quote(t[0])
+		}
+	case []interface{}:
+		t := tablename.([]interface{})
+		l := len(t)
+		var table string
+		if l > 0 {
+			f := t[0]
+			switch f.(type) {
+			case string:
+				table = f.(string)
+			case TableName:
+				table = f.(TableName).TableName()
+			default:
+				v := rValue(f)
+				t := v.Type()
+				if t.Kind() == reflect.Struct {
+					table = engine.tbNameForMap(v)
+				} else {
+					table = engine.Quote(fmt.Sprintf("%v", f))
+				}
+			}
+		}
+		if l > 1 {
+			return fmt.Sprintf("%v AS %v", engine.Quote(table),
+				engine.Quote(fmt.Sprintf("%v", t[1])))
+		} else if l == 1 {
+			return engine.Quote(table)
+		}
+	case TableName:
+		return tablename.(TableName).TableName()
+	case string:
+		return tablename.(string)
+	case reflect.Value:
+		v := tablename.(reflect.Value)
+		return engine.tbNameForMap(v)
+	default:
+		v := rValue(tablename)
+		t := v.Type()
+		if t.Kind() == reflect.Struct {
+			return engine.tbNameForMap(v)
+		}
+		return engine.Quote(fmt.Sprintf("%v", tablename))
+	}
+	return ""
+}
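engine_table.go centralizes the name resolution that was previously spread across the removed tableName/tbName helpers: the exported TableName accepts a struct, a string, a []string pair for aliasing, a reflect.Value, or anything implementing the TableName interface, and can optionally prepend the Postgres schema. A sketch with an assumed struct:

```go
// DashboardVersion is a hypothetical mapped struct, not part of xorm.
type DashboardVersion struct {
	Id int64
}

func (DashboardVersion) TableName() string { return "dashboard_version" }

// engine.TableName(new(DashboardVersion))       == "dashboard_version"
// engine.TableName(new(DashboardVersion), true) == "grafana.dashboard_version" on Postgres
//                                                  after engine.SetSchema("grafana")
// engine.TableName([]string{"dashboard", "d"})  == `"dashboard" AS "d"` (quoting per dialect)
```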

Because a large number of files were changed in this diff, some files are not shown.