postgres_test.go

package postgres

import (
	"context"
	"fmt"
	"math/rand"
	"strings"
	"testing"
	"time"

	"github.com/go-xorm/xorm"
	"github.com/grafana/grafana/pkg/components/securejsondata"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/sqlstore"
	"github.com/grafana/grafana/pkg/services/sqlstore/sqlutil"
	"github.com/grafana/grafana/pkg/tsdb"
	_ "github.com/lib/pq"
	. "github.com/smartystreets/goconvey/convey"
)

// To run this test, set runPostgresTests=true below,
// or run from the command line: GRAFANA_TEST_DB=postgres go test -v ./pkg/tsdb/postgres
// The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest.
// Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
// preconfigured Postgres server suitable for running these tests.
// There is also a datasource and dashboard provisioned by devenv scripts that you can
// use to verify that the generated data are visualized as expected; see
// devenv/README.md for setup instructions.
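//
// A typical local workflow (paths as referenced above; adjust flags to your environment):
//
//   docker-compose -f docker/blocks/postgres_tests/docker-compose.yaml up -d
//   GRAFANA_TEST_DB=postgres go test -v ./pkg/tsdb/postgres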
func TestPostgres(t *testing.T) {
	// change to true to run the PostgreSQL tests
	runPostgresTests := false
	// runPostgresTests := true

	if !(sqlstore.IsTestDbPostgres() || runPostgresTests) {
		t.Skip()
	}

	Convey("PostgreSQL", t, func() {
		x := InitPostgresTestDB(t)

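		// Swap the xorm engine factory and SQL macro interpolation for test
		// doubles so every query runs against the shared test engine above;
		// both originals are restored in the Reset below.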
		origXormEngine := tsdb.NewXormEngine
		tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) {
			return x, nil
		}

		origInterpolate := tsdb.Interpolate
		tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
			return sql, nil
		}

		endpoint, err := newPostgresQueryEndpoint(&models.DataSource{
			JsonData:       simplejson.New(),
			SecureJsonData: securejsondata.SecureJsonData{},
		})
		So(err, ShouldBeNil)

		sess := x.NewSession()
		fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

		Reset(func() {
			sess.Close()
			tsdb.NewXormEngine = origXormEngine
			tsdb.Interpolate = origInterpolate
		})
  56. Convey("Given a table with different native data types", func() {
  57. sql := `
  58. DROP TABLE IF EXISTS postgres_types;
  59. CREATE TABLE postgres_types(
  60. c00_smallint smallint,
  61. c01_integer integer,
  62. c02_bigint bigint,
  63. c03_real real,
  64. c04_double double precision,
  65. c05_decimal decimal(10,2),
  66. c06_numeric numeric(10,2),
  67. c07_char char(10),
  68. c08_varchar varchar(10),
  69. c09_text text,
  70. c10_timestamp timestamp without time zone,
  71. c11_timestamptz timestamp with time zone,
  72. c12_date date,
  73. c13_time time without time zone,
  74. c14_timetz time with time zone,
  75. c15_interval interval
  76. );
  77. `
  78. _, err := sess.Exec(sql)
  79. So(err, ShouldBeNil)
  80. sql = `
  81. INSERT INTO postgres_types VALUES(
  82. 1,2,3,
  83. 4.5,6.7,1.1,1.2,
  84. 'char10','varchar10','text',
  85. now(),now(),now(),now(),now(),'15m'::interval
  86. );
  87. `
  88. _, err = sess.Exec(sql)
  89. So(err, ShouldBeNil)
  90. Convey("When doing a table query should map Postgres column types to Go types", func() {
  91. query := &tsdb.TsdbQuery{
  92. Queries: []*tsdb.Query{
  93. {
  94. Model: simplejson.NewFromAny(map[string]interface{}{
  95. "rawSql": "SELECT * FROM postgres_types",
  96. "format": "table",
  97. }),
  98. RefId: "A",
  99. },
  100. },
  101. }
  102. resp, err := endpoint.Query(context.Background(), nil, query)
  103. So(err, ShouldBeNil)
  104. queryResult := resp.Results["A"]
  105. So(queryResult.Error, ShouldBeNil)
  106. column := queryResult.Tables[0].Rows[0]
  107. So(column[0].(int64), ShouldEqual, 1)
  108. So(column[1].(int64), ShouldEqual, 2)
  109. So(column[2].(int64), ShouldEqual, 3)
  110. So(column[3].(float64), ShouldEqual, 4.5)
  111. So(column[4].(float64), ShouldEqual, 6.7)
  112. So(column[5].(float64), ShouldEqual, 1.1)
  113. So(column[6].(float64), ShouldEqual, 1.2)
  114. So(column[7].(string), ShouldEqual, "char10 ")
  115. So(column[8].(string), ShouldEqual, "varchar10")
  116. So(column[9].(string), ShouldEqual, "text")
  117. So(column[10].(time.Time), ShouldHaveSameTypeAs, time.Now())
  118. So(column[11].(time.Time), ShouldHaveSameTypeAs, time.Now())
  119. So(column[12].(time.Time), ShouldHaveSameTypeAs, time.Now())
  120. So(column[13].(time.Time), ShouldHaveSameTypeAs, time.Now())
  121. So(column[14].(time.Time), ShouldHaveSameTypeAs, time.Now())
  122. So(column[15].(string), ShouldEqual, "00:15:00")
  123. })
  124. })

		Convey("Given a table with metrics that lack data for some series", func() {
			sql := `
				DROP TABLE IF EXISTS metric;
				CREATE TABLE metric (
					time timestamp,
					value integer
				)
			`
			_, err := sess.Exec(sql)
			So(err, ShouldBeNil)

			type metric struct {
				Time  time.Time
				Value int64
			}

			series := []*metric{}
			firstRange := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
			secondRange := genTimeRangeByInterval(fromStart.Add(20*time.Minute), 10*time.Minute, 10*time.Second)

			for _, t := range firstRange {
				series = append(series, &metric{
					Time:  t,
					Value: 15,
				})
			}

			for _, t := range secondRange {
				series = append(series, &metric{
					Time:  t,
					Value: 20,
				})
			}

			_, err = sess.InsertMulti(series)
			So(err, ShouldBeNil)

			Convey("When doing a metric query using timeGroup", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				points := queryResult.Series[0].Points
				// without fill this should result in 4 buckets
				So(len(points), ShouldEqual, 4)

				dt := fromStart
				for i := 0; i < 2; i++ {
					aValue := points[i][0].Float64
					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
					So(aValue, ShouldEqual, 15)
					So(aTime, ShouldEqual, dt)
					So(aTime.Unix()%300, ShouldEqual, 0)
					dt = dt.Add(5 * time.Minute)
				}

				// adjust for 10 minute gap between first and second set of points
				dt = dt.Add(10 * time.Minute)
				for i := 2; i < 4; i++ {
					aValue := points[i][0].Float64
					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
					So(aValue, ShouldEqual, 20)
					So(aTime, ShouldEqual, dt)
					dt = dt.Add(5 * time.Minute)
				}
			})

			Convey("When doing a metric query using timeGroup and $__interval", func() {
				mockInterpolate := tsdb.Interpolate
				tsdb.Interpolate = origInterpolate
				Reset(func() {
					tsdb.Interpolate = mockInterpolate
				})
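
				// The real Interpolate is restored above so that the $__timeGroup and
				// $__interval macros in the raw SQL below are actually expanded, which
				// lets the generated SQL be asserted on.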
				Convey("Should replace $__interval", func() {
					query := &tsdb.TsdbQuery{
						Queries: []*tsdb.Query{
							{
								DataSource: &models.DataSource{},
								Model: simplejson.NewFromAny(map[string]interface{}{
									"rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
									"format": "time_series",
								}),
								RefId: "A",
							},
						},
						TimeRange: &tsdb.TimeRange{
							From: fmt.Sprintf("%v", fromStart.Unix()*1000),
							To:   fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000),
						},
					}

					resp, err := endpoint.Query(context.Background(), nil, query)
					So(err, ShouldBeNil)
					queryResult := resp.Results["A"]
					So(queryResult.Error, ShouldBeNil)
					So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT floor(extract(epoch from time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1")
				})
			})

			Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
					TimeRange: &tsdb.TimeRange{
						From: fmt.Sprintf("%v", fromStart.Unix()*1000),
						To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				points := queryResult.Series[0].Points
				So(len(points), ShouldEqual, 7)

				dt := fromStart
				for i := 0; i < 2; i++ {
					aValue := points[i][0].Float64
					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
					So(aValue, ShouldEqual, 15)
					So(aTime, ShouldEqual, dt)
					dt = dt.Add(5 * time.Minute)
				}

				// check for NULL values inserted by fill
				So(points[2][0].Valid, ShouldBeFalse)
				So(points[3][0].Valid, ShouldBeFalse)

				// adjust for 10 minute gap between first and second set of points
				dt = dt.Add(10 * time.Minute)
				for i := 4; i < 6; i++ {
					aValue := points[i][0].Float64
					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
					So(aValue, ShouldEqual, 20)
					So(aTime, ShouldEqual, dt)
					dt = dt.Add(5 * time.Minute)
				}

				// check for NULL values inserted by fill
				So(points[6][0].Valid, ShouldBeFalse)
			})

			Convey("When doing a metric query using timeGroup with value fill enabled", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": "SELECT $__timeGroup(time, '5m', 1.5) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
					TimeRange: &tsdb.TimeRange{
						From: fmt.Sprintf("%v", fromStart.Unix()*1000),
						To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				points := queryResult.Series[0].Points
				So(points[3][0].Float64, ShouldEqual, 1.5)
			})
		})

		Convey("When doing a metric query using timeGroup with previous fill enabled", func() {
			query := &tsdb.TsdbQuery{
				Queries: []*tsdb.Query{
					{
						Model: simplejson.NewFromAny(map[string]interface{}{
							"rawSql": "SELECT $__timeGroup(time, '5m', previous), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
							"format": "time_series",
						}),
						RefId: "A",
					},
				},
				TimeRange: &tsdb.TimeRange{
					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
				},
			}

			resp, err := endpoint.Query(context.Background(), nil, query)
			So(err, ShouldBeNil)
			queryResult := resp.Results["A"]
			So(queryResult.Error, ShouldBeNil)

			points := queryResult.Series[0].Points
			So(points[2][0].Float64, ShouldEqual, 15.0)
			So(points[3][0].Float64, ShouldEqual, 15.0)
			So(points[6][0].Float64, ShouldEqual, 20.0)
		})

		Convey("Given a table with metrics having multiple values and measurements", func() {
			type metric_values struct {
				Time                time.Time
				TimeInt64           int64    `xorm:"bigint 'timeInt64' not null"`
				TimeInt64Nullable   *int64   `xorm:"bigint 'timeInt64Nullable' null"`
				TimeFloat64         float64  `xorm:"double 'timeFloat64' not null"`
				TimeFloat64Nullable *float64 `xorm:"double 'timeFloat64Nullable' null"`
				TimeInt32           int32    `xorm:"int(11) 'timeInt32' not null"`
				TimeInt32Nullable   *int32   `xorm:"int(11) 'timeInt32Nullable' null"`
				TimeFloat32         float32  `xorm:"double 'timeFloat32' not null"`
				TimeFloat32Nullable *float32 `xorm:"double 'timeFloat32Nullable' null"`
				Measurement         string
				ValueOne            int64 `xorm:"integer 'valueOne'"`
				ValueTwo            int64 `xorm:"integer 'valueTwo'"`
			}

			if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
				So(err, ShouldBeNil)
				sess.DropTable(metric_values{})
			}
			err := sess.CreateTable(metric_values{})
			So(err, ShouldBeNil)

			rand.Seed(time.Now().Unix())
			rnd := func(min, max int64) int64 {
				return rand.Int63n(max-min) + min
			}

			var tInitial time.Time

			series := []*metric_values{}
			for i, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
				if i == 0 {
					tInitial = t
				}
				tSeconds := t.Unix()
				tSecondsInt32 := int32(tSeconds)
				tSecondsFloat32 := float32(tSeconds)
				tMilliseconds := tSeconds * 1e3
				tMillisecondsFloat := float64(tMilliseconds)
				first := metric_values{
					Time:                t,
					TimeInt64:           tMilliseconds,
					TimeInt64Nullable:   &(tMilliseconds),
					TimeFloat64:         tMillisecondsFloat,
					TimeFloat64Nullable: &tMillisecondsFloat,
					TimeInt32:           tSecondsInt32,
					TimeInt32Nullable:   &tSecondsInt32,
					TimeFloat32:         tSecondsFloat32,
					TimeFloat32Nullable: &tSecondsFloat32,
					Measurement:         "Metric A",
					ValueOne:            rnd(0, 100),
					ValueTwo:            rnd(0, 100),
				}
				second := first
				second.Measurement = "Metric B"
				second.ValueOne = rnd(0, 100)
				second.ValueTwo = rnd(0, 100)

				series = append(series, &first)
				series = append(series, &second)
			}

			_, err = sess.InsertMulti(series)
			So(err, ShouldBeNil)

			Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeInt64" as time, "timeInt64" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeInt64Nullable" as time, "timeInt64Nullable" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeFloat64" as time, "timeFloat64" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeFloat64Nullable" as time, "timeFloat64Nullable" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeInt32" as time, "timeInt32" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeInt32Nullable" as time, "timeInt32Nullable" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
			})

			Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeFloat32" as time, "timeFloat32" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
			})

			Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "timeFloat32Nullable" as time, "timeFloat32Nullable" FROM metric_values ORDER BY time LIMIT 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 1)
				So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
			})

			Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT $__timeEpoch(time), measurement || ' - value one' as metric, "valueOne" FROM metric_values ORDER BY 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 2)
				So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
			})

			Convey("When doing a metric query with metric column and multiple value columns", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT $__timeEpoch(time), measurement as metric, "valueOne", "valueTwo" FROM metric_values ORDER BY 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 4)
				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
			})

			Convey("When doing a metric query grouping by time should return correct series", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT $__timeEpoch(time), "valueOne", "valueTwo" FROM metric_values ORDER BY 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 2)
				So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
				So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
			})

			Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
				tsdb.Interpolate = origInterpolate
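				// With the real Interpolate restored above, the $__timeFrom()/$__timeTo()
				// and $__unixEpochFrom()/$__unixEpochTo() macros expand to concrete
				// timestamps derived from the fake time range, so the generated SQL can
				// be asserted below.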
				query := &tsdb.TsdbQuery{
					TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
					Queries: []*tsdb.Query{
						{
							DataSource: &models.DataSource{JsonData: simplejson.New()},
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
								"format": "time_series",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
			})
		})

		Convey("Given a table with event data", func() {
			type event struct {
				TimeSec     int64
				Description string
				Tags        string
			}

			if exist, err := sess.IsTableExist(event{}); err != nil || exist {
				So(err, ShouldBeNil)
				sess.DropTable(event{})
			}
			err := sess.CreateTable(event{})
			So(err, ShouldBeNil)

			events := []*event{}
			for _, t := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), 60*time.Minute, 25*time.Minute) {
				events = append(events, &event{
					TimeSec:     t.Unix(),
					Description: "Someone deployed something",
					Tags:        "deploy",
				})
				events = append(events, &event{
					TimeSec:     t.Add(5 * time.Minute).Unix(),
					Description: "New support ticket registered",
					Tags:        "ticket",
				})
			}

			for _, e := range events {
				_, err = sess.Insert(e)
				So(err, ShouldBeNil)
			}

			Convey("When doing an annotation query of deploy events should return expected result", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
								"format": "table",
							}),
							RefId: "Deploys",
						},
					},
					TimeRange: &tsdb.TimeRange{
						From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
						To:   fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				queryResult := resp.Results["Deploys"]
				So(err, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
			})

			Convey("When doing an annotation query of ticket events should return expected result", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
								"format": "table",
							}),
							RefId: "Tickets",
						},
					},
					TimeRange: &tsdb.TimeRange{
						From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
						To:   fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				queryResult := resp.Results["Tickets"]
				So(err, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
			})

			Convey("When doing an annotation query with a time column in datetime format", func() {
				dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
				dtFormat := "2006-01-02 15:04:05.999999999"

				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": fmt.Sprintf(`SELECT
									CAST('%s' AS TIMESTAMP) as time,
									'message' as text,
									'tag1,tag2' as tags
								`, dt.Format(dtFormat)),
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be in milliseconds
				So(columns[0].(float64), ShouldEqual, float64(dt.UnixNano()/1e6))
			})

			Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
				dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)

				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": fmt.Sprintf(`SELECT
									%d as time,
									'message' as text,
									'tag1,tag2' as tags
								`, dt.Unix()),
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be in milliseconds
				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
			})

			Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
				dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)

				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": fmt.Sprintf(`SELECT
									cast(%d as bigint) as time,
									'message' as text,
									'tag1,tag2' as tags
								`, dt.Unix()),
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be in milliseconds
				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
			})

			Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
				dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)

				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": fmt.Sprintf(`SELECT
									%d as time,
									'message' as text,
									'tag1,tag2' as tags
								`, dt.Unix()*1000),
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be in milliseconds
				So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
			})

			Convey("When doing an annotation query with a time column holding a bigint null value should return nil", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT
									cast(null as bigint) as time,
									'message' as text,
									'tag1,tag2' as tags
								`,
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be nil since the time column is null
				So(columns[0], ShouldBeNil)
			})

			Convey("When doing an annotation query with a time column holding a timestamp null value should return nil", func() {
				query := &tsdb.TsdbQuery{
					Queries: []*tsdb.Query{
						{
							Model: simplejson.NewFromAny(map[string]interface{}{
								"rawSql": `SELECT
									cast(null as timestamp) as time,
									'message' as text,
									'tag1,tag2' as tags
								`,
								"format": "table",
							}),
							RefId: "A",
						},
					},
				}

				resp, err := endpoint.Query(context.Background(), nil, query)
				So(err, ShouldBeNil)
				queryResult := resp.Results["A"]
				So(queryResult.Error, ShouldBeNil)
				So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
				columns := queryResult.Tables[0].Rows[0]

				// Should be nil since the time column is null
				So(columns[0], ShouldBeNil)
			})
		})
	})
}
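
// InitPostgresTestDB connects to the grafanadstest database (see the docker-compose
// setup referenced at the top of this file) and returns a xorm engine configured to
// read and write timestamps in UTC.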
func InitPostgresTestDB(t *testing.T) *xorm.Engine {
	x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, strings.Replace(sqlutil.TestDB_Postgres.ConnStr, "dbname=grafanatest", "dbname=grafanadstest", 1))
	if err != nil {
		t.Fatalf("Failed to init postgres db %v", err)
	}

	x.DatabaseTZ = time.UTC
	x.TZLocation = time.UTC

	// x.ShowSQL()

	return x
}
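
// genTimeRangeByInterval returns timestamps starting at from and spaced interval
// apart, covering duration (the end of the range is exclusive). For example,
// from=13:00, duration=10m, interval=5m yields 13:00 and 13:05.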
func genTimeRangeByInterval(from time.Time, duration time.Duration, interval time.Duration) []time.Time {
	durationSec := int64(duration.Seconds())
	intervalSec := int64(interval.Seconds())
	timeRange := []time.Time{}

	for i := int64(0); i < durationSec; i += intervalSec {
		timeRange = append(timeRange, from)
		from = from.Add(time.Duration(int64(time.Second) * intervalSec))
	}

	return timeRange
}