- package elasticsearch
- import (
- "encoding/json"
- "fmt"
- "testing"
- "time"
- "github.com/grafana/grafana/pkg/components/null"
- "github.com/grafana/grafana/pkg/components/simplejson"
- "github.com/grafana/grafana/pkg/tsdb"
- es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
- . "github.com/smartystreets/goconvey/convey"
- )
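- // TestResponseParser feeds canned Elasticsearch multi-search responses through the
- // response parser and checks the resulting series and tables per test case.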
- func TestResponseParser(t *testing.T) {
- Convey("Elasticsearch response parser test", t, func() {
- Convey("Simple query and count", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }],
- "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "doc_count": 10,
- "key": 1000
- },
- {
- "doc_count": 15,
- "key": 2000
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 1)
- series := queryRes.Series[0]
- So(series.Name, ShouldEqual, "Count")
- So(series.Points, ShouldHaveLength, 2)
- So(series.Points[0][0].Float64, ShouldEqual, 10)
- So(series.Points[0][1].Float64, ShouldEqual, 1000)
- So(series.Points[1][0].Float64, ShouldEqual, 15)
- So(series.Points[1][1].Float64, ShouldEqual, 2000)
- })
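- // Count plus avg under the same date_histogram should yield two series: "Count" and "Average value".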
- Convey("Simple query count & avg aggregation", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }, {"type": "avg", "field": "value", "id": "2" }],
- "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "3": {
- "buckets": [
- {
- "2": { "value": 88 },
- "doc_count": 10,
- "key": 1000
- },
- {
- "2": { "value": 99 },
- "doc_count": 15,
- "key": 2000
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 2)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "Count")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 10)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 15)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "Average value")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 88)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 99)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- })
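- // A terms bucket over host with a nested date_histogram should yield one series per term (server1, server2).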
- Convey("Single group by query one metric", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }],
- "bucketAggs": [
- { "type": "terms", "field": "host", "id": "2" },
- { "type": "date_histogram", "field": "@timestamp", "id": "3" }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "3": {
- "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
- },
- "doc_count": 4,
- "key": "server1"
- },
- {
- "3": {
- "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
- },
- "doc_count": 10,
- "key": "server2"
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 2)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "server1")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "server2")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- })
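- // Two metrics under a terms bucket should yield a series per term/metric combination (four in total).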
- Convey("Single group by query two metrics", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }, { "type": "avg", "field": "@value", "id": "4" }],
- "bucketAggs": [
- { "type": "terms", "field": "host", "id": "2" },
- { "type": "date_histogram", "field": "@timestamp", "id": "3" }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "3": {
- "buckets": [
- { "4": { "value": 10 }, "doc_count": 1, "key": 1000 },
- { "4": { "value": 12 }, "doc_count": 3, "key": 2000 }
- ]
- },
- "doc_count": 4,
- "key": "server1"
- },
- {
- "3": {
- "buckets": [
- { "4": { "value": 20 }, "doc_count": 1, "key": 1000 },
- { "4": { "value": 32 }, "doc_count": 3, "key": 2000 }
- ]
- },
- "doc_count": 10,
- "key": "server2"
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 4)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "server1 Count")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "server1 Average @value")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 10)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 12)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- seriesThree := queryRes.Series[2]
- So(seriesThree.Name, ShouldEqual, "server2 Count")
- So(seriesThree.Points, ShouldHaveLength, 2)
- So(seriesThree.Points[0][0].Float64, ShouldEqual, 1)
- So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesThree.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
- seriesFour := queryRes.Series[3]
- So(seriesFour.Name, ShouldEqual, "server2 Average @value")
- So(seriesFour.Points, ShouldHaveLength, 2)
- So(seriesFour.Points[0][0].Float64, ShouldEqual, 20)
- So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesFour.Points[1][0].Float64, ShouldEqual, 32)
- So(seriesFour.Points[1][1].Float64, ShouldEqual, 2000)
- })
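- // A percentiles metric should yield one series per requested percentile (p75, p90).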
- Convey("With percentiles", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "percentiles", "settings": { "percents": [75, 90] }, "id": "1" }],
- "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "3": {
- "buckets": [
- {
- "1": { "values": { "75": 3.3, "90": 5.5 } },
- "doc_count": 10,
- "key": 1000
- },
- {
- "1": { "values": { "75": 2.3, "90": 4.5 } },
- "doc_count": 15,
- "key": 2000
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 2)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "p75")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 3.3)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 2.3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "p90")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 5.5)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 4.5)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- })
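- // Extended stats with max and std deviation bounds enabled should yield three series per term: Max, Std Dev Lower and Std Dev Upper.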
- Convey("With extended stats", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "extended_stats", "meta": { "max": true, "std_deviation_bounds_upper": true, "std_deviation_bounds_lower": true }, "id": "1" }],
- "bucketAggs": [
- { "type": "terms", "field": "host", "id": "3" },
- { "type": "date_histogram", "field": "@timestamp", "id": "4" }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "3": {
- "buckets": [
- {
- "key": "server1",
- "4": {
- "buckets": [
- {
- "1": {
- "max": 10.2,
- "min": 5.5,
- "std_deviation_bounds": { "upper": 3, "lower": -2 }
- },
- "doc_count": 10,
- "key": 1000
- }
- ]
- }
- },
- {
- "key": "server2",
- "4": {
- "buckets": [
- {
- "1": {
- "max": 15.5,
- "min": 3.4,
- "std_deviation_bounds": { "upper": 4, "lower": -1 }
- },
- "doc_count": 10,
- "key": 1000
- }
- ]
- }
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 6)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "server1 Max")
- So(seriesOne.Points, ShouldHaveLength, 1)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 10.2)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "server1 Std Dev Lower")
- So(seriesTwo.Points, ShouldHaveLength, 1)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, -2)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- seriesThree := queryRes.Series[2]
- So(seriesThree.Name, ShouldEqual, "server1 Std Dev Upper")
- So(seriesThree.Points, ShouldHaveLength, 1)
- So(seriesThree.Points[0][0].Float64, ShouldEqual, 3)
- So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
- seriesFour := queryRes.Series[3]
- So(seriesFour.Name, ShouldEqual, "server2 Max")
- So(seriesFour.Points, ShouldHaveLength, 1)
- So(seriesFour.Points[0][0].Float64, ShouldEqual, 15.5)
- So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000)
- seriesFive := queryRes.Series[4]
- So(seriesFive.Name, ShouldEqual, "server2 Std Dev Lower")
- So(seriesFive.Points, ShouldHaveLength, 1)
- So(seriesFive.Points[0][0].Float64, ShouldEqual, -1)
- So(seriesFive.Points[0][1].Float64, ShouldEqual, 1000)
- seriesSix := queryRes.Series[5]
- So(seriesSix.Name, ShouldEqual, "server2 Std Dev Upper")
- So(seriesSix.Points, ShouldHaveLength, 1)
- So(seriesSix.Points[0][0].Float64, ShouldEqual, 4)
- So(seriesSix.Points[0][1].Float64, ShouldEqual, 1000)
- })
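- // Alias patterns: {{term <field>}}, {{metric}} and {{<field>}} are interpolated, unknown placeholders are left untouched, and numeric term keys are stringified.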
- Convey("Single group by with alias pattern", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "alias": "{{term @host}} {{metric}} and {{not_exist}} {{@host}}",
- "metrics": [{ "type": "count", "id": "1" }],
- "bucketAggs": [
- { "type": "terms", "field": "@host", "id": "2" },
- { "type": "date_histogram", "field": "@timestamp", "id": "3" }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "3": {
- "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
- },
- "doc_count": 4,
- "key": "server1"
- },
- {
- "3": {
- "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
- },
- "doc_count": 10,
- "key": "server2"
- },
- {
- "3": {
- "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
- },
- "doc_count": 10,
- "key": 0
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 3)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "server1 Count and {{not_exist}} server1")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "server2 Count and {{not_exist}} server2")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- seriesThree := queryRes.Series[2]
- So(seriesThree.Name, ShouldEqual, "0 Count and {{not_exist}} 0")
- So(seriesThree.Points, ShouldHaveLength, 2)
- So(seriesThree.Points[0][0].Float64, ShouldEqual, 2)
- So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesThree.Points[1][0].Float64, ShouldEqual, 8)
- So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
- })
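- // A plain histogram bucket agg is returned as a table keyed by the bucket field rather than as time series.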
- Convey("Histogram response", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }],
- "bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "3" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "3": {
- "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }, { "doc_count": 2, "key": 3000 }]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Tables, ShouldHaveLength, 1)
- rows := queryRes.Tables[0].Rows
- So(rows, ShouldHaveLength, 3)
- cols := queryRes.Tables[0].Columns
- So(cols, ShouldHaveLength, 2)
- So(cols[0].Text, ShouldEqual, "bytes")
- So(cols[1].Text, ShouldEqual, "Count")
- So(rows[0][0].(null.Float).Float64, ShouldEqual, 1000)
- So(rows[0][1].(null.Float).Float64, ShouldEqual, 1)
- So(rows[1][0].(null.Float).Float64, ShouldEqual, 2000)
- So(rows[1][1].(null.Float).Float64, ShouldEqual, 3)
- So(rows[2][0].(null.Float).Float64, ShouldEqual, 3000)
- So(rows[2][1].(null.Float).Float64, ShouldEqual, 2)
- })
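- // A filters bucket agg should yield one series per filter, named after the filter query.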
- Convey("With two filters agg", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "count", "id": "1" }],
- "bucketAggs": [
- {
- "type": "filters",
- "id": "2",
- "settings": {
- "filters": [{ "query": "@metric:cpu" }, { "query": "@metric:logins.count" }]
- }
- },
- { "type": "date_histogram", "field": "@timestamp", "id": "3" }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": {
- "@metric:cpu": {
- "3": {
- "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
- }
- },
- "@metric:logins.count": {
- "3": {
- "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
- }
- }
- }
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 2)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "@metric:cpu")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "@metric:logins.count")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- })
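- // trimEdges=1 should drop the first and last date_histogram buckets, leaving only the middle point.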
- Convey("With dropfirst and last aggregation", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
- "bucketAggs": [
- {
- "type": "date_histogram",
- "field": "@timestamp",
- "id": "2",
- "settings": { "trimEdges": 1 }
- }
- ]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "1": { "value": 1000 },
- "key": 1,
- "doc_count": 369
- },
- {
- "1": { "value": 2000 },
- "key": 2,
- "doc_count": 200
- },
- {
- "1": { "value": 2000 },
- "key": 3,
- "doc_count": 200
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 2)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "Average")
- So(seriesOne.Points, ShouldHaveLength, 1)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 2000)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 2)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "Count")
- So(seriesTwo.Points, ShouldHaveLength, 1)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 200)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 2)
- })
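- // Without a date_histogram, results come back as a table: one row per term, with a column for the term key and one per metric.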
- Convey("No group by time", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
- "bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "1": { "value": 1000 },
- "key": "server-1",
- "doc_count": 369
- },
- {
- "1": { "value": 2000 },
- "key": "server-2",
- "doc_count": 200
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Tables, ShouldHaveLength, 1)
- rows := queryRes.Tables[0].Rows
- So(rows, ShouldHaveLength, 2)
- cols := queryRes.Tables[0].Columns
- So(cols, ShouldHaveLength, 3)
- So(cols[0].Text, ShouldEqual, "host")
- So(cols[1].Text, ShouldEqual, "Average")
- So(cols[2].Text, ShouldEqual, "Count")
- So(rows[0][0].(string), ShouldEqual, "server-1")
- So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
- So(rows[0][2].(null.Float).Float64, ShouldEqual, 369)
- So(rows[1][0].(string), ShouldEqual, "server-2")
- So(rows[1][1].(null.Float).Float64, ShouldEqual, 2000)
- So(rows[1][2].(null.Float).Float64, ShouldEqual, 200)
- })
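- // Two avg metrics on different fields should produce distinct table columns ("Average test", "Average test2").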
- Convey("Multiple metrics of same type", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [{ "type": "avg", "field": "test", "id": "1" }, { "type": "avg", "field": "test2", "id": "2" }],
- "bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "1": { "value": 1000 },
- "2": { "value": 3000 },
- "key": "server-1",
- "doc_count": 369
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Tables, ShouldHaveLength, 1)
- rows := queryRes.Tables[0].Rows
- So(rows, ShouldHaveLength, 1)
- cols := queryRes.Tables[0].Columns
- So(cols, ShouldHaveLength, 3)
- So(cols[0].Text, ShouldEqual, "host")
- So(cols[1].Text, ShouldEqual, "Average test")
- So(cols[2].Text, ShouldEqual, "Average test2")
- So(rows[0][0].(string), ShouldEqual, "server-1")
- So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
- So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000)
- })
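- // A bucket_script pipeline metric should yield its own series, named from the script with pipeline variable names replaced by their source metric names.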
- Convey("With bucket_script", func() {
- targets := map[string]string{
- "A": `{
- "timeField": "@timestamp",
- "metrics": [
- { "id": "1", "type": "sum", "field": "@value" },
- { "id": "3", "type": "max", "field": "@value" },
- {
- "id": "4",
- "field": "select field",
- "pipelineVariables": [{ "name": "var1", "pipelineAgg": "1" }, { "name": "var2", "pipelineAgg": "3" }],
- "settings": { "script": "params.var1 * params.var2" },
- "type": "bucket_script"
- }
- ],
- "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }]
- }`,
- }
- response := `{
- "responses": [
- {
- "aggregations": {
- "2": {
- "buckets": [
- {
- "1": { "value": 2 },
- "3": { "value": 3 },
- "4": { "value": 6 },
- "doc_count": 60,
- "key": 1000
- },
- {
- "1": { "value": 3 },
- "3": { "value": 4 },
- "4": { "value": 12 },
- "doc_count": 60,
- "key": 2000
- }
- ]
- }
- }
- }
- ]
- }`
- rp, err := newResponseParserForTest(targets, response)
- So(err, ShouldBeNil)
- result, err := rp.getTimeSeries()
- So(err, ShouldBeNil)
- So(result.Results, ShouldHaveLength, 1)
- queryRes := result.Results["A"]
- So(queryRes, ShouldNotBeNil)
- So(queryRes.Series, ShouldHaveLength, 3)
- seriesOne := queryRes.Series[0]
- So(seriesOne.Name, ShouldEqual, "Sum @value")
- So(seriesOne.Points, ShouldHaveLength, 2)
- So(seriesOne.Points[0][0].Float64, ShouldEqual, 2)
- So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
- So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
- seriesTwo := queryRes.Series[1]
- So(seriesTwo.Name, ShouldEqual, "Max @value")
- So(seriesTwo.Points, ShouldHaveLength, 2)
- So(seriesTwo.Points[0][0].Float64, ShouldEqual, 3)
- So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesTwo.Points[1][0].Float64, ShouldEqual, 4)
- So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
- seriesThree := queryRes.Series[2]
- So(seriesThree.Name, ShouldEqual, "Sum @value * Max @value")
- So(seriesThree.Points, ShouldHaveLength, 2)
- So(seriesThree.Points[0][0].Float64, ShouldEqual, 6)
- So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
- So(seriesThree.Points[1][0].Float64, ShouldEqual, 12)
- So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
- })
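- // Raw documents query: disabled case, kept commented out below.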
- // Convey("Raw documents query", func() {
- // targets := map[string]string{
- // "A": `{
- // "timeField": "@timestamp",
- // "metrics": [{ "type": "raw_document", "id": "1" }]
- // }`,
- // }
- // response := `{
- // "responses": [
- // {
- // "hits": {
- // "total": 100,
- // "hits": [
- // {
- // "_id": "1",
- // "_type": "type",
- // "_index": "index",
- // "_source": { "sourceProp": "asd" },
- // "fields": { "fieldProp": "field" }
- // },
- // {
- // "_source": { "sourceProp": "asd2" },
- // "fields": { "fieldProp": "field2" }
- // }
- // ]
- // }
- // }
- // ]
- // }`
- // rp, err := newResponseParserForTest(targets, response)
- // So(err, ShouldBeNil)
- // result, err := rp.getTimeSeries()
- // So(err, ShouldBeNil)
- // So(result.Results, ShouldHaveLength, 1)
- // queryRes := result.Results["A"]
- // So(queryRes, ShouldNotBeNil)
- // So(queryRes.Tables, ShouldHaveLength, 1)
- // rows := queryRes.Tables[0].Rows
- // So(rows, ShouldHaveLength, 1)
- // cols := queryRes.Tables[0].Columns
- // So(cols, ShouldHaveLength, 3)
- // So(cols[0].Text, ShouldEqual, "host")
- // So(cols[1].Text, ShouldEqual, "Average test")
- // So(cols[2].Text, ShouldEqual, "Average test2")
- // So(rows[0][0].(string), ShouldEqual, "server-1")
- // So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
- // So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000)
- // })
- })
- }
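- // newResponseParserForTest parses the given query models (keyed by refId), unmarshals the
- // raw multi-search response body, and wires both into a responseParser over a fixed time range.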
- func newResponseParserForTest(tsdbQueries map[string]string, responseBody string) (*responseParser, error) {
- from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
- to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
- fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
- toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
- tsdbQuery := &tsdb.TsdbQuery{
- Queries: []*tsdb.Query{},
- TimeRange: tsdb.NewTimeRange(fromStr, toStr),
- }
- for refID, tsdbQueryBody := range tsdbQueries {
- tsdbQueryJSON, err := simplejson.NewJson([]byte(tsdbQueryBody))
- if err != nil {
- return nil, err
- }
- tsdbQuery.Queries = append(tsdbQuery.Queries, &tsdb.Query{
- Model: tsdbQueryJSON,
- RefId: refID,
- })
- }
- var response es.MultiSearchResponse
- err := json.Unmarshal([]byte(responseBody), &response)
- if err != nil {
- return nil, err
- }
- tsQueryParser := newTimeSeriesQueryParser()
- queries, err := tsQueryParser.parse(tsdbQuery)
- if err != nil {
- return nil, err
- }
- return newResponseParser(response.Responses, queries, nil), nil
- }
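- // Minimal usage sketch (comment only; mirrors the "Simple query and count" case above):
- //
- //	targets := map[string]string{"A": `{ "timeField": "@timestamp", "metrics": [{ "type": "count", "id": "1" }], "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }] }`}
- //	rp, err := newResponseParserForTest(targets, `{ "responses": [{ "aggregations": { "2": { "buckets": [{ "doc_count": 10, "key": 1000 }] } } }] }`)
- //	// err is nil for well-formed input; rp.getTimeSeries() then returns one "Count" series under refId "A".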