time_series_query.go

package elasticsearch

import (
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
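
// timeSeriesQuery builds an Elasticsearch multisearch request from the tsdb
// query models and turns the responses into time series.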
type timeSeriesQuery struct {
	client             es.Client
	tsdbQuery          *tsdb.TsdbQuery
	intervalCalculator tsdb.IntervalCalculator
}
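
// newTimeSeriesQuery constructs a timeSeriesQuery. It is declared as a
// package variable rather than a plain function, presumably so tests can
// replace it with a stub.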
var newTimeSeriesQuery = func(client es.Client, tsdbQuery *tsdb.TsdbQuery, intervalCalculator tsdb.IntervalCalculator) *timeSeriesQuery {
	return &timeSeriesQuery{
		client:             client,
		tsdbQuery:          tsdbQuery,
		intervalCalculator: intervalCalculator,
	}
}
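
// execute parses the queries, builds one search per query (date range filter,
// optional query string filter, then bucket and metric aggregations), runs
// them as a single multisearch request, and parses the responses.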
func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
	result := &tsdb.Response{}
	result.Results = make(map[string]*tsdb.QueryResult)

	tsQueryParser := newTimeSeriesQueryParser()
	queries, err := tsQueryParser.parse(e.tsdbQuery)
	if err != nil {
		return nil, err
	}

	ms := e.client.MultiSearch()

	from := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetFromAsMsEpoch())
	to := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetToAsMsEpoch())

	for _, q := range queries {
		minInterval, err := e.client.GetMinInterval(q.Interval)
		if err != nil {
			return nil, err
		}
		interval := e.intervalCalculator.Calculate(e.tsdbQuery.TimeRange, minInterval)

		b := ms.Search()
		b.Size(0)
		filters := b.Query().Bool().Filter()
		filters.AddDateRangeFilter(e.client.GetTimeField(), to, from, es.DateFormatEpochMS)

		if q.RawQuery != "" {
			filters.AddQueryStringFilter(q.RawQuery, true)
		}

		if len(q.BucketAggs) == 0 {
			// Without bucket aggregations only a raw document query is valid.
			if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
				result.Results[q.RefID] = &tsdb.QueryResult{
					RefId:       q.RefID,
					Error:       fmt.Errorf("invalid query, missing metrics and aggregations"),
					ErrorString: "invalid query, missing metrics and aggregations",
				}
				continue
			}
			metric := q.Metrics[0]
			b.Size(metric.Settings.Get("size").MustInt(500))
			b.SortDesc("@timestamp", "boolean")
			b.AddDocValueField("@timestamp")
			continue
		}

		aggBuilder := b.Agg()

		// Each bucket aggregation is nested inside the previous one: the
		// builder returned by each add call becomes the parent of the next.
		for _, bucketAgg := range q.BucketAggs {
			switch bucketAgg.Type {
			case "date_histogram":
				aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to, interval)
			case "histogram":
				aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
			case "filters":
				aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
			case "terms":
				aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
			case "geohash_grid":
				aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
			}
		}

		for _, m := range q.Metrics {
			if m.Type == "count" {
				continue
			}

			if isPipelineAgg(m.Type) {
				// Pipeline aggregations are only added when they reference
				// another metric by its numeric ID.
				if _, err := strconv.Atoi(m.PipelineAggregate); err == nil {
					aggBuilder.Pipeline(m.ID, m.Type, m.PipelineAggregate, func(a *es.PipelineAggregation) {
						a.Settings = m.Settings.MustMap()
					})
				} else {
					continue
				}
			} else {
				aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
					a.Settings = m.Settings.MustMap()
				})
			}
		}
	}

	req, err := ms.Build()
	if err != nil {
		return nil, err
	}

	res, err := e.client.ExecuteMultisearch(req)
	if err != nil {
		return nil, err
	}

	rp := newResponseParser(res.Responses, queries)
	return rp.getTimeSeries()
}
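
// addDateHistogramAgg adds a date_histogram aggregation on the bucket agg's
// field, pinning extended_bounds to the query time range and resolving the
// $__interval, $__interval_ms and $interval variables against the calculated
// interval.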
func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFrom, timeTo string, interval tsdb.Interval) es.AggBuilder {
	aggBuilder.DateHistogram(bucketAgg.ID, bucketAgg.Field, func(a *es.DateHistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustString("auto")
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)
		a.ExtendedBounds = &es.ExtendedBounds{Min: timeFrom, Max: timeTo}
		a.Format = bucketAgg.Settings.Get("format").MustString(es.DateFormatEpochMS)

		if a.Interval == "auto" {
			a.Interval = "$__interval"
		}

		// $__interval_ms must be replaced before $__interval, since the
		// latter is a prefix of the former.
		a.Interval = strings.Replace(a.Interval, "$interval", interval.Text, -1)
		a.Interval = strings.Replace(a.Interval, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
		a.Interval = strings.Replace(a.Interval, "$__interval", interval.Text, -1)

		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}
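
// addHistogramAgg adds a numeric histogram aggregation with the configured
// interval (default 1000) and min_doc_count (default 0).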
func addHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.Histogram(bucketAgg.ID, bucketAgg.Field, func(a *es.HistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustInt(1000)
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)

		if missing, err := bucketAgg.Settings.Get("missing").Int(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}
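
// addTermsAgg adds a terms aggregation. The size setting may arrive as a
// number or a string; both are handled, falling back to 500. When orderBy
// refers to a metric by its numeric ID, that metric is nested inside the
// terms aggregation so Elasticsearch can order the buckets by it.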
func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*MetricAgg) es.AggBuilder {
	aggBuilder.Terms(bucketAgg.ID, bucketAgg.Field, func(a *es.TermsAggregation, b es.AggBuilder) {
		if size, err := bucketAgg.Settings.Get("size").Int(); err == nil {
			a.Size = size
		} else if size, err := bucketAgg.Settings.Get("size").String(); err == nil {
			a.Size, err = strconv.Atoi(size)
			if err != nil {
				a.Size = 500
			}
		} else {
			a.Size = 500
		}

		if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
			a.MinDocCount = &minDocCount
		}

		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		if orderBy, err := bucketAgg.Settings.Get("orderBy").String(); err == nil {
			a.Order[orderBy] = bucketAgg.Settings.Get("order").MustString("desc")

			if _, err := strconv.Atoi(orderBy); err == nil {
				for _, m := range metrics {
					if m.ID == orderBy {
						b.Metric(m.ID, m.Type, m.Field, nil)
						break
					}
				}
			}
		}

		aggBuilder = b
	})

	return aggBuilder
}
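
// addFiltersAgg adds a filters aggregation where each filter is a query
// string filter; a filter's label defaults to the query itself.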
func addFiltersAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	filters := make(map[string]interface{})

	for _, filter := range bucketAgg.Settings.Get("filters").MustArray() {
		json := simplejson.NewFromAny(filter)
		query := json.Get("query").MustString()
		label := json.Get("label").MustString()
		if label == "" {
			label = query
		}
		filters[label] = &es.QueryStringFilter{Query: query, AnalyzeWildcard: true}
	}

	if len(filters) > 0 {
		aggBuilder.Filters(bucketAgg.ID, func(a *es.FiltersAggregation, b es.AggBuilder) {
			a.Filters = filters
			aggBuilder = b
		})
	}

	return aggBuilder
}
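
// addGeoHashGridAgg adds a geohash_grid aggregation with the configured
// precision (default 3).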
func addGeoHashGridAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.GeoHashGrid(bucketAgg.ID, bucketAgg.Field, func(a *es.GeoHashGridAggregation, b es.AggBuilder) {
		a.Precision = bucketAgg.Settings.Get("precision").MustInt(3)
		aggBuilder = b
	})

	return aggBuilder
}
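
// timeSeriesQueryParser decodes the JSON query models sent with the tsdb
// query into Query values.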
type timeSeriesQueryParser struct{}

func newTimeSeriesQueryParser() *timeSeriesQueryParser {
	return &timeSeriesQueryParser{}
}
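
// parse extracts the time field, raw query, bucket aggregations, metrics,
// alias and interval from each query model. The time field is required; the
// interval comes from the per-query IntervalMs, formatted as a millisecond
// duration string.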
func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) {
	queries := make([]*Query, 0)
	for _, q := range tsdbQuery.Queries {
		model := q.Model
		timeField, err := model.Get("timeField").String()
		if err != nil {
			return nil, err
		}
		rawQuery := model.Get("query").MustString()
		bucketAggs, err := p.parseBucketAggs(model)
		if err != nil {
			return nil, err
		}
		metrics, err := p.parseMetrics(model)
		if err != nil {
			return nil, err
		}
		alias := model.Get("alias").MustString("")
		interval := strconv.FormatInt(q.IntervalMs, 10) + "ms"

		queries = append(queries, &Query{
			TimeField:  timeField,
			RawQuery:   rawQuery,
			BucketAggs: bucketAggs,
			Metrics:    metrics,
			Alias:      alias,
			Interval:   interval,
			RefID:      q.RefId,
		})
	}

	return queries, nil
}
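
// parseBucketAggs decodes the bucketAggs array; type and id are required,
// field and settings are optional.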
func (p *timeSeriesQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
	var err error
	var result []*BucketAgg
	for _, t := range model.Get("bucketAggs").MustArray() {
		aggJSON := simplejson.NewFromAny(t)
		agg := &BucketAgg{}

		agg.Type, err = aggJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		agg.ID, err = aggJSON.Get("id").String()
		if err != nil {
			return nil, err
		}

		agg.Field = aggJSON.Get("field").MustString()
		agg.Settings = simplejson.NewFromAny(aggJSON.Get("settings").MustMap())

		result = append(result, agg)
	}
	return result, nil
}
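
// parseMetrics decodes the metrics array; only type is required, everything
// else falls back to a zero value or an empty map.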
func (p *timeSeriesQueryParser) parseMetrics(model *simplejson.Json) ([]*MetricAgg, error) {
	var err error
	var result []*MetricAgg
	for _, t := range model.Get("metrics").MustArray() {
		metricJSON := simplejson.NewFromAny(t)
		metric := &MetricAgg{}

		metric.Field = metricJSON.Get("field").MustString()
		metric.Hide = metricJSON.Get("hide").MustBool(false)
		metric.ID = metricJSON.Get("id").MustString()
		metric.PipelineAggregate = metricJSON.Get("pipelineAgg").MustString()
		metric.Settings = simplejson.NewFromAny(metricJSON.Get("settings").MustMap())
		metric.Meta = simplejson.NewFromAny(metricJSON.Get("meta").MustMap())

		metric.Type, err = metricJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		result = append(result, metric)
	}
	return result, nil
}
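
// Usage sketch (hypothetical, not part of this file): a caller holding an
// es.Client and a *tsdb.TsdbQuery would run the query roughly like this; the
// interval calculator construction here is an assumption:
//
//	query := newTimeSeriesQuery(client, tsdbQuery, tsdb.NewIntervalCalculator(nil))
//	resp, err := query.execute()
//	if err != nil {
//		// handle the error
//	}
//	for refID, queryResult := range resp.Results {
//		// queryResult holds the parsed time series for refID
//	}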