response_parser.go

package elasticsearch

import (
	"errors"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
)

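// ElasticsearchResponseParser turns raw Elasticsearch search responses into
// Grafana time series. Responses[i] is expected to correspond to Targets[i].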
type ElasticsearchResponseParser struct {
	Responses []Response
	Targets   []*Query
}

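// getTimeSeries walks each response/target pair, extracts its series and
// collects them into a single query result.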
func (rp *ElasticsearchResponseParser) getTimeSeries() *tsdb.QueryResult {
	queryRes := tsdb.NewQueryResult()
	for i, res := range rp.Responses {
		target := rp.Targets[i]
		props := make(map[string]string)
		series := make([]*tsdb.TimeSeries, 0)
		rp.processBuckets(res.Aggregations, target, &series, props, 0)
		rp.nameSeries(&series, target)
		queryRes.Series = append(queryRes.Series, series...)
	}
	return queryRes
}

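// processBuckets recursively descends the bucket aggregations of a response,
// accumulating bucket keys in props until it reaches the innermost (date
// histogram) aggregation, where the metric values are extracted.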
func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *Query, series *[]*tsdb.TimeSeries, props map[string]string, depth int) error {
	var err error
	maxDepth := len(target.BucketAggs) - 1
	for aggId, v := range aggs {
		aggDef, _ := findAgg(target, aggId)
		esAgg := simplejson.NewFromAny(v)
		if aggDef == nil {
			continue
		}

		if depth == maxDepth {
			// The innermost bucket aggregation must be a date histogram; its
			// buckets carry the time/value pairs extracted by processMetrics.
			if aggDef.Type == "date_histogram" {
				err = rp.processMetrics(esAgg, target, series, props)
				if err != nil {
					return err
				}
			} else {
				return fmt.Errorf("unsupported aggregation type: %s", aggDef.Type)
			}
		} else {
			for i, b := range esAgg.Get("buckets").MustArray() {
				bucket := simplejson.NewFromAny(b)
				// Copy the accumulated props so sibling buckets do not share
				// (and overwrite) each other's labels.
				newProps := make(map[string]string, len(props)+1)
				for k, v := range props {
					newProps[k] = v
				}
				if key, err := bucket.Get("key").String(); err == nil {
					newProps[aggDef.Field] = key
				} else {
					newProps["filter"] = strconv.Itoa(i)
				}
				if key, err := bucket.Get("key_as_string").String(); err == nil {
					newProps[aggDef.Field] = key
				}
				if err := rp.processBuckets(bucket.MustMap(), target, series, newProps, depth+1); err != nil {
					return err
				}
			}
		}
	}
	return nil
}

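// processMetrics builds one time series per visible metric from the buckets of
// the innermost date histogram aggregation, tagging each series with the
// accumulated bucket props plus the metric type and field.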
func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *[]*tsdb.TimeSeries, props map[string]string) error {
	for _, metric := range target.Metrics {
		if metric.Hide {
			continue
		}

		switch metric.Type {
		case "count":
			newSeries := tsdb.TimeSeries{}
			for _, v := range esAgg.Get("buckets").MustArray() {
				bucket := simplejson.NewFromAny(v)
				value := castToNullFloat(bucket.Get("doc_count"))
				key := castToNullFloat(bucket.Get("key"))
				newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
			}
			// Copy props so each series owns its tag map.
			newSeries.Tags = make(map[string]string, len(props)+1)
			for k, v := range props {
				newSeries.Tags[k] = v
			}
			newSeries.Tags["metric"] = "count"
			*series = append(*series, &newSeries)
		case "percentiles":
			buckets := esAgg.Get("buckets").MustArray()
			if len(buckets) == 0 {
				break
			}

			firstBucket := simplejson.NewFromAny(buckets[0])
			percentiles := firstBucket.GetPath(metric.ID, "values").MustMap()
			for percentileName := range percentiles {
				newSeries := tsdb.TimeSeries{}
				newSeries.Tags = make(map[string]string, len(props)+2)
				for k, v := range props {
					newSeries.Tags[k] = v
				}
				newSeries.Tags["metric"] = "p" + percentileName
				newSeries.Tags["field"] = metric.Field
				for _, v := range buckets {
					bucket := simplejson.NewFromAny(v)
					value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName))
					key := castToNullFloat(bucket.Get("key"))
					newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
				}
				*series = append(*series, &newSeries)
			}
		default:
			newSeries := tsdb.TimeSeries{}
			newSeries.Tags = make(map[string]string, len(props)+2)
			for k, v := range props {
				newSeries.Tags[k] = v
			}
			newSeries.Tags["metric"] = metric.Type
			newSeries.Tags["field"] = metric.Field
			for _, v := range esAgg.Get("buckets").MustArray() {
				bucket := simplejson.NewFromAny(v)
				key := castToNullFloat(bucket.Get("key"))
				valueObj, err := bucket.Get(metric.ID).Map()
				if err != nil {
					continue
				}

				var value null.Float
				// Prefer the normalized value when present, otherwise fall back
				// to the plain metric value.
				if _, ok := valueObj["normalized_value"]; ok {
					value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value"))
				} else {
					value = castToNullFloat(bucket.GetPath(metric.ID, "value"))
				}
				newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
			}
			*series = append(*series, &newSeries)
		}
	}
	return nil
}

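// nameSeries assigns a display name to every series, taking into account how
// many distinct metric types are present in the result.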
func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries, target *Query) {
	set := make(map[string]string)
	for _, v := range *seriesList {
		if metricType, exists := v.Tags["metric"]; exists {
			if _, ok := set[metricType]; !ok {
				set[metricType] = ""
			}
		}
	}

	metricTypeCount := len(set)
	for _, series := range *seriesList {
		series.Name = rp.getSeriesName(series, target, metricTypeCount)
	}
}

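// getSeriesName derives a series name from the target alias (with {{...}}
// placeholder substitution) or, when no alias is set, from the series tags,
// metric type and field.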
func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string {
	metricType := series.Tags["metric"]
	metricName := rp.getMetricName(metricType)
	delete(series.Tags, "metric")

	field := ""
	if v, ok := series.Tags["field"]; ok {
		field = v
		delete(series.Tags, "field")
	}

	if target.Alias != "" {
		// Substitute {{...}} placeholders in the alias: {{term field}}, any tag
		// name, and the special groups "metric" and "field".
		seriesName := target.Alias
		re := regexp.MustCompile(`{{([\s\S]+?)}}`)
		for _, match := range re.FindAllString(target.Alias, -1) {
			group := match[2 : len(match)-2]

			if strings.HasPrefix(group, "term ") {
				// {{term fieldname}} resolves to the tag recorded for that field.
				if term, ok := series.Tags[strings.TrimPrefix(group, "term ")]; ok {
					seriesName = strings.Replace(seriesName, match, term, 1)
				}
			}
			if v, ok := series.Tags[group]; ok {
				seriesName = strings.Replace(seriesName, match, v, 1)
			}

			switch group {
			case "metric":
				seriesName = strings.Replace(seriesName, match, metricName, 1)
			case "field":
				seriesName = strings.Replace(seriesName, match, field, 1)
			}
		}
		return seriesName
	}

	// TODO: if field and pipelineAgg
	if field != "" && isPipelineAgg(metricType) {
		found := false
		for _, metric := range target.Metrics {
			if metric.ID == field {
				metricName += " " + describeMetric(metric.Type, field)
				found = true
			}
		}
		if !found {
			metricName = "Unset"
		}
	} else if field != "" {
		metricName += " " + field
	}

	if len(series.Tags) == 0 {
		return metricName
	}

	name := ""
	for _, v := range series.Tags {
		name += v + " "
	}

	if metricTypeCount == 1 {
		return strings.TrimSpace(name)
	}

	return strings.TrimSpace(name) + " " + metricName
}

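// getMetricName maps a metric type to its display text, falling back to the
// raw type when it is neither a known metric aggregation nor an extended stat.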
func (rp *ElasticsearchResponseParser) getMetricName(metric string) string {
	if text, ok := metricAggType[metric]; ok {
		return text
	}

	if text, ok := extendedStats[metric]; ok {
		return text
	}

	return metric
}

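// castToNullFloat converts a JSON value (number or numeric string) into a
// nullable float, returning an invalid float when the value cannot be parsed.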
func castToNullFloat(j *simplejson.Json) null.Float {
	f, err := j.Float64()
	if err == nil {
		return null.FloatFrom(f)
	}

	if s, err := j.String(); err == nil {
		// Some values arrive as numeric strings; only treat them as valid
		// points when they parse cleanly.
		if v, perr := strconv.ParseFloat(s, 64); perr == nil {
			return null.FloatFrom(v)
		}
	}

	return null.NewFloat(0, false)
}

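// findAgg looks up the bucket aggregation definition with the given id on the
// target query.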
func findAgg(target *Query, aggId string) (*BucketAgg, error) {
	for _, v := range target.BucketAggs {
		if aggId == v.ID {
			return v, nil
		}
	}
	return nil, errors.New("could not find bucket aggregation with id: " + aggId)
}