package elasticsearch

import (
	"fmt"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/leibowitz/moment"
)
  11. type ElasticSearchQueryParser struct {
  12. }
  13. func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models.DataSource) (*Query, error) {
  14. //payload := bytes.Buffer{}
  15. //queryHeader := qp.getQueryHeader()
  16. timeField, err := model.Get("timeField").String()
  17. if err != nil {
  18. return nil, err
  19. }
  20. rawQuery := model.Get("query").MustString()
  21. bucketAggs, err := qp.parseBucketAggs(model)
  22. if err != nil {
  23. return nil, err
  24. }
  25. metrics, err := qp.parseMetrics(model)
  26. if err != nil {
  27. return nil, err
  28. }
  29. alias := model.Get("alias").MustString("")
  30. parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond)
  31. if err != nil {
  32. return nil, err
  33. }
  34. return &Query{timeField,
  35. rawQuery,
  36. bucketAggs,
  37. metrics,
  38. alias,
  39. parsedInterval}, nil
  40. }
  41. func (qp *ElasticSearchQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
  42. var err error
  43. var result []*BucketAgg
  44. for _, t := range model.Get("bucketAggs").MustArray() {
  45. aggJson := simplejson.NewFromAny(t)
  46. agg := &BucketAgg{}
  47. agg.Type, err = aggJson.Get("type").String()
  48. if err != nil {
  49. return nil, err
  50. }
  51. agg.ID, err = aggJson.Get("id").String()
  52. if err != nil {
  53. return nil, err
  54. }
  55. agg.Field = aggJson.Get("field").MustString()
  56. agg.Settings = simplejson.NewFromAny(aggJson.Get("settings").MustMap())
  57. result = append(result, agg)
  58. }
  59. return result, nil
  60. }
  61. func (qp *ElasticSearchQueryParser) parseMetrics(model *simplejson.Json) ([]*Metric, error) {
  62. var err error
  63. var result []*Metric
  64. for _, t := range model.Get("metrics").MustArray() {
  65. metricJson := simplejson.NewFromAny(t)
  66. metric := &Metric{}
  67. metric.Field = metricJson.Get("field").MustString()
  68. metric.Hide = metricJson.Get("hide").MustBool(false)
  69. metric.ID, err = metricJson.Get("id").String()
  70. if err != nil {
  71. return nil, err
  72. }
  73. metric.PipelineAggregate = metricJson.Get("pipelineAgg").MustString()
  74. metric.Settings = simplejson.NewFromAny(metricJson.Get("settings").MustMap())
  75. metric.Type, err = metricJson.Get("type").String()
  76. if err != nil {
  77. return nil, err
  78. }
  79. result = append(result, metric)
  80. }
  81. return result, nil
  82. }
  83. func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader {
  84. var header QueryHeader
  85. esVersion := dsInfo.JsonData.Get("esVersion").MustInt()
  86. searchType := "query_then_fetch"
  87. if esVersion < 5 {
  88. searchType = "count"
  89. }
  90. header.SearchType = searchType
  91. header.IgnoreUnavailable = true
  92. header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(), timeRange)
  93. if esVersion >= 56 {
  94. header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt()
  95. }
  96. return &header
  97. }
  98. func getIndexList(pattern string, interval string, timeRange *tsdb.TimeRange) string {
  99. if interval == "" {
  100. return pattern
  101. }
  102. var indexes []string
  103. indexParts := strings.Split(strings.TrimLeft(pattern, "["), "]")
  104. indexBase := indexParts[0]
  105. if len(indexParts) <= 1 {
  106. return pattern
  107. }
  108. indexDateFormat := indexParts[1]
  109. start := moment.NewMoment(timeRange.MustGetFrom())
  110. end := moment.NewMoment(timeRange.MustGetTo())
  111. indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
  112. for start.IsBefore(*end) {
  113. switch interval {
  114. case "Hourly":
  115. start = start.AddHours(1)
  116. case "Daily":
  117. start = start.AddDay()
  118. case "Weekly":
  119. start = start.AddWeeks(1)
  120. case "Monthly":
  121. start = start.AddMonths(1)
  122. case "Yearly":
  123. start = start.AddYears(1)
  124. }
  125. indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
  126. }
  127. return strings.Join(indexes, ",")
  128. }