Browse Source

Merge branch 'master' of git://github.com/grafana/grafana into WPH95-feature/add_es_alerting

Marcus Efraimsson 7 years ago
parent
commit
99257eb048

+ 19 - 1
Gopkg.lock

@@ -111,6 +111,18 @@
  ]
  revision = "270bc3860bb94dd3a3ffd047377d746c5e276726"

+[[projects]]
+  branch = "master"
+  name = "github.com/facebookgo/inject"
+  packages = ["."]
+  revision = "cc1aa653e50f6a9893bcaef89e673e5b24e1e97b"
+
+[[projects]]
+  branch = "master"
+  name = "github.com/facebookgo/structtag"
+  packages = ["."]
+  revision = "217e25fb96916cc60332e399c9aa63f5c422ceed"
+
 [[projects]]
  name = "github.com/fatih/color"
  packages = ["."]
@@ -296,6 +308,12 @@
  packages = ["."]
  revision = "7cafcd837844e784b526369c9bce262804aebc60"

+[[projects]]
+  branch = "master"
+  name = "github.com/leibowitz/moment"
+  packages = ["."]
+  revision = "8548108dcca204a1110b99e5fec966817499fe84"
+
 [[projects]]
  branch = "master"
  name = "github.com/lib/pq"
@@ -649,6 +667,6 @@
 [solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
-  inputs-digest = "2bd5b309496d57e2189a1cc28f5c1c41398c19729ba0cf53c8cbb17ea3f706b5"
+  inputs-digest = "4039f122ac5dd045948e003eb7a74c8864df1759b25147f1b2e2e8ad7a8414d6"
  solver-name = "gps-cdcl"
  solver-version = 1

+ 4 - 0
Gopkg.toml

@@ -199,3 +199,7 @@ ignored = [
 [[constraint]]
  name = "github.com/denisenkom/go-mssqldb"
  revision = "270bc3860bb94dd3a3ffd047377d746c5e276726"
+
+[[constraint]]
+  branch = "master"
+  name = "github.com/leibowitz/moment"

+ 1 - 0
pkg/cmd/grafana-server/main.go

@@ -22,6 +22,7 @@ import (
 	_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
 	_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
 	_ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
+	_ "github.com/grafana/grafana/pkg/tsdb/elasticsearch"
 	_ "github.com/grafana/grafana/pkg/tsdb/graphite"
 	_ "github.com/grafana/grafana/pkg/tsdb/influxdb"
 	_ "github.com/grafana/grafana/pkg/tsdb/mysql"
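
The new blank import is what activates the backend: Go runs the imported package's init() for its side effects, and pkg/tsdb/elasticsearch registers itself with the tsdb package from its init() (see elasticsearch.go below). A minimal, self-contained sketch of this registration pattern (names here are illustrative, not Grafana's actual API):

package main

import "fmt"

// Each driver package registers a factory from init(); the main package
// imports the driver with a blank identifier purely for that side effect.
var endpoints = map[string]string{}

func register(name, desc string) { endpoints[name] = desc }

func init() {
	// Grafana's equivalent is tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor)
	// called from the elasticsearch package's own init().
	register("elasticsearch", "Elasticsearch query endpoint")
}

func main() {
	fmt.Println(endpoints) // map[elasticsearch:Elasticsearch query endpoint]
}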

+ 150 - 0
pkg/tsdb/elasticsearch/elasticsearch.go

@@ -0,0 +1,150 @@
+package elasticsearch
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"net/http"
+	"net/url"
+	"path"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/tsdb"
+	"golang.org/x/net/context/ctxhttp"
+)
+
+type ElasticsearchExecutor struct {
+	QueryParser *ElasticSearchQueryParser
+	Transport   *http.Transport
+}
+
+var (
+	glog               log.Logger
+	intervalCalculator tsdb.IntervalCalculator
+)
+
+func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+	transport, err := dsInfo.GetHttpTransport()
+	if err != nil {
+		return nil, err
+	}
+
+	return &ElasticsearchExecutor{
+		QueryParser: &ElasticSearchQueryParser{},
+		Transport:   transport,
+	}, nil
+}
+
+func init() {
+	glog = log.New("tsdb.elasticsearch")
+	tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor)
+	intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Millisecond * 1})
+}
+
+func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+	result := &tsdb.Response{}
+	result.Results = make(map[string]*tsdb.QueryResult)
+
+	queries, err := e.getQuery(dsInfo, tsdbQuery)
+	if err != nil {
+		return nil, err
+	}
+
+	buff := bytes.Buffer{}
+	for _, q := range queries {
+		s, err := q.Build(tsdbQuery, dsInfo)
+		if err != nil {
+			return nil, err
+		}
+		buff.WriteString(s)
+	}
+	payload := buff.String()
+
+	if setting.Env == setting.DEV {
+		glog.Debug("Elasticsearch payload", "raw payload", payload)
+	}
+
+	req, err := e.createRequest(dsInfo, payload)
+	if err != nil {
+		return nil, err
+	}
+
+	httpClient, err := dsInfo.GetHttpClient()
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := ctxhttp.Do(ctx, httpClient, req)
+	if err != nil {
+		return nil, err
+	}
+
+	if resp.StatusCode/100 != 2 {
+		return nil, fmt.Errorf("elasticsearch returned invalid status code: %v", resp.Status)
+	}
+
+	var responses Responses
+	dec := json.NewDecoder(resp.Body)
+	defer resp.Body.Close()
+	dec.UseNumber()
+	err = dec.Decode(&responses)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, res := range responses.Responses {
+		if res.Err != nil {
+			return nil, errors.New(res.getErrMsg())
+		}
+	}
+	responseParser := ElasticsearchResponseParser{responses.Responses, queries}
+	queryRes := responseParser.getTimeSeries()
+	result.Results["A"] = queryRes
+	return result, nil
+}
+
+func (e *ElasticsearchExecutor) getQuery(dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) {
+	if len(tsdbQuery.Queries) == 0 {
+		return nil, fmt.Errorf("query request contains no queries")
+	}
+
+	queries := make([]*Query, 0)
+	for _, v := range tsdbQuery.Queries {
+		query, err := e.QueryParser.Parse(v.Model, dsInfo)
+		if err != nil {
+			return nil, err
+		}
+		queries = append(queries, query)
+	}
+	return queries, nil
+}
+
+func (e *ElasticsearchExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) {
+	u, err := url.Parse(dsInfo.Url)
+	if err != nil {
+		return nil, err
+	}
+	u.Path = path.Join(u.Path, "_msearch")
+	req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(query))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Set("User-Agent", "Grafana")
+	req.Header.Set("Content-Type", "application/json")
+
+	if dsInfo.BasicAuth {
+		req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
+	}
+
+	if !dsInfo.BasicAuth && dsInfo.User != "" {
+		req.SetBasicAuth(dsInfo.User, dsInfo.Password)
+	}
+
+	glog.Debug("Elasticsearch request", "url", req.URL.String())
+	glog.Debug("Elasticsearch request", "body", query)
+	return req, nil
+}
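
createRequest posts the queries to Elasticsearch's multi-search endpoint (_msearch), which expects newline-delimited JSON: for each query, one header line selecting the indexes followed by one body line with the query and aggregations, each terminated by "\n" (Query.Build in query.go below emits exactly that pair). A standalone sketch of the wire format, with illustrative values:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	header := map[string]interface{}{
		"search_type":        "query_then_fetch",
		"ignore_unavailable": true,
		"index":              "logstash-2018.03.28",
	}
	body := map[string]interface{}{
		"size":  0,
		"query": map[string]interface{}{"match_all": map[string]interface{}{}},
	}

	var buf bytes.Buffer
	for _, line := range []interface{}{header, body} {
		b, _ := json.Marshal(line)
		buf.Write(b)
		buf.WriteString("\n") // _msearch requires every line, including the last, to end in \n
	}
	fmt.Print(buf.String())
}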

+ 121 - 0
pkg/tsdb/elasticsearch/elasticsearch_test.go

@@ -0,0 +1,121 @@
+package elasticsearch
+
+import (
+	"time"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+)
+
+var avgWithMovingAvg = Query{
+	TimeField: "timestamp",
+	RawQuery:  "(test:query) AND (name:sample)",
+	Interval:  time.Millisecond,
+	BucketAggs: []*BucketAgg{{
+		Field: "timestamp",
+		ID:    "2",
+		Type:  "date_histogram",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"interval":      "auto",
+			"min_doc_count": 0,
+			"trimEdges":     0,
+		}),
+	}},
+	Metrics: []*Metric{{
+		Field: "value",
+		ID:    "1",
+		Type:  "avg",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"script": map[string]string{
+				"inline": "_value * 2",
+			},
+		}),
+	}, {
+		Field:             "1",
+		ID:                "3",
+		Type:              "moving_avg",
+		PipelineAggregate: "1",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"minimize": false,
+			"model":    "simple",
+			"window":   5,
+		}),
+	}},
+}
+
+var wildcardsAndQuotes = Query{
+	TimeField: "timestamp",
+	RawQuery:  "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"",
+	Interval:  time.Millisecond,
+	BucketAggs: []*BucketAgg{{
+		Field:    "timestamp",
+		ID:       "2",
+		Type:     "date_histogram",
+		Settings: simplejson.NewFromAny(map[string]interface{}{}),
+	}},
+	Metrics: []*Metric{{
+		Field:    "value",
+		ID:       "1",
+		Type:     "sum",
+		Settings: simplejson.NewFromAny(map[string]interface{}{}),
+	}},
+}
+
+var termAggs = Query{
+	TimeField: "timestamp",
+	RawQuery:  "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)",
+	Interval:  time.Millisecond,
+	BucketAggs: []*BucketAgg{{
+		Field: "name_raw",
+		ID:    "4",
+		Type:  "terms",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"order":   "desc",
+			"orderBy": "_term",
+			"size":    "10",
+		}),
+	}, {
+		Field: "timestamp",
+		ID:    "2",
+		Type:  "date_histogram",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"interval":      "auto",
+			"min_doc_count": 0,
+			"trimEdges":     0,
+		}),
+	}},
+	Metrics: []*Metric{{
+		Field:    "value",
+		ID:       "1",
+		Type:     "sum",
+		Settings: simplejson.NewFromAny(map[string]interface{}{}),
+	}},
+}
+
+var filtersAggs = Query{
+	TimeField: "time",
+	RawQuery:  "*",
+	Interval:  time.Millisecond,
+	BucketAggs: []*BucketAgg{{
+		ID:   "3",
+		Type: "filters",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"filters": []interface{}{
+				map[string]interface{}{"label": "hello", "query": "host:\"67.65.185.232\""},
+			},
+		}),
+	}, {
+		Field: "timestamp",
+		ID:    "2",
+		Type:  "date_histogram",
+		Settings: simplejson.NewFromAny(map[string]interface{}{
+			"interval":      "auto",
+			"min_doc_count": 0,
+			"trimEdges":     0,
+		}),
+	}},
+	Metrics: []*Metric{{
+		Field:             "bytesSent",
+		ID:                "1",
+		Type:              "count",
+		PipelineAggregate: "select metric",
+		Settings:          simplejson.NewFromAny(map[string]interface{}{}),
+	}},
+}

+ 153 - 0
pkg/tsdb/elasticsearch/model_parser.go

@@ -0,0 +1,153 @@
+package elasticsearch
+
+import (
+	"fmt"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/leibowitz/moment"
+)
+
+type ElasticSearchQueryParser struct {
+}
+
+func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models.DataSource) (*Query, error) {
+	timeField, err := model.Get("timeField").String()
+	if err != nil {
+		return nil, err
+	}
+	rawQuery := model.Get("query").MustString()
+	bucketAggs, err := qp.parseBucketAggs(model)
+	if err != nil {
+		return nil, err
+	}
+	metrics, err := qp.parseMetrics(model)
+	if err != nil {
+		return nil, err
+	}
+	alias := model.Get("alias").MustString("")
+	parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond)
+	if err != nil {
+		return nil, err
+	}
+
+	return &Query{
+		TimeField:  timeField,
+		RawQuery:   rawQuery,
+		BucketAggs: bucketAggs,
+		Metrics:    metrics,
+		Alias:      alias,
+		Interval:   parsedInterval,
+	}, nil
+}
+
+func (qp *ElasticSearchQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
+	var err error
+	var result []*BucketAgg
+	for _, t := range model.Get("bucketAggs").MustArray() {
+		aggJson := simplejson.NewFromAny(t)
+		agg := &BucketAgg{}
+
+		agg.Type, err = aggJson.Get("type").String()
+		if err != nil {
+			return nil, err
+		}
+
+		agg.ID, err = aggJson.Get("id").String()
+		if err != nil {
+			return nil, err
+		}
+
+		agg.Field = aggJson.Get("field").MustString()
+		agg.Settings = simplejson.NewFromAny(aggJson.Get("settings").MustMap())
+
+		result = append(result, agg)
+	}
+	return result, nil
+}
+
+func (qp *ElasticSearchQueryParser) parseMetrics(model *simplejson.Json) ([]*Metric, error) {
+	var err error
+	var result []*Metric
+	for _, t := range model.Get("metrics").MustArray() {
+		metricJson := simplejson.NewFromAny(t)
+		metric := &Metric{}
+
+		metric.Field = metricJson.Get("field").MustString()
+		metric.Hide = metricJson.Get("hide").MustBool(false)
+		metric.ID, err = metricJson.Get("id").String()
+		if err != nil {
+			return nil, err
+		}
+
+		metric.PipelineAggregate = metricJson.Get("pipelineAgg").MustString()
+		metric.Settings = simplejson.NewFromAny(metricJson.Get("settings").MustMap())
+
+		metric.Type, err = metricJson.Get("type").String()
+		if err != nil {
+			return nil, err
+		}
+
+		result = append(result, metric)
+	}
+	return result, nil
+}
+
+func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader {
+	var header QueryHeader
+	esVersion := dsInfo.JsonData.Get("esVersion").MustInt()
+
+	searchType := "query_then_fetch"
+	if esVersion < 5 {
+		searchType = "count"
+	}
+	header.SearchType = searchType
+	header.IgnoreUnavailable = true
+	header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(), timeRange)
+
+	if esVersion >= 56 {
+		header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt()
+	}
+	return &header
+}
+
+func getIndexList(pattern string, interval string, timeRange *tsdb.TimeRange) string {
+	if interval == "" {
+		return pattern
+	}
+
+	var indexes []string
+	indexParts := strings.Split(strings.TrimLeft(pattern, "["), "]")
+	indexBase := indexParts[0]
+	if len(indexParts) <= 1 {
+		return pattern
+	}
+
+	indexDateFormat := indexParts[1]
+
+	start := moment.NewMoment(timeRange.MustGetFrom())
+	end := moment.NewMoment(timeRange.MustGetTo())
+
+	indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
+	for start.IsBefore(*end) {
+		switch interval {
+		case "Hourly":
+			start = start.AddHours(1)
+
+		case "Daily":
+			start = start.AddDay()
+
+		case "Weekly":
+			start = start.AddWeeks(1)
+
+		case "Monthly":
+			start = start.AddMonths(1)
+
+		case "Yearly":
+			start = start.AddYears(1)
+		}
+		indexes = append(indexes, fmt.Sprintf("%s%s", indexBase, start.Format(indexDateFormat)))
+	}
+	return strings.Join(indexes, ",")
+}
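
getIndexList expands a bracketed index pattern such as [logstash-]YYYY.MM.DD across the query's time range, stepping by the configured interval and formatting each step with the moment package. A self-contained sketch of the daily case using only the standard library (Go layout strings stand in for the moment-style YYYY.MM.DD tokens):

package main

import (
	"fmt"
	"strings"
	"time"
)

func main() {
	pattern := "[logstash-]2006.01.02"
	parts := strings.Split(strings.TrimLeft(pattern, "["), "]")
	base, layout := parts[0], parts[1]

	// 48 hours starting at unixtime 1500000000, matching the test below
	start := time.Date(2017, 7, 14, 2, 40, 0, 0, time.UTC)
	end := start.Add(48 * time.Hour)

	var indexes []string
	for t := start; !t.After(end); t = t.AddDate(0, 0, 1) {
		indexes = append(indexes, base+t.Format(layout))
	}
	fmt.Println(strings.Join(indexes, ","))
	// logstash-2017.07.14,logstash-2017.07.15,logstash-2017.07.16
}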

+ 49 - 0
pkg/tsdb/elasticsearch/model_parser_test.go

@@ -0,0 +1,49 @@
+package elasticsearch
+
+import (
+	"strconv"
+	"strings"
+	"testing"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func makeTime(hour int) string {
+	//unixtime 1500000000 == 2017-07-14T02:40:00+00:00
+	return strconv.Itoa((1500000000 + hour*60*60) * 1000)
+}
+
+func getIndexListByTime(pattern string, interval string, hour int) string {
+	timeRange := &tsdb.TimeRange{
+		From: makeTime(0),
+		To:   makeTime(hour),
+	}
+	return getIndexList(pattern, interval, timeRange)
+}
+
+func TestElasticsearchGetIndexList(t *testing.T) {
+	Convey("Test Elasticsearch getIndex ", t, func() {
+
+		Convey("Parse Interval Formats", func() {
+			So(getIndexListByTime("[logstash-]YYYY.MM.DD", "Daily", 48),
+				ShouldEqual, "logstash-2017.07.14,logstash-2017.07.15,logstash-2017.07.16")
+
+			So(len(strings.Split(getIndexListByTime("[logstash-]YYYY.MM.DD.HH", "Hourly", 3), ",")),
+				ShouldEqual, 4)
+
+			So(getIndexListByTime("[logstash-]YYYY.W", "Weekly", 100),
+				ShouldEqual, "logstash-2017.28,logstash-2017.29")
+
+			So(getIndexListByTime("[logstash-]YYYY.MM", "Monthly", 700),
+				ShouldEqual, "logstash-2017.07,logstash-2017.08")
+
+			So(getIndexListByTime("[logstash-]YYYY", "Yearly", 10000),
+				ShouldEqual, "logstash-2017,logstash-2018,logstash-2019")
+		})
+
+		Convey("No Interval", func() {
+			index := getIndexListByTime("logstash-test", "", 1)
+			So(index, ShouldEqual, "logstash-test")
+		})
+	})
+}

+ 133 - 0
pkg/tsdb/elasticsearch/models.go

@@ -0,0 +1,133 @@
+package elasticsearch
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+)
+
+type BucketAgg struct {
+	Field    string           `json:"field"`
+	ID       string           `json:"id"`
+	Settings *simplejson.Json `json:"settings"`
+	Type     string           `json:"type"`
+}
+
+type Metric struct {
+	Field             string           `json:"field"`
+	Hide              bool             `json:"hide"`
+	ID                string           `json:"id"`
+	PipelineAggregate string           `json:"pipelineAgg"`
+	Settings          *simplejson.Json `json:"settings"`
+	Type              string           `json:"type"`
+}
+
+type QueryHeader struct {
+	SearchType                 string      `json:"search_type"`
+	IgnoreUnavailable          bool        `json:"ignore_unavailable"`
+	Index                      interface{} `json:"index"`
+	MaxConcurrentShardRequests int         `json:"max_concurrent_shard_requests,omitempty"`
+}
+
+func (q *QueryHeader) String() string {
+	r, _ := json.Marshal(q)
+	return string(r)
+}
+
+type Request struct {
+	Query map[string]interface{} `json:"query"`
+	Aggs  Aggs                   `json:"aggs"`
+	Size  int                    `json:"size"`
+}
+
+type Aggs map[string]interface{}
+
+type HistogramAgg struct {
+	Interval    string `json:"interval,omitempty"`
+	Field       string `json:"field"`
+	MinDocCount int    `json:"min_doc_count"`
+	Missing     string `json:"missing,omitempty"`
+}
+
+type DateHistogramAgg struct {
+	HistogramAgg
+	ExtendedBounds ExtendedBounds `json:"extended_bounds"`
+	Format         string         `json:"format"`
+}
+
+type FiltersAgg struct {
+	Filters map[string]interface{} `json:"filters"`
+}
+
+type TermsAgg struct {
+	Field   string                 `json:"field"`
+	Size    int                    `json:"size"`
+	Order   map[string]interface{} `json:"order"`
+	Missing string                 `json:"missing,omitempty"`
+}
+
+type TermsAggWrap struct {
+	Terms TermsAgg `json:"terms"`
+	Aggs  Aggs     `json:"aggs"`
+}
+
+type ExtendedBounds struct {
+	Min string `json:"min"`
+	Max string `json:"max"`
+}
+
+type RangeFilter struct {
+	Range map[string]RangeFilterSetting `json:"range"`
+}
+type RangeFilterSetting struct {
+	Gte    string `json:"gte"`
+	Lte    string `json:"lte"`
+	Format string `json:"format"`
+}
+
+func newRangeFilter(field string, rangeFilterSetting RangeFilterSetting) *RangeFilter {
+	return &RangeFilter{
+		map[string]RangeFilterSetting{field: rangeFilterSetting}}
+}
+
+type QueryStringFilter struct {
+	QueryString QueryStringFilterSetting `json:"query_string"`
+}
+type QueryStringFilterSetting struct {
+	AnalyzeWildcard bool   `json:"analyze_wildcard"`
+	Query           string `json:"query"`
+}
+
+func newQueryStringFilter(analyzeWildcard bool, query string) *QueryStringFilter {
+	return &QueryStringFilter{QueryStringFilterSetting{AnalyzeWildcard: analyzeWildcard, Query: query}}
+}
+
+type BoolQuery struct {
+	Filter []interface{} `json:"filter"`
+}
+
+type Responses struct {
+	Responses []Response `json:"responses"`
+}
+
+type Response struct {
+	Status       int                    `json:"status"`
+	Err          map[string]interface{} `json:"error"`
+	Aggregations map[string]interface{} `json:"aggregations"`
+}
+
+func (r *Response) getErrMsg() string {
+	var msg bytes.Buffer
+	errJson := simplejson.NewFromAny(r.Err)
+	errType, err := errJson.Get("type").String()
+	if err == nil {
+		msg.WriteString(fmt.Sprintf("type: %s ", errType))
+	}
+
+	reason, err := errJson.Get("reason").String()
+	if err == nil {
+		msg.WriteString(fmt.Sprintf("reason: %s", reason))
+	}
+	return msg.String()
+}
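
The filter types above serialize to the exact fragments Elasticsearch expects; the $timeFrom/$timeTo placeholders survive json.Marshal as plain strings and are only substituted later, by renderTemplate in query.go. A small sketch of the range-filter output (the type is redeclared locally so the snippet runs on its own):

package main

import (
	"encoding/json"
	"fmt"
)

type rangeFilterSetting struct {
	Gte    string `json:"gte"`
	Lte    string `json:"lte"`
	Format string `json:"format"`
}

func main() {
	filter := map[string]map[string]rangeFilterSetting{
		"range": {"timestamp": {Gte: "$timeFrom", Lte: "$timeTo", Format: "epoch_millis"}},
	}
	out, _ := json.Marshal(filter)
	fmt.Println(string(out))
	// {"range":{"timestamp":{"gte":"$timeFrom","lte":"$timeTo","format":"epoch_millis"}}}
}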

+ 237 - 0
pkg/tsdb/elasticsearch/query.go

@@ -0,0 +1,237 @@
+package elasticsearch
+
+import (
+	"bytes"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+var rangeFilterSetting = RangeFilterSetting{
+	Gte:    "$timeFrom",
+	Lte:    "$timeTo",
+	Format: "epoch_millis",
+}
+
+type Query struct {
+	TimeField  string       `json:"timeField"`
+	RawQuery   string       `json:"query"`
+	BucketAggs []*BucketAgg `json:"bucketAggs"`
+	Metrics    []*Metric    `json:"metrics"`
+	Alias      string       `json:"alias"`
+	Interval   time.Duration
+}
+
+func (q *Query) Build(queryContext *tsdb.TsdbQuery, dsInfo *models.DataSource) (string, error) {
+	var req Request
+	payload := bytes.Buffer{}
+
+	req.Size = 0
+	q.renderReqQuery(&req)
+
+	// alerting cannot handle raw document queries
+	if q.isRawDocumentQuery() {
+		return "", errors.New("alerting does not support Raw_Document queries")
+	}
+	}
+
+	err := q.parseAggs(&req)
+	if err != nil {
+		return "", err
+	}
+
+	reqBytes, err := json.Marshal(req)
+	if err != nil {
+		return "", err
+	}
+
+	reqHeader := getRequestHeader(queryContext.TimeRange, dsInfo)
+	payload.WriteString(reqHeader.String() + "\n")
+	payload.WriteString(string(reqBytes) + "\n")
+	return q.renderTemplate(payload.String(), queryContext)
+}
+
+func (q *Query) isRawDocumentQuery() bool {
+	if len(q.BucketAggs) == 0 {
+		if len(q.Metrics) > 0 {
+			metric := simplejson.NewFromAny(q.Metrics[0])
+			if metric.Get("type").MustString("") == "raw_document" {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+func (q *Query) renderReqQuery(req *Request) {
+	req.Query = make(map[string]interface{})
+	boolQuery := BoolQuery{}
+	boolQuery.Filter = append(boolQuery.Filter, newRangeFilter(q.TimeField, rangeFilterSetting))
+	boolQuery.Filter = append(boolQuery.Filter, newQueryStringFilter(true, q.RawQuery))
+	req.Query["bool"] = boolQuery
+}
+
+func (q *Query) parseAggs(req *Request) error {
+	aggs := make(Aggs)
+	nestedAggs := aggs
+	for _, agg := range q.BucketAggs {
+		esAggs := make(Aggs)
+		switch agg.Type {
+		case "date_histogram":
+			esAggs["date_histogram"] = q.getDateHistogramAgg(agg)
+		case "histogram":
+			esAggs["histogram"] = q.getHistogramAgg(agg)
+		case "filters":
+			esAggs["filters"] = q.getFilters(agg)
+		case "terms":
+			terms := q.getTerms(agg)
+			esAggs["terms"] = terms.Terms
+			esAggs["aggs"] = terms.Aggs
+		case "geohash_grid":
+			return errors.New("alerting does not support Geo_Hash_Grid")
+		}
+
+		if _, ok := nestedAggs["aggs"]; !ok {
+			nestedAggs["aggs"] = make(Aggs)
+		}
+
+		if aggs, ok := (nestedAggs["aggs"]).(Aggs); ok {
+			aggs[agg.ID] = esAggs
+		}
+		nestedAggs = esAggs
+
+	}
+	nestedAggs["aggs"] = make(Aggs)
+
+	for _, metric := range q.Metrics {
+		subAgg := make(Aggs)
+
+		if metric.Type == "count" {
+			continue
+		}
+		settings := metric.Settings.MustMap(make(map[string]interface{}))
+
+		if isPipelineAgg(metric.Type) {
+			if _, err := strconv.Atoi(metric.PipelineAggregate); err == nil {
+				settings["buckets_path"] = metric.PipelineAggregate
+			} else {
+				continue
+			}
+
+		} else {
+			settings["field"] = metric.Field
+		}
+
+		subAgg[metric.Type] = settings
+		nestedAggs["aggs"].(Aggs)[metric.ID] = subAgg
+	}
+	req.Aggs = aggs["aggs"].(Aggs)
+	return nil
+}
+
+func (q *Query) getDateHistogramAgg(target *BucketAgg) *DateHistogramAgg {
+	agg := &DateHistogramAgg{}
+	interval, err := target.Settings.Get("interval").String()
+	if err == nil {
+		agg.Interval = interval
+	}
+	agg.Field = q.TimeField
+	agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0)
+	agg.ExtendedBounds = ExtendedBounds{"$timeFrom", "$timeTo"}
+	agg.Format = "epoch_millis"
+
+	if agg.Interval == "auto" {
+		agg.Interval = "$__interval"
+	}
+
+	missing, err := target.Settings.Get("missing").String()
+	if err == nil {
+		agg.Missing = missing
+	}
+	return agg
+}
+
+func (q *Query) getHistogramAgg(target *BucketAgg) *HistogramAgg {
+	agg := &HistogramAgg{}
+	interval, err := target.Settings.Get("interval").String()
+	if err == nil {
+		agg.Interval = interval
+	}
+
+	if target.Field != "" {
+		agg.Field = target.Field
+	}
+	agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0)
+	missing, err := target.Settings.Get("missing").String()
+	if err == nil {
+		agg.Missing = missing
+	}
+	return agg
+}
+
+func (q *Query) getFilters(target *BucketAgg) *FiltersAgg {
+	agg := &FiltersAgg{}
+	agg.Filters = map[string]interface{}{}
+	for _, filter := range target.Settings.Get("filters").MustArray() {
+		filterJson := simplejson.NewFromAny(filter)
+		query := filterJson.Get("query").MustString("")
+		label := filterJson.Get("label").MustString("")
+		if label == "" {
+			label = query
+		}
+
+		agg.Filters[label] = newQueryStringFilter(true, query)
+	}
+	return agg
+}
+
+func (q *Query) getTerms(target *BucketAgg) *TermsAggWrap {
+	agg := &TermsAggWrap{Aggs: make(Aggs)}
+	agg.Terms.Field = target.Field
+	if len(target.Settings.MustMap()) == 0 {
+		return agg
+	}
+	sizeStr := target.Settings.Get("size").MustString("")
+	size, err := strconv.Atoi(sizeStr)
+	if err != nil {
+		size = 500
+	}
+	agg.Terms.Size = size
+	orderBy, err := target.Settings.Get("orderBy").String()
+	if err == nil {
+		agg.Terms.Order = make(map[string]interface{})
+		agg.Terms.Order[orderBy] = target.Settings.Get("order").MustString("")
+		// a numeric orderBy references a metric id, which needs its own sub-aggregation for ES to sort on
+		if _, err := strconv.Atoi(orderBy); err == nil {
+			for _, metricI := range q.Metrics {
+				metric := simplejson.NewFromAny(metricI)
+				metricId := metric.Get("id").MustString()
+				if metricId == orderBy {
+					subAggs := make(Aggs)
+					metricField := metric.Get("field").MustString()
+					metricType := metric.Get("type").MustString()
+					subAggs[metricType] = map[string]string{"field": metricField}
+					agg.Aggs = make(Aggs)
+					agg.Aggs[metricId] = subAggs
+					break
+				}
+			}
+		}
+	}
+
+	missing, err := target.Settings.Get("missing").String()
+	if err == nil {
+		agg.Terms.Missing = missing
+	}
+
+	return agg
+}
+
+func (q *Query) renderTemplate(payload string, queryContext *tsdb.TsdbQuery) (string, error) {
+	timeRange := queryContext.TimeRange
+	interval := intervalCalculator.Calculate(timeRange, q.Interval)
+	payload = strings.Replace(payload, "$timeFrom", fmt.Sprintf("%d", timeRange.GetFromAsMsEpoch()), -1)
+	payload = strings.Replace(payload, "$timeTo", fmt.Sprintf("%d", timeRange.GetToAsMsEpoch()), -1)
+	payload = strings.Replace(payload, "$interval", interval.Text, -1)
+	payload = strings.Replace(payload, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
+	payload = strings.Replace(payload, "$__interval", interval.Text, -1)
+	return payload, nil
+}
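
Note the substitution order in renderTemplate: $__interval_ms must be replaced before $__interval, because the shorter token is a prefix of the longer one. A quick demonstration of what the wrong order would do:

package main

import (
	"fmt"
	"strings"
)

func main() {
	payload := `{"interval":"$__interval","ms":"$__interval_ms"}`

	wrong := strings.Replace(payload, "$__interval", "200ms", -1)
	fmt.Println(wrong) // {"interval":"200ms","ms":"200ms_ms"} - stray "_ms" left behind

	right := strings.Replace(payload, "$__interval_ms", "200", -1)
	right = strings.Replace(right, "$__interval", "200ms", -1)
	fmt.Println(right) // {"interval":"200ms","ms":"200"}
}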

+ 43 - 0
pkg/tsdb/elasticsearch/query_def.go

@@ -0,0 +1,43 @@
+package elasticsearch
+
+var metricAggType = map[string]string{
+	"count":          "Count",
+	"avg":            "Average",
+	"sum":            "Sum",
+	"max":            "Max",
+	"min":            "Min",
+	"extended_stats": "Extended Stats",
+	"percentiles":    "Percentiles",
+	"cardinality":    "Unique Count",
+	"moving_avg":     "Moving Average",
+	"derivative":     "Derivative",
+	"raw_document":   "Raw Document",
+}
+
+var extendedStats = map[string]string{
+	"avg":                        "Avg",
+	"min":                        "Min",
+	"max":                        "Max",
+	"sum":                        "Sum",
+	"count":                      "Count",
+	"std_deviation":              "Std Dev",
+	"std_deviation_bounds_upper": "Std Dev Upper",
+	"std_deviation_bounds_lower": "Std Dev Lower",
+}
+
+var pipelineOptions = map[string]string{
+	"moving_avg": "moving_avg",
+	"derivative": "derivative",
+}
+
+func isPipelineAgg(metricType string) bool {
+	if _, ok := pipelineOptions[metricType]; ok {
+		return true
+	}
+	return false
+}
+
+func describeMetric(metricType, field string) string {
+	text := metricAggType[metricType]
+	return text + " " + field
+}
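
These lookup tables drive series naming in response_parser.go: for a pipeline metric, getSeriesName appends describeMetric(referencedType, field) to the pipeline's own label, which is how the response parser test later in this diff arrives at "Moving Average Average 1". A small sketch of that composition (tables trimmed to the two entries involved):

package main

import "fmt"

var metricAggType = map[string]string{
	"avg":        "Average",
	"moving_avg": "Moving Average",
}

func describeMetric(metricType, field string) string {
	return metricAggType[metricType] + " " + field
}

func main() {
	// a moving_avg whose Field is "1", referencing metric id "1" (an avg)
	name := metricAggType["moving_avg"] + " " + describeMetric("avg", "1")
	fmt.Println(name) // Moving Average Average 1
}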

+ 256 - 0
pkg/tsdb/elasticsearch/query_test.go

@@ -0,0 +1,256 @@
+package elasticsearch
+
+import (
+	"encoding/json"
+	"fmt"
+	"reflect"
+	"strconv"
+	"strings"
+	"testing"
+
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func testElasticSearchResponse(query Query, expectedElasticSearchRequestJSON string) {
+	var queryExpectedJSONInterface, queryJSONInterface interface{}
+	jsonData, _ := simplejson.NewJson([]byte(`{"esVersion":2}`))
+	dsInfo := &models.DataSource{
+		Database: "grafana-test",
+		JsonData: jsonData,
+	}
+
+	testTimeRange := tsdb.NewTimeRange("5m", "now")
+
+	s, err := query.Build(&tsdb.TsdbQuery{TimeRange: testTimeRange}, dsInfo)
+	So(err, ShouldBeNil)
+	queryJSON := strings.Split(s, "\n")[1]
+	err = json.Unmarshal([]byte(queryJSON), &queryJSONInterface)
+	So(err, ShouldBeNil)
+
+	expectedElasticSearchRequestJSON = strings.Replace(
+		expectedElasticSearchRequestJSON,
+		"<FROM_TIMESTAMP>",
+		strconv.FormatInt(testTimeRange.GetFromAsMsEpoch(), 10),
+		-1,
+	)
+
+	expectedElasticSearchRequestJSON = strings.Replace(
+		expectedElasticSearchRequestJSON,
+		"<TO_TIMESTAMP>",
+		strconv.FormatInt(testTimeRange.GetToAsMsEpoch(), 10),
+		-1,
+	)
+
+	err = json.Unmarshal([]byte(expectedElasticSearchRequestJSON), &queryExpectedJSONInterface)
+	So(err, ShouldBeNil)
+
+	result := reflect.DeepEqual(queryExpectedJSONInterface, queryJSONInterface)
+	if !result {
+		fmt.Printf("ERROR: %s \n !=  \n %s", expectedElasticSearchRequestJSON, queryJSON)
+	}
+	So(result, ShouldBeTrue)
+}
+
+func TestElasticSearchQueryBuilder(t *testing.T) {
+	Convey("Elasticsearch QueryBuilder query testing", t, func() {
+		Convey("Build test average metric with moving average", func() {
+			var expectedElasticsearchQueryJSON = `
+			{
+				"size": 0,
+				"query": {
+					"bool": {
+					  "filter": [
+						{
+						  "range": {
+							"timestamp": {
+							  "gte": "<FROM_TIMESTAMP>",
+							  "lte": "<TO_TIMESTAMP>",
+							  "format": "epoch_millis"
+							}
+						  }
+						},
+						{
+						  "query_string": {
+							"analyze_wildcard": true,
+							"query": "(test:query) AND (name:sample)"
+						  }
+						}
+					  ]
+					}
+				},
+				"aggs": {
+					"2": {
+						"date_histogram": {
+							"interval": "200ms",
+							"field": "timestamp",
+							"min_doc_count": 0,
+							"extended_bounds": {
+								"min": "<FROM_TIMESTAMP>",
+								"max": "<TO_TIMESTAMP>"
+							},
+							"format": "epoch_millis"
+						},
+						"aggs": {
+							"1": {
+								"avg": {
+									"field": "value",
+									"script": {
+										"inline": "_value * 2"
+									}
+								}
+							},
+							"3": {
+								"moving_avg": {
+									"buckets_path": "1",
+									"window": 5,
+									"model": "simple",
+									"minimize": false
+								}
+							}
+						}
+					}
+				}
+			}`
+
+			testElasticSearchResponse(avgWithMovingAvg, expectedElasticsearchQueryJSON)
+		})
+		Convey("Test Wildcards and Quotes", func() {
+			expectedElasticsearchQueryJSON := `
+			{
+				"size": 0,
+				"query": {
+					"bool": {
+						"filter": [
+							{
+						  		"range": {
+									"timestamp": {
+								  	"gte": "<FROM_TIMESTAMP>",
+									"lte": "<TO_TIMESTAMP>",
+									"format": "epoch_millis"
+									}
+						  		}
+							},
+							{
+						  		"query_string": {
+								"analyze_wildcard": true,
+								"query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\""
+						  	}
+						}
+					  ]
+					}
+				},
+				"aggs": {
+					"2": {
+						"aggs": {
+							"1": {
+								"sum": {
+									"field": "value"
+								}
+							}
+						},
+						"date_histogram": {
+							"extended_bounds": {
+								"max": "<TO_TIMESTAMP>",
+								"min": "<FROM_TIMESTAMP>"
+							},
+							"field": "timestamp",
+							"format": "epoch_millis",
+							"min_doc_count": 0
+						}
+					}
+				}
+			}`
+
+			testElasticSearchResponse(wildcardsAndQuotes, expectedElasticsearchQueryJSON)
+		})
+		Convey("Test Term Aggregates", func() {
+			expectedElasticsearchQueryJSON := `
+			{
+				"size": 0,
+				"query": {
+					"bool": {
+						"filter": [
+							{
+						  		"range": {
+									"timestamp": {
+								  	"gte": "<FROM_TIMESTAMP>",
+									"lte": "<TO_TIMESTAMP>",
+									"format": "epoch_millis"
+									}
+						  		}
+							},
+							{
+						  		"query_string": {
+								"analyze_wildcard": true,
+								"query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)"
+						  	}
+						}
+					  ]
+					}
+				},
+				"aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"<TO_TIMESTAMP>","min":"<FROM_TIMESTAMP>"},"field":"timestamp","format":"epoch_millis","interval":"200ms","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}}
+			}`
+
+			testElasticSearchResponse(termAggs, expectedElasticsearchQueryJSON)
+		})
+		Convey("Test Filters Aggregates", func() {
+			expectedElasticsearchQueryJSON := `{
+			  "size": 0,
+			  "query": {
+				"bool": {
+				  "filter": [
+					{
+					  "range": {
+						"time": {
+						  "gte":  "<FROM_TIMESTAMP>",
+						  "lte":  "<TO_TIMESTAMP>",
+						  "format": "epoch_millis"
+						}
+					  }
+					},
+					{
+					  "query_string": {
+						"analyze_wildcard": true,
+						"query": "*"
+					  }
+					}
+				  ]
+				}
+			  },
+			  "aggs": {
+				"3": {
+				  "filters": {
+					"filters": {
+					  "hello": {
+						"query_string": {
+						  "query": "host:\"67.65.185.232\"",
+						  "analyze_wildcard": true
+						}
+					  }
+					}
+				  },
+				  "aggs": {
+					"2": {
+					  "date_histogram": {
+						"interval": "200ms",
+						"field": "time",
+						"min_doc_count": 0,
+						"extended_bounds": {
+						  "min":  "<FROM_TIMESTAMP>",
+						  "max":  "<TO_TIMESTAMP>"
+						},
+						"format": "epoch_millis"
+					  },
+					  "aggs": {}
+					}
+				  }
+				}
+			  }
+			}
+			`
+
+			testElasticSearchResponse(filtersAggs, expectedElasticsearchQueryJSON)
+		})
+	})
+}

+ 263 - 0
pkg/tsdb/elasticsearch/response_parser.go

@@ -0,0 +1,263 @@
+package elasticsearch
+
+import (
+	"errors"
+	"fmt"
+	"regexp"
+	"strconv"
+	"strings"
+
+	"github.com/grafana/grafana/pkg/components/null"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+type ElasticsearchResponseParser struct {
+	Responses []Response
+	Targets   []*Query
+}
+
+func (rp *ElasticsearchResponseParser) getTimeSeries() *tsdb.QueryResult {
+	queryRes := tsdb.NewQueryResult()
+	for i, res := range rp.Responses {
+		target := rp.Targets[i]
+		props := make(map[string]string)
+		series := make([]*tsdb.TimeSeries, 0)
+		rp.processBuckets(res.Aggregations, target, &series, props, 0)
+		rp.nameSeries(&series, target)
+		queryRes.Series = append(queryRes.Series, series...)
+	}
+	return queryRes
+}
+
+func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{}, target *Query, series *[]*tsdb.TimeSeries, props map[string]string, depth int) error {
+
+	var err error
+	maxDepth := len(target.BucketAggs) - 1
+	for aggId, v := range aggs {
+		aggDef, _ := findAgg(target, aggId)
+		esAgg := simplejson.NewFromAny(v)
+		if aggDef == nil {
+			continue
+		}
+
+		if depth == maxDepth {
+			if aggDef.Type == "date_histogram" {
+				err = rp.processMetrics(esAgg, target, series, props)
+				if err != nil {
+					return err
+				}
+			} else {
+				return fmt.Errorf("unsupported aggregation type: %s", aggDef.Type)
+			}
+		} else {
+			for i, b := range esAgg.Get("buckets").MustArray() {
+				bucket := simplejson.NewFromAny(b)
+				// copy props so sibling buckets don't overwrite each other's tags
+				newProps := make(map[string]string)
+				for k, v := range props {
+					newProps[k] = v
+				}
+				if key, err := bucket.Get("key").String(); err == nil {
+					newProps[aggDef.Field] = key
+				} else {
+					newProps["filter"] = strconv.Itoa(i)
+				}
+
+				if key, err := bucket.Get("key_as_string").String(); err == nil {
+					newProps[aggDef.Field] = key
+				}
+				err = rp.processBuckets(bucket.MustMap(), target, series, newProps, depth+1)
+				if err != nil {
+					return err
+				}
+			}
+		}
+
+	}
+	return nil
+
+}
+
+func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *[]*tsdb.TimeSeries, props map[string]string) error {
+	for _, metric := range target.Metrics {
+		if metric.Hide {
+			continue
+		}
+
+		switch metric.Type {
+		case "count":
+			newSeries := tsdb.TimeSeries{}
+			for _, v := range esAgg.Get("buckets").MustArray() {
+				bucket := simplejson.NewFromAny(v)
+				value := castToNullFloat(bucket.Get("doc_count"))
+				key := castToNullFloat(bucket.Get("key"))
+				newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
+			}
+			// copy props so each series gets its own tag map
+			newSeries.Tags = map[string]string{}
+			for k, v := range props {
+				newSeries.Tags[k] = v
+			}
+			newSeries.Tags["metric"] = "count"
+			*series = append(*series, &newSeries)
+
+		case "percentiles":
+			buckets := esAgg.Get("buckets").MustArray()
+			if len(buckets) == 0 {
+				break
+			}
+
+			firstBucket := simplejson.NewFromAny(buckets[0])
+			percentiles := firstBucket.GetPath(metric.ID, "values").MustMap()
+
+			for percentileName := range percentiles {
+				newSeries := tsdb.TimeSeries{}
+				newSeries.Tags = map[string]string{}
+				for k, v := range props {
+					newSeries.Tags[k] = v
+				}
+				newSeries.Tags["metric"] = "p" + percentileName
+				newSeries.Tags["field"] = metric.Field
+				for _, v := range buckets {
+					bucket := simplejson.NewFromAny(v)
+					value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName))
+					key := castToNullFloat(bucket.Get("key"))
+					newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
+				}
+				*series = append(*series, &newSeries)
+			}
+		default:
+			newSeries := tsdb.TimeSeries{}
+			newSeries.Tags = map[string]string{}
+			for k, v := range props {
+				newSeries.Tags[k] = v
+			}
+
+			newSeries.Tags["metric"] = metric.Type
+			newSeries.Tags["field"] = metric.Field
+			for _, v := range esAgg.Get("buckets").MustArray() {
+				bucket := simplejson.NewFromAny(v)
+				key := castToNullFloat(bucket.Get("key"))
+				valueObj, err := bucket.Get(metric.ID).Map()
+				if err != nil {
+					continue
+				}
+				var value null.Float
+				if _, ok := valueObj["normalized_value"]; ok {
+					value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value"))
+				} else {
+					value = castToNullFloat(bucket.GetPath(metric.ID, "value"))
+				}
+				newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
+			}
+			*series = append(*series, &newSeries)
+		}
+	}
+	return nil
+}
+
+func (rp *ElasticsearchResponseParser) nameSeries(seriesList *[]*tsdb.TimeSeries, target *Query) {
+	set := make(map[string]string)
+	for _, v := range *seriesList {
+		if metricType, exists := v.Tags["metric"]; exists {
+			if _, ok := set[metricType]; !ok {
+				set[metricType] = ""
+			}
+		}
+	}
+	metricTypeCount := len(set)
+	for _, series := range *seriesList {
+		series.Name = rp.getSeriesName(series, target, metricTypeCount)
+	}
+
+}
+
+func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string {
+	metricType := series.Tags["metric"]
+	metricName := rp.getMetricName(metricType)
+	delete(series.Tags, "metric")
+
+	field := ""
+	if v, ok := series.Tags["field"]; ok {
+		field = v
+		delete(series.Tags, "field")
+	}
+
+	if target.Alias != "" {
+		// strings.Replace returns a new string, so collect the substitutions
+		// into seriesName instead of discarding them
+		seriesName := target.Alias
+		var re = regexp.MustCompile(`{{([\s\S]+?)}}`)
+		for _, match := range re.FindAllString(target.Alias, -1) {
+			group := match[2 : len(match)-2]
+
+			if strings.HasPrefix(group, "term ") {
+				if term, ok := series.Tags[strings.TrimPrefix(group, "term ")]; ok {
+					seriesName = strings.Replace(seriesName, match, term, 1)
+				}
+			}
+			if v, ok := series.Tags[group]; ok {
+				seriesName = strings.Replace(seriesName, match, v, 1)
+			}
+
+			switch group {
+			case "metric":
+				seriesName = strings.Replace(seriesName, match, metricName, 1)
+			case "field":
+				seriesName = strings.Replace(seriesName, match, field, 1)
+			}
+		}
+		return seriesName
+	}
+	// todo, if field and pipelineAgg
+	if field != "" && isPipelineAgg(metricType) {
+		found := false
+		for _, metric := range target.Metrics {
+			if metric.ID == field {
+				metricName += " " + describeMetric(metric.Type, field)
+				found = true
+			}
+		}
+		if !found {
+			metricName = "Unset"
+		}
+
+	} else if field != "" {
+		metricName += " " + field
+	}
+
+	if len(series.Tags) == 0 {
+		return metricName
+	}
+
+	name := ""
+	for _, v := range series.Tags {
+		name += v + " "
+	}
+
+	if metricTypeCount == 1 {
+		return strings.TrimSpace(name)
+	}
+
+	return strings.TrimSpace(name) + " " + metricName
+
+}
+
+func (rp *ElasticsearchResponseParser) getMetricName(metric string) string {
+	if text, ok := metricAggType[metric]; ok {
+		return text
+	}
+
+	if text, ok := extendedStats[metric]; ok {
+		return text
+	}
+
+	return metric
+}
+
+func castToNullFloat(j *simplejson.Json) null.Float {
+	f, err := j.Float64()
+	if err == nil {
+		return null.FloatFrom(f)
+	}
+
+	s, err := j.String()
+	if err == nil {
+		v, _ := strconv.ParseFloat(s, 64)
+		return null.FloatFromPtr(&v)
+	}
+
+	return null.NewFloat(0, false)
+}
+
+func findAgg(target *Query, aggId string) (*BucketAgg, error) {
+	for _, v := range target.BucketAggs {
+		if aggId == v.ID {
+			return v, nil
+		}
+	}
+	return nil, errors.New("could not find aggDef, aggId: " + aggId)
+}
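
castToNullFloat works hand in hand with the dec.UseNumber() call in elasticsearch.go: with UseNumber, numeric aggregation values decode as json.Number rather than float64, and the JSON null in empty buckets comes through as nil, which falls out as an invalid (null) float. A standalone sketch of those three paths:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`[10, "10.5", null]`))
	dec.UseNumber()

	var values []interface{}
	if err := dec.Decode(&values); err != nil {
		panic(err)
	}

	for _, v := range values {
		switch n := v.(type) {
		case json.Number: // numbers keep full precision until converted
			f, _ := n.Float64()
			fmt.Println("number:", f)
		case string: // castToNullFloat also tolerates numeric strings
			fmt.Println("string:", n)
		default: // nil: maps to an invalid (null) float
			fmt.Println("null -> invalid float")
		}
	}
}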

+ 109 - 0
pkg/tsdb/elasticsearch/response_parser_test.go

@@ -0,0 +1,109 @@
+package elasticsearch
+
+import (
+	"encoding/json"
+	"testing"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+func testElasticsearchResponse(body string, target Query) *tsdb.QueryResult {
+	var responses Responses
+	err := json.Unmarshal([]byte(body), &responses)
+	So(err, ShouldBeNil)
+
+	responseParser := ElasticsearchResponseParser{responses.Responses, []*Query{&target}}
+	return responseParser.getTimeSeries()
+}
+
+func TestElasticSearchResponseParser(t *testing.T) {
+	Convey("Elasticsearch Response query testing", t, func() {
+		Convey("Build test average metric with moving average", func() {
+			responses := `{
+  "responses": [
+    {
+      "took": 1,
+      "timed_out": false,
+      "_shards": {
+        "total": 5,
+        "successful": 5,
+        "skipped": 0,
+        "failed": 0
+      },
+      "hits": {
+        "total": 4500,
+        "max_score": 0,
+        "hits": []
+      },
+      "aggregations": {
+        "2": {
+          "buckets": [
+            {
+              "1": {
+                "value": null
+              },
+              "key_as_string": "1522205880000",
+              "key": 1522205880000,
+              "doc_count": 0
+            },
+            {
+              "1": {
+                "value": 10
+              },
+              "key_as_string": "1522205940000",
+              "key": 1522205940000,
+              "doc_count": 300
+            },
+            {
+              "1": {
+                "value": 10
+              },
+              "3": {
+                "value": 20
+              },
+              "key_as_string": "1522206000000",
+              "key": 1522206000000,
+              "doc_count": 300
+            },
+            {
+              "1": {
+                "value": 10
+              },
+              "3": {
+                "value": 20
+              },
+              "key_as_string": "1522206060000",
+              "key": 1522206060000,
+              "doc_count": 300
+            }
+          ]
+        }
+      },
+      "status": 200
+    }
+  ]
+}
+`
+			res := testElasticsearchResponse(responses, avgWithMovingAvg)
+			So(len(res.Series), ShouldEqual, 2)
+			So(res.Series[0].Name, ShouldEqual, "Average value")
+			So(len(res.Series[0].Points), ShouldEqual, 4)
+			for i, p := range res.Series[0].Points {
+				if i == 0 {
+					So(p[0].Valid, ShouldBeFalse)
+				} else {
+					So(p[0].Float64, ShouldEqual, 10)
+				}
+				So(p[1].Float64, ShouldEqual, 1522205880000+60000*i)
+			}
+
+			So(res.Series[1].Name, ShouldEqual, "Moving Average Average 1")
+			So(len(res.Series[1].Points), ShouldEqual, 2)
+
+			for _, p := range res.Series[1].Points {
+				So(p[0].Float64, ShouldEqual, 20)
+			}
+
+		})
+	})
+}

+ 1 - 0
public/app/plugins/datasource/elasticsearch/plugin.json

@@ -20,6 +20,7 @@
    "version": "5.0.0"
  },

+  "alerting": true,
  "annotations": true,
  "metrics": true,


+ 75 - 0
vendor/github.com/leibowitz/moment/diff.go

@@ -0,0 +1,75 @@
+package moment
+
+import (
+	"fmt"
+	"math"
+	"time"
+)
+
+// @todo In months/years requires the old and new to calculate correctly, right?
+// @todo decide how to handle rounding (i.e. always floor?)
+type Diff struct {
+	duration time.Duration
+}
+
+func (d *Diff) InSeconds() int {
+	return int(d.duration.Seconds())
+}
+
+func (d *Diff) InMinutes() int {
+	return int(d.duration.Minutes())
+}
+
+func (d *Diff) InHours() int {
+	return int(d.duration.Hours())
+}
+
+func (d *Diff) InDays() int {
+	return int(math.Floor(float64(d.InSeconds()) / 86400))
+}
+
+// This depends on where the weeks fall?
+func (d *Diff) InWeeks() int {
+	return int(math.Floor(float64(d.InDays() / 7)))
+}
+
+func (d *Diff) InMonths() int {
+	return 0
+}
+
+func (d *Diff) InYears() int {
+	return 0
+}
+
+// http://momentjs.com/docs/#/durations/humanize/
+func (d *Diff) Humanize() string {
+	diffInSeconds := d.InSeconds()
+
+	if diffInSeconds <= 45 {
+		return fmt.Sprintf("%d seconds ago", diffInSeconds)
+	} else if diffInSeconds <= 90 {
+		return "a minute ago"
+	}
+
+	diffInMinutes := d.InMinutes()
+
+	if diffInMinutes <= 45 {
+		return fmt.Sprintf("%d minutes ago", diffInMinutes)
+	} else if diffInMinutes <= 90 {
+		return "an hour ago"
+	}
+
+	diffInHours := d.InHours()
+
+	if diffInHours <= 22 {
+		return fmt.Sprintf("%d hours ago", diffInHours)
+	} else if diffInHours <= 36 {
+		return "a day ago"
+	}
+
+	return "diff is in days"
+}
+
+// In Months
+
+// In years

+ 1185 - 0
vendor/github.com/leibowitz/moment/moment.go

@@ -0,0 +1,1185 @@
+package moment
+
+import (
+	"fmt"
+	"regexp"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// links
+// http://en.wikipedia.org/wiki/ISO_week_date
+// http://golang.org/src/pkg/time/format.go
+// http://www.php.net/manual/en/class.datetime.php#datetime.constants.rfc822
+// http://php.net/manual/en/function.date.php
+// http://www.php.net/manual/en/datetime.formats.relative.php
+
+// @todo are these constants needed if they are in the time package?
+// There are a lot of extras here, and RFC822 doesn't match up. Why?
+// Also, is timezone usage wrong? Double-check
+const (
+	ATOM    = "2006-01-02T15:04:05Z07:00"
+	COOKIE  = "Monday, 02-Jan-06 15:04:05 MST"
+	ISO8601 = "2006-01-02T15:04:05Z0700"
+	RFC822  = "Mon, 02 Jan 06 15:04:05 Z0700"
+	RFC850  = "Monday, 02-Jan-06 15:04:05 MST"
+	RFC1036 = "Mon, 02 Jan 06 15:04:05 Z0700"
+	RFC1123 = "Mon, 02 Jan 2006 15:04:05 Z0700"
+	RFC2822 = "Mon, 02 Jan 2006 15:04:05 Z0700"
+	RFC3339 = "2006-01-02T15:04:05Z07:00"
+	RSS     = "Mon, 02 Jan 2006 15:04:05 Z0700"
+	W3C     = "2006-01-02T15:04:05Z07:00"
+)
+
+var (
+	regex_days    = "monday|mon|tuesday|tues|wednesday|wed|thursday|thurs|friday|fri|saturday|sat|sunday|sun"
+	regex_period  = "second|minute|hour|day|week|month|year"
+	regex_numbers = "one|two|three|four|five|six|seven|eight|nine|ten"
+)
+
+// regexp
+var (
+	compiled    = regexp.MustCompile(`\s{2,}`)
+	relativeday = regexp.MustCompile(`(yesterday|today|tomorrow)`)
+	//relative1      = regexp.MustCompile(`(first|last) day of (this|next|last|previous) (week|month|year)`)
+	//relative2      = regexp.MustCompile(`(first|last) day of (` + "jan|january|feb|february|mar|march|apr|april|may|jun|june|jul|july|aug|august|sep|september|oct|october|nov|november|dec|december" + `)(?:\s(\d{4,4}))?`)
+	relative3 = regexp.MustCompile(`((?P<relperiod>this|next|last|previous) )?(` + regex_days + `)`)
+	//relativeval    = regexp.MustCompile(`([0-9]+) (day|week|month|year)s? ago`)
+	ago            = regexp.MustCompile(`([0-9]+) (` + regex_period + `)s? ago`)
+	ordinal        = regexp.MustCompile("([0-9]+)(st|nd|rd|th)")
+	written        = regexp.MustCompile(regex_numbers)
+	relativediff   = regexp.MustCompile(`([\+\-])?([0-9]+),? ?(` + regex_period + `)s?`)
+	relativetime   = regexp.MustCompile(`(?P<hour>\d\d?):(?P<minutes>\d\d?)(:(?P<seconds>\d\d?))?\s?(?P<meridiem>am|pm)?\s?(?P<zone>[a-z]{3,3})?|(?P<relativetime>noon|midnight)`)
+	yearmonthday   = regexp.MustCompile(`(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})`)
+	relativeperiod = regexp.MustCompile(`(?P<relperiod>this|next|last) (week|month|year)`)
+	numberRegex    = regexp.MustCompile("([0-9]+)(?:<stdOrdinal>)")
+)
+
+// http://golang.org/src/pkg/time/format.go?s=12686:12728#L404
+
+// Timezone implementation
+// https://groups.google.com/forum/#!topic/golang-nuts/XEVN4QwTvHw
+// http://en.wikipedia.org/wiki/Zone.tab
+
+// Support ISO8601 Duration Parsing?
+// http://en.wikipedia.org/wiki/ISO_8601
+
+// Differences
+// Months are NOT zero-index, MOmentJS they are
+// Weeks are 0 indexed
+//     -- Sunday being the last day of the week ISO-8601 - is that diff from Moment?
+// From/FromNow Return a Diff object rather than strings
+
+// Support for locale and languages with English as default
+
+// Support for strftime
+// https://github.com/benjaminoakes/moment-strftime
+// Format: https://php.net/strftime
+
+type Moment struct {
+	time time.Time
+
+	Parser
+}
+
+type Parser interface {
+	Convert(string) string
+}
+
+func New() *Moment {
+	m := &Moment{time.Now(), new(MomentParser)}
+
+	return m
+}
+
+func NewMoment(t time.Time) *Moment {
+	m := &Moment{t, new(MomentParser)}
+
+	return m
+}
+
+func (m *Moment) GetTime() time.Time {
+	return m.time
+}
+
+func (m *Moment) Now() *Moment {
+	m.time = time.Now().In(m.GetTime().Location())
+
+	return m
+}
+
+func (m *Moment) Moment(layout string, datetime string) *Moment {
+	return m.MomentGo(m.Convert(layout), datetime)
+}
+
+func (m *Moment) MomentGo(layout string, datetime string) *Moment {
+	time, _ := time.Parse(layout, datetime)
+
+	m.time = time
+
+	return m
+}
+
+// This method is nowhere near done - requires lots of work.
+func (m *Moment) Strtotime(str string) *Moment {
+	str = strings.ToLower(strings.TrimSpace(str))
+	str = compiled.ReplaceAllString(str, " ")
+
+	// Replace written numbers (i.e. nine, ten) with actual numbers (9, 10)
+	str = written.ReplaceAllStringFunc(str, func(n string) string {
+		switch n {
+		case "one":
+			return "1"
+		case "two":
+			return "2"
+		case "three":
+			return "3"
+		case "four":
+			return "4"
+		case "five":
+			return "5"
+		case "six":
+			return "6"
+		case "seven":
+			return "7"
+		case "eight":
+			return "8"
+		case "nine":
+			return "9"
+		case "ten":
+			return "10"
+		}
+
+		return ""
+	})
+
+	// Remove ordinal suffixes st, nd, rd, th
+	str = ordinal.ReplaceAllString(str, "$1")
+
+	// Replace n second|minute|hour... ago to -n second|minute|hour... to consolidate parsing
+	str = ago.ReplaceAllString(str, "-$1 $2")
+
+	// Look for relative +1day, +3 days 5 hours 15 minutes
+	if match := relativediff.FindAllStringSubmatch(str, -1); match != nil {
+		for i := range match {
+			switch match[i][1] {
+			case "-":
+				number, _ := strconv.Atoi(match[i][2])
+				m.Subtract(match[i][3], number)
+			default:
+				number, _ := strconv.Atoi(match[i][2])
+				m.Add(match[i][3], number)
+			}
+
+			str = strings.Replace(str, match[i][0], "", 1)
+		}
+	}
+
+	// Remove any words that aren't needed for consistency
+	str = strings.Replace(str, " at ", " ", -1)
+	str = strings.Replace(str, " on ", " ", -1)
+
+	// Support for interchangeable previous/last
+	str = strings.Replace(str, "previous", "last", -1)
+
+	var dateDefaults = map[string]int{
+		"year":  0,
+		"month": 0,
+		"day":   0,
+	}
+
+	dateMatches := dateDefaults
+	if match := yearmonthday.FindStringSubmatch(str); match != nil {
+		for i, name := range yearmonthday.SubexpNames() {
+			if i == 0 {
+				str = strings.Replace(str, match[i], "", 1)
+				continue
+			}
+
+			if match[i] == "" {
+				continue
+			}
+
+			if name == "year" || name == "month" || name == "day" {
+				dateMatches[name], _ = strconv.Atoi(match[i])
+			}
+
+		}
+
+		defer m.strtotimeSetDate(dateMatches)
+		if str == "" {
+			// Nothing left to parse
+			return m
+		}
+
+		str = strings.TrimSpace(str)
+	}
+
+	// Try to parse out time from the string
+	var timeDefaults = map[string]int{
+		"hour":    0,
+		"minutes": 0,
+		"seconds": 0,
+	}
+
+	timeMatches := timeDefaults
+	var zone string
+	if match := relativetime.FindStringSubmatch(str); match != nil {
+		for i, name := range relativetime.SubexpNames() {
+			if i == 0 {
+				str = strings.Replace(str, match[i], "", 1)
+				continue
+			}
+
+			if match[i] == "" {
+				continue
+			}
+
+			// Midnight is all zero's so nothing to do
+			if name == "relativetime" && match[i] == "noon" {
+				timeDefaults["hour"] = 12
+			}
+
+			if name == "zone" {
+				zone = match[i]
+			}
+
+			if name == "meridiem" && match[i] == "pm" && timeMatches["hour"] < 12 {
+				timeMatches["hour"] += 12
+			}
+
+			if name == "hour" || name == "minutes" || name == "seconds" {
+				timeMatches[name], _ = strconv.Atoi(match[i])
+			}
+		}
+
+		// Processing time is always last
+		defer m.strtotimeSetTime(timeMatches, zone)
+
+		if str == "" {
+			// Nothing left to parse
+			return m
+		}
+
+		str = strings.TrimSpace(str)
+	}
+
+	// m.StartOf("month", "January").GoTo(time.Sunday)
+
+	if match := relativeperiod.FindStringSubmatch(str); match != nil {
+		period := match[1]
+		unit := match[2]
+
+		str = strings.Replace(str, match[0], "", 1)
+
+		switch period {
+		case "next":
+			if unit == "year" {
+				m.AddYears(1)
+			}
+			if unit == "month" {
+				m.AddMonths(1)
+			}
+			if unit == "week" {
+				m.AddWeeks(1)
+			}
+		case "last":
+			if unit == "year" {
+				m.SubYears(1)
+			}
+			if unit == "month" {
+				m.SubMonths(1)
+			}
+			if unit == "week" {
+				m.SubWeeks(1)
+			}
+		}
+
+		str = strings.TrimSpace(str)
+
+		// first := regexp.MustCompile("(?P<relperiod>first|last)?")
+	}
+
+	/*
+
+							   relativeday:        first day of
+							   relativeperiod:     this, last, next
+							   relativeperiodunit  week, month, year
+							   day:                monday, tues, wednesday
+							   month:              january, feb
+
+
+							   YYYY-MM-DD (HH:MM:SS MST)?
+							   MM-DD-YYYY (HH:MM:SS MST)
+							   10 September 2015 (HH:MM:SS MST)?
+							   September, 10 2015 (HH:MM:SS MST)?
+							   September 10 2015 (HH:MM:SS M
+
+		                           this year 2014
+		                           next year 2015
+		                           last year 2013
+
+		                        this month April
+		                        next month May
+		                        last month Mar
+
+		                        first day of April
+		                        last day of April
+
+
+							   DONE 3PM
+							   DONE 3:00 PM
+							   DONE 3:00:05 MST
+							   3PM on January 5th
+							   January 5th at 3:00PM
+							   first saturday _of_ next month
+							   first saturday _of_ next month _at_ 3:00PM
+							   saturday of next week
+							   saturday of last week
+							        saturday next week
+							        monday next week
+							   saturday of this week
+							   saturday at 3:00pm
+							   saturday at 4:00PM
+							   saturday at midn
+							   first of january
+							   last of january
+				               january of next year
+							   first day of january
+							   last day of january
+							   		      first day of February
+
+						       DONE midnight
+						       DONE noon
+							   DONE 3 days ago
+							   DONE ten days
+							   DONE 9 weeks ago // Convert to -9 weeks
+							   DONE -9 weeks
+
+	*/
+
+	if match := relativeday.FindStringSubmatch(str); match != nil && len(match) > 1 {
+		day := match[1]
+
+		str = strings.Replace(str, match[0], "", 1)
+
+		switch day {
+		case "today":
+			m.Today()
+		case "yesterday":
+			m.Yesterday()
+		case "tomorrow":
+			m.Tomorrow()
+		}
+	}
+
+	if match := relative3.FindStringSubmatch(str); match != nil {
+		var when string
+		for i, name := range relative3.SubexpNames() {
+			if name == "relperiod" {
+				when = match[i]
+			}
+		}
+		weekDay := match[len(match)-1]
+
+		str = strings.Replace(str, match[0], "", 1)
+
+		wDay, err := ParseWeekDay(weekDay)
+		if err == nil {
+			switch when {
+			case "last", "previous":
+				m.GoBackTo(wDay, true)
+
+			case "next":
+				m.GoTo(wDay, true)
+
+			case "", "this":
+				m.GoTo(wDay, false)
+			default:
+				m.GoTo(wDay, false)
+			}
+		}
+	}
+
+	/*
+
+
+	   yesterday 11:00
+	   today 11:00
+	   tomorrow 11:00
+	   midnight
+	   noon
+	   DONE +n (second|day|week|month|year)s?
+	   DONE -n (second|day|week|month|year)s?
+	   next (monday|tuesday|wednesday|thursday|friday|saturday|sunday) 11:00
+	   last (monday|tuesday|wednesday|thursday|friday|saturday|sunday) 11:00
+	   next (month|year)
+	   last (month|year)
+	   first day of (january|february|march...|december) 2014
+	   last day of (january|february|march...|december) 2014
+	   first day of (this|next|last) (week|month|year)
+	   last day of (this|next|last) (week|month|year)
+	   first (monday|tuesday|wednesday) of July 2014
+	   last (monday|tuesday|wednesday) of July 2014
+	   n (day|week|month|year)s? ago
+	   Monday|Tuesday|Wednesday|Thursday|Friday
+	   Monday (last|this|next) week
+
+	   DONE +1 week 2 days 3 hours 4 minutes 5 seconds
+	*/
+
+	return m
+}
+
+// @todo deal with timezone
+func (m *Moment) strtotimeSetTime(time map[string]int, zone string) {
+	m.SetHour(time["hour"]).SetMinute(time["minutes"]).SetSecond(time["seconds"])
+}
+
+func (m *Moment) strtotimeSetDate(date map[string]int) {
+	m.SetYear(date["year"]).SetMonth(time.Month(date["month"])).SetDay(date["day"])
+}
+
+func (m Moment) Clone() *Moment {
+	clone := New()
+	clone.time = m.GetTime()
+
+	return clone
+}
+
+/**
+ * Getters
+ *
+ */
+// https://groups.google.com/forum/#!topic/golang-nuts/pret7hjDc70
+// @todo not implemented; Go's time.Time exposes sub-second precision via Nanosecond()
+func (m *Moment) Millisecond() {
+
+}
+
+func (m *Moment) Second() int {
+	return m.GetTime().Second()
+}
+
+func (m *Moment) Minute() int {
+	return m.GetTime().Minute()
+}
+
+func (m *Moment) Hour() int {
+	return m.GetTime().Hour()
+}
+
+// Day of month
+func (m *Moment) Date() int {
+	return m.DayOfMonth()
+}
+
+// Carbon convenience method
+func (m *Moment) DayOfMonth() int {
+	return m.GetTime().Day()
+}
+
+// Day of week (int or string)
+func (m *Moment) Day() time.Weekday {
+	return m.DayOfWeek()
+}
+
+// Carbon convenience method
+func (m *Moment) DayOfWeek() time.Weekday {
+	return m.GetTime().Weekday()
+}
+
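+// ISO-8601 numbering: Monday = 1 … Sunday = 7 (Go's time.Weekday uses Sunday = 0).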
+func (m *Moment) DayOfWeekISO() int {
+	day := m.GetTime().Weekday()
+
+	if day == time.Sunday {
+		return 7
+	}
+
+	return int(day)
+}
+
+func (m *Moment) DayOfYear() int {
+	return m.GetTime().YearDay()
+}
+
+// Day of Year with zero padding
+func (m *Moment) dayOfYearZero() string {
+	return fmt.Sprintf("%03d", m.GetTime().YearDay())
+}
+
+// todo panic?
+func (m *Moment) Weekday(index int) string {
+	if index < 0 || index > 6 {
+		panic("Weekday index must be between 0 and 6")
+	}
+
+	return time.Weekday(index).String()
+}
+
+// @todo not implemented; always returns 0
+func (m *Moment) Week() int {
+	return 0
+}
+
+// Is this the week number, whereas ISOWeekYear is the number of weeks in the year?
+// @see http://stackoverflow.com/questions/18478741/get-weeks-in-year
+func (m *Moment) ISOWeek() int {
+	_, week := m.GetTime().ISOWeek()
+
+	return week
+}
+
+// @todo Consider language support
+func (m *Moment) Month() time.Month {
+	return m.GetTime().Month()
+}
+
+func (m *Moment) Quarter() (quarter int) {
+	quarter = 4
+
+	switch m.Month() {
+	case time.January, time.February, time.March:
+		quarter = 1
+	case time.April, time.May, time.June:
+		quarter = 2
+	case time.July, time.August, time.September:
+		quarter = 3
+	}
+
+	return
+}
+
+func (m *Moment) Year() int {
+	return m.GetTime().Year()
+}
+
+// @see comments for ISOWeek; not implemented
+func (m *Moment) WeekYear() {
+
+}
+
+func (m *Moment) ISOWeekYear() {
+
+}
+
+/**
+ * Manipulate
+ *
+ */
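+// Add advances the moment by value units of key, e.g. (illustrative):
+//	New().Add("days", 3).Format("YYYY-MM-DD")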
+func (m *Moment) Add(key string, value int) *Moment {
+	switch key {
+	case "years", "year", "y":
+		m.AddYears(value)
+	case "months", "month", "M":
+		m.AddMonths(value)
+	case "weeks", "week", "w":
+		m.AddWeeks(value)
+	case "days", "day", "d":
+		m.AddDays(value)
+	case "hours", "hour", "h":
+		m.AddHours(value)
+	case "minutes", "minute", "m":
+		m.AddMinutes(value)
+	case "seconds", "second", "s":
+		m.AddSeconds(value)
+	case "milliseconds", "millisecond", "ms":
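+		// @todo milliseconds are not supported yet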
+
+	}
+
+	return m
+}
+
+// Carbon
+func (m *Moment) AddSeconds(seconds int) *Moment {
+	return m.addTime(time.Second * time.Duration(seconds))
+}
+
+// Carbon
+func (m *Moment) AddMinutes(minutes int) *Moment {
+	return m.addTime(time.Minute * time.Duration(minutes))
+}
+
+// Carbon
+func (m *Moment) AddHours(hours int) *Moment {
+	return m.addTime(time.Hour * time.Duration(hours))
+}
+
+// Carbon
+func (m *Moment) AddDay() *Moment {
+	return m.AddDays(1)
+}
+
+// Carbon
+func (m *Moment) AddDays(days int) *Moment {
+	m.time = m.GetTime().AddDate(0, 0, days)
+
+	return m
+}
+
+// Carbon
+func (m *Moment) AddWeeks(weeks int) *Moment {
+	return m.AddDays(weeks * 7)
+}
+
+// Carbon
+func (m *Moment) AddMonths(months int) *Moment {
+	m.time = m.GetTime().AddDate(0, months, 0)
+
+	return m
+}
+
+// Carbon
+func (m *Moment) AddYears(years int) *Moment {
+	m.time = m.GetTime().AddDate(years, 0, 0)
+
+	return m
+}
+
+func (m *Moment) addTime(d time.Duration) *Moment {
+	m.time = m.GetTime().Add(d)
+
+	return m
+}
+
+func (m *Moment) Subtract(key string, value int) *Moment {
+	switch key {
+	case "years", "year", "y":
+		m.SubYears(value)
+	case "months", "month", "M":
+		m.SubMonths(value)
+	case "weeks", "week", "w":
+		m.SubWeeks(value)
+	case "days", "day", "d":
+		m.SubDays(value)
+	case "hours", "hour", "h":
+		m.SubHours(value)
+	case "minutes", "minute", "m":
+		m.SubMinutes(value)
+	case "seconds", "second", "s":
+		m.SubSeconds(value)
+	case "milliseconds", "millisecond", "ms":
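+		// @todo milliseconds are not supported yet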
+
+	}
+
+	return m
+}
+
+// Carbon
+func (m *Moment) SubSeconds(seconds int) *Moment {
+	return m.addTime(time.Second * time.Duration(seconds*-1))
+}
+
+// Carbon
+func (m *Moment) SubMinutes(minutes int) *Moment {
+	return m.addTime(time.Minute * time.Duration(minutes*-1))
+}
+
+// Carbon
+func (m *Moment) SubHours(hours int) *Moment {
+	return m.addTime(time.Hour * time.Duration(hours*-1))
+}
+
+// Carbon
+func (m *Moment) SubDay() *Moment {
+	return m.SubDays(1)
+}
+
+// Carbon
+func (m *Moment) SubDays(days int) *Moment {
+	return m.AddDays(days * -1)
+}
+
+func (m *Moment) SubWeeks(weeks int) *Moment {
+	return m.SubDays(weeks * 7)
+}
+
+// Carbon
+func (m *Moment) SubMonths(months int) *Moment {
+	return m.AddMonths(months * -1)
+}
+
+// Carbon
+func (m *Moment) SubYears(years int) *Moment {
+	return m.AddYears(years * -1)
+}
+
+// Carbon
+func (m *Moment) Today() *Moment {
+	return m.Now()
+}
+
+// Carbon
+func (m *Moment) Tomorrow() *Moment {
+	return m.Today().AddDay()
+}
+
+// Carbon
+func (m *Moment) Yesterday() *Moment {
+	return m.Today().SubDay()
+}
+
+func (m *Moment) StartOf(key string) *Moment {
+	switch key {
+	case "year", "y":
+		m.StartOfYear()
+	case "month", "M":
+		m.StartOfMonth()
+	case "week", "w":
+		m.StartOfWeek()
+	case "day", "d":
+		m.StartOfDay()
+	case "hour", "h":
+		if m.Minute() > 0 {
+			m.SubMinutes(m.Minute())
+		}
+
+		if m.Second() > 0 {
+			m.SubSeconds(m.Second())
+		}
+	case "minute", "m":
+		if m.Second() > 0 {
+			m.SubSeconds(m.Second())
+		}
+	case "second", "s":
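+		// @todo sub-second truncation is not implemented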
+
+	}
+
+	return m
+}
+
+// Carbon
+func (m *Moment) StartOfDay() *Moment {
+	if m.Hour() > 0 {
+		_, timeOffset := m.GetTime().Zone()
+		m.SubHours(m.Hour())
+
+		_, newTimeOffset := m.GetTime().Zone()
+		diffOffset := timeOffset - newTimeOffset
+		if diffOffset != 0 {
+			// we need to adjust for time zone difference
+			m.AddSeconds(diffOffset)
+		}
+	}
+
+	return m.StartOf("hour")
+}
+
+// @todo ISO8601 Starts on Monday
+func (m *Moment) StartOfWeek() *Moment {
+	return m.GoBackTo(time.Monday, false).StartOfDay()
+}
+
+// Carbon
+func (m *Moment) StartOfMonth() *Moment {
+	return m.SetDay(1).StartOfDay()
+}
+
+// Carbon
+func (m *Moment) StartOfYear() *Moment {
+	return m.SetMonth(time.January).SetDay(1).StartOfDay()
+}
+
+// Carbon
+func (m *Moment) EndOf(key string) *Moment {
+	switch key {
+	case "year", "y":
+		m.EndOfYear()
+	case "month", "M":
+		m.EndOfMonth()
+	case "week", "w":
+		m.EndOfWeek()
+	case "day", "d":
+		m.EndOfDay()
+	case "hour", "h":
+		if m.Minute() < 59 {
+			m.AddMinutes(59 - m.Minute())
+		}
+	case "minute", "m":
+		if m.Second() < 59 {
+			m.AddSeconds(59 - m.Second())
+		}
+	case "second", "s":
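+		// @todo sub-second rounding is not implemented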
+
+	}
+
+	return m
+}
+
+// Carbon
+func (m *Moment) EndOfDay() *Moment {
+	if m.Hour() < 23 {
+		_, timeOffset := m.GetTime().Zone()
+		m.AddHours(23 - m.Hour())
+
+		_, newTimeOffset := m.GetTime().Zone()
+		diffOffset := newTimeOffset - timeOffset
+		if diffOffset != 0 {
+			// we need to adjust for time zone difference
+			m.SubSeconds(diffOffset)
+		}
+	}
+
+	return m.EndOf("hour")
+}
+
+// @todo ISO8601 Ends on Sunday
+func (m *Moment) EndOfWeek() *Moment {
+	return m.GoTo(time.Sunday, false).EndOfDay()
+}
+
+// Carbon
+func (m *Moment) EndOfMonth() *Moment {
+	return m.SetDay(m.DaysInMonth()).EndOfDay()
+}
+
+// Carbon
+func (m *Moment) EndOfYear() *Moment {
+	return m.GoToMonth(time.December, false).EndOfMonth()
+}
+
+// Custom
+func (m *Moment) GoTo(day time.Weekday, next bool) *Moment {
+	if m.Day() == day {
+		if !next {
+			return m
+		} else {
+			m.AddDay()
+		}
+	}
+
+	var diff int
+	if diff = int(day) - int(m.Day()); diff > 0 {
+		return m.AddDays(diff)
+	}
+
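+	// diff <= 0: the target weekday is earlier in the week, so wrap forward into the following week.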
+	return m.AddDays(7 + diff)
+}
+
+// Custom
+func (m *Moment) GoBackTo(day time.Weekday, previous bool) *Moment {
+	if m.Day() == day {
+		if !previous {
+			return m
+		} else {
+			m.SubDay()
+		}
+	}
+
+	var diff int
+	if diff = int(day) - int(m.Day()); diff > 0 {
+		return m.SubDays(7 - diff)
+	}
+
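+	// diff <= 0: the target weekday already occurred this week, so step back within the week.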
+	return m.SubDays(diff * -1)
+}
+
+// Custom
+func (m *Moment) GoToMonth(month time.Month, next bool) *Moment {
+	if m.Month() == month {
+		if !next {
+			return m
+		} else {
+			m.AddMonths(1)
+		}
+	}
+
+	var diff int
+	if diff = int(month - m.Month()); diff > 0 {
+		return m.AddMonths(diff)
+	}
+
+	return m.AddMonths(12 + diff)
+}
+
+// Custom
+func (m *Moment) GoBackToMonth(month time.Month, previous bool) *Moment {
+	if m.Month() == month {
+		if !previous {
+			return m
+		} else {
+			m.SubMonths(1)
+		}
+	}
+
+	var diff int
+	if diff = int(month) - int(m.Month()); diff > 0 {
+		return m.SubMonths(12 - diff)
+	}
+
+	return m.SubMonths(diff * -1)
+}
+
+func (m *Moment) SetSecond(seconds int) *Moment {
+	if seconds >= 0 && seconds <= 59 {
+		return m.AddSeconds(seconds - m.Second())
+	}
+
+	return m
+}
+
+func (m *Moment) SetMinute(minute int) *Moment {
+	if minute >= 0 && minute <= 59 {
+		return m.AddMinutes(minute - m.Minute())
+	}
+
+	return m
+}
+
+func (m *Moment) SetHour(hour int) *Moment {
+	if hour >= 0 && hour <= 23 {
+		return m.AddHours(hour - m.Hour())
+	}
+
+	return m
+}
+
+// Custom
+func (m *Moment) SetDay(day int) *Moment {
+	if m.DayOfMonth() == day {
+		return m
+	}
+
+	return m.AddDays(day - m.DayOfMonth())
+}
+
+// Custom
+func (m *Moment) SetMonth(month time.Month) *Moment {
+	if m.Month() > month {
+		return m.GoBackToMonth(month, false)
+	}
+
+	return m.GoToMonth(month, false)
+}
+
+// Custom
+func (m *Moment) SetYear(year int) *Moment {
+	if m.Year() == year {
+		return m
+	}
+
+	return m.AddYears(year - m.Year())
+}
+
+// UTC Mode. @see http://momentjs.com/docs/#/parsing/utc/ (@todo not implemented; returns m unchanged)
+func (m *Moment) UTC() *Moment {
+	return m
+}
+
+// http://momentjs.com/docs/#/manipulating/timezone-offset/
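+// time.Time.Zone() reports the offset in seconds east of UTC; moment.js expects minutes west, hence the sign flip.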
+func (m *Moment) Zone() int {
+	_, offset := m.GetTime().Zone()
+
+	return (offset / 60) * -1
+}
+
+/**
+ * Display
+ *
+ */
+func (m *Moment) Format(layout string) string {
+	format := m.Convert(layout)
+	hasCustom := false
+
+	formatted := m.GetTime().Format(format)
+
+	if strings.Contains(formatted, "<std") {
+		hasCustom = true
+		formatted = strings.Replace(formatted, "<stdUnix>", fmt.Sprintf("%d", m.Unix()), -1)
+		formatted = strings.Replace(formatted, "<stdWeekOfYear>", fmt.Sprintf("%d", m.ISOWeek()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfWeek>", fmt.Sprintf("%d", m.DayOfWeek()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfWeekISO>", fmt.Sprintf("%d", m.DayOfWeekISO()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfYear>", fmt.Sprintf("%d", m.DayOfYear()), -1)
+		formatted = strings.Replace(formatted, "<stdQuarter>", fmt.Sprintf("%d", m.Quarter()), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfYearZero>", m.dayOfYearZero(), -1)
+		formatted = strings.Replace(formatted, "<stdHourNoZero>", fmt.Sprintf("%d", m.Hour()), -1)
+	}
+
+	// This has to happen after time.Format
+	if hasCustom && strings.Contains(formatted, "<stdOrdinal>") {
+		formatted = numberRegex.ReplaceAllStringFunc(formatted, func(n string) string {
+			ordinal, _ := strconv.Atoi(strings.Replace(n, "<stdOrdinal>", "", 1))
+			return m.ordinal(ordinal)
+		})
+	}
+
+	return formatted
+}
+
+func (m *Moment) FormatGo(layout string) string {
+	return m.GetTime().Format(layout)
+}
+
+// From Dmytro Shteflyuk @https://groups.google.com/forum/#!topic/golang-nuts/l8NhI74jl-4
+func (m *Moment) ordinal(x int) string {
+	suffix := "th"
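+	// 11, 12 and 13 are exceptions: they end in 1, 2, 3 but still take "th".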
+	switch x % 10 {
+	case 1:
+		if x%100 != 11 {
+			suffix = "st"
+		}
+	case 2:
+		if x%100 != 12 {
+			suffix = "nd"
+		}
+	case 3:
+		if x%100 != 13 {
+			suffix = "rd"
+		}
+	}
+
+	return strconv.Itoa(x) + suffix
+}
+
+func (m *Moment) FromNow() Diff {
+	now := new(Moment)
+	now.Now()
+
+	return m.From(now)
+}
+
+// Carbon
+func (m *Moment) From(f *Moment) Diff {
+	return m.GetDiff(f)
+}
+
+/**
+ * Difference
+ *
+ */
+func (m *Moment) Diff(t *Moment, unit string) int {
+	diff := m.GetDiff(t)
+
+	switch unit {
+	case "years":
+		return diff.InYears()
+	case "months":
+		return diff.InMonths()
+	case "weeks":
+		return diff.InWeeks()
+	case "days":
+		return diff.InDays()
+	case "hours":
+		return diff.InHours()
+	case "minutes":
+		return diff.InMinutes()
+	case "seconds":
+		return diff.InSeconds()
+	}
+
+	return 0
+}
+
+// Custom
+func (m *Moment) GetDiff(t *Moment) Diff {
+	duration := m.GetTime().Sub(t.GetTime())
+
+	return Diff{duration}
+}
+
+/**
+ * Display
+ *
+ */
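+// Milliseconds since the Unix epoch (second precision; the sub-second part is always zero).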
+func (m *Moment) ValueOf() int64 {
+	return m.Unix() * 1000
+}
+
+func (m *Moment) Unix() int64 {
+	return m.GetTime().Unix()
+}
+
+func (m *Moment) DaysInMonth() int {
+	days := 31
+	switch m.Month() {
+	case time.April, time.June, time.September, time.November:
+		days = 30
+	case time.February:
+		days = 28
+		if m.IsLeapYear() {
+			days = 29
+		}
+	}
+
+	return days
+}
+
+// or ToSlice?
+func (m *Moment) ToArray() []int {
+	return []int{
+		m.Year(),
+		int(m.Month()),
+		m.DayOfMonth(),
+		m.Hour(),
+		m.Minute(),
+		m.Second(),
+	}
+}
+
+/**
+ * Query
+ *
+ */
+func (m *Moment) IsBefore(t Moment) bool {
+	return m.GetTime().Before(t.GetTime())
+}
+
+func (m *Moment) IsSame(t *Moment, layout string) bool {
+	return m.Format(layout) == t.Format(layout)
+}
+
+func (m *Moment) IsAfter(t Moment) bool {
+	return m.GetTime().After(t.GetTime())
+}
+
+// Carbon
+func (m *Moment) IsToday() bool {
+	today := m.Clone().Today()
+
+	return m.Year() == today.Year() && m.Month() == today.Month() && m.Day() == today.Day()
+}
+
+// Carbon
+func (m *Moment) IsTomorrow() bool {
+	tomorrow := m.Clone().Tomorrow()
+
+	return m.Year() == tomorrow.Year() && m.Month() == tomorrow.Month() && m.Day() == tomorrow.Day()
+}
+
+// Carbon
+func (m *Moment) IsYesterday() bool {
+	yesterday := m.Clone().Yesterday()
+
+	return m.Year() == yesterday.Year() && m.Month() == yesterday.Month() && m.Day() == yesterday.Day()
+}
+
+// Carbon
+func (m *Moment) IsWeekday() bool {
+	return !m.IsWeekend()
+}
+
+// Carbon
+func (m *Moment) IsWeekend() bool {
+	return m.DayOfWeek() == time.Sunday || m.DayOfWeek() == time.Saturday
+}
+
+func (m *Moment) IsLeapYear() bool {
+	year := m.Year()
+	return year%4 == 0 && (year%100 != 0 || year%400 == 0)
+}
+
+// Custom
+func (m *Moment) Range(start Moment, end Moment) bool {
+	return m.IsAfter(start) && m.IsBefore(end)
+}

+ 100 - 0
vendor/github.com/leibowitz/moment/moment_parser.go

@@ -0,0 +1,100 @@
+package moment
+
+import (
+	"regexp"
+	"strings"
+)
+
+type MomentParser struct{}
+
+var (
+	date_pattern = regexp.MustCompile("(LT|LL?L?L?|l{1,4}|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[ow]?|W[oW]?|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|Q)")
+)
+
+/*
+	+ <stdOrdinal>      S (makes any number before it ordinal)
+	+ stdDayOfYear      1, 2, 365
+	+ stdDayOfYearZero  001, 002, 365
+	+ stdDayOfWeek      w 0, 1, 2 numeric day of the week (0 = sunday)
+	+ stdDayOfWeekISO   N 1 = Monday
+	+ stdWeekOfYear     W ISO week number of year
+	+ stdUnix           U
+	+ stdQuarter
+*/
+
+// Thanks to https://github.com/fightbulc/moment.php for replacement keys and regex
+var moment_replacements = map[string]string{
+	"M":    "1",                           // stdNumMonth 1 2 ... 11 12
+	"Mo":   "1<stdOrdinal>",               // stdNumMonth 1st 2nd ... 11th 12th
+	"MM":   "01",                          // stdZeroMonth 01 02 ... 11 12
+	"MMM":  "Jan",                         // stdMonth Jan Feb ... Nov Dec
+	"MMMM": "January",                     // stdLongMonth January February ... November December
+	"D":    "2",                           // stdDay 1 2 ... 30 31
+	"Do":   "2<stdOrdinal>",               // stdDay 1st 2nd ... 30th 31st  @todo support st nd th etc
+	"DD":   "02",                          // stdZeroDay 01 02 ... 30 31
+	"DDD":  "<stdDayOfYear>",              // Day of the year 1 2 ... 364 365
+	"DDDo": "<stdDayOfYear><stdOrdinal>",  // Day of the year 1st 2nd ... 364th 365th
+	"DDDD": "<stdDayOfYearZero>",          // Day of the year 001 002 ... 364 365 @todo****
+	"d":    "<stdDayOfWeek>",              // Numeric representation of day of the week 0 1 ... 5 6
+	"do":   "<stdDayOfWeek><stdOrdinal>",  // 0th 1st ... 5th 6th
+	"dd":   "Mon",                         // ***Su Mo ... Fr Sa @todo
+	"ddd":  "Mon",                         // Sun Mon ... Fri Sat
+	"dddd": "Monday",                      // stdLongWeekDay Sunday Monday ... Friday Saturday
+	"e":    "<stdDayOfWeek>",              // Numeric representation of day of the week 0 1 ... 5 6 @todo
+	"E":    "<stdDayOfWeekISO>",           // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
+	"w":    "<stdWeekOfYear>",             // 1 2 ... 52 53
+	"wo":   "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
+	"ww":   "<stdWeekOfYear>",             // ***01 02 ... 52 53 @todo
+	"W":    "<stdWeekOfYear>",             // 1 2 ... 52 53
+	"Wo":   "<stdWeekOfYear><stdOrdinal>", // 1st 2nd ... 52nd 53rd
+	"WW":   "<stdWeekOfYear>",             // ***01 02 ... 52 53 @todo
+	"YY":   "06",                          // stdYear 70 71 ... 29 30
+	"YYYY": "2006",                        // stdLongYear 1970 1971 ... 2029 2030
+	// "gg"      : "o", 				 // ISO-8601 year number 70 71 ... 29 30 @todo
+	// "gggg"    : "o", // ***1970 1971 ... 2029 2030 @todo
+	// "GG"      : "o", //70 71 ... 29 30 @todo
+	// "GGGG"    : "o", // ***1970 1971 ... 2029 2030 @todo
+	"Q":  "<stdQuarter>",
+	"A":  "PM",              // stdPM AM PM
+	"a":  "pm",              // stdpm am pm
+	"H":  "<stdHourNoZero>", // stdHour 0 1 ... 22 23
+	"HH": "15",              // 00 01 ... 22 23
+	"h":  "3",               // stdHour12 1 2 ... 11 12
+	"hh": "03",              // stdZeroHour12 01 02 ... 11 12
+	"m":  "4",               // stdMinute 0 1 ... 58 59
+	"mm": "04",              // stdZeroMinute 00 01 ... 58 59
+	"s":  "5",               // stdSecond 0 1 ... 58 59
+	"ss": "05",              // stdZeroSecond ***00 01 ... 58 59
+	// "S"       : "", //0 1 ... 8 9
+	// "SS"      : "", //0 1 ... 98 99
+	// "SSS"     : "", //0 1 ... 998 999
+	"z":    "MST",                                        //EST CST ... MST PST
+	"zz":   "MST",                                        //EST CST ... MST PST
+	"Z":    "Z07:00",                                     // stdNumColonTZ -07:00 -06:00 ... +06:00 +07:00
+	"ZZ":   "-0700",                                      // stdNumTZ -0700 -0600 ... +0600 +0700
+	"X":    "<stdUnix>",                                  // Seconds since unix epoch 1360013296
+	"LT":   "3:04 PM",                                    // 8:30 PM
+	"L":    "01/02/2006",                                 //09/04/1986
+	"l":    "1/2/2006",                                   //9/4/1986
+	"LL":   "January 2<stdOrdinal> 2006",                 //September 4th 1986 the php s flag isn't supported
+	"ll":   "Jan 2 2006",                                 //Sep 4 1986
+	"LLL":  "January 2<stdOrdinal> 2006 3:04 PM",         //September 4th 1986 8:30 PM @todo the php s flag isn't supported
+	"lll":  "Jan 2 2006 3:04 PM",                         //Sep 4 1986 8:30 PM
+	"LLLL": "Monday, January 2<stdOrdinal> 2006 3:04 PM", //Thursday, September 4th 1986 8:30 PM the php s flag isn't supported
+	"llll": "Mon, Jan 2 2006 3:04 PM",                    //Thu, Sep 4 1986 8:30 PM
+}
+
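+// Convert translates a moment.js-style layout into a Go reference-time layout,
+// e.g. Convert("YYYY-MM-DD") yields "2006-01-02".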
+func (p *MomentParser) Convert(layout string) string {
+	var match [][]string
+	if match = date_pattern.FindAllStringSubmatch(layout, -1); match == nil {
+		return layout
+	}
+
+	for i := range match {
+		if replace, ok := moment_replacements[match[i][0]]; ok {
+			layout = strings.Replace(layout, match[i][0], replace, 1)
+		}
+	}
+
+	return layout
+}

+ 32 - 0
vendor/github.com/leibowitz/moment/parse_day.go

@@ -0,0 +1,32 @@
+package moment
+
+import (
+	"fmt"
+	"strings"
+	"time"
+)
+
+var (
+	days = []time.Weekday{
+		time.Sunday,
+		time.Monday,
+		time.Tuesday,
+		time.Wednesday,
+		time.Thursday,
+		time.Friday,
+		time.Saturday,
+	}
+)
+
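+// ParseWeekDay maps a case-insensitive English day name, e.g. "monday", to its time.Weekday.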
+func ParseWeekDay(day string) (time.Weekday, error) {
+	day = strings.ToLower(day)
+
+	for _, d := range days {
+		if day == strings.ToLower(d.String()) {
+			return d, nil
+		}
+	}
+
+	return -1, fmt.Errorf("unable to parse %s as week day", day)
+}

+ 68 - 0
vendor/github.com/leibowitz/moment/strftime_parser.go

@@ -0,0 +1,68 @@
+package moment
+
+import (
+	"regexp"
+	"strings"
+)
+
+type StrftimeParser struct{}
+
+var (
+	replacements_pattern = regexp.MustCompile("%[mbhBedjwuaAVgyGYpPkHlIMSZzsTrRDFXx]")
+)
+
+// Not implemented
+// U
+// C
+
+var strftime_replacements = map[string]string{
+	"%m": "01",  // stdZeroMonth 01 02 ... 11 12
+	"%b": "Jan", // stdMonth Jan Feb ... Nov Dec
+	"%h": "Jan",
+	"%B": "January",           // stdLongMonth January February ... November December
+	"%e": "2",                 // stdDay 1 2 ... 30 30
+	"%d": "02",                // stdZeroDay 01 02 ... 30 31
+	"%j": "<stdDayOfYear>",    // Day of the year ***001 002 ... 364 365 @todo****
+	"%w": "<stdDayOfWeek>",    // Numeric representation of day of the week 0 1 ... 5 6
+	"%u": "<stdDayOfWeekISO>", // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
+	"%a": "Mon",               // Sun Mon ... Fri Sat
+	"%A": "Monday",            // stdLongWeekDay Sunday Monday ... Friday Saturday
+	"%V": "<stdWeekOfYear>",   // ***01 02 ... 52 53  @todo begin with zeros
+	"%g": "06",                // stdYear 70 71 ... 29 30
+	"%y": "06",
+	"%G": "2006", // stdLongYear 1970 1971 ... 2029 2030
+	"%Y": "2006",
+	"%p": "PM",        // stdPM AM PM
+	"%P": "pm",        // stdpm am pm
+	"%k": "15",        // stdHour 0 1 ... 22 23
+	"%H": "15",        // 00 01 ... 22 23
+	"%l": "3",         // stdHour12 1 2 ... 11 12
+	"%I": "03",        // stdZeroHour12 01 02 ... 11 12
+	"%M": "04",        // stdZeroMinute 00 01 ... 58 59
+	"%S": "05",        // stdZeroSecond ***00 01 ... 58 59
+	"%Z": "MST",       //EST CST ... MST PST
+	"%z": "-0700",     // stdNumTZ -0700 -0600 ... +0600 +0700
+	"%s": "<stdUnix>", // Seconds since unix epoch 1360013296
+	"%r": "03:04:05 PM",
+	"%R": "15:04",
+	"%T": "15:04:05",
+	"%D": "01/02/06",
+	"%F": "2006-01-02",
+	"%X": "15:04:05",
+	"%x": "01/02/06",
+}
+
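+// Convert translates an strftime-style layout into a Go reference-time layout,
+// e.g. Convert("%Y-%m-%d") yields "2006-01-02".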
+func (p *StrftimeParser) Convert(layout string) string {
+	var match [][]string
+	if match = replacements_pattern.FindAllStringSubmatch(layout, -1); match == nil {
+		return layout
+	}
+
+	for i := range match {
+		if replace, ok := strftime_replacements[match[i][0]]; ok {
+			layout = strings.Replace(layout, match[i][0], replace, 1)
+		}
+	}
+
+	return layout
+}