
feat(alerting): more refactoring work in backend code

Torkel Ödegaard, 9 years ago
commit f872d5cfa3

+ 1 - 0
pkg/models/alert.go

@@ -13,6 +13,7 @@ type Alert struct {
 	PanelId     int64
 	Name        string
 	Description string
+	Severity    string
 	State       string
 	Handler     int64
 	Enabled     bool

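The model change is a single new column. As a quick standalone sketch of the resulting shape (the struct below is a local copy using the field names from the diff, not the grafana `pkg/models` type; the `"critical"`/`"warning"` values match the test fixtures later in this commit):

```go
package main

import "fmt"

// Alert mirrors the fields visible in the diff above; a standalone copy for
// illustration only, not the grafana pkg/models type.
type Alert struct {
	PanelId     int64
	Name        string
	Description string
	Severity    string // new field in this commit, e.g. "warning" or "critical"
	State       string
	Handler     int64
	Enabled     bool
}

func main() {
	a := Alert{Name: "cpu usage", Severity: "critical", Enabled: true}
	fmt.Printf("%+v\n", a)
}
```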
+ 3 - 27
pkg/services/alerting/alert_rule.go

@@ -6,31 +6,11 @@ import (
 	"strconv"
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/services/alerting/transformers"
 
 	m "github.com/grafana/grafana/pkg/models"
 )
 
 type AlertRule struct {
-	Id              int64
-	OrgId           int64
-	DashboardId     int64
-	PanelId         int64
-	Frequency       int64
-	Name            string
-	Description     string
-	State           string
-	Warning         Level
-	Critical        Level
-	Query           AlertQuery
-	Transform       string
-	TransformParams simplejson.Json
-	Transformer     transformers.Transformer
-
-	NotificationGroups []int64
-}
-
-type AlertRule2 struct {
 	Id            int64
 	OrgId         int64
 	DashboardId   int64
@@ -38,7 +18,7 @@ type AlertRule2 struct {
 	Frequency     int64
 	Name          string
 	Description   string
-	State         string
+	Severity      string
 	Conditions    []AlertCondition
 	Notifications []int64
 }
@@ -68,17 +48,13 @@ func getTimeDurationStringToSeconds(str string) int64 {
 }
 
 func NewAlertRuleFromDBModel(ruleDef *m.Alert) (*AlertRule, error) {
-	return nil, nil
-}
-
-func NewAlertRuleFromDBModel2(ruleDef *m.Alert) (*AlertRule2, error) {
-	model := &AlertRule2{}
+	model := &AlertRule{}
 	model.Id = ruleDef.Id
 	model.OrgId = ruleDef.OrgId
 	model.Name = ruleDef.Name
 	model.Description = ruleDef.Description
-	model.State = ruleDef.State
 	model.Frequency = ruleDef.Frequency
+	model.Severity = ruleDef.Severity
 
 	for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
 		if id, ok := v.(int64); ok {

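With `AlertRule2` folded back into `AlertRule`, the constructor now copies scalar fields from the DB row and pulls notification ids out of the settings blob. A self-contained sketch of that conversion pattern, using the standard library instead of simplejson (all types here are simplified assumptions based on the diff, not the actual grafana code):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// dbAlert stands in for the persisted model; field names follow the diff.
type dbAlert struct {
	Id          int64
	OrgId       int64
	Name        string
	Description string
	Severity    string
	Frequency   int64
	Settings    json.RawMessage
}

type alertRule struct {
	Id            int64
	OrgId         int64
	Name          string
	Description   string
	Severity      string
	Frequency     int64
	Notifications []int64
}

// newAlertRuleFromDBModel mirrors the shape of the refactored constructor:
// copy scalar fields, then read notification ids from the settings JSON.
func newAlertRuleFromDBModel(def *dbAlert) (*alertRule, error) {
	rule := &alertRule{
		Id:          def.Id,
		OrgId:       def.OrgId,
		Name:        def.Name,
		Description: def.Description,
		Severity:    def.Severity,
		Frequency:   def.Frequency,
	}

	var settings struct {
		Notifications []int64 `json:"notifications"`
	}
	if err := json.Unmarshal(def.Settings, &settings); err != nil {
		return nil, err
	}
	rule.Notifications = settings.Notifications
	return rule, nil
}

func main() {
	def := &dbAlert{
		Id: 1, OrgId: 1, Name: "cpu high", Severity: "critical", Frequency: 60,
		Settings: json.RawMessage(`{"notifications": [1, 2]}`),
	}
	rule, err := newAlertRuleFromDBModel(def)
	fmt.Println(rule.Severity, rule.Notifications, err)
}
```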
+ 1 - 1
pkg/services/alerting/alert_rule_test.go

@@ -66,7 +66,7 @@ func TestAlertRuleModel(t *testing.T) {
 				Settings: alertJSON,
 			}
 
-			alertRule, err := NewAlertRuleFromDBModel2(alert)
+			alertRule, err := NewAlertRuleFromDBModel(alert)
 			So(err, ShouldBeNil)
 
 			So(alertRule.Conditions, ShouldHaveLength, 1)

+ 1 - 1
pkg/services/alerting/conditions.go

@@ -13,7 +13,7 @@ type QueryCondition struct {
 	Evaluator AlertEvaluator
 }
 
-func (c *QueryCondition) Eval() {
+func (c *QueryCondition) Eval(context *AlertResultContext) {
 }
 
 func NewQueryCondition(model *simplejson.Json) (*QueryCondition, error) {

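`Eval` now receives the shared result context instead of returning a value, so each condition records whether it fired. The real `QueryCondition.Eval` body is still empty in this commit; a minimal sketch of the intended pattern with stand-in types (the threshold condition is hypothetical, invented only for this example):

```go
package main

import "fmt"

// alertResultContext is a stand-in for the shared context conditions write into.
type alertResultContext struct {
	Triggered bool
	Details   []string
}

// alertCondition matches the reshaped interface: evaluate and mutate the context.
type alertCondition interface {
	Eval(result *alertResultContext)
}

// thresholdCondition is a hypothetical condition used only for this sketch.
type thresholdCondition struct {
	value     float64
	threshold float64
}

func (c *thresholdCondition) Eval(result *alertResultContext) {
	if c.value > c.threshold {
		result.Triggered = true
		result.Details = append(result.Details,
			fmt.Sprintf("%v > %v", c.value, c.threshold))
	}
}

func main() {
	result := &alertResultContext{}
	conditions := []alertCondition{
		&thresholdCondition{value: 120, threshold: 100},
	}
	for _, c := range conditions {
		c.Eval(result)
	}
	fmt.Println(result.Triggered, result.Details)
}
```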
+ 10 - 24
pkg/services/alerting/engine.go

@@ -6,12 +6,11 @@ import (
 
 	"github.com/benbjohnson/clock"
 	"github.com/grafana/grafana/pkg/log"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
 )
 
 type Engine struct {
 	execQueue       chan *AlertJob
-	resultQueue     chan *AlertResult
+	resultQueue     chan *AlertResultContext
 	clock           clock.Clock
 	ticker          *Ticker
 	scheduler       Scheduler
@@ -26,7 +25,7 @@ func NewEngine() *Engine {
 	e := &Engine{
 		ticker:          NewTicker(time.Now(), time.Second*0, clock.New()),
 		execQueue:       make(chan *AlertJob, 1000),
-		resultQueue:     make(chan *AlertResult, 1000),
+		resultQueue:     make(chan *AlertResultContext, 1000),
 		scheduler:       NewScheduler(),
 		handler:         NewHandler(),
 		ruleReader:      NewRuleReader(),
@@ -91,15 +90,14 @@ func (e *Engine) execDispatch() {
 func (e *Engine) executeJob(job *AlertJob) {
 	startTime := time.Now()
 
-	resultChan := make(chan *AlertResult, 1)
-	go e.handler.Execute(job, resultChan)
+	resultChan := make(chan *AlertResultContext, 1)
+	go e.handler.Execute(job.Rule, resultChan)
 
 	select {
 	case <-time.After(e.alertJobTimeout):
-		e.resultQueue <- &AlertResult{
-			State:     alertstates.Pending,
+		e.resultQueue <- &AlertResultContext{
 			Error:     fmt.Errorf("Timeout"),
-			AlertJob:  job,
+			Rule:      job.Rule,
 			StartTime: startTime,
 			EndTime:   time.Now(),
 		}
@@ -110,6 +108,8 @@ func (e *Engine) executeJob(job *AlertJob) {
 		e.log.Debug("Job Execution done", "timeTakenMs", duration, "ruleId", job.Rule.Id)
 		e.resultQueue <- result
 	}
+
+	job.Running = false
 }
 
 func (e *Engine) resultHandler() {
@@ -120,25 +120,11 @@ func (e *Engine) resultHandler() {
 	}()
 
 	for result := range e.resultQueue {
-		e.log.Debug("Alert Rule Result", "ruleId", result.AlertJob.Rule.Id, "state", result.State, "retry", result.AlertJob.RetryCount)
-
-		result.AlertJob.Running = false
+		e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "triggered", result.Triggered)
 
 		if result.Error != nil {
-			result.AlertJob.IncRetry()
-
-			if result.AlertJob.Retryable() {
-				e.log.Error("Alert Rule Result Error", "ruleId", result.AlertJob.Rule.Id, "error", result.Error, "retry", result.AlertJob.RetryCount)
-				e.execQueue <- result.AlertJob
-			} else {
-				e.log.Error("Alert Rule Result Error After Max Retries", "ruleId", result.AlertJob.Rule.Id, "error", result.Error, "retry", result.AlertJob.RetryCount)
-
-				result.State = alertstates.Critical
-				result.Description = fmt.Sprintf("Failed to run check after %d retires, Error: %v", maxAlertExecutionRetries, result.Error)
-				e.responseHandler.Handle(result)
-			}
+			e.log.Error("Alert Rule Result Error", "ruleId", result.Rule.Id, "error", result.Error)
 		} else {
-			result.AlertJob.ResetRetry()
 			e.responseHandler.Handle(result)
 		}
 	}

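`executeJob` races the handler goroutine against a timeout and pushes whichever outcome wins onto the result queue. A standalone sketch of that select/timeout pattern (channel buffering and the `"Timeout"` error text follow the diff; everything else is assumed):

```go
package main

import (
	"fmt"
	"time"
)

type resultContext struct {
	Err       error
	Triggered bool
	StartTime time.Time
	EndTime   time.Time
}

// execute runs the work in its own goroutine; the buffered done channel lets
// the worker finish and exit even if the timeout branch already won the select.
func execute(work func() *resultContext, timeout time.Duration, results chan<- *resultContext) {
	start := time.Now()
	done := make(chan *resultContext, 1)
	go func() { done <- work() }()

	select {
	case <-time.After(timeout):
		results <- &resultContext{
			Err:       fmt.Errorf("Timeout"),
			StartTime: start,
			EndTime:   time.Now(),
		}
	case r := <-done:
		results <- r
	}
}

func main() {
	results := make(chan *resultContext, 1)
	execute(func() *resultContext {
		time.Sleep(10 * time.Millisecond) // simulated rule evaluation
		return &resultContext{Triggered: true, StartTime: time.Now(), EndTime: time.Now()}
	}, 50*time.Millisecond, results)

	r := <-results
	fmt.Println("triggered:", r.Triggered, "err:", r.Err)
}
```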
+ 7 - 0
pkg/services/alerting/extractor_test.go

@@ -39,6 +39,7 @@ func TestAlertRuleExtraction(t *testing.T) {
               "handler": 1,
               "enabled": true,
               "frequency": "60s",
+              "severity": "critical",
               "conditions": [
               {
                 "type": "query",
@@ -63,6 +64,7 @@ func TestAlertRuleExtraction(t *testing.T) {
               "handler": 0,
               "enabled": true,
               "frequency": "60s",
+              "severity": "warning",
               "conditions": [
               {
                 "type": "query",
@@ -122,6 +124,11 @@ func TestAlertRuleExtraction(t *testing.T) {
 					So(alerts[1].Handler, ShouldEqual, 0)
 				})
 
+				Convey("should extract Severity property", func() {
+					So(alerts[0].Severity, ShouldEqual, "critical")
+					So(alerts[1].Severity, ShouldEqual, "warning")
+				})
+
 				Convey("should extract frequency in seconds", func() {
 					So(alerts[0].Frequency, ShouldEqual, 60)
 					So(alerts[1].Frequency, ShouldEqual, 60)

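The extractor fixtures now carry both a severity string and a duration-style frequency ("60s") that the assertions above expect to come back as 60 seconds. A small standard-library sketch of reading those two fields (the real extractor uses simplejson; the struct shape here is an assumption based on the fixture):

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

func main() {
	// Trimmed version of one alert block from the test dashboard JSON above.
	raw := []byte(`{"name": "name1", "frequency": "60s", "severity": "critical"}`)

	var alert struct {
		Name      string `json:"name"`
		Frequency string `json:"frequency"`
		Severity  string `json:"severity"`
	}
	if err := json.Unmarshal(raw, &alert); err != nil {
		panic(err)
	}

	// "60s" -> 60, matching the "should extract frequency in seconds" assertion.
	d, err := time.ParseDuration(alert.Frequency)
	if err != nil {
		panic(err)
	}
	fmt.Println(alert.Severity, int64(d.Seconds())) // critical 60
}
```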
+ 108 - 118
pkg/services/alerting/handler.go

@@ -1,14 +1,9 @@
 package alerting
 
 import (
-	"fmt"
 	"time"
 
-	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/log"
-	m "github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	"github.com/grafana/grafana/pkg/tsdb"
 )
 
 var (
@@ -25,124 +20,119 @@ func NewHandler() *HandlerImpl {
 	}
 }
 
-func (e *HandlerImpl) Execute(job *AlertJob, resultQueue chan *AlertResult) {
-	startTime := time.Now()
-
-	timeSeries, err := e.executeQuery(job)
-	if err != nil {
-		resultQueue <- &AlertResult{
-			Error:     err,
-			State:     alertstates.Pending,
-			AlertJob:  job,
-			StartTime: time.Now(),
-			EndTime:   time.Now(),
-		}
-	}
-
-	result := e.evaluateRule(job.Rule, timeSeries)
-	result.AlertJob = job
-	result.StartTime = startTime
-	result.EndTime = time.Now()
-
-	resultQueue <- result
+func (e *HandlerImpl) Execute(rule *AlertRule, resultQueue chan *AlertResultContext) {
+	resultQueue <- e.eval(rule)
 }
 
-func (e *HandlerImpl) executeQuery(job *AlertJob) (tsdb.TimeSeriesSlice, error) {
-	getDsInfo := &m.GetDataSourceByIdQuery{
-		Id:    job.Rule.Query.DatasourceId,
-		OrgId: job.Rule.OrgId,
-	}
-
-	if err := bus.Dispatch(getDsInfo); err != nil {
-		return nil, fmt.Errorf("Could not find datasource")
-	}
-
-	req := e.GetRequestForAlertRule(job.Rule, getDsInfo.Result)
-	result := make(tsdb.TimeSeriesSlice, 0)
-
-	resp, err := tsdb.HandleRequest(req)
-	if err != nil {
-		return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() error %v", err)
-	}
-
-	for _, v := range resp.Results {
-		if v.Error != nil {
-			return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() response error %v", v)
-		}
-
-		result = append(result, v.Series...)
+func (e *HandlerImpl) eval(rule *AlertRule) *AlertResultContext {
+	result := &AlertResultContext{
+		StartTime: time.Now(),
 	}
 
-	return result, nil
-}
-
-func (e *HandlerImpl) GetRequestForAlertRule(rule *AlertRule, datasource *m.DataSource) *tsdb.Request {
-	e.log.Debug("GetRequest", "query", rule.Query.Query, "from", rule.Query.From, "datasourceId", datasource.Id)
-	req := &tsdb.Request{
-		TimeRange: tsdb.TimeRange{
-			From: "-" + rule.Query.From,
-			To:   rule.Query.To,
-		},
-		Queries: []*tsdb.Query{
-			{
-				RefId: "A",
-				Query: rule.Query.Query,
-				DataSource: &tsdb.DataSourceInfo{
-					Id:       datasource.Id,
-					Name:     datasource.Name,
-					PluginId: datasource.Type,
-					Url:      datasource.Url,
-				},
-			},
-		},
+	for _, condition := range rule.Conditions {
+		condition.Eval(result)
 	}
 
-	return req
+	result.EndTime = time.Now()
+	return result
 }
 
-func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice) *AlertResult {
-	e.log.Debug("Evaluating Alerting Rule", "seriesCount", len(series), "ruleName", rule.Name)
-
-	triggeredAlert := make([]*TriggeredAlert, 0)
-
-	for _, serie := range series {
-		e.log.Debug("Evaluating series", "series", serie.Name)
-		transformedValue, _ := rule.Transformer.Transform(serie)
-
-		critResult := evalCondition(rule.Critical, transformedValue)
-		condition2 := fmt.Sprintf("%v %s %v ", transformedValue, rule.Critical.Operator, rule.Critical.Value)
-		e.log.Debug("Alert execution Crit", "name", serie.Name, "condition", condition2, "result", critResult)
-		if critResult {
-			triggeredAlert = append(triggeredAlert, &TriggeredAlert{
-				State:  alertstates.Critical,
-				Value:  transformedValue,
-				Metric: serie.Name,
-			})
-			continue
-		}
-
-		warnResult := evalCondition(rule.Warning, transformedValue)
-		condition := fmt.Sprintf("%v %s %v ", transformedValue, rule.Warning.Operator, rule.Warning.Value)
-		e.log.Debug("Alert execution Warn", "name", serie.Name, "condition", condition, "result", warnResult)
-		if warnResult {
-			triggeredAlert = append(triggeredAlert, &TriggeredAlert{
-				State:  alertstates.Warn,
-				Value:  transformedValue,
-				Metric: serie.Name,
-			})
-		}
-	}
-
-	executionState := alertstates.Ok
-	for _, raised := range triggeredAlert {
-		if raised.State == alertstates.Critical {
-			executionState = alertstates.Critical
-		}
-
-		if executionState != alertstates.Critical && raised.State == alertstates.Warn {
-			executionState = alertstates.Warn
-		}
-	}
-
-	return &AlertResult{State: executionState, TriggeredAlerts: triggeredAlert}
-}
+// func (e *HandlerImpl) executeQuery(job *AlertJob) (tsdb.TimeSeriesSlice, error) {
+// 	getDsInfo := &m.GetDataSourceByIdQuery{
+// 		Id:    job.Rule.Query.DatasourceId,
+// 		OrgId: job.Rule.OrgId,
+// 	}
+//
+// 	if err := bus.Dispatch(getDsInfo); err != nil {
+// 		return nil, fmt.Errorf("Could not find datasource")
+// 	}
+//
+// 	req := e.GetRequestForAlertRule(job.Rule, getDsInfo.Result)
+// 	result := make(tsdb.TimeSeriesSlice, 0)
+//
+// 	resp, err := tsdb.HandleRequest(req)
+// 	if err != nil {
+// 		return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() error %v", err)
+// 	}
+//
+// 	for _, v := range resp.Results {
+// 		if v.Error != nil {
+// 			return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() response error %v", v)
+// 		}
+//
+// 		result = append(result, v.Series...)
+// 	}
+//
+// 	return result, nil
+// }
+//
+// func (e *HandlerImpl) GetRequestForAlertRule(rule *AlertRule, datasource *m.DataSource) *tsdb.Request {
+// 	e.log.Debug("GetRequest", "query", rule.Query.Query, "from", rule.Query.From, "datasourceId", datasource.Id)
+// 	req := &tsdb.Request{
+// 		TimeRange: tsdb.TimeRange{
+// 			From: "-" + rule.Query.From,
+// 			To:   rule.Query.To,
+// 		},
+// 		Queries: []*tsdb.Query{
+// 			{
+// 				RefId: "A",
+// 				Query: rule.Query.Query,
+// 				DataSource: &tsdb.DataSourceInfo{
+// 					Id:       datasource.Id,
+// 					Name:     datasource.Name,
+// 					PluginId: datasource.Type,
+// 					Url:      datasource.Url,
+// 				},
+// 			},
+// 		},
+// 	}
+//
+// 	return req
+// }
+//
+// func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice) *AlertResult {
+// 	e.log.Debug("Evaluating Alerting Rule", "seriesCount", len(series), "ruleName", rule.Name)
+//
+// 	triggeredAlert := make([]*TriggeredAlert, 0)
+//
+// 	for _, serie := range series {
+// 		e.log.Debug("Evaluating series", "series", serie.Name)
+// 		transformedValue, _ := rule.Transformer.Transform(serie)
+//
+// 		critResult := evalCondition(rule.Critical, transformedValue)
+// 		condition2 := fmt.Sprintf("%v %s %v ", transformedValue, rule.Critical.Operator, rule.Critical.Value)
+// 		e.log.Debug("Alert execution Crit", "name", serie.Name, "condition", condition2, "result", critResult)
+// 		if critResult {
+// 			triggeredAlert = append(triggeredAlert, &TriggeredAlert{
+// 				State:  alertstates.Critical,
+// 				Value:  transformedValue,
+// 				Metric: serie.Name,
+// 			})
+// 			continue
+// 		}
+//
+// 		warnResult := evalCondition(rule.Warning, transformedValue)
+// 		condition := fmt.Sprintf("%v %s %v ", transformedValue, rule.Warning.Operator, rule.Warning.Value)
+// 		e.log.Debug("Alert execution Warn", "name", serie.Name, "condition", condition, "result", warnResult)
+// 		if warnResult {
+// 			triggeredAlert = append(triggeredAlert, &TriggeredAlert{
+// 				State:  alertstates.Warn,
+// 				Value:  transformedValue,
+// 				Metric: serie.Name,
+// 			})
+// 		}
+// 	}
+//
+// 	executionState := alertstates.Ok
+// 	for _, raised := range triggeredAlert {
+// 		if raised.State == alertstates.Critical {
+// 			executionState = alertstates.Critical
+// 		}
+//
+// 		if executionState != alertstates.Critical && raised.State == alertstates.Warn {
+// 			executionState = alertstates.Warn
+// 		}
+// 	}
+//
+// 	return &AlertResult{State: executionState, TriggeredAlerts: triggeredAlert}
+// }

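The handler has collapsed to a timed loop over the rule's conditions, replacing the commented-out query/transform/threshold code above. A self-contained sketch of that eval shape (types are local stand-ins; the function-adapter condition is invented for the example):

```go
package main

import (
	"fmt"
	"time"
)

type resultContext struct {
	Triggered bool
	StartTime time.Time
	EndTime   time.Time
}

type condition interface {
	Eval(result *resultContext)
}

// conditionFunc lets a plain function act as a condition in this sketch.
type conditionFunc func(result *resultContext)

func (f conditionFunc) Eval(result *resultContext) { f(result) }

type rule struct{ Conditions []condition }

// eval mirrors the new handler: stamp StartTime, let every condition write
// into the shared context, stamp EndTime, hand back a single result.
func eval(r *rule) *resultContext {
	result := &resultContext{StartTime: time.Now()}
	for _, c := range r.Conditions {
		c.Eval(result)
	}
	result.EndTime = time.Now()
	return result
}

func main() {
	r := &rule{Conditions: []condition{
		conditionFunc(func(res *resultContext) { res.Triggered = true }),
	}}
	res := eval(r)
	fmt.Println(res.Triggered, res.EndTime.Sub(res.StartTime) >= 0)
}
```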
+ 155 - 133
pkg/services/alerting/handler_test.go

@@ -3,149 +3,171 @@ package alerting
 import (
 	"testing"
 
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	"github.com/grafana/grafana/pkg/services/alerting/transformers"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/models"
 	. "github.com/smartystreets/goconvey/convey"
 )
 
 func TestAlertingExecutor(t *testing.T) {
 	Convey("Test alert execution", t, func() {
-		executor := NewHandler()
+		handler := NewHandler()
 
 		Convey("single time serie", func() {
 			Convey("Show return ok since avg is above 2", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Ok)
-			})
-
-			Convey("Show return critical since below 2", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: "<"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Critical)
-			})
-
-			Convey("Show return critical since sum is above 10", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("sum"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Critical)
-			})
-
-			Convey("Show return ok since avg is below 10", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Ok)
-			})
-
-			Convey("Show return ok since min is below 10", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}, {9, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Ok)
+				json := `
+        {
+				"name": "name2",
+				"description": "desc2",
+				"handler": 0,
+				"enabled": true,
+				"frequency": "60s",
+        "conditions": [
+          {
+            "type": "query",
+            "query":  {
+              "params": ["A", "5m", "now"],
+              "datasourceId": 1,
+              "model": {"target": "aliasByNode(statsd.fakesite.counters.session_start.mobile.count, 4)"}
+            },
+            "reducer": {"type": "avg", "params": []},
+            "evaluator": {"type": ">", "params": [100]}
+          }
+        ]
+			}
+			`
+
+				alertJSON, jsonErr := simplejson.NewJson([]byte(json))
+				So(jsonErr, ShouldBeNil)
+
+				alert := &models.Alert{Settings: alertJSON}
+				rule, _ := NewAlertRuleFromDBModel(alert)
+
+				// timeSeries := []*tsdb.TimeSeries{
+				// 	tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
+				// }
+
+				result := handler.eval(rule)
+				So(result.Triggered, ShouldEqual, true)
 			})
 
-			Convey("Show return ok since max is above 10", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("max"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}, {11, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Critical)
-			})
-
-		})
-
-		Convey("muliple time series", func() {
-			Convey("both are ok", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
-					tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Ok)
-			})
-
-			Convey("first serie is good, second is critical", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
-					tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Critical)
-			})
-
-			Convey("first serie is warn, second is critical", func() {
-				rule := &AlertRule{
-					Critical:    Level{Value: 10, Operator: ">"},
-					Warning:     Level{Value: 5, Operator: ">"},
-					Transformer: transformers.NewAggregationTransformer("avg"),
-				}
-
-				timeSeries := []*tsdb.TimeSeries{
-					tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}}),
-					tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
-				}
-
-				result := executor.evaluateRule(rule, timeSeries)
-				So(result.State, ShouldEqual, alertstates.Critical)
-			})
+			// 	Convey("Show return critical since below 2", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: "<"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Critical)
+			// 	})
+			//
+			// 	Convey("Show return critical since sum is above 10", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("sum"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Critical)
+			// 	})
+			//
+			// 	Convey("Show return ok since avg is below 10", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Ok)
+			// 	})
+			//
+			// 	Convey("Show return ok since min is below 10", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}, {9, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Ok)
+			// 	})
+			//
+			// 	Convey("Show return ok since max is above 10", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("max"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}, {11, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Critical)
+			// 	})
+			//
+			// })
+			//
+			// Convey("muliple time series", func() {
+			// 	Convey("both are ok", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Ok)
+			// 	})
+			//
+			// 	Convey("first serie is good, second is critical", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Critical)
+			// 	})
+			//
+			// 	Convey("first serie is warn, second is critical", func() {
+			// 		rule := &AlertRule{
+			// 			Critical:    Level{Value: 10, Operator: ">"},
+			// 			Warning:     Level{Value: 5, Operator: ">"},
+			// 			Transformer: transformers.NewAggregationTransformer("avg"),
+			// 		}
+			//
+			// 		timeSeries := []*tsdb.TimeSeries{
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}}),
+			// 			tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
+			// 		}
+			//
+			// 		result := executor.evaluateRule(rule, timeSeries)
+			// 		So(result.State, ShouldEqual, alertstates.Critical)
+			// 	})
 		})
 	})
 }

+ 3 - 3
pkg/services/alerting/interfaces.go

@@ -3,7 +3,7 @@ package alerting
 import "time"
 
 type AlertingHandler interface {
-	Execute(rule *AlertJob, resultChan chan *AlertResult)
+	Execute(rule *AlertRule, resultChan chan *AlertResultContext)
 }
 
 type Scheduler interface {
@@ -12,11 +12,11 @@ type Scheduler interface {
 }
 
 type Notifier interface {
-	Notify(alertResult *AlertResult)
+	Notify(alertResult *AlertResultContext)
 }
 
 type AlertCondition interface {
-	Eval()
+	Eval(result *AlertResultContext)
 }
 
 type QueryReducer interface {

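With the interfaces now passing `*AlertRule` in and `*AlertResultContext` out, test doubles stay trivial. A sketch of a fake handler satisfying the reshaped `AlertingHandler` contract (local stand-in types, not the grafana ones):

```go
package main

import "fmt"

type alertRule struct{ Name string }

type alertResultContext struct {
	Rule      *alertRule
	Triggered bool
}

// alertingHandler mirrors the reshaped interface: a rule in, a result context
// out on the channel.
type alertingHandler interface {
	Execute(rule *alertRule, resultChan chan *alertResultContext)
}

// fakeHandler is a hypothetical test double that always reports "triggered".
type fakeHandler struct{}

func (fakeHandler) Execute(rule *alertRule, resultChan chan *alertResultContext) {
	resultChan <- &alertResultContext{Rule: rule, Triggered: true}
}

func main() {
	var h alertingHandler = fakeHandler{}
	results := make(chan *alertResultContext, 1)
	h.Execute(&alertRule{Name: "cpu high"}, results)
	fmt.Println((<-results).Triggered)
}
```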
+ 9 - 10
pkg/services/alerting/models.go

@@ -22,18 +22,17 @@ func (aj *AlertJob) IncRetry() {
 	aj.RetryCount++
 }
 
-type AlertResult struct {
-	State           string
-	TriggeredAlerts []*TriggeredAlert
-	Error           error
-	Description     string
-	StartTime       time.Time
-	EndTime         time.Time
-
-	AlertJob *AlertJob
+type AlertResultContext struct {
+	Triggered   bool
+	Details     []*AlertResultDetail
+	Error       error
+	Description string
+	StartTime   time.Time
+	EndTime     time.Time
+	Rule        *AlertRule
 }
 
-type TriggeredAlert struct {
+type AlertResultDetail struct {
 	Value  float64
 	Metric string
 	State  string

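`AlertResult` becomes `AlertResultContext`: one mutable value carrying the rule, the outcome, and per-series details. A short sketch of populating it (local stand-in types with the same field names as the diff; the metric values are made up):

```go
package main

import (
	"fmt"
	"time"
)

type alertRule struct{ Id int64 }

type alertResultDetail struct {
	Value  float64
	Metric string
	State  string
}

type alertResultContext struct {
	Triggered   bool
	Details     []*alertResultDetail
	Error       error
	Description string
	StartTime   time.Time
	EndTime     time.Time
	Rule        *alertRule
}

func main() {
	ctx := &alertResultContext{StartTime: time.Now(), Rule: &alertRule{Id: 1}}
	// A condition that fired would record what it saw and flip the flag.
	ctx.Details = append(ctx.Details, &alertResultDetail{Value: 120, Metric: "cpu", State: "critical"})
	ctx.Triggered = true
	ctx.EndTime = time.Now()
	fmt.Printf("rule %d triggered=%v details=%d\n", ctx.Rule.Id, ctx.Triggered, len(ctx.Details))
}
```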
+ 193 - 205
pkg/services/alerting/notifier.go

@@ -1,207 +1,195 @@
 package alerting
 
-import (
-	"fmt"
-	"strconv"
-
-	"github.com/grafana/grafana/pkg/bus"
-	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
-	m "github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	"github.com/grafana/grafana/pkg/setting"
-)
-
-type NotifierImpl struct {
-	log              log.Logger
-	getNotifications func(orgId int64, notificationGroups []int64) []*Notification
-}
-
-func NewNotifier() *NotifierImpl {
-	log := log.New("alerting.notifier")
-	return &NotifierImpl{
-		log:              log,
-		getNotifications: buildGetNotifiers(log),
-	}
-}
-
-func (n NotifierImpl) ShouldDispath(alertResult *AlertResult, notifier *Notification) bool {
-	warn := alertResult.State == alertstates.Warn && notifier.SendWarning
-	crit := alertResult.State == alertstates.Critical && notifier.SendCritical
-	return (warn || crit) || alertResult.State == alertstates.Ok
-}
-
-func (n *NotifierImpl) Notify(alertResult *AlertResult) {
-	notifiers := n.getNotifications(alertResult.AlertJob.Rule.OrgId, alertResult.AlertJob.Rule.NotificationGroups)
-
-	for _, notifier := range notifiers {
-		if n.ShouldDispath(alertResult, notifier) {
-			n.log.Info("Sending notification", "state", alertResult.State, "type", notifier.Type)
-			go notifier.Notifierr.Dispatch(alertResult)
-		}
-	}
-}
-
-type Notification struct {
-	Name         string
-	Type         string
-	SendWarning  bool
-	SendCritical bool
-
-	Notifierr NotificationDispatcher
-}
-
-type EmailNotifier struct {
-	To  string
-	log log.Logger
-}
-
-func (this *EmailNotifier) Dispatch(alertResult *AlertResult) {
-	this.log.Info("Sending email")
-	grafanaUrl := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
-	if setting.AppSubUrl != "" {
-		grafanaUrl += "/" + setting.AppSubUrl
-	}
-
-	query := &m.GetDashboardsQuery{
-		DashboardIds: []int64{alertResult.AlertJob.Rule.DashboardId},
-	}
-
-	if err := bus.Dispatch(query); err != nil {
-		this.log.Error("Failed to load dashboard", "error", err)
-		return
-	}
-
-	if len(query.Result) != 1 {
-		this.log.Error("Can only support one dashboard", "result", len(query.Result))
-		return
-	}
-
-	dashboard := query.Result[0]
-
-	panelId := strconv.Itoa(int(alertResult.AlertJob.Rule.PanelId))
-
-	//TODO: get from alertrule and transforms to seconds
-	from := "1466169458375"
-	to := "1466171258375"
-
-	renderUrl := fmt.Sprintf("%s/render/dashboard-solo/db/%s?from=%s&to=%s&panelId=%s&width=1000&height=500", grafanaUrl, dashboard.Slug, from, to, panelId)
-	cmd := &m.SendEmailCommand{
-		Data: map[string]interface{}{
-			"Name":            "Name",
-			"State":           alertResult.State,
-			"Description":     alertResult.Description,
-			"TriggeredAlerts": alertResult.TriggeredAlerts,
-			"DashboardLink":   grafanaUrl + "/dashboard/db/" + dashboard.Slug,
-			"AlertPageUrl":    grafanaUrl + "/alerting",
-			"DashboardImage":  renderUrl,
-		},
-		To:       []string{this.To},
-		Template: "alert_notification.html",
-	}
-
-	err := bus.Dispatch(cmd)
-	if err != nil {
-		this.log.Error("Could not send alert notification as email", "error", err)
-	}
-}
-
-type WebhookNotifier struct {
-	Url      string
-	User     string
-	Password string
-	log      log.Logger
-}
-
-func (this *WebhookNotifier) Dispatch(alertResult *AlertResult) {
-	this.log.Info("Sending webhook")
-
-	bodyJSON := simplejson.New()
-	bodyJSON.Set("name", alertResult.AlertJob.Rule.Name)
-	bodyJSON.Set("state", alertResult.State)
-	bodyJSON.Set("trigged", alertResult.TriggeredAlerts)
-
-	body, _ := bodyJSON.MarshalJSON()
-
-	cmd := &m.SendWebhook{
-		Url:      this.Url,
-		User:     this.User,
-		Password: this.Password,
-		Body:     string(body),
-	}
-
-	bus.Dispatch(cmd)
-}
-
-type NotificationDispatcher interface {
-	Dispatch(alertResult *AlertResult)
-}
-
-func buildGetNotifiers(log log.Logger) func(orgId int64, notificationGroups []int64) []*Notification {
-	return func(orgId int64, notificationGroups []int64) []*Notification {
-		query := &m.GetAlertNotificationQuery{
-			OrgID:                orgId,
-			Ids:                  notificationGroups,
-			IncludeAlwaysExecute: true,
-		}
-		err := bus.Dispatch(query)
-		if err != nil {
-			log.Error("Failed to read notifications", "error", err)
-		}
-
-		var result []*Notification
-		for _, notification := range query.Result {
-			not, err := NewNotificationFromDBModel(notification)
-			if err == nil {
-				result = append(result, not)
-			} else {
-				log.Error("Failed to read notification model", "error", err)
-			}
-		}
-
-		return result
-	}
-}
-
-func NewNotificationFromDBModel(model *m.AlertNotification) (*Notification, error) {
-	notifier, err := createNotifier(model.Type, model.Settings)
-
-	if err != nil {
-		return nil, err
-	}
-
-	return &Notification{
-		Name:         model.Name,
-		Type:         model.Type,
-		Notifierr:    notifier,
-		SendCritical: model.Settings.Get("sendCrit").MustBool(),
-		SendWarning:  model.Settings.Get("sendWarn").MustBool(),
-	}, nil
-}
-
-var createNotifier = func(notificationType string, settings *simplejson.Json) (NotificationDispatcher, error) {
-	if notificationType == "email" {
-		to := settings.Get("to").MustString()
-
-		if to == "" {
-			return nil, fmt.Errorf("Could not find to propertie in settings")
-		}
-
-		return &EmailNotifier{
-			To:  to,
-			log: log.New("alerting.notification.email"),
-		}, nil
-	}
-
-	url := settings.Get("url").MustString()
-	if url == "" {
-		return nil, fmt.Errorf("Could not find url propertie in settings")
-	}
-
-	return &WebhookNotifier{
-		Url:      url,
-		User:     settings.Get("user").MustString(),
-		Password: settings.Get("password").MustString(),
-		log:      log.New("alerting.notification.webhook"),
-	}, nil
-}
+// type NotifierImpl struct {
+// 	log              log.Logger
+// 	getNotifications func(orgId int64, notificationGroups []int64) []*Notification
+// }
+//
+// func NewNotifier() *NotifierImpl {
+// 	log := log.New("alerting.notifier")
+// 	return &NotifierImpl{
+// 		log:              log,
+// 		getNotifications: buildGetNotifiers(log),
+// 	}
+// }
+
+// func (n NotifierImpl) ShouldDispath(alertResult *AlertResultContext, notifier *Notification) bool {
+// 	warn := alertResult.State == alertstates.Warn && notifier.SendWarning
+// 	crit := alertResult.State == alertstates.Critical && notifier.SendCritical
+// 	return (warn || crit) || alertResult.State == alertstates.Ok
+// }
+//
+// func (n *NotifierImpl) Notify(alertResult *AlertResultContext) {
+// 	notifiers := n.getNotifications(alertResult.Rule.OrgId, alertResult.Rule.Notifications)
+//
+// 	for _, notifier := range notifiers {
+// 		if n.ShouldDispath(alertResult, notifier) {
+// 			n.log.Info("Sending notification", "state", alertResult.State, "type", notifier.Type)
+// 			go notifier.Notifierr.Dispatch(alertResult)
+// 		}
+// 	}
+// }
+//
+// type Notification struct {
+// 	Name         string
+// 	Type         string
+// 	SendWarning  bool
+// 	SendCritical bool
+//
+// 	Notifierr NotificationDispatcher
+// }
+//
+// type EmailNotifier struct {
+// 	To  string
+// 	log log.Logger
+// }
+//
+// func (this *EmailNotifier) Dispatch(alertResult *AlertResult) {
+// 	this.log.Info("Sending email")
+// 	grafanaUrl := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
+// 	if setting.AppSubUrl != "" {
+// 		grafanaUrl += "/" + setting.AppSubUrl
+// 	}
+//
+// 	query := &m.GetDashboardsQuery{
+// 		DashboardIds: []int64{alertResult.AlertJob.Rule.DashboardId},
+// 	}
+//
+// 	if err := bus.Dispatch(query); err != nil {
+// 		this.log.Error("Failed to load dashboard", "error", err)
+// 		return
+// 	}
+//
+// 	if len(query.Result) != 1 {
+// 		this.log.Error("Can only support one dashboard", "result", len(query.Result))
+// 		return
+// 	}
+//
+// 	dashboard := query.Result[0]
+//
+// 	panelId := strconv.Itoa(int(alertResult.AlertJob.Rule.PanelId))
+//
+// 	//TODO: get from alertrule and transforms to seconds
+// 	from := "1466169458375"
+// 	to := "1466171258375"
+//
+// 	renderUrl := fmt.Sprintf("%s/render/dashboard-solo/db/%s?from=%s&to=%s&panelId=%s&width=1000&height=500", grafanaUrl, dashboard.Slug, from, to, panelId)
+// 	cmd := &m.SendEmailCommand{
+// 		Data: map[string]interface{}{
+// 			"Name":            "Name",
+// 			"State":           alertResult.State,
+// 			"Description":     alertResult.Description,
+// 			"TriggeredAlerts": alertResult.TriggeredAlerts,
+// 			"DashboardLink":   grafanaUrl + "/dashboard/db/" + dashboard.Slug,
+// 			"AlertPageUrl":    grafanaUrl + "/alerting",
+// 			"DashboardImage":  renderUrl,
+// 		},
+// 		To:       []string{this.To},
+// 		Template: "alert_notification.html",
+// 	}
+//
+// 	err := bus.Dispatch(cmd)
+// 	if err != nil {
+// 		this.log.Error("Could not send alert notification as email", "error", err)
+// 	}
+// }
+//
+// type WebhookNotifier struct {
+// 	Url      string
+// 	User     string
+// 	Password string
+// 	log      log.Logger
+// }
+//
+// func (this *WebhookNotifier) Dispatch(alertResult *AlertResultContext) {
+// 	this.log.Info("Sending webhook")
+//
+// 	bodyJSON := simplejson.New()
+// 	bodyJSON.Set("name", alertResult.AlertJob.Rule.Name)
+// 	bodyJSON.Set("state", alertResult.State)
+// 	bodyJSON.Set("trigged", alertResult.TriggeredAlerts)
+//
+// 	body, _ := bodyJSON.MarshalJSON()
+//
+// 	cmd := &m.SendWebhook{
+// 		Url:      this.Url,
+// 		User:     this.User,
+// 		Password: this.Password,
+// 		Body:     string(body),
+// 	}
+//
+// 	bus.Dispatch(cmd)
+// }
+//
+// type NotificationDispatcher interface {
+// 	Dispatch(alertResult *AlertResult)
+// }
+//
+// func buildGetNotifiers(log log.Logger) func(orgId int64, notificationGroups []int64) []*Notification {
+// 	return func(orgId int64, notificationGroups []int64) []*Notification {
+// 		query := &m.GetAlertNotificationQuery{
+// 			OrgID:                orgId,
+// 			Ids:                  notificationGroups,
+// 			IncludeAlwaysExecute: true,
+// 		}
+// 		err := bus.Dispatch(query)
+// 		if err != nil {
+// 			log.Error("Failed to read notifications", "error", err)
+// 		}
+//
+// 		var result []*Notification
+// 		for _, notification := range query.Result {
+// 			not, err := NewNotificationFromDBModel(notification)
+// 			if err == nil {
+// 				result = append(result, not)
+// 			} else {
+// 				log.Error("Failed to read notification model", "error", err)
+// 			}
+// 		}
+//
+// 		return result
+// 	}
+// }
+//
+// func NewNotificationFromDBModel(model *m.AlertNotification) (*Notification, error) {
+// 	notifier, err := createNotifier(model.Type, model.Settings)
+//
+// 	if err != nil {
+// 		return nil, err
+// 	}
+//
+// 	return &Notification{
+// 		Name:         model.Name,
+// 		Type:         model.Type,
+// 		Notifierr:    notifier,
+// 		SendCritical: model.Settings.Get("sendCrit").MustBool(),
+// 		SendWarning:  model.Settings.Get("sendWarn").MustBool(),
+// 	}, nil
+// }
+//
+// var createNotifier = func(notificationType string, settings *simplejson.Json) (NotificationDispatcher, error) {
+// 	if notificationType == "email" {
+// 		to := settings.Get("to").MustString()
+//
+// 		if to == "" {
+// 			return nil, fmt.Errorf("Could not find to propertie in settings")
+// 		}
+//
+// 		return &EmailNotifier{
+// 			To:  to,
+// 			log: log.New("alerting.notification.email"),
+// 		}, nil
+// 	}
+//
+// 	url := settings.Get("url").MustString()
+// 	if url == "" {
+// 		return nil, fmt.Errorf("Could not find url propertie in settings")
+// 	}
+//
+// 	return &WebhookNotifier{
+// 		Url:      url,
+// 		User:     settings.Get("user").MustString(),
+// 		Password: settings.Get("password").MustString(),
+// 		log:      log.New("alerting.notification.webhook"),
+// 	}, nil
+// }

+ 112 - 123
pkg/services/alerting/notifier_test.go

@@ -1,125 +1,114 @@
 package alerting
 
-import (
-	"testing"
-
-	"reflect"
-
-	"github.com/grafana/grafana/pkg/components/simplejson"
-	m "github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	. "github.com/smartystreets/goconvey/convey"
-)
-
-func TestAlertNotificationExtraction(t *testing.T) {
-	Convey("Notifier tests", t, func() {
-		Convey("rules for sending notifications", func() {
-			dummieNotifier := NotifierImpl{}
-
-			result := &AlertResult{
-				State: alertstates.Critical,
-			}
-
-			notifier := &Notification{
-				Name:         "Test Notifier",
-				Type:         "TestType",
-				SendCritical: true,
-				SendWarning:  true,
-			}
-
-			Convey("Should send notification", func() {
-				So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeTrue)
-			})
-
-			Convey("warn:false and state:warn should not send", func() {
-				result.State = alertstates.Warn
-				notifier.SendWarning = false
-				So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeFalse)
-			})
-		})
-
-		Convey("Parsing alert notification from settings", func() {
-			Convey("Parsing email", func() {
-				Convey("empty settings should return error", func() {
-					json := `{ }`
-
-					settingsJSON, _ := simplejson.NewJson([]byte(json))
-					model := &m.AlertNotification{
-						Name:     "ops",
-						Type:     "email",
-						Settings: settingsJSON,
-					}
-
-					_, err := NewNotificationFromDBModel(model)
-					So(err, ShouldNotBeNil)
-				})
-
-				Convey("from settings", func() {
-					json := `
-				{
-					"to": "ops@grafana.org"
-				}`
-
-					settingsJSON, _ := simplejson.NewJson([]byte(json))
-					model := &m.AlertNotification{
-						Name:     "ops",
-						Type:     "email",
-						Settings: settingsJSON,
-					}
-
-					not, err := NewNotificationFromDBModel(model)
-
-					So(err, ShouldBeNil)
-					So(not.Name, ShouldEqual, "ops")
-					So(not.Type, ShouldEqual, "email")
-					So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.EmailNotifier")
-
-					email := not.Notifierr.(*EmailNotifier)
-					So(email.To, ShouldEqual, "ops@grafana.org")
-				})
-			})
-
-			Convey("Parsing webhook", func() {
-				Convey("empty settings should return error", func() {
-					json := `{ }`
-
-					settingsJSON, _ := simplejson.NewJson([]byte(json))
-					model := &m.AlertNotification{
-						Name:     "ops",
-						Type:     "webhook",
-						Settings: settingsJSON,
-					}
-
-					_, err := NewNotificationFromDBModel(model)
-					So(err, ShouldNotBeNil)
-				})
-
-				Convey("from settings", func() {
-					json := `
-				{
-					"url": "http://localhost:3000",
-					"username": "username",
-					"password": "password"
-				}`
-
-					settingsJSON, _ := simplejson.NewJson([]byte(json))
-					model := &m.AlertNotification{
-						Name:     "slack",
-						Type:     "webhook",
-						Settings: settingsJSON,
-					}
-
-					not, err := NewNotificationFromDBModel(model)
-
-					So(err, ShouldBeNil)
-					So(not.Name, ShouldEqual, "slack")
-					So(not.Type, ShouldEqual, "webhook")
-					So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.WebhookNotifier")
-
-					webhook := not.Notifierr.(*WebhookNotifier)
-					So(webhook.Url, ShouldEqual, "http://localhost:3000")
-				})
-			})
-		})
-	})
-}
+// func TestAlertNotificationExtraction(t *testing.T) {
+// 	Convey("Notifier tests", t, func() {
+// 		Convey("rules for sending notifications", func() {
+// 			dummieNotifier := NotifierImpl{}
+//
+// 			result := &AlertResult{
+// 				State: alertstates.Critical,
+// 			}
+//
+// 			notifier := &Notification{
+// 				Name:         "Test Notifier",
+// 				Type:         "TestType",
+// 				SendCritical: true,
+// 				SendWarning:  true,
+// 			}
+//
+// 			Convey("Should send notification", func() {
+// 				So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeTrue)
+// 			})
+//
+// 			Convey("warn:false and state:warn should not send", func() {
+// 				result.State = alertstates.Warn
+// 				notifier.SendWarning = false
+// 				So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeFalse)
+// 			})
+// 		})
+//
+// 		Convey("Parsing alert notification from settings", func() {
+// 			Convey("Parsing email", func() {
+// 				Convey("empty settings should return error", func() {
+// 					json := `{ }`
+//
+// 					settingsJSON, _ := simplejson.NewJson([]byte(json))
+// 					model := &m.AlertNotification{
+// 						Name:     "ops",
+// 						Type:     "email",
+// 						Settings: settingsJSON,
+// 					}
+//
+// 					_, err := NewNotificationFromDBModel(model)
+// 					So(err, ShouldNotBeNil)
+// 				})
+//
+// 				Convey("from settings", func() {
+// 					json := `
+// 				{
+// 					"to": "ops@grafana.org"
+// 				}`
+//
+// 					settingsJSON, _ := simplejson.NewJson([]byte(json))
+// 					model := &m.AlertNotification{
+// 						Name:     "ops",
+// 						Type:     "email",
+// 						Settings: settingsJSON,
+// 					}
+//
+// 					not, err := NewNotificationFromDBModel(model)
+//
+// 					So(err, ShouldBeNil)
+// 					So(not.Name, ShouldEqual, "ops")
+// 					So(not.Type, ShouldEqual, "email")
+// 					So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.EmailNotifier")
+//
+// 					email := not.Notifierr.(*EmailNotifier)
+// 					So(email.To, ShouldEqual, "ops@grafana.org")
+// 				})
+// 			})
+//
+// 			Convey("Parsing webhook", func() {
+// 				Convey("empty settings should return error", func() {
+// 					json := `{ }`
+//
+// 					settingsJSON, _ := simplejson.NewJson([]byte(json))
+// 					model := &m.AlertNotification{
+// 						Name:     "ops",
+// 						Type:     "webhook",
+// 						Settings: settingsJSON,
+// 					}
+//
+// 					_, err := NewNotificationFromDBModel(model)
+// 					So(err, ShouldNotBeNil)
+// 				})
+//
+// 				Convey("from settings", func() {
+// 					json := `
+// 				{
+// 					"url": "http://localhost:3000",
+// 					"username": "username",
+// 					"password": "password"
+// 				}`
+//
+// 					settingsJSON, _ := simplejson.NewJson([]byte(json))
+// 					model := &m.AlertNotification{
+// 						Name:     "slack",
+// 						Type:     "webhook",
+// 						Settings: settingsJSON,
+// 					}
+//
+// 					not, err := NewNotificationFromDBModel(model)
+//
+// 					So(err, ShouldBeNil)
+// 					So(not.Name, ShouldEqual, "slack")
+// 					So(not.Type, ShouldEqual, "webhook")
+// 					So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.WebhookNotifier")
+//
+// 					webhook := not.Notifierr.(*WebhookNotifier)
+// 					So(webhook.Url, ShouldEqual, "http://localhost:3000")
+// 				})
+// 			})
+// 		})
+// 	})
+// }

+ 43 - 49
pkg/services/alerting/result_handler.go

@@ -1,16 +1,9 @@
 package alerting
 
-import (
-	"time"
-
-	"github.com/grafana/grafana/pkg/bus"
-	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
-	m "github.com/grafana/grafana/pkg/models"
-)
+import "github.com/grafana/grafana/pkg/log"
 
 type ResultHandler interface {
-	Handle(result *AlertResult)
+	Handle(result *AlertResultContext)
 }
 
 type ResultHandlerImpl struct {
@@ -20,49 +13,50 @@ type ResultHandlerImpl struct {
 
 func NewResultHandler() *ResultHandlerImpl {
 	return &ResultHandlerImpl{
-		log:      log.New("alerting.responseHandler"),
-		notifier: NewNotifier(),
+		log: log.New("alerting.responseHandler"),
+		//notifier: NewNotifier(),
 	}
 }
 
-func (handler *ResultHandlerImpl) Handle(result *AlertResult) {
-	if handler.shouldUpdateState(result) {
-		cmd := &m.UpdateAlertStateCommand{
-			AlertId:         result.AlertJob.Rule.Id,
-			State:           result.State,
-			Info:            result.Description,
-			OrgId:           result.AlertJob.Rule.OrgId,
-			TriggeredAlerts: simplejson.NewFromAny(result.TriggeredAlerts),
-		}
-
-		if err := bus.Dispatch(cmd); err != nil {
-			handler.log.Error("Failed to save state", "error", err)
-		}
-
-		handler.log.Debug("will notify about new state", "new state", result.State)
-		handler.notifier.Notify(result)
-	}
+func (handler *ResultHandlerImpl) Handle(result *AlertResultContext) {
+	// if handler.shouldUpdateState(result) {
+	// 	cmd := &m.UpdateAlertStateCommand{
+	// 		AlertId:         result.Rule.Id,
+	// 		State:           result.Rule.Severity,
+	// 		Info:            result.Description,
+	// 		OrgId:           result.Rule.OrgId,
+	// 		TriggeredAlerts: simplejson.NewFromAny(result.Details),
+	// 	}
+	//
+	// 	if err := bus.Dispatch(cmd); err != nil {
+	// 		handler.log.Error("Failed to save state", "error", err)
+	// 	}
+	//
+	// 	handler.log.Debug("will notify about new state", "new state", result.State)
+	// 	handler.notifier.Notify(result)
+	// }
 }
 
-func (handler *ResultHandlerImpl) shouldUpdateState(result *AlertResult) bool {
-	query := &m.GetLastAlertStateQuery{
-		AlertId: result.AlertJob.Rule.Id,
-		OrgId:   result.AlertJob.Rule.OrgId,
-	}
-
-	if err := bus.Dispatch(query); err != nil {
-		log.Error2("Failed to read last alert state", "error", err)
-		return false
-	}
-
-	if query.Result == nil {
-		return true
-	}
-
-	lastExecution := query.Result.Created
-	asdf := result.StartTime.Add(time.Minute * -15)
-	olderThen15Min := lastExecution.Before(asdf)
-	changedState := query.Result.State != result.State
-
-	return changedState || olderThen15Min
+func (handler *ResultHandlerImpl) shouldUpdateState(result *AlertResultContext) bool {
+	// query := &m.GetLastAlertStateQuery{
+	// 	AlertId: result.AlertJob.Rule.Id,
+	// 	OrgId:   result.AlertJob.Rule.OrgId,
+	// }
+	//
+	// if err := bus.Dispatch(query); err != nil {
+	// 	log.Error2("Failed to read last alert state", "error", err)
+	// 	return false
+	// }
+	//
+	// if query.Result == nil {
+	// 	return true
+	// }
+	//
+	// lastExecution := query.Result.Created
+	// asdf := result.StartTime.Add(time.Minute * -15)
+	// olderThen15Min := lastExecution.Before(asdf)
+	// changedState := query.Result.State != result.State
+	//
+	// return changedState || olderThen15Min
+	return false
 }

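`shouldUpdateState` is stubbed to return false for now, but the commented-out body preserves the intended rule: write a new state when it differs from the last stored one, or when the last record is older than 15 minutes. A standalone sketch of that check (names are stand-ins; the 15-minute window comes from the commented code above):

```go
package main

import (
	"fmt"
	"time"
)

type lastAlertState struct {
	State   string
	Created time.Time
}

// shouldUpdateState mirrors the commented-out logic: update on a state change,
// or when the previous record is older than 15 minutes.
func shouldUpdateState(last *lastAlertState, newState string, evalStart time.Time) bool {
	if last == nil {
		return true // no earlier state recorded
	}
	olderThan15Min := last.Created.Before(evalStart.Add(-15 * time.Minute))
	changedState := last.State != newState
	return changedState || olderThan15Min
}

func main() {
	now := time.Now()
	stale := &lastAlertState{State: "critical", Created: now.Add(-30 * time.Minute)}
	fmt.Println(shouldUpdateState(stale, "critical", now)) // true: older than 15 min
	fresh := &lastAlertState{State: "ok", Created: now}
	fmt.Println(shouldUpdateState(fresh, "ok", now)) // false: same state, recent
}
```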
+ 47 - 57
pkg/services/alerting/result_handler_test.go

@@ -1,59 +1,49 @@
 package alerting
 
-import (
-	"testing"
-	"time"
-
-	"github.com/grafana/grafana/pkg/bus"
-	m "github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	. "github.com/smartystreets/goconvey/convey"
-)
-
-func TestAlertResultHandler(t *testing.T) {
-	Convey("Test result Handler", t, func() {
-		resultHandler := ResultHandlerImpl{}
-		mockResult := &AlertResult{
-			State: alertstates.Ok,
-			AlertJob: &AlertJob{
-				Rule: &AlertRule{
-					Id:    1,
-					OrgId: 1,
-				},
-			},
-		}
-		mockAlertState := &m.AlertState{}
-		bus.ClearBusHandlers()
-		bus.AddHandler("test", func(query *m.GetLastAlertStateQuery) error {
-			query.Result = mockAlertState
-			return nil
-		})
-
-		Convey("Should update", func() {
-
-			Convey("when no earlier alert state", func() {
-				mockAlertState = nil
-				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
-			})
-
-			Convey("alert state have changed", func() {
-				mockAlertState = &m.AlertState{
-					State: alertstates.Critical,
-				}
-				mockResult.State = alertstates.Ok
-				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
-			})
-
-			Convey("last alert state was 15min ago", func() {
-				now := time.Now()
-				mockAlertState = &m.AlertState{
-					State:   alertstates.Critical,
-					Created: now.Add(time.Minute * -30),
-				}
-				mockResult.State = alertstates.Critical
-				mockResult.StartTime = time.Now()
-				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
-			})
-		})
-	})
-}
+// func TestAlertResultHandler(t *testing.T) {
+// 	Convey("Test result Handler", t, func() {
+// 		resultHandler := ResultHandlerImpl{}
+// 		mockResult := &AlertResult{
+// 			State: alertstates.Ok,
+// 			AlertJob: &AlertJob{
+// 				Rule: &AlertRule{
+// 					Id:    1,
+// 					OrgId: 1,
+// 				},
+// 			},
+// 		}
+// 		mockAlertState := &m.AlertState{}
+// 		bus.ClearBusHandlers()
+// 		bus.AddHandler("test", func(query *m.GetLastAlertStateQuery) error {
+// 			query.Result = mockAlertState
+// 			return nil
+// 		})
+//
+// 		Convey("Should update", func() {
+//
+// 			Convey("when no earlier alert state", func() {
+// 				mockAlertState = nil
+// 				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
+// 			})
+//
+// 			Convey("alert state have changed", func() {
+// 				mockAlertState = &m.AlertState{
+// 					State: alertstates.Critical,
+// 				}
+// 				mockResult.State = alertstates.Ok
+// 				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
+// 			})
+//
+// 			Convey("last alert state was 15min ago", func() {
+// 				now := time.Now()
+// 				mockAlertState = &m.AlertState{
+// 					State:   alertstates.Critical,
+// 					Created: now.Add(time.Minute * -30),
+// 				}
+// 				mockResult.State = alertstates.Critical
+// 				mockResult.StartTime = time.Now()
+// 				So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
+// 			})
+// 		})
+// 	})
+// }