@@ -2,6 +2,7 @@ package notifiers
 
 import (
 	"context"
+	"regexp"
 	"time"
 
 	"github.com/grafana/grafana/pkg/bus"
@@ -27,6 +28,7 @@ func init() {
 	})
 }
 
+// NewAlertmanagerNotifier returns a new Alertmanager notifier
 func NewAlertmanagerNotifier(model *models.AlertNotification) (alerting.Notifier, error) {
 	url := model.Settings.Get("url").MustString()
 	if url == "" {
@@ -35,38 +37,42 @@ func NewAlertmanagerNotifier(model *models.AlertNotification) (alerting.Notifier
 
 	return &AlertmanagerNotifier{
 		NotifierBase: NewNotifierBase(model),
-		Url:          url,
+		URL:          url,
 		log:          log.New("alerting.notifier.prometheus-alertmanager"),
 	}, nil
 }
 
+// AlertmanagerNotifier sends alert notifications to the alert manager
 type AlertmanagerNotifier struct {
 	NotifierBase
-	Url string
+	URL string
 	log log.Logger
 }
 
-func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *models.AlertNotificationState) bool {
-	this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState)
+// ShouldNotify returns true if the notifier should be used depending on state
+func (am *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *models.AlertNotificationState) bool {
+	am.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState)
 
 	// Do not notify when we become OK for the first time.
 	if (evalContext.PrevAlertState == models.AlertStatePending) && (evalContext.Rule.State == models.AlertStateOK) {
 		return false
 	}
+
 	// Notify on Alerting -> OK to resolve before alertmanager timeout.
 	if (evalContext.PrevAlertState == models.AlertStateAlerting) && (evalContext.Rule.State == models.AlertStateOK) {
 		return true
 	}
+
 	return evalContext.Rule.State == models.AlertStateAlerting
 }
 
-func (this *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, match *alerting.EvalMatch, ruleUrl string) *simplejson.Json {
+func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, match *alerting.EvalMatch, ruleURL string) *simplejson.Json {
 	alertJSON := simplejson.New()
 	alertJSON.Set("startsAt", evalContext.StartTime.UTC().Format(time.RFC3339))
 	if evalContext.Rule.State == models.AlertStateOK {
 		alertJSON.Set("endsAt", time.Now().UTC().Format(time.RFC3339))
 	}
-	alertJSON.Set("generatorURL", ruleUrl)
+	alertJSON.Set("generatorURL", ruleURL)
 
 	// Annotations (summary and description are very commonly used).
 	alertJSON.SetPath([]string{"annotations", "summary"}, evalContext.Rule.Name)
@@ -94,7 +100,7 @@ func (this *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext,
 			tags["metric"] = match.Metric
 		} else {
 			for k, v := range match.Tags {
-				tags[k] = v
+				tags[replaceIllegalCharsInLabelname(k)] = v
 			}
 		}
 	}
@@ -103,25 +109,26 @@ func (this *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext,
 	return alertJSON
 }
 
-func (this *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) error {
-	this.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.Id, "notification", this.Name)
+// Notify sends alert notifications to the alert manager
+func (am *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) error {
+	am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.Id, "notification", am.Name)
 
-	ruleUrl, err := evalContext.GetRuleUrl()
+	ruleURL, err := evalContext.GetRuleUrl()
 	if err != nil {
-		this.log.Error("Failed get rule link", "error", err)
+		am.log.Error("Failed get rule link", "error", err)
 		return err
 	}
 
 	// Send one alert per matching series.
 	alerts := make([]interface{}, 0)
 	for _, match := range evalContext.EvalMatches {
-		alert := this.createAlert(evalContext, match, ruleUrl)
+		alert := am.createAlert(evalContext, match, ruleURL)
 		alerts = append(alerts, alert)
 	}
 
 	// This happens on ExecutionError or NoData
 	if len(alerts) == 0 {
-		alert := this.createAlert(evalContext, nil, ruleUrl)
+		alert := am.createAlert(evalContext, nil, ruleURL)
 		alerts = append(alerts, alert)
 	}
 
@@ -129,15 +136,23 @@ func (this *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) erro
 	body, _ := bodyJSON.MarshalJSON()
 
 	cmd := &models.SendWebhookSync{
-		Url:        this.Url + "/api/v1/alerts",
+		Url:        am.URL + "/api/v1/alerts",
 		HttpMethod: "POST",
 		Body:       string(body),
 	}
 
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
-		this.log.Error("Failed to send alertmanager", "error", err, "alertmanager", this.Name)
+		am.log.Error("Failed to send alertmanager", "error", err, "alertmanager", am.Name)
 		return err
 	}
 
 	return nil
 }
+
+// regexp that matches all invalid label name characters
+// https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
+var labelNamePattern = regexp.MustCompile(`[^a-zA-Z0-9_]`)
+
+func replaceIllegalCharsInLabelname(input string) string {
+	return labelNamePattern.ReplaceAllString(input, "_")
+}
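For reference, a minimal sketch of what the new label-sanitization helper does. It is illustrative only, not part of the diff, and assumes a hypothetical test file placed in the same notifiers package; every character of a tag key outside [a-zA-Z0-9_] is rewritten to an underscore before the key is used as a Prometheus label name:

package notifiers

import "testing"

func TestReplaceIllegalCharsInLabelname(t *testing.T) {
	// "device.mount-point/1" contains '.', '-' and '/', none of which are
	// allowed in a Prometheus label name, so each is replaced by '_'.
	got := replaceIllegalCharsInLabelname("device.mount-point/1")
	want := "device_mount_point_1"
	if got != want {
		t.Errorf("got %q, want %q", got, want)
	}
}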