
Merge branch 'master' into dashboard-acl-ux2

Marcus Efraimsson 7 years ago
Parent
Commit
92768d5a2b

+ 2 - 2
.bra.toml

@@ -1,6 +1,6 @@
 [run]
 init_cmds = [
-  ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
+  ["go", "run", "build.go", "build"],
 	["./bin/grafana-server", "cfg:app_mode=development"]
 ]
 watch_all = true
@@ -12,6 +12,6 @@ watch_dirs = [
 watch_exts = [".go", ".ini", ".toml"]
 build_delay = 1500
 cmds = [
-  ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
+  ["go", "run", "build.go", "build"],
 	["./bin/grafana-server", "cfg:app_mode=development"]
 ]

+ 1 - 0
CHANGELOG.md

@@ -49,6 +49,7 @@
 * **Prometheus**: tooltip for legend format not showing properly [#11516](https://github.com/grafana/grafana/issues/11516), thx [@svenklemm](https://github.com/svenklemm) 
 * **Playlist**: Empty playlists cannot be deleted [#11133](https://github.com/grafana/grafana/issues/11133), thx [@kichristensen](https://github.com/kichristensen) 
 * **Switch Orgs**: Alphabetic order in Switch Organization modal [#11556](https://github.com/grafana/grafana/issues/11556)
+* **Postgres**: improve `$__timeFilter` macro [#11578](https://github.com/grafana/grafana/issues/11578), thx [@svenklemm](https://github.com/svenklemm)
 
 ### Tech
 * Migrated JavaScript files to TypeScript

+ 1 - 0
Gruntfile.js

@@ -22,6 +22,7 @@ module.exports = function (grunt) {
     }
   }
 
+  config.coverage = grunt.option('coverage');
   config.phjs = grunt.option('phjsToRelease');
   config.pkg.version = grunt.option('pkgVer') || config.pkg.version;
 

+ 13 - 0
codecov.yml

@@ -0,0 +1,13 @@
+coverage:
+  precision: 2
+  round: down
+  range: "50...100"
+
+  status:
+    project: yes
+    patch: yes
+    changes: no
+
+comment:
+  layout: "diff"
+  behavior: "once"

+ 1 - 0
package.json

@@ -102,6 +102,7 @@
     "watch": "webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js",
     "build": "grunt build",
     "test": "grunt test",
+    "test:coverage": "grunt test --coverage=true",
     "lint": "tslint -c tslint.json --project tsconfig.json --type-check",
     "karma": "grunt karma:dev",
     "jest": "jest --notify --watch",

+ 1 - 1
pkg/cmd/grafana-cli/services/services.go

@@ -42,7 +42,7 @@ func Init(version string, skipTLSVerify bool) {
 	}
 
 	HttpClient = http.Client{
-		Timeout:   time.Duration(10 * time.Second),
+		Timeout:   10 * time.Second,
 		Transport: tr,
 	}
 }
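
The Timeout cleanups here and in metrics.go, datasource_cache.go and update_checker.go below rely on the fact that 10 * time.Second is an untyped constant multiplied by a time.Duration, so the product is already a time.Duration and the explicit conversion was a no-op. A minimal standalone sketch of the equivalence (not Grafana code):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Both expressions have the same type and value; time.Duration(...) adds nothing.
	withConversion := time.Duration(10 * time.Second)
	withoutConversion := 10 * time.Second
	fmt.Println(withConversion == withoutConversion) // true

	// The simplified form is what the HTTP clients in this commit now use.
	client := http.Client{Timeout: 10 * time.Second}
	fmt.Println(client.Timeout) // 10s
}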

+ 2 - 2
pkg/components/apikeygen/apikeygen.go

@@ -33,7 +33,7 @@ func New(orgId int64, name string) KeyGenResult {
 
 	jsonString, _ := json.Marshal(jsonKey)
 
-	result.ClientSecret = base64.StdEncoding.EncodeToString([]byte(jsonString))
+	result.ClientSecret = base64.StdEncoding.EncodeToString(jsonString)
 	return result
 }
 
@@ -44,7 +44,7 @@ func Decode(keyString string) (*ApiKeyJson, error) {
 	}
 
 	var keyObj ApiKeyJson
-	err = json.Unmarshal([]byte(jsonString), &keyObj)
+	err = json.Unmarshal(jsonString, &keyObj)
 	if err != nil {
 		return nil, ErrInvalidApiKey
 	}
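
Same idea in this file: json.Marshal already returns []byte, and the decoded secret handed to json.Unmarshal is already []byte, so the []byte(...) wrappers were redundant. A small sketch, using a hypothetical payload struct rather than the real ApiKeyJson fields:

package main

import (
	"encoding/json"
	"fmt"
)

// payload is a hypothetical stand-in for ApiKeyJson.
type payload struct {
	OrgID int64  `json:"id"`
	Name  string `json:"n"`
}

func main() {
	raw, err := json.Marshal(payload{OrgID: 1, Name: "demo"}) // raw is already []byte
	if err != nil {
		panic(err)
	}

	var decoded payload
	if err := json.Unmarshal(raw, &decoded); err != nil { // no []byte(raw) needed
		panic(err)
	}
	fmt.Printf("%+v\n", decoded)
}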

+ 1 - 1
pkg/components/imguploader/azureblobuploader.go

@@ -225,7 +225,7 @@ func (a *Auth) SignRequest(req *http.Request) {
 	)
 	decodedKey, _ := base64.StdEncoding.DecodeString(a.Key)
 
-	sha256 := hmac.New(sha256.New, []byte(decodedKey))
+	sha256 := hmac.New(sha256.New, decodedKey)
 	sha256.Write([]byte(strToSign))
 
 	signature := base64.StdEncoding.EncodeToString(sha256.Sum(nil))

+ 1 - 1
pkg/components/null/float.go

@@ -50,7 +50,7 @@ func (f *Float) UnmarshalJSON(data []byte) error {
 	}
 	switch x := v.(type) {
 	case float64:
-		f.Float64 = float64(x)
+		f.Float64 = x
 	case map[string]interface{}:
 		err = json.Unmarshal(data, &f.NullFloat64)
 	case nil:

+ 1 - 1
pkg/metrics/metrics.go

@@ -403,6 +403,6 @@ func sendUsageStats() {
 	out, _ := json.MarshalIndent(report, "", " ")
 	data := bytes.NewBuffer(out)
 
-	client := http.Client{Timeout: time.Duration(5 * time.Second)}
+	client := http.Client{Timeout: 5 * time.Second}
 	go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data)
 }

+ 1 - 1
pkg/models/datasource_cache.go

@@ -33,7 +33,7 @@ func (ds *DataSource) GetHttpClient() (*http.Client, error) {
 	}
 
 	return &http.Client{
-		Timeout:   time.Duration(30 * time.Second),
+		Timeout:   30 * time.Second,
 		Transport: transport,
 	}, nil
 }

+ 1 - 1
pkg/plugins/models.go

@@ -69,7 +69,7 @@ func (pb *PluginBase) registerPlugin(pluginDir string) error {
 
 	for _, include := range pb.Includes {
 		if include.Role == "" {
-			include.Role = m.RoleType(m.ROLE_VIEWER)
+			include.Role = m.ROLE_VIEWER
 		}
 	}
 

+ 1 - 1
pkg/plugins/update_checker.go

@@ -13,7 +13,7 @@ import (
 )
 
 var (
-	httpClient http.Client = http.Client{Timeout: time.Duration(10 * time.Second)}
+	httpClient http.Client = http.Client{Timeout: 10 * time.Second}
 )
 
 type GrafanaNetPlugin struct {

+ 1 - 1
pkg/services/alerting/notifiers/line.go

@@ -90,7 +90,7 @@ func (this *LineNotifier) createAlert(evalContext *alerting.EvalContext) error {
 	}
 
 	if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
-		this.log.Error("Failed to send notification to LINE", "error", err, "body", string(body))
+		this.log.Error("Failed to send notification to LINE", "error", err, "body", body)
 		return err
 	}
 

+ 2 - 2
pkg/tsdb/cloudwatch/annotation_query.go

@@ -72,7 +72,7 @@ func (e *CloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 				MetricName: aws.String(metricName),
 				Dimensions: qd,
 				Statistic:  aws.String(s),
-				Period:     aws.Int64(int64(period)),
+				Period:     aws.Int64(period),
 			}
 			resp, err := svc.DescribeAlarmsForMetric(params)
 			if err != nil {
@@ -88,7 +88,7 @@ func (e *CloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 				MetricName:        aws.String(metricName),
 				Dimensions:        qd,
 				ExtendedStatistic: aws.String(s),
-				Period:            aws.Int64(int64(period)),
+				Period:            aws.Int64(period),
 			}
 			resp, err := svc.DescribeAlarmsForMetric(params)
 			if err != nil {

+ 4 - 4
pkg/tsdb/postgres/macros.go

@@ -79,15 +79,15 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		}
 		return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil
 	case "__timeFilter":
-		// don't use to_timestamp in this macro for redshift compatibility #9566
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
-		return fmt.Sprintf("extract(epoch from %s) BETWEEN %d AND %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), m.TimeRange.GetToAsSecondsEpoch()), nil
+
+		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeFrom":
-		return fmt.Sprintf("to_timestamp(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeTo":
-		return fmt.Sprintf("to_timestamp(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil
+		return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

+ 10 - 10
pkg/tsdb/postgres/macros_test.go

@@ -12,7 +12,7 @@ import (
 
 func TestMacroEngine(t *testing.T) {
 	Convey("MacroEngine", t, func() {
-		engine := &PostgresMacroEngine{}
+		engine := NewPostgresMacroEngine()
 		query := &tsdb.Query{}
 
 		Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
@@ -38,14 +38,14 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE extract(epoch from time_column) BETWEEN %d AND %d", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeGroup function", func() {
@@ -68,7 +68,7 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE extract(epoch from time_column) BETWEEN %d AND %d", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {
@@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) {
 				sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("WHERE extract(epoch from time_column) BETWEEN %d AND %d", from.Unix(), to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeFrom function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", from.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __timeTo function", func() {
 				sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 				So(err, ShouldBeNil)
 
-				So(sql, ShouldEqual, fmt.Sprintf("select to_timestamp(%d)", to.Unix()))
+				So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 			})
 
 			Convey("interpolate __unixEpochFilter function", func() {

+ 2 - 2
pkg/tsdb/prometheus/prometheus.go

@@ -108,8 +108,8 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
 
 		span, ctx := opentracing.StartSpanFromContext(ctx, "alerting.prometheus")
 		span.SetTag("expr", query.Expr)
-		span.SetTag("start_unixnano", int64(query.Start.UnixNano()))
-		span.SetTag("stop_unixnano", int64(query.End.UnixNano()))
+		span.SetTag("start_unixnano", query.Start.UnixNano())
+		span.SetTag("stop_unixnano", query.End.UnixNano())
 		defer span.Finish()
 
 		value, err := client.QueryRange(ctx, query.Expr, timeRange)

+ 8 - 0
pkg/tsdb/time_range.go

@@ -37,6 +37,10 @@ func (tr *TimeRange) GetFromAsSecondsEpoch() int64 {
 	return tr.GetFromAsMsEpoch() / 1000
 }
 
+func (tr *TimeRange) GetFromAsTimeUTC() time.Time {
+	return tr.MustGetFrom().UTC()
+}
+
 func (tr *TimeRange) GetToAsMsEpoch() int64 {
 	return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
 }
@@ -45,6 +49,10 @@ func (tr *TimeRange) GetToAsSecondsEpoch() int64 {
 	return tr.GetToAsMsEpoch() / 1000
 }
 
+func (tr *TimeRange) GetToAsTimeUTC() time.Time {
+	return tr.MustGetTo().UTC()
+}
+
 func (tr *TimeRange) MustGetFrom() time.Time {
 	if res, err := tr.ParseFrom(); err != nil {
 		return time.Unix(0, 0)
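
The two new accessors only normalize the parsed bounds to UTC so the macro engine can format them consistently. A sketch of the equivalent standalone logic, assuming the bound has already been parsed to a time.Time:

package main

import (
	"fmt"
	"time"
)

// asUTC mirrors what GetFromAsTimeUTC / GetToAsTimeUTC do with an
// already-parsed bound: normalize it to UTC before formatting.
func asUTC(t time.Time) time.Time {
	return t.UTC()
}

func main() {
	local := time.Date(2018, 4, 12, 20, 0, 0, 0, time.FixedZone("CEST", 2*60*60))
	fmt.Println(asUTC(local).Format(time.RFC3339)) // 2018-04-12T18:00:00Z
}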

+ 1 - 1
public/app/plugins/datasource/postgres/module.ts

@@ -8,7 +8,7 @@ class PostgresConfigCtrl {
 
   /** @ngInject **/
   constructor($scope) {
-    this.current.jsonData.sslmode = this.current.jsonData.sslmode || 'require';
+    this.current.jsonData.sslmode = this.current.jsonData.sslmode || 'verify-full';
   }
 }
 

+ 4 - 4
public/app/plugins/datasource/postgres/partials/annotations.editor.html

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -&gt; column as "time"
 - $__timeEpoch -&gt; extract(epoch from column) as "time"
-- $__timeFilter(column) -&gt;  column &ge; to_timestamp(1492750877) AND column &le; to_timestamp(1492750877)
-- $__unixEpochFilter(column) -&gt;  column &gt; 1492750877 AND column &lt; 1492750877
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__unixEpochFilter(column) -&gt;  column &gt;= 1492750877 AND column &lt;= 1492750877
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  to_timestamp(1492750877)
-- $__timeTo() -&gt;  to_timestamp(1492750877)
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>

+ 4 - 4
public/app/plugins/datasource/postgres/partials/query.editor.html

@@ -48,8 +48,8 @@ Table:
 Macros:
 - $__time(column) -&gt; column as "time"
 - $__timeEpoch -&gt; extract(epoch from column) as "time"
-- $__timeFilter(column) -&gt;  extract(epoch from column) BETWEEN 1492750877 AND 1492750877
-- $__unixEpochFilter(column) -&gt;  column &gt; 1492750877 AND column &lt; 1492750877
+- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__unixEpochFilter(column) -&gt;  column &gt;= 1492750877 AND column &lt;= 1492750877
 - $__timeGroup(column,'5m') -&gt; (extract(epoch from column)/300)::bigint*300 AS time
 
 Example of group by and order by with $__timeGroup:
@@ -61,8 +61,8 @@ GROUP BY time
 ORDER BY time
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt;  to_timestamp(1492750877)
-- $__timeTo() -&gt;  to_timestamp(1492750877)
+- $__timeFrom() -&gt;  '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt;  '2017-04-21T05:01:17Z'
 - $__unixEpochFrom() -&gt;  1492750877
 - $__unixEpochTo() -&gt;  1492750877
 		</pre>

+ 14 - 1
scripts/circle-test-backend.sh

@@ -20,4 +20,17 @@ echo "building backend with install to cache pkgs"
 exit_if_fail time go install ./pkg/cmd/grafana-server
 
 echo "running go test"
-go test ./pkg/...
+
+set -e
+echo "" > coverage.txt
+
+time for d in $(go list ./pkg/...); do
+  exit_if_fail go test -coverprofile=profile.out -covermode=atomic $d
+  if [ -f profile.out ]; then
+    cat profile.out >> coverage.txt
+    rm profile.out
+  fi
+done
+
+echo "Publishing go code coverage"
+bash <(curl -s https://codecov.io/bash) -cF go

+ 7 - 2
scripts/circle-test-frontend.sh

@@ -10,5 +10,10 @@ function exit_if_fail {
     fi
 }
 
-exit_if_fail npm run test
-exit_if_fail npm run build
+exit_if_fail npm run test:coverage
+exit_if_fail npm run build
+
+# publish code coverage
+echo "Publishing javascript code coverage"
+bash <(curl -s https://codecov.io/bash) -cF javascript
+rm -rf coverage

+ 6 - 1
scripts/grunt/options/exec.js

@@ -1,9 +1,14 @@
 module.exports = function(config, grunt) {
   'use strict';
 
+  var coverage = '';
+  if (config.coverage) {
+    coverage = '--coverage --maxWorkers 2';
+  }
+
   return {
     tslint: 'node ./node_modules/tslint/lib/tslint-cli.js -c tslint.json --project ./tsconfig.json',
-    jest: 'node ./node_modules/jest-cli/bin/jest.js --maxWorkers 2',
+    jest: 'node ./node_modules/jest-cli/bin/jest.js ' + coverage,
     webpack: 'node ./node_modules/webpack/bin/webpack.js --config scripts/webpack/webpack.prod.js',
   };
 };