
Fixes #69: the lexer now correctly tokenizes number-dash-number segments; a numeric literal is only treated as a number token if it is followed by a punctuator.

Torkel Ödegaard, 12 years ago
commit 8e2008f821
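
A minimal standalone sketch of the rule described in the commit message (illustrative helper names, not the actual lexer.js implementation): a run of digits is emitted as a number token only when it ends at a punctuator or the end of the input, and a leading '-' is folded into the literal; otherwise the run falls through to identifier scanning, which is what keeps a segment like '192-168-1-1' in one piece.

// Illustrative sketch only -- simplified from the idea in this commit,
// not the real lexer.js code.
function isPunctuator(ch) {
  return ch === '.' || ch === '(' || ch === ')' ||
         ch === ',' || ch === '{' || ch === '}';
}

// Decide whether the text starting at `pos` should be read as a number token.
function looksLikeNumber(str, pos) {
  var i = pos;
  if (str[i] === '-') { i += 1; }                    // allow negative literals
  if (!/[0-9]/.test(str[i])) { return false; }
  while (i < str.length && /[0-9.]/.test(str[i])) { i += 1; }
  // Only a number if the digit run ends at a punctuator (or end of input);
  // otherwise it is part of an identifier such as '192-168-1-1'.
  return i >= str.length || isPunctuator(str[i]);
}

looksLikeNumber('offset(test.metric, -100)', 20);  // true  -> '-100' is a number
looksLikeNumber('net.192-168-1-1.ping', 4);        // false -> part of an identifier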

+ 12 - 0
src/app/services/graphite/gfunc.js

@@ -130,6 +130,11 @@ function (_) {
     defaultParams: [10]
   });
 
+  addFuncDef({
+    name: 'cactiStyle',
+    category: categories.Special,
+  });
+
   addFuncDef({
     name: 'scale',
     category: categories.Transform,
@@ -137,6 +142,13 @@ function (_) {
     defaultParams: [1]
   });
 
+  addFuncDef({
+    name: 'offset',
+    category: categories.Transform,
+    params: [ { name: "amount", type: "int", } ],
+    defaultParams: [10]
+  });
+
   addFuncDef({
     name: 'integral',
     category: categories.Transform,
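
For reference, the two definitions added above map to Graphite's cactiStyle (Special category, no parameters) and offset (Transform category, a single integer "amount", default 10). A hypothetical illustration of the target strings these definitions describe, matching the specs added below:

// Hypothetical illustration only, not part of gfunc.js:
var offsetTarget = 'offset(test.metric, -100)';       // offset(seriesList, amount)
var combined = 'cactiStyle(' + offsetTarget + ')';    // cactiStyle(seriesList)
// combined === 'cactiStyle(offset(test.metric, -100))'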

+ 20 - 5
src/app/services/graphite/lexer.js

@@ -402,13 +402,20 @@ define([
           (ch >= "a" && ch <= "z") || (ch >= "A" && ch <= "Z");
       }
 
+      // handle negative num literals
+      if (char === '-') {
+        value += char;
+        index += 1;
+        char = this.peek(index);
+      }
+
       // Numbers must start either with a decimal digit or a point.
       if (char !== "." && !isDecimalDigit(char)) {
         return null;
       }
 
       if (char !== ".") {
-        value = this.peek(index);
+        value += this.peek(index);
         index += 1;
         char = this.peek(index);
 
@@ -555,7 +562,7 @@ define([
 
       if (index < length) {
         char = this.peek(index);
-        if (isIdentifierStart(char)) {
+        if (!this.isPunctuator(char)) {
           return null;
         }
       }
@@ -569,9 +576,7 @@ define([
       };
     },
 
-    scanPunctuator: function () {
-      var ch1 = this.peek();
-
+    isPunctuator: function (ch1) {
       switch (ch1) {
       case ".":
       case "(":
@@ -579,6 +584,16 @@ define([
       case ",":
       case "{":
       case "}":
+        return true;
+      }
+
+      return false;
+    },
+
+    scanPunctuator: function () {
+      var ch1 = this.peek();
+
+      if (this.isPunctuator(ch1)) {
         return {
           type: ch1,
           value: ch1,

+ 15 - 0
src/test/specs/lexer-specs.js

@@ -21,6 +21,21 @@ define([
       expect(tokens[4].value).to.be('se1-server-*');
     });
 
+    it('should tokenize metric expression with dash2', function() {
+      var lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
+      var tokens = lexer.tokenize();
+      expect(tokens[0].value).to.be('net');
+      expect(tokens[2].value).to.be('192-168-1-1');
+    });
+
+    it('simple function2', function() {
+      var lexer = new Lexer('offset(test.metric, -100)');
+      var tokens = lexer.tokenize();
+      expect(tokens[2].type).to.be('identifier');
+      expect(tokens[4].type).to.be('identifier');
+      expect(tokens[6].type).to.be('number');
+    });
+
     it('should tokenize metric expression with curly braces', function() {
       var lexer = new Lexer('metric.se1-{first, second}.count');
       var tokens = lexer.tokenize();

+ 15 - 0
src/test/specs/parser-specs.js

@@ -49,6 +49,14 @@ define([
       expect(rootNode.params.length).to.be(1);
     });
 
+    it('simple function2', function() {
+      var parser = new Parser('offset(test.metric, -100)');
+      var rootNode = parser.getAst();
+      expect(rootNode.type).to.be('function');
+      expect(rootNode.params[0].type).to.be('metric');
+      expect(rootNode.params[1].type).to.be('number');
+    });
+
     it('simple function with string arg', function() {
       var parser = new Parser("randomWalk('test')");
       var rootNode = parser.getAst();
@@ -125,6 +133,13 @@ define([
       expect(rootNode.pos).to.be(11);
     });
 
+    it('handle issue #69', function() {
+      var parser = new Parser('cactiStyle(offset(scale(net.192-168-1-1.192-168-1-9.ping_value.*,0.001),-100))');
+      var rootNode = parser.getAst();
+      expect(rootNode.type).to.be('function');
+    });
+
+
   });
 
 });