
feat: Adding sink parsing

Matthias Ladkau committed 3 years ago (commit e2a97c3572)

+ 9 - 23
lang/ecal/parser/const.go

@@ -238,6 +238,15 @@ const (
 
 	NodeIMPORT = "import"
 
+	// Sink definition
+
+	NodeSINK       = "sink"
+	NodeKINDMATCH  = "kindmatch"
+	NodeSCOPEMATCH = "scopematch"
+	NodeSTATEMATCH = "statematch"
+	NodePRIORITY   = "priority"
+	NodeSUPPRESSES = "suppresses"
+
 	// Function definition
 
 	NodeFUNC   = "function"
@@ -265,31 +274,8 @@ const (
 
 /*
 
-	NodeLIST       = "list"       // List value
-	NodeMAP        = "map"        // Map value
 	NodeGUARD      = "guard"      // Guard expressions for conditional statements
 
-	// Map entries
-
-	NodeMAPENTRY = "entry" // Map entry value
-
-	// Function call statement
-
-	NodeFUNCCALL = "funccall"
-
-	// Data structure access
-
-	NodeACCESS = "access"
-
-	// Sink definition
-
-	NodeSINK       = "sink"
-	NodeKINDMATCH  = "kindmatch"
-	NodeSCOPEMATCH = "scopematch"
-	NodeSTATEMATCH = "statematch"
-	NodePRIORITY   = "priority"
-	NodeSUPPRESSES = "suppresses"
-
 	// Block statements
 
 	NodeCOND = "cond"

+ 5 - 3
lang/ecal/parser/lexer.go

@@ -564,7 +564,8 @@ func lexToken(l *lexer) lexFunc {
 	// First try to parse a number
 
 	lexNumberBlock(l)
-	keywordCandidate := strings.ToLower(l.input[l.start:l.pos])
+	identifierCandidate := l.input[l.start:l.pos]
+	keywordCandidate := strings.ToLower(identifierCandidate)
 
 	// Check for number
 
@@ -581,7 +582,8 @@ func lexToken(l *lexer) lexFunc {
 		l.backup(l.pos - l.start)
 	}
 	lexTextBlock(l, true)
-	keywordCandidate = strings.ToLower(l.input[l.start:l.pos])
+	identifierCandidate = l.input[l.start:l.pos]
+	keywordCandidate = strings.ToLower(identifierCandidate)
 
 	// Check for keyword
 
@@ -609,7 +611,7 @@ func lexToken(l *lexer) lexFunc {
 
 		// An identifier was found
 
-		l.emitTokenAndValue(TokenIDENTIFIER, keywordCandidate, true)
+		l.emitTokenAndValue(TokenIDENTIFIER, identifierCandidate, true)
 	}
 
 	return lexToken

+ 2 - 2
lang/ecal/parser/lexer_test.go

@@ -115,9 +115,9 @@ func TestBasicTokenLexing(t *testing.T) {
 		return
 	}
 
-	input = `test := not a * 1.3 or (12 / aa) * 5 DIV 3 % 1 > true`
+	input = `test := not a * 1.3 or (12 / aa) * 5 DiV 3 % 1 > trUe`
 	if res := LexToList("mytest", input); fmt.Sprint(res) !=
-		`["test" := <NOT> "a" * v:"1.3" <OR> ( v:"12" / "aa" ) * v:"5" "div" v:"3" % v:"1" > <TRUE> EOF]` {
+		`["test" := <NOT> "a" * v:"1.3" <OR> ( v:"12" / "aa" ) * v:"5" "DiV" v:"3" % v:"1" > <TRUE> EOF]` {
 		t.Error("Unexpected lexer result:\n  ", res)
 		return
 	}
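
The lexer change above makes keyword matching work on a lowercased copy of the scanned text while the emitted identifier keeps its original spelling, which is what the adjusted test expectation checks: DiV is now emitted verbatim as an identifier, and trUe still resolves to the TRUE keyword. A minimal standalone sketch of that rule, assuming a made-up keyword table and classify helper that are not part of the ECAL lexer:

package main

import (
	"fmt"
	"strings"
)

// keywords is a stand-in keyword table for this sketch only.
var keywords = map[string]bool{"true": true, "or": true, "not": true}

// classify uses the lowercased form solely for keyword lookup;
// anything else is emitted as an identifier with its case preserved.
func classify(candidate string) string {
	if keywords[strings.ToLower(candidate)] {
		return "keyword <" + strings.ToUpper(candidate) + ">"
	}
	return "identifier \"" + candidate + "\""
}

func main() {
	fmt.Println(classify("trUe")) // keyword <TRUE>
	fmt.Println(classify("DiV"))  // identifier "DiV"
}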

+ 44 - 11
lang/ecal/parser/parser.go

@@ -88,18 +88,14 @@ func init() {
 		TokenIMPORT: {NodeIMPORT, nil, nil, nil, nil, 0, ndImport, nil},
 		TokenAS:     {"", nil, nil, nil, nil, 0, ndImport, nil},
 
-		/*
-			// Sink definition
-
-			TokenSINK
-			TokenKINDMATCH
-			TokenSCOPEMATCH
-			TokenSTATEMATCH
-			TokenPRIORITY
-			TokenSUPPRESSES
+		// Sink definition
 
-
-		*/
+		TokenSINK:       {NodeSINK, nil, nil, nil, nil, 0, ndSink, nil},
+		TokenKINDMATCH:  {NodeKINDMATCH, nil, nil, nil, nil, 150, ndPrefix, nil},
+		TokenSCOPEMATCH: {NodeSCOPEMATCH, nil, nil, nil, nil, 150, ndPrefix, nil},
+		TokenSTATEMATCH: {NodeSTATEMATCH, nil, nil, nil, nil, 150, ndPrefix, nil},
+		TokenPRIORITY:   {NodePRIORITY, nil, nil, nil, nil, 150, ndPrefix, nil},
+		TokenSUPPRESSES: {NodeSUPPRESSES, nil, nil, nil, nil, 150, ndPrefix, nil},
 
 		// Function definition
 
@@ -404,6 +400,43 @@ func ndImport(p *parser, self *ASTNode) (*ASTNode, error) {
 	return self, err
 }
 
+/*
+ndSink is used to parse sinks.
+*/
+func ndSink(p *parser, self *ASTNode) (*ASTNode, error) {
+	var ret *ASTNode
+
+	// Must specify a name
+
+	err := acceptChild(p, self, TokenIDENTIFIER)
+
+	if err == nil {
+
+		// Parse the rest of the parameters as children until we reach the body
+
+		for p.node.Token.ID != TokenEOF && p.node.Token.ID != TokenLBRACE {
+			exp, err := p.run(150)
+			if err != nil {
+				return nil, err
+			}
+
+			self.Children = append(self.Children, exp)
+
+			// Skip commas
+
+			if p.node.Token.ID == TokenCOMMA {
+				skipToken(p, TokenCOMMA)
+			}
+		}
+
+		// Parse the body
+
+		ret, err = parseInnerStatements(p, self)
+	}
+
+	return ret, err
+}
+
 /*
 ndFunc is used to parse function definitions.
 */
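
The new sink handler requires an identifier for the sink name, then collects each match clause via p.run(150) (the same value carried by the kindmatch, scopematch, statematch, priority and suppresses table entries above) until it reaches the opening brace of the body, skipping optional commas between clauses, and finally hands the body to parseInnerStatements. A rough standalone sketch of that collection loop; the token slice and the parseClause helper are simplified stand-ins, not the real parser API:

package main

import "fmt"

// tok is a stand-in for the parser's token type.
type tok struct{ id, val string }

// parseClause stands in for p.run(150): it consumes one clause
// keyword plus the expression that follows it.
func parseClause(ts []tok) (string, []tok) {
	return ts[0].val + " " + ts[1].val, ts[2:]
}

func main() {
	// sink fooBar kindmatch [...], priority 0 { ... }
	ts := []tok{
		{"IDENT", "fooBar"},
		{"KINDMATCH", "kindmatch"}, {"EXPR", `["t.do.bla"]`}, {"COMMA", ","},
		{"PRIORITY", "priority"}, {"EXPR", "0"},
		{"LBRACE", "{"},
	}

	name := ts[0].val // the sink name is mandatory
	ts = ts[1:]

	var clauses []string
	for len(ts) > 0 && ts[0].id != "LBRACE" { // stop at the body
		var c string
		c, ts = parseClause(ts)
		clauses = append(clauses, c)
		if len(ts) > 0 && ts[0].id == "COMMA" { // commas are optional separators
			ts = ts[1:]
		}
	}

	fmt.Println("sink", name, "clauses:", clauses)
	// sink fooBar clauses: [kindmatch ["t.do.bla"] priority 0]
}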

+ 97 - 4
lang/ecal/parser/parser_func_test.go

@@ -16,22 +16,115 @@ import (
 
 func TestImportParsing(t *testing.T) {
 
-	input := `import "foo/bar.ecal" as foobar
-	i := foobar`
+	input := `import "foo/bar.ecal" as fooBar
+	i := fooBar`
 	expectedOutput := `
 statements
   import
     string: 'foo/bar.ecal'
-    identifier: foobar
+    identifier: fooBar
   :=
     identifier: i
-    identifier: foobar
+    identifier: fooBar
+`[1:]
+
+	if res, err := UnitTestParse("mytest", input); err != nil || fmt.Sprint(res) != expectedOutput {
+		t.Error("Unexpected parser output:\n", res, "expected was:\n", expectedOutput, "Error:", err)
+		return
+	}
+}
+
+func TestSinkParsing(t *testing.T) {
+
+	input := `
+	sink fooBar 
+    kindmatch [ "priority", "t.do.bla" ],
+	scopematch [ "data.read", "data.write" ],
+	statematch { "priority:" : 5, test: 1, "bla 1": null },
+	priority 0,
+	suppresses [ "test1", test2 ]
+	{
+		print("test1");
+		print("test2")
+	}
+`
+	expectedOutput := `
+sink
+  identifier: fooBar
+  kindmatch
+    list
+      string: 'priority'
+      string: 't.do.bla'
+  scopematch
+    list
+      string: 'data.read'
+      string: 'data.write'
+  statematch
+    map
+      kvp
+        string: 'priority:'
+        number: 5
+      kvp
+        identifier: test
+        number: 1
+      kvp
+        string: 'bla 1'
+        null
+  priority
+    number: 0
+  suppresses
+    list
+      string: 'test1'
+      identifier: test2
+  statements
+    identifier: print
+      funccall
+        string: 'test1'
+    identifier: print
+      funccall
+        string: 'test2'
 `[1:]
 
 	if res, err := UnitTestParse("mytest", input); err != nil || fmt.Sprint(res) != expectedOutput {
 		t.Error("Unexpected parser output:\n", res, "expected was:\n", expectedOutput, "Error:", err)
 		return
 	}
+
+	input = `
+	sink mySink
+    kindmatch [ "priority", t.do.bla ]
+	{
+	}
+`
+	expectedOutput = `
+sink
+  identifier: mySink
+  kindmatch
+    list
+      string: 'priority'
+      identifier: t
+        identifier: do
+          identifier: bla
+  statements
+`[1:]
+
+	if res, err := UnitTestParse("mytest", input); err != nil || fmt.Sprint(res) != expectedOutput {
+		t.Error("Unexpected parser output:\n", res, "expected was:\n", expectedOutput, "Error:", err)
+		return
+	}
+
+	input = `
+	sink fooBar 
+    ==
+	kindmatch [ "priority", "t.do.bla" ]
+	{
+	}
+`
+	if _, err := UnitTestParse("mytest", input); err.Error() !=
+		"Parse error in mytest: Term cannot start an expression (==) (Line:3 Pos:5)" {
+		t.Error(err)
+		return
+	}
 }
 
 func TestFuncParsing(t *testing.T) {

+ 41 - 24
lang/ecal/parser/prettyprinter.go

@@ -45,23 +45,22 @@ func init() {
 		// TokenMAP - Special case (handled in code)
 		// TokenPARAMS - Special case (handled in code)
 
-		/*
-
-
-			NodeSTATEMENTS = "statements" // List of statements
-
-			// Assignment statement
-
-			NodeASSIGN = ":="
-		*/
-
 		// Assignment statement
 
-		NodeASSIGN + "_2": template.Must(template.New(NodeMINUS).Parse("{{.c1}} := {{.c2}}")),
+		NodeASSIGN + "_2": template.Must(template.New(NodeASSIGN).Parse("{{.c1}} := {{.c2}}")),
 
 		// Import statement
 
-		NodeIMPORT + "_2": template.Must(template.New(NodeMINUS).Parse("import {{.c1}} as {{.c2}}")),
+		NodeIMPORT + "_2": template.Must(template.New(NodeIMPORT).Parse("import {{.c1}} as {{.c2}}")),
+
+		// Sink definition
+
+		// NodeSINK - Special case (handled in code)
+		NodeKINDMATCH + "_1":  template.Must(template.New(NodeKINDMATCH).Parse("kindmatch {{.c1}}")),
+		NodeSCOPEMATCH + "_1": template.Must(template.New(NodeSCOPEMATCH).Parse("scopematch {{.c1}}")),
+		NodeSTATEMATCH + "_1": template.Must(template.New(NodeSTATEMATCH).Parse("statematch {{.c1}}")),
+		NodePRIORITY + "_1":   template.Must(template.New(NodePRIORITY).Parse("priority {{.c1}}")),
+		NodeSUPPRESSES + "_1": template.Must(template.New(NodeSUPPRESSES).Parse("suppresses {{.c1}}")),
 
 		// Arithmetic operators
 
@@ -76,23 +75,23 @@ func init() {
 
 		// Function definition
 
-		NodeFUNC + "_3":   template.Must(template.New(NodeDIVINT).Parse("func {{.c1}}{{.c2}} {\n{{.c3}}}")),
-		NodeRETURN:        template.Must(template.New(NodeDIVINT).Parse("return")),
-		NodeRETURN + "_1": template.Must(template.New(NodeDIVINT).Parse("return {{.c1}}")),
+		NodeFUNC + "_3":   template.Must(template.New(NodeFUNC).Parse("func {{.c1}}{{.c2}} {\n{{.c3}}}")),
+		NodeRETURN:        template.Must(template.New(NodeRETURN).Parse("return")),
+		NodeRETURN + "_1": template.Must(template.New(NodeRETURN).Parse("return {{.c1}}")),
 
 		// Boolean operators
 
-		NodeOR + "_2":  template.Must(template.New(NodeGEQ).Parse("{{.c1}} or {{.c2}}")),
-		NodeAND + "_2": template.Must(template.New(NodeLEQ).Parse("{{.c1}} and {{.c2}}")),
+		NodeOR + "_2":  template.Must(template.New(NodeOR).Parse("{{.c1}} or {{.c2}}")),
+		NodeAND + "_2": template.Must(template.New(NodeAND).Parse("{{.c1}} and {{.c2}}")),
 		NodeNOT + "_1": template.Must(template.New(NodeNOT).Parse("not {{.c1}}")),
 
 		// Condition operators
 
-		NodeLIKE + "_2":      template.Must(template.New(NodeGEQ).Parse("{{.c1}} like {{.c2}}")),
-		NodeIN + "_2":        template.Must(template.New(NodeLEQ).Parse("{{.c1}} in {{.c2}}")),
-		NodeHASPREFIX + "_2": template.Must(template.New(NodeLEQ).Parse("{{.c1}} hasprefix {{.c2}}")),
-		NodeHASSUFFIX + "_2": template.Must(template.New(NodeLEQ).Parse("{{.c1}} hassuffix {{.c2}}")),
-		NodeNOTIN + "_2":     template.Must(template.New(NodeLEQ).Parse("{{.c1}} notin {{.c2}}")),
+		NodeLIKE + "_2":      template.Must(template.New(NodeLIKE).Parse("{{.c1}} like {{.c2}}")),
+		NodeIN + "_2":        template.Must(template.New(NodeIN).Parse("{{.c1}} in {{.c2}}")),
+		NodeHASPREFIX + "_2": template.Must(template.New(NodeHASPREFIX).Parse("{{.c1}} hasprefix {{.c2}}")),
+		NodeHASSUFFIX + "_2": template.Must(template.New(NodeHASSUFFIX).Parse("{{.c1}} hassuffix {{.c2}}")),
+		NodeNOTIN + "_2":     template.Must(template.New(NodeNOTIN).Parse("{{.c1}} notin {{.c2}}")),
 
 		NodeGEQ + "_2": template.Must(template.New(NodeGEQ).Parse("{{.c1}} >= {{.c2}}")),
 		NodeLEQ + "_2": template.Must(template.New(NodeLEQ).Parse("{{.c1}} <= {{.c2}}")),
@@ -103,8 +102,8 @@ func init() {
 
 		// Separators
 
-		NodeKVP + "_2":    template.Must(template.New(NodeLT).Parse("{{.c1}} : {{.c2}}")),
-		NodePRESET + "_2": template.Must(template.New(NodeLT).Parse("{{.c1}}={{.c2}}")),
+		NodeKVP + "_2":    template.Must(template.New(NodeKVP).Parse("{{.c1}} : {{.c2}}")),
+		NodePRESET + "_2": template.Must(template.New(NodePRESET).Parse("{{.c1}}={{.c2}}")),
 
 		// Constants
 
@@ -189,6 +188,24 @@ func PrettyPrint(ast *ASTNode) (string, error) {
 
 			return ppMetaData(ast, buf.String()), nil
 
+		} else if ast.Name == NodeSINK {
+
+			buf.WriteString("sink ")
+			buf.WriteString(tempParam["c1"])
+			buf.WriteString("\n")
+
+			for i := 1; i < len(ast.Children)-1; i++ {
+				buf.WriteString("  ")
+				buf.WriteString(tempParam[fmt.Sprint("c", i+1)])
+				buf.WriteString("\n")
+			}
+
+			buf.WriteString("{\n")
+			buf.WriteString(tempParam[fmt.Sprint("c", len(ast.Children))])
+			buf.WriteString("}\n")
+
+			return buf.String(), nil
+
 		} else if ast.Name == NodeFUNCCALL {
 
 			// For statements just concat all children
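
The NodeSINK special case in PrettyPrint writes the sink name after the sink keyword, puts each match clause on its own line indented by two spaces, and wraps the body in braces. A small illustration of the resulting layout, assembled here from placeholder clause and body strings rather than the real template output:

package main

import "fmt"

func main() {
	// Placeholder values standing in for the pretty-printed children:
	// the sink name, its match clauses, and the body statements.
	name := "fooBar"
	clauses := []string{`kindmatch ["t.do.bla"]`, "priority 0"}
	body := "    print(\"test1\")\n"

	out := "sink " + name + "\n"
	for _, c := range clauses { // each clause goes on its own indented line
		out += "  " + c + "\n"
	}
	out += "{\n" + body + "}\n"
	fmt.Print(out)
}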