/*
 * Public Domain Software
 *
 * I (Matthias Ladkau) am the author of the source code in this file.
 * I have placed the source code in this file in the public domain.
 *
 * For further information see: http://creativecommons.org/publicdomain/zero/1.0/
 */

package parser

import (
	"fmt"
)

/*
Map of AST nodes corresponding to lexer tokens. The map determines how a given
sequence of lexer tokens is organized into an AST.
*/
var astNodeMap map[LexTokenID]*ASTNode

func init() {
	astNodeMap = map[LexTokenID]*ASTNode{
		TokenEOF: {NodeEOF, nil, nil, nil, nil, 0, ndTerm, nil},

		// Value tokens

		TokenSTRING:     {NodeSTRING, nil, nil, nil, nil, 0, ndTerm, nil},
		TokenNUMBER:     {NodeNUMBER, nil, nil, nil, nil, 0, ndTerm, nil},
		TokenIDENTIFIER: {NodeIDENTIFIER, nil, nil, nil, nil, 0, ndIdentifier, nil},

		// Constructed tokens

		TokenSTATEMENTS: {NodeSTATEMENTS, nil, nil, nil, nil, 0, nil, nil},
		TokenFUNCCALL:   {NodeFUNCCALL, nil, nil, nil, nil, 0, nil, nil},
		TokenCOMPACCESS: {NodeCOMPACCESS, nil, nil, nil, nil, 0, nil, nil},
		TokenLIST:       {NodeLIST, nil, nil, nil, nil, 0, nil, nil},
		TokenMAP:        {NodeMAP, nil, nil, nil, nil, 0, nil, nil},
		/*
			TokenGUARD: {NodeGUARD, nil, nil, nil, 0, nil, nil},
		*/

		// Condition operators

		TokenGEQ: {NodeGEQ, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenLEQ: {NodeLEQ, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenNEQ: {NodeNEQ, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenEQ:  {NodeEQ, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenGT:  {NodeGT, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenLT:  {NodeLT, nil, nil, nil, nil, 60, nil, ldInfix},

		// Grouping symbols

		TokenLPAREN: {"", nil, nil, nil, nil, 150, ndInner, nil},
		TokenRPAREN: {"", nil, nil, nil, nil, 0, nil, nil},
		TokenLBRACK: {"", nil, nil, nil, nil, 150, ndList, nil},
		TokenRBRACK: {"", nil, nil, nil, nil, 0, nil, nil},
		TokenLBRACE: {"", nil, nil, nil, nil, 150, ndMap, nil},
		TokenRBRACE: {"", nil, nil, nil, nil, 0, nil, nil},

		// Separators

		TokenDOT:       {"", nil, nil, nil, nil, 0, nil, nil},
		TokenCOMMA:     {"", nil, nil, nil, nil, 0, nil, nil},
		TokenSEMICOLON: {"", nil, nil, nil, nil, 0, nil, nil},

		// Grouping

		TokenCOLON: {NodeKVP, nil, nil, nil, nil, 60, nil, ldInfix},

		// Arithmetic operators

		TokenPLUS:   {NodePLUS, nil, nil, nil, nil, 110, ndPrefix, ldInfix},
		TokenMINUS:  {NodeMINUS, nil, nil, nil, nil, 110, ndPrefix, ldInfix},
		TokenTIMES:  {NodeTIMES, nil, nil, nil, nil, 120, nil, ldInfix},
		TokenDIV:    {NodeDIV, nil, nil, nil, nil, 120, nil, ldInfix},
		TokenDIVINT: {NodeDIVINT, nil, nil, nil, nil, 120, nil, ldInfix},
		TokenMODINT: {NodeMODINT, nil, nil, nil, nil, 120, nil, ldInfix},

		// Assignment statement

		TokenASSIGN: {NodeASSIGN, nil, nil, nil, nil, 10, nil, ldInfix},

		// Import statement

		TokenIMPORT: {NodeIMPORT, nil, nil, nil, nil, 0, ndImport, nil},
		TokenAS:     {"", nil, nil, nil, nil, 0, ndImport, nil},

		// Boolean operators

		TokenOR:  {NodeOR, nil, nil, nil, nil, 30, nil, ldInfix},
		TokenAND: {NodeAND, nil, nil, nil, nil, 40, nil, ldInfix},
		TokenNOT: {NodeNOT, nil, nil, nil, nil, 20, ndPrefix, nil},

		// Condition operators

		TokenLIKE:      {NodeLIKE, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenIN:        {NodeIN, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenHASPREFIX: {NodeHASPREFIX, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenHASSUFFIX: {NodeHASSUFFIX, nil, nil, nil, nil, 60, nil, ldInfix},
		TokenNOTIN:     {NodeNOTIN, nil, nil, nil, nil, 60, nil, ldInfix},

		// Constant terminals

		TokenFALSE: {NodeFALSE, nil, nil, nil, nil, 0, ndTerm, nil},
		TokenTRUE:  {NodeTRUE, nil, nil, nil, nil, 0, ndTerm, nil},
		TokenNULL:  {NodeNULL, nil, nil, nil, nil, 0, ndTerm, nil},
	}
}
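
/*
Reading the table above: each entry acts as a prototype ASTNode. The first
field is the node name, the numeric field is the binding power used by the
Pratt-style parser below, and the last two fields are the null denotation
(how the token starts an expression) and the left denotation (how the token
combines with a left-hand operand). The remaining nil fields appear to be
per-instance data that instance() fills in. Higher binding powers bind
tighter, for example:

	TokenTIMES  (120) binds tighter than TokenPLUS (110)
	TokenPLUS   (110) binds tighter than TokenAND  (40)
	TokenASSIGN (10)  binds weakest of the operators listed here
*/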

// Parser
// ======

/*
Parser data structure
*/
type parser struct {
	name   string          // Name to identify the input
	node   *ASTNode        // Current AST node
	tokens *LABuffer       // Buffer connected to the channel of lexer tokens
	rp     RuntimeProvider // Runtime provider which creates runtime components
}

/*
Parse parses a given input string and returns an AST.
*/
func Parse(name string, input string) (*ASTNode, error) {
	return ParseWithRuntime(name, input, nil)
}
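
/*
A minimal usage sketch for Parse (the input string is hypothetical - the
concrete surface syntax is defined by the lexer in this package):

	ast, err := Parse("console input", "a := 1 + 2 * 3")
	if err != nil {
		// handle the lexer / parser error
	}
	// ast is the root *ASTNode of the parsed expression
*/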

/*
ParseWithRuntime parses a given input string and returns an AST decorated with
runtime components.
*/
func ParseWithRuntime(name string, input string, rp RuntimeProvider) (*ASTNode, error) {

	// Create a new parser with a look-ahead buffer of 3

	p := &parser{name, nil, NewLABuffer(Lex(name, input), 3), rp}

	// Read and set initial AST node

	node, err := p.next()
	if err != nil {
		return nil, err
	}
	p.node = node

	n, err := p.run(0)

	if err == nil && hasMoreStatements(p, n) {

		st := astNodeMap[TokenSTATEMENTS].instance(p, nil)
		st.Children = append(st.Children, n)

		for err == nil && hasMoreStatements(p, n) {

			// Skip semicolons

			if p.node.Token.ID == TokenSEMICOLON {
				skipToken(p, TokenSEMICOLON)
			}

			n, err = p.run(0)
			st.Children = append(st.Children, n)
		}

		n = st
	}

	if err == nil && p.node != nil && p.node.Token.ID != TokenEOF {
		token := *p.node.Token
		err = p.newParserError(ErrUnexpectedEnd, fmt.Sprintf("extra token id:%v (%v)",
			token.ID, token), token)
	}

	return n, err
}
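
/*
Sketch of the multi-statement handling above (hypothetical input; the exact
node names come from the Node* constants): for two assignments on separate
lines the individual results are wrapped in a synthetic statements node,
roughly:

	a := 1
	b := 2

	statements
	    assign (a, 1)
	    assign (b, 2)
*/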

/*
run models the main parser function.
*/
func (p *parser) run(rightBinding int) (*ASTNode, error) {
	var err error

	n := p.node

	p.node, err = p.next()
	if err != nil {
		return nil, err
	}

	// Start with the null denotation of this statement / expression

	if n.nullDenotation == nil {
		return nil, p.newParserError(ErrImpossibleNullDenotation,
			n.Token.String(), *n.Token)
	}

	left, err := n.nullDenotation(p, n)
	if err != nil {
		return nil, err
	}

	// Collect left denotations as long as the left binding power is greater
	// than the initial right one

	for rightBinding < p.node.binding {
		var nleft *ASTNode

		n = p.node

		if n.leftDenotation == nil {

			if left.Token.Lline < n.Token.Lline {

				// If the impossible left denotation is on a new line
				// we might be parsing a new statement

				return left, nil
			}

			return nil, p.newParserError(ErrImpossibleLeftDenotation,
				n.Token.String(), *n.Token)
		}

		p.node, err = p.next()
		if err != nil {
			return nil, err
		}

		// Get the next left denotation

		nleft, err = n.leftDenotation(p, n, left)
		left = nleft

		if err != nil {
			return nil, err
		}
	}

	return left, nil
}
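
/*
Worked example of the binding power loop above, using the bindings from
astNodeMap: for an input like "1 + 2 * 3" the parser takes the null denotation
of "1", then sees "+" (110 > 0) and calls its ldInfix, which recurses with
rightBinding 110. Inside that call "*" (120 > 110) is still consumed, so
"2 * 3" becomes the right operand of "+". For "1 * 2 + 3" the recursion runs
with rightBinding 120 and stops at "+" (110), so the multiplication is closed
off before the addition is applied.
*/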

/*
next retrieves the next lexer token.
*/
func (p *parser) next() (*ASTNode, error) {
	var preComments []MetaData
	var postComments []MetaData

	token, more := p.tokens.Next()

	// Skip over pre comment tokens

	for more && token.ID == TokenPRECOMMENT {
		preComments = append(preComments, NewLexTokenInstance(token))
		token, more = p.tokens.Next()
	}

	// Skip over post comment tokens

	for more && token.ID == TokenPOSTCOMMENT {
		postComments = append(postComments, NewLexTokenInstance(token))
		token, more = p.tokens.Next()
	}

	if !more {

		// Unexpected end of input - the associated token is an empty error token

		return nil, p.newParserError(ErrUnexpectedEnd, "", token)

	} else if token.ID == TokenError {

		// There was a lexer error - wrap it in a parser error

		return nil, p.newParserError(ErrLexicalError, token.Val, token)

	} else if node, ok := astNodeMap[token.ID]; ok {

		// We got a normal AST component

		ret := node.instance(p, &token)

		ret.Meta = append(ret.Meta, preComments...) // Attach pre comments to the next AST node
		if len(postComments) > 0 && p.node != nil {
			p.node.Meta = append(p.node.Meta, postComments...) // Attach post comments to the previous AST node
		}

		return ret, nil
	}

	return nil, p.newParserError(ErrUnknownToken, fmt.Sprintf("id:%v (%v)", token.ID, token), token)
}

// Standard null denotation functions
// ==================================

/*
ndTerm is used for terminals.
*/
func ndTerm(p *parser, self *ASTNode) (*ASTNode, error) {
	return self, nil
}

/*
ndInner returns the inner expression of an enclosed block and discards the
block token. This function is used for brackets.
*/
func ndInner(p *parser, self *ASTNode) (*ASTNode, error) {

	// Get the inner expression

	exp, err := p.run(0)
	if err != nil {
		return nil, err
	}

	// We return here the inner expression - discarding the bracket tokens

	return exp, skipToken(p, TokenRPAREN)
}

/*
ndPrefix is used for prefix operators.
*/
func ndPrefix(p *parser, self *ASTNode) (*ASTNode, error) {

	// Make sure a prefix will only prefix the next item

	val, err := p.run(self.binding + 20)
	if err != nil {
		return nil, err
	}

	self.Children = append(self.Children, val)

	return self, nil
}
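
/*
The "+ 20" above makes a prefix operator capture only the item directly after
it: for TokenMINUS (binding 110) the recursive call uses rightBinding 130,
which is above TokenTIMES (120), so a hypothetical input like "-a * b" parses
as (-a) * b rather than -(a * b).
*/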

// Null denotation functions for specific expressions
// ==================================================

/*
ndImport is used to parse imports.
*/
func ndImport(p *parser, self *ASTNode) (*ASTNode, error) {

	// Must specify a file path

	err := acceptChild(p, self, TokenSTRING)

	if err == nil {

		// Must specify AS

		if err = skipToken(p, TokenAS); err == nil {

			// Must specify an identifier

			err = acceptChild(p, self, TokenIDENTIFIER)
		}
	}

	return self, err
}
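
/*
Sketch of the expected token sequence (the keyword spellings are defined by
the lexer, so the line below is only illustrative):

	import "some/path" as somename

This produces an import node with two children - the path string and the
identifier - while the AS token itself is skipped and discarded.
*/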

/*
ndIdentifier is used to parse identifiers and function calls.
*/
func ndIdentifier(p *parser, self *ASTNode) (*ASTNode, error) {
	var parseMore, parseSegment, parseFuncCall, parseCompositionAccess func(parent *ASTNode) error

	parseMore = func(current *ASTNode) error {
		var err error

		if p.node.Token.ID == TokenDOT {
			err = parseSegment(current)
		} else if p.node.Token.ID == TokenLPAREN {
			err = parseFuncCall(current)
		} else if p.node.Token.ID == TokenLBRACK {
			err = parseCompositionAccess(current)
		}

		return err
	}

	parseSegment = func(current *ASTNode) error {
		var err error
		var next *ASTNode

		if err = skipToken(p, TokenDOT); err == nil {
			next = p.node
			if err = acceptChild(p, current, TokenIDENTIFIER); err == nil {
				err = parseMore(next)
			}
		}

		return err
	}

	parseFuncCall = func(current *ASTNode) error {
		err := skipToken(p, TokenLPAREN)

		fc := astNodeMap[TokenFUNCCALL].instance(p, nil)
		current.Children = append(current.Children, fc)

		// Read in parameters

		for err == nil && p.node.Token.ID != TokenRPAREN {
			var exp *ASTNode

			// Parse each parameter expression - assign to the outer err so
			// parameter errors stop the loop

			if exp, err = p.run(0); err == nil {
				fc.Children = append(fc.Children, exp)

				if p.node.Token.ID == TokenCOMMA {
					err = skipToken(p, TokenCOMMA)
				}
			}
		}

		if err == nil {
			err = skipToken(p, TokenRPAREN)

			if err == nil {
				err = parseMore(current)
			}
		}

		return err
	}

	parseCompositionAccess = func(current *ASTNode) error {
		err := skipToken(p, TokenLBRACK)

		ca := astNodeMap[TokenCOMPACCESS].instance(p, nil)
		current.Children = append(current.Children, ca)

		if err == nil {
			var exp *ASTNode

			// Parse the index expression inside the brackets

			if exp, err = p.run(0); err == nil {
				ca.Children = append(ca.Children, exp)

				if err = skipToken(p, TokenRBRACK); err == nil {
					err = parseMore(current)
				}
			}
		}

		return err
	}

	return self, parseMore(self)
}
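
/*
Sketch of how a chained access is decomposed (hypothetical input; exact node
names depend on the Node* constants): for something like

	a.b(1, 2)[0]

"a" is the identifier node returned as self, "b" becomes a child identifier of
"a", the call "(1, 2)" becomes a funccall child of "b" holding the two
argument expressions, and "[0]" becomes a compaccess child of "b" holding the
index expression.
*/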

/*
ndList is used to collect elements of a list.
*/
func ndList(p *parser, self *ASTNode) (*ASTNode, error) {

	// Create a list token

	st := astNodeMap[TokenLIST].instance(p, self.Token)

	// Get the inner expression

	for p.node.Token.ID != TokenRBRACK {

		// Parse all the expressions inside

		exp, err := p.run(0)
		if err != nil {
			return nil, err
		}

		st.Children = append(st.Children, exp)

		if p.node.Token.ID == TokenCOMMA {
			skipToken(p, TokenCOMMA)
		}
	}

	// Must have a closing bracket

	return st, skipToken(p, TokenRBRACK)
}

/*
ndMap is used to collect elements of a map.
*/
func ndMap(p *parser, self *ASTNode) (*ASTNode, error) {

	// Create a map token

	st := astNodeMap[TokenMAP].instance(p, self.Token)

	// Get the inner expression

	for p.node.Token.ID != TokenRBRACE {

		// Parse all the expressions inside

		exp, err := p.run(0)
		if err != nil {
			return nil, err
		}

		st.Children = append(st.Children, exp)

		if p.node.Token.ID == TokenCOMMA {
			if err := skipToken(p, TokenCOMMA); err != nil {
				return nil, err
			}
		}
	}

	// Must have a closing brace

	return st, skipToken(p, TokenRBRACE)
}
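
/*
Sketch of list and map parsing (hypothetical literal syntax, assuming the
lexer produces the matching bracket, comma and colon tokens): a literal like

	{"x" : [1, 2], "y" : 3}

becomes a map node whose children are the key-value pairs produced by the
colon's ldInfix entry (NodeKVP), and the value "[1, 2]" becomes a list node
with the two number expressions as children.
*/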

// Standard left denotation functions
// ==================================

/*
ldInfix is used for infix operators.
*/
func ldInfix(p *parser, self *ASTNode, left *ASTNode) (*ASTNode, error) {

	right, err := p.run(self.binding)
	if err != nil {
		return nil, err
	}

	self.Children = append(self.Children, left)
	self.Children = append(self.Children, right)

	return self, nil
}

// Helper functions
// ================

/*
hasMoreStatements returns true if there are more statements to parse.
*/
func hasMoreStatements(p *parser, currentNode *ASTNode) bool {
	nextNode := p.node

	if nextNode == nil || nextNode.Token.ID == TokenEOF {
		return false
	} else if nextNode.Token.ID == TokenSEMICOLON {
		return true
	}

	return currentNode != nil && currentNode.Token.Lline < nextNode.Token.Lline
}
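
/*
In other words a new statement starts either after an explicit semicolon or
when the next token begins on a later line than the current node. With a
hypothetical surface syntax, the input

	a := 1; b := 2
	c := 3

parses as three statements, whereas "a := 1 b := 2" on a single line is
rejected further up in ParseWithRuntime as an extra token.
*/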

/*
skipToken skips over a given token.
*/
func skipToken(p *parser, ids ...LexTokenID) error {
	var err error

	canSkip := func(id LexTokenID) bool {
		for _, i := range ids {
			if i == id {
				return true
			}
		}
		return false
	}

	if !canSkip(p.node.Token.ID) {
		if p.node.Token.ID == TokenEOF {
			return p.newParserError(ErrUnexpectedEnd, "", *p.node.Token)
		}
		return p.newParserError(ErrUnexpectedToken, p.node.Token.Val, *p.node.Token)
	}

	// This should never return an error unless we skip over EOF or complex tokens
	// like values

	p.node, err = p.next()

	return err
}

/*
acceptChild accepts the current token as a child.
*/
func acceptChild(p *parser, self *ASTNode, id LexTokenID) error {
	var err error

	current := p.node

	p.node, err = p.next()
	if err != nil {
		return err
	}

	if current.Token.ID == id {
		self.Children = append(self.Children, current)
		return nil
	}

	return p.newParserError(ErrUnexpectedToken, current.Token.Val, *current.Token)
}