parser.go

package parser

import (
	"fmt"
	"strconv"

	"github/runnignwater/monkey/ast"
	"github/runnignwater/monkey/lexer"
	"github/runnignwater/monkey/token"
)

/**
 * @Author: simon
 * @Email:  ynwdlxm@163.com
 * @Date:   2022/10/2 9:55 PM
 * @Desc:   Pratt parser for the Monkey programming language.
 */
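
// Typical usage (a minimal sketch; it assumes the sibling lexer package follows
// the book's lexer.New(input string) *lexer.Lexer constructor):
//
//	l := lexer.New("let x = 5 + 5;")
//	p := New(l)
//	program := p.ParseProgram()
//	if len(p.Errors()) != 0 {
//		// report the parse errors before using program
//	}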

// Operator precedence levels, from lowest to highest.
const (
	_ int = iota
	LOWEST
	EQUALS      // ==
	LESSGREATER // > or <
	SUM         // +
	PRODUCT     // *
	PREFIX      // -X or !X
	CALL        // myFunction(X)
)
  26. // 指派 token 类型的优先级
  27. var precedences = map[token.TypeToken]int{
  28. token.EQ: EQUALS,
  29. token.NOT_EQ: EQUALS,
  30. token.GT: LESSGREATER,
  31. token.LT: LESSGREATER,
  32. token.PLUS: SUM,
  33. token.MINUS: SUM,
  34. token.ASTERISK: PRODUCT,
  35. token.SLASH: PRODUCT,
  36. }
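
// A prefixParseFn parses a token in prefix position (identifiers, literals, !x, -x);
// an infixParseFn parses an infix operator and receives the already-parsed left operand.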
type (
	prefixParseFn func() ast.Expression
	infixParseFn  func(expression ast.Expression) ast.Expression
)

type Parser struct {
	l      *lexer.Lexer // the lexer instance that supplies tokens
	errors []string

	curToken  token.Token // the token currently being examined
	peekToken token.Token // the next token, used to decide what follows curToken

	prefixParseFns map[token.TypeToken]prefixParseFn
	infixParseFns  map[token.TypeToken]infixParseFn
}
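
// New returns a Parser for the given lexer, with its prefix and infix parse
// functions registered and curToken/peekToken already primed.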
func New(l *lexer.Lexer) *Parser {
	p := &Parser{
		l:      l,
		errors: []string{},
	}

	p.prefixParseFns = make(map[token.TypeToken]prefixParseFn)
	p.registerPrefix(token.IDENT, p.parseIdentifier)
	p.registerPrefix(token.INT, p.parseIntegerLiteral)
	p.registerPrefix(token.BANG, p.parsePrefixExpression)
	p.registerPrefix(token.MINUS, p.parsePrefixExpression)
	p.registerPrefix(token.TRUE, p.parseBoolean)
	p.registerPrefix(token.FALSE, p.parseBoolean)

	p.infixParseFns = make(map[token.TypeToken]infixParseFn)
	p.registerInfix(token.PLUS, p.parseInfixExpression)
	p.registerInfix(token.MINUS, p.parseInfixExpression)
	p.registerInfix(token.ASTERISK, p.parseInfixExpression)
	p.registerInfix(token.SLASH, p.parseInfixExpression)
	p.registerInfix(token.EQ, p.parseInfixExpression)
	p.registerInfix(token.NOT_EQ, p.parseInfixExpression)
	p.registerInfix(token.GT, p.parseInfixExpression)
	p.registerInfix(token.LT, p.parseInfixExpression)

	// Read two tokens, so curToken and peekToken are both set.
	p.nextToken()
	p.nextToken()

	return p
}

func (p *Parser) parseIdentifier() ast.Expression {
	return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
}

func (p *Parser) parseBoolean() ast.Expression {
	return &ast.Boolean{Token: p.curToken, Value: p.curTokenIs(token.TRUE)}
}
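
// parseIntegerLiteral converts the current token's literal into an int64;
// base 0 lets strconv.ParseInt accept 0x/0o/0b prefixes as well as plain decimals.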
func (p *Parser) parseIntegerLiteral() ast.Expression {
	defer untrace(trace("parseIntegerLiteral"))

	lit := &ast.IntegerLiteral{Token: p.curToken}

	value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
	if err != nil {
		msg := fmt.Sprintf("could not parse %q as integer", p.curToken.Literal)
		p.errors = append(p.errors, msg)
		return nil
	}

	lit.Value = value
	return lit
}
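
// parseInfixExpression builds an InfixExpression from the already-parsed left
// operand, then parses the right operand with the operator's own precedence,
// which makes operators of equal precedence associate to the left.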
func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
	defer untrace(trace("parseInfixExpression"))

	exp := &ast.InfixExpression{
		Token:    p.curToken,
		Left:     left,
		Operator: p.curToken.Literal,
	}

	precedence := p.curPrecedence()
	p.nextToken()
	exp.Right = p.parseExpression(precedence)

	return exp
}
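
// parsePrefixExpression handles !<expr> and -<expr>; the operand is parsed with
// PREFIX precedence so it binds tighter than any registered infix operator.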
func (p *Parser) parsePrefixExpression() ast.Expression {
	defer untrace(trace("parsePrefixExpression"))

	exp := &ast.PrefixExpression{
		Token:    p.curToken,
		Operator: p.curToken.Literal,
	}

	p.nextToken()
	exp.Right = p.parseExpression(PREFIX)

	return exp
}
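
// Errors returns the parse errors collected so far.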
func (p *Parser) Errors() []string {
	return p.errors
}

func (p *Parser) peekError(t token.TypeToken) {
	msg := fmt.Sprintf("expected next token to be %s, got %s instead", t, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}

func (p *Parser) nextToken() {
	p.curToken = p.peekToken
	p.peekToken = p.l.NextToken()
}
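
// ParseProgram is the parser's entry point: it parses one statement per
// iteration until EOF and collects the results into an *ast.Program.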
func (p *Parser) ParseProgram() *ast.Program {
	program := &ast.Program{}
	program.Statements = []ast.Statement{}

	for !p.curTokenIs(token.EOF) {
		stmt := p.parseStatement()
		if stmt != nil {
			program.Statements = append(program.Statements, stmt)
		}
		p.nextToken()
	}

	return program
}

func (p *Parser) parseStatement() ast.Statement {
	switch p.curToken.Type {
	case token.LET:
		return p.parseLetStatement()
	case token.RETURN:
		return p.parseReturnStatement()
	default:
		return p.parseExpressionStatement()
	}
}

// let <identifier> = <expression>;
func (p *Parser) parseLetStatement() *ast.LetStatement {
	stmt := &ast.LetStatement{Token: p.curToken}

	// let
	if !p.expectPeek(token.IDENT) {
		return nil
	}

	// identifier
	stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}

	// =
	if !p.expectPeek(token.ASSIGN) {
		return nil
	}

	// TODO: we're skipping the expression until we
	// encounter a semicolon
	// ;
	for !p.curTokenIs(token.SEMICOLON) {
		p.nextToken()
	}

	return stmt
}

// return <expression>;
func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
	stmt := &ast.ReturnStatement{Token: p.curToken}
	p.nextToken()

	// TODO: we're skipping the expressions until we
	// encounter a semicolon
	for !p.curTokenIs(token.SEMICOLON) {
		p.nextToken()
	}

	return stmt
}
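
// parseExpressionStatement wraps a bare expression in a statement node; a
// trailing semicolon is optional and is simply consumed when present.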
func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
	defer untrace(trace("parseExpressionStatement"))

	stmt := &ast.ExpressionStatement{Token: p.curToken}
	stmt.Expression = p.parseExpression(LOWEST)

	if p.peekTokenIs(token.SEMICOLON) {
		p.nextToken()
	}

	return stmt
}
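
// parseExpression is the core of the Pratt parser: it first applies the prefix
// parse function for the current token, then, as long as the next token is an
// infix operator with higher precedence, folds what has been parsed so far into
// that operator's infix parse function.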
func (p *Parser) parseExpression(precedence int) ast.Expression {
	defer untrace(trace("parseExpression"))

	prefix := p.prefixParseFns[p.curToken.Type]
	if prefix == nil {
		p.noPrefixParseFnError(p.curToken.Type)
		return nil
	}
	leftExp := prefix()

	for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
		infix := p.infixParseFns[p.peekToken.Type]
		if infix == nil {
			return leftExp
		}

		p.nextToken()
		leftExp = infix(leftExp)
	}

	return leftExp
}

func (p *Parser) curTokenIs(t token.TypeToken) bool {
	return p.curToken.Type == t
}

func (p *Parser) peekTokenIs(t token.TypeToken) bool {
	return p.peekToken.Type == t
}
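
// expectPeek advances to the next token only if it has the expected type;
// otherwise it records a peek error and stays put.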
func (p *Parser) expectPeek(t token.TypeToken) bool {
	if p.peekTokenIs(t) {
		p.nextToken()
		return true
	}
	p.peekError(t)
	return false
}

func (p *Parser) registerPrefix(tokenType token.TypeToken, fn prefixParseFn) {
	p.prefixParseFns[tokenType] = fn
}

func (p *Parser) registerInfix(tokenType token.TypeToken, fn infixParseFn) {
	p.infixParseFns[tokenType] = fn
}

func (p *Parser) noPrefixParseFnError(t token.TypeToken) {
	msg := fmt.Sprintf("no prefix parse function for %s found", t)
	p.errors = append(p.errors, msg)
}
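
// peekPrecedence and curPrecedence look up the precedence of the next and the
// current token respectively, defaulting to LOWEST for token types without an
// entry in precedences.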
func (p *Parser) peekPrecedence() int {
	if p, ok := precedences[p.peekToken.Type]; ok {
		return p
	}
	return LOWEST
}

func (p *Parser) curPrecedence() int {
	if p, ok := precedences[p.curToken.Type]; ok {
		return p
	}
	return LOWEST
}