|
|
@@ -13,13 +13,33 @@ import (
|
|
|
* @Date: 2022/10/2 下午9:55
|
|
|
* @Desc:
|
|
|
*/
|
|
|
+
|
|
|
+const (
|
|
|
+	// Operator precedence levels, from weakest (LOWEST) to strongest (CALL).
|
|
|
+ _ int = iota
|
|
|
+ LOWEST
|
|
|
+ EQUALS // ==
|
|
|
+ LESSGREATER // > OR <
|
|
|
+ SUM // +
|
|
|
+ PRODUCT // *
|
|
|
+ PREFIX // -X OR !X
|
|
|
+ CALL // myFunction(X)
|
|
|
+)
|
|
|
+
|
|
|
+type (
|
|
|
+ prefixParseFn func() ast.Expression
|
|
|
+ infixParseFn func(expression ast.Expression) ast.Expression
|
|
|
+)
|
|
|
+
|
|
|
type Parser struct {
|
|
|
- l *lexer.Lexer // point to the instance of the lexer
|
|
|
+ l *lexer.Lexer // point to the instance of the lexer
|
|
|
+ errors []string
|
|
|
|
|
|
curToken token.Token // point to the current token
|
|
|
peekToken token.Token // point to the next token
|
|
|
|
|
|
- errors []string
|
|
|
+ prefixParseFns map[token.TypeToken]prefixParseFn
|
|
|
+ infixParseFns map[token.TypeToken]infixParseFn
|
|
|
}
|
|
|
|
|
|
func New(l *lexer.Lexer) *Parser {
|
|
|
@@ -28,6 +48,9 @@ func New(l *lexer.Lexer) *Parser {
|
|
|
errors: []string{},
|
|
|
}
|
|
|
|
|
|
+ p.prefixParseFns = make(map[token.TypeToken]prefixParseFn)
|
|
|
+ p.registerPrefix(token.IDENT, p.parseIdentifier)
|
|
|
+
|
|
|
// Read two tokens, so curToken and peekToken are both set
|
|
|
p.nextToken()
|
|
|
p.nextToken()
|
|
|
@@ -35,6 +58,10 @@ func New(l *lexer.Lexer) *Parser {
|
|
|
return p
|
|
|
}
|
|
|
|
|
|
+func (p *Parser) parseIdentifier() ast.Expression {
|
|
|
+ return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
|
|
|
+}
|
|
|
+
|
|
|
func (p *Parser) Errors() []string {
|
|
|
return p.errors
|
|
|
}
|
|
|
@@ -71,7 +98,7 @@ func (p *Parser) parseStatement() ast.Statement {
|
|
|
case token.RETURN:
|
|
|
return p.parseReturnStatement()
|
|
|
default:
|
|
|
- return nil
|
|
|
+ return p.parseExpressionStatement()
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -79,17 +106,22 @@ func (p *Parser) parseStatement() ast.Statement {
|
|
|
func (p *Parser) parseLetStatement() *ast.LetStatement {
|
|
|
stmt := &ast.LetStatement{Token: p.curToken}
|
|
|
|
|
|
+ // let
|
|
|
if !p.expectPeek(token.IDENT) {
|
|
|
return nil
|
|
|
}
|
|
|
|
|
|
+ // identifier
|
|
|
stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
|
|
|
+ // =
|
|
|
if !p.expectPeek(token.ASSIGN) {
|
|
|
return nil
|
|
|
}
|
|
|
|
|
|
// TODO: we're skipping the expression until we
|
|
|
// we encounter a semicolon
|
|
|
+
|
|
|
+ // ;
|
|
|
for !p.curTokenIs(token.SEMICOLON) {
|
|
|
p.nextToken()
|
|
|
}
|
|
|
@@ -97,7 +129,7 @@ func (p *Parser) parseLetStatement() *ast.LetStatement {
|
|
|
}
|
|
|
|
|
|
// return <expression>;
|
|
|
-func (p *Parser) parseReturnStatement() ast.Statement {
|
|
|
+func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
|
|
|
stmt := &ast.ReturnStatement{Token: p.curToken}
|
|
|
|
|
|
p.nextToken()
|
|
|
@@ -111,6 +143,29 @@ func (p *Parser) parseReturnStatement() ast.Statement {
|
|
|
return stmt
|
|
|
}
|
|
|
|
|
|
+func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
|
|
|
+ stmt := &ast.ExpressionStatement{Token: p.curToken}
|
|
|
+
|
|
|
+ stmt.Expression = p.parseExpression(LOWEST)
|
|
|
+
|
|
|
+ if p.peekTokenIs(token.SEMICOLON) {
|
|
|
+ p.nextToken()
|
|
|
+ }
|
|
|
+
|
|
|
+ return stmt
|
|
|
+}
|
|
|
+
|
|
|
+func (p *Parser) parseExpression(precedence int) ast.Expression {
|
|
|
+ prefix := p.prefixParseFns[p.curToken.Type]
|
|
|
+ if prefix == nil {
|
|
|
+ return nil
|
|
|
+ }
|
|
|
+
|
|
|
+ leftExp := prefix()
|
|
|
+
|
|
|
+ return leftExp
|
|
|
+}
|
|
|
+
|
|
|
func (p *Parser) curTokenIs(t token.TypeToken) bool {
|
|
|
return p.curToken.Type == t
|
|
|
}
|
|
|
@@ -128,3 +183,18 @@ func (p *Parser) expectPeek(t token.TypeToken) bool {
 		return false
 	}
 }
+
+// registerPrefix associates a prefix-parse function with the given token type.
+func (p *Parser) registerPrefix(tokenType token.TypeToken, fn prefixParseFn) {
+	p.prefixParseFns[tokenType] = fn
+}
+
+// registerInfix associates an infix-parse function with the given token type.
+// infixParseFns is initialized lazily here because New currently only
+// allocates prefixParseFns; without this guard the first call would panic
+// with an assignment to a nil map.
+func (p *Parser) registerInfix(tokenType token.TypeToken, fn infixParseFn) {
+	if p.infixParseFns == nil {
+		p.infixParseFns = make(map[token.TypeToken]infixParseFn)
+	}
+	p.infixParseFns[tokenType] = fn
+}
|