
parsing expression -- identifier expression

runningwater, 3 years ago
Parent commit: fd0b494f3b
4 changed files with 124 additions and 6 deletions
  1. ast/ast.go (+1 -2)
  2. ast/ast_test.go (+25 -0)
  3. parser/parser.go (+67 -4)
  4. parser/parser_test.go (+31 -0)

ast/ast.go (+1 -2)

@@ -148,6 +148,5 @@ func (es *ExpressionStatement) String() string {
 	}
 	return ""
 }
-func (es *ExpressionStatement) expressionNode() {
-	panic("implement me")
+func (es *ExpressionStatement) statementNode() {
 }
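
ExpressionStatement lives in Program.Statements, so it has to satisfy the Statement interface; the empty statementNode() marker method does that, which is why the old expressionNode() stub (and its panic) is replaced. For reference, a minimal sketch of the interfaces these marker methods belong to, assumed from the surrounding ast package and not part of this diff:

// Sketch of the ast interface hierarchy assumed here.
type Node interface {
	TokenLiteral() string
	String() string
}

type Statement interface {
	Node
	statementNode() // marker: node can appear where a statement is expected
}

type Expression interface {
	Node
	expressionNode() // marker: node produces a value
}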

ast/ast_test.go (+25 -0)

@@ -0,0 +1,25 @@
+package ast
+
+import (
+	"github/runnignwater/monkey/token"
+	"testing"
+)
+
+func TestString(t *testing.T) {
+	program := &Program{
+		Statements: []Statement{
+			&LetStatement{
+				Token: token.Token{Type: token.LET, Literal: "let"},
+				Name:  &Identifier{Token: token.Token{Type: token.IDENT, Literal: "myVar"}, Value: "myVar"},
+				Value: &Identifier{
+					Token: token.Token{Type: token.IDENT, Literal: "anotherVar"},
+					Value: "anotherVar",
+				},
+			},
+		},
+	}
+
+	if program.String() != "let myVar = anotherVar;" {
+		t.Errorf("program.String() wrong. got=%q", program.String())
+	}
+}
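
The new test builds the AST for "let myVar = anotherVar;" by hand and checks that Program.String() reproduces the source text. For that to pass, LetStatement.String() presumably concatenates the token literal, the name, and the value roughly as in the sketch below (a hypothetical rendering; the real method lives elsewhere in ast.go, is not part of this diff, and would need the bytes import):

func (ls *LetStatement) String() string {
	var out bytes.Buffer

	out.WriteString(ls.TokenLiteral() + " ") // "let "
	out.WriteString(ls.Name.String())        // "myVar"
	out.WriteString(" = ")
	if ls.Value != nil {
		out.WriteString(ls.Value.String()) // "anotherVar"
	}
	out.WriteString(";")

	return out.String()
}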

parser/parser.go (+67 -4)

@@ -13,13 +13,33 @@ import (
 * @Date: 2022/10/2 9:55 PM
  * @Desc:
  */
+
+const (
+	// precedence constants
+	_ int = iota
+	LOWEST
+	EQUALS      // ==
+	LESSGREATER // > OR <
+	SUM         // +
+	PRODUCT     // *
+	PREFIX      // -X OR !X
+	CALL        // myFunction(X)
+)
+
+type (
+	prefixParseFn func() ast.Expression
+	infixParseFn  func(expression ast.Expression) ast.Expression
+)
+
 type Parser struct {
-	l *lexer.Lexer // point to the instance of the lexer
+	l      *lexer.Lexer // point to the instance of the lexer
+	errors []string
 
 	curToken  token.Token // point to the current token
 	peekToken token.Token // point to the next token
 
-	errors []string
+	prefixParseFns map[token.TypeToken]prefixParseFn
+	infixParseFns  map[token.TypeToken]infixParseFn
 }
 
 func New(l *lexer.Lexer) *Parser {
@@ -28,6 +48,9 @@ func New(l *lexer.Lexer) *Parser {
 		errors: []string{},
 	}
 
+	p.prefixParseFns = make(map[token.TypeToken]prefixParseFn)
+	p.registerPrefix(token.IDENT, p.parseIdentifier)
+
 	// Read two tokens, so curToken and peekToken are both set
 	p.nextToken()
 	p.nextToken()
@@ -35,6 +58,10 @@ func New(l *lexer.Lexer) *Parser {
 	return p
 }
 
+func (p *Parser) parseIdentifier() ast.Expression {
+	return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+}
+
 func (p *Parser) Errors() []string {
 	return p.errors
 }
@@ -71,7 +98,7 @@ func (p *Parser) parseStatement() ast.Statement {
 	case token.RETURN:
 		return p.parseReturnStatement()
 	default:
-		return nil
+		return p.parseExpressionStatement()
 	}
 }
 
@@ -79,17 +106,22 @@ func (p *Parser) parseStatement() ast.Statement {
 func (p *Parser) parseLetStatement() *ast.LetStatement {
 	stmt := &ast.LetStatement{Token: p.curToken}
 
+	// let
 	if !p.expectPeek(token.IDENT) {
 		return nil
 	}
 
+	// identifier
 	stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+	// =
 	if !p.expectPeek(token.ASSIGN) {
 		return nil
 	}
 
 	// TODO: we're skipping the expression until we
 	//       encounter a semicolon
+
+	// ;
 	for !p.curTokenIs(token.SEMICOLON) {
 		p.nextToken()
 	}
@@ -97,7 +129,7 @@ func (p *Parser) parseLetStatement() *ast.LetStatement {
 }
 
 // return <expression>;
-func (p *Parser) parseReturnStatement() ast.Statement {
+func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
 	stmt := &ast.ReturnStatement{Token: p.curToken}
 
 	p.nextToken()
@@ -111,6 +143,29 @@ func (p *Parser) parseReturnStatement() ast.Statement {
 	return stmt
 }
 
+func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
+	stmt := &ast.ExpressionStatement{Token: p.curToken}
+
+	stmt.Expression = p.parseExpression(LOWEST)
+
+	if p.peekTokenIs(token.SEMICOLON) {
+		p.nextToken()
+	}
+
+	return stmt
+}
+
+func (p *Parser) parseExpression(precedence int) ast.Expression {
+	prefix := p.prefixParseFns[p.curToken.Type]
+	if prefix == nil {
+		return nil
+	}
+
+	leftExp := prefix()
+
+	return leftExp
+}
+
 func (p *Parser) curTokenIs(t token.TypeToken) bool {
 	return p.curToken.Type == t
 }
@@ -128,3 +183,11 @@ func (p *Parser) expectPeek(t token.TypeToken) bool {
 		return false
 	}
 }
+
+func (p *Parser) registerPrefix(tokenType token.TypeToken, fn prefixParseFn) {
+	p.prefixParseFns[tokenType] = fn
+}
+
+func (p *Parser) registerInfix(tokenType token.TypeToken, fn infixParseFn) {
+	p.infixParseFns[tokenType] = fn
+}
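
The prefixParseFns/infixParseFns maps are the core of the Pratt parser: parseExpression looks up the function registered for the current token type and delegates to it, and so far only token.IDENT is wired up via registerPrefix in New. A minimal usage sketch of the new path, assuming the module path seen in ast_test.go and the lexer and ParseProgram from earlier commits:

package main

import (
	"fmt"

	"github/runnignwater/monkey/ast"
	"github/runnignwater/monkey/lexer"
	"github/runnignwater/monkey/parser"
)

func main() {
	l := lexer.New("foobar;")
	p := parser.New(l) // New registers parseIdentifier for token.IDENT

	// parseStatement falls through to parseExpressionStatement, which calls
	// parseExpression(LOWEST); that looks up prefixParseFns[token.IDENT] and
	// invokes parseIdentifier, yielding an *ast.Identifier.
	program := p.ParseProgram()
	stmt := program.Statements[0].(*ast.ExpressionStatement)
	ident := stmt.Expression.(*ast.Identifier)
	fmt.Println(ident.Value) // "foobar"
}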

parser/parser_test.go (+31 -0)

@@ -111,3 +111,34 @@ func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
 
 	return true
 }
+
+func TestIdentifier(t *testing.T) {
+	input := "foobar;"
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParseErrors(t, p)
+
+	if len(program.Statements) != 1 {
+		t.Fatalf("program has not enough statements. got=%d", len(program.Statements))
+	}
+
+	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
+			program.Statements[0])
+	}
+
+	ident, ok := stmt.Expression.(*ast.Identifier)
+	if !ok {
+		t.Fatalf("exp not *ast.Identifier. got=%T", stmt.Expression)
+	}
+
+	if ident.Value != "foobar" {
+		t.Fatalf("ident.Value not %s. got=%s", "foobar", ident.Value)
+	}
+	if ident.TokenLiteral() != "foobar" {
+		t.Errorf("ident.TokenLiteral() not %s. got=%s", "foobar", ident.TokenLiteral())
+	}
+}
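
checkParseErrors is defined earlier in parser_test.go (outside this diff); presumably it fails the test as soon as the parser has collected any errors, along the lines of this sketch:

// Hypothetical sketch of the existing helper; the real version is not part
// of this diff.
func checkParseErrors(t *testing.T, p *Parser) {
	errors := p.Errors()
	if len(errors) == 0 {
		return
	}

	t.Errorf("parser has %d errors", len(errors))
	for _, msg := range errors {
		t.Errorf("parser error: %q", msg)
	}
	t.FailNow()
}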