
function literal parser

simon 3 years ago
parent
commit
440d2097be
3 changed files with 130 additions and 19 deletions
  1. ast/ast.go  +48 -19
  2. parser/parser.go  +45 -0
  3. parser/parser_test.go  +37 -0

+ 48 - 19
ast/ast.go

@@ -3,6 +3,7 @@ package ast
 import (
 	"bytes"
 	"github/runnignwater/monkey/token"
+	"strings"
 )
 
 /**
@@ -13,12 +14,12 @@ import (
  */
 type Node interface {
 	TokenLiteral() string
-	// This will allow us to print AST nodes for debugging and to compare them with other AST nodes.
+	// String This will allow us to print AST nodes for debugging and to compare them with other AST nodes.
 	// This is going to be really handy in tests!
 	String() string
 }
 
-// expressions produce values, statements don't
+// Statement expressions produce values, statements don't
 type Statement interface {
 	Node
 	statementNode()
@@ -53,23 +54,22 @@ func (p *Program) String() string {
 
 // let <identifier> = <expression>;
 //
-//      let x = 5 AST
-//  |-------------------|
-//  |  *ast.Program     |
-//  |-------------------|
-//  |  Statements       |
-//  |-------------------|
-//         ↓
-//  |-------------------|
-//  | *ast.LetStatement |
-//  |-------------------|
-//  |     Name          |
-//  |-------------------|
-//  |     Value         |
-//  |-------------------|
-//
-//  *ast.Identifier              *ast.Expression
+//	    let x = 5 AST
+//	|-------------------|
+//	|  *ast.Program     |
+//	|-------------------|
+//	|  Statements       |
+//	|-------------------|
+//	       ↓
+//	|-------------------|
+//	| *ast.LetStatement |
+//	|-------------------|
+//	|     Name          |
+//	|-------------------|
+//	|     Value         |
+//	|-------------------|
 //
+//	*ast.Identifier              *ast.Expression
 type LetStatement struct {
 	Token token.Token // the token.LET token
 	Name  *Identifier
@@ -111,7 +111,7 @@ func (i *Identifier) TokenLiteral() string {
 	return i.Token.Literal
 }
 
-// return <expression>;
+// ReturnStatement return <expression>;
 type ReturnStatement struct {
 	Token       token.Token // the token.RETURN
 	returnValue Expression
@@ -270,3 +270,32 @@ func (bs *BlockStatement) String() string {
 }
 
 func (bs *BlockStatement) statementNode() {}
+
+type FunctionLiteral struct {
+	Token      token.Token // The 'fn' token
+	Parameters []*Identifier
+	Body       *BlockStatement
+}
+
+func (fl *FunctionLiteral) TokenLiteral() string {
+	return fl.Token.Literal
+}
+
+func (fl *FunctionLiteral) String() string {
+	var out bytes.Buffer
+
+	params := []string{}
+	for _, p := range fl.Parameters {
+		params = append(params, p.String())
+	}
+
+	out.WriteString(fl.TokenLiteral())
+	out.WriteString("(")
+	out.WriteString(strings.Join(params, ", "))
+	out.WriteString(")")
+	out.WriteString(fl.Body.String())
+
+	return out.String()
+}
+
+func (fl *FunctionLiteral) expressionNode() {}
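
A minimal sketch (not part of this commit) of what the new String() method produces, assuming the Identifier and BlockStatement types already defined in ast/ast.go; only the Literal field of token.Token matters for stringification:

	fl := &ast.FunctionLiteral{
		Token: token.Token{Literal: "fn"},
		Parameters: []*ast.Identifier{
			{Token: token.Token{Literal: "x"}, Value: "x"},
			{Token: token.Token{Literal: "y"}, Value: "y"},
		},
		Body: &ast.BlockStatement{}, // empty body, so Body.String() contributes nothing
	}
	fmt.Println(fl.String()) // fn(x, y)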

+ 45 - 0
parser/parser.go

@@ -70,6 +70,7 @@ func New(l *lexer.Lexer) *Parser {
 	p.registerPrefix(token.FALSE, p.parseBoolean)
 	p.registerPrefix(token.LPAREN, p.parseGroupedExpression)
 	p.registerPrefix(token.IF, p.parseIfExpression)
+	p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
 
 	p.infixParseFns = make(map[token.TypeToken]infixParseFn)
 	p.registerInfix(token.PLUS, p.parseInfixExpression)
@@ -140,6 +141,23 @@ func (p *Parser) parseIfExpression() ast.Expression {
 	}
 	return exp
 }
+func (p *Parser) parseFunctionLiteral() ast.Expression {
+	lit := &ast.FunctionLiteral{Token: p.curToken}
+
+	// (
+	if !p.expectPeek(token.LPAREN) {
+		return nil
+	}
+	lit.Parameters = p.ParseFunctionParameters()
+
+	//  {
+	if !p.expectPeek(token.LBRACE) {
+		return nil
+	}
+	lit.Body = p.parseBlockStatement()
+
+	return lit
+}
 func (p *Parser) parseIntegerLiteral() ast.Expression {
 	defer untrace(trace("parseIntegerLiteral"))
 
@@ -360,3 +378,30 @@ func (p *Parser) curPrecedence() int {
 	}
 	return LOWEST
 }
+
+func (p *Parser) ParseFunctionParameters() []*ast.Identifier {
+	identifiers := []*ast.Identifier{}
+
+	if p.peekTokenIs(token.RPAREN) {
+		p.nextToken()
+		return identifiers
+	}
+	p.nextToken()
+
+	ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+	identifiers = append(identifiers, ident)
+
+	for p.peekTokenIs(token.COMMA) {
+		p.nextToken() // ,
+		p.nextToken()
+		ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+		identifiers = append(identifiers, ident)
+	}
+
+	// )
+	if !p.expectPeek(token.RPAREN) {
+		return nil
+	}
+
+	return identifiers
+}
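
A rough end-to-end sketch (hypothetical usage, assuming the usual package-qualified names for the lexer and parser packages in this repo): parseFunctionLiteral is registered as the prefix parse function for token.FUNCTION, expects '(', collects the comma-separated identifiers up to ')', then expects '{' and hands off to parseBlockStatement:

	l := lexer.New("fn(x, y) { x + y }")
	p := parser.New(l)
	program := p.ParseProgram()

	stmt := program.Statements[0].(*ast.ExpressionStatement)
	fn := stmt.Expression.(*ast.FunctionLiteral)
	fmt.Println(len(fn.Parameters)) // 2
	fmt.Println(fn.String())        // prints something like: fn(x, y)(x + y)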

+ 37 - 0
parser/parser_test.go

@@ -592,3 +592,40 @@ func testInfixExpression(t *testing.T, exp ast.Expression, left interface{},
 
 	return true
 }
+
+func TestFunctionLiteral(t *testing.T) {
+	input := `fn(x,y) {x+y}`
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParseErrors(t, p)
+
+	if len(program.Statements) != 1 {
+		t.Fatalf("Program.Body does not contain %d statements. got=%d\n", 1, len(program.Statements))
+	}
+	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("program.Statement[0] is not ast.ExpressionStatemtn. got=%T", program.Statements[0])
+	}
+	function, ok := stmt.Expression.(*ast.FunctionLiteral)
+	if !ok {
+		t.Fatalf("stmt.Expression is not ast.FunctionLiteral. got=%T", stmt.Expression)
+	}
+	if len(function.Parameters) != 2 {
+		t.Fatalf("function literal parameters wrong. want 2, got =%d\n", len(function.Parameters))
+	}
+
+	testLiteralExpression(t, function.Parameters[0], "x")
+	testLiteralExpression(t, function.Parameters[1], "y")
+
+	if len(function.Body.Statements) != 1 {
+		t.Fatalf("function.Body.Statements has not 1 statements. got=%d\n", len(function.Body.Statements))
+	}
+
+	bodyStmt, ok := function.Body.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("function body stmt is not ast.ExpressionStatement. got=%T", function.Body.Statements[0])
+	}
+	testInfixExpression(t, bodyStmt.Expression, "x", "+", "y")
+}
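
A possible follow-up test (a sketch, not part of this commit) that pins down parameter parsing for the empty, single and multiple parameter cases, reusing the helpers already in parser_test.go:

	func TestFunctionParameterParsing(t *testing.T) {
		tests := []struct {
			input          string
			expectedParams []string
		}{
			{input: "fn() {};", expectedParams: []string{}},
			{input: "fn(x) {};", expectedParams: []string{"x"}},
			{input: "fn(x, y, z) {};", expectedParams: []string{"x", "y", "z"}},
		}

		for _, tt := range tests {
			l := lexer.New(tt.input)
			p := New(l)
			program := p.ParseProgram()
			checkParseErrors(t, p)

			stmt := program.Statements[0].(*ast.ExpressionStatement)
			function := stmt.Expression.(*ast.FunctionLiteral)

			if len(function.Parameters) != len(tt.expectedParams) {
				t.Errorf("length of parameters wrong. want %d, got=%d\n",
					len(tt.expectedParams), len(function.Parameters))
			}

			for i, ident := range tt.expectedParams {
				testLiteralExpression(t, function.Parameters[i], ident)
			}
		}
	}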