
Extending the interpreter -- Parsing Array Literals
e.g. [1,2,3 + 3, fn(x){x}, add(2,3)]

simon, 3 years ago
Parent · Commit f61446d5d6
6 changed files with 93 additions and 2 deletions
  1. ast/ast.go (+26, -0)
  2. lexer/lexer.go (+4, -0)
  3. lexer/lexer_test.go (+8, -1)
  4. parser/parser.go (+30, -1)
  5. parser/parser_test.go (+21, -0)
  6. token/token.go (+4, -0)

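Taken together, the six changes below are enough to turn the example from the commit message into an ast.ArrayLiteral. The driver below is a minimal sketch, not part of the commit: the "monkey/..." import paths and the String() call on the parsed program are assumptions based on the rest of the codebase, while lexer.New, parser's New, and ParseProgram all appear in the diffs that follow.

package main

import (
	"fmt"

	"monkey/lexer"  // assumed module path
	"monkey/parser" // assumed module path
)

func main() {
	// The example from the commit message.
	input := "[1, 2, 3 + 3, fn(x){x}, add(2, 3)]"

	l := lexer.New(input)       // the lexer now emits LBRACKET / RBRACKET tokens
	p := parser.New(l)          // the parser has a prefix fn registered for '['
	program := p.ParseProgram()

	// ArrayLiteral.String() joins the element expressions, so this prints
	// something along the lines of: [1, 2, (3 + 3), fn(x) x, add(2, 3)]
	fmt.Println(program.String())
}
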
ast/ast.go (+26, -0)

@@ -139,6 +139,7 @@ func (i *IntegerLiteral) String() string { return i.Token.Literal }
 
 func (i *IntegerLiteral) expressionNode() {}
 
+// StringLiteral represents a string literal expression
 type StringLiteral struct {
 	Token token.Token
 	Value string
@@ -242,6 +243,7 @@ func (ie *IfExpression) String() string {
 
 func (ie *IfExpression) expressionNode() {}
 
+// BlockStatement represents a block of statements: { <statement> ... }
 type BlockStatement struct {
 	Token      token.Token // the { token
 	Statements []Statement
@@ -320,3 +322,27 @@ func (ce *CallExpression) String() string {
 }
 
 func (ce *CallExpression) expressionNode() {}
+
+// ArrayLiteral represents an array literal expression: [<expression>, <expression>, ...]
+type ArrayLiteral struct {
+	Token   token.Token // the '[' token
+	Element []Expression
+}
+
+func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }
+
+func (al *ArrayLiteral) String() string {
+	var out bytes.Buffer
+
+	elements := []string{}
+	for _, el := range al.Element {
+		elements = append(elements, el.String())
+	}
+
+	out.WriteString("[")
+	out.WriteString(strings.Join(elements, ", "))
+	out.WriteString("]")
+	return out.String()
+}
+
+func (al *ArrayLiteral) expressionNode() {}

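As a quick sanity check of the String() method added above, an ArrayLiteral can be built by hand from the fields in this hunk. This is only an illustrative sketch: the "monkey/..." import paths are assumed, and only the Token field of ast.IntegerLiteral is relied on, since its String() returns the token literal.

package main

import (
	"fmt"

	"monkey/ast"   // assumed module path
	"monkey/token" // assumed module path
)

func main() {
	arr := &ast.ArrayLiteral{
		Token: token.Token{Type: token.LBRACKET, Literal: "["},
		Element: []ast.Expression{
			// IntegerLiteral.String() returns the token literal, so only
			// the Literal fields matter for the rendered output.
			&ast.IntegerLiteral{Token: token.Token{Type: token.INT, Literal: "1"}},
			&ast.IntegerLiteral{Token: token.Token{Type: token.INT, Literal: "2"}},
		},
	}

	fmt.Println(arr.String()) // prints: [1, 2]
}
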
lexer/lexer.go (+4, -0)

@@ -123,6 +123,10 @@ func (l *Lexer) NextToken() token.Token {
 	case '"':
 		tok.Type = token.STRING
 		tok.Literal = l.readString()
+	case '[':
+		tok = newToken(token.LBRACKET, l.ch)
+	case ']':
+		tok = newToken(token.RBRACKET, l.ch)
 	default:
 		if isLetter(l.ch) {
 			tok.Literal = l.readIdentifier()

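Since '[' and ']' are single-character tokens like '(' and ')', the two new cases are all the lexer needs. A small sketch (module path assumed) that loops over NextToken makes the new output visible:

package main

import (
	"fmt"

	"monkey/lexer" // assumed module path
	"monkey/token" // assumed module path
)

func main() {
	l := lexer.New("[1, 2];")

	// Pull tokens until EOF; the new cases above produce LBRACKET and
	// RBRACKET tokens with "[" and "]" as their literals.
	for tok := l.NextToken(); tok.Type != token.EOF; tok = l.NextToken() {
		fmt.Printf("%-10s %q\n", tok.Type, tok.Literal)
	}
}
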
lexer/lexer_test.go (+8, -1)

@@ -29,7 +29,8 @@ func TestNextToken(t *testing.T) {
         10 == 10;
         10 != 9;
         "foobar"
-		"foo bar"`
+		"foo bar"
+		[1, 2];`
 	tests := []struct {
 		expectedType    token.TypeToken
 		expectedLiteral string
@@ -108,6 +109,12 @@ func TestNextToken(t *testing.T) {
 		{token.SEMICOLON, ";"},
 		{token.STRING, "foobar"},
 		{token.STRING, "foo bar"},
+		{token.LBRACKET, "["},
+		{token.INT, "1"},
+		{token.COMMA, ","},
+		{token.INT, "2"},
+		{token.RBRACKET, "]"},
+		{token.SEMICOLON, ";"},
 		{token.EOF, ""},
 	}
 

parser/parser.go (+30, -1)

@@ -73,6 +73,7 @@ func New(l *lexer.Lexer) *Parser {
 	p.registerPrefix(token.IF, p.parseIfExpression)
 	p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
 	p.registerPrefix(token.STRING, p.parseStringLiteral)
+	p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
 
 	p.infixParseFns = make(map[token.TypeToken]infixParseFn)
 	p.registerInfix(token.PLUS, p.parseInfixExpression)
@@ -205,11 +206,18 @@ func (p *Parser) parseIntegerLiteral() ast.Expression {
 func (p *Parser) parseStringLiteral() ast.Expression {
 	return &ast.StringLiteral{Token: p.curToken, Value: p.curToken.Literal}
 }
+func (p *Parser) parseArrayLiteral() ast.Expression {
+	array := &ast.ArrayLiteral{Token: p.curToken}
+
+	array.Element = p.parseExpressionList(token.RBRACKET)
+
+	return array
+}
 func (p *Parser) parseCallExpression(left ast.Expression) ast.Expression {
 	defer untrace(trace("parseCallExpression"))
 	// add(2,3) --> Function: add
 	exp := &ast.CallExpression{Token: p.curToken, Function: left}
-	exp.Arguments = p.parseCallArguments()
+	exp.Arguments = p.parseExpressionList(token.RPAREN)
 	return exp
 }
 func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
@@ -346,6 +354,27 @@ func (p *Parser) parseExpression(precedence int) ast.Expression {
 
 	return leftExp
 }
+func (p *Parser) parseExpressionList(end token.TypeToken) []ast.Expression {
+	defer untrace(trace("parseExpressionList"))
+	list := []ast.Expression{}
+
+	if p.peekTokenIs(end) {
+		p.nextToken()
+		return list
+	}
+	p.nextToken()
+	list = append(list, p.parseExpression(LOWEST))
+
+	for p.peekTokenIs(token.COMMA) {
+		p.nextToken()
+		p.nextToken()
+		list = append(list, p.parseExpression(LOWEST))
+	}
+	if !p.expectPeek(end) {
+		return nil
+	}
+	return list
+}
 
 func (p *Parser) curTokenIs(t token.TypeToken) bool {
 	return p.curToken.Type == t

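The new parseExpressionList generalizes the old parseCallArguments: array elements and call arguments only differ in their closing token, so parseCallExpression now passes token.RPAREN to the same helper. The sketch below (import paths assumed, String() output shape approximate) exercises both paths, including the empty-list early return:

package main

import (
	"fmt"

	"monkey/lexer"  // assumed module path
	"monkey/parser" // assumed module path
)

func main() {
	// "[]" hits the early return in parseExpressionList, "[1, 2 * 2]" walks
	// the comma loop, and "add(2, 3)" exercises the refactored call path.
	for _, input := range []string{"[]", "[1, 2 * 2]", "add(2, 3)"} {
		p := parser.New(lexer.New(input))
		program := p.ParseProgram()
		fmt.Printf("%-12s -> %s\n", input, program.String())
	}
}
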
parser/parser_test.go (+21, -0)

@@ -711,3 +711,24 @@ func TestStringLiteralExpression(t *testing.T) {
 		t.Fatalf("literal.Value not %q. got=%q", "hello world", literal.Value)
 	}
 }
+
+func TestArrayLiteralsParsing(t *testing.T) {
+	input := "[1, 2 * 2, 3 + 3]"
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParseErrors(t, p)
+
+	stmt := program.Statements[0].(*ast.ExpressionStatement)
+	array, ok := stmt.Expression.(*ast.ArrayLiteral)
+	if !ok {
+		t.Fatalf("exp not ast.ArrayLiteral. got=%T", stmt.Expression)
+	}
+	if len(array.Element) != 3 {
+		t.Fatalf("len(array.Element) not 3. got=%d", len(array.Element))
+	}
+	testIntegerLiteral(t, array.Element[0], 1)
+	testInfixExpression(t, array.Element[1], 2, "*", 2)
+	testInfixExpression(t, array.Element[2], 3, "+", 3)
+}

token/token.go (+4, -0)

@@ -12,6 +12,7 @@ const (
 	EOF     = "EOF"
 
 	// Identifiers + literals
+
 	IDENT  = "IDENT" // add, foobar, x, y ...
 	INT    = "INT"   // 123456
 	STRING = "STRING"
@@ -37,6 +38,9 @@ const (
 	LBRACE = "{"
 	RBRACE = "}"
 
+	LBRACKET = "["
+	RBRACKET = "]"
+
 	// Keywords
 	FUNCTION = "FUNCTION"
 	LET      = "LET"