Kaynağa Gözat

gofmt files

runningwater 3 yıl önce
ebeveyn
işleme
7d4f02b22d
8 değiştirilmiş dosya ile 382 ekleme ve 382 silme
  1. 20 20
      ast/ast.go
  2. 99 99
      lexer/lexer.go
  3. 90 90
      lexer/lexer_test.go
  4. 11 11
      main.go
  5. 56 56
      parser/parser.go
  6. 44 44
      parser/parser_test.go
  7. 18 18
      repl/repl.go
  8. 44 44
      token/token.go

+ 20 - 20
ast/ast.go

@@ -9,31 +9,31 @@ import "github/runnignwater/monkey/token"
  * @Desc:
  */
 type Node interface {
-    TokenLiteral() string
+	TokenLiteral() string
 }
 
 // expressions produce values, statements don't
 type Statement interface {
-    Node
-    statementNode()
+	Node
+	statementNode()
 }
 type Expression interface {
-    Node
-    expressionNode()
+	Node
+	expressionNode()
 }
 
 // ---------------------implementation of Node----------------------------------BEGIN-----------------------------------
 // Root Node of our parser produces.
 type Program struct {
-    Statements []Statement
+	Statements []Statement
 }
 
 func (p *Program) TokenLiteral() string {
-    if len(p.Statements) > 0 {
-        return p.Statements[0].TokenLiteral()
-    } else {
-        return ""
-    }
+	if len(p.Statements) > 0 {
+		return p.Statements[0].TokenLiteral()
+	} else {
+		return ""
+	}
 }
 
 //---------------------implementation of Node-----------------------------------END-------------------------------------
@@ -58,27 +58,27 @@ func (p *Program) TokenLiteral() string {
 //  *ast.Identifier              *ast.Expression
 //
 type LetStatement struct {
-    Token token.Token // the token.LET token
-    Name  *Identifier
-    Value Expression
+	Token token.Token // the token.LET token
+	Name  *Identifier
+	Value Expression
 }
 
 func (ls *LetStatement) TokenLiteral() string {
-    return ls.Token.Literal
+	return ls.Token.Literal
 }
 
 func (ls *LetStatement) statementNode() {
-    panic("implement me")
+	panic("implement me")
 }
 
 type Identifier struct {
-    Token token.Token // the token.IDENT token
-    Value string
+	Token token.Token // the token.IDENT token
+	Value string
 }
 
 func (i *Identifier) expressionNode() {
-    panic("implement me")
+	panic("implement me")
 }
 func (i *Identifier) TokenLiteral() string {
-    return i.Token.Literal
+	return i.Token.Literal
 }

+ 99 - 99
lexer/lexer.go

@@ -10,26 +10,26 @@ import "github/runnignwater/monkey/token"
  */
 
 type Lexer struct {
-    input        string
-    position     int  // current position in input (points to current char)
-    readPosition int  // current reading position in input (after current char)
-    ch           byte // current char under examination
+	input        string
+	position     int  // current position in input (points to current char)
+	readPosition int  // current reading position in input (after current char)
+	ch           byte // current char under examination
 }
 
 func New(input string) *Lexer {
-    l := &Lexer{input: input}
-    l.readChar()
-    return l
+	l := &Lexer{input: input}
+	l.readChar()
+	return l
 }
 
 func (l *Lexer) readChar() {
-    if l.readPosition >= len(l.input) {
-        l.ch = 0
-    } else {
-        l.ch = l.input[l.readPosition]
-    }
-    l.position = l.readPosition
-    l.readPosition += 1
+	if l.readPosition >= len(l.input) {
+		l.ch = 0
+	} else {
+		l.ch = l.input[l.readPosition]
+	}
+	l.position = l.readPosition
+	l.readPosition += 1
 }
 
 /**
@@ -37,110 +37,110 @@ func (l *Lexer) readChar() {
  * and not move around in it
  */
 func (l *Lexer) peekChar() byte {
-    if l.readPosition >= len(l.input) {
-        return 0
-    } else {
-        return l.input[l.readPosition]
-    }
+	if l.readPosition >= len(l.input) {
+		return 0
+	} else {
+		return l.input[l.readPosition]
+	}
 }
 
 func (l *Lexer) readIdentifier() string {
-    position := l.position
-    for isLetter(l.ch) {
-        l.readChar()
-    }
-    return l.input[position:l.position]
+	position := l.position
+	for isLetter(l.ch) {
+		l.readChar()
+	}
+	return l.input[position:l.position]
 }
 
 func (l *Lexer) readNumber() string {
-    position := l.position
-    for isDigit(l.ch) {
-        l.readChar()
-    }
-    return l.input[position:l.position]
+	position := l.position
+	for isDigit(l.ch) {
+		l.readChar()
+	}
+	return l.input[position:l.position]
 }
 
 func (l *Lexer) NextToken() token.Token {
-    var tok token.Token
-
-    l.skipWhitespace()
-
-    switch l.ch {
-    case '=':
-        if l.peekChar() == '=' {
-            ch := l.ch
-            l.readChar()
-            tok = token.Token{Type: token.EQ, Literal: string(ch) + string(l.ch)}
-        } else {
-            tok = newToken(token.ASSIGN, l.ch)
-        }
-    case ';':
-        tok = newToken(token.SEMICOLON, l.ch)
-    case '(':
-        tok = newToken(token.LPAREN, l.ch)
-    case ')':
-        tok = newToken(token.RPAREN, l.ch)
-    case ',':
-        tok = newToken(token.COMMA, l.ch)
-    case '+':
-        tok = newToken(token.PLUS, l.ch)
-    case '-':
-        tok = newToken(token.MINUS, l.ch)
-    case '!':
-        if l.peekChar() == '=' {
-            ch := l.ch
-            l.readChar()
-            tok = token.Token{Type: token.NOT_EQ, Literal: string(ch) + string(l.ch)}
-        } else {
-            tok = newToken(token.BANG, l.ch)
-        }
-    case '*':
-        tok = newToken(token.ASTERISK, l.ch)
-    case '/':
-        tok = newToken(token.SLASH, l.ch)
-    case '<':
-        tok = newToken(token.LT, l.ch)
-    case '>':
-        tok = newToken(token.GT, l.ch)
-    case '{':
-        tok = newToken(token.LBRACE, l.ch)
-    case '}':
-        tok = newToken(token.RBRACE, l.ch)
-    case 0:
-        tok.Type = token.EOF
-        tok.Literal = ""
-    default:
-        if isLetter(l.ch) {
-            tok.Literal = l.readIdentifier()
-            tok.Type = token.LookupIdent(tok.Literal)
-        } else if isDigit(l.ch) {
-            tok.Type = token.INT
-            tok.Literal = l.readNumber()
-        } else {
-            tok = newToken(token.ILLEGAL, l.ch)
-        }
-
-        return tok
-    }
-
-    l.readChar()
-    return tok
+	var tok token.Token
+
+	l.skipWhitespace()
+
+	switch l.ch {
+	case '=':
+		if l.peekChar() == '=' {
+			ch := l.ch
+			l.readChar()
+			tok = token.Token{Type: token.EQ, Literal: string(ch) + string(l.ch)}
+		} else {
+			tok = newToken(token.ASSIGN, l.ch)
+		}
+	case ';':
+		tok = newToken(token.SEMICOLON, l.ch)
+	case '(':
+		tok = newToken(token.LPAREN, l.ch)
+	case ')':
+		tok = newToken(token.RPAREN, l.ch)
+	case ',':
+		tok = newToken(token.COMMA, l.ch)
+	case '+':
+		tok = newToken(token.PLUS, l.ch)
+	case '-':
+		tok = newToken(token.MINUS, l.ch)
+	case '!':
+		if l.peekChar() == '=' {
+			ch := l.ch
+			l.readChar()
+			tok = token.Token{Type: token.NOT_EQ, Literal: string(ch) + string(l.ch)}
+		} else {
+			tok = newToken(token.BANG, l.ch)
+		}
+	case '*':
+		tok = newToken(token.ASTERISK, l.ch)
+	case '/':
+		tok = newToken(token.SLASH, l.ch)
+	case '<':
+		tok = newToken(token.LT, l.ch)
+	case '>':
+		tok = newToken(token.GT, l.ch)
+	case '{':
+		tok = newToken(token.LBRACE, l.ch)
+	case '}':
+		tok = newToken(token.RBRACE, l.ch)
+	case 0:
+		tok.Type = token.EOF
+		tok.Literal = ""
+	default:
+		if isLetter(l.ch) {
+			tok.Literal = l.readIdentifier()
+			tok.Type = token.LookupIdent(tok.Literal)
+		} else if isDigit(l.ch) {
+			tok.Type = token.INT
+			tok.Literal = l.readNumber()
+		} else {
+			tok = newToken(token.ILLEGAL, l.ch)
+		}
+
+		return tok
+	}
+
+	l.readChar()
+	return tok
 }
 
 func (l *Lexer) skipWhitespace() {
-    for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
-        l.readChar()
-    }
+	for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
+		l.readChar()
+	}
 }
 
 func isDigit(ch byte) bool {
-    return '0' <= ch && ch <= '9'
+	return '0' <= ch && ch <= '9'
 }
 
 func isLetter(ch byte) bool {
-    return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
+	return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
 }
 
 func newToken(tokenType token.TypeToken, ch byte) token.Token {
-    return token.Token{Type: tokenType, Literal: string(ch)}
+	return token.Token{Type: tokenType, Literal: string(ch)}
 }

+ 90 - 90
lexer/lexer_test.go

@@ -1,8 +1,8 @@
 package lexer
 
 import (
-    "github/runnignwater/monkey/token"
-    "testing"
+	"github/runnignwater/monkey/token"
+	"testing"
 )
 
 /**
@@ -12,7 +12,7 @@ import (
  * @Desc:
  */
 func TestNextToken(t *testing.T) {
-    input := `let five = 5;
+	input := `let five = 5;
         let ten = 10;
         let add = fn(x, y) { x + y;
         };
@@ -29,95 +29,95 @@ func TestNextToken(t *testing.T) {
         10 == 10;
         10 != 9;
         `
-    tests := []struct {
-        expectedType    token.TypeToken
-        expectedLiteral string
-    }{
-        {token.LET, "let"},
-        {token.IDENT, "five"},
-        {token.ASSIGN, "="},
-        {token.INT, "5"},
-        {token.SEMICOLON, ";"},
-        {token.LET, "let"},
-        {token.IDENT, "ten"},
-        {token.ASSIGN, "="},
-        {token.INT, "10"},
-        {token.SEMICOLON, ";"},
-        {token.LET, "let"},
-        {token.IDENT, "add"},
-        {token.ASSIGN, "="},
-        {token.FUNCTION, "fn"},
-        {token.LPAREN, "("},
-        {token.IDENT, "x"},
-        {token.COMMA, ","},
-        {token.IDENT, "y"},
-        {token.RPAREN, ")"},
-        {token.LBRACE, "{"},
-        {token.IDENT, "x"},
-        {token.PLUS, "+"},
-        {token.IDENT, "y"},
-        {token.SEMICOLON, ";"},
-        {token.RBRACE, "}"},
-        {token.SEMICOLON, ";"},
-        {token.LET, "let"},
-        {token.IDENT, "result"},
-        {token.ASSIGN, "="},
-        {token.IDENT, "add"},
-        {token.LPAREN, "("},
-        {token.IDENT, "five"},
-        {token.COMMA, ","},
-        {token.IDENT, "ten"},
-        {token.RPAREN, ")"},
-        {token.SEMICOLON, ";"},
-        {token.BANG, "!"},
-        {token.MINUS, "-"},
-        {token.SLASH, "/"},
-        {token.ASTERISK, "*"},
-        {token.INT, "5"},
-        {token.LT, "<"},
-        {token.INT, "10"},
-        {token.GT, ">"},
-        {token.INT, "5"},
-        {token.SEMICOLON, ";"},
+	tests := []struct {
+		expectedType    token.TypeToken
+		expectedLiteral string
+	}{
+		{token.LET, "let"},
+		{token.IDENT, "five"},
+		{token.ASSIGN, "="},
+		{token.INT, "5"},
+		{token.SEMICOLON, ";"},
+		{token.LET, "let"},
+		{token.IDENT, "ten"},
+		{token.ASSIGN, "="},
+		{token.INT, "10"},
+		{token.SEMICOLON, ";"},
+		{token.LET, "let"},
+		{token.IDENT, "add"},
+		{token.ASSIGN, "="},
+		{token.FUNCTION, "fn"},
+		{token.LPAREN, "("},
+		{token.IDENT, "x"},
+		{token.COMMA, ","},
+		{token.IDENT, "y"},
+		{token.RPAREN, ")"},
+		{token.LBRACE, "{"},
+		{token.IDENT, "x"},
+		{token.PLUS, "+"},
+		{token.IDENT, "y"},
+		{token.SEMICOLON, ";"},
+		{token.RBRACE, "}"},
+		{token.SEMICOLON, ";"},
+		{token.LET, "let"},
+		{token.IDENT, "result"},
+		{token.ASSIGN, "="},
+		{token.IDENT, "add"},
+		{token.LPAREN, "("},
+		{token.IDENT, "five"},
+		{token.COMMA, ","},
+		{token.IDENT, "ten"},
+		{token.RPAREN, ")"},
+		{token.SEMICOLON, ";"},
+		{token.BANG, "!"},
+		{token.MINUS, "-"},
+		{token.SLASH, "/"},
+		{token.ASTERISK, "*"},
+		{token.INT, "5"},
+		{token.LT, "<"},
+		{token.INT, "10"},
+		{token.GT, ">"},
+		{token.INT, "5"},
+		{token.SEMICOLON, ";"},
 
-        {token.IF, "if"},
-        {token.LPAREN, "("},
-        {token.INT, "5"},
-        {token.LT, "<"},
-        {token.INT, "10"},
-        {token.RPAREN, ")"},
-        {token.LBRACE, "{"},
-        {token.RETURN, "return"},
-        {token.TRUE, "true"},
-        {token.SEMICOLON, ";"},
-        {token.RBRACE, "}"},
-        {token.ELSE, "else"},
-        {token.LBRACE, "{"},
-        {token.RETURN, "return"},
-        {token.FALSE, "false"},
-        {token.SEMICOLON, ";"},
-        {token.RBRACE, "}"},
-        {token.INT, "10"},
-        {token.EQ, "=="},
-        {token.INT, "10"},
-        {token.SEMICOLON, ";"},
-        {token.INT, "10"},
-        {token.NOT_EQ, "!="},
-        {token.INT, "9"},
-        {token.SEMICOLON, ";"},
-        {token.EOF, ""},
-    }
+		{token.IF, "if"},
+		{token.LPAREN, "("},
+		{token.INT, "5"},
+		{token.LT, "<"},
+		{token.INT, "10"},
+		{token.RPAREN, ")"},
+		{token.LBRACE, "{"},
+		{token.RETURN, "return"},
+		{token.TRUE, "true"},
+		{token.SEMICOLON, ";"},
+		{token.RBRACE, "}"},
+		{token.ELSE, "else"},
+		{token.LBRACE, "{"},
+		{token.RETURN, "return"},
+		{token.FALSE, "false"},
+		{token.SEMICOLON, ";"},
+		{token.RBRACE, "}"},
+		{token.INT, "10"},
+		{token.EQ, "=="},
+		{token.INT, "10"},
+		{token.SEMICOLON, ";"},
+		{token.INT, "10"},
+		{token.NOT_EQ, "!="},
+		{token.INT, "9"},
+		{token.SEMICOLON, ";"},
+		{token.EOF, ""},
+	}
 
-    l := New(input)
+	l := New(input)
 
-    for i, tt := range tests {
-        tok := l.NextToken()
+	for i, tt := range tests {
+		tok := l.NextToken()
 
-        if tok.Type != tt.expectedType {
-            t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q", i, tt.expectedType, tok.Type)
-        }
-        if tok.Literal != tt.expectedLiteral {
-            t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q", i, tt.expectedLiteral, tok.Literal)
-        }
-    }
+		if tok.Type != tt.expectedType {
+			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q", i, tt.expectedType, tok.Type)
+		}
+		if tok.Literal != tt.expectedLiteral {
+			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q", i, tt.expectedLiteral, tok.Literal)
+		}
+	}
 }

+ 11 - 11
main.go

@@ -1,10 +1,10 @@
 package main
 
 import (
-    "fmt"
-    "github/runnignwater/monkey/repl"
-    "os"
-    _user "os/user"
+	"fmt"
+	"github/runnignwater/monkey/repl"
+	"os"
+	_user "os/user"
 )
 
 /**
@@ -14,12 +14,12 @@ import (
  * @Desc:
  */
 func main() {
-    user, err := _user.Current()
-    if err != nil {
-        panic(err)
-    }
+	user, err := _user.Current()
+	if err != nil {
+		panic(err)
+	}
 
-    fmt.Printf("Hello %s! This is the Monkey programming language!\n", user.Username)
-    fmt.Printf("Feel free to type in commands\n")
-    repl.Start(os.Stdin, os.Stdout)
+	fmt.Printf("Hello %s! This is the Monkey programming language!\n", user.Username)
+	fmt.Printf("Feel free to type in commands\n")
+	repl.Start(os.Stdin, os.Stdout)
 }

+ 56 - 56
parser/parser.go

@@ -1,9 +1,9 @@
 package parser
 
 import (
-    "github/runnignwater/monkey/ast"
-    "github/runnignwater/monkey/lexer"
-    "github/runnignwater/monkey/token"
+	"github/runnignwater/monkey/ast"
+	"github/runnignwater/monkey/lexer"
+	"github/runnignwater/monkey/token"
 )
 
 /**
@@ -13,85 +13,85 @@ import (
  * @Desc:
  */
 type Parser struct {
-    l *lexer.Lexer // point to the instance of the lexer
+	l *lexer.Lexer // point to the instance of the lexer
 
-    curToken  token.Token // point to the current token
-    peekToken token.Token // point to the next token
+	curToken  token.Token // point to the current token
+	peekToken token.Token // point to the next token
 }
 
 func New(l *lexer.Lexer) *Parser {
-    p := &Parser{l: l}
+	p := &Parser{l: l}
 
-    // Read two tokens, so curToken and peekToken are both set
-    p.nextToken()
-    p.nextToken()
+	// Read two tokens, so curToken and peekToken are both set
+	p.nextToken()
+	p.nextToken()
 
-    return p
+	return p
 }
 
 func (p *Parser) nextToken() {
-    p.curToken = p.peekToken
-    p.peekToken = p.l.NextToken()
+	p.curToken = p.peekToken
+	p.peekToken = p.l.NextToken()
 }
 
 func (p *Parser) ParseProgram() *ast.Program {
-    program := &ast.Program{}
-    program.Statements = []ast.Statement{}
-
-    for p.curToken.Type != token.EOF {
-        stmt := p.parseStatement()
-        if stmt != nil {
-            program.Statements = append(program.Statements, stmt)
-        }
-        p.nextToken()
-
-    }
-    return program
+	program := &ast.Program{}
+	program.Statements = []ast.Statement{}
+
+	for p.curToken.Type != token.EOF {
+		stmt := p.parseStatement()
+		if stmt != nil {
+			program.Statements = append(program.Statements, stmt)
+		}
+		p.nextToken()
+
+	}
+	return program
 }
 
 func (p *Parser) parseStatement() ast.Statement {
-    switch p.curToken.Type {
-    case token.LET:
-        return p.parseLetStatement()
-    default:
-        return nil
-    }
+	switch p.curToken.Type {
+	case token.LET:
+		return p.parseLetStatement()
+	default:
+		return nil
+	}
 }
 
 // let <identifier> = <expression>;
 func (p *Parser) parseLetStatement() *ast.LetStatement {
-    stmt := &ast.LetStatement{Token: p.curToken}
-
-    if !p.expectPeek(token.IDENT) {
-        return nil
-    }
-
-    stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
-    if !p.expectPeek(token.ASSIGN) {
-        return nil
-    }
-
-    // TODO: we're skipping the expression until
-    // we encounter a semicolon
-    for !p.curTokenIs(token.SEMICOLON) {
-        p.nextToken()
-    }
-    return stmt
+	stmt := &ast.LetStatement{Token: p.curToken}
+
+	if !p.expectPeek(token.IDENT) {
+		return nil
+	}
+
+	stmt.Name = &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
+	if !p.expectPeek(token.ASSIGN) {
+		return nil
+	}
+
+	// TODO: we're skipping the expression until
+	// we encounter a semicolon
+	for !p.curTokenIs(token.SEMICOLON) {
+		p.nextToken()
+	}
+	return stmt
 }
 
 func (p *Parser) curTokenIs(t token.TypeToken) bool {
-    return p.curToken.Type == t
+	return p.curToken.Type == t
 }
 
 func (p *Parser) peekTokenIs(t token.TypeToken) bool {
-    return p.peekToken.Type == t
+	return p.peekToken.Type == t
 }
 
 func (p *Parser) expectPeek(t token.TypeToken) bool {
-    if p.peekTokenIs(t) {
-        p.nextToken()
-        return true
-    } else {
-        return false
-    }
+	if p.peekTokenIs(t) {
+		p.nextToken()
+		return true
+	} else {
+		return false
+	}
 }

+ 44 - 44
parser/parser_test.go

@@ -1,9 +1,9 @@
 package parser
 
 import (
-    "github/runnignwater/monkey/ast"
-    "github/runnignwater/monkey/lexer"
-    "testing"
+	"github/runnignwater/monkey/ast"
+	"github/runnignwater/monkey/lexer"
+	"testing"
 )
 
 /**
@@ -13,59 +13,59 @@ import (
  * @Desc: LetStatement test case
  */
 func TestLetStatements(t *testing.T) {
-    input := `
+	input := `
         let x = 5;
         let y = 10;
         let foo = 838383;
         `
-    l := lexer.New(input)
-    p := New(l)
+	l := lexer.New(input)
+	p := New(l)
 
-    program := p.ParseProgram()
-    if program == nil {
-        t.Fatalf("ParseProgram() return nil")
-    }
-    if len(program.Statements) != 3 {
-        t.Fatalf("Program.Statements does not contain 3 statements. got=%d", len(program.Statements))
-    }
+	program := p.ParseProgram()
+	if program == nil {
+		t.Fatalf("ParseProgram() return nil")
+	}
+	if len(program.Statements) != 3 {
+		t.Fatalf("Program.Statements does not contain 3 statements. got=%d", len(program.Statements))
+	}
 
-    tests := []struct {
-        expectedIdentifies string
-    }{
-        {"x"},
-        {"y"},
-        {"foo"},
-    }
+	tests := []struct {
+		expectedIdentifies string
+	}{
+		{"x"},
+		{"y"},
+		{"foo"},
+	}
 
-    for i, tt := range tests {
-        stmt := program.Statements[i]
-        if !testLetStatement(t, stmt, tt.expectedIdentifies) {
-            return
-        }
-    }
+	for i, tt := range tests {
+		stmt := program.Statements[i]
+		if !testLetStatement(t, stmt, tt.expectedIdentifies) {
+			return
+		}
+	}
 }
 
 func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
-    if s.TokenLiteral() != "let" {
-        t.Errorf("s.TokenLiteral() not 'let'. got =%q", s.TokenLiteral())
-        return false
-    }
+	if s.TokenLiteral() != "let" {
+		t.Errorf("s.TokenLiteral() not 'let'. got =%q", s.TokenLiteral())
+		return false
+	}
 
-    letStmt, ok := s.(*ast.LetStatement)
-    if !ok {
-        t.Errorf("s is not *ast.LetStatement. got=%T", s)
-        return false
-    }
+	letStmt, ok := s.(*ast.LetStatement)
+	if !ok {
+		t.Errorf("s is not *ast.LetStatement. got=%T", s)
+		return false
+	}
 
-    if letStmt.Name.Value != name {
-        t.Errorf("letStmt.Name.Value not '%s'. got=%s", name, letStmt.Name.Value)
-        return false
-    }
+	if letStmt.Name.Value != name {
+		t.Errorf("letStmt.Name.Value not '%s'. got=%s", name, letStmt.Name.Value)
+		return false
+	}
 
-    if letStmt.Name.TokenLiteral() != name {
-        t.Errorf("s.name not '%s'. got=%s", name, letStmt.Name)
-        return false
-    }
+	if letStmt.Name.TokenLiteral() != name {
+		t.Errorf("s.name not '%s'. got=%s", name, letStmt.Name)
+		return false
+	}
 
-    return true
+	return true
 }

+ 18 - 18
repl/repl.go

@@ -1,11 +1,11 @@
 package repl
 
 import (
-    "bufio"
-    "fmt"
-    "github/runnignwater/monkey/lexer"
-    "github/runnignwater/monkey/token"
-    "io"
+	"bufio"
+	"fmt"
+	"github/runnignwater/monkey/lexer"
+	"github/runnignwater/monkey/token"
+	"io"
 )
 
 /**
@@ -18,20 +18,20 @@ import (
 const PROMPT = ">>> "
 
 func Start(in io.Reader, out io.Writer) {
-    scanner := bufio.NewScanner(in)
+	scanner := bufio.NewScanner(in)
 
-    for {
-        fmt.Printf(PROMPT)
-        scan := scanner.Scan()
-        if !scan {
-            return
-        }
-        line := scanner.Text()
+	for {
+		fmt.Printf(PROMPT)
+		scan := scanner.Scan()
+		if !scan {
+			return
+		}
+		line := scanner.Text()
 
-        l := lexer.New(line)
+		l := lexer.New(line)
 
-        for tok := l.NextToken(); tok.Type != token.EOF; tok = l.NextToken() {
-            fmt.Printf("%+v\n", tok)
-        }
-    }
+		for tok := l.NextToken(); tok.Type != token.EOF; tok = l.NextToken() {
+			fmt.Printf("%+v\n", tok)
+		}
+	}
 }

+ 44 - 44
token/token.go

@@ -8,64 +8,64 @@ package token
  */
 
 const (
-    ILLEGAL = "ILLEGAL"
-    EOF     = "EOF"
+	ILLEGAL = "ILLEGAL"
+	EOF     = "EOF"
 
-    // Identifiers + literals
-    IDENT = "IDENT" // add, foobar, x, y ...
-    INT   = "INT"   // 123456
+	// Identifiers + literals
+	IDENT = "IDENT" // add, foobar, x, y ...
+	INT   = "INT"   // 123456
 
-    // Operators
-    ASSIGN   = "="
-    PLUS     = "+"
-    MINUS    = "-"
-    BANG     = "!"
-    ASTERISK = "*"
-    SLASH    = "/"
-    LT       = "<"
-    GT       = ">"
-    EQ       = "=="
-    NOT_EQ   = "!="
+	// Operators
+	ASSIGN   = "="
+	PLUS     = "+"
+	MINUS    = "-"
+	BANG     = "!"
+	ASTERISK = "*"
+	SLASH    = "/"
+	LT       = "<"
+	GT       = ">"
+	EQ       = "=="
+	NOT_EQ   = "!="
 
-    // Delimiters
-    COMMA     = ","
-    SEMICOLON = ";"
+	// Delimiters
+	COMMA     = ","
+	SEMICOLON = ";"
 
-    LPAREN = "("
-    RPAREN = ")"
-    LBRACE = "{"
-    RBRACE = "}"
+	LPAREN = "("
+	RPAREN = ")"
+	LBRACE = "{"
+	RBRACE = "}"
 
-    // Keywords
-    FUNCTION = "FUNCTION"
-    LET      = "LET"
-    TRUE     = "TRUE"
-    FALSE    = "FALSE"
-    IF       = "IF"
-    ELSE     = "ELSE"
-    RETURN   = "RETURN"
+	// Keywords
+	FUNCTION = "FUNCTION"
+	LET      = "LET"
+	TRUE     = "TRUE"
+	FALSE    = "FALSE"
+	IF       = "IF"
+	ELSE     = "ELSE"
+	RETURN   = "RETURN"
 )
 
 var keyword = map[string]TypeToken{
-    "fn":     FUNCTION,
-    "let":    LET,
-    "true":   TRUE,
-    "false":  FALSE,
-    "if":     IF,
-    "else":   ELSE,
-    "return": RETURN,
+	"fn":     FUNCTION,
+	"let":    LET,
+	"true":   TRUE,
+	"false":  FALSE,
+	"if":     IF,
+	"else":   ELSE,
+	"return": RETURN,
 }
 
 func LookupIdent(ident string) TypeToken {
-    if tok, ok := keyword[ident]; ok {
-        return tok
-    }
-    return IDENT
+	if tok, ok := keyword[ident]; ok {
+		return tok
+	}
+	return IDENT
 }
 
 type TypeToken string
 
 type Token struct {
-    Type    TypeToken
-    Literal string
+	Type    TypeToken
+	Literal string
 }