monkey/internal/lexer/lexer_test.go

package lexer

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"monkey/internal/token"
)
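
// TestNextToken_Simple verifies that NextToken emits one token per
// single-character symbol in the input, followed by EOF.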
func TestNextToken_Simple(t *testing.T) {
	input := `=+(){},;`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.ASSIGN, "="},
		{token.PLUS, "+"},
		{token.LPAREN, "("},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.RBRACE, "}"},
		{token.COMMA, ","},
		{token.SEMICOLON, ";"},
		{token.EOF, ""},
	}

	l := New(input)
	for i, tt := range tests {
		tok := l.NextToken()
		assert.Equal(t, tt.expectedType, tok.Type, "[%d]: %v %v", i, tok.Type, tt.expectedType)
		assert.Equal(t, tt.expectedLiteral, tok.Literal, "[%d]: %v %v", i, tok.Literal, tt.expectedLiteral)
	}
}
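
// TestNextToken_Keywords verifies that keywords (fn, if, else, return, true,
// false) are distinguished from identifiers and that nested braces lex cleanly.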
func TestNextToken_Keywords(t *testing.T) {
	input := `fn test() {
	if (5 < 10) {
		return true;
	} else {
		return false;
	}
}`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.FUNCTION, "fn"},
		{token.IDENT, "test"},
		{token.LPAREN, "("},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.IF, "if"},
		{token.LPAREN, "("},
		{token.INT, "5"},
		{token.LT, "<"},
		{token.INT, "10"},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.RETURN, "return"},
		{token.TRUE, "true"},
		{token.SEMICOLON, ";"},
		{token.RBRACE, "}"},
		{token.ELSE, "else"},
		{token.LBRACE, "{"},
		{token.RETURN, "return"},
		{token.FALSE, "false"},
		{token.SEMICOLON, ";"},
		{token.RBRACE, "}"},
		{token.RBRACE, "}"},
		{token.EOF, ""},
	}

	l := New(input)
	for i, tt := range tests {
		tok := l.NextToken()
		assert.Equal(t, tt.expectedType, tok.Type, "[%d]: %v %v", i, tok.Type, tt.expectedType)
		assert.Equal(t, tt.expectedLiteral, tok.Literal, "[%d]: %v %v", i, tok.Literal, tt.expectedLiteral)
	}
}
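
// TestNextToken_Complex runs the lexer over a small Monkey program that mixes
// let bindings, a function literal, single-character operators, and the
// two-character == and != tokens.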
func TestNextToken_Complex(t *testing.T) {
	input := `let five = 5;
let ten = 10;
let add = fn(x, y) {
	x + y;
};
let result = add(five, ten);
-/*<987>!
10 == 10
10 != 9
`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.LET, "let"},
		{token.IDENT, "five"},
		{token.ASSIGN, "="},
		{token.INT, "5"},
		{token.SEMICOLON, ";"},
		{token.LET, "let"},
		{token.IDENT, "ten"},
		{token.ASSIGN, "="},
		{token.INT, "10"},
		{token.SEMICOLON, ";"},
		{token.LET, "let"},
		{token.IDENT, "add"},
		{token.ASSIGN, "="},
		{token.FUNCTION, "fn"},
		{token.LPAREN, "("},
		{token.IDENT, "x"},
		{token.COMMA, ","},
		{token.IDENT, "y"},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.IDENT, "x"},
		{token.PLUS, "+"},
		{token.IDENT, "y"},
		{token.SEMICOLON, ";"},
		{token.RBRACE, "}"},
		{token.SEMICOLON, ";"},
		{token.LET, "let"},
		{token.IDENT, "result"},
		{token.ASSIGN, "="},
		{token.IDENT, "add"},
		{token.LPAREN, "("},
		{token.IDENT, "five"},
		{token.COMMA, ","},
		{token.IDENT, "ten"},
		{token.RPAREN, ")"},
		{token.SEMICOLON, ";"},
		{token.MINUS, "-"},
		{token.SLASH, "/"},
		{token.ASTERISK, "*"},
		{token.LT, "<"},
		{token.INT, "987"},
		{token.GT, ">"},
		{token.BANG, "!"},
		{token.INT, "10"},
		{token.EQUAL, "=="},
		{token.INT, "10"},
		{token.INT, "10"},
		{token.NEQUAL, "!="},
		{token.INT, "9"},
		{token.EOF, ""},
	}

	l := New(input)
	for i, tt := range tests {
		tok := l.NextToken()
		assert.Equal(t, tt.expectedType, tok.Type, "[%d]: %v %v", i, tok.Type, tt.expectedType)
		assert.Equal(t, tt.expectedLiteral, tok.Literal, "[%d]: %v %v", i, tok.Literal, tt.expectedLiteral)
	}
}