Remove the Token class from the lexer in favour of less smart but faster arrays

Author: Harry Brundage
Date: 2013-07-26 15:14:01 -04:00
parent 1b43bf5686
commit bacacf2fd0
4 changed files with 32 additions and 62 deletions
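The change itself is easy to picture: instead of wrapping every lexeme in a small object, the lexer now emits plain two-element arrays, which are cheaper to allocate and compare with == out of the box. A minimal before/after sketch, assuming the removed Token class behaved roughly like a Struct (the definition below is an illustration, not the actual code being deleted):

    # Hypothetical stand-in for the removed class; the real definition may differ.
    Token = Struct.new(:type, :contents)

    old_style = Token[:id, 'hi']   # before: one small object allocated per lexeme
    new_style = [:id, 'hi']        # after: a bare array with the same shape

    type, contents = new_style     # destructuring reads the same either way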


@@ -5,39 +5,39 @@ class LexerTest < Test::Unit::TestCase
   def test_strings
     tokens = Lexer.new(%! 'this is a test""' "wat 'lol'"!).tokenize
-    assert_equal [Token[:string,%!'this is a test""'!], Token[:string, %!"wat 'lol'"!], Token[:end_of_string]], tokens
+    assert_equal [[:string,%!'this is a test""'!], [:string, %!"wat 'lol'"!], [:end_of_string]], tokens
   end

   def test_integer
     tokens = Lexer.new('hi 50').tokenize
-    assert_equal [Token[:id,'hi'], Token[:integer, '50'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi'], [:integer, '50'], [:end_of_string]], tokens
   end

   def test_float
     tokens = Lexer.new('hi 5.0').tokenize
-    assert_equal [Token[:id,'hi'], Token[:float, '5.0'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi'], [:float, '5.0'], [:end_of_string]], tokens
   end

   def test_comparison
     tokens = Lexer.new('== <> contains').tokenize
-    assert_equal [Token[:comparison,'=='], Token[:comparison, '<>'], Token[:comparison, 'contains'], Token[:end_of_string]], tokens
+    assert_equal [[:comparison,'=='], [:comparison, '<>'], [:comparison, 'contains'], [:end_of_string]], tokens
   end

   def test_specials
     tokens = Lexer.new('| .:').tokenize
-    assert_equal [Token[:pipe, '|'], Token[:dot, '.'], Token[:colon, ':'], Token[:end_of_string]], tokens
+    assert_equal [[:pipe, '|'], [:dot, '.'], [:colon, ':'], [:end_of_string]], tokens
     tokens = Lexer.new('[,]').tokenize
-    assert_equal [Token[:open_square, '['], Token[:comma, ','], Token[:close_square, ']'], Token[:end_of_string]], tokens
+    assert_equal [[:open_square, '['], [:comma, ','], [:close_square, ']'], [:end_of_string]], tokens
   end

   def test_fancy_identifiers
     tokens = Lexer.new('hi! five?').tokenize
-    assert_equal [Token[:id,'hi!'], Token[:id, 'five?'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi!'], [:id, 'five?'], [:end_of_string]], tokens
   end

   def test_whitespace
     tokens = Lexer.new("five|\n\t ==").tokenize
-    assert_equal [Token[:id,'five'], Token[:pipe, '|'], Token[:comparison, '=='], Token[:end_of_string]], tokens
+    assert_equal [[:id,'five'], [:pipe, '|'], [:comparison, '=='], [:end_of_string]], tokens
   end

   def test_unexpected_character
@@ -48,8 +48,8 @@ class LexerTest < Test::Unit::TestCase
   def test_next_token
     l = Lexer.new('hi 5.0')
-    assert_equal Token[:id, 'hi'], l.next_token
-    assert_equal Token[:float, '5.0'], l.next_token
+    assert_equal [:id, 'hi'], l.next_token
+    assert_equal [:float, '5.0'], l.next_token
     assert_nil l.next_token
   end
 end