Mirror of https://github.com/kemko/liquid.git (synced 2026-01-07 02:35:40 +03:00)
Faster token creation, hopefully.
@@ -2,12 +2,9 @@ require "strscan"
 module Liquid
   class Token
     attr_accessor :type, :contents
-    def initialize(*args)
-      @type, @contents = args
-    end
-
-    def self.[](*args)
-      Token.new(*args)
+    def initialize(type, contents = nil)
+      @type = type
+      @contents = contents
     end
 
     def inspect
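The hunk above is the whole trick: `initialize(*args)` allocates a fresh argument Array on every call, while explicit positional parameters do not, and dropping `self.[]` removes an extra method hop at each call site. A rough way to check the claim with the stdlib Benchmark module (the class names here are hypothetical stand-ins, not code from this commit):

    require "benchmark"

    # Stand-in classes isolating the two initializer styles.
    class SplatToken
      attr_accessor :type, :contents
      def initialize(*args)
        @type, @contents = args    # builds a throwaway Array per call
      end
    end

    class PositionalToken
      attr_accessor :type, :contents
      def initialize(type, contents = nil)
        @type = type               # no intermediate Array
        @contents = contents
      end
    end

    n = 1_000_000
    Benchmark.bm(11) do |x|
      x.report("splat")      { n.times { SplatToken.new(:id, "hi") } }
      x.report("positional") { n.times { PositionalToken.new(:id, "hi") } }
    end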
@@ -52,7 +49,7 @@ module Liquid
       loop do
         tok = next_token
         unless tok
-          @output << Token[:end_of_string]
+          @output << Token.new(:end_of_string)
           return @output
         end
         @output << tok
@@ -64,12 +61,12 @@ module Liquid
       return if @ss.eos?
 
       case
-      when t = @ss.scan(COMPARISON_OPERATOR) then Token[:comparison, t]
-      when t = @ss.scan(SINGLE_STRING_LITERAL) then Token[:string, t]
-      when t = @ss.scan(DOUBLE_STRING_LITERAL) then Token[:string, t]
-      when t = @ss.scan(FLOAT_LITERAL) then Token[:float, t]
-      when t = @ss.scan(INTEGER_LITERAL) then Token[:integer, t]
-      when t = @ss.scan(IDENTIFIER) then Token[:id, t]
+      when t = @ss.scan(COMPARISON_OPERATOR) then Token.new(:comparison, t)
+      when t = @ss.scan(SINGLE_STRING_LITERAL) then Token.new(:string, t)
+      when t = @ss.scan(DOUBLE_STRING_LITERAL) then Token.new(:string, t)
+      when t = @ss.scan(FLOAT_LITERAL) then Token.new(:float, t)
+      when t = @ss.scan(INTEGER_LITERAL) then Token.new(:integer, t)
+      when t = @ss.scan(IDENTIFIER) then Token.new(:id, t)
       else
         lex_specials
       end
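For readers new to StringScanner, `scan` either consumes and returns the matched substring or returns nil without advancing, so the `when t = @ss.scan(...)` chain above is ordered dispatch: the first pattern that matches wins and its match lands in `t`. A self-contained sketch of the pattern with stand-in regexes (the real COMPARISON_OPERATOR, FLOAT_LITERAL, etc. constants are not shown in this diff):

    require "strscan"

    ss = StringScanner.new("hi 5.0")
    until ss.eos?
      ss.skip(/\s+/)                          # eat whitespace between tokens
      tok = case
            when t = ss.scan(/\d+\.\d+/)      then [:float, t]    # stand-in for FLOAT_LITERAL
            when t = ss.scan(/\d+/)           then [:integer, t]  # stand-in for INTEGER_LITERAL
            when t = ss.scan(/[a-z_][\w!?]*/) then [:id, t]       # stand-in for IDENTIFIER
            end
      break unless tok   # a real lexer raises SyntaxError here, as lex_specials does
      p tok              # prints [:id, "hi"], then [:float, "5.0"]
    end

Note that FLOAT must be tried before INTEGER, here as in the diff, or "5.0" would lex as 5 and then choke on the dot.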
@@ -79,7 +76,7 @@ module Liquid
     def lex_specials
       c = @ss.getch
       if s = SPECIALS[c]
-        return Token[s,c]
+        return Token.new(s,c)
       end
 
       raise SyntaxError, "Unexpected character #{c}."
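SPECIALS itself sits outside these hunks. Judging from the expectations in the lexer tests below, it is presumably a one-character-to-token-type lookup table along these lines (an inference, not code from this commit):

    # Hypothetical reconstruction of the SPECIALS table, inferred from
    # the token types exercised in the tests below.
    SPECIALS = {
      '|' => :pipe,
      '.' => :dot,
      ':' => :colon,
      ',' => :comma,
      '[' => :open_square,
      ']' => :close_square,
    }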
@@ -5,39 +5,39 @@ class LexerTest < Test::Unit::TestCase
 
   def test_strings
     tokens = Lexer.new(%! 'this is a test""' "wat 'lol'"!).tokenize
-    assert_equal [Token[:string,%!'this is a test""'!], Token[:string, %!"wat 'lol'"!], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:string,%!'this is a test""'!), Token.new(:string, %!"wat 'lol'"!), Token.new(:end_of_string)], tokens
   end
 
   def test_integer
     tokens = Lexer.new('hi 50').tokenize
-    assert_equal [Token[:id,'hi'], Token[:integer, '50'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:id,'hi'), Token.new(:integer, '50'), Token.new(:end_of_string)], tokens
   end
 
   def test_float
     tokens = Lexer.new('hi 5.0').tokenize
-    assert_equal [Token[:id,'hi'], Token[:float, '5.0'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:id,'hi'), Token.new(:float, '5.0'), Token.new(:end_of_string)], tokens
   end
 
   def test_comparison
     tokens = Lexer.new('== <> contains').tokenize
-    assert_equal [Token[:comparison,'=='], Token[:comparison, '<>'], Token[:comparison, 'contains'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:comparison,'=='), Token.new(:comparison, '<>'), Token.new(:comparison, 'contains'), Token.new(:end_of_string)], tokens
   end
 
   def test_specials
     tokens = Lexer.new('| .:').tokenize
-    assert_equal [Token[:pipe, '|'], Token[:dot, '.'], Token[:colon, ':'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:pipe, '|'), Token.new(:dot, '.'), Token.new(:colon, ':'), Token.new(:end_of_string)], tokens
     tokens = Lexer.new('[,]').tokenize
-    assert_equal [Token[:open_square, '['], Token[:comma, ','], Token[:close_square, ']'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:open_square, '['), Token.new(:comma, ','), Token.new(:close_square, ']'), Token.new(:end_of_string)], tokens
   end
 
   def test_fancy_identifiers
     tokens = Lexer.new('hi! five?').tokenize
-    assert_equal [Token[:id,'hi!'], Token[:id, 'five?'], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:id,'hi!'), Token.new(:id, 'five?'), Token.new(:end_of_string)], tokens
   end
 
   def test_whitespace
     tokens = Lexer.new("five|\n\t ==").tokenize
-    assert_equal [Token[:id,'five'], Token[:pipe, '|'], Token[:comparison, '=='], Token[:end_of_string]], tokens
+    assert_equal [Token.new(:id,'five'), Token.new(:pipe, '|'), Token.new(:comparison, '=='), Token.new(:end_of_string)], tokens
   end
 
   def test_unexpected_character
@@ -48,8 +48,8 @@ class LexerTest < Test::Unit::TestCase
 
   def test_next_token
     l = Lexer.new('hi 5.0')
-    assert_equal Token[:id, 'hi'], l.next_token
-    assert_equal Token[:float, '5.0'], l.next_token
+    assert_equal Token.new(:id, 'hi'), l.next_token
+    assert_equal Token.new(:float, '5.0'), l.next_token
     assert_nil l.next_token
   end
 end
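One detail the tests lean on: `assert_equal Token.new(...), l.next_token` only passes if Token defines value equality, since Test::Unit compares with `==`. The definition is not in the hunks above; it presumably looks something like this (hypothetical sketch):

    class Token
      # Value equality so assert_equal can compare freshly built tokens
      # against lexer output. Hypothetical: the real definition lives
      # outside the hunks shown in this commit.
      def ==(other)
        other.is_a?(Token) && type == other.type && contents == other.contents
      end
    end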