mirror of https://github.com/kemko/liquid.git (synced 2026-01-04 17:25:41 +03:00)
Inline Parser#next_token to avoid method dispatch
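The change removes one Ruby method call (next_token) per token from the hot tokenize loop. Below is a minimal standalone sketch of the overhead being avoided; it is not taken from this commit: the helper names, the toy IDENTIFIER pattern, and the benchmark setup are hypothetical, and a bare StringScanner stands in for the lexer's @ss.

    require 'benchmark'
    require 'strscan'

    IDENTIFIER = /[a-zA-Z_][\w-]*/

    # One extra Ruby method call per token.
    def next_token(ss)
      ss.skip(/\s*/)
      return if ss.eos?
      [:id, ss.scan(IDENTIFIER)]
    end

    def tokenize_with_dispatch(input)
      ss  = StringScanner.new(input)
      out = []
      while (tok = next_token(ss))
        out << tok
      end
      out
    end

    # Same logic with the body of next_token pasted into the loop.
    def tokenize_inlined(input)
      ss  = StringScanner.new(input)
      out = []
      loop do
        ss.skip(/\s*/)
        break if ss.eos?
        out << [:id, ss.scan(IDENTIFIER)]
      end
      out
    end

    input = (%w[foo bar baz] * 20_000).join(' ')
    Benchmark.bm(8) do |x|
      x.report('dispatch') { 50.times { tokenize_with_dispatch(input) } }
      x.report('inlined')  { 50.times { tokenize_inlined(input) } }
    end

The trade-off is the usual one for inlining: the scanning logic that previously lived behind a named method now sits directly in the loop body, as the diff below shows.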
@@ -23,7 +23,25 @@ module Liquid
       @output = []
 
       loop do
-        tok = next_token
+        @ss.skip(/\s*/)
+
+        tok = case
+        when @ss.eos? then nil
+        when t = @ss.scan(COMPARISON_OPERATOR) then [:comparison, t]
+        when t = @ss.scan(SINGLE_STRING_LITERAL) then [:string, t]
+        when t = @ss.scan(DOUBLE_STRING_LITERAL) then [:string, t]
+        when t = @ss.scan(FLOAT_LITERAL) then [:float, t]
+        when t = @ss.scan(INTEGER_LITERAL) then [:integer, t]
+        when t = @ss.scan(IDENTIFIER) then [:id, t]
+        else
+          c = @ss.getch
+          if s = SPECIALS[c]
+            [s,c]
+          else
+            raise SyntaxError, "Unexpected character #{c}."
+          end
+        end
+
         unless tok
           @output << [:end_of_string]
           return @output
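The inlined branch leans on StringScanner: each @ss.scan(REGEX) consumes and returns a match only at the current scan position, so the first pattern that hits wins and ordering matters (FLOAT_LITERAL is tried before INTEGER_LITERAL so "10.5" is not split). Here is a runnable sketch of the same shape, using stand-in patterns since the real regexp constants are defined elsewhere in lexer.rb; the SPECIALS/getch fallback is omitted.

    require 'strscan'

    # Stand-in patterns for illustration only; the real COMPARISON_OPERATOR,
    # FLOAT_LITERAL, INTEGER_LITERAL and IDENTIFIER live elsewhere in lexer.rb.
    COMPARISON = /==|!=|<>|<=|>=|<|>/
    FLOAT      = /\d+\.\d+/
    INTEGER    = /\d+/
    IDENT      = /[a-zA-Z_][\w-]*\??/

    ss     = StringScanner.new('price >= 10.5')
    tokens = []
    loop do
      ss.skip(/\s*/)
      tok = case
            when ss.eos?                 then nil
            when t = ss.scan(COMPARISON) then [:comparison, t]
            when t = ss.scan(FLOAT)      then [:float, t]   # float before integer, as in the diff
            when t = ss.scan(INTEGER)    then [:integer, t]
            when t = ss.scan(IDENT)      then [:id, t]
            end
      unless tok
        tokens << [:end_of_string]
        break
      end
      tokens << tok
    end
    p tokens
    # => [[:id, "price"], [:comparison, ">="], [:float, "10.5"], [:end_of_string]]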
@@ -31,31 +49,5 @@ module Liquid
         @output << tok
       end
     end
-
-    def next_token
-      @ss.skip(/\s*/)
-      return if @ss.eos?
-
-      case
-      when t = @ss.scan(COMPARISON_OPERATOR) then [:comparison, t]
-      when t = @ss.scan(SINGLE_STRING_LITERAL) then [:string, t]
-      when t = @ss.scan(DOUBLE_STRING_LITERAL) then [:string, t]
-      when t = @ss.scan(FLOAT_LITERAL) then [:float, t]
-      when t = @ss.scan(INTEGER_LITERAL) then [:integer, t]
-      when t = @ss.scan(IDENTIFIER) then [:id, t]
-      else
-        lex_specials
-      end
-    end
-
-    protected
-    def lex_specials
-      c = @ss.getch
-      if s = SPECIALS[c]
-        return [s,c]
-      end
-
-      raise SyntaxError, "Unexpected character #{c}."
-    end
   end
 end
@@ -45,11 +45,4 @@ class LexerTest < Test::Unit::TestCase
       Lexer.new("%").tokenize
     end
   end
-
-  def test_next_token
-    l = Lexer.new('hi 5.0')
-    assert_equal [:id, 'hi'], l.next_token
-    assert_equal [:float, '5.0'], l.next_token
-    assert_nil l.next_token
-  end
 end
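With next_token gone from the lexer's public interface, the test that drove it directly is deleted; the behaviour it checked is still observable through tokenize, which returns the whole token stream terminated by [:end_of_string]. A small usage sketch, assuming the file defining Liquid::Lexer is on the load path (the require path below is a guess, not shown in this diff):

    # The require path is an assumption; adjust to however lexer.rb is loaded in this repo.
    require 'liquid/lexer'

    tokens = Liquid::Lexer.new('hi 5.0').tokenize
    p tokens
    # Based on the tokenize code in the diff above, this should print:
    # [[:id, "hi"], [:float, "5.0"], [:end_of_string]]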