# mirror of https://github.com/kemko/liquid.git
# synced 2026-01-01 15:55:40 +03:00
# 63 lines | 2.0 KiB | Ruby
# frozen_string_literal: true

require 'test_helper'

# Unit tests for the tokenizer produced by Liquid::ParseContext.
# Exercises plain-text, variable ({{ }}) and tag ({% %}) tokenization,
# plus per-token line-number tracking.
class TokenizerTest < Minitest::Test
  def test_tokenize_strings
    assert_equal([' '], tokenize(' '))
    assert_equal(['hello world'], tokenize('hello world'))
  end

  def test_tokenize_variables
    assert_equal(['{{funk}}'], tokenize('{{funk}}'))
    assert_equal([' ', '{{funk}}', ' '], tokenize(' {{funk}} '))
    assert_equal([' ', '{{funk}}', ' ', '{{so}}', ' ', '{{brother}}', ' '], tokenize(' {{funk}} {{so}} {{brother}} '))
    assert_equal([' ', '{{ funk }}', ' '], tokenize(' {{ funk }} '))
  end

  def test_tokenize_blocks
    assert_equal(['{%comment%}'], tokenize('{%comment%}'))
    assert_equal([' ', '{%comment%}', ' '], tokenize(' {%comment%} '))

    assert_equal([' ', '{%comment%}', ' ', '{%endcomment%}', ' '], tokenize(' {%comment%} {%endcomment%} '))
    assert_equal([' ', '{% comment %}', ' ', '{% endcomment %}', ' '], tokenize(" {% comment %} {% endcomment %} "))
  end

  def test_calculate_line_numbers_per_token_with_profiling
    assert_equal([1], tokenize_line_numbers("{{funk}}"))
    assert_equal([1, 1, 1], tokenize_line_numbers(" {{funk}} "))
    assert_equal([1, 2, 2], tokenize_line_numbers("\n{{funk}}\n"))
    assert_equal([1, 1, 3], tokenize_line_numbers(" {{\n funk \n}} "))
  end

  private

  # Builds a tokenizer through the parse context so the test exercises
  # whichever tokenizer implementation the context provides.
  def new_tokenizer(source, parse_context: Liquid::ParseContext.new, start_line_number: nil)
    parse_context.new_tokenizer(source, start_line_number: start_line_number)
  end

  # Drains every token from the tokenizer and returns them as an array.
  def tokenize(source)
    tokenizer = new_tokenizer(source)
    collected = []
    loop do
      # shift is private in Liquid::C::Tokenizer, since it is only for unit testing
      token = tokenizer.send(:shift)
      break if token.nil?

      collected << token
    end
    collected
  end

  # Returns the line number reported for each token, in token order.
  def tokenize_line_numbers(source)
    tokenizer = new_tokenizer(source, start_line_number: 1)
    numbers = []
    # The line number must be read *before* shifting: shift advances the
    # tokenizer past the token whose position we want to record.
    current = tokenizer.line_number
    while tokenizer.send(:shift)
      numbers << current
      current = tokenizer.line_number
    end
    numbers
  end
end