Build the tokenizer through the parse context for liquid-c

Dylan Thacker-Smith
2021-01-07 13:16:53 -05:00
parent 42b6c07cd0
commit 0e51931b52
4 changed files with 18 additions and 10 deletions

View File

@@ -99,7 +99,9 @@ module Liquid
     end
 
     private def parse_liquid_tag(markup, parse_context)
-      liquid_tag_tokenizer = Tokenizer.new(markup, line_number: parse_context.line_number, for_liquid_tag: true)
+      liquid_tag_tokenizer = parse_context.new_tokenizer(
+        markup, start_line_number: parse_context.line_number, for_liquid_tag: true
+      )
       parse_for_liquid_tag(liquid_tag_tokenizer, parse_context) do |end_tag_name, _end_tag_markup|
         if end_tag_name
           BlockBody.unknown_tag_in_liquid_tag(end_tag_name, parse_context)

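For context (not part of the diff), a minimal usage sketch of the path this hunk touches: parsing a {% liquid %} tag goes through parse_liquid_tag, which now obtains its tokenizer from the parse context instead of constructing Tokenizer directly. The snippet only uses the public Liquid::Template API:

    # Usage sketch, not part of the commit: exercises the {% liquid %} tag
    # parse path that parse_liquid_tag handles.
    require "liquid"

    template = Liquid::Template.parse(<<~SOURCE)
      {% liquid
        assign name = "world"
        echo "Hello, " | append: name
      %}
    SOURCE

    puts template.render #=> Hello, world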
View File

@@ -23,6 +23,10 @@ module Liquid
       Liquid::BlockBody.new
     end
 
+    def new_tokenizer(markup, start_line_number: nil, for_liquid_tag: false)
+      Tokenizer.new(markup, line_number: start_line_number, for_liquid_tag: for_liquid_tag)
+    end
+
     def parse_expression(markup)
       Expression.parse(markup)
     end

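The point of this hook (a hedged sketch, not from this commit): an extension such as liquid-c can replace the tokenizer for every parse by overriding this single method rather than patching each call site. The module and the NativeTokenizer class below are purely illustrative, not liquid-c's actual API:

    # Hypothetical override, for illustration only.
    require "liquid"

    module NativeTokenizerHook
      def new_tokenizer(markup, start_line_number: nil, for_liquid_tag: false)
        # NativeTokenizer is a made-up stand-in for a C-backed tokenizer.
        NativeTokenizer.new(markup, start_line_number, for_liquid_tag)
      end
    end

    Liquid::ParseContext.prepend(NativeTokenizerHook)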
View File

@@ -107,7 +107,8 @@ module Liquid
     # Returns self for easy chaining
     def parse(source, options = {})
       parse_context = configure_options(options)
-      @root = Document.parse(tokenize(source), parse_context)
+      tokenizer = parse_context.new_tokenizer(source, start_line_number: @line_numbers && 1)
+      @root = Document.parse(tokenizer, parse_context)
       self
     end
@@ -223,10 +224,6 @@ module Liquid
       parse_context
     end
 
-    def tokenize(source)
-      Tokenizer.new(source, @line_numbers)
-    end
-
     def apply_options_to_context(context, options)
       context.add_filters(options[:filters]) if options[:filters]
       context.global_filter = options[:global_filter] if options[:global_filter]

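From the caller's side nothing changes (a small usage sketch, not part of the commit): Liquid::Template.parse still accepts line_numbers: true, and that option now reaches the tokenizer as its starting line number via parse_context.new_tokenizer:

    require "liquid"

    # line_numbers: true makes @line_numbers truthy, so the tokenizer is
    # created with start_line_number: 1 and errors keep their line numbers.
    template = Liquid::Template.parse("{{ greeting }}, world", line_numbers: true)
    puts template.render("greeting" => "Hello") #=> Hello, world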
View File

@@ -32,21 +32,26 @@ class TokenizerTest < Minitest::Test
   private
 
+  def new_tokenizer(source, parse_context: Liquid::ParseContext.new, start_line_number: nil)
+    parse_context.new_tokenizer(source, start_line_number: start_line_number)
+  end
+
   def tokenize(source)
-    tokenizer = Liquid::Tokenizer.new(source)
+    tokenizer = new_tokenizer(source)
     tokens = []
-    while (t = tokenizer.shift)
+    # shift is private in Liquid::C::Tokenizer, since it is only for unit testing
+    while (t = tokenizer.send(:shift))
       tokens << t
     end
     tokens
   end
 
   def tokenize_line_numbers(source)
-    tokenizer = Liquid::Tokenizer.new(source, true)
+    tokenizer = new_tokenizer(source, start_line_number: 1)
     line_numbers = []
     loop do
       line_number = tokenizer.line_number
-      if tokenizer.shift
+      if tokenizer.send(:shift)
         line_numbers << line_number
       else
         break