Mirror of https://github.com/kemko/liquid.git, synced 2026-01-02 00:05:42 +03:00

Compare commits: parse-cont...ruby-3-ci (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | cc08579185 | |
| | 3c499d0241 | |
| | e71e53ffb5 | |
| | 260c863e23 | |
.github/workflows/liquid.yml (vendored): 5 changed lines
```diff
@@ -6,9 +6,8 @@ jobs:
     strategy:
       matrix:
         entry:
-          - { ruby: 2.5, allowed-failure: false }
-          - { ruby: 2.6, allowed-failure: false }
-          - { ruby: 2.7, allowed-failure: false }
+          - { ruby: 2.5, allowed-failure: false } # minimum supported
+          - { ruby: 3.0, allowed-failure: false } # latest
           - { ruby: ruby-head, allowed-failure: true }
     name: test (${{ matrix.entry.ruby }})
     steps:
```
```diff
@@ -99,7 +99,9 @@ module Liquid
     end

     private def parse_liquid_tag(markup, parse_context)
-      liquid_tag_tokenizer = Tokenizer.new(markup, line_number: parse_context.line_number, for_liquid_tag: true)
+      liquid_tag_tokenizer = parse_context.new_tokenizer(
+        markup, start_line_number: parse_context.line_number, for_liquid_tag: true
+      )
       parse_for_liquid_tag(liquid_tag_tokenizer, parse_context) do |end_tag_name, _end_tag_markup|
         if end_tag_name
           BlockBody.unknown_tag_in_liquid_tag(end_tag_name, parse_context)
```
```diff
@@ -23,6 +23,10 @@ module Liquid
       Liquid::BlockBody.new
     end

+    def new_tokenizer(markup, start_line_number: nil, for_liquid_tag: false)
+      Tokenizer.new(markup, line_number: start_line_number, for_liquid_tag: for_liquid_tag)
+    end
+
     def parse_expression(markup)
       Expression.parse(markup)
     end
```
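The hunk above adds `ParseContext#new_tokenizer`, so callers build tokenizers through the parse context instead of instantiating `Liquid::Tokenizer` directly. A minimal usage sketch, assuming only the signature shown in the hunk (the template strings are illustrative):

```ruby
require 'liquid'

parse_context = Liquid::ParseContext.new

# Regular template source: tokens are the {{ ... }} / {% ... %} delimited pieces.
tokenizer = parse_context.new_tokenizer("{{ greeting }} world", start_line_number: 1)

# Markup of a `{% liquid %}` block: the tokenizer splits it line by line instead.
liquid_tag_tokenizer = parse_context.new_tokenizer("echo greeting", for_liquid_tag: true)
```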
```diff
@@ -107,7 +107,8 @@ module Liquid
     # Returns self for easy chaining
     def parse(source, options = {})
       parse_context = configure_options(options)
-      @root = Document.parse(tokenize(source), parse_context)
+      tokenizer = parse_context.new_tokenizer(source, start_line_number: @line_numbers && 1)
+      @root = Document.parse(tokenizer, parse_context)
       self
     end

```
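In the `Template#parse` hunk above, `@line_numbers && 1` passes `1` as the starting line only when line numbers are enabled; otherwise the tokenizer receives a falsy value and no starting line is set. A small illustration of the idiom (the method name is made up):

```ruby
# `&&` returns its left operand when that operand is falsy,
# otherwise it returns the right operand.
def start_line_number(line_numbers_enabled)
  line_numbers_enabled && 1
end

start_line_number(true)  # => 1
start_line_number(false) # => false (falsy: no starting line)
start_line_number(nil)   # => nil   (same effect)
```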
```diff
@@ -223,10 +224,6 @@ module Liquid
       parse_context
     end

-    def tokenize(source)
-      Tokenizer.new(source, @line_numbers)
-    end
-
     def apply_options_to_context(context, options)
       context.add_filters(options[:filters]) if options[:filters]
       context.global_filter = options[:global_filter] if options[:global_filter]
```
```diff
@@ -73,10 +73,14 @@ class ThemeRunner

   private

+  def render_layout(template, layout, assigns)
+    assigns['content_for_layout'] = template.render!(assigns)
+    layout&.render!(assigns)
+  end
+
   def compile_and_render(template, layout, assigns, page_template, template_file)
-    compiled_test = compile_test(template, layout, assigns, page_template, template_file)
-    assigns['content_for_layout'] = compiled_test[:tmpl].render!(assigns)
-    compiled_test[:layout].render!(assigns) if layout
+    compiled_test = compile_test(template, layout, assigns, page_template, template_file)
+    render_layout(compiled_test[:tmpl], compiled_test[:layout], compiled_test[:assigns])
   end

   def compile_all_tests
```
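The benchmark refactor above extracts `render_layout`, whose `layout&.render!(assigns)` uses Ruby's safe-navigation operator: runs without a layout get `nil` back instead of raising `NoMethodError`. A self-contained sketch of that behavior; `FakeTemplate` stands in for the real compiled templates and is not part of the benchmark:

```ruby
# Minimal object exposing the same render! interface used above.
class FakeTemplate
  def initialize(&block)
    @block = block
  end

  def render!(assigns)
    @block.call(assigns)
  end
end

def render_layout(template, layout, assigns)
  assigns['content_for_layout'] = template.render!(assigns)
  layout&.render!(assigns) # safe navigation: nil when there is no layout
end

page   = FakeTemplate.new { |a| "page for #{a['shop']}" }
layout = FakeTemplate.new { |a| "<html>#{a['content_for_layout']}</html>" }

render_layout(page, nil, 'shop' => 'example')    # => nil
render_layout(page, layout, 'shop' => 'example') # => "<html>page for example</html>"
```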
```diff
@@ -461,6 +461,7 @@ class ContextTest < Minitest::Test
   end

   def test_interrupt_avoids_object_allocations
+    @context.interrupt? # ruby 3.0.0 allocates on the first call
     assert_no_object_allocations do
       @context.interrupt?
     end
```
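The test above warms up `Context#interrupt?` before asserting, since Ruby 3.0.0 allocates on the first call. The `assert_no_object_allocations` helper comes from the test suite and is not shown in this diff; a possible shape for such a helper, sketched with `GC.stat` purely to illustrate the technique:

```ruby
# Compare the total-allocated-objects counter before and after running the block.
def assert_no_object_allocations
  before = GC.stat(:total_allocated_objects)
  yield
  after = GC.stat(:total_allocated_objects)
  raise "block allocated #{after - before} object(s)" if after != before
end

assert_no_object_allocations { 1 + 1 }       # passes: small-integer math allocates nothing
# assert_no_object_allocations { [1, 2, 3] } # would raise: a new Array is allocated
```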
```diff
@@ -32,21 +32,26 @@ class TokenizerTest < Minitest::Test

   private

+  def new_tokenizer(source, parse_context: Liquid::ParseContext.new, start_line_number: nil)
+    parse_context.new_tokenizer(source, start_line_number: start_line_number)
+  end
+
   def tokenize(source)
-    tokenizer = Liquid::Tokenizer.new(source)
+    tokenizer = new_tokenizer(source)
     tokens = []
-    while (t = tokenizer.shift)
+    # shift is private in Liquid::C::Tokenizer, since it is only for unit testing
+    while (t = tokenizer.send(:shift))
       tokens << t
     end
     tokens
   end

   def tokenize_line_numbers(source)
-    tokenizer = Liquid::Tokenizer.new(source, true)
+    tokenizer = new_tokenizer(source, start_line_number: 1)
     line_numbers = []
     loop do
       line_number = tokenizer.line_number
-      if tokenizer.shift
+      if tokenizer.send(:shift)
         line_numbers << line_number
       else
         break
```
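The tests above switch from `tokenizer.shift` to `tokenizer.send(:shift)` because `shift` is private on `Liquid::C::Tokenizer`, and `Object#send` ignores method visibility. A tiny standalone illustration (the class is made up for the example):

```ruby
class PrivateShift
  private

  def shift
    :token
  end
end

tokenizer = PrivateShift.new
tokenizer.send(:shift) # => :token
# tokenizer.shift      # NoMethodError: private method `shift' called
```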