Merge pull request #1380 from Shopify/pz-serialize-compat

Fixes for serialization
Peter Zhu (committed by GitHub)
2021-01-05 14:10:54 -05:00
4 changed files with 18 additions and 13 deletions
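
For orientation, the hunks below touch the parse step of what appear to be the case, for, and if tag implementations, plus Template#parse itself. A minimal sketch of the public API that exercises those code paths, assuming the standard Liquid gem (the template text and variable names are made up for the example):

    require 'liquid'

    source = <<~LIQUID
      {% if product %}
        {% case product %}{% when "shirt" %}Shirt{% else %}Other{% endcase %}
      {% endif %}
      {% for item in items %}{{ item }} {% else %}no items{% endfor %}
    LIQUID

    # Liquid::Template.parse goes through the Template#parse method changed below.
    template = Liquid::Template.parse(source)
    puts template.render("product" => "shirt", "items" => [1, 2, 3])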

@@ -21,7 +21,7 @@ module Liquid
     def parse(tokens)
       body = case_body = new_body
       body = @blocks.last.attachment while parse_body(body, tokens)
-      @blocks.each do |condition|
+      @blocks.reverse_each do |condition|
         body = condition.attachment
         unless body.frozen?
           body.remove_blank_strings if blank?
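
The only change in this hunk (and in the similar if-tag hunk further down) is each becoming reverse_each, so the conditions are visited last-to-first. A plain-Ruby sketch of the difference in iteration order, using a hypothetical stand-in array for @blocks:

    conditions = [:first_when, :second_when, :else_condition]  # hypothetical stand-ins

    conditions.each         { |c| print c, " " }  # first_when second_when else_condition
    puts
    conditions.reverse_each { |c| print c, " " }  # else_condition second_when first_when
    puts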

@@ -63,11 +63,11 @@ module Liquid
         parse_body(@else_block, tokens)
       end
       if blank?
-        @for_block.remove_blank_strings
         @else_block&.remove_blank_strings
+        @for_block.remove_blank_strings
       end
-      @for_block.freeze
       @else_block&.freeze
+      @for_block.freeze
     end

     def nodelist
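
This hunk only swaps the order in which the two bodies are cleaned up and frozen, handling @else_block before @for_block; the cleanup itself is unchanged. A small sketch of the blank-body behaviour these calls implement, assuming the standard Liquid gem (the outputs shown are the expected ones, not taken from the diff):

    require 'liquid'

    # A for/else whose bodies contain only whitespace counts as blank, so the
    # whitespace is stripped (remove_blank_strings) and nothing is rendered.
    blank_loop = Liquid::Template.parse("{% for x in (1..3) %}   {% else %}   {% endfor %}")
    p blank_loop.render          # expected => ""

    # A body with visible content is not blank and renders normally.
    loop_with_body = Liquid::Template.parse("{% for x in (1..3) %}{{ x }}{% endfor %}")
    p loop_with_body.render      # expected => "123"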

@@ -31,7 +31,7 @@ module Liquid
     def parse(tokens)
       while parse_body(@blocks.last.attachment, tokens)
       end
-      @blocks.each do |block|
+      @blocks.reverse_each do |block|
         block.attachment.remove_blank_strings if blank?
         block.attachment.freeze
       end
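
Same each-to-reverse_each swap as in the first hunk. The surrounding lines also show why remove_blank_strings has to run before freeze: once a node list is frozen it rejects further mutation. A plain-Ruby sketch, with an array standing in for a parsed block body:

    body = ["  ", "content", "  "]  # hypothetical stand-in for a block's node list
    body.delete("  ")               # fine: blank strings removed before freezing
    body.freeze
    body << "more"                  # raises FrozenError: can't modify frozen Array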

@@ -106,16 +106,8 @@ module Liquid
     # Parse source code.
     # Returns self for easy chaining
     def parse(source, options = {})
-      if (profiling = options[:profile])
-        raise "Profiler not loaded, require 'liquid/profiler' first" unless defined?(Liquid::Profiler)
-      end
-
-      @options = options
-      @profiling = profiling
-      @line_numbers = options[:line_numbers] || profiling
-      parse_context = options.is_a?(ParseContext) ? options : ParseContext.new(options)
+      parse_context = configure_options(options)
       @root = Document.parse(tokenize(source), parse_context)
-      @warnings = parse_context.warnings
       self
     end
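
The option handling that used to live inline in parse is folded into a configure_options helper (added in the next hunk); callers of the public API see no difference. A usage sketch, assuming only the public Liquid API and the option names visible in the diff (:line_numbers, :profile):

    require 'liquid'

    template = Liquid::Template.new
    template.parse("Hello {{ name }}!", line_numbers: true)  # now routed through configure_options
    puts template.render("name" => "world")                  # => "Hello world!"

    # Passing profile: true still requires the profiler to be loaded first,
    # otherwise the guard above raises:
    #   require 'liquid/profiler'
    #   template.parse("Hello {{ name }}!", profile: true)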
@@ -218,6 +210,19 @@ module Liquid
     private

+    def configure_options(options)
+      if (profiling = options[:profile])
+        raise "Profiler not loaded, require 'liquid/profiler' first" unless defined?(Liquid::Profiler)
+      end
+
+      @options = options
+      @profiling = profiling
+      @line_numbers = options[:line_numbers] || @profiling
+      parse_context = options.is_a?(ParseContext) ? options : ParseContext.new(options)
+      @warnings = parse_context.warnings
+      parse_context
+    end
+
     def tokenize(source)
       Tokenizer.new(source, @line_numbers)
     end