Compare commits


20 Commits

Author SHA1 Message Date
Dylan Thacker-Smith
bb954bce1e Use StringSlice.join on the rendered results. 2014-02-28 14:12:57 -05:00
Dylan Thacker-Smith
cc0276bb97 Add a StringSlice class to use in the nodelist. 2014-02-28 13:16:07 -05:00
Dylan Thacker-Smith
03d586aafe Add convenience methods for getting a struct from a ruby object.
If we are trying to get the struct from something other than self, then we
should make sure to check the class of the object. These util functions
make this easier.
2014-02-28 10:08:55 -05:00
Dylan Thacker-Smith
dc8a34a52f Implement Block#parse_body in C. 2014-02-28 07:47:36 -05:00
Dylan Thacker-Smith
99cebf74bc Rename Block#parse to parse_body since that is how it is being used. 2014-02-27 23:16:11 -05:00
Dylan Thacker-Smith
7eb64886dc Move the parse method out of Tag, only blocks need the body parsed.
The parse method should be renamed to something like parse_body,
since that is how it is used, and no non-block tags were using the
parse method.
2014-02-27 22:31:09 -05:00
Dylan Thacker-Smith
f89046e81f Use super rather than render_all in single block render classes. 2014-02-27 21:38:49 -05:00
Dylan Thacker-Smith
9ee4573ef4 Avoid keeping track of two lists of nodes during parsing. 2014-02-27 20:51:05 -05:00
Dylan Thacker-Smith
a48b4f47f6 Return nil in Document#block_delimiter rather than an empty array.
The block delimiter is normally a string, so nil makes more sense when
there is no delimiter. We also don't want to allocate an array for no
reason.
2014-02-27 20:06:57 -05:00
Dylan Thacker-Smith
72d402837e Remove unused Block#end_tag method.
Although the method is called, it is defined with an empty body and not
overridden to do anything else.
2014-02-27 18:53:18 -05:00
Dylan Thacker-Smith
06bef40527 Fix a missing return warning. 2014-02-27 18:47:55 -05:00
Dylan Thacker-Smith
a48b245e6e Turn on C compiler warnings. 2014-02-27 18:45:57 -05:00
Dylan Thacker-Smith
d4aabda625 Avoid freeing of uninitialized memory.
Thanks to Isha for pointing this out.
2014-02-27 18:32:19 -05:00
Dylan Thacker-Smith
dab6bdfdee Make sure the ext directory is included in the distributed gem. 2014-02-27 15:50:43 -05:00
Dylan Thacker-Smith
8c075fca1f Remove a couple FIXME comments which are only partially a lie.
I added those comments before creating an invalid token type to return the
error. However, we still aren't making use of the token type.
2014-02-27 15:21:57 -05:00
Dylan Thacker-Smith
ea8406e36e Create a Liquid::Tokenizer class in the C extension. 2014-02-27 15:20:22 -05:00
Dylan Thacker-Smith
8bb3bca64a Require the liquid extension when liquid is required. 2014-02-27 14:22:18 -05:00
Dylan Thacker-Smith
5de1082201 Add profile:stackprof rake task. 2014-02-27 11:20:49 -05:00
Dylan Thacker-Smith
7ba02d2811 Use start and end of string rather than line matching in regexes. 2014-02-27 10:07:04 -05:00
Dylan Thacker-Smith
2066676bf4 Add a C extension that doesn't yet do anything. 2014-02-27 09:58:33 -05:00
37 changed files with 754 additions and 144 deletions

.gitignore

@@ -5,3 +5,6 @@ pkg
*.rbc
.rvmrc
.ruby-version
*.bundle
/tmp
Gemfile.lock

Gemfile

@@ -0,0 +1,3 @@
source 'https://rubygems.org'
gemspec


@@ -1,5 +1,6 @@
require 'rake'
require 'rake/testtask'
require 'rake/extensiontask'
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "liquid/version"
@@ -64,6 +65,10 @@ namespace :profile do
ruby "./performance/profile.rb"
end
task :stackprof do
ruby "./performance/stackprof.rb"
end
desc "Run KCacheGrind"
task :grind => :run do
system "qcachegrind /tmp/liquid.rubyprof_calltreeprinter.txt"
@@ -75,3 +80,8 @@ desc "Run example"
task :example do
ruby "-w -d -Ilib example/server/server.rb"
end
Rake::ExtensionTask.new "liquid" do |ext|
ext.lib_dir = "lib/liquid"
end
Rake::Task[:test].prerequisites << :compile


@@ -13,7 +13,7 @@ class LiquidServlet < WEBrick::HTTPServlet::AbstractServlet
def handle(type, req, res)
@request, @response = req, res
@request.path_info =~ /(\w+)$/
@request.path_info =~ /(\w+)\z/
@action = $1 || 'index'
@assigns = send(@action) if respond_to?(@action)

ext/liquid/block.c

@@ -0,0 +1,167 @@
#include "liquid_ext.h"
VALUE cLiquidBlock;
ID intern_assert_missing_delimitation, intern_block_delimiter, intern_is_blank,
intern_new_with_options, intern_tags, intern_unknown_tag, intern_unterminated_tag,
intern_unterminated_variable;
struct liquid_tag
{
char *name, *markup;
long name_length, markup_length;
};
static bool parse_tag(struct liquid_tag *tag, char *token, long token_length)
{
// Strip the surrounding {% and %} tag delimiters
token += 2;
token_length -= 4;
char *end = token + token_length;
while (token < end && isspace(*token))
token++;
tag->name = token;
char c = *token;
while (token < end && (isalnum(c) || c == '_'))
c = *(++token);
tag->name_length = token - tag->name;
if (!tag->name_length) {
memset(tag, 0, sizeof(*tag));
return false;
}
while (token < end && isspace(*token))
token++;
tag->markup = token;
char *last = end - 1;
while (token < last && isspace(*last))
last--;
end = last + 1;
tag->markup_length = end - token;
return true;
}
static VALUE rb_parse_body(VALUE self, VALUE tokenizerObj)
{
struct liquid_tokenizer *tokenizer = LIQUID_TOKENIZER_GET_STRUCT(tokenizerObj);
bool blank = true;
VALUE nodelist = rb_iv_get(self, "@nodelist");
if (nodelist == Qnil) {
nodelist = rb_ary_new();
rb_iv_set(self, "@nodelist", nodelist);
} else {
rb_ary_clear(nodelist);
}
struct token token;
while (true) {
liquid_tokenizer_next(tokenizer, &token);
switch (token.type) {
case TOKEN_NONE:
/*
* Make sure that it's ok to end parsing in the current block.
* Effectively this method will throw an exception unless the current block is
* of type Document
*/
rb_funcall(self, intern_assert_missing_delimitation, 0);
goto done;
case TOKEN_INVALID:
{
VALUE token_obj = rb_str_new(token.str, token.length);
if (token.str[1] == '%')
rb_funcall(self, intern_unterminated_tag, 1, token_obj);
else
rb_funcall(self, intern_unterminated_variable, 1, token_obj);
break;
}
case TOKEN_TAG:
{
struct liquid_tag tag;
if (!parse_tag(&tag, token.str, token.length)) {
// FIXME: provide more appropriate error message
rb_funcall(self, intern_unterminated_tag, 1, rb_str_new(token.str, token.length));
} else {
if (tag.name_length >= 3 && !memcmp(tag.name, "end", 3)) {
VALUE block_delimiter = rb_funcall(self, intern_block_delimiter, 0);
if (TYPE(block_delimiter) == T_STRING &&
tag.name_length == RSTRING_LEN(block_delimiter) &&
!memcmp(tag.name, RSTRING_PTR(block_delimiter), tag.name_length))
{
goto done;
}
}
VALUE tags = rb_funcall(cLiquidTemplate, intern_tags, 0);
Check_Type(tags, T_HASH);
VALUE tag_name = rb_str_new(tag.name, tag.name_length);
VALUE tag_class = rb_hash_lookup(tags, tag_name);
VALUE markup = rb_str_new(tag.markup, tag.markup_length);
if (tag_class != Qnil) {
VALUE options = rb_iv_get(self, "@options");
if (options == Qnil)
options = rb_hash_new();
VALUE new_tag = rb_funcall(tag_class, intern_new_with_options, 4,
tag_name, markup, tokenizerObj, options);
if (blank) {
VALUE blank_block = rb_funcall(new_tag, intern_is_blank, 0);
if (blank_block == Qnil || blank_block == Qfalse)
blank = false;
}
rb_ary_push(nodelist, new_tag);
} else {
rb_funcall(self, intern_unknown_tag, 3, tag_name, markup, tokenizerObj);
/*
* multi-block tags may store the nodelist in a block array on unknown_tag
* then replace @nodelist with a new array. We need to use the new array
* for the block following the tag token.
*/
nodelist = rb_iv_get(self, "@nodelist");
}
}
break;
}
case TOKEN_VARIABLE:
{
VALUE markup = rb_str_new(token.str + 2, token.length - 4);
VALUE options = rb_iv_get(self, "@options");
VALUE new_var = rb_funcall(cLiquidVariable, intern_new, 2, markup, options);
rb_ary_push(nodelist, new_var);
blank = false;
break;
}
case TOKEN_STRING:
rb_ary_push(nodelist, liquid_string_slice_new(token.str, token.length));
if (blank) {
int i;
for (i = 0; i < token.length; i++) {
if (!isspace(token.str[i])) {
blank = false;
break;
}
}
}
break;
}
}
done:
rb_iv_set(self, "@blank", blank ? Qtrue : Qfalse);
return Qnil;
}
void init_liquid_block()
{
intern_assert_missing_delimitation = rb_intern("assert_missing_delimitation!");
intern_block_delimiter = rb_intern("block_delimiter");
intern_is_blank = rb_intern("blank?");
intern_new_with_options = rb_intern("new_with_options");
intern_tags = rb_intern("tags");
intern_unknown_tag = rb_intern("unknown_tag");
intern_unterminated_tag = rb_intern("unterminated_tag");
intern_unterminated_variable = rb_intern("unterminated_variable");
cLiquidBlock = rb_define_class_under(mLiquid, "Block", cLiquidTag);
rb_define_method(cLiquidBlock, "parse_body", rb_parse_body, 1);
}
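For orientation, a minimal sketch of what parse_tag above extracts from a tag token (illustrative only: parse_tag is static to block.c, and the printf/stdio.h usage is assumed):

char token[] = "{% assign foo = 1 %}";
struct liquid_tag tag;
if (parse_tag(&tag, token, (long)(sizeof(token) - 1))) {
    /* name   -> "assign"  (name_length 6)
     * markup -> "foo = 1" (markup_length 7)
     * Both pointers reference the token buffer itself; nothing is copied. */
    printf("%.*s | %.*s\n", (int)tag.name_length, tag.name,
                            (int)tag.markup_length, tag.markup);
}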

ext/liquid/block.h

@@ -0,0 +1,8 @@
#ifndef LIQUID_BLOCK_H
#define LIQUID_BLOCK_H
void init_liquid_block();
extern VALUE cLiquidBlock;
#endif

ext/liquid/extconf.rb

@@ -0,0 +1,3 @@
require 'mkmf'
$CFLAGS << ' -Wall'
create_makefile("liquid/liquid")

ext/liquid/liquid_ext.c

@@ -0,0 +1,18 @@
#include "liquid_ext.h"
VALUE mLiquid;
VALUE cLiquidTemplate, cLiquidTag, cLiquidVariable;
ID intern_new;
void Init_liquid(void)
{
intern_new = rb_intern("new");
mLiquid = rb_define_module("Liquid");
cLiquidTemplate = rb_define_class_under(mLiquid, "Template", rb_cObject);
cLiquidTag = rb_define_class_under(mLiquid, "Tag", rb_cObject);
cLiquidVariable = rb_define_class_under(mLiquid, "Variable", rb_cObject);
init_liquid_tokenizer();
init_liquid_block();
init_liquid_string_slice();
}

ext/liquid/liquid_ext.h

@@ -0,0 +1,17 @@
#ifndef LIQUID_EXT_H
#define LIQUID_EXT_H
#include <stdbool.h>
#include <ctype.h>
#include <ruby.h>
#include "tokenizer.h"
#include "block.h"
#include "slice.h"
#include "utils.h"
extern ID intern_new;
extern VALUE mLiquid;
extern VALUE cLiquidTemplate, cLiquidTag, cLiquidVariable;
#endif

ext/liquid/slice.c

@@ -0,0 +1,167 @@
#include "liquid_ext.h"
VALUE cLiquidStringSlice;
static void mark_slice(void *ptr)
{
if (!ptr)
return;
struct string_slice *slice = ptr;
rb_gc_mark(slice->source);
}
static void free_slice(void *ptr)
{
struct string_slice *slice = ptr;
xfree(slice);
}
VALUE liquid_string_slice_new(const char *str, long length)
{
return rb_funcall(cLiquidStringSlice, intern_new, 3, rb_str_new(str, length), INT2FIX(0), INT2FIX(length));
}
static VALUE rb_allocate(VALUE klass)
{
struct string_slice *slice;
VALUE obj = Data_Make_Struct(klass, struct string_slice, mark_slice, free_slice, slice);
return obj;
}
static VALUE rb_initialize(VALUE self, VALUE source, VALUE offset_value, VALUE length_value)
{
long offset = rb_fix2int(offset_value);
long length = rb_fix2int(length_value);
if (length < 0)
rb_raise(rb_eArgError, "negative string length");
if (offset < 0)
rb_raise(rb_eArgError, "negative string offset");
if (TYPE(source) == T_DATA && RBASIC_CLASS(source) == cLiquidStringSlice) {
struct string_slice *source_slice = DATA_PTR(source);
source = source_slice->source;
offset += source_slice->str - RSTRING_PTR(source);
} else {
source = rb_string_value(&source);
source = rb_str_dup_frozen(source);
}
struct string_slice *slice;
Data_Get_Struct(self, struct string_slice, slice);
slice->source = source;
slice->str = RSTRING_PTR(source) + offset;
slice->length = length;
if (length > RSTRING_LEN(source) - offset)
rb_raise(rb_eArgError, "slice bounds outside source string bounds");
return Qnil;
}
static VALUE rb_slice_to_str(VALUE self)
{
struct string_slice *slice;
Data_Get_Struct(self, struct string_slice, slice);
VALUE source = slice->source;
if (slice->str == RSTRING_PTR(source) && slice->length == RSTRING_LEN(source))
return source;
source = rb_str_new(slice->str, slice->length);
slice->source = source;
slice->str = RSTRING_PTR(source);
return source;
}
static VALUE rb_slice_slice(VALUE self, VALUE offset, VALUE length)
{
return rb_funcall(cLiquidStringSlice, intern_new, 3, self, offset, length);
}
static VALUE rb_slice_length(VALUE self)
{
struct string_slice *slice;
Data_Get_Struct(self, struct string_slice, slice);
return INT2FIX(slice->length);
}
static VALUE rb_slice_equal(VALUE self, VALUE other)
{
struct string_slice *this_slice;
Data_Get_Struct(self, struct string_slice, this_slice);
const char *other_str;
long other_length;
if (TYPE(other) == T_DATA && RBASIC_CLASS(other) == cLiquidStringSlice) {
struct string_slice *other_slice = DATA_PTR(other);
other_str = other_slice->str;
other_length = other_slice->length;
} else {
other = rb_string_value(&other);
other_length = RSTRING_LEN(other);
other_str = RSTRING_PTR(other);
}
bool equal = this_slice->length == other_length && !memcmp(this_slice->str, other_str, other_length);
return equal ? Qtrue : Qfalse;
}
static VALUE rb_slice_inspect(VALUE self)
{
VALUE quoted = rb_str_inspect(rb_slice_to_str(self));
return rb_sprintf("#<Liquid::StringSlice: %.*s>", (int)RSTRING_LEN(quoted), RSTRING_PTR(quoted));
}
static VALUE rb_slice_join(VALUE klass, VALUE ary)
{
ary = rb_ary_to_ary(ary);
long i;
long result_length = 0;
for (i = 0; i < RARRAY_LEN(ary); i++) {
VALUE element = RARRAY_AREF(ary, i);
if (TYPE(element) == T_DATA && RBASIC_CLASS(element) == cLiquidStringSlice) {
struct string_slice *slice = DATA_PTR(element);
result_length += slice->length;
} else if (TYPE(element) == T_STRING) {
result_length += RSTRING_LEN(element);
}
}
VALUE result = rb_str_buf_new(result_length);
for (i = 0; i < RARRAY_LEN(ary); i++) {
VALUE element = RARRAY_AREF(ary, i);
const char *element_string;
long element_length;
if (TYPE(element) == T_DATA && RBASIC_CLASS(element) == cLiquidStringSlice) {
struct string_slice *slice = DATA_PTR(element);
element_string = slice->str;
element_length = slice->length;
} else if (NIL_P(element)) {
continue;
} else {
element = rb_check_string_type(element);
if (NIL_P(element))
continue;
element_string = RSTRING_PTR(element);
element_length = RSTRING_LEN(element);
}
rb_str_buf_cat(result, element_string, element_length);
}
return result;
}
void init_liquid_string_slice()
{
cLiquidStringSlice = rb_define_class_under(mLiquid, "StringSlice", rb_cObject);
rb_define_singleton_method(cLiquidStringSlice, "join", rb_slice_join, 1);
rb_define_alloc_func(cLiquidStringSlice, rb_allocate);
rb_define_method(cLiquidStringSlice, "initialize", rb_initialize, 3);
rb_define_method(cLiquidStringSlice, "==", rb_slice_equal, 1);
rb_define_method(cLiquidStringSlice, "length", rb_slice_length, 0);
rb_define_alias(cLiquidStringSlice, "size", "length");
rb_define_method(cLiquidStringSlice, "slice", rb_slice_slice, 2);
rb_define_method(cLiquidStringSlice, "to_str", rb_slice_to_str, 0);
rb_define_alias(cLiquidStringSlice, "to_s", "to_str");
rb_define_method(cLiquidStringSlice, "inspect", rb_slice_inspect, 0);
}

ext/liquid/slice.h

@@ -0,0 +1,18 @@
#ifndef LIQUID_SLICE_H
#define LIQUID_SLICE_H
extern VALUE cLiquidStringSlice;
struct string_slice {
VALUE source;
const char *str;
long length;
};
VALUE liquid_string_slice_new(const char *str, long length);
void init_liquid_string_slice();
#define STRING_SLICE_GET_STRUCT(obj) ((struct string_slice *)obj_get_data_ptr(obj, cLiquidStringSlice))
#endif
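Because a slice built from another slice re-points into the same frozen source string (see rb_initialize in slice.c above), sub-slicing never copies bytes until to_str materializes a new Ruby string. A hypothetical C check of that invariant, assuming the slice.h declarations above:

/* Hypothetical helper: both slices wrap the same Ruby source string and
 * the inner slice's bytes lie inside the outer slice's range. */
static int slices_share_source(VALUE outer, VALUE inner)
{
    struct string_slice *a = STRING_SLICE_GET_STRUCT(outer);
    struct string_slice *b = STRING_SLICE_GET_STRUCT(inner);
    return a->source == b->source
        && b->str >= a->str
        && b->str + b->length <= a->str + a->length;
}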

ext/liquid/tokenizer.c

@@ -0,0 +1,113 @@
#include "liquid_ext.h"
VALUE cLiquidTokenizer;
static void free_tokenizer(void *ptr)
{
struct liquid_tokenizer *tokenizer = ptr;
xfree(tokenizer);
}
static VALUE rb_allocate(VALUE klass)
{
VALUE obj;
struct liquid_tokenizer *tokenizer;
obj = Data_Make_Struct(klass, struct liquid_tokenizer, NULL, free_tokenizer, tokenizer);
return obj;
}
static VALUE rb_initialize(VALUE self, VALUE source)
{
struct liquid_tokenizer *tokenizer;
source = rb_string_value(&source);
Data_Get_Struct(self, struct liquid_tokenizer, tokenizer);
tokenizer->cursor = RSTRING_PTR(source);
tokenizer->length = RSTRING_LEN(source);
return Qnil;
}
void liquid_tokenizer_next(struct liquid_tokenizer *tokenizer, struct token *token)
{
if (tokenizer->length <= 0) {
memset(token, 0, sizeof(*token));
return;
}
token->type = TOKEN_STRING;
char *cursor = tokenizer->cursor;
char *last = tokenizer->cursor + tokenizer->length - 1;
while (cursor < last) {
if (*cursor++ != '{')
continue;
char c = *cursor++;
if (c != '%' && c != '{')
continue;
if (cursor - tokenizer->cursor > 2) {
token->type = TOKEN_STRING;
cursor -= 2;
goto found;
}
char *incomplete_end = cursor;
token->type = TOKEN_INVALID;
if (c == '%') {
while (cursor < last) {
if (*cursor++ != '%')
continue;
c = *cursor++;
while (c == '%' && cursor <= last)
c = *cursor++;
if (c != '}')
continue;
token->type = TOKEN_TAG;
goto found;
}
cursor = incomplete_end;
goto found;
} else {
while (cursor < last) {
if (*cursor++ != '}')
continue;
if (*cursor++ != '}') {
incomplete_end = cursor - 1;
continue;
}
token->type = TOKEN_VARIABLE;
goto found;
}
cursor = incomplete_end;
goto found;
}
}
cursor = last + 1;
found:
token->str = tokenizer->cursor;
token->length = cursor - tokenizer->cursor;
tokenizer->cursor += token->length;
tokenizer->length -= token->length;
}
static VALUE rb_next(VALUE self)
{
struct liquid_tokenizer *tokenizer;
Data_Get_Struct(self, struct liquid_tokenizer, tokenizer);
struct token token;
liquid_tokenizer_next(tokenizer, &token);
if (token.type == TOKEN_NONE)
return Qnil;
return rb_str_new(token.str, token.length);
}
void init_liquid_tokenizer()
{
cLiquidTokenizer = rb_define_class_under(mLiquid, "Tokenizer", rb_cObject);
rb_define_alloc_func(cLiquidTokenizer, rb_allocate);
rb_define_method(cLiquidTokenizer, "initialize", rb_initialize, 1);
rb_define_method(cLiquidTokenizer, "next", rb_next, 0);
rb_define_alias(cLiquidTokenizer, "shift", "next");
}

ext/liquid/tokenizer.h

@@ -0,0 +1,30 @@
#ifndef LIQUID_TOKENIZER_H
#define LIQUID_TOKENIZER_H
extern VALUE cLiquidTokenizer;
enum token_type {
TOKEN_NONE,
TOKEN_INVALID,
TOKEN_STRING,
TOKEN_TAG,
TOKEN_VARIABLE
};
struct token {
enum token_type type;
char *str;
int length;
};
struct liquid_tokenizer {
char *cursor;
int length;
};
void init_liquid_tokenizer();
void liquid_tokenizer_next(struct liquid_tokenizer *tokenizer, struct token *token);
#define LIQUID_TOKENIZER_GET_STRUCT(obj) ((struct liquid_tokenizer *)obj_get_data_ptr(obj, cLiquidTokenizer))
#endif
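As an illustration of the token boundaries liquid_tokenizer_next produces (a sketch, not part of the change; it assumes the declarations above plus stdio.h):

char source[] = "Hello {{ name }}!";
struct liquid_tokenizer tokenizer = { source, (int)(sizeof(source) - 1) };
struct token token;
for (liquid_tokenizer_next(&tokenizer, &token);
     token.type != TOKEN_NONE;
     liquid_tokenizer_next(&tokenizer, &token)) {
    /* Prints, in order: "Hello " (TOKEN_STRING), "{{ name }}"
     * (TOKEN_VARIABLE), "!" (TOKEN_STRING). */
    printf("%.*s\n", (int)token.length, token.str);
}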

ext/liquid/utils.c

@@ -0,0 +1,21 @@
#include <ruby.h>
void raise_type_error(VALUE expected, VALUE got)
{
rb_raise(rb_eTypeError, "wrong argument type %s (expected %s)",
rb_class2name(got), rb_class2name(expected));
}
void check_class(VALUE obj, int type, VALUE klass)
{
Check_Type(obj, type);
VALUE obj_klass = RBASIC_CLASS(obj);
if (obj_klass != klass)
raise_type_error(klass, obj_klass);
}
void *obj_get_data_ptr(VALUE obj, VALUE klass)
{
check_class(obj, T_DATA, klass);
return DATA_PTR(obj);
}

ext/liquid/utils.h

@@ -0,0 +1,8 @@
#ifndef LIQUID_UTILS_H
#define LIQUID_UTILS_H
void raise_type_error(VALUE expected, VALUE got);
void check_class(VALUE obj, int type, VALUE klass);
void *obj_get_data_ptr(VALUE obj, VALUE klass);
#endif
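These helpers back the *_GET_STRUCT macros in tokenizer.h and slice.h; the class check is what lets C code accept wrapped objects other than self, as described in commit 03d586aafe. A hypothetical method using it, assuming the headers above:

/* Hypothetical: pull the next raw token out of a Liquid::Tokenizer passed in
 * as an argument. obj_get_data_ptr raises a TypeError unless tokenizer_obj is
 * T_DATA wrapping cLiquidTokenizer, so an arbitrary Ruby object can't be
 * misread as a struct liquid_tokenizer. */
static VALUE rb_shift_token(VALUE self, VALUE tokenizer_obj)
{
    struct liquid_tokenizer *tokenizer = LIQUID_TOKENIZER_GET_STRUCT(tokenizer_obj);
    struct token token;
    liquid_tokenizer_next(tokenizer, &token);
    if (token.type == TOKEN_NONE)
        return Qnil;
    return rb_str_new(token.str, token.length);
}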


@@ -30,21 +30,18 @@ module Liquid
VariableSegment = /[\w\-]/
VariableStart = /\{\{/
VariableEnd = /\}\}/
VariableIncompleteEnd = /\}\}?/
QuotedString = /"[^"]*"|'[^']*'/
QuotedFragment = /#{QuotedString}|(?:[^\s,\|'"]|#{QuotedString})+/o
StrictQuotedFragment = /"[^"]+"|'[^']+'|[^\s|:,]+/
FirstFilterArgument = /#{FilterArgumentSeparator}(?:#{StrictQuotedFragment})/o
OtherFilterArgument = /#{ArgumentSeparator}(?:#{StrictQuotedFragment})/o
SpacelessFilter = /^(?:'[^']+'|"[^"]+"|[^'"])*#{FilterSeparator}(?:#{StrictQuotedFragment})(?:#{FirstFilterArgument}(?:#{OtherFilterArgument})*)?/o
SpacelessFilter = /\A(?:'[^']+'|"[^"]+"|[^'"])*#{FilterSeparator}(?:#{StrictQuotedFragment})(?:#{FirstFilterArgument}(?:#{OtherFilterArgument})*)?/o
Expression = /(?:#{QuotedFragment}(?:#{SpacelessFilter})*)/o
TagAttributes = /(\w+)\s*\:\s*(#{QuotedFragment})/o
AnyStartingTag = /\{\{|\{\%/
PartialTemplateParser = /#{TagStart}.*?#{TagEnd}|#{VariableStart}.*?#{VariableIncompleteEnd}/o
TemplateParser = /(#{PartialTemplateParser}|#{AnyStartingTag})/o
VariableParser = /\[[^\]]+\]|#{VariableSegment}+\??/o
end
require 'liquid/liquid'
require "liquid/version"
require 'liquid/lexer'
require 'liquid/parser'


@@ -1,82 +1,26 @@
module Liquid
class Block < Tag
IsTag = /^#{TagStart}/o
IsVariable = /^#{VariableStart}/o
FullToken = /^#{TagStart}\s*(\w+)\s*(.*)?#{TagEnd}$/o
ContentOfVariable = /^#{VariableStart}(.*)#{VariableEnd}$/o
def initialize(tag_name, markup, tokens)
super
parse_body(tokens)
end
def blank?
@blank || false
end
def parse(tokens)
@blank = true
@nodelist ||= []
@nodelist.clear
# All child tags of the current block.
@children = []
while token = tokens.shift
case token
when IsTag
if token =~ FullToken
# if we found the proper block delimiter just end parsing here and let the outer block
# proceed
if block_delimiter == $1
end_tag
return
end
# fetch the tag from registered blocks
if tag = Template.tags[$1]
new_tag = tag.new_with_options($1, $2, tokens, @options || {})
@blank &&= new_tag.blank?
@nodelist << new_tag
@children << new_tag
else
# this tag is not registered with the system
# pass it to the current block for special handling or error reporting
unknown_tag($1, $2, tokens)
end
else
raise SyntaxError.new(options[:locale].t("errors.syntax.tag_termination", :token => token, :tag_end => TagEnd.inspect))
end
when IsVariable
new_var = create_variable(token)
@nodelist << new_var
@children << new_var
@blank = false
when ''
# pass
else
@nodelist << token
@blank &&= (token =~ /\A\s*\z/)
end
end
# Make sure that it's ok to end parsing in the current block.
# Effectively this method will throw an exception unless the current block is
# of type Document
assert_missing_delimitation!
end
# warnings of this block and all sub-tags
def warnings
all_warnings = []
all_warnings.concat(@warnings) if @warnings
(@children || []).each do |node|
all_warnings.concat(node.warnings || [])
(nodelist || []).each do |node|
all_warnings.concat(node.warnings || []) if node.respond_to?(:warnings)
end
all_warnings
end
def end_tag
end
def unknown_tag(tag, params, tokens)
case tag
when 'else'
@@ -99,19 +43,20 @@ module Liquid
@tag_name
end
def create_variable(token)
token.scan(ContentOfVariable) do |content|
return Variable.new(content.first, @options)
end
raise SyntaxError.new(options[:locale].t("errors.syntax.variable_termination", :token => token, :tag_end => VariableEnd.inspect))
end
def render(context)
render_all(@nodelist, context)
end
protected
def unterminated_variable(token)
raise SyntaxError.new(options[:locale].t("errors.syntax.variable_termination", :token => token, :tag_end => VariableEnd.inspect))
end
def unterminated_tag(token)
raise SyntaxError.new(options[:locale].t("errors.syntax.tag_termination", :token => token, :tag_end => TagEnd.inspect))
end
def assert_missing_delimitation!
raise SyntaxError.new(options[:locale].t("errors.syntax.tag_never_closed", :block_name => block_name))
end
@@ -150,7 +95,7 @@ module Liquid
end
end
output.join
StringSlice.join(output)
end
end
end


@@ -29,7 +29,7 @@ module Liquid
end
def increment_used_resources(key, obj)
@resource_limits[key] += if obj.kind_of?(String) || obj.kind_of?(Array) || obj.kind_of?(Hash)
@resource_limits[key] += if obj.kind_of?(StringSlice) || obj.kind_of?(String) || obj.kind_of?(Array) || obj.kind_of?(Hash)
obj.length
else
1
@@ -171,15 +171,15 @@ module Liquid
LITERALS[key]
else
case key
when /^'(.*)'$/ # Single quoted strings
when /\A'(.*)'\z/ # Single quoted strings
$1
when /^"(.*)"$/ # Double quoted strings
when /\A"(.*)"\z/ # Double quoted strings
$1
when /^(-?\d+)$/ # Integer and floats
when /\A(-?\d+)\z/ # Integer and floats
$1.to_i
when /^\((\S+)\.\.(\S+)\)$/ # Ranges
when /\A\((\S+)\.\.(\S+)\)\z/ # Ranges
(resolve($1).to_i..resolve($2).to_i)
when /^(-?\d[\d\.]+)$/ # Floats
when /\A(-?\d[\d\.]+)\z/ # Floats
$1.to_f
else
variable(key)
@@ -218,7 +218,7 @@ module Liquid
# assert_equal 'tobi', @context['hash["name"]']
def variable(markup)
parts = markup.scan(VariableParser)
square_bracketed = /^\[(.*)\]$/
square_bracketed = /\A\[(.*)\]\z/
first_part = parts.shift


@@ -3,12 +3,12 @@ module Liquid
# we don't need markup to open this block
def initialize(tokens, options = {})
@options = options
parse(tokens)
parse_body(tokens)
end
# There isn't a real delimiter
def block_delimiter
[]
nil
end
# Document blocks don't need to be terminated since they are not actually opened


@@ -57,7 +57,7 @@ module Liquid
end
def full_path(template_path)
raise FileSystemError, "Illegal template name '#{template_path}'" unless template_path =~ /^[^.\/][a-zA-Z0-9_\/]+$/
raise FileSystemError, "Illegal template name '#{template_path}'" unless template_path =~ /\A[^.\/][a-zA-Z0-9_\/]+\z/
full_path = if template_path.include?('/')
File.join(root, File.dirname(template_path), @pattern % File.basename(template_path))
@@ -65,7 +65,7 @@ module Liquid
File.join(root, @pattern % template_path)
end
raise FileSystemError, "Illegal template path '#{File.expand_path(full_path)}'" unless File.expand_path(full_path) =~ /^#{File.expand_path(root)}/
raise FileSystemError, "Illegal template path '#{File.expand_path(full_path)}'" unless File.expand_path(full_path) =~ /\A#{File.expand_path(root)}/
full_path
end


@@ -55,7 +55,7 @@ module Liquid
col += 1
result << "<td class=\"col#{col}\">" << render_all(@nodelist, context) << '</td>'
result << "<td class=\"col#{col}\">" << super << '</td>'
if col == cols and (index != length - 1)
col = 0


@@ -190,7 +190,7 @@ module Liquid
return input.to_s
end
if ((input.is_a?(String) && !/^\d+$/.match(input.to_s).nil?) || input.is_a?(Integer)) && input.to_i > 0
if ((input.is_a?(String) && !/\A\d+\z/.match(input.to_s).nil?) || input.is_a?(Integer)) && input.to_i > 0
input = Time.at(input.to_i)
end
@@ -281,7 +281,7 @@ module Liquid
when Numeric
obj
when String
(obj.strip =~ /^\d+\.\d+$/) ? BigDecimal.new(obj) : obj.to_i
(obj.strip =~ /\A\d+\.\d+\z/) ? BigDecimal.new(obj) : obj.to_i
else
0
end


@@ -16,10 +16,6 @@ module Liquid
@tag_name = tag_name
@markup = markup
@options ||= {} # needs || because might be set before initialize
parse(tokens)
end
def parse(tokens)
end
def name


@@ -12,8 +12,8 @@ module Liquid
# <div class="green"> Item five</div>
#
class Cycle < Tag
SimpleSyntax = /^#{QuotedFragment}+/o
NamedSyntax = /^(#{QuotedFragment})\s*\:\s*(.*)/o
SimpleSyntax = /\A#{QuotedFragment}+/o
NamedSyntax = /\A(#{QuotedFragment})\s*\:\s*(.*)/o
def initialize(tag_name, markup, tokens)
case markup


@@ -4,7 +4,7 @@ module Liquid
def render(context)
context.stack do
output = render_all(@nodelist, context)
output = super
if output != context.registers[:ifchanged]
context.registers[:ifchanged] = output


@@ -35,9 +35,6 @@ module Liquid
super
end
def parse(tokens)
end
def blank?
false
end


@@ -1,17 +1,14 @@
module Liquid
class Raw < Block
FullTokenPossiblyInvalid = /^(.*)#{TagStart}\s*(\w+)\s*(.*)?#{TagEnd}$/o
FullTokenPossiblyInvalid = /\A(.*)#{TagStart}\s*(\w+)\s*(.*)?#{TagEnd}\z/o
def parse(tokens)
def parse_body(tokens)
@nodelist ||= []
@nodelist.clear
while token = tokens.shift
if token =~ FullTokenPossiblyInvalid
@nodelist << $1 if $1 != ""
if block_delimiter == $2
end_tag
return
end
return if block_delimiter == $2
end
@nodelist << token if not token.empty?
end


@@ -162,16 +162,9 @@ module Liquid
private
# Uses the <tt>Liquid::TemplateParser</tt> regexp to tokenize the passed source
def tokenize(source)
source = source.source if source.respond_to?(:source)
return [] if source.to_s.empty?
tokens = source.split(TemplateParser)
# removes the rogue empty element at the beginning of the array
tokens.shift if tokens[0] and tokens[0].empty?
tokens
Tokenizer.new(source.to_s)
end
end


@@ -12,7 +12,7 @@ module Liquid
#
class Variable
FilterParser = /(?:#{FilterSeparator}|(?:\s*(?:#{QuotedFragment}|#{ArgumentSeparator})\s*)+)/o
EasyParse = /^ *(\w+(?:\.\w+)*) *$/
EasyParse = /\A *(\w+(?:\.\w+)*) *\z/
attr_accessor :filters, :name, :warnings
def initialize(markup, options = {})


@@ -18,9 +18,15 @@ Gem::Specification.new do |s|
s.required_rubygems_version = ">= 1.3.7"
s.test_files = Dir.glob("{test}/**/*")
s.files = Dir.glob("{lib}/**/*") + %w(MIT-LICENSE README.md)
s.files = Dir.glob("{lib,ext}/**/*") + %w(MIT-LICENSE README.md)
s.extensions = ['ext/liquid/extconf.rb']
s.extra_rdoc_files = ["History.md", "README.md"]
s.require_path = "lib"
s.add_development_dependency 'rake-compiler'
s.add_development_dependency 'stackprof'
s.add_development_dependency 'rake'
s.add_development_dependency 'activesupport'
end


@@ -54,7 +54,7 @@ module ShopFilter
def product_img_url(url, style = 'small')
unless url =~ /^products\/([\w\-\_]+)\.(\w{2,4})/
unless url =~ /\Aproducts\/([\w\-\_]+)\.(\w{2,4})/
raise ArgumentError, 'filter "size" can only be called on product images'
end

performance/stackprof.rb

@@ -0,0 +1,15 @@
require 'stackprof' rescue fail("install stackprof extension/gem")
require File.dirname(__FILE__) + '/theme_runner'
profiler = ThemeRunner.new
profiler.run
results = StackProf.run(mode: :cpu, out: ENV['FILENAME']) do
100.times do
profiler.run
end
end
if results.kind_of?(File)
puts "wrote stackprof dump to #{results.path}"
else
StackProf::Report.new(results).print_text(false, 20)
end


@@ -12,34 +12,34 @@ class BlockTest < Test::Unit::TestCase
template = Liquid::Template.parse("{{funk}} ")
assert_equal 2, template.root.nodelist.size
assert_equal Variable, template.root.nodelist[0].class
assert_equal String, template.root.nodelist[1].class
assert_equal StringSlice, template.root.nodelist[1].class
end
def test_variable_end
template = Liquid::Template.parse(" {{funk}}")
assert_equal 2, template.root.nodelist.size
assert_equal String, template.root.nodelist[0].class
assert_equal StringSlice, template.root.nodelist[0].class
assert_equal Variable, template.root.nodelist[1].class
end
def test_variable_middle
template = Liquid::Template.parse(" {{funk}} ")
assert_equal 3, template.root.nodelist.size
assert_equal String, template.root.nodelist[0].class
assert_equal StringSlice, template.root.nodelist[0].class
assert_equal Variable, template.root.nodelist[1].class
assert_equal String, template.root.nodelist[2].class
assert_equal StringSlice, template.root.nodelist[2].class
end
def test_variable_many_embedded_fragments
template = Liquid::Template.parse(" {{funk}} {{so}} {{brother}} ")
assert_equal 7, template.root.nodelist.size
assert_equal [String, Variable, String, Variable, String, Variable, String],
assert_equal [StringSlice, Variable, StringSlice, Variable, StringSlice, Variable, StringSlice],
block_types(template.root.nodelist)
end
def test_with_block
template = Liquid::Template.parse(" {% comment %} {% endcomment %} ")
assert_equal [String, Comment, String], block_types(template.root.nodelist)
assert_equal [StringSlice, Comment, StringSlice], block_types(template.root.nodelist)
assert_equal 3, template.root.nodelist.size
end


@@ -8,7 +8,7 @@ class ParsingQuirksTest < Test::Unit::TestCase
template = Template.parse(text)
assert_equal text, template.render
assert_equal [String], template.root.nodelist.collect {|i| i.class}
assert_equal [StringSlice], template.root.nodelist.collect {|i| i.class}
end
def test_raise_on_single_close_bracet


@@ -0,0 +1,34 @@
require 'test_helper'
class StringSliceTest < Test::Unit::TestCase
def test_new_from_string
assert_equal 'slice', Liquid::StringSlice.new("slice and dice", 0, 5).to_str
assert_equal 'and', Liquid::StringSlice.new("slice and dice", 6, 3).to_str
assert_equal 'dice', Liquid::StringSlice.new("slice and dice", 10, 4).to_str
assert_equal 'slice and dice', Liquid::StringSlice.new("slice and dice", 0, 14).to_str
end
def test_new_from_slice
slice1 = Liquid::StringSlice.new("slice and dice", 0, 14)
slice2 = Liquid::StringSlice.new(slice1, 6, 8)
slice3 = Liquid::StringSlice.new(slice2, 0, 3)
assert_equal "slice and dice", slice1.to_str
assert_equal "and dice", slice2.to_str
assert_equal "and", slice3.to_str
end
def test_slice
slice = Liquid::StringSlice.new("slice and dice", 2, 10)
assert_equal "and", slice.slice(4, 3).to_str
end
def test_length
slice = Liquid::StringSlice.new("slice and dice", 6, 3)
assert_equal 3, slice.length
assert_equal 3, slice.size
end
def test_equal
assert_equal 'and', Liquid::StringSlice.new("slice and dice", 6, 3)
end
end


@@ -25,26 +25,6 @@ end
class TemplateTest < Test::Unit::TestCase
include Liquid
def test_tokenize_strings
assert_equal [' '], Template.new.send(:tokenize, ' ')
assert_equal ['hello world'], Template.new.send(:tokenize, 'hello world')
end
def test_tokenize_variables
assert_equal ['{{funk}}'], Template.new.send(:tokenize, '{{funk}}')
assert_equal [' ', '{{funk}}', ' '], Template.new.send(:tokenize, ' {{funk}} ')
assert_equal [' ', '{{funk}}', ' ', '{{so}}', ' ', '{{brother}}', ' '], Template.new.send(:tokenize, ' {{funk}} {{so}} {{brother}} ')
assert_equal [' ', '{{ funk }}', ' '], Template.new.send(:tokenize, ' {{ funk }} ')
end
def test_tokenize_blocks
assert_equal ['{%comment%}'], Template.new.send(:tokenize, '{%comment%}')
assert_equal [' ', '{%comment%}', ' '], Template.new.send(:tokenize, ' {%comment%} ')
assert_equal [' ', '{%comment%}', ' ', '{%endcomment%}', ' '], Template.new.send(:tokenize, ' {%comment%} {%endcomment%} ')
assert_equal [' ', '{% comment %}', ' ', '{% endcomment %}', ' '], Template.new.send(:tokenize, " {% comment %} {% endcomment %} ")
end
def test_instance_assigns_persist_on_same_template_object_between_parses
t = Template.new
assert_equal 'from instance assigns', t.parse("{% assign foo = 'from instance assigns' %}{{ foo }}").render


@@ -0,0 +1,64 @@
require 'test_helper'
class TokenizerTest < Test::Unit::TestCase
def test_tokenize_strings
assert_equal [' '], tokenize(' ')
assert_equal ['hello world'], tokenize('hello world')
end
def test_tokenize_variables
assert_equal ['{{funk}}'], tokenize('{{funk}}')
assert_equal [' ', '{{funk}}', ' '], tokenize(' {{funk}} ')
assert_equal [' ', '{{funk}}', ' ', '{{so}}', ' ', '{{brother}}', ' '], tokenize(' {{funk}} {{so}} {{brother}} ')
assert_equal [' ', '{{ funk }}', ' '], tokenize(' {{ funk }} ')
end
def test_tokenize_blocks
assert_equal ['{%comment%}'], tokenize('{%comment%}')
assert_equal [' ', '{%comment%}', ' '], tokenize(' {%comment%} ')
assert_equal [' ', '{%comment%}', ' ', '{%endcomment%}', ' '], tokenize(' {%comment%} {%endcomment%} ')
assert_equal [' ', '{% comment %}', ' ', '{% endcomment %}', ' '], tokenize(" {% comment %} {% endcomment %} ")
end
def test_tokenize_incomplete_end
assert_tokens 'before{{ incomplete }after', ['before', '{{ incomplete }', 'after']
assert_tokens 'before{% incomplete %after', ['before', '{%', ' incomplete %after']
end
def test_tokenize_no_end
assert_tokens 'before{{ unterminated ', ['before', '{{', ' unterminated ']
assert_tokens 'before{% unterminated ', ['before', '{%', ' unterminated ']
end
private
def assert_tokens(source, expected)
assert_equal expected, tokenize(source)
assert_equal expected, old_tokenize(source)
end
def tokenize(source)
tokenizer = Liquid::Tokenizer.new(source)
tokens = []
while token = tokenizer.next
tokens << token
end
tokens
end
AnyStartingTag = /\{\{|\{\%/
VariableIncompleteEnd = /\}\}?/
PartialTemplateParser = /#{Liquid::TagStart}.*?#{Liquid::TagEnd}|#{Liquid::VariableStart}.*?#{VariableIncompleteEnd}/o
TemplateParser = /(#{PartialTemplateParser}|#{AnyStartingTag})/o
def old_tokenize(source)
return [] if source.to_s.empty?
tokens = source.split(TemplateParser)
# removes the rogue empty element at the beginning of the array
tokens.shift if tokens[0] and tokens[0].empty?
tokens
end
end