Ruby 1.9+ uses Minitest as the backend for Test::Unit. As of Minitest 5, the shim breaks compatibility with Test::Unit::TestCase in some scenarios, so this adjusts the test suite to use Minitest 5's syntax. Minitest versions 4 and below do not provide the Minitest::Test class that arrived in version 5; in that case, fall back to the MiniTest::Unit::TestCase class.

Conflicts:
	test/integration/tags/for_tag_test.rb
	test/test_helper.rb
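The fallback described above amounts to a small version guard. Below is a minimal sketch of what such a shim could look like, assuming it lives in test/test_helper.rb; the `Minitest = MiniTest` aliasing line is an assumption for illustration, not necessarily the repository's exact code.

# Hypothetical shim sketch for test/test_helper.rb, not the repository's
# exact code. On Minitest 5+, Minitest::Test already exists, so nothing
# needs to happen.
unless defined?(Minitest::Test)
  # Minitest 4 and below only define the MiniTest (capital T) module,
  # so alias it first (assumption: no Minitest constant exists yet).
  Minitest = MiniTest unless defined?(Minitest)
  # Point Minitest::Test at the old base class so test cases can
  # uniformly subclass Minitest::Test, as LexerUnitTest does below.
  Minitest::Test = MiniTest::Unit::TestCase
end

With this guard in place, the test file below subclasses Minitest::Test regardless of which Minitest version backs the suite.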
49 lines
1.4 KiB
Ruby
require 'test_helper'

class LexerUnitTest < Minitest::Test
  include Liquid

  def test_strings
    tokens = Lexer.new(%! 'this is a test""' "wat 'lol'"!).tokenize
    assert_equal [[:string, %!'this is a test""'!], [:string, %!"wat 'lol'"!], [:end_of_string]], tokens
  end

  def test_integer
    tokens = Lexer.new('hi 50').tokenize
    assert_equal [[:id, 'hi'], [:number, '50'], [:end_of_string]], tokens
  end

  def test_float
    tokens = Lexer.new('hi 5.0').tokenize
    assert_equal [[:id, 'hi'], [:number, '5.0'], [:end_of_string]], tokens
  end

  def test_comparison
    tokens = Lexer.new('== <> contains').tokenize
    assert_equal [[:comparison, '=='], [:comparison, '<>'], [:comparison, 'contains'], [:end_of_string]], tokens
  end

  def test_specials
    tokens = Lexer.new('| .:').tokenize
    assert_equal [[:pipe, '|'], [:dot, '.'], [:colon, ':'], [:end_of_string]], tokens
    tokens = Lexer.new('[,]').tokenize
    assert_equal [[:open_square, '['], [:comma, ','], [:close_square, ']'], [:end_of_string]], tokens
  end

  def test_fancy_identifiers
    tokens = Lexer.new('hi! five?').tokenize
    assert_equal [[:id, 'hi!'], [:id, 'five?'], [:end_of_string]], tokens
  end

  def test_whitespace
    tokens = Lexer.new("five|\n\t ==").tokenize
    assert_equal [[:id, 'five'], [:pipe, '|'], [:comparison, '=='], [:end_of_string]], tokens
  end

  def test_unexpected_character
    assert_raises(SyntaxError) do
      Lexer.new("%").tokenize
    end
  end
end