diff --git a/ext/liquid_c/tokenizer.c b/ext/liquid_c/tokenizer.c
index f62a6acd..b01d7cca 100644
--- a/ext/liquid_c/tokenizer.c
+++ b/ext/liquid_c/tokenizer.c
@@ -290,9 +290,9 @@ void liquid_define_tokenizer(void)
     rb_define_method(cLiquidTokenizer, "line_number", tokenizer_line_number_method, 0);
     rb_define_method(cLiquidTokenizer, "for_liquid_tag", tokenizer_for_liquid_tag_method, 0);
     rb_define_method(cLiquidTokenizer, "bug_compatible_whitespace_trimming!", tokenizer_bug_compatible_whitespace_trimming, 0);
+    rb_define_method(cLiquidTokenizer, "shift", tokenizer_shift_method, 0);

     // For testing the internal token representation.
-    rb_define_private_method(cLiquidTokenizer, "shift", tokenizer_shift_method, 0);
     rb_define_private_method(cLiquidTokenizer, "shift_trimmed", tokenizer_shift_trimmed_method, 0);
 }

diff --git a/test/unit/tokenizer_test.rb b/test/unit/tokenizer_test.rb
index 136a3359..f08389ab 100644
--- a/test/unit/tokenizer_test.rb
+++ b/test/unit/tokenizer_test.rb
@@ -6,7 +6,7 @@ class TokenizerTest < Minitest::Test
   def test_tokenizer_nil
     tokenizer = new_tokenizer(nil)

-    assert_nil(tokenizer.send(:shift))
+    assert_nil(tokenizer.shift)
   end

   def test_tokenize_strings
@@ -60,10 +60,10 @@ def test_utf8_encoded_source
   def test_utf8_compatible_source
     source = String.new("ascii", encoding: Encoding::ASCII)
     tokenizer = new_tokenizer(source)
-    output = tokenizer.send(:shift)
+    output = tokenizer.shift
     assert_equal(Encoding::UTF_8, output.encoding)
     assert_equal(source, output)
-    assert_nil(tokenizer.send(:shift))
+    assert_nil(tokenizer.shift)
   end

   def test_non_utf8_compatible_source
@@ -105,7 +105,7 @@ def new_tokenizer(source, parse_context: Liquid::ParseContext.new)
   def tokenize(source, for_liquid_tag: false, trimmed: false)
     tokenizer = Liquid::C::Tokenizer.new(source, 1, for_liquid_tag)
     tokens = []
-    while (t = trimmed ? tokenizer.send(:shift_trimmed) : tokenizer.send(:shift))
+    while (t = trimmed ? tokenizer.send(:shift_trimmed) : tokenizer.shift)
       tokens << t
     end
     tokens
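
Usage sketch (not part of the diff): with shift now defined as a public method, callers can pull tokens directly instead of going through send(:shift). The constructor call mirrors the tokenize helper in the test above; the require path, template source, and variable name below are illustrative.

    # Assumes the liquid-c gem is installed and loadable via its usual path.
    require "liquid/c"

    # Same constructor shape as the tokenize test helper: (source, line_number, for_liquid_tag).
    tokenizer = Liquid::C::Tokenizer.new("{{ greeting }} world", 1, false)

    tokens = []
    while (t = tokenizer.shift) # previously required tokenizer.send(:shift)
      tokens << t
    end
    # tokens now holds the raw token strings, e.g. "{{ greeting }}" and " world".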