# frozen_string_literal: true

require 'test_helper'

class TokenizerTest < Minitest::Test
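  # Plain text with no Liquid markup comes back as a single token.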
  def test_tokenize_strings
    assert_equal [' '], tokenize(' ')
    assert_equal ['hello world'], tokenize('hello world')
  end
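
  # A {{ ... }} variable becomes its own token; surrounding text and the
  # whitespace inside the delimiters are preserved verbatim.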
  def test_tokenize_variables
    assert_equal ['{{funk}}'], tokenize('{{funk}}')
    assert_equal [' ', '{{funk}}', ' '], tokenize(' {{funk}} ')
    assert_equal [' ', '{{funk}}', ' ', '{{so}}', ' ', '{{brother}}', ' '], tokenize(' {{funk}} {{so}} {{brother}} ')
    assert_equal [' ', '{{ funk }}', ' '], tokenize(' {{ funk }} ')
  end
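
  # {% ... %} tag markup is tokenized the same way: one token per tag,
  # with internal whitespace kept intact.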
  def test_tokenize_blocks
    assert_equal ['{%comment%}'], tokenize('{%comment%}')
    assert_equal [' ', '{%comment%}', ' '], tokenize(' {%comment%} ')
    assert_equal [' ', '{%comment%}', ' ', '{%endcomment%}', ' '], tokenize(' {%comment%} {%endcomment%} ')
    assert_equal [' ', '{% comment %}', ' ', '{% endcomment %}', ' '], tokenize(' {% comment %} {% endcomment %} ')
  end
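
  # With line-number tracking enabled, each token is attributed to the line
  # on which it starts, so a token spanning several lines advances the
  # numbers reported for the tokens that follow it.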
  def test_calculate_line_numbers_per_token_with_profiling
    assert_equal [1], tokenize_line_numbers("{{funk}}")
    assert_equal [1, 1, 1], tokenize_line_numbers(" {{funk}} ")
    assert_equal [1, 2, 2], tokenize_line_numbers("\n{{funk}}\n")
    assert_equal [1, 1, 3], tokenize_line_numbers(" {{\n funk \n}} ")
  end
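
  # Edge cases around quoting: tag or variable delimiters that appear inside
  # quoted strings do not terminate the token, and a variable closed by a
  # lone `}` is still captured as one (incomplete) token.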
  def test_tokenize_quirks
    assert_equal ['{%comment%}'], tokenize('{%comment%}')
    assert_equal [' ', '{%comment%}', ' '], tokenize(' {%comment%} ')
    assert_equal [' ', '{%comment%}', ' ', '{%endcomment%}', ' '], tokenize(' {%comment%} {%endcomment%} ')
    assert_equal [' ', '{% "{% comment" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "{% comment" %} {% endcomment %} ')
    assert_equal [' ', '{% "{% comment %}" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "{% comment %}" %} {% endcomment %} ')
    assert_equal [' ', '{% "comment %}" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "comment %}" %} {% endcomment %} ')
    assert_equal [' ', '{% "{{ comment" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "{{ comment" %} {% endcomment %} ')
    assert_equal [' ', '{% "{{ comment }}" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "{{ comment }}" %} {% endcomment %} ')
    assert_equal [' ', '{% "comment }}" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "comment }}" %} {% endcomment %} ')
    assert_equal [' ', '{% "comment }" %}', ' ', '{% endcomment %}', ' '], tokenize(' {% "comment }" %} {% endcomment %} ')
    assert_equal [" ", "{%comment%}", " ", "{%endcomment%}", " "], tokenize(" {%comment%} {%endcomment%} ")
    assert_equal [" ", "{% '{% comment' %}", " ", "{% endcomment %}", " "], tokenize(" {% '{% comment' %} {% endcomment %} ")
    assert_equal [" ", "{% '{% comment %}' %}", " ", "{% endcomment %}", " "], tokenize(" {% '{% comment %}' %} {% endcomment %} ")
    assert_equal [" ", "{% 'comment %}' %}", " ", "{% endcomment %}", " "], tokenize(" {% 'comment %}' %} {% endcomment %} ")
    assert_equal [" ", "{% '{{ comment' %}", " ", "{% endcomment %}", " "], tokenize(" {% '{{ comment' %} {% endcomment %} ")
    assert_equal [" ", "{% '{{ comment }}' %}", " ", "{% endcomment %}", " "], tokenize(" {% '{{ comment }}' %} {% endcomment %} ")
    assert_equal [" ", "{% 'comment }}' %}", " ", "{% endcomment %}", " "], tokenize(" {% 'comment }}' %} {% endcomment %} ")
    assert_equal [" ", "{% 'comment }' %}", " ", "{% endcomment %}", " "], tokenize(" {% 'comment }' %} {% endcomment %} ")
    assert_equal [' ', '{{comment}}', ' ', '{{endcomment}}', ' '], tokenize(' {{comment}} {{endcomment}} ')
    assert_equal [' ', '{{ "{{ comment" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "{{ comment" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "{{ comment }}" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "{{ comment }}" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "comment }}" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "comment }}" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "{{ comment" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "{{ comment" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "{{ comment }}" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "{{ comment }}" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "comment }}" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "comment }}" }} {{ endcomment }} ')
    assert_equal [' ', '{{ "comment }" }}', ' ', '{{ endcomment }}', ' '], tokenize(' {{ "comment }" }} {{ endcomment }} ')
    assert_equal [" ", "{{comment}}", " ", "{{endcomment}}", " "], tokenize(" {{comment}} {{endcomment}} ")
    assert_equal [" ", "{{ '{% comment' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ '{% comment' }} {{ endcomment }} ")
    assert_equal [" ", "{{ '{% comment }}' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ '{% comment }}' }} {{ endcomment }} ")
    assert_equal [" ", "{{ 'comment }}' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ 'comment }}' }} {{ endcomment }} ")
    assert_equal [" ", "{{ '{{ comment' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ '{{ comment' }} {{ endcomment }} ")
    assert_equal [" ", "{{ '{{ comment }}' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ '{{ comment }}' }} {{ endcomment }} ")
    assert_equal [" ", "{{ 'comment }}' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ 'comment }}' }} {{ endcomment }} ")
    assert_equal [" ", "{{ 'comment }' }}", " ", "{{ endcomment }}", " "], tokenize(" {{ 'comment }' }} {{ endcomment }} ")
    assert_equal [' ', '{{comment}', ' ', '{{endcomment}', ' '], tokenize(' {{comment} {{endcomment} ')
    assert_equal [' ', '{{ "{% comment" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "{% comment" } {{ endcomment } ')
    assert_equal [' ', '{{ "{% comment }" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "{% comment }" } {{ endcomment } ')
    assert_equal [' ', '{{ "comment }" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "comment }" } {{ endcomment } ')
    assert_equal [' ', '{{ "{{ comment" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "{{ comment" } {{ endcomment } ')
    assert_equal [' ', '{{ "{{ comment }}" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "{{ comment }}" } {{ endcomment } ')
    assert_equal [' ', '{{ "comment }}" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "comment }}" } {{ endcomment } ')
    assert_equal [' ', '{{ "comment }" }', ' ', '{{ endcomment }', ' '], tokenize(' {{ "comment }" } {{ endcomment } ')
    assert_equal [" ", "{{comment}", " ", "{{endcomment}", " "], tokenize(" {{comment} {{endcomment} ")
    assert_equal [" ", "{{ '{{ comment' }", " ", "{{ endcomment }", " "], tokenize(" {{ '{{ comment' } {{ endcomment } ")
    assert_equal [" ", "{{ '{{ comment }' }", " ", "{{ endcomment }", " "], tokenize(" {{ '{{ comment }' } {{ endcomment } ")
    assert_equal [" ", "{{ 'comment }' }", " ", "{{ endcomment }", " "], tokenize(" {{ 'comment }' } {{ endcomment } ")
    assert_equal [" ", "{{ '{{ comment' }", " ", "{{ endcomment }", " "], tokenize(" {{ '{{ comment' } {{ endcomment } ")
    assert_equal [" ", "{{ '{{ comment }}' }", " ", "{{ endcomment }", " "], tokenize(" {{ '{{ comment }}' } {{ endcomment } ")
    assert_equal [" ", "{{ 'comment }}' }", " ", "{{ endcomment }", " "], tokenize(" {{ 'comment }}' } {{ endcomment } ")
    assert_equal [" ", "{{ 'comment }' }", " ", "{{ endcomment }", " "], tokenize(" {{ 'comment }' } {{ endcomment } ")
    assert_equal ['{{funk | replace: "}", \'}}\' }}'], tokenize('{{funk | replace: "}", \'}}\' }}')
  end

  private
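
  # Collects every token by calling Tokenizer#shift until it returns nil.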
  def tokenize(source)
    tokenizer = Liquid::Tokenizer.new(source)
    tokens = []
    while (t = tokenizer.shift)
      tokens << t
    end
    tokens
  end
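
  # Same, but with line-number tracking enabled (the `true` second argument);
  # records the tokenizer's current line number before each shift.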
  def tokenize_line_numbers(source)
    tokenizer = Liquid::Tokenizer.new(source, true)
    line_numbers = []
    loop do
      line_number = tokenizer.line_number
      if tokenizer.shift
        line_numbers << line_number
      else
        break
      end
    end
    line_numbers
  end
end