require "spec_helper"
require "timeout"
describe "Tokenizer" do
let(:parser) { @context["handlebars"] }
let(:lexer) { @context["handlebars"]["lexer"] }
before(:all) do
@compiles = true
end

  Token = Struct.new(:name, :text)

  # Feeds a template string to the lexer and collects tokens until EOF or an
  # INVALID token, mapping numeric token ids back to terminal names via the
  # parser's terminals_ table.
  def tokenize(string)
    lexer.setInput(string)
    out = []

    while (token = lexer.lex)
      result = parser.terminals_[token] || token
      break if !result || result == "EOF" || result == "INVALID"
      out << Token.new(result, lexer.yytext)
    end

    out
  end
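
  # Custom matchers: match_tokens compares only the sequence of token names,
  # while be_token checks a single token's name and source text.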
  RSpec::Matchers.define :match_tokens do |tokens|
    match do |result|
      result.map(&:name).should == tokens
    end
  end

  RSpec::Matchers.define :be_token do |name, string|
    match do |token|
      token.name.should == name
      token.text.should == string
    end
  end
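
  # Simple mustaches and escaped delimiters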
it "tokenizes a simple mustache as 'OPEN ID CLOSE'" do
result = tokenize("{{foo}}")
result.should match_tokens(%w(OPEN ID CLOSE))
result[1].should be_token("ID", "foo")
end
it "supports escaping delimiters" do
result = tokenize("{{foo}} \\{{bar}} {{baz}}")
result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE))
result[4].should be_token("CONTENT", "{{bar}} ")
end
it "supports escaping multiple delimiters" do
result = tokenize("{{foo}} \\{{bar}} \\{{baz}}")
result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT CONTENT))
result[3].should be_token("CONTENT", " ")
result[4].should be_token("CONTENT", "{{bar}} ")
result[5].should be_token("CONTENT", "{{baz}}")
end
it "supports escaping a triple stash" do
result = tokenize("{{foo}} \\{{{bar}}} {{baz}}")
result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE))
result[4].should be_token("CONTENT", "{{{bar}}} ")
end
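
  # Paths: slash and dot separators, segment literals, and parent references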
it "tokenizes a simple path" do
result = tokenize("{{foo/bar}}")
result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
end
it "allows dot notation" do
result = tokenize("{{foo.bar}}")
result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
tokenize("{{foo.bar.baz}}").should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
end
it "allows path literals with []" do
result = tokenize("{{foo.[bar]}}")
result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
end
it "allows multiple path literals on a line with []" do
result = tokenize("{{foo.[bar]}}{{foo.[baz]}}")
result.should match_tokens(%w(OPEN ID SEP ID CLOSE OPEN ID SEP ID CLOSE))
end
it "tokenizes {{.}} as OPEN ID CLOSE" do
result = tokenize("{{.}}")
result.should match_tokens(%w(OPEN ID CLOSE))
end
it "tokenizes a path as 'OPEN (ID SEP)* ID CLOSE'" do
result = tokenize("{{../foo/bar}}")
result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
result[1].should be_token("ID", "..")
end
it "tokenizes a path with .. as a parent path" do
result = tokenize("{{../foo.bar}}")
result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
result[1].should be_token("ID", "..")
end
it "tokenizes a path with this/foo as OPEN ID SEP ID CLOSE" do
result = tokenize("{{this/foo}}")
result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
result[1].should be_token("ID", "this")
result[3].should be_token("ID", "foo")
end
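
  # Whitespace handling and raw content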
it "tokenizes a simple mustache with spaces as 'OPEN ID CLOSE'" do
result = tokenize("{{ foo }}")
result.should match_tokens(%w(OPEN ID CLOSE))
result[1].should be_token("ID", "foo")
end
it "tokenizes a simple mustache with line breaks as 'OPEN ID ID CLOSE'" do
result = tokenize("{{ foo \n bar }}")
result.should match_tokens(%w(OPEN ID ID CLOSE))
result[1].should be_token("ID", "foo")
end
it "tokenizes raw content as 'CONTENT'" do
result = tokenize("foo {{ bar }} baz")
result.should match_tokens(%w(CONTENT OPEN ID CLOSE CONTENT))
result[0].should be_token("CONTENT", "foo ")
result[4].should be_token("CONTENT", " baz")
end
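
  # Partials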
it "tokenizes a partial as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
result = tokenize("{{> foo}}")
result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
end
it "tokenizes a partial with context as 'OPEN_PARTIAL PARTIAL_NAME ID CLOSE'" do
result = tokenize("{{> foo bar }}")
result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME ID CLOSE))
end
it "tokenizes a partial without spaces as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
result = tokenize("{{>foo}}")
result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
end
it "tokenizes a partial space at the end as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
result = tokenize("{{>foo }}")
result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
end
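
  # Comments, including block comments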
it "tokenizes a comment as 'COMMENT'" do
result = tokenize("foo {{! this is a comment }} bar {{ baz }}")
result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
result[1].should be_token("COMMENT", " this is a comment ")
end
it "tokenizes a block comment as 'COMMENT'" do
result = tokenize("foo {{!-- this is a {{comment}} --}} bar {{ baz }}")
result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
result[1].should be_token("COMMENT", " this is a {{comment}} ")
end
it "tokenizes a block comment with whitespace as 'COMMENT'" do
result = tokenize("foo {{!-- this is a\n{{comment}}\n--}} bar {{ baz }}")
result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
result[1].should be_token("COMMENT", " this is a\n{{comment}}\n")
end
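
  # Blocks and inverse sections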
it "tokenizes open and closing blocks as 'OPEN_BLOCK ID CLOSE ... OPEN_ENDBLOCK ID CLOSE'" do
result = tokenize("{{#foo}}content{{/foo}}")
result.should match_tokens(%w(OPEN_BLOCK ID CLOSE CONTENT OPEN_ENDBLOCK ID CLOSE))
end
it "tokenizes inverse sections as 'OPEN_INVERSE CLOSE'" do
tokenize("{{^}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
tokenize("{{else}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
tokenize("{{ else }}").should match_tokens(%w(OPEN_INVERSE CLOSE))
end
it "tokenizes inverse sections with ID as 'OPEN_INVERSE ID CLOSE'" do
result = tokenize("{{^foo}}")
result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
result[1].should be_token("ID", "foo")
end
it "tokenizes inverse sections with ID and spaces as 'OPEN_INVERSE ID CLOSE'" do
result = tokenize("{{^ foo }}")
result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
result[1].should be_token("ID", "foo")
end
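
  # Mustache parameters: IDs, strings, numbers, and booleans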
it "tokenizes mustaches with params as 'OPEN ID ID ID CLOSE'" do
result = tokenize("{{ foo bar baz }}")
result.should match_tokens(%w(OPEN ID ID ID CLOSE))
result[1].should be_token("ID", "foo")
result[2].should be_token("ID", "bar")
result[3].should be_token("ID", "baz")
end
it "tokenizes mustaches with String params as 'OPEN ID ID STRING CLOSE'" do
result = tokenize("{{ foo bar \"baz\" }}")
result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
result[3].should be_token("STRING", "baz")
end
it "tokenizes mustaches with String params using single quotes as 'OPEN ID ID STRING CLOSE'" do
result = tokenize("{{ foo bar \'baz\' }}")
result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
result[3].should be_token("STRING", "baz")
end
it "tokenizes String params with spaces inside as 'STRING'" do
result = tokenize("{{ foo bar \"baz bat\" }}")
result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
result[3].should be_token("STRING", "baz bat")
end
it "tokenizes String params with escapes quotes as 'STRING'" do
result = tokenize(%|{{ foo "bar\\"baz" }}|)
result.should match_tokens(%w(OPEN ID STRING CLOSE))
result[2].should be_token("STRING", %{bar"baz})
end
it "tokenizes String params using single quotes with escapes quotes as 'STRING'" do
result = tokenize(%|{{ foo 'bar\\'baz' }}|)
result.should match_tokens(%w(OPEN ID STRING CLOSE))
result[2].should be_token("STRING", %{bar'baz})
end
it "tokenizes numbers" do
result = tokenize(%|{{ foo 1 }}|)
result.should match_tokens(%w(OPEN ID INTEGER CLOSE))
result[2].should be_token("INTEGER", "1")
end
it "tokenizes booleans" do
result = tokenize(%|{{ foo true }}|)
result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
result[2].should be_token("BOOLEAN", "true")
result = tokenize(%|{{ foo false }}|)
result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
result[2].should be_token("BOOLEAN", "false")
end
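
  # Hash arguments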
it "tokenizes hash arguments" do
result = tokenize("{{ foo bar=baz }}")
result.should match_tokens %w(OPEN ID ID EQUALS ID CLOSE)
result = tokenize("{{ foo bar baz=bat }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)
result = tokenize("{{ foo bar baz=1 }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS INTEGER CLOSE)
result = tokenize("{{ foo bar baz=true }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)
result = tokenize("{{ foo bar baz=false }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)
result = tokenize("{{ foo bar\n baz=bat }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)
result = tokenize("{{ foo bar baz=\"bat\" }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS STRING CLOSE)
result = tokenize("{{ foo bar baz=\"bat\" bam=wot }}")
result.should match_tokens %w(OPEN ID ID ID EQUALS STRING ID EQUALS ID CLOSE)
result = tokenize("{{foo omg bar=baz bat=\"bam\"}}")
result.should match_tokens %w(OPEN ID ID ID EQUALS ID ID EQUALS STRING CLOSE)
result[2].should be_token("ID", "omg")
end
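
  # @-prefixed data references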
it "tokenizes special @ identifiers" do
result = tokenize("{{ @foo }}")
result.should match_tokens %w( OPEN DATA CLOSE )
result[1].should be_token("DATA", "foo")
result = tokenize("{{ foo @bar }}")
result.should match_tokens %w( OPEN ID DATA CLOSE )
result[2].should be_token("DATA", "bar")
result = tokenize("{{ foo bar=@baz }}")
result.should match_tokens %w( OPEN ID ID EQUALS DATA CLOSE )
result[4].should be_token("DATA", "baz")
end
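
  # Pathological inputs must fail fast rather than loop forever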
it "does not time out in a mustache with a single } followed by EOF" do
Timeout.timeout(1) { tokenize("{{foo}").should match_tokens(%w(OPEN ID)) }
end
it "does not time out in a mustache when invalid ID characters are used" do
Timeout.timeout(1) { tokenize("{{foo & }}").should match_tokens(%w(OPEN ID)) }
end
end