diff options
author | kpdecker <kpdecker@gmail.com> | 2013-12-23 00:48:28 -0600 |
---|---|---|
committer | kpdecker <kpdecker@gmail.com> | 2013-12-23 02:19:09 -0600 |
commit | 494232425033d7d3aade5b0eba641a93e68e9f89 (patch) | |
tree | 3e4fd8fd04b6d670d3da6e2c5e27311b21c80e68 /spec/tokenizer.js | |
parent | 0cf5657c43791817776477b727b30ad7f6275707 (diff) | |
download | handlebars.js-494232425033d7d3aade5b0eba641a93e68e9f89.zip handlebars.js-494232425033d7d3aade5b0eba641a93e68e9f89.tar.gz handlebars.js-494232425033d7d3aade5b0eba641a93e68e9f89.tar.bz2 |
Move away from should asserts to internal
This is needed as neither Sinon nor Chai support in-browser testing under IE.
Diffstat (limited to 'spec/tokenizer.js')
-rw-r--r-- | spec/tokenizer.js | 237 |
1 file changed, 118 insertions, 119 deletions
diff --git a/spec/tokenizer.js b/spec/tokenizer.js index de981e4..b671940 100644 --- a/spec/tokenizer.js +++ b/spec/tokenizer.js @@ -1,12 +1,11 @@ -var should = require('should'); - -should.Assertion.prototype.match_tokens = function(tokens) { - this.obj.forEach(function(value, index) { - value.name.should.equal(tokens[index]); +function shouldMatchTokens(result, tokens) { + result.forEach(function(value, index) { + equals(value.name, tokens[index]); }); }; -should.Assertion.prototype.be_token = function(name, text) { - this.obj.should.eql({name: name, text: text}); +function shouldBeToken(result, name, text) { + equals(result.name, name); + equals(result.text, text); }; describe('Tokenizer', function() { @@ -35,324 +34,324 @@ describe('Tokenizer', function() { it('tokenizes a simple mustache as "OPEN ID CLOSE"', function() { var result = tokenize("{{foo}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); }); it('supports unescaping with &', function() { var result = tokenize("{{&bar}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE']); - result[0].should.be_token("OPEN", "{{&"); - result[1].should.be_token("ID", "bar"); + shouldBeToken(result[0], "OPEN", "{{&"); + shouldBeToken(result[1], "ID", "bar"); }); it('supports unescaping with {{{', function() { var result = tokenize("{{{bar}}}"); - result.should.match_tokens(['OPEN_UNESCAPED', 'ID', 'CLOSE_UNESCAPED']); + shouldMatchTokens(result, ['OPEN_UNESCAPED', 'ID', 'CLOSE_UNESCAPED']); - result[1].should.be_token("ID", "bar"); + shouldBeToken(result[1], "ID", "bar"); }); it('supports escaping delimiters', function() { var result = tokenize("{{foo}} \\{{bar}} {{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 
'CLOSE', 'CONTENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[3].should.be_token("CONTENT", " "); - result[4].should.be_token("CONTENT", "{{bar}} "); + shouldBeToken(result[3], "CONTENT", " "); + shouldBeToken(result[4], "CONTENT", "{{bar}} "); }); it('supports escaping multiple delimiters', function() { var result = tokenize("{{foo}} \\{{bar}} \\{{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT']); - result[3].should.be_token("CONTENT", " "); - result[4].should.be_token("CONTENT", "{{bar}} "); - result[5].should.be_token("CONTENT", "{{baz}}"); + shouldBeToken(result[3], "CONTENT", " "); + shouldBeToken(result[4], "CONTENT", "{{bar}} "); + shouldBeToken(result[5], "CONTENT", "{{baz}}"); }); it('supports escaping a triple stash', function() { var result = tokenize("{{foo}} \\{{{bar}}} {{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[4].should.be_token("CONTENT", "{{{bar}}} "); + shouldBeToken(result[4], "CONTENT", "{{{bar}}} "); }); it('supports escaping escape character', function() { var result = tokenize("{{foo}} \\\\{{bar}} {{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[3].should.be_token("CONTENT", " \\"); - result[5].should.be_token("ID", "bar"); + shouldBeToken(result[3], "CONTENT", " \\"); + shouldBeToken(result[5], "ID", "bar"); }); it('supports escaping multiple escape characters', function() { var result = tokenize("{{foo}} \\\\{{bar}} \\\\{{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 
'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[3].should.be_token("CONTENT", " \\"); - result[5].should.be_token("ID", "bar"); - result[7].should.be_token("CONTENT", " \\"); - result[9].should.be_token("ID", "baz"); + shouldBeToken(result[3], "CONTENT", " \\"); + shouldBeToken(result[5], "ID", "bar"); + shouldBeToken(result[7], "CONTENT", " \\"); + shouldBeToken(result[9], "ID", "baz"); }); it('supports mixed escaped delimiters and escaped escape characters', function() { var result = tokenize("{{foo}} \\\\{{bar}} \\{{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT']); - result[3].should.be_token("CONTENT", " \\"); - result[4].should.be_token("OPEN", "{{"); - result[5].should.be_token("ID", "bar"); - result[7].should.be_token("CONTENT", " "); - result[8].should.be_token("CONTENT", "{{baz}}"); + shouldBeToken(result[3], "CONTENT", " \\"); + shouldBeToken(result[4], "OPEN", "{{"); + shouldBeToken(result[5], "ID", "bar"); + shouldBeToken(result[7], "CONTENT", " "); + shouldBeToken(result[8], "CONTENT", "{{baz}}"); }); it('supports escaped escape character on a triple stash', function() { var result = tokenize("{{foo}} \\\\{{{bar}}} {{baz}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN_UNESCAPED', 'ID', 'CLOSE_UNESCAPED', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN_UNESCAPED', 'ID', 'CLOSE_UNESCAPED', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[3].should.be_token("CONTENT", " \\"); - result[5].should.be_token("ID", "bar"); + shouldBeToken(result[3], "CONTENT", " \\"); + shouldBeToken(result[5], "ID", "bar"); }); it('tokenizes a simple path', function() { var result = 
tokenize("{{foo/bar}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); }); it('allows dot notation', function() { var result = tokenize("{{foo.bar}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); - tokenize("{{foo.bar.baz}}").should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(tokenize("{{foo.bar.baz}}"), ['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); }); it('allows path literals with []', function() { var result = tokenize("{{foo.[bar]}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); }); it('allows multiple path literals on a line with []', function() { var result = tokenize("{{foo.[bar]}}{{foo.[baz]}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'CLOSE', 'OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'CLOSE', 'OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); }); it('tokenizes {{.}} as OPEN ID CLOSE', function() { var result = tokenize("{{.}}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE']); }); it('tokenizes a path as "OPEN (ID SEP)* ID CLOSE"', function() { var result = tokenize("{{../foo/bar}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); - result[1].should.be_token("ID", ".."); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", ".."); }); it('tokenizes a path with .. 
as a parent path', function() { var result = tokenize("{{../foo.bar}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); - result[1].should.be_token("ID", ".."); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", ".."); }); it('tokenizes a path with this/foo as OPEN ID SEP ID CLOSE', function() { var result = tokenize("{{this/foo}}"); - result.should.match_tokens(['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "this"); - result[3].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "this"); + shouldBeToken(result[3], "ID", "foo"); }); it('tokenizes a simple mustache with spaces as "OPEN ID CLOSE"', function() { var result = tokenize("{{ foo }}"); - result.should.match_tokens(['OPEN', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); }); it('tokenizes a simple mustache with line breaks as "OPEN ID ID CLOSE"', function() { var result = tokenize("{{ foo \n bar }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); }); it('tokenizes raw content as "CONTENT"', function() { var result = tokenize("foo {{ bar }} baz"); - result.should.match_tokens(['CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT']); - result[0].should.be_token("CONTENT", "foo "); - result[4].should.be_token("CONTENT", " baz"); + shouldMatchTokens(result, ['CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT']); + shouldBeToken(result[0], "CONTENT", "foo "); + shouldBeToken(result[4], "CONTENT", " baz"); }); it('tokenizes a partial as "OPEN_PARTIAL ID CLOSE"', function() { var result = tokenize("{{> foo}}"); - result.should.match_tokens(['OPEN_PARTIAL', 'ID', 'CLOSE']); + 
shouldMatchTokens(result, ['OPEN_PARTIAL', 'ID', 'CLOSE']); }); it('tokenizes a partial with context as "OPEN_PARTIAL ID ID CLOSE"', function() { var result = tokenize("{{> foo bar }}"); - result.should.match_tokens(['OPEN_PARTIAL', 'ID', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN_PARTIAL', 'ID', 'ID', 'CLOSE']); }); it('tokenizes a partial without spaces as "OPEN_PARTIAL ID CLOSE"', function() { var result = tokenize("{{>foo}}"); - result.should.match_tokens(['OPEN_PARTIAL', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN_PARTIAL', 'ID', 'CLOSE']); }); it('tokenizes a partial space at the }); as "OPEN_PARTIAL ID CLOSE"', function() { var result = tokenize("{{>foo }}"); - result.should.match_tokens(['OPEN_PARTIAL', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN_PARTIAL', 'ID', 'CLOSE']); }); it('tokenizes a partial space at the }); as "OPEN_PARTIAL ID CLOSE"', function() { var result = tokenize("{{>foo/bar.baz }}"); - result.should.match_tokens(['OPEN_PARTIAL', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN_PARTIAL', 'ID', 'SEP', 'ID', 'SEP', 'ID', 'CLOSE']); }); it('tokenizes a comment as "COMMENT"', function() { var result = tokenize("foo {{! 
this is a comment }} bar {{ baz }}"); - result.should.match_tokens(['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[1].should.be_token("COMMENT", " this is a comment "); + shouldMatchTokens(result, ['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldBeToken(result[1], "COMMENT", " this is a comment "); }); it('tokenizes a block comment as "COMMENT"', function() { var result = tokenize("foo {{!-- this is a {{comment}} --}} bar {{ baz }}"); - result.should.match_tokens(['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[1].should.be_token("COMMENT", " this is a {{comment}} "); + shouldMatchTokens(result, ['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldBeToken(result[1], "COMMENT", " this is a {{comment}} "); }); it('tokenizes a block comment with whitespace as "COMMENT"', function() { var result = tokenize("foo {{!-- this is a\n{{comment}}\n--}} bar {{ baz }}"); - result.should.match_tokens(['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); - result[1].should.be_token("COMMENT", " this is a\n{{comment}}\n"); + shouldMatchTokens(result, ['CONTENT', 'COMMENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']); + shouldBeToken(result[1], "COMMENT", " this is a\n{{comment}}\n"); }); it('tokenizes open and closing blocks as OPEN_BLOCK, ID, CLOSE ..., OPEN_ENDBLOCK ID CLOSE', function() { var result = tokenize("{{#foo}}content{{/foo}}"); - result.should.match_tokens(['OPEN_BLOCK', 'ID', 'CLOSE', 'CONTENT', 'OPEN_ENDBLOCK', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN_BLOCK', 'ID', 'CLOSE', 'CONTENT', 'OPEN_ENDBLOCK', 'ID', 'CLOSE']); }); it('tokenizes inverse sections as "OPEN_INVERSE CLOSE"', function() { - tokenize("{{^}}").should.match_tokens(['OPEN_INVERSE', 'CLOSE']); - tokenize("{{else}}").should.match_tokens(['OPEN_INVERSE', 'CLOSE']); - tokenize("{{ else }}").should.match_tokens(['OPEN_INVERSE', 'CLOSE']); + shouldMatchTokens(tokenize("{{^}}"), ['OPEN_INVERSE', 'CLOSE']); + 
shouldMatchTokens(tokenize("{{else}}"), ['OPEN_INVERSE', 'CLOSE']); + shouldMatchTokens(tokenize("{{ else }}"), ['OPEN_INVERSE', 'CLOSE']); }); it('tokenizes inverse sections with ID as "OPEN_INVERSE ID CLOSE"', function() { var result = tokenize("{{^foo}}"); - result.should.match_tokens(['OPEN_INVERSE', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN_INVERSE', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); }); it('tokenizes inverse sections with ID and spaces as "OPEN_INVERSE ID CLOSE"', function() { var result = tokenize("{{^ foo }}"); - result.should.match_tokens(['OPEN_INVERSE', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN_INVERSE', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); }); it('tokenizes mustaches with params as "OPEN ID ID ID CLOSE"', function() { var result = tokenize("{{ foo bar baz }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'CLOSE']); - result[1].should.be_token("ID", "foo"); - result[2].should.be_token("ID", "bar"); - result[3].should.be_token("ID", "baz"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'CLOSE']); + shouldBeToken(result[1], "ID", "foo"); + shouldBeToken(result[2], "ID", "bar"); + shouldBeToken(result[3], "ID", "baz"); }); it('tokenizes mustaches with String params as "OPEN ID ID STRING CLOSE"', function() { var result = tokenize("{{ foo bar \"baz\" }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'STRING', 'CLOSE']); - result[3].should.be_token("STRING", "baz"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'STRING', 'CLOSE']); + shouldBeToken(result[3], "STRING", "baz"); }); it('tokenizes mustaches with String params using single quotes as "OPEN ID ID STRING CLOSE"', function() { var result = tokenize("{{ foo bar \'baz\' }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'STRING', 'CLOSE']); - result[3].should.be_token("STRING", "baz"); + shouldMatchTokens(result, ['OPEN', 
'ID', 'ID', 'STRING', 'CLOSE']); + shouldBeToken(result[3], "STRING", "baz"); }); it('tokenizes String params with spaces inside as "STRING"', function() { var result = tokenize("{{ foo bar \"baz bat\" }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'STRING', 'CLOSE']); - result[3].should.be_token("STRING", "baz bat"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'STRING', 'CLOSE']); + shouldBeToken(result[3], "STRING", "baz bat"); }); it('tokenizes String params with escapes quotes as STRING', function() { var result = tokenize('{{ foo "bar\\"baz" }}'); - result.should.match_tokens(['OPEN', 'ID', 'STRING', 'CLOSE']); - result[2].should.be_token("STRING", 'bar"baz'); + shouldMatchTokens(result, ['OPEN', 'ID', 'STRING', 'CLOSE']); + shouldBeToken(result[2], "STRING", 'bar"baz'); }); it('tokenizes String params using single quotes with escapes quotes as STRING', function() { var result = tokenize("{{ foo 'bar\\'baz' }}"); - result.should.match_tokens(['OPEN', 'ID', 'STRING', 'CLOSE']); - result[2].should.be_token("STRING", "bar'baz"); + shouldMatchTokens(result, ['OPEN', 'ID', 'STRING', 'CLOSE']); + shouldBeToken(result[2], "STRING", "bar'baz"); }); it('tokenizes numbers', function() { var result = tokenize('{{ foo 1 }}'); - result.should.match_tokens(['OPEN', 'ID', 'INTEGER', 'CLOSE']); - result[2].should.be_token("INTEGER", "1"); + shouldMatchTokens(result, ['OPEN', 'ID', 'INTEGER', 'CLOSE']); + shouldBeToken(result[2], "INTEGER", "1"); result = tokenize('{{ foo -1 }}'); - result.should.match_tokens(['OPEN', 'ID', 'INTEGER', 'CLOSE']); - result[2].should.be_token("INTEGER", "-1"); + shouldMatchTokens(result, ['OPEN', 'ID', 'INTEGER', 'CLOSE']); + shouldBeToken(result[2], "INTEGER", "-1"); }); it('tokenizes booleans', function() { var result = tokenize('{{ foo true }}'); - result.should.match_tokens(['OPEN', 'ID', 'BOOLEAN', 'CLOSE']); - result[2].should.be_token("BOOLEAN", "true"); + shouldMatchTokens(result, ['OPEN', 'ID', 'BOOLEAN', 'CLOSE']); + 
shouldBeToken(result[2], "BOOLEAN", "true"); result = tokenize('{{ foo false }}'); - result.should.match_tokens(['OPEN', 'ID', 'BOOLEAN', 'CLOSE']); - result[2].should.be_token("BOOLEAN", "false"); + shouldMatchTokens(result, ['OPEN', 'ID', 'BOOLEAN', 'CLOSE']); + shouldBeToken(result[2], "BOOLEAN", "false"); }); it('tokenizes hash arguments', function() { var result = tokenize("{{ foo bar=baz }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); result = tokenize("{{ foo bar baz=bat }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); result = tokenize("{{ foo bar baz=1 }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'INTEGER', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'INTEGER', 'CLOSE']); result = tokenize("{{ foo bar baz=true }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'BOOLEAN', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'BOOLEAN', 'CLOSE']); result = tokenize("{{ foo bar baz=false }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'BOOLEAN', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'BOOLEAN', 'CLOSE']); result = tokenize("{{ foo bar\n baz=bat }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'CLOSE']); result = tokenize("{{ foo bar baz=\"bat\" }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'STRING', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'STRING', 'CLOSE']); result = tokenize("{{ foo bar baz=\"bat\" bam=wot }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'STRING', 'ID', 
'EQUALS', 'ID', 'CLOSE']); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'STRING', 'ID', 'EQUALS', 'ID', 'CLOSE']); result = tokenize("{{foo omg bar=baz bat=\"bam\"}}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'ID', 'EQUALS', 'STRING', 'CLOSE']); - result[2].should.be_token("ID", "omg"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'ID', 'EQUALS', 'ID', 'ID', 'EQUALS', 'STRING', 'CLOSE']); + shouldBeToken(result[2], "ID", "omg"); }); it('tokenizes special @ identifiers', function() { var result = tokenize("{{ @foo }}"); - result.should.match_tokens(['OPEN', 'DATA', 'ID', 'CLOSE']); - result[2].should.be_token("ID", "foo"); + shouldMatchTokens(result, ['OPEN', 'DATA', 'ID', 'CLOSE']); + shouldBeToken(result[2], "ID", "foo"); result = tokenize("{{ foo @bar }}"); - result.should.match_tokens(['OPEN', 'ID', 'DATA', 'ID', 'CLOSE']); - result[3].should.be_token("ID", "bar"); + shouldMatchTokens(result, ['OPEN', 'ID', 'DATA', 'ID', 'CLOSE']); + shouldBeToken(result[3], "ID", "bar"); result = tokenize("{{ foo bar=@baz }}"); - result.should.match_tokens(['OPEN', 'ID', 'ID', 'EQUALS', 'DATA', 'ID', 'CLOSE']); - result[5].should.be_token("ID", "baz"); + shouldMatchTokens(result, ['OPEN', 'ID', 'ID', 'EQUALS', 'DATA', 'ID', 'CLOSE']); + shouldBeToken(result[5], "ID", "baz"); }); it('does not time out in a mustache with a single } followed by EOF', function() { - tokenize("{{foo}").should.match_tokens(['OPEN', 'ID']); + shouldMatchTokens(tokenize("{{foo}"), ['OPEN', 'ID']); }); it('does not time out in a mustache when invalid ID characters are used', function() { - tokenize("{{foo & }}").should.match_tokens(['OPEN', 'ID']); + shouldMatchTokens(tokenize("{{foo & }}"), ['OPEN', 'ID']); }); }); |