author    kpdecker <kpdecker@gmail.com>  2013-12-23 19:22:30 -0600
committer kpdecker <kpdecker@gmail.com>  2013-12-23 19:22:30 -0600
commit    40e1837b14f1fa9d368cd24b95ea2d6116fff1f4 (patch)
tree      147e9a730a234473fb03e528c6a167c69e8e475c /spec/tokenizer.js
parent    ddea5be2a49ab8c3defac4b08bd8f6f14a3bf905 (diff)
parent    9d353bd3dd24b1e975ddf14a77179d4ab016706c (diff)
Merge branch 'fix-escapes' of github.com:dmarcotte/handlebars.js into dmarcotte-fix-escapes
Conflicts: spec/tokenizer.js
Diffstat (limited to 'spec/tokenizer.js')
-rw-r--r--   spec/tokenizer.js   22
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/spec/tokenizer.js b/spec/tokenizer.js
index 3732322..80f28ab 100644
--- a/spec/tokenizer.js
+++ b/spec/tokenizer.js
@@ -87,15 +87,15 @@ describe('Tokenizer', function() {
it('supports escaping multiple escape characters', function() {
var result = tokenize("{{foo}} \\\\{{bar}} \\\\{{baz}}");
- shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE']);
+ shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE']);
- shouldBeToken(result[3], "CONTENT", " \\");
- shouldBeToken(result[5], "ID", "bar");
- shouldBeToken(result[7], "CONTENT", " \\");
- shouldBeToken(result[9], "ID", "baz");
+ shouldBeToken(result[3], "CONTENT", " \\");
+ shouldBeToken(result[5], "ID", "bar");
+ shouldBeToken(result[7], "CONTENT", " \\");
+ shouldBeToken(result[9], "ID", "baz");
});
- it('supports mixed escaped delimiters and escaped escape characters', function() {
+ it('supports escaped mustaches after escaped escape characters', function() {
var result = tokenize("{{foo}} \\\\{{bar}} \\{{baz}}");
shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT']);
@@ -106,6 +106,16 @@ describe('Tokenizer', function() {
shouldBeToken(result[8], "CONTENT", "{{baz}}");
});
+ it('supports escaped escape characters after escaped mustaches', function() {
+ var result = tokenize("{{foo}} \\{{bar}} \\\\{{baz}}");
+ shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT', 'OPEN', 'ID', 'CLOSE']);
+
+ shouldBeToken(result[4], "CONTENT", "{{bar}} ");
+ shouldBeToken(result[5], "CONTENT", "\\");
+ shouldBeToken(result[6], "OPEN", "{{");
+ shouldBeToken(result[7], "ID", "baz");
+ });
+
it('supports escaped escape character on a triple stash', function() {
var result = tokenize("{{foo}} \\\\{{{bar}}} {{baz}}");
shouldMatchTokens(result, ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN_UNESCAPED', 'ID', 'CLOSE_UNESCAPED', 'CONTENT', 'OPEN', 'ID', 'CLOSE']);
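For context, a minimal sketch (not part of the diff above) of how the escape sequences these tokenizer tests exercise behave once a template is compiled. The template string, context values, and expected output in the comments are illustrative assumptions based on documented Handlebars escaping rules, not taken from this commit.

// Illustrative only: '\{{bar}}' is an escaped mustache and is emitted
// literally, while '\\{{baz}}' is an escaped escape character, so a
// single '\' is emitted and {{baz}} is evaluated as a normal mustache.
var Handlebars = require('handlebars');
var template = Handlebars.compile('{{foo}} \\{{bar}} \\\\{{baz}}');
console.log(template({ foo: 'FOO', baz: 'BAZ' }));
// Assumed output with the fix-escapes behaviour merged here:
// "FOO {{bar}} \BAZ"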