summaryrefslogtreecommitdiffstats
path: root/packages/gitbook-markdown/test
diff options
context:
space:
mode:
authorSamy Pessé <samypesse@gmail.com>2016-12-22 15:51:59 +0100
committerSamy Pessé <samypesse@gmail.com>2016-12-22 15:51:59 +0100
commitc4e512477e3cbe1344caaa2f1cc56e4bb402ad79 (patch)
treeca43a054bf84a49b48c942b754153b5459eed3ee /packages/gitbook-markdown/test
parent6e0fd5d5d44fc2c97e075c4bbff188a0a7e797c1 (diff)
downloadgitbook-c4e512477e3cbe1344caaa2f1cc56e4bb402ad79.zip
gitbook-c4e512477e3cbe1344caaa2f1cc56e4bb402ad79.tar.gz
gitbook-c4e512477e3cbe1344caaa2f1cc56e4bb402ad79.tar.bz2
Import gitbook-markdown
Diffstat (limited to 'packages/gitbook-markdown/test')
-rw-r--r--packages/gitbook-markdown/test/glossary.js33
-rw-r--r--packages/gitbook-markdown/test/helper.js6
-rw-r--r--packages/gitbook-markdown/test/inline.js14
-rw-r--r--packages/gitbook-markdown/test/langs.js36
-rw-r--r--packages/gitbook-markdown/test/page.js85
-rw-r--r--packages/gitbook-markdown/test/readme.js32
-rw-r--r--packages/gitbook-markdown/test/summary.js81
7 files changed, 143 insertions, 144 deletions
diff --git a/packages/gitbook-markdown/test/glossary.js b/packages/gitbook-markdown/test/glossary.js
index 588859a..b38a78d 100644
--- a/packages/gitbook-markdown/test/glossary.js
+++ b/packages/gitbook-markdown/test/glossary.js
@@ -1,29 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var glossary = require('../').glossary;
+const glossary = require('../src').glossary;
-describe('Glossary parsing', function () {
- var LEXED;
+describe('Glossary', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.md'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.md'), 'utf8');
LEXED = glossary(CONTENT);
});
- it('should only get heading + paragraph pairs', function() {
- assert.equal(LEXED.length, 4);
+ it('should only get heading + paragraph pairs', () => {
+ expect(LEXED.length).toBe(4);
});
- it('should output simple name/description objects', function() {
- assert.equal(true, !(LEXED.some(function(e) {
- return !Boolean(e.name && e.description);
- })));
+ it('should output simple name/description objects', () => {
+ expect(!(LEXED.some(e => !Boolean(e.name && e.description)))).toBe(true);
});
- it('should correctly convert it to text', function() {
- var text = glossary.toText(LEXED);
- assertObjectsEqual(glossary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = glossary.toText(LEXED);
+ const parsed = glossary(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-markdown/test/helper.js b/packages/gitbook-markdown/test/helper.js
deleted file mode 100644
index 1e310f7..0000000
--- a/packages/gitbook-markdown/test/helper.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var assert = require("assert");
-
-global.assertObjectsEqual = function(o1, o2) {
- assert.equal(JSON.stringify(o1, null, 4), JSON.stringify(o2, null, 4));
-};
-
diff --git a/packages/gitbook-markdown/test/inline.js b/packages/gitbook-markdown/test/inline.js
index 4ed1006..834fd3f 100644
--- a/packages/gitbook-markdown/test/inline.js
+++ b/packages/gitbook-markdown/test/inline.js
@@ -1,11 +1,9 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const expect = require('expect');
+const inline = require('../src').inline;
-var inline = require('../').inline;
-
-describe('Inline', function () {
- it('should render inline markdown', function() {
- assert.equal(inline('Hello **World**').content, 'Hello <strong>World</strong>');
+describe('Inline', () => {
+ it('should render inline markdown', () => {
+ const parsed = inline('Hello **World**');
+ expect(parsed.content).toBe('Hello <strong>World</strong>');
});
});
diff --git a/packages/gitbook-markdown/test/langs.js b/packages/gitbook-markdown/test/langs.js
index 3c2f108..3986de9 100644
--- a/packages/gitbook-markdown/test/langs.js
+++ b/packages/gitbook-markdown/test/langs.js
@@ -1,28 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
+const langs = require('../src').langs;
-var langs = require('../').langs;
+describe('Languages', () => {
+ let LEXED;
-describe('Languages parsing', function () {
- var LEXED;
-
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.md'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.md'), 'utf8');
LEXED = langs(CONTENT);
});
- it('should detect paths and titles', function() {
- assert.equal(LEXED.length, 2);
- assert.equal(LEXED[0].ref,'en/');
- assert.equal(LEXED[0].title,'English');
+ it('should detect paths and titles', () => {
+ expect(LEXED.length).toEqual(2);
+ expect(LEXED[0].ref).toEqual('en/');
+ expect(LEXED[0].title).toEqual('English');
- assert.equal(LEXED[1].ref,'fr/');
- assert.equal(LEXED[1].title,'French');
+ expect(LEXED[1].ref).toEqual('fr/');
+ expect(LEXED[1].title).toEqual('French');
});
- it('should correctly convert it to text', function() {
- var text = langs.toText(LEXED);
- assertObjectsEqual(langs(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = langs.toText(LEXED);
+ const parsed = langs(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-markdown/test/page.js b/packages/gitbook-markdown/test/page.js
index a2e21d1..9ea48f4 100644
--- a/packages/gitbook-markdown/test/page.js
+++ b/packages/gitbook-markdown/test/page.js
@@ -1,74 +1,81 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var page = require('../').page;
+const page = require('../src').page;
-describe('Page parsing', function() {
- var LEXED;
+describe('Page', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.md'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.md'), 'utf8');
LEXED = page(CONTENT);
});
- it('should gen content', function() {
- assert(LEXED.content);
+ it('should gen content', () => {
+ expect(LEXED.content).toExist();
});
- it('should not add id to headings', function() {
- assert.equal(page('# Hello').content, '<h1>Hello</h1>');
- assert.equal(page('# Hello {#test}').content, '<h1 id="test">Hello</h1>');
+ it('should not add id to headings', () => {
+ expect(page('# Hello').content).toBe('<h1>Hello</h1>');
+ expect(page('# Hello {#test}').content).toBe('<h1 id="test">Hello</h1>');
});
- it('should escape codeblocks in preparation (1)', function() {
- assert.equal(page.prepare("Hello `world`"), 'Hello {% raw %}`world`{% endraw %}\n\n');
- assert.equal(page.prepare("Hello `world test`"), 'Hello {% raw %}`world test`{% endraw %}\n\n');
- assert.equal(page.prepare("Hello ```world test```"), 'Hello {% raw %}`world test`{% endraw %}\n\n');
- assert.equal(page.prepare("Hello\n```js\nworld test\n```\n"), 'Hello\n\n{% raw %}```js\nworld test\n```\n\n{% endraw %}');
- assert.equal(page.prepare("Hello\n```\ntest\n\tworld\n\ttest\n```"), 'Hello\n\n{% raw %}```\ntest\n world\n test\n```\n\n{% endraw %}');
+ it('should escape codeblocks in preparation (1)', () => {
+ expect(page.prepare('Hello `world`')).toBe('Hello {% raw %}`world`{% endraw %}\n\n');
+ expect(page.prepare('Hello `world test`')).toBe('Hello {% raw %}`world test`{% endraw %}\n\n');
+ expect(page.prepare('Hello ```world test```')).toBe('Hello {% raw %}`world test`{% endraw %}\n\n');
+ expect(page.prepare('Hello\n```js\nworld test\n```\n')).toBe('Hello\n\n{% raw %}```js\nworld test\n```\n\n{% endraw %}');
+ expect(page.prepare('Hello\n```\ntest\n\tworld\n\ttest\n```')).toBe('Hello\n\n{% raw %}```\ntest\n world\n test\n```\n\n{% endraw %}');
});
- it('should escape codeblocks in preparation (2)', function() {
- assert.equal(
- page.prepare("Hello\n\n\n\tworld\n\thello\n\n\ntest"),
+ it('should escape codeblocks in preparation (2)', () => {
+ expect(
+ page.prepare('Hello\n\n\n\tworld\n\thello\n\n\ntest')
+ ).toBe(
'Hello\n\n{% raw %}```\nworld\nhello```\n\n{% endraw %}test\n\n'
);
- assert.equal(
- page.prepare("Hello\n\n\n\tworld\n\thello\n\n\n"),
+ expect(
+ page.prepare('Hello\n\n\n\tworld\n\thello\n\n\n')
+ ).toBe(
'Hello\n\n{% raw %}```\nworld\nhello```\n\n{% endraw %}'
);
});
- it('should escape codeblocks with nunjucks tags', function() {
- assert.equal(
- page.prepare('Hello {{ "Bonjour" }} ```test```'),
+ it('should escape codeblocks with nunjucks tags', () => {
+ expect(
+ page.prepare('Hello {{ "Bonjour" }} ```test```')
+ ).toBe(
'Hello {{ "Bonjour" }} {% raw %}`test`{% endraw %}\n\n'
);
});
- it('should escape codeblocks with nunjucks tags in {% raw %} tags', function() {
- assert.equal(
- page.prepare('{% raw %}Hello {{ "Bonjour" }} ```test```{% endraw %}'),
+ it('should escape codeblocks with nunjucks tags in {% raw %} tags', () => {
+ expect(
+ page.prepare('{% raw %}Hello {{ "Bonjour" }} ```test```{% endraw %}')
+ ).toBe(
'{% raw %}Hello {{ "Bonjour" }} `test`{% endraw %}\n\n'
);
- assert.equal(
- page.prepare('{% raw %}Hello {{ "Bonjour" }} {% raw %}{% endraw %}```test```'),
+ expect(
+ page.prepare('{% raw %}Hello {{ "Bonjour" }} {% raw %}{% endraw %}```test```')
+ ).toBe(
'{% raw %}Hello {{ "Bonjour" }} {% raw %}{% endraw %}{% raw %}`test`{% endraw %}\n\n'
);
- assert.equal(
- page.prepare('```{% raw %}Hello {{ "Bonjour" }} {% raw %}```'),
+ expect(
+ page.prepare('```{% raw %}Hello {{ "Bonjour" }} {% raw %}```')
+ ).toBe(
'{% raw %}`{% raw %}Hello {{ "Bonjour" }} {% raw %}`{% endraw %}\n\n'
);
- assert.equal(
- page.prepare('```\ntest\n```\n\n\n### Test'),
+ expect(
+ page.prepare('```\ntest\n```\n\n\n### Test')
+ ).toBe(
'{% raw %}```\ntest\n```\n\n{% endraw %}### Test\n\n'
);
});
- it('should not process math', function() {
- assert.equal(page.prepare("Hello $world$"), 'Hello $world$\n\n');
- assert.equal(page.prepare("Hello $$world$$"), 'Hello $$world$$\n\n');
+ it('should not process math', () => {
+ expect(page.prepare('Hello $world$')).toBe('Hello $world$\n\n');
+ expect(page.prepare('Hello $$world$$')).toBe('Hello $$world$$\n\n');
});
});
diff --git a/packages/gitbook-markdown/test/readme.js b/packages/gitbook-markdown/test/readme.js
index dd059fb..fad9c53 100644
--- a/packages/gitbook-markdown/test/readme.js
+++ b/packages/gitbook-markdown/test/readme.js
@@ -1,30 +1,30 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var readme = require('../').readme;
+const readme = require('../src').readme;
-describe('Readme parsing', function () {
- var LEXED;
+describe('Readme', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.md'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.md'), 'utf8');
LEXED = readme(CONTENT);
});
- it('should contain a title', function() {
- assert(LEXED.title);
+ it('should contain a title', () => {
+ expect(LEXED.title).toExist();
});
- it('should contain a description', function() {
- assert(LEXED.description);
+ it('should contain a description', () => {
+ expect(LEXED.description).toExist();
});
- it('should extract the right title', function() {
- assert.equal(LEXED.title, "This is the title");
+ it('should extract the right title', () => {
+ expect(LEXED.title).toBe('This is the title');
});
- it('should extract the right description', function() {
- assert.equal(LEXED.description, "This is the book description.");
+ it('should extract the right description', () => {
+ expect(LEXED.description).toBe('This is the book description.');
});
});
diff --git a/packages/gitbook-markdown/test/summary.js b/packages/gitbook-markdown/test/summary.js
index 31dbff9..9748500 100644
--- a/packages/gitbook-markdown/test/summary.js
+++ b/packages/gitbook-markdown/test/summary.js
@@ -1,8 +1,8 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var summary = require('../').summary;
+const summary = require('../src').summary;
function lex(fixtureFile) {
return summary(
@@ -13,61 +13,62 @@ function lex(fixtureFile) {
);
}
-describe('Summary parsing', function () {
- var LEXED;
+describe('Summary', () => {
+ let LEXED, PART;
- before(function() {
+ before(() => {
LEXED = lex('SUMMARY.md');
PART = LEXED.parts[0];
});
- it('should detect chapters', function() {
- assert.equal(PART.articles.length, 5);
+ it('should detect chapters', () => {
+ expect(PART.articles.length).toBe(5);
});
- it('should support articles', function() {
- assert.equal(PART.articles[0].articles.length, 2);
- assert.equal(PART.articles[1].articles.length, 0);
- assert.equal(PART.articles[2].articles.length, 0);
+ it('should support articles', () => {
+ expect(PART.articles[0].articles.length).toBe(2);
+ expect(PART.articles[1].articles.length).toBe(0);
+ expect(PART.articles[2].articles.length).toBe(0);
});
- it('should detect paths and titles', function() {
- assert(PART.articles[0].ref);
- assert(PART.articles[1].ref);
- assert(PART.articles[2].ref);
- assert(PART.articles[3].ref);
- assert.equal(PART.articles[4].ref, null);
+ it('should detect paths and titles', () => {
+ expect(PART.articles[0].ref).toExist();
+ expect(PART.articles[1].ref).toExist();
+ expect(PART.articles[2].ref).toExist();
+ expect(PART.articles[3].ref).toExist();
+ expect(PART.articles[4].ref).toNotExist();
- assert(PART.articles[0].title);
- assert(PART.articles[1].title);
- assert(PART.articles[2].title);
- assert(PART.articles[3].title);
- assert(PART.articles[4].title);
+ expect(PART.articles[0].title).toExist();
+ expect(PART.articles[1].title).toExist();
+ expect(PART.articles[2].title).toExist();
+ expect(PART.articles[3].title).toExist();
+ expect(PART.articles[4].title).toExist();
});
- it('should normalize paths from .md', function() {
- assert.equal(PART.articles[0].ref, 'chapter-1/README.md');
- assert.equal(PART.articles[1].ref, 'chapter-2/README.md');
- assert.equal(PART.articles[2].ref, 'chapter-3/README.md');
+ it('should normalize paths from .md', () => {
+ expect(PART.articles[0].ref).toBe('chapter-1/README.md');
+ expect(PART.articles[1].ref).toBe('chapter-2/README.md');
+ expect(PART.articles[2].ref).toBe('chapter-3/README.md');
});
- it('should part parts', function() {
- var l = lex('SUMMARY_PARTS.md');
- assert.equal(l.parts.length, 3);
+    it('should parse parts', () => {
+ const l = lex('SUMMARY_PARTS.md');
+ expect(l.parts.length).toBe(3);
});
- it('should allow lists separated by whitespace', function() {
- var l = lex('SUMMARY_WHITESPACE.md');
- assert.equal(l.parts[0].articles.length, 5);
+ it('should allow lists separated by whitespace', () => {
+ const l = lex('SUMMARY_WHITESPACE.md');
+ expect(l.parts[0].articles.length).toBe(5);
});
- it('should allow ignore empty entries', function() {
- var l = lex('SUMMARY_EMPTY.md');
- assert.equal(l.parts[0].articles.length, 1);
+    it('should ignore empty entries', () => {
+ const l = lex('SUMMARY_EMPTY.md');
+ expect(l.parts[0].articles.length).toBe(1);
});
- it('should correctly convert it to text', function() {
- var text = summary.toText(LEXED);
- assertObjectsEqual(summary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = summary.toText(LEXED);
+ const parsed = summary(text);
+ expect(parsed).toEqual(LEXED);
});
});