summaryrefslogtreecommitdiffstats
path: root/packages/gitbook-asciidoc/test
diff options
context:
space:
mode:
authorSamy Pessé <samypesse@gmail.com>2016-12-22 12:31:22 +0100
committerSamy Pessé <samypesse@gmail.com>2016-12-22 12:31:22 +0100
commiteb0bf31baa6cb903ba4242ae5a3866ff67aeb97c (patch)
treee2347c9e8c58dd3c2681a6028b5f8d69c40d4e04 /packages/gitbook-asciidoc/test
parent1c1e58b8641cac34b5f3d7c05951f507557fcb1a (diff)
downloadgitbook-eb0bf31baa6cb903ba4242ae5a3866ff67aeb97c.zip
gitbook-eb0bf31baa6cb903ba4242ae5a3866ff67aeb97c.tar.gz
gitbook-eb0bf31baa6cb903ba4242ae5a3866ff67aeb97c.tar.bz2
Import gitbook-asciidoc
Diffstat (limited to 'packages/gitbook-asciidoc/test')
-rwxr-xr-xpackages/gitbook-asciidoc/test/glossary.js33
-rw-r--r--packages/gitbook-asciidoc/test/helper.js6
-rw-r--r--packages/gitbook-asciidoc/test/inline.js13
-rwxr-xr-xpackages/gitbook-asciidoc/test/langs.js33
-rwxr-xr-xpackages/gitbook-asciidoc/test/page.js20
-rwxr-xr-xpackages/gitbook-asciidoc/test/readme.js28
-rwxr-xr-xpackages/gitbook-asciidoc/test/summary.js71
7 files changed, 99 insertions, 105 deletions
diff --git a/packages/gitbook-asciidoc/test/glossary.js b/packages/gitbook-asciidoc/test/glossary.js
index f94d046..23c850a 100755
--- a/packages/gitbook-asciidoc/test/glossary.js
+++ b/packages/gitbook-asciidoc/test/glossary.js
@@ -1,29 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var glossary = require('../').glossary;
+const glossary = require('../src').glossary;
-describe('Glossary parsing', function () {
- var LEXED;
+describe('Glossary parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.adoc'), 'utf8');
LEXED = glossary(CONTENT);
});
- it('should only get heading + paragraph pairs', function() {
- assert.equal(LEXED.length, 4);
+ it('should only get heading + paragraph pairs', () => {
+ expect(LEXED.length).toBe(4);
});
- it('should output simple name/description objects', function() {
- assert.equal(true, !(LEXED.some(function(e) {
- return !Boolean(e.name && e.description);
- })));
+ it('should output simple name/description objects', () => {
+ expect(!(LEXED.some(e => !Boolean(e.name && e.description)))).toBeTruthy();
});
- it('should correctly convert it to text', function() {
- var text = glossary.toText(LEXED);
- assertObjectsEqual(glossary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = glossary.toText(LEXED);
+ const parsed = glossary(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-asciidoc/test/helper.js b/packages/gitbook-asciidoc/test/helper.js
deleted file mode 100644
index 1e310f7..0000000
--- a/packages/gitbook-asciidoc/test/helper.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var assert = require("assert");
-
-global.assertObjectsEqual = function(o1, o2) {
- assert.equal(JSON.stringify(o1, null, 4), JSON.stringify(o2, null, 4));
-};
-
diff --git a/packages/gitbook-asciidoc/test/inline.js b/packages/gitbook-asciidoc/test/inline.js
index 17d66a3..e878d02 100644
--- a/packages/gitbook-asciidoc/test/inline.js
+++ b/packages/gitbook-asciidoc/test/inline.js
@@ -1,11 +1,10 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const expect = require('expect');
-var inline = require('../').inline;
+const inline = require('../src').inline;
-describe('Inline', function () {
- it('should render inline AsciiDoc', function() {
- assert.equal(inline('Hello **World**').content, 'Hello <strong>World</strong>');
+describe('Inline', () => {
+ it('should render inline AsciiDoc', () => {
+ const parsed = inline('Hello **World**');
+ expect(parsed.content).toEqual('Hello <strong>World</strong>');
});
});
diff --git a/packages/gitbook-asciidoc/test/langs.js b/packages/gitbook-asciidoc/test/langs.js
index 52a74c0..6673fb2 100755
--- a/packages/gitbook-asciidoc/test/langs.js
+++ b/packages/gitbook-asciidoc/test/langs.js
@@ -1,27 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var langs = require('../').langs;
+const langs = require('../src').langs;
-describe('Languages parsing', function () {
- var LEXED;
+describe('Languages parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.adoc'), 'utf8');
LEXED = langs(CONTENT);
});
- it('should detect paths and titles', function() {
- assert.equal(LEXED[0].ref,'en/');
- assert.equal(LEXED[0].title,'English');
+ it('should detect paths and titles', () => {
+ expect(LEXED[0].ref).toBe('en/');
+        expect(LEXED[0].title).toBe('English');
- assert.equal(LEXED[1].ref,'fr/');
- assert.equal(LEXED[1].title,'French');
+ expect(LEXED[1].ref).toBe('fr/');
+ expect(LEXED[1].title).toBe('French');
});
- it('should correctly convert it to text', function() {
- var text = langs.toText(LEXED);
- assertObjectsEqual(langs(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = langs.toText(LEXED);
+ const parsed = langs(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-asciidoc/test/page.js b/packages/gitbook-asciidoc/test/page.js
index 3a28c5f..cd7a29a 100755
--- a/packages/gitbook-asciidoc/test/page.js
+++ b/packages/gitbook-asciidoc/test/page.js
@@ -1,18 +1,18 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var page = require('../').page;
+const page = require('../src').page;
-describe('Page parsing', function() {
- var LEXED;
+describe('Page parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.adoc'), 'utf8');
LEXED = page(CONTENT);
});
- it('should gen content', function() {
- assert(LEXED.content);
+ it('should gen content', () => {
+ expect(LEXED.content).toExist();
});
});
diff --git a/packages/gitbook-asciidoc/test/readme.js b/packages/gitbook-asciidoc/test/readme.js
index 2f4f601..e6a8009 100755
--- a/packages/gitbook-asciidoc/test/readme.js
+++ b/packages/gitbook-asciidoc/test/readme.js
@@ -1,30 +1,30 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const assert = require('assert');
-var readme = require('../').readme;
+const readme = require('../src').readme;
-describe('Readme parsing', function () {
- var LEXED;
+describe('Readme parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.adoc'), 'utf8');
LEXED = readme(CONTENT);
});
- it('should contain a title', function() {
+ it('should contain a title', () => {
assert(LEXED.title);
});
- it('should contain a description', function() {
+ it('should contain a description', () => {
assert(LEXED.description);
});
- it('should extract the right title', function() {
- assert.equal(LEXED.title, "This is the title");
+ it('should extract the right title', () => {
+ assert.equal(LEXED.title, 'This is the title');
});
- it('should extract the right description', function() {
- assert.equal(LEXED.description, "This is the book description.");
+ it('should extract the right description', () => {
+ assert.equal(LEXED.description, 'This is the book description.');
});
});
diff --git a/packages/gitbook-asciidoc/test/summary.js b/packages/gitbook-asciidoc/test/summary.js
index d65aadf..ffd0bc0 100755
--- a/packages/gitbook-asciidoc/test/summary.js
+++ b/packages/gitbook-asciidoc/test/summary.js
@@ -1,55 +1,56 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var summary = require('../').summary;
+const summary = require('../src').summary;
-describe('Summary parsing', function () {
- var LEXED, PART;
+describe('Summary parsing', () => {
+ let LEXED, PART;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/SUMMARY.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/SUMMARY.adoc'), 'utf8');
LEXED = summary(CONTENT);
PART = LEXED.parts[0];
// todo: add support for parts in asciidoc
});
- it('should detect parts', function() {
- assert.equal(LEXED.parts.length, 1);
+ it('should detect parts', () => {
+ expect(LEXED.parts.length).toBe(1);
});
- it('should detect articles', function() {
- assert.equal(PART.articles.length, 5);
+ it('should detect articles', () => {
+ expect(PART.articles.length).toBe(5);
});
- it('should support articles', function() {
- assert.equal(PART.articles[0].articles.length, 2);
- assert.equal(PART.articles[1].articles.length, 0);
- assert.equal(PART.articles[2].articles.length, 0);
+ it('should support articles', () => {
+ expect(PART.articles[0].articles.length).toBe(2);
+ expect(PART.articles[1].articles.length).toBe(0);
+ expect(PART.articles[2].articles.length).toBe(0);
});
- it('should detect paths and titles', function() {
- assert(PART.articles[0].ref);
- assert(PART.articles[1].ref);
- assert(PART.articles[2].ref);
- assert(PART.articles[3].ref);
- assert.equal(PART.articles[4].ref, null);
-
- assert(PART.articles[0].title);
- assert(PART.articles[1].title);
- assert(PART.articles[2].title);
- assert(PART.articles[3].title);
- assert(PART.articles[4].title);
+ it('should detect paths and titles', () => {
+ expect(PART.articles[0].ref).toExist();
+ expect(PART.articles[1].ref).toExist();
+ expect(PART.articles[2].ref).toExist();
+ expect(PART.articles[3].ref).toExist();
+ expect(PART.articles[4].ref).toBe(null);
+
+ expect(PART.articles[0].title).toExist();
+ expect(PART.articles[1].title).toExist();
+ expect(PART.articles[2].title).toExist();
+ expect(PART.articles[3].title).toExist();
+ expect(PART.articles[4].title).toExist();
});
- it('should normalize paths from .md', function() {
- assert.equal(PART.articles[0].ref, 'chapter-1/README.adoc');
- assert.equal(PART.articles[1].ref, 'chapter-2/README.adoc');
- assert.equal(PART.articles[2].ref, 'chapter-3/README.adoc');
+ it('should normalize paths from .md', () => {
+ expect(PART.articles[0].ref).toBe('chapter-1/README.adoc');
+ expect(PART.articles[1].ref).toBe('chapter-2/README.adoc');
+ expect(PART.articles[2].ref).toBe('chapter-3/README.adoc');
});
- it('should correctly convert it to text', function() {
- var text = summary.toText(LEXED);
- assertObjectsEqual(summary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = summary.toText(LEXED);
+ const parsed = summary(text);
+ expect(parsed).toEqual(LEXED);
});
});