author    Samy Pessé <samypesse@gmail.com>    2016-12-22 13:12:16 +0100
committer Samy Pessé <samypesse@gmail.com>    2016-12-22 13:12:16 +0100
commit    97f2c333a87b9d939b5a7dc2884590c971b53291 (patch)
tree      a22824b02d84a89e59c458c8af7d3494561d43f6 /packages/gitbook-html/test/summary.js
parent    627e6dd866f77ff497a21f0b706490b82e40ea0e (diff)
Import and adapt gitbook-html
Refactor to remove lodash and q as dependencies
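
The lodash and q removal named in the commit message does not show up in this test file, which only swaps assertion libraries; elsewhere in the package it is a mechanical rewrite of promise and collection helpers onto platform built-ins. A minimal sketch of that pattern, where readAllWithQ/readAllNative are hypothetical helpers for illustration and not taken from this commit:

    const fs = require('fs');

    // Before: q for promises, lodash for collection helpers
    // (readAllWithQ is a hypothetical helper, not part of this diff)
    const Q = require('q');
    const _ = require('lodash');

    function readAllWithQ(paths) {
        // Q.nfcall adapts fs.readFile's Node-style callback to a promise
        return Q.all(_.map(paths, (p) => Q.nfcall(fs.readFile, p, 'utf8')));
    }

    // After: native Promise and Array.prototype.map, no extra dependencies
    function readAllNative(paths) {
        return Promise.all(paths.map((p) =>
            new Promise((resolve, reject) => {
                fs.readFile(p, 'utf8', (err, data) => (err ? reject(err) : resolve(data)));
            })
        ));
    }
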
Diffstat (limited to 'packages/gitbook-html/test/summary.js')
-rwxr-xr-x  packages/gitbook-html/test/summary.js | 100
1 file changed, 51 insertions(+), 49 deletions(-)
diff --git a/packages/gitbook-html/test/summary.js b/packages/gitbook-html/test/summary.js
index 03be73f..ea27fb3 100755
--- a/packages/gitbook-html/test/summary.js
+++ b/packages/gitbook-html/test/summary.js
@@ -1,40 +1,40 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var summary = require('../').summary;
+const summary = require('../src').summary;
-describe('Summary parsing', function () {
- var LEXED, PART;
- var LEXED_EMPTY;
+describe('Summary', () => {
+ let LEXED, PART;
+ let LEXED_EMPTY;
- before(function() {
- var CONTENT = fs.readFileSync(
+ before(() => {
+ const CONTENT = fs.readFileSync(
path.join(__dirname, './fixtures/SUMMARY.html'), 'utf8');
LEXED = summary(CONTENT);
PART = LEXED.parts[0];
- var CONTENT_EMPTY = fs.readFileSync(
+ const CONTENT_EMPTY = fs.readFileSync(
path.join(__dirname, './fixtures/SUMMARY-EMPTY.html'), 'utf8');
LEXED_EMPTY = summary(CONTENT_EMPTY);
});
- describe('Parts', function() {
- it('should detect parts', function() {
- assert.equal(LEXED.parts.length, 3);
+ describe('Parts', () => {
+ it('should detect parts', () => {
+ expect(LEXED.parts.length).toBe(3);
});
- it('should detect title', function() {
- assert.equal(LEXED.parts[0].title, '');
- assert.equal(LEXED.parts[1].title, 'Part 2');
- assert.equal(LEXED.parts[2].title, '');
+ it('should detect title', () => {
+ expect(LEXED.parts[0].title).toBe('');
+ expect(LEXED.parts[1].title).toBe('Part 2');
+ expect(LEXED.parts[2].title).toBe('');
});
- it('should detect empty parts', function() {
- var partTitles = LEXED_EMPTY.parts.map(function (part) {
+ it('should detect empty parts', () => {
+ const partTitles = LEXED_EMPTY.parts.map((part) => {
return part.title;
});
- var expectedTitles = [
+ const expectedTitles = [
'First empty part',
'Part 1',
'',
@@ -43,49 +43,51 @@ describe('Summary parsing', function () {
'Penultimate empty part',
'Last empty part'
];
- assert.equal(LEXED_EMPTY.parts.length, 7);
- expectedTitles.forEach(function (title, index) {
- assert.equal(partTitles[index], title);
+ expect(LEXED_EMPTY.parts.length).toBe(7);
+ expectedTitles.forEach((title, index) => {
+ expect(partTitles[index]).toBe(title);
});
});
});
- it('should detect chapters', function() {
- assert.equal(PART.articles.length, 5);
+ it('should detect chapters', () => {
+ expect(PART.articles.length).toBe(5);
});
- it('should detect chapters in other parts', function() {
- assert.equal(LEXED.parts[1].articles.length, 1);
+ it('should detect chapters in other parts', () => {
+ expect(LEXED.parts[1].articles.length).toBe(1);
});
- it('should support articles', function() {
- assert.equal(PART.articles[0].articles.length, 2);
- assert.equal(PART.articles[1].articles.length, 0);
- assert.equal(PART.articles[2].articles.length, 0);
+ it('should support articles', () => {
+ expect(PART.articles[0].articles.length).toBe(2);
+ expect(PART.articles[1].articles.length).toBe(0);
+ expect(PART.articles[2].articles.length).toBe(0);
});
- it('should detect paths and titles', function() {
- assert(PART.articles[0].ref);
- assert(PART.articles[1].ref);
- assert(PART.articles[2].ref);
- assert(PART.articles[3].ref);
- assert.equal(PART.articles[4].ref, null);
+ it('should detect paths and titles', () => {
+ expect(PART.articles[0].ref).toExist();
+ expect(PART.articles[1].ref).toExist();
+ expect(PART.articles[2].ref).toExist();
+ expect(PART.articles[3].ref).toExist();
+ expect(PART.articles[4].ref).toNotExist();
- assert(PART.articles[0].title);
- assert(PART.articles[1].title);
- assert(PART.articles[2].title);
- assert(PART.articles[3].title);
- assert(PART.articles[4].title);
+ expect(PART.articles[0].title).toExist();
+ expect(PART.articles[1].title).toExist();
+ expect(PART.articles[2].title).toExist();
+ expect(PART.articles[3].title).toExist();
+ expect(PART.articles[4].title).toExist();
});
- it('should normalize paths from .md', function() {
- assert.equal(PART.articles[0].ref,'chapter-1/README.md');
- assert.equal(PART.articles[1].ref,'chapter-2/README.md');
- assert.equal(PART.articles[2].ref,'chapter-3/README.md');
+ it('should normalize paths from .md', () => {
+ expect(PART.articles[0].ref).toBe('chapter-1/README.md');
+ expect(PART.articles[1].ref).toBe('chapter-2/README.md');
+ expect(PART.articles[2].ref).toBe('chapter-3/README.md');
});
- it('should correctly convert it to text', function() {
- var text = summary.toText(LEXED);
- assertObjectsEqual(summary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = summary.toText(LEXED);
+ const parsed = summary(text);
+
+ expect(parsed).toEqual(LEXED);
});
});
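
Read as a whole, the diff is a one-to-one translation between assertion styles. Assuming mocha as the runner (implied by describe/it/before) and the expect package at 1.x, whose matchers include toExist and toNotExist, the correspondence looks like this runnable sketch:

    const expect = require('expect'); // expect@1.x (pre-Jest)

    const part = { title: 'Part 2', ref: null };

    expect(part.title).toBe('Part 2'); // === comparison (assert.equal used ==)
    expect(part.title).toExist();      // truthy check, replaces bare assert(...)
    expect(part.ref).toNotExist();     // replaces assert.equal(ref, null)
    expect(part).toEqual({ title: 'Part 2', ref: null }); // deep equality,
                                       // replaces the custom assertObjectsEqual helper

One subtlety: assert.equal compares with ==, while toBe compares with ===, so the migrated tests are strictly tighter than the originals; the custom assertObjectsEqual helper also disappears in favor of expect's built-in deep toEqual.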