author    | Samy Pessé <samypesse@gmail.com> | 2016-02-20 15:31:59 +0100
committer | Samy Pessé <samypesse@gmail.com> | 2016-12-22 15:00:51 +0100
commit    | dbee17ddec2e786fbf02572e7bf6050c207b492f (patch)
tree      | 8d93bd8eb985e3bd8132612935bc5a1583fe15e3 /packages/gitbook-markdown/test
parent    | 9b3888005d5098079056fa889a84e75cf3c57670 (diff)
Use gitbook-html as base parser
Diffstat (limited to 'packages/gitbook-markdown/test')
-rw-r--r-- | packages/gitbook-markdown/test/fixtures/GLOSSARY.md      | 10
-rw-r--r-- | packages/gitbook-markdown/test/fixtures/SUMMARY_PARTS.md | 11
-rw-r--r-- | packages/gitbook-markdown/test/glossary.js               | 14
-rw-r--r-- | packages/gitbook-markdown/test/langs.js                  | 10
-rw-r--r-- | packages/gitbook-markdown/test/page.js                   | 21
-rw-r--r-- | packages/gitbook-markdown/test/readme.js                 | 10
-rw-r--r-- | packages/gitbook-markdown/test/summary.js                | 61
7 files changed, 78 insertions, 59 deletions
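The diff below moves fixture loading into mocha `before()` hooks and switches the summary tests from a flat `chapters` array to a parts-based structure (`parts[n].articles`). As a rough sketch of what those assertions imply (assuming the package exposes a `summary` parser the same way the other tests use `glossary`, `langs`, `page` and `readme`), parsing a summary that contains part headings might look like this:

```js
// Illustration only; not part of this commit. The export name and the exact
// return shape are assumptions inferred from the test assertions below.
var summary = require('gitbook-markdown').summary;

var parsed = summary([
    '# Summary',
    '',
    '* [Chapter 1](chapter-1/README.md)',
    '',
    '## Part 2',
    '',
    '* [Chapter 2](chapter-2/README.md)'
].join('\n'));

// Articles now hang off parts instead of a flat "chapters" array.
console.log(parsed.parts.length);               // expected: 2
console.log(parsed.parts[0].articles[0].path);  // expected: 'chapter-1/README.md'
```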
diff --git a/packages/gitbook-markdown/test/fixtures/GLOSSARY.md b/packages/gitbook-markdown/test/fixtures/GLOSSARY.md
index 5969902..19142d8 100644
--- a/packages/gitbook-markdown/test/fixtures/GLOSSARY.md
+++ b/packages/gitbook-markdown/test/fixtures/GLOSSARY.md
@@ -1,15 +1,15 @@
-# Magic
+## Magic
Sufficiently advanced technology, beyond the understanding of the observer producing a sense of wonder.
Hello, I am random noise in the middle of this beautiful Glossary. (Really astonishing !)
-# PHP
+## PHP
An atrocious language, invented for the sole purpose of inflicting pain and suffering amongst the proframming wizards of this world.
-# Clojure
+## Clojure
Lisp re-invented for hipsters.
-# Go
+## Go
Go Go Google [Wow](https://www.google.com)
Fantastic, I love code too ! :
@@ -25,6 +25,6 @@ def f(x):
print(f(9))
```
-# Gitbook
+## Gitbook
Awesome project. Really amazing, I'm really at a loss for words ...
diff --git a/packages/gitbook-markdown/test/fixtures/SUMMARY_PARTS.md b/packages/gitbook-markdown/test/fixtures/SUMMARY_PARTS.md
new file mode 100644
index 0000000..eaba1a7
--- /dev/null
+++ b/packages/gitbook-markdown/test/fixtures/SUMMARY_PARTS.md
@@ -0,0 +1,11 @@
+# Summary
+
+* [Chapter 1](chapter-1/README.md)
+
+## Part 2
+
+* [Chapter 2](chapter-2/README.md)
+
+## Part 3
+
+* [Chapter 3](chapter-3/README.md)
diff --git a/packages/gitbook-markdown/test/glossary.js b/packages/gitbook-markdown/test/glossary.js
index efa77db..3231020 100644
--- a/packages/gitbook-markdown/test/glossary.js
+++ b/packages/gitbook-markdown/test/glossary.js
@@ -4,10 +4,14 @@ var assert = require('assert');
var glossary = require('../').glossary;
-var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.md'), 'utf8');
-var LEXED = glossary(CONTENT);
-
describe('Glossary parsing', function () {
+    var LEXED;
+
+    before(function() {
+        var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.md'), 'utf8');
+        LEXED = glossary(CONTENT);
+    });
+
    it('should only get heading + paragraph pairs', function() {
        assert.equal(LEXED.length, 5);
    });
@@ -19,7 +23,7 @@ describe('Glossary parsing', function () {
    });
    it('should correctly convert it to text', function() {
-        var text = glossary.toText(LEXED);
-        assertObjectsEqual(glossary(text), LEXED);
+        var text = glossary.toText(LEXED);
+        assertObjectsEqual(glossary(text), LEXED);
    });
});
diff --git a/packages/gitbook-markdown/test/langs.js b/packages/gitbook-markdown/test/langs.js
index e6c3dbd..bb75d11 100644
--- a/packages/gitbook-markdown/test/langs.js
+++ b/packages/gitbook-markdown/test/langs.js
@@ -4,10 +4,14 @@ var assert = require('assert');
var langs = require('../').langs;
-var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.md'), 'utf8');
-var LEXED = langs(CONTENT);
-
describe('Languages parsing', function () {
+    var LEXED;
+
+    before(function() {
+        var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.md'), 'utf8');
+        LEXED = langs(CONTENT);
+    });
+
    it('should detect paths and titles', function() {
        assert.equal(LEXED.length, 2);
        assert.equal(LEXED[0].path,'en/');
diff --git a/packages/gitbook-markdown/test/page.js b/packages/gitbook-markdown/test/page.js
index 8f4b5c5..8b882d8 100644
--- a/packages/gitbook-markdown/test/page.js
+++ b/packages/gitbook-markdown/test/page.js
@@ -4,25 +4,22 @@ var assert = require('assert');
var page = require('../').page;
-function loadPage (name, options) {
-    var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/' + name + '.md'), 'utf8');
-    return page(CONTENT, options).sections;
-}
-
-var LEXED = loadPage('PAGE');
describe('Page parsing', function() {
-    it('should detect sections', function() {
-        assert.equal(LEXED.length, 1);
+    var LEXED;
+
+    before(function() {
+        var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.md'), 'utf8');
+        LEXED = page(CONTENT);
    });
-    it('should gen content for normal sections', function() {
-        assert(LEXED[0].content);
+    it('should gen content', function() {
+        assert(LEXED.content);
    });
    it('should not add id to headings', function() {
-        assert.equal(page('# Hello').sections[0].content, '<h1>Hello</h1>\n');
-        assert.equal(page('# Hello {#test}').sections[0].content, '<h1 id="test">Hello </h1>\n');
+        assert.equal(page('# Hello').content, '<h1>Hello</h1>\n');
+        assert.equal(page('# Hello {#test}').content, '<h1 id="test">Hello </h1>\n');
    });
    it('should escape codeblocks in preparation (1)', function() {
diff --git a/packages/gitbook-markdown/test/readme.js b/packages/gitbook-markdown/test/readme.js
index b3a5952..dd059fb 100644
--- a/packages/gitbook-markdown/test/readme.js
+++ b/packages/gitbook-markdown/test/readme.js
@@ -4,11 +4,13 @@ var assert = require('assert');
var readme = require('../').readme;
-
-var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.md'), 'utf8');
-var LEXED = readme(CONTENT);
-
describe('Readme parsing', function () {
+    var LEXED;
+
+    before(function() {
+        var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.md'), 'utf8');
+        LEXED = readme(CONTENT);
+    });
    it('should contain a title', function() {
        assert(LEXED.title);
diff --git a/packages/gitbook-markdown/test/summary.js b/packages/gitbook-markdown/test/summary.js
index a1bb49a..22800d4 100644
--- a/packages/gitbook-markdown/test/summary.js
+++ b/packages/gitbook-markdown/test/summary.js
@@ -13,56 +13,57 @@ function lex(fixtureFile) {
    );
}
-var LEXED = lex('SUMMARY.md');
-
describe('Summary parsing', function () {
+    var LEXED;
+
+    before(function() {
+        LEXED = lex('SUMMARY.md');
+        PART = LEXED.parts[0];
+    });
+
    it('should detect chapters', function() {
-        assert.equal(LEXED.chapters.length, 5);
+        assert.equal(PART.articles.length, 5);
    });
    it('should support articles', function() {
-        assert.equal(LEXED.chapters[0].articles.length, 2);
-        assert.equal(LEXED.chapters[1].articles.length, 0);
-        assert.equal(LEXED.chapters[2].articles.length, 0);
+        assert.equal(PART.articles[0].articles.length, 2);
+        assert.equal(PART.articles[1].articles.length, 0);
+        assert.equal(PART.articles[2].articles.length, 0);
    });
    it('should detect paths and titles', function() {
-        assert(LEXED.chapters[0].path);
-        assert(LEXED.chapters[1].path);
-        assert(LEXED.chapters[2].path);
-        assert(LEXED.chapters[3].path);
-        assert.equal(LEXED.chapters[4].path, null);
+        assert(PART.articles[0].path);
+        assert(PART.articles[1].path);
+        assert(PART.articles[2].path);
+        assert(PART.articles[3].path);
+        assert.equal(PART.articles[4].path, null);
-        assert(LEXED.chapters[0].title);
-        assert(LEXED.chapters[1].title);
-        assert(LEXED.chapters[2].title);
-        assert(LEXED.chapters[3].title);
-        assert(LEXED.chapters[4].title);
+        assert(PART.articles[0].title);
+        assert(PART.articles[1].title);
+        assert(PART.articles[2].title);
+        assert(PART.articles[3].title);
+        assert(PART.articles[4].title);
    });
    it('should normalize paths from .md', function() {
-        assert.equal(LEXED.chapters[0].path,'chapter-1/README.md');
-        assert.equal(LEXED.chapters[1].path,'chapter-2/README.md');
-        assert.equal(LEXED.chapters[2].path,'chapter-3/README.md');
+        assert.equal(PART.articles[0].path,'chapter-1/README.md');
+        assert.equal(PART.articles[1].path,'chapter-2/README.md');
+        assert.equal(PART.articles[2].path,'chapter-3/README.md');
+    });
+
+    it('should part parts', function() {
+        var l = lex('SUMMARY_PARTS.md');
+        assert.equal(l.parts.length, 3);
    });
    it('should allow lists separated by whitespace', function() {
        var l = lex('SUMMARY_WHITESPACE.md');
-        assert.equal(l.chapters.length, 5);
+        assert.equal(l.parts[0].articles.length, 5);
    });
    it('should allow ignore empty entries', function() {
        var l = lex('SUMMARY_EMPTY.md');
-        assert.equal(l.chapters.length, 1);
-    });
-
-    it('should throw error for sublist entries', function() {
-        assert.throws(
-            function() {
-                var l = lex('SUMMARY_SUBLIST.md');
-            },
-            "Invalid entry in the SUMMARY"
-        );
+        assert.equal(l.parts[0].articles.length, 1);
    });
    it('should correctly convert it to text', function() {