author    Samy Pessé <samypesse@gmail.com>  2016-12-22 12:31:22 +0100
committer Samy Pessé <samypesse@gmail.com>  2016-12-22 12:31:22 +0100
commit    eb0bf31baa6cb903ba4242ae5a3866ff67aeb97c (patch)
tree      e2347c9e8c58dd3c2681a6028b5f8d69c40d4e04 /packages/gitbook-asciidoc
parent    1c1e58b8641cac34b5f3d7c05951f507557fcb1a (diff)
Import gitbook-asciidoc
Diffstat (limited to 'packages/gitbook-asciidoc')
-rwxr-xr-x  packages/gitbook-asciidoc/.gitignore                                                               | 28
-rw-r--r--  packages/gitbook-asciidoc/.npmignore                                                               |  1
-rw-r--r--  packages/gitbook-asciidoc/.travis.yml                                                              |  4
-rwxr-xr-x  packages/gitbook-asciidoc/README.md                                                                |  7
-rwxr-xr-x  packages/gitbook-asciidoc/lib/index.js                                                             |  5
-rwxr-xr-x  packages/gitbook-asciidoc/package.json                                                             | 24
-rwxr-xr-x  packages/gitbook-asciidoc/src/index.js                                                             |  5
-rw-r--r--  packages/gitbook-asciidoc/src/toAsciidoc.js (renamed from packages/gitbook-asciidoc/lib/toAsciidoc.js) | 25
-rw-r--r--  packages/gitbook-asciidoc/src/toHTML.js (renamed from packages/gitbook-asciidoc/lib/toHTML.js)         | 18
-rwxr-xr-x  packages/gitbook-asciidoc/test/glossary.js                                                         | 33
-rw-r--r--  packages/gitbook-asciidoc/test/helper.js                                                           |  6
-rw-r--r--  packages/gitbook-asciidoc/test/inline.js                                                           | 13
-rwxr-xr-x  packages/gitbook-asciidoc/test/langs.js                                                            | 33
-rwxr-xr-x  packages/gitbook-asciidoc/test/page.js                                                             | 20
-rwxr-xr-x  packages/gitbook-asciidoc/test/readme.js                                                           | 28
-rwxr-xr-x  packages/gitbook-asciidoc/test/summary.js                                                          | 71
16 files changed, 138 insertions(+), 183 deletions(-)
diff --git a/packages/gitbook-asciidoc/.gitignore b/packages/gitbook-asciidoc/.gitignore
deleted file mode 100755
index 9550e4f..0000000
--- a/packages/gitbook-asciidoc/.gitignore
+++ /dev/null
@@ -1,28 +0,0 @@
-# Logs
-logs
-*.log
-
-# Runtime data
-pids
-*.pid
-*.seed
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Compiled binary addons (http://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directory
-# Deployed apps should consider commenting this line out:
-# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
-node_modules
-
-# vim swapfile
-*.swp
diff --git a/packages/gitbook-asciidoc/.npmignore b/packages/gitbook-asciidoc/.npmignore
new file mode 100644
index 0000000..85de9cf
--- /dev/null
+++ b/packages/gitbook-asciidoc/.npmignore
@@ -0,0 +1 @@
+src
diff --git a/packages/gitbook-asciidoc/.travis.yml b/packages/gitbook-asciidoc/.travis.yml
deleted file mode 100644
index 3e5e701..0000000
--- a/packages/gitbook-asciidoc/.travis.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-language: node_js
-node_js:
- - "stable"
- - "0.12"
diff --git a/packages/gitbook-asciidoc/README.md b/packages/gitbook-asciidoc/README.md
index e02478a..76056f9 100755
--- a/packages/gitbook-asciidoc/README.md
+++ b/packages/gitbook-asciidoc/README.md
@@ -1,6 +1,3 @@
-# GitBook AsciiDoc Parser
+# `gitbook-asciidoc`
 
-[![Build Status](https://travis-ci.org/GitbookIO/gitbook-asciidoc.png?branch=master)](https://travis-ci.org/GitbookIO/gitbook-asciidoc)
-[![NPM version](https://badge.fury.io/js/gitbook-asciidoc.svg)](http://badge.fury.io/js/gitbook-asciidoc)
-
-This node module uses AsciiDoctor to parse AsciiDoc for gitbook (SUMMARY.adoc, README.adoc).
+> AsciiDoc parser for GitBook.
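For reference, a minimal usage sketch of the imported parser, pieced together from the test files in this commit (test/readme.js, test/summary.js). The AsciiDoc snippets and the logged values are illustrative assumptions, not fixtures from the repository:

```js
// Usage sketch based on the tests imported in this commit; the AsciiDoc
// strings below are hypothetical inputs, not files from the repo.
const parser = require('gitbook-asciidoc');

// README parsing: the tests expect a title and a description (test/readme.js).
const readme = parser.readme('= This is the title\n\nThis is the book description.\n');
console.log(readme.title);        // expected per test/readme.js: 'This is the title'
console.log(readme.description);  // expected per test/readme.js: 'This is the book description.'

// SUMMARY parsing: articles are grouped under parts (test/summary.js).
const summary = parser.summary(
    '= Summary\n\n' +
    '. link:chapter-1/README.adoc[Chapter 1]\n' +
    '. link:chapter-2/README.adoc[Chapter 2]\n'
);
summary.parts[0].articles.forEach((article) => {
    console.log(article.title, article.ref);
});
```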
diff --git a/packages/gitbook-asciidoc/lib/index.js b/packages/gitbook-asciidoc/lib/index.js
deleted file mode 100755
index 9b21700..0000000
--- a/packages/gitbook-asciidoc/lib/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-var HTMLParser = require('gitbook-html');
-var toHTML = require('./toHTML');
-var toAsciidoc = require('./toAsciidoc');
-
-module.exports = HTMLParser.createParser(toHTML, toAsciidoc);
diff --git a/packages/gitbook-asciidoc/package.json b/packages/gitbook-asciidoc/package.json
index 118bcfe..0647794 100755
--- a/packages/gitbook-asciidoc/package.json
+++ b/packages/gitbook-asciidoc/package.json
@@ -9,29 +9,17 @@
"asciidoctor.js": "1.5.5-4",
"gitbook-html": "1.3.3"
},
- "devDependencies": {
- "mocha": "2.3.2"
- },
"scripts": {
- "test": "export TESTING=true; mocha --reporter list --bail"
+ "test": "mocha --reporter list --bail --compilers js:babel-register",
+ "prepublish": "rm -rf lib/ && babel -d lib/ src/ --source-maps"
},
"repository": {
"type": "git",
- "url": "https://github.com/GitbookIO/gitbook-asciidoc.git"
+ "url": "https://github.com/GitbookIO/gitbook.git"
},
- "author": "FriendCode Inc. <contact@gitbook.com>",
+ "author": "GitBook Inc. <contact@gitbook.com>",
"license": "Apache-2.0",
"bugs": {
- "url": "https://github.com/GitbookIO/gitbook-asciidoc/issues"
- },
- "contributors": [
- {
- "name": "Aaron O'Mullan",
- "email": "aaron@gitbook.com"
- },
- {
- "name": "Samy Pessé",
- "email": "samy@gitbook.com"
- }
- ]
+ "url": "https://github.com/GitbookIO/gitbook/issues"
+ }
}
diff --git a/packages/gitbook-asciidoc/src/index.js b/packages/gitbook-asciidoc/src/index.js
new file mode 100755
index 0000000..9ba30e7
--- /dev/null
+++ b/packages/gitbook-asciidoc/src/index.js
@@ -0,0 +1,5 @@
+const HTMLParser = require('gitbook-html');
+const toHTML = require('./toHTML');
+const toAsciidoc = require('./toAsciidoc');
+
+module.exports = HTMLParser.createParser(toHTML, toAsciidoc);
diff --git a/packages/gitbook-asciidoc/lib/toAsciidoc.js b/packages/gitbook-asciidoc/src/toAsciidoc.js
index 5b4e93c..77759d8 100644
--- a/packages/gitbook-asciidoc/lib/toAsciidoc.js
+++ b/packages/gitbook-asciidoc/src/toAsciidoc.js
@@ -5,43 +5,42 @@ function ns(s, n) {
}
module.exports = {
- onTitleStart: function(level) {
+ onTitleStart(level) {
return ns('=', level) + ' ';
},
- onTitleEnd: function(level) {
+ onTitleEnd(level) {
return this.onBL();
},
- onParagraphStart: function() {
+ onParagraphStart() {
return this.onSection();
},
- onParagraphEnd: function() {
+ onParagraphEnd() {
return this.onSection();
},
- onLinkStart: function(href) {
+ onLinkStart(href) {
return 'link:' + href + '[';
},
- onLinkEnd: function() {
+ onLinkEnd() {
return ']';
},
- onListStart: function(level) {
+ onListStart(level) {
return '';
},
- onListEnd: function() {
+ onListEnd() {
return '';
},
- onListItemStart: function(level) {
+ onListItemStart(level) {
return ns('.', level + 1) + ' ';
},
- onListItemEnd: function() {
+ onListItemEnd() {
return '';
},
- onHR: function() {
- return "'''";
+ onHR() {
+ return '\'\'\'';
}
};
-
diff --git a/packages/gitbook-asciidoc/lib/toHTML.js b/packages/gitbook-asciidoc/src/toHTML.js
index 504a20d..0fc8f5f 100644
--- a/packages/gitbook-asciidoc/lib/toHTML.js
+++ b/packages/gitbook-asciidoc/src/toHTML.js
@@ -1,14 +1,22 @@
-var asciidoctor = require('asciidoctor.js')();
+const asciidoctor = require('asciidoctor.js')();
-// Render Asciidoc to HTML (block)
+/**
+ * Render Asciidoc to HTML (block)
+ * @param {String} content
+ * @return {String} html
+ */
function asciidocToHTML(content) {
return asciidoctor.convert(content, {'attributes': 'showtitle'});
-};
+}
-// Render Asciidoc to HTML (inline)
+/**
+ * Render Asciidoc to HTML (inline)
+ * @param {String} content
+ * @return {String} html
+ */
function asciidocToHTMLInline(content) {
return asciidoctor.convert(content, {doctype: 'inline', attributes: 'showtitle'});
-};
+}
module.exports = {
block: asciidocToHTML,
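The two converters above differ only in the Asciidoctor doctype they request. A rough illustration of the difference follows; the inline result is the one checked by test/inline.js in this commit, while the block wrapper markup shown in the comment is an assumption about asciidoctor.js 1.5.x default output:

```js
// Block vs. inline conversion, as wired up in src/toHTML.js above.
const asciidoctor = require('asciidoctor.js')();

const inlineHtml = asciidoctor.convert('Hello **World**', {
    doctype: 'inline',
    attributes: 'showtitle'
});
// inlineHtml === 'Hello <strong>World</strong>'  (matches test/inline.js)

const blockHtml = asciidoctor.convert('Hello **World**', {
    attributes: 'showtitle'
});
// blockHtml is wrapped in block-level markup, roughly:
// '<div class="paragraph"><p>Hello <strong>World</strong></p></div>'
```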
diff --git a/packages/gitbook-asciidoc/test/glossary.js b/packages/gitbook-asciidoc/test/glossary.js
index f94d046..23c850a 100755
--- a/packages/gitbook-asciidoc/test/glossary.js
+++ b/packages/gitbook-asciidoc/test/glossary.js
@@ -1,29 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var glossary = require('../').glossary;
+const glossary = require('../src').glossary;
-describe('Glossary parsing', function () {
- var LEXED;
+describe('Glossary parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/GLOSSARY.adoc'), 'utf8');
LEXED = glossary(CONTENT);
});
- it('should only get heading + paragraph pairs', function() {
- assert.equal(LEXED.length, 4);
+ it('should only get heading + paragraph pairs', () => {
+ expect(LEXED.length).toBe(4);
});
- it('should output simple name/description objects', function() {
- assert.equal(true, !(LEXED.some(function(e) {
- return !Boolean(e.name && e.description);
- })));
+ it('should output simple name/description objects', () => {
+ expect(!(LEXED.some(e => !Boolean(e.name && e.description)))).toBeTruthy();
});
- it('should correctly convert it to text', function() {
- var text = glossary.toText(LEXED);
- assertObjectsEqual(glossary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = glossary.toText(LEXED);
+ const parsed = glossary(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-asciidoc/test/helper.js b/packages/gitbook-asciidoc/test/helper.js
deleted file mode 100644
index 1e310f7..0000000
--- a/packages/gitbook-asciidoc/test/helper.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var assert = require("assert");
-
-global.assertObjectsEqual = function(o1, o2) {
- assert.equal(JSON.stringify(o1, null, 4), JSON.stringify(o2, null, 4));
-};
-
diff --git a/packages/gitbook-asciidoc/test/inline.js b/packages/gitbook-asciidoc/test/inline.js
index 17d66a3..e878d02 100644
--- a/packages/gitbook-asciidoc/test/inline.js
+++ b/packages/gitbook-asciidoc/test/inline.js
@@ -1,11 +1,10 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const expect = require('expect');
-var inline = require('../').inline;
+const inline = require('../src').inline;
-describe('Inline', function () {
- it('should render inline AsciiDoc', function() {
- assert.equal(inline('Hello **World**').content, 'Hello <strong>World</strong>');
+describe('Inline', () => {
+ it('should render inline AsciiDoc', () => {
+ const parsed = inline('Hello **World**');
+ expect(parsed.content).toEqual('Hello <strong>World</strong>');
});
});
diff --git a/packages/gitbook-asciidoc/test/langs.js b/packages/gitbook-asciidoc/test/langs.js
index 52a74c0..6673fb2 100755
--- a/packages/gitbook-asciidoc/test/langs.js
+++ b/packages/gitbook-asciidoc/test/langs.js
@@ -1,27 +1,28 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var langs = require('../').langs;
+const langs = require('../src').langs;
-describe('Languages parsing', function () {
- var LEXED;
+describe('Languages parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/LANGS.adoc'), 'utf8');
LEXED = langs(CONTENT);
});
- it('should detect paths and titles', function() {
- assert.equal(LEXED[0].ref,'en/');
- assert.equal(LEXED[0].title,'English');
+ it('should detect paths and titles', () => {
+ expect(LEXED[0].ref).toBe('en/');
+ expect(LEXED[0].title).toBe('English');
- assert.equal(LEXED[1].ref,'fr/');
- assert.equal(LEXED[1].title,'French');
+ expect(LEXED[1].ref).toBe('fr/');
+ expect(LEXED[1].title).toBe('French');
});
- it('should correctly convert it to text', function() {
- var text = langs.toText(LEXED);
- assertObjectsEqual(langs(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = langs.toText(LEXED);
+ const parsed = langs(text);
+ expect(parsed).toEqual(LEXED);
});
});
diff --git a/packages/gitbook-asciidoc/test/page.js b/packages/gitbook-asciidoc/test/page.js
index 3a28c5f..cd7a29a 100755
--- a/packages/gitbook-asciidoc/test/page.js
+++ b/packages/gitbook-asciidoc/test/page.js
@@ -1,18 +1,18 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var page = require('../').page;
+const page = require('../src').page;
-describe('Page parsing', function() {
- var LEXED;
+describe('Page parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/PAGE.adoc'), 'utf8');
LEXED = page(CONTENT);
});
- it('should gen content', function() {
- assert(LEXED.content);
+ it('should gen content', () => {
+ expect(LEXED.content).toExist();
});
});
diff --git a/packages/gitbook-asciidoc/test/readme.js b/packages/gitbook-asciidoc/test/readme.js
index 2f4f601..e6a8009 100755
--- a/packages/gitbook-asciidoc/test/readme.js
+++ b/packages/gitbook-asciidoc/test/readme.js
@@ -1,30 +1,30 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const assert = require('assert');
-var readme = require('../').readme;
+const readme = require('../src').readme;
-describe('Readme parsing', function () {
- var LEXED;
+describe('Readme parsing', () => {
+ let LEXED;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/README.adoc'), 'utf8');
LEXED = readme(CONTENT);
});
- it('should contain a title', function() {
+ it('should contain a title', () => {
assert(LEXED.title);
});
- it('should contain a description', function() {
+ it('should contain a description', () => {
assert(LEXED.description);
});
- it('should extract the right title', function() {
- assert.equal(LEXED.title, "This is the title");
+ it('should extract the right title', () => {
+ assert.equal(LEXED.title, 'This is the title');
});
- it('should extract the right description', function() {
- assert.equal(LEXED.description, "This is the book description.");
+ it('should extract the right description', () => {
+ assert.equal(LEXED.description, 'This is the book description.');
});
});
diff --git a/packages/gitbook-asciidoc/test/summary.js b/packages/gitbook-asciidoc/test/summary.js
index d65aadf..ffd0bc0 100755
--- a/packages/gitbook-asciidoc/test/summary.js
+++ b/packages/gitbook-asciidoc/test/summary.js
@@ -1,55 +1,56 @@
-var fs = require('fs');
-var path = require('path');
-var assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const expect = require('expect');
-var summary = require('../').summary;
+const summary = require('../src').summary;
-describe('Summary parsing', function () {
- var LEXED, PART;
+describe('Summary parsing', () => {
+ let LEXED, PART;
- before(function() {
- var CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/SUMMARY.adoc'), 'utf8');
+ before(() => {
+ const CONTENT = fs.readFileSync(path.join(__dirname, './fixtures/SUMMARY.adoc'), 'utf8');
LEXED = summary(CONTENT);
PART = LEXED.parts[0];
// todo: add support for parts in asciidoc
});
- it('should detect parts', function() {
- assert.equal(LEXED.parts.length, 1);
+ it('should detect parts', () => {
+ expect(LEXED.parts.length).toBe(1);
});
- it('should detect articles', function() {
- assert.equal(PART.articles.length, 5);
+ it('should detect articles', () => {
+ expect(PART.articles.length).toBe(5);
});
- it('should support articles', function() {
- assert.equal(PART.articles[0].articles.length, 2);
- assert.equal(PART.articles[1].articles.length, 0);
- assert.equal(PART.articles[2].articles.length, 0);
+ it('should support articles', () => {
+ expect(PART.articles[0].articles.length).toBe(2);
+ expect(PART.articles[1].articles.length).toBe(0);
+ expect(PART.articles[2].articles.length).toBe(0);
});
- it('should detect paths and titles', function() {
- assert(PART.articles[0].ref);
- assert(PART.articles[1].ref);
- assert(PART.articles[2].ref);
- assert(PART.articles[3].ref);
- assert.equal(PART.articles[4].ref, null);
-
- assert(PART.articles[0].title);
- assert(PART.articles[1].title);
- assert(PART.articles[2].title);
- assert(PART.articles[3].title);
- assert(PART.articles[4].title);
+ it('should detect paths and titles', () => {
+ expect(PART.articles[0].ref).toExist();
+ expect(PART.articles[1].ref).toExist();
+ expect(PART.articles[2].ref).toExist();
+ expect(PART.articles[3].ref).toExist();
+ expect(PART.articles[4].ref).toBe(null);
+
+ expect(PART.articles[0].title).toExist();
+ expect(PART.articles[1].title).toExist();
+ expect(PART.articles[2].title).toExist();
+ expect(PART.articles[3].title).toExist();
+ expect(PART.articles[4].title).toExist();
});
- it('should normalize paths from .md', function() {
- assert.equal(PART.articles[0].ref, 'chapter-1/README.adoc');
- assert.equal(PART.articles[1].ref, 'chapter-2/README.adoc');
- assert.equal(PART.articles[2].ref, 'chapter-3/README.adoc');
+ it('should normalize paths from .md', () => {
+ expect(PART.articles[0].ref).toBe('chapter-1/README.adoc');
+ expect(PART.articles[1].ref).toBe('chapter-2/README.adoc');
+ expect(PART.articles[2].ref).toBe('chapter-3/README.adoc');
});
- it('should correctly convert it to text', function() {
- var text = summary.toText(LEXED);
- assertObjectsEqual(summary(text), LEXED);
+ it('should correctly convert it to text', () => {
+ const text = summary.toText(LEXED);
+ const parsed = summary(text);
+ expect(parsed).toEqual(LEXED);
});
});