diff --git a/.eslintrc b/.eslintrc
index 3a913e2..d9e47f4 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -7,5 +7,7 @@
"node": true,
"jest/globals": true
},
- "rules": {}
+ "rules": {
+ "react/jsx-filename-extension": [1, { "extensions": [".js", ".jsx"] }]
+ }
}
diff --git a/packages/bbob-html/index.js b/packages/bbob-html/index.js
index 350bb02..97ed3ab 100644
--- a/packages/bbob-html/index.js
+++ b/packages/bbob-html/index.js
@@ -1,4 +1,3 @@
-
function attrs(obj) {
let attr = '';
@@ -27,9 +26,7 @@ function traverse(tree, cb) {
return tree;
}
-function render(tree, options) {
- return html(tree);
-
+function render(ast) {
function html(tree) {
let result = '';
@@ -42,43 +39,20 @@ function render(tree, options) {
return;
}
- if (typeof node.tag === 'boolean' && !node.tag) {
- typeof node.content !== 'object' && (result += node.content);
-
- return node.content;
+ if (typeof node === 'object') {
+ if (typeof node === 'object') {
+ result += `<${node.tag} ${attrs(node.attrs)}></${node.tag}>`;
}
// treat as new root tree if node is an array
if (Array.isArray(node)) {
result += html(node);
-
- return;
- }
-
- const tag = node.tag || 'div';
-
- if (isSingleTag(tag, singleTags, singleRegExp)) {
- result += `<${tag}${attrs(node.attrs)}`;
-
- switch (closingSingleTag) {
- case 'tag':
- result += `></${tag}>`;
-
- break;
- case 'slash':
- result += ' />';
-
- break;
- default:
- result += '>';
- }
- } else {
- result += `<${tag}${node.attrs ? attrs(node.attrs) : ''}>${node.content ? html(node.content) : ''}</${tag}>`;
}
});
return result;
}
+
+ return html(ast);
}
module.exports = render;
diff --git a/packages/bbob-html/index.test.js b/packages/bbob-html/index.test.js
index e69de29..9d914c7 100644
--- a/packages/bbob-html/index.test.js
+++ b/packages/bbob-html/index.test.js
@@ -0,0 +1,6 @@
+
+describe('bbob-html', () => {
+ test('render proper markup', () => {
+
+ });
+});
diff --git a/packages/bbob-parser/Parser.js b/packages/bbob-parser/Parser.js
index e30d497..3bfaa91 100644
--- a/packages/bbob-parser/Parser.js
+++ b/packages/bbob-parser/Parser.js
@@ -10,12 +10,12 @@ const {
isTagToken,
isTextToken,
isTagEnd,
-} = require('./Tokenizer');
+} = require('./token');
-const Tokenizer = require('./Tokenizer');
-
-const TokenChar = Tokenizer.CHAR;
-const getChar = Tokenizer.getChar;
+const {
+ SLASH,
+ getChar,
+} = require('./char');
const createTagNode = (tag, attrs = {}, content = []) => ({ tag, attrs, content });
@@ -41,7 +41,7 @@ module.exports = class Parser {
const curTags = [];
const curTagsAttrName = [];
- const closableTags = this.findNestedTags(this.tokens);
+ const closableTags = this.findNestedTags();
const isNestedTag = token => closableTags.indexOf(getTokenValue(token)) >= 0;
@@ -123,6 +123,7 @@ module.exports = class Parser {
if (lastNestedNode) {
getNodes().push(lastNestedNode);
} else {
+ // eslint-disable-next-line no-console
console.warn(`Inconsistent tag '${getTokenValue(token)}' on line ${getTokenLine(token)} and column ${getTokenColumn(token)}`);
}
}
@@ -149,14 +150,14 @@ module.exports = class Parser {
return nodes;
}
- findNestedTags(tokens) {
- const tags = tokens.filter(isTagToken).reduce((acc, token) => {
+ findNestedTags() {
+ const tags = this.tokens.filter(isTagToken).reduce((acc, token) => {
acc[getTokenValue(token)] = true;
return acc;
}, {});
- const closeChar = getChar(TokenChar.SLASH);
+ const closeChar = getChar(SLASH);
return Object.keys(tags).reduce((arr, key) => {
if (tags[key] && tags[closeChar + key]) {
@@ -168,10 +169,12 @@ module.exports = class Parser {
}
isAllowedTag(value) {
- if (this.options.allowOnlyTags && this.options.allowOnlyTags.length) {
- return this.options.allowOnlyTags.indexOf(value) >= 0;
+ if (this.options.onlyAllowTags && this.options.onlyAllowTags.length) {
+ return this.options.onlyAllowTags.indexOf(value) >= 0;
}
return true;
}
};
+
+module.exports.createTagNode = createTagNode;
diff --git a/packages/bbob-parser/Tokenizer.js b/packages/bbob-parser/Tokenizer.js
index b22329b..de62a21 100644
--- a/packages/bbob-parser/Tokenizer.js
+++ b/packages/bbob-parser/Tokenizer.js
@@ -1,299 +1,253 @@
-const CHAR = require('./char');
+const {
+ getChar,
+ OPEN_BRAKET,
+ CLOSE_BRAKET, EQ, TAB, SPACE, N, QUOTEMARK,
+ PLACEHOLDER_SPACE, PLACEHOLDER_SPACE_TAB,
+} = require('./char');
const TOKEN = require('./token');
-const getChar = String.fromCharCode;
-
-
-const getTokenValue = (token) => token[Tokenizer.TOKEN.VALUE_ID];
-
-const getTokenLine = (token) => token[Tokenizer.TOKEN.LINE_ID];
-const getTokenColumn = (token) => token[Tokenizer.TOKEN.COLUMN_ID];
-
-const isTextToken = (token) => {
- const type = token[Tokenizer.TOKEN.TYPE_ID];
-
- return type === TOKEN.TYPE_SPACE || type === TOKEN.TYPE_NEW_LINE || type === TOKEN.TYPE_WORD
-};
-
-const isTagToken = (token) => token[Tokenizer.TOKEN.TYPE_ID] === TOKEN.TYPE_TAG;
-
-const isTagStart = (token) => !isTagEnd(token);
-
-const isTagEnd = (token) => getTokenValue(token).charCodeAt(0) === CHAR.SLASH;
-
-const isAttrNameToken = (token) => token[Tokenizer.TOKEN.TYPE_ID] === TOKEN.TYPE_ATTR_NAME;
-
-const isAttrValueToken = (token) => token[Tokenizer.TOKEN.TYPE_ID] === TOKEN.TYPE_ATTR_VALUE;
-
-const getTagName = (token) => {
- const value = getTokenValue(token);
-
- return isTagEnd(token) ? value.slice(1) : value
-};
-
-const convertTagToText = (token) => {
- let text = getChar(CHAR.OPEN_BRAKET);
-
- if (isTagEnd(token)) {
- text += getChar(CHAR.SLASH)
- }
-
- text += getTokenValue(token);
- text += getChar(CHAR.CLOSE_BRAKET);
-
- return text
-};
-
-const SPACE_TAB = ' ';
-const SPACE = ' ';
class Tokenizer {
- constructor(input) {
- this.buffer = input;
- this.colPos = 0;
- this.rowPos = 0;
- this.index = 0;
+ constructor(input) {
+ this.buffer = input;
+ this.colPos = 0;
+ this.rowPos = 0;
+ this.index = 0;
- this.tokenIndex = -1;
- this.tokens = [];
- }
+ this.tokenIndex = -1;
+ this.tokens = [];
+ }
- appendToken(token) {
- this.tokenIndex++;
- this.tokens[this.tokenIndex] = token;
- }
+ appendToken(token) {
+ this.tokenIndex += 1;
+ this.tokens[this.tokenIndex] = token;
+ }
- tokenize() {
- let wordToken = null;
- let tagToken = null;
- let attrNameToken = null;
- let attrValueToken = null;
- let attrTokens = [];
- this.tokens = new Array(Math.floor(this.buffer.length / 2));
+ nextCol() {
+ this.colPos += 1;
+ }
- const flushWord = () => {
- if (wordToken && wordToken[TOKEN.VALUE_ID]) {
- this.appendToken(wordToken);
- wordToken = this.createWordToken('')
- }
- };
+ nextLine() {
+ this.rowPos += 1;
+ }
- const createWord = (value, line, row) => {
- if (!wordToken) {
- wordToken = this.createWordToken(value, line, row)
- }
- };
+ tokenize() {
+ let wordToken = null;
+ let tagToken = null;
+ let attrNameToken = null;
+ let attrValueToken = null;
+ let attrTokens = [];
+ this.tokens = new Array(Math.floor(this.buffer.length / 2));
- const flushTag = () => {
- if (tagToken !== null) {
- // [] and [=] tag case
- if (!tagToken[TOKEN.VALUE_ID]) {
- const value = attrValueToken ? getChar(CHAR.EQ) : '';
- const word = getChar(CHAR.OPEN_BRAKET) + value + getChar(CHAR.CLOSE_BRAKET);
+ const flushWord = () => {
+ if (wordToken && wordToken[TOKEN.VALUE_ID]) {
+ this.appendToken(wordToken);
+ wordToken = this.createWordToken('');
+ }
+ };
- createWord('', 0, 0);
- wordToken[TOKEN.VALUE_ID] += word;
+ const createWord = (value, line, row) => {
+ if (!wordToken) {
+ wordToken = this.createWordToken(value, line, row);
+ }
+ };
- tagToken = null;
+ const flushTag = () => {
+ if (tagToken !== null) {
+ // [] and [=] tag case
+ if (!tagToken[TOKEN.VALUE_ID]) {
+ const value = attrValueToken ? getChar(EQ) : '';
+ const word = getChar(OPEN_BRAKET) + value + getChar(CLOSE_BRAKET);
- if (attrValueToken) {
- attrValueToken = null
- }
+ createWord('', 0, 0);
+ wordToken[TOKEN.VALUE_ID] += word;
- return;
- }
+ tagToken = null;
- if (attrNameToken && !attrValueToken) {
- tagToken[TOKEN.VALUE_ID] += SPACE + attrNameToken[TOKEN.VALUE_ID];
- attrNameToken = null
- }
+ if (attrValueToken) {
+ attrValueToken = null;
+ }
- this.appendToken(tagToken);
- tagToken = null;
- }
- };
-
- const flushUnclosedTag = () => {
- if (tagToken !== null) {
- const value = tagToken[TOKEN.VALUE_ID] + (attrValueToken ? getChar(CHAR.EQ) : '');
-
- tagToken[TOKEN.TYPE_ID] = TOKEN.TYPE_WORD;
- tagToken[TOKEN.VALUE_ID] = getChar(CHAR.OPEN_BRAKET) + value;
-
- this.appendToken(tagToken);
-
- tagToken = null;
-
- if (attrValueToken) {
- attrValueToken = null
- }
- }
- };
-
- const flushAttrNames = () => {
- if (attrNameToken) {
- attrTokens.push(attrNameToken);
- attrNameToken = null;
- }
-
- if (attrValueToken) {
- attrTokens.push(attrValueToken);
- attrValueToken = null
- }
- };
-
- const flushAttrs = () => {
- if (attrTokens.length) {
- attrTokens.forEach(this.appendToken.bind(this));
- attrTokens = [];
- }
- };
-
- // console.time('Lexer.tokenize');
-
- while (this.index < this.buffer.length) {
- const charCode = this.buffer.charCodeAt(this.index);
-
- switch (charCode) {
- case CHAR.TAB:
- case CHAR.SPACE:
- flushWord();
-
- if (tagToken) {
- attrNameToken = this.createAttrNameToken('');
- } else {
- const spaceCode = charCode === CHAR.TAB ? SPACE_TAB : SPACE;
-
- this.appendToken(this.createSpaceToken(spaceCode));
- }
- this.colPos++;
- break;
-
- case CHAR.N:
- flushWord();
- this.appendToken(this.createNewLineToken(getChar(charCode)));
-
- this.rowPos++;
- this.colPos = 0;
- break;
-
- case CHAR.OPEN_BRAKET:
- flushWord();
- tagToken = this.createTagToken('');
-
- this.colPos++;
- break;
-
- case CHAR.CLOSE_BRAKET:
- flushTag();
- flushAttrNames();
- flushAttrs();
-
- this.colPos++;
- break;
-
- case CHAR.EQ:
- if (tagToken) {
- attrValueToken = this.createAttrValueToken('')
- } else {
- wordToken[TOKEN.VALUE_ID] += getChar(charCode);
- }
-
- this.colPos++;
- break;
-
- case CHAR.QUOTEMARK:
- if (attrValueToken && attrValueToken[TOKEN.VALUE_ID] > 0) {
- flushAttrNames();
- } else if (tagToken === null) {
- wordToken[TOKEN.VALUE_ID] += getChar(charCode);
- }
-
- this.colPos++;
- break;
-
- default:
- if (tagToken && attrValueToken) {
- attrValueToken[TOKEN.VALUE_ID] += getChar(charCode)
- } else if (tagToken && attrNameToken) {
- attrNameToken[TOKEN.VALUE_ID] += getChar(charCode)
- } else if (tagToken) {
- tagToken[TOKEN.VALUE_ID] += getChar(charCode)
- } else {
- createWord();
-
- wordToken[TOKEN.VALUE_ID] += getChar(charCode);
- }
-
- this.colPos++;
- break;
- }
-
- this.index++;
+ return;
}
- flushWord();
- flushUnclosedTag();
+ if (attrNameToken && !attrValueToken) {
+ tagToken[TOKEN.VALUE_ID] += PLACEHOLDER_SPACE + attrNameToken[TOKEN.VALUE_ID];
+ attrNameToken = null;
+ }
- this.tokens.length = this.tokenIndex + 1;
+ this.appendToken(tagToken);
+ tagToken = null;
+ }
+ };
- return this.tokens;
+ const flushUnclosedTag = () => {
+ if (tagToken !== null) {
+ const value = tagToken[TOKEN.VALUE_ID] + (attrValueToken ? getChar(EQ) : '');
+
+ tagToken[TOKEN.TYPE_ID] = TOKEN.TYPE_WORD;
+ tagToken[TOKEN.VALUE_ID] = getChar(OPEN_BRAKET) + value;
+
+ this.appendToken(tagToken);
+
+ tagToken = null;
+
+ if (attrValueToken) {
+ attrValueToken = null;
+ }
+ }
+ };
+
+ const flushAttrNames = () => {
+ if (attrNameToken) {
+ attrTokens.push(attrNameToken);
+ attrNameToken = null;
+ }
+
+ if (attrValueToken) {
+ attrTokens.push(attrValueToken);
+ attrValueToken = null;
+ }
+ };
+
+ const flushAttrs = () => {
+ if (attrTokens.length) {
+ attrTokens.forEach(this.appendToken.bind(this));
+ attrTokens = [];
+ }
+ };
+
+ // console.time('Lexer.tokenize');
+
+ while (this.index < this.buffer.length) {
+ const charCode = this.buffer.charCodeAt(this.index);
+
+ switch (charCode) {
+ case TAB:
+ case SPACE:
+ flushWord();
+
+ if (tagToken) {
+ attrNameToken = this.createAttrNameToken('');
+ } else {
+ const spaceCode = charCode === TAB ? PLACEHOLDER_SPACE_TAB : PLACEHOLDER_SPACE;
+
+ this.appendToken(this.createSpaceToken(spaceCode));
+ }
+ this.nextCol();
+ break;
+
+ case N:
+ flushWord();
+ this.appendToken(this.createNewLineToken(getChar(charCode)));
+
+ this.nextLine();
+ this.colPos = 0;
+ break;
+
+ case OPEN_BRAKET:
+ flushWord();
+ tagToken = this.createTagToken('');
+
+ this.nextCol();
+ break;
+
+ case CLOSE_BRAKET:
+ flushTag();
+ flushAttrNames();
+ flushAttrs();
+
+ this.nextCol();
+ break;
+
+ case EQ:
+ if (tagToken) {
+ attrValueToken = this.createAttrValueToken('');
+ } else {
+ wordToken[TOKEN.VALUE_ID] += getChar(charCode);
+ }
+
+ this.nextCol();
+ break;
+
+ case QUOTEMARK:
+ if (attrValueToken && attrValueToken[TOKEN.VALUE_ID] > 0) {
+ flushAttrNames();
+ } else if (tagToken === null) {
+ wordToken[TOKEN.VALUE_ID] += getChar(charCode);
+ }
+
+ this.nextCol();
+ break;
+
+ default:
+ if (tagToken && attrValueToken) {
+ attrValueToken[TOKEN.VALUE_ID] += getChar(charCode);
+ } else if (tagToken && attrNameToken) {
+ attrNameToken[TOKEN.VALUE_ID] += getChar(charCode);
+ } else if (tagToken) {
+ tagToken[TOKEN.VALUE_ID] += getChar(charCode);
+ } else {
+ createWord();
+
+ wordToken[TOKEN.VALUE_ID] += getChar(charCode);
+ }
+
+ this.nextCol();
+ break;
+ }
+
+ this.index += 1;
}
- createWordToken(value = '', line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_WORD, value, line, row]
- }
+ flushWord();
+ flushUnclosedTag();
- createTagToken(value, line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_TAG, value, line, row]
- }
+ this.tokens.length = this.tokenIndex + 1;
- createAttrNameToken(value, line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_ATTR_NAME, value, line, row]
- }
+ return this.tokens;
+ }
- createAttrValueToken(value, line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_ATTR_VALUE, value, line, row]
- }
+ createWordToken(value = '', line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_WORD, value, line, row];
+ }
- createSpaceToken(value, line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_SPACE, value, line, row]
- }
+ createTagToken(value, line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_TAG, value, line, row];
+ }
- createNewLineToken(value, line = this.colPos, row = this.rowPos) {
- return [TOKEN.TYPE_NEW_LINE, value, line, row]
- }
+ createAttrNameToken(value, line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_ATTR_NAME, value, line, row];
+ }
+
+ createAttrValueToken(value, line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_ATTR_VALUE, value, line, row];
+ }
+
+ createSpaceToken(value, line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_SPACE, value, line, row];
+ }
+
+ createNewLineToken(value, line = this.colPos, row = this.rowPos) {
+ return [TOKEN.TYPE_NEW_LINE, value, line, row];
+ }
}
// warm up tokenizer to elimitate code branches that never execute
-new Tokenizer(`[sc=asdasd`).tokenize();
-//new Tokenizer(`[b param="hello"]Sample text[/b]\n\t[Chorus]`).tokenize();
+new Tokenizer('[b param="hello"]Sample text[/b]\n\t[Chorus 2]').tokenize();
module.exports = Tokenizer;
-module.exports.CHAR = CHAR;
module.exports.TYPE = {
- WORD: TOKEN.TYPE_WORD,
- TAG: TOKEN.TYPE_TAG,
- ATTR_NAME: TOKEN.TYPE_ATTR_NAME,
- ATTR_VALUE: TOKEN.TYPE_ATTR_VALUE,
- SPACE: TOKEN.TYPE_SPACE,
- NEW_LINE: TOKEN.TYPE_NEW_LINE,
+ WORD: TOKEN.TYPE_WORD,
+ TAG: TOKEN.TYPE_TAG,
+ ATTR_NAME: TOKEN.TYPE_ATTR_NAME,
+ ATTR_VALUE: TOKEN.TYPE_ATTR_VALUE,
+ SPACE: TOKEN.TYPE_SPACE,
+ NEW_LINE: TOKEN.TYPE_NEW_LINE,
};
module.exports.TOKEN = {
- TYPE_ID: TOKEN.TYPE_ID,
- VALUE_ID: TOKEN.VALUE_ID,
- LINE_ID: TOKEN.LINE_ID,
- COLUMN_ID: TOKEN.COLUMN_ID,
+ TYPE_ID: TOKEN.TYPE_ID,
+ VALUE_ID: TOKEN.VALUE_ID,
+ LINE_ID: TOKEN.LINE_ID,
+ COLUMN_ID: TOKEN.COLUMN_ID,
};
-module.exports.getChar = getChar;
-module.exports.getTokenValue = getTokenValue;
-module.exports.getTokenLine = getTokenLine;
-module.exports.getTokenColumn = getTokenColumn;
-module.exports.isTextToken = isTextToken;
-module.exports.isTagToken = isTagToken;
-module.exports.isTagStart = isTagStart;
-module.exports.isTagEnd = isTagEnd;
-module.exports.isAttrNameToken = isAttrNameToken;
-module.exports.isAttrValueToken = isAttrValueToken;
-module.exports.getTagName = getTagName;
-module.exports.convertTokenToText = convertTagToText;
diff --git a/packages/bbob-parser/Tokenizer.test.js b/packages/bbob-parser/Tokenizer.test.js
index 3b944c2..ae15372 100644
--- a/packages/bbob-parser/Tokenizer.test.js
+++ b/packages/bbob-parser/Tokenizer.test.js
@@ -1,6 +1,6 @@
const Tokenizer = require('./Tokenizer');
-const TYPE = Tokenizer.TYPE;
+const { TYPE } = Tokenizer;
const tokenize = input => (new Tokenizer(input).tokenize());
diff --git a/packages/bbob-parser/char.js b/packages/bbob-parser/char.js
index f020f19..2d0fc82 100644
--- a/packages/bbob-parser/char.js
+++ b/packages/bbob-parser/char.js
@@ -12,7 +12,13 @@ const CLOSE_BRAKET = ']'.charCodeAt(0);
const SLASH = '/'.charCodeAt(0);
+const PLACEHOLDER_SPACE_TAB = ' ';
+const PLACEHOLDER_SPACE = ' ';
+
+const getChar = String.fromCharCode;
+
module.exports = {
+ getChar,
N,
F,
R,
@@ -23,4 +29,6 @@ module.exports = {
OPEN_BRAKET,
CLOSE_BRAKET,
SLASH,
+ PLACEHOLDER_SPACE_TAB,
+ PLACEHOLDER_SPACE,
};
diff --git a/packages/bbob-parser/index.js b/packages/bbob-parser/index.js
index 88fa820..a2e6363 100644
--- a/packages/bbob-parser/index.js
+++ b/packages/bbob-parser/index.js
@@ -1 +1 @@
-module.exports = require('./parse');
\ No newline at end of file
+module.exports = require('./parse');
diff --git a/packages/bbob-parser/parse.test.js b/packages/bbob-parser/parse.test.js
index f526e08..00359a8 100644
--- a/packages/bbob-parser/parse.test.js
+++ b/packages/bbob-parser/parse.test.js
@@ -1,9 +1,5 @@
const parse = require('./index');
-const options = {
- allowOnlyTags: ['ch', 'syllable', 'tab'],
-};
-
describe('parse', () => {
test('tag with spaces', () => {
const ast = parse('[Verse 2]');
diff --git a/packages/bbob-parser/token.js b/packages/bbob-parser/token.js
index 011f614..41811ad 100644
--- a/packages/bbob-parser/token.js
+++ b/packages/bbob-parser/token.js
@@ -1,3 +1,10 @@
+const {
+ getChar,
+ OPEN_BRAKET,
+ CLOSE_BRAKET,
+ SLASH,
+} = require('./char');
+
const TOKEN_TYPE_ID = 0;
const TOKEN_VALUE_ID = 1;
const TOKEN_COLUMN_ID = 2;
@@ -10,6 +17,43 @@ const TOKEN_TYPE_ATTR_VALUE = 'attr-value';
const TOKEN_TYPE_SPACE = 'space';
const TOKEN_TYPE_NEW_LINE = 'new-line';
+const getTokenValue = token => token[TOKEN_VALUE_ID];
+const getTokenLine = token => token[TOKEN_LINE_ID];
+const getTokenColumn = token => token[TOKEN_COLUMN_ID];
+
+const isTextToken = (token) => {
+ const type = token[TOKEN_TYPE_ID];
+
+ return type === TOKEN_TYPE_SPACE || type === TOKEN_TYPE_NEW_LINE || type === TOKEN_TYPE_WORD;
+};
+
+const isTagToken = token => token[TOKEN_TYPE_ID] === TOKEN_TYPE_TAG;
+const isTagEnd = token => getTokenValue(token).charCodeAt(0) === SLASH;
+const isTagStart = token => !isTagEnd(token);
+const isAttrNameToken = token => token[TOKEN_TYPE_ID] === TOKEN_TYPE_ATTR_NAME;
+const isAttrValueToken = token => token[TOKEN_TYPE_ID] === TOKEN_TYPE_ATTR_VALUE;
+
+const getTagName = (token) => {
+ const value = getTokenValue(token);
+
+ return isTagEnd(token) ? value.slice(1) : value;
+};
+
+
+const convertTagToText = (token) => {
+ let text = getChar(OPEN_BRAKET);
+
+ if (isTagEnd(token)) {
+ text += getChar(SLASH);
+ }
+
+ text += getTokenValue(token);
+ text += getChar(CLOSE_BRAKET);
+
+ return text;
+};
+
+
module.exports = {
TYPE_ID: TOKEN_TYPE_ID,
VALUE_ID: TOKEN_VALUE_ID,
@@ -20,5 +64,16 @@ module.exports = {
TYPE_ATTR_NAME: TOKEN_TYPE_ATTR_NAME,
TYPE_ATTR_VALUE: TOKEN_TYPE_ATTR_VALUE,
TYPE_SPACE: TOKEN_TYPE_SPACE,
- TYPE_NEW_LINE: TOKEN_TYPE_NEW_LINE
+ TYPE_NEW_LINE: TOKEN_TYPE_NEW_LINE,
+ convertTagToText,
+ getTagName,
+ getTokenColumn,
+ getTokenLine,
+ getTokenValue,
+ isAttrNameToken,
+ isAttrValueToken,
+ isTagStart,
+ isTagToken,
+ isTextToken,
+ isTagEnd,
};
diff --git a/packages/bbob-react/index.js b/packages/bbob-react/index.js
index 89017b5..1d2446d 100644
--- a/packages/bbob-react/index.js
+++ b/packages/bbob-react/index.js
@@ -1,41 +1,47 @@
const React = require('react');
-const parse = require('bbob-html');
+const PropTypes = require('prop-types');
+const parse = require('@bbob/html');
class BBCode extends React.Component {
- render() {
- const Container = this.props.container;
-
- return (
-   <Container>
-     {this.content()}
-   </Container>
- );
- }
-
content() {
if (this.props.source) {
+ // eslint-disable-next-line react/no-danger
+ return <span dangerouslySetInnerHTML={{ __html: this.renderBBCode(this.props.source) }} />;
}
- else {
- return React.Children.map(this.props.children, child => {
- if (typeof child === 'string') {
- return <span dangerouslySetInnerHTML={{ __html: this.renderBBCode(child) }} />;
- }
- else {
- return child;
- }
- });
- }
+
+ return React.Children.map(this.props.children, (child) => {
+ if (typeof child === 'string') {
+ // eslint-disable-next-line react/no-danger
+ return <span dangerouslySetInnerHTML={{ __html: this.renderBBCode(child) }} />;
+ }
+ return child;
+ });
}
renderBBCode(source) {
- return parse(source)
+ return parse(source, this.props.options);
+ }
+
+ render() {
+ const Container = this.props.container;
+
+ return (<Container>{this.content()}</Container>);
}
}
+BBCode.propTypes = {
+ container: PropTypes.node,
+ children: PropTypes.element.isRequired,
+ source: PropTypes.string,
+ options: PropTypes.shape({
+ prop: PropTypes.bool,
+ }),
+};
+
BBCode.defaultProps = {
container: 'div',
options: {},
+ source: null,
};
-module.exports = BBCode;
\ No newline at end of file
+module.exports = BBCode;
diff --git a/packages/bbob-react/index.test.js b/packages/bbob-react/index.test.js
index ec7e6da..592f157 100644
--- a/packages/bbob-react/index.test.js
+++ b/packages/bbob-react/index.test.js
@@ -1,7 +1,5 @@
describe('React BBCode', () => {
-
test('render markup properly', () => {
- })
-
+ });
});
diff --git a/packages/bbob-react/package.json b/packages/bbob-react/package.json
index d37a36d..df77bc0 100644
--- a/packages/bbob-react/package.json
+++ b/packages/bbob-react/package.json
@@ -12,5 +12,13 @@
"license": "MIT",
"publishConfig": {
"registry": "https://registry.npmjs.org/"
+ },
+ "dependencies": {
+ "@bbob/html": "^1.0.1",
+ "prop-types": "^15.6.1",
+ "react": "^15.6.2"
+ },
+ "peerDependencies": {
+ "react": "15.x"
}
}