From 9dd63baee2b06b0787dea3a707200eb57ac8eb48 Mon Sep 17 00:00:00 2001
From: ache
Date: Fri, 11 May 2018 09:21:57 +0200
Subject: Add xo linter

---
 index.js     | 163 +++++++++++++++++++++++++++++------------------------
 package.json |  20 +++++++-
 2 files changed, 98 insertions(+), 85 deletions(-)

diff --git a/index.js b/index.js
index 05ed593..57ff60c 100644
--- a/index.js
+++ b/index.js
@@ -4,36 +4,36 @@ const parseAttr = require('md-attr-parser');
 const htmlElemAttr = require('html-element-attributes');
 
 const DOMEventHandler = [
-  "onabort", "onautocomplete", "onautocompleteerror",
-  "onblur", "oncancel", "oncanplay",
-  "oncanplaythrough", "onchange", "onclick",
-  "onclose", "oncontextmenu", "oncuechange",
-  "ondblclick", "ondrag", "ondragend",
-  "ondragenter", "ondragexit", "ondragleave",
-  "ondragover", "ondragstart", "ondrop",
-  "ondurationchange", "onemptied", "onended",
-  "onerror", "onfocus", "oninput",
-  "oninvalid", "onkeydown", "onkeypress",
-  "onkeyup", "onload", "onloadeddata",
-  "onloadedmetadata", "onloadstart", "onmousedown",
-  "onmouseenter", "onmouseleave", "onmousemove",
-  "onmouseout", "onmouseover", "onmouseup",
-  "onmousewheel", "onpause", "onplay",
-  "onplaying", "onprogress", "onratechange",
-  "onreset", "onresize", "onscroll",
-  "onseeked", "onseeking", "onselect",
-  "onshow", "onsort", "onstalled",
-  "onsubmit", "onsuspend", "ontimeupdate",
-  "ontoggle", "onvolumechange", "onwaiting"
+  'onabort', 'onautocomplete', 'onautocompleteerror',
+  'onblur', 'oncancel', 'oncanplay',
+  'oncanplaythrough', 'onchange', 'onclick',
+  'onclose', 'oncontextmenu', 'oncuechange',
+  'ondblclick', 'ondrag', 'ondragend',
+  'ondragenter', 'ondragexit', 'ondragleave',
+  'ondragover', 'ondragstart', 'ondrop',
+  'ondurationchange', 'onemptied', 'onended',
+  'onerror', 'onfocus', 'oninput',
+  'oninvalid', 'onkeydown', 'onkeypress',
+  'onkeyup', 'onload', 'onloadeddata',
+  'onloadedmetadata', 'onloadstart', 'onmousedown',
+  'onmouseenter', 'onmouseleave', 'onmousemove',
+  'onmouseout', 'onmouseover', 'onmouseup',
+  'onmousewheel', 'onpause', 'onplay',
+  'onplaying', 'onprogress', 'onratechange',
+  'onreset', 'onresize', 'onscroll',
+  'onseeked', 'onseeking', 'onselect',
+  'onshow', 'onsort', 'onstalled',
+  'onsubmit', 'onsuspend', 'ontimeupdate',
+  'ontoggle', 'onvolumechange', 'onwaiting',
 ];
 const convTypeTag = {
-  'image':'img',
-  'link': 'a',
-  'heading': 'h1',
-  'strong': 'strong',
-  'emphasis': 'em',
-  'delete': 's',
-  'inlineCode': 'code',
+  image: 'img',
+  link: 'a',
+  heading: 'h1',
+  strong: 'strong',
+  emphasis: 'em',
+  delete: 's',
+  inlineCode: 'code',
 };
 /* TODO :
  * - [ ] fencedCode // require('./tokenize/code-fenced'),
@@ -50,18 +50,16 @@ const convTypeTag = {
    xo as linter
  */
 
-const tokenizeGenerator = ( prefix, oldParser, config ) => function tokenize(eat, value, silent) {
-  let eaten = oldParser.bind(this)(eat,value,silent);
+function tokenizeGenerator(prefix, oldParser, config) {
+  function token(eat, value, silent) {
+    const self = this;
+    let eaten = oldParser.call(self, eat, value, silent);
 
-  var self = this;
-  var index = 0;
-  var pedantic = self.options.pedantic;
-  var commonmark = self.options.commonmark;
-  var gfm = self.options.gfm;
-  var parsedAttr;
-  const length = value.length;
+    let index = 0;
+    let parsedAttr;
+    const {length} = value;
 
-  if( !eaten || !eaten.position ) {
+    if (!eaten || !eaten.position) {
       return undefined;
     }
 
@@ -69,16 +67,15 @@ const tokenizeGenerator = ( prefix, oldParser, config ) => function tokenize(eat
   index = eaten.position.end.offset -
          eaten.position.start.offset;
 
-  if (index + prefix.length < length && value.charAt(index + prefix.length) === '{' ) {
+  if (index + prefix.length < length && value.charAt(index + prefix.length) === '{') {
     parsedAttr = parseAttr(value, index + prefix.length);
   }
 
   if (parsedAttr) {
-    if( config.scope && config.scope != "none" ) {
-
-      const filtredProp = filterAttributes( parsedAttr.prop, config, type );
-      if( filtredProp !== {} ) {
-        if( eaten.data ) {
+    if (config.scope && config.scope !== 'none') {
+      const filtredProp = filterAttributes(parsedAttr.prop, config, type);
+      if (filtredProp !== {}) {
+        if (eaten.data) {
           eaten.data.hProperties = filtredProp;
         } else {
           eaten.data = {hProperties: filtredProp};
@@ -88,32 +85,33 @@ const tokenizeGenerator = ( prefix, oldParser, config ) => function tokenize(eat
     eaten = eat(prefix + parsedAttr.eaten)(eaten);
   }
   return eaten;
-};
+  }
+  return token;
+}
 
-function filterAttributes( prop, config, type ) {
-  const scope = config.scope;
-  const allowDangerousDOMEventHandlers = config.allowDangerousDOMEventHandlers;
+function filterAttributes(prop, config, type) {
+  const {scope} = config;
+  const {allowDangerousDOMEventHandlers} = config;
 
-  if( scope === "specific" ) {
-    console.log(type);
-    Object.getOwnPropertyNames(prop).forEach ( p => {
-      if( (!htmlElemAttr[type] || htmlElemAttr[type].indexOf(p) < 0) &&
-        htmlElemAttr["*"].indexOf(p) < 0 &&
-        DOMEventHandler.indexOf(p) < 0 ) {
+  if (scope === 'specific') {
+    Object.getOwnPropertyNames(prop).forEach(p => {
+      if ((!htmlElemAttr[type] || htmlElemAttr[type].indexOf(p) < 0) &&
+        htmlElemAttr['*'].indexOf(p) < 0 &&
+        DOMEventHandler.indexOf(p) < 0) {
         delete prop[p];
       }
     });
-  } else if ( scope === "global" ) {
-    Object.getOwnPropertyNames(prop).forEach ( p => {
-      if( htmlElemAttr["*"].indexOf(p) < 0 &&
-        DOMEventHandler.indexOf(p) < 0 ) {
+  } else if (scope === 'global') {
+    Object.getOwnPropertyNames(prop).forEach(p => {
+      if (htmlElemAttr['*'].indexOf(p) < 0 &&
+        DOMEventHandler.indexOf(p) < 0) {
         delete prop[p];
       }
     });
   }
-  if( ! allowDangerousDOMEventHandlers ) {
-    Object.getOwnPropertyNames(prop).forEach ( p => {
-      if( DOMEventHandler.indexOf(p) >= 0 ) {
+  if (!allowDangerousDOMEventHandlers) {
+    Object.getOwnPropertyNames(prop).forEach(p => {
+      if (DOMEventHandler.indexOf(p) >= 0) {
         delete prop[p];
       }
     });
@@ -121,23 +119,25 @@ function filterAttributes( prop, config, type ) {
   return prop;
 }
 
-module.exports = function linkAttr( config_user ) {
-  let parser = this.Parser;
+module.exports = remarkAttr;
+
+function remarkAttr(userConfig) {
+  const parser = this.Parser;
 
-  const defaul_config = {
-    allowDangerousDOMEventHandlers: false,
-    elements: ["link","image","header"],
-    extends: [],
-    scope: "specific",
+  const defaulConfig = {
+    allowDangerousDOMEventHandlers: false,
+    elements: ['link', 'image', 'header'],
+    extends: [],
+    scope: 'specific',
   };
-  const config = {...defaul_config, ...config_user};
+  const config = {...defaulConfig, ...userConfig};
 
   if (!isRemarkParser(parser)) {
     throw new Error('Missing parser to attach `remark-attr` [link] (to)');
   }
 
-  let tokenizers = parser.prototype.inlineTokenizers;
-  let tokenizersBlock = parser.prototype.blockTokenizers ;
+  const tokenizers = parser.prototype.inlineTokenizers;
+  const tokenizersBlock = parser.prototype.blockTokenizers;
 
   const oldLink = tokenizers.link;
   const oldStrong = tokenizers.strong;
@@ -146,27 +146,24 @@ module.exports = function linkAttr( config_user ) {
   const oldCodeInline = tokenizers.code;
   const oldAtxHeader = tokenizersBlock.atxHeading;
 
-
-  let linkTokenize = tokenizeGenerator('', oldLink, config);
+  console.log(tokenizeGenerator('', oldLink, config));
+  const linkTokenize = tokenizeGenerator('', oldLink, config);
   linkTokenize.locator = tokenizers.link.locator;
 
-  let strongTokenize = tokenizeGenerator('', oldStrong, config);
+  const strongTokenize = tokenizeGenerator('', oldStrong, config);
   strongTokenize.locator = tokenizers.strong.locator;
-  let emphasisTokenize = tokenizeGenerator('', oldEmphasis, config);
+  const emphasisTokenize = tokenizeGenerator('', oldEmphasis, config);
   emphasisTokenize.locator = tokenizers.emphasis.locator;
-  let deleteTokenize = tokenizeGenerator('', oldDeletion, config);
+  const deleteTokenize = tokenizeGenerator('', oldDeletion, config);
   deleteTokenize.locator = tokenizers.deletion.locator;
-  let codeInlineTokenize = tokenizeGenerator('', oldCodeInline, config);
+  const codeInlineTokenize = tokenizeGenerator('', oldCodeInline, config);
   codeInlineTokenize.locator = tokenizers.code.locator;
-
-
-  tokenizersBlock.atxHeading = tokenizeGenerator( '\n', oldAtxHeader, config );
+  tokenizersBlock.atxHeading = tokenizeGenerator('\n', oldAtxHeader, config);
   tokenizers.link = linkTokenize;
-  tokenizers.strong = strongTokenize; 
+  tokenizers.strong = strongTokenize;
   tokenizers.emphasis = emphasisTokenize;
   tokenizers.deletion = deleteTokenize;
-  tokenizers.code = codeInlineTokenize; 
-
+  tokenizers.code = codeInlineTokenize;
 }
 
 function isRemarkParser(parser) {
diff --git a/package.json b/package.json
index 4d53d0d..9e92a6d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "remark-attr",
-  "version": "0.1.0",
+  "version": "0.4.1",
   "description": "Add support of custom attributes to Markdown syntax.",
   "main": "index.js",
   "scripts": {
@@ -20,5 +20,21 @@
   "bugs": {
     "url": "https://github.com/arobase-che/remark-attr/issues"
   },
-  "homepage": "https://github.com/arobase-che/remark-attr#readme"
+  "homepage": "https://github.com/arobase-che/remark-attr#readme",
+  "devDependencies": {
+    "xo": "^0.21.0"
+  },
+  "xo": {
+    "space": true,
+    "rules": {
+      "comma-dangle": [
+        "error",
+        "always-multiline"
+      ]
+    }
+  },
+  "dependencies": {
+    "html-element-attributes": "^1.3.1",
+    "md-attr-parser": "^1.1.5"
+  }
 }
-- 
cgit v1.2.3