// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// One-time patch of marked's shared "breaks" InlineLexer rule table so that
// a few reStructuredText-style inline constructs are recognized.
function setup_marked(){
marked.InlineLexer.breaks = new marked.InlineLexer([]);
//Allow Links in the format: `Foo bar baz `_
marked.InlineLexer.breaks.rules.link = /^`((?:[^`<]|\n)*)[\s\S]<([^>]*)>`_/;
//Allow bold in the format: :program:`foo-bar`
marked.InlineLexer.breaks.rules.strong = /^:\w*:`([\w\-]*?)`|^\*\*([\s\S]+?)\*\*(?!\*)/;
//Add : to the list of potential special characters in text:
// NOTE(review): the regex literal below is cut off mid-character-class in
// this copy of the file -- restore the full pattern from upstream before use.
marked.InlineLexer.breaks.rules.text = /^[\s\S]+?(?=[\\
parse(src) {
this.inline = new ReactInlineLexer(src.links, this.options);
// use an InlineLexer with a TextRenderer to extract pure text
this.inlineText = new marked.InlineLexer(
src.links,
Object.assign({}, this.options, { renderer: new marked.TextRenderer() })
);
this.tokens = src.reverse();
let out = [];
while (this.next()) {
out.push(this.tok());
}
return out;
}
// Fragment: tail of a checkbox renderer from another snippet; maps the
// boolean checked state to its display label.
return checked ? 'CHECKED' : 'UNCHECKED';
};
// Type-level exercises for marked's legacy classes (Renderer options,
// TextRenderer, Parser, InlineLexer). The console.log calls echo each
// result so the values are observably used.
// NOTE(review): `renderer`, `text` and `options` are defined elsewhere in
// the original file -- confirm before reusing this fragment standalone.
const rendererOptions: marked.MarkedOptions = renderer.options;
const textRenderer = new marked.TextRenderer();
console.log(textRenderer.strong(text));
// Nested list plus a loose trailing line, to exercise the block lexer.
const parseTestText = '- list1\n - list1.1\n\n listend';
const parseTestTokens: marked.TokensList = marked.lexer(parseTestText, options);
// Parser is usable both as an instance and via the static helper.
const parser = new marked.Parser();
console.log(parser.parse(parseTestTokens));
console.log(marked.Parser.parse(parseTestTokens));
const parserOptions: marked.MarkedOptions = parser.options;
// InlineLexer: instance output, static output, and the shared rule table.
const links = ['http', 'image'];
const inlineLexer = new marked.InlineLexer(links);
console.log(inlineLexer.output('http://'));
console.log(marked.InlineLexer.output('http://', links));
console.log(marked.InlineLexer.rules);
const inlineLexerOptions: marked.MarkedOptions = inlineLexer.options;
// Fragment of an Express middleware: requests whose Host header does not
// match the configured host are redirected to the canonical origin, with
// the scheme chosen by config.app.safe. This branch includes the port.
// NOTE(review): req.headers.host can be undefined (e.g. HTTP/1.0 clients),
// which would make .split() throw -- confirm an upstream guard exists.
if (req.headers.host.split(':')[0] !== config.app.host) return res.redirect( ((config.app.safe) ? 'https://' : 'http://') + config.app.host + ':' + config.app.port + req.path );
} else {
// Default-port branch: same check, but the redirect omits the port.
if (req.headers.host.split(':')[0] !== config.app.host) return res.redirect( ((config.app.safe) ? 'https://' : 'http://') + config.app.host + req.path );
}
// Host matches (or no redirect needed): continue down the middleware chain.
return next();
}
//
// App-level markdown setup: a second marked instance for long-form content
// (sanitized + smartypants) and a customized InlineLexer for chat messages.
var otherMarked = require('./lib/marked');
otherMarked.setOptions({
sanitize: true
, smartypants: true
});
var lexers = {
chat: new marked.InlineLexer([], {sanitize: true, smartypants:true, gfm:true})
, content: otherMarked
};
// NOTE(review): unlike marked's stock link rule, this override captures no
// URL group at all -- chat links apparently match text + optional quoted
// title only. Confirm that restricting hrefs is the intent.
lexers.chat.rules.link = /^\[((?:\[[^\]]*\]|[^\]]|\](?=[^\[]*\]))*)\]\(\s*?(?:\s+['"]([\s\S]*?)['"])?\s*\)/;
// Expose utilities to all views via app.locals.
app.locals.pretty = true;
app.locals.moment = require('moment');
app.locals.marked = otherMarked;
app.locals.lexers = lexers;
app.locals.lexer = lexers.content;
app.locals.sanitize = validator;
app.locals._ = _;
app.locals.helpers = require('./helpers').helpers;
// NOTE(review): extending String.prototype is a global side effect that can
// collide with other libraries -- consider a standalone helper instead.
String.prototype.capitalize = function(){
return this.replace( /(^|\s)([a-z])/g , function(m,p1,p2){ return p1+p2.toUpperCase(); } );
};
}
}
}
};
/**
 * Record where `text` occurs in the source being lexed.
 *
 * Scans `src` forward from `currentIndex`, advances `currentIndex` past the
 * match, and appends a {text, index} record (index mapped back to the
 * original document via `tracker`) to `textTokens`.
 *
 * @param {string} text  Exact substring expected to appear next in `src`.
 * @throws {Error} If `text` cannot be found at or after `currentIndex`.
 */
function addToken(text) {
  const newIndex = src.indexOf(text, currentIndex);
  if (newIndex === -1) {
    // Bug fix: substring(currentIndex, 30) treated 30 as an absolute end
    // offset (and substring swaps its arguments once currentIndex > 30),
    // so the error snippet was wrong. We want the 30 characters *starting
    // at* currentIndex.
    throw new Error("Markdown Parser : Inline Lexer : Could not find index of text - \n" + text + "\n\n**In**\n\n" + src.substring(currentIndex, currentIndex + 30) + "\n");
  }
  currentIndex = newIndex + text.length;
  textTokens.push({ text: text, index: tracker.getOriginalIndex(newIndex) });
}
// Tail of the enclosing text-extraction function: run the block lexer, then
// feed every non-code token's text through the inline lexer. The return
// value of output() is discarded -- presumably a customized lexer whose
// pass invokes addToken above to populate textTokens; confirm upstream.
const tokens = marked.lexer(src, options);
const inlineLexer = new marked.InlineLexer(tokens.links, options);
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Code blocks keep their text verbatim, so they are skipped here.
if (token.text && token.type !== "code") {
inlineLexer.output(token.text);
}
}
return textTokens;
}
// Parse markdown `text` into wrapped plain text (fragment: the function
// body continues past this excerpt).
// NOTE(review): `tokens` and `inline` are assigned without var/let/const
// and leak as implicit globals -- confirm and scope them locally upstream.
exports.parse = function(text, options) {
tokens = marked.lexer(text);
inline = new marked.InlineLexer(tokens.links);
// Caller-supplied options override the defaults.
options = xtend(defaultOptions, options);
var outputArr = [];
var output;
// Clamp the requested width to maxWidth (maxWidth === -1 means "no cap").
if (options.maxWidth !== -1 && options.width > options.maxWidth) {
options.width = options.maxWidth
}
while (next()) {
outputArr.push(processToken(options));
}
if (options.collapseNewlines) {
// Collapse triple newlines into paragraph breaks.
// NOTE(review): a single-pass replace leaves runs of 4+ newlines only
// partially collapsed -- confirm whether that is acceptable.
output = outputArr.join('').replace(/\n\n\n/g, '\n\n');
}