pathSeq += 1;
operationContent.push({
  text: `${tagSeq + 1}.${pathSeq} ${path.method.toUpperCase()} ${path.path}`,
  style: ['topMargin3', 'mono', 'p', 'primary', 'b'],
  tocItem: true,
  tocStyle: ['small', 'blue', 'mono'],
  tocNumberStyle: ['small', 'blue', 'mono'],
});
operationContent.push({ text: '', style: ['topMarginRegular'] });

let pathSummaryMarkDef;
let pathDescrMarkDef;
let tokens;
if (path.summary) {
  tokens = marked.lexer(path.summary);
  pathSummaryMarkDef = {
    stack: getMarkDownDef(tokens),
    style: ['primary', 'b'],
  };
  operationContent.push(pathSummaryMarkDef);
}
// path.summary may be absent, so fall back to an empty string before comparing.
if (path.description && path.description.trim() !== (path.summary || '').trim()) {
  tokens = marked.lexer(path.description);
  pathDescrMarkDef = {
    stack: getMarkDownDef(tokens),
  };
  operationContent.push(pathDescrMarkDef);
}

// Generate Request Defs
const requestSetDef = [];
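
All of these examples start by tokenizing markdown with marked.lexer rather than rendering it directly. A minimal, self-contained sketch of what that call returns (marked v4 import style; older versions expose the same top-level shape):

const { marked } = require('marked');

const tokens = marked.lexer('# Title\n\nFirst paragraph.');
// `tokens` is an array of block-level token objects with a `links` map attached:
//   { type: 'heading', depth: 1, text: 'Title', ... }
//   { type: 'paragraph', text: 'First paragraph.', ... }
console.log(tokens[0].text); // 'Title'
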
updateValue(value) {
  this.textValue = value;
  const tokens = marked.lexer(this.textValue);
  this.tasks = MarkdownHelper.markdownToJSON(tokens, this.tasks);
  this.markdownTaskItemService.setTasks(this.tasks);
  this.change.emit(value);
}
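
MarkdownHelper.markdownToJSON is project-specific and not shown here. Purely as an illustration of how task items can be pulled out of the lexer output (marked v1+ flags task-list items on list tokens), a sketch along these lines works; the function name is made up:

const { marked } = require('marked');

function tasksFromMarkdown(md) {
  // Collect checkbox items from every list token.
  return marked.lexer(md)
    .filter(t => t.type === 'list')
    .flatMap(list => list.items)
    .filter(item => item.task)
    .map(item => ({ text: item.text, done: !!item.checked }));
}

tasksFromMarkdown('- [x] write docs\n- [ ] add tests');
// -> [ { text: 'write docs', done: true }, { text: 'add tests', done: false } ]
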
firstTokentextOf: function firstTokentextOf(content, subdir) {
  if (content === undefined || content === null) { return ''; }
  const tokens = marked.lexer(evalTags(content, subdir));
  return tokens[0] ? tokens[0].text : '';
},
secondTokentextOf: function secondTokentextOf(content, subdir) {
  // Body inferred from firstTokentextOf above; the original snippet is cut off here.
  if (content === undefined || content === null) { return ''; }
  const tokens = marked.lexer(evalTags(content, subdir));
  return tokens[1] ? tokens[1].text : '';
},

window.ee.on('change.before.markdown', function(md) {
  var i, j;
  var tokens = marked.lexer(md, marked.defaults);
  _links = tokens.links;
  var creates = [];
  var removes = [];
  var reuses = [];
  _.each(tokens, function(token, idx) {
    j = _tokens.length;
    // Assume the token is newly created until a matching old token is found.
    creates[idx] = token;
    for (i = 0; i < j; i++) {
      if (compareJSON(token, _tokens[i])) {
        // Same token as in the previous parse: reuse it instead of recreating.
        reuses[idx] = _tokens.splice(i, 1);
        creates[idx] = undefined;
        break;
      }
    }
  });
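
compareJSON and _tokens come from elsewhere in that module: _tokens holds the previous parse's token list, and compareJSON only needs to decide whether two token objects are identical. A deep-equality stand-in (illustrative, not the project's actual helper):

function compareJSON(a, b) {
  return JSON.stringify(a) === JSON.stringify(b);
}
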
function synopsis(doc) {
  const tokens = marked.lexer(doc).filter(t => t.type === 'paragraph');
  if (tokens.length > 0) {
    return tokens[0].text;
  } else {
    return '';
  }
}
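
A quick usage example (the input string is made up) shows why filtering on type === 'paragraph' matters: headings and blank lines never become the synopsis.

synopsis('# my-module\n\nDoes one thing well.\n\n## Install\n\nnpm install my-module');
// -> 'Does one thing well.'
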
function updateMarkdownWith(title, markdownText, replacement) {
  la(check.unemptyString(title), 'missing section title');
  la(check.unemptyString(markdownText), 'missing markdown text');
  la(check.unemptyString(replacement), 'missing replacement text', replacement);
  var tokens = marked.lexer(markdownText);
  log('split source markdown into %d tokens', tokens.length);
  var updatedTokens = replaceSection(tokens, title, replacement);
  la(check.array(updatedTokens), 'could not update tokens', updatedTokens);
  var updatedText = parser.parse(updatedTokens);
  return updatedText;
}
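
la and check are assertion helpers, and replaceSection and parser are defined elsewhere in that module. The function itself is called with a section title, the full markdown source, and the replacement body, roughly like this (readmeText is assumed to hold the file contents):

// Hypothetical usage: swap the body of the "## Usage" section.
var updated = updateMarkdownWith('Usage', readmeText, 'See the examples folder.');
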
function markdownToHTML() {
  const mdContent = fs.readFileSync(README_PATH);
  const parsedMD = marked.lexer(mdContent.toString());
  const TOC = extractTOC(parsedMD);
  const TOCInjectRenderer = new marked.Renderer();
  let h2Seen = false;
  // Emit the generated TOC immediately before the first level-2 heading.
  TOCInjectRenderer.heading = function(text, level) {
    if (level === 2 && !h2Seen) {
      h2Seen = true;
      return `${TOC}${heading(text, level)}`;
    }
    return heading(text, level);
  };
  const body = marked(mdContent.toString(), { renderer: TOCInjectRenderer });
  return body;
}
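
extractTOC and heading are not part of the snippet above. As an illustration only, a TOC builder over the lexed tokens could look like this, assuming marked v1+ heading tokens with depth and text fields:

// Hypothetical sketch of an extractTOC-style helper.
function extractTOC(tokens) {
  const entries = tokens
    .filter(t => t.type === 'heading' && t.depth === 2)
    .map(t => `<li>${t.text}</li>`);
  return `<ul class="toc">\n${entries.join('\n')}\n</ul>`;
}
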
function toHTML(input, filename, template, cb) {
  // Point references to .markdown files at their generated .html counterparts.
  input = input.replace(/\.markdown/g, ".html");
  var lexed = marked.lexer(input);
  fs.readFile(template, 'utf8', function(er, template) {
    if (er) return cb(er);
    render(lexed, filename, template, cb);
  });
}

tokens(): marked.TokensList {
  return marked.lexer(this.data);
}
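
Exposing the raw token list like this pairs naturally with marked.parser, which turns tokens back into HTML. A minimal round trip (marked v4 API):

const { marked } = require('marked');

const tokens = marked.lexer('# Hello\n\nWorld');
const html = marked.parser(tokens);
// html is roughly '<h1 id="hello">Hello</h1>\n<p>World</p>'; exact markup depends on version and options.
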