fix: fix more types (#2893)

Co-authored-by: Steven <steven@ceriously.com>
Tony Brix 2023-08-19 16:55:56 -06:00 committed by GitHub
parent 4e6acc8b85
commit a990c54e0c
19 changed files with 642 additions and 563 deletions

9
.vscode/settings.json vendored Normal file

@ -0,0 +1,9 @@
{
"cSpell.words": [
"commonmark",
"markedjs",
"titleize",
"uglifyjs",
"vuln"
]
}

237
lib/marked.cjs generated

@ -170,7 +170,8 @@ function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
let escaped = false, curr = offset;
let escaped = false;
let curr = offset;
while (--curr >= 0 && str[curr] === '\\')
escaped = !escaped;
if (escaped) {
@ -240,9 +241,8 @@ function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) {
return -1;
}
const l = str.length;
let level = 0, i = 0;
for (; i < l; i++) {
let level = 0;
for (let i = 0; i < str.length; i++) {
if (str[i] === '\\') {
i++;
}
@ -339,6 +339,7 @@ function indentCodeCompensation(raw, text) {
*/
class _Tokenizer {
options;
// TODO: Fix this rules type
rules;
lexer;
constructor(options) {
@ -432,7 +433,6 @@ class _Tokenizer {
list(src) {
let cap = this.rules.block.list.exec(src);
if (cap) {
let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine, line, nextLine, rawLine, itemContents, endEarly;
let bull = cap[1].trim();
const isordered = bull.length > 1;
const list = {
@ -449,9 +449,12 @@ class _Tokenizer {
}
// Get next list item
const itemRegex = new RegExp(`^( {0,3}${bull})((?:[\t ][^\\n]*)?(?:\\n|$))`);
let raw = '';
let itemContents = '';
let endsWithBlankLine = false;
// Check if current bullet point can start a new List Item
while (src) {
endEarly = false;
let endEarly = false;
if (!(cap = itemRegex.exec(src))) {
break;
}
@ -460,8 +463,9 @@ class _Tokenizer {
}
raw = cap[0];
src = src.substring(raw.length);
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
nextLine = src.split('\n', 1)[0];
let line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
let nextLine = src.split('\n', 1)[0];
let indent = 0;
if (this.options.pedantic) {
indent = 2;
itemContents = line.trimLeft();
@ -472,7 +476,7 @@ class _Tokenizer {
itemContents = line.slice(indent);
indent += cap[1].length;
}
blankLine = false;
let blankLine = false;
if (!line && /^ *$/.test(nextLine)) { // Items begin with at most one blank line
raw += nextLine + '\n';
src = src.substring(nextLine.length + 1);
@ -485,7 +489,7 @@ class _Tokenizer {
const headingBeginRegex = new RegExp(`^ {0,${Math.min(3, indent - 1)}}#`);
// Check if following lines should be included in List Item
while (src) {
rawLine = src.split('\n', 1)[0];
const rawLine = src.split('\n', 1)[0];
nextLine = rawLine;
// Re-align to follow commonmark nesting rules
if (this.options.pedantic) {
@ -547,6 +551,8 @@ class _Tokenizer {
endsWithBlankLine = true;
}
}
let istask = null;
let ischecked;
// Check for task list items
if (this.options.gfm) {
istask = /^\[[ xX]\] /.exec(itemContents);
@ -561,7 +567,8 @@ class _Tokenizer {
task: !!istask,
checked: ischecked,
loose: false,
text: itemContents
text: itemContents,
tokens: []
});
list.raw += raw;
}
@ -569,9 +576,8 @@ class _Tokenizer {
list.items[list.items.length - 1].raw = raw.trimRight();
list.items[list.items.length - 1].text = itemContents.trimRight();
list.raw = list.raw.trimRight();
const l = list.items.length;
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose) {
@ -583,7 +589,7 @@ class _Tokenizer {
}
// Set all items to loose if list is loose
if (list.loose) {
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
list.items[i].loose = true;
}
}
@ -633,7 +639,7 @@ class _Tokenizer {
type: 'table',
raw: cap[0],
header: splitCells(cap[1]).map(c => {
return { text: c };
return { text: c, tokens: [] };
}),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
@ -642,23 +648,26 @@ class _Tokenizer {
let l = item.align.length;
let i, j, k, row;
for (i = 0; i < l; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
const align = item.align[i];
if (align) {
if (/^ *-+: *$/.test(align)) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(align)) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(align)) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
}
}
}
l = item.rows.length;
for (i = 0; i < l; i++) {
item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => {
return { text: c };
return { text: c, tokens: [] };
});
}
// parse child tokens inside headers and cells
@ -1309,13 +1318,11 @@ function smartypants(text) {
* mangle email addresses
*/
function mangle(text) {
let out = '', i, ch;
const l = text.length;
for (i = 0; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
let out = '';
for (let i = 0; i < text.length; i++) {
const ch = Math.random() > 0.5
? 'x' + text.charCodeAt(i).toString(16)
: text.charCodeAt(i).toString();
out += '&#' + ch + ';';
}
return out;
@ -1761,7 +1768,7 @@ class _Renderer {
this.options = options || exports.defaults;
}
code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0];
const lang = (infostring || '').match(/^\S*/)?.[0];
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
@ -1800,7 +1807,8 @@ class _Renderer {
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
}
list(body, ordered, start) {
const type = ordered ? 'ol' : 'ul', startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
const type = ordered ? 'ol' : 'ul';
const startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
}
listitem(text, task, checked) {
@ -1855,10 +1863,11 @@ class _Renderer {
return `<del>${text}</del>`;
}
link(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
@ -1867,10 +1876,11 @@ class _Renderer {
return out;
}
image(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = `<img src="${href}" alt="${text}"`;
if (title) {
out += ` title="${title}"`;
@ -1998,14 +2008,14 @@ class _Parser {
* Parse Loop
*/
parse(tokens, top = true) {
let out = '', i, j, k, l2, l3, row, cell, header, body, token, ordered, start, loose, itemBody, item, checked, task, checkbox, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
const genericToken = token;
const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
out += ret || '';
continue;
}
@ -2019,30 +2029,30 @@ class _Parser {
continue;
}
case 'heading': {
out += this.renderer.heading(this.parseInline(token.tokens), token.depth, unescape(this.parseInline(token.tokens, this.textRenderer)), this.slugger);
const headingToken = token;
out += this.renderer.heading(this.parseInline(headingToken.tokens), headingToken.depth, unescape(this.parseInline(headingToken.tokens, this.textRenderer)), this.slugger);
continue;
}
case 'code': {
out += this.renderer.code(token.text, token.lang, !!token.escaped);
const codeToken = token;
out += this.renderer.code(codeToken.text, codeToken.lang, !!codeToken.escaped);
continue;
}
case 'table': {
header = '';
const tableToken = token;
let header = '';
// header
cell = '';
l2 = token.header.length;
for (j = 0; j < l2; j++) {
cell += this.renderer.tablecell(this.parseInline(token.header[j].tokens), { header: true, align: token.align[j] });
let cell = '';
for (let j = 0; j < tableToken.header.length; j++) {
cell += this.renderer.tablecell(this.parseInline(tableToken.header[j].tokens), { header: true, align: tableToken.align[j] });
}
header += this.renderer.tablerow(cell);
body = '';
l2 = token.rows.length;
for (j = 0; j < l2; j++) {
row = token.rows[j];
let body = '';
for (let j = 0; j < tableToken.rows.length; j++) {
const row = tableToken.rows[j];
cell = '';
l3 = row.length;
for (k = 0; k < l3; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: token.align[k] });
for (let k = 0; k < row.length; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: tableToken.align[k] });
}
body += this.renderer.tablerow(cell);
}
@ -2050,23 +2060,24 @@ class _Parser {
continue;
}
case 'blockquote': {
body = this.parse(token.tokens);
const blockquoteToken = token;
const body = this.parse(blockquoteToken.tokens);
out += this.renderer.blockquote(body);
continue;
}
case 'list': {
ordered = token.ordered;
start = token.start;
loose = token.loose;
l2 = token.items.length;
body = '';
for (j = 0; j < l2; j++) {
item = token.items[j];
checked = item.checked;
task = item.task;
itemBody = '';
const listToken = token;
const ordered = listToken.ordered;
const start = listToken.start;
const loose = listToken.loose;
let body = '';
for (let j = 0; j < listToken.items.length; j++) {
const item = listToken.items[j];
const checked = item.checked;
const task = item.task;
let itemBody = '';
if (item.task) {
checkbox = this.renderer.checkbox(!!checked);
const checkbox = this.renderer.checkbox(!!checked);
if (loose) {
if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
@ -2092,18 +2103,21 @@ class _Parser {
continue;
}
case 'html': {
out += this.renderer.html(token.text, token.block);
const htmlToken = token;
out += this.renderer.html(htmlToken.text, htmlToken.block);
continue;
}
case 'paragraph': {
out += this.renderer.paragraph(this.parseInline(token.tokens));
const paragraphToken = token;
out += this.renderer.paragraph(this.parseInline(paragraphToken.tokens));
continue;
}
case 'text': {
body = token.tokens ? this.parseInline(token.tokens) : token.text;
while (i + 1 < l && tokens[i + 1].type === 'text') {
token = tokens[++i];
body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
let textToken = token;
let body = textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text;
while (i + 1 < tokens.length && tokens[i + 1].type === 'text') {
textToken = tokens[++i];
body += '\n' + (textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text);
}
out += top ? this.renderer.paragraph(body) : body;
continue;
@ -2127,13 +2141,12 @@ class _Parser {
*/
parseInline(tokens, renderer) {
renderer = renderer || this.renderer;
let out = '', i, token, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
const ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
out += ret || '';
continue;
@ -2141,31 +2154,38 @@ class _Parser {
}
switch (token.type) {
case 'escape': {
out += renderer.text(token.text);
const escapeToken = token;
out += renderer.text(escapeToken.text);
break;
}
case 'html': {
out += renderer.html(token.text);
const tagToken = token;
out += renderer.html(tagToken.text);
break;
}
case 'link': {
out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
const linkToken = token;
out += renderer.link(linkToken.href, linkToken.title, this.parseInline(linkToken.tokens, renderer));
break;
}
case 'image': {
out += renderer.image(token.href, token.title, token.text);
const imageToken = token;
out += renderer.image(imageToken.href, imageToken.title, imageToken.text);
break;
}
case 'strong': {
out += renderer.strong(this.parseInline(token.tokens, renderer));
const strongToken = token;
out += renderer.strong(this.parseInline(strongToken.tokens, renderer));
break;
}
case 'em': {
out += renderer.em(this.parseInline(token.tokens, renderer));
const emToken = token;
out += renderer.em(this.parseInline(emToken.tokens, renderer));
break;
}
case 'codespan': {
out += renderer.codespan(token.text);
const codespanToken = token;
out += renderer.codespan(codespanToken.text);
break;
}
case 'br': {
@ -2173,11 +2193,13 @@ class _Parser {
break;
}
case 'del': {
out += renderer.del(this.parseInline(token.tokens, renderer));
const delToken = token;
out += renderer.del(this.parseInline(delToken.tokens, renderer));
break;
}
case 'text': {
out += renderer.text(token.text);
const textToken = token;
out += renderer.text(textToken.text);
break;
}
default: {
@ -2245,10 +2267,11 @@ class Marked {
values = values.concat(callback.call(this, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
const tableToken = token;
for (const cell of tableToken.header) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const row of tableToken.rows) {
for (const cell of row) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
@ -2256,18 +2279,19 @@ class Marked {
break;
}
case 'list': {
values = values.concat(this.walkTokens(token.items, callback));
const listToken = token;
values = values.concat(this.walkTokens(listToken.items, callback));
break;
}
default: {
if (this.defaults.extensions && this.defaults.extensions.childTokens && this.defaults.extensions.childTokens[token.type]) { // Walk any extensions
this.defaults.extensions.childTokens[token.type].forEach((childTokens) => {
// @ts-expect-error we assume token[childToken] is an array of tokens but we can't be sure
values = values.concat(this.walkTokens(token[childTokens], callback));
const genericToken = token;
if (this.defaults.extensions?.childTokens?.[genericToken.type]) {
this.defaults.extensions.childTokens[genericToken.type].forEach((childTokens) => {
values = values.concat(this.walkTokens(genericToken[childTokens], callback));
});
}
else if (token.tokens) {
values = values.concat(this.walkTokens(token.tokens, callback));
else if (genericToken.tokens) {
values = values.concat(this.walkTokens(genericToken.tokens, callback));
}
}
}
@ -2307,8 +2331,9 @@ class Marked {
if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
throw new Error("extension level must be 'block' or 'inline'");
}
if (extensions[ext.level]) {
extensions[ext.level].unshift(ext.tokenizer);
const extLevel = extensions[ext.level];
if (extLevel) {
extLevel.unshift(ext.tokenizer);
}
else {
extensions[ext.level] = [ext.tokenizer];
@ -2406,9 +2431,10 @@ class Marked {
// ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) {
const walkTokens = this.defaults.walkTokens;
const packWalktokens = pack.walkTokens;
opts.walkTokens = function (token) {
let values = [];
values.push(pack.walkTokens.call(this, token));
values.push(packWalktokens.call(this, token));
if (walkTokens) {
values = values.concat(walkTokens.call(this, token));
}
@ -2445,6 +2471,7 @@ class Marked {
opt.hooks.options = opt;
}
if (callback) {
const resultCallback = callback;
const highlight = opt.highlight;
let tokens;
try {
@ -2475,7 +2502,7 @@ class Marked {
opt.highlight = highlight;
return err
? throwError(err)
: callback(null, out);
: resultCallback(null, out);
};
if (!highlight || highlight.length < 3) {
return done();

2
lib/marked.cjs.map generated

File diff suppressed because one or more lines are too long

52
lib/marked.d.ts generated vendored

@ -1,8 +1,5 @@
declare module "Tokens" {
export type Token = (Tokens.Space | Tokens.Code | Tokens.Heading | Tokens.Table | Tokens.Hr | Tokens.Blockquote | Tokens.List | Tokens.ListItem | Tokens.Paragraph | Tokens.HTML | Tokens.Text | Tokens.Def | Tokens.Escape | Tokens.Tag | Tokens.Image | Tokens.Link | Tokens.Strong | Tokens.Em | Tokens.Codespan | Tokens.Br | Tokens.Del | Tokens.Generic) & {
loose?: boolean;
tokens?: Token[];
};
export type Token = (Tokens.Space | Tokens.Code | Tokens.Heading | Tokens.Table | Tokens.Hr | Tokens.Blockquote | Tokens.List | Tokens.ListItem | Tokens.Paragraph | Tokens.HTML | Tokens.Text | Tokens.Def | Tokens.Escape | Tokens.Tag | Tokens.Image | Tokens.Link | Tokens.Strong | Tokens.Em | Tokens.Codespan | Tokens.Br | Tokens.Del | Tokens.Generic);
export namespace Tokens {
interface Space {
type: 'space';
@ -32,7 +29,7 @@ declare module "Tokens" {
}
interface TableCell {
text: string;
tokens?: Token[];
tokens: Token[];
}
interface Hr {
type: 'hr';
@ -59,7 +56,7 @@ declare module "Tokens" {
checked?: boolean | undefined;
loose: boolean;
text: string;
tokens?: Token[];
tokens: Token[];
}
interface Paragraph {
type: 'paragraph';
@ -441,11 +438,13 @@ declare module "MarkedOptions" {
export interface TokenizerThis {
lexer: _Lexer;
}
export type TokenizerExtensionFunction = (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | undefined;
export type TokenizerStartFunction = (this: TokenizerThis, src: string) => number | void;
export interface TokenizerExtension {
name: string;
level: 'block' | 'inline';
start?: ((this: TokenizerThis, src: string) => number | void) | undefined;
tokenizer: (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | undefined;
start?: TokenizerStartFunction | undefined;
tokenizer: TokenizerExtensionFunction;
childTokens?: string[] | undefined;
}
export interface RendererThis {
@ -568,42 +567,43 @@ declare module "MarkedOptions" {
* Each token is passed by reference so updates are persisted when passed to the parser.
* The return value of the function is ignored.
*/
walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
walkTokens?: ((token: Token) => void | unknown | Promise<void>) | undefined | null;
/**
* Generate closing slash for self-closing tags (<br/> instead of <br>)
* @deprecated Deprecated in v5.0.0 use marked-xhtml to emit self-closing HTML tags for void elements (<br/>, <img/>, etc.) with a "/" as required by XHTML.
*/
xhtml?: boolean | undefined;
}
export interface MarkedOptions extends Omit<MarkedExtension, 'extensions' | 'renderer' | 'tokenizer' | 'walkTokens'> {
export interface MarkedOptions extends Omit<MarkedExtension, 'renderer' | 'tokenizer' | 'extensions' | 'walkTokens'> {
/**
* Type: object Default: new Renderer()
*
* An object containing functions to render tokens to HTML.
*/
renderer?: Omit<_Renderer, 'constructor'> | undefined | null;
renderer?: _Renderer | undefined | null;
/**
* The tokenizer defines how to turn markdown text into tokens.
*/
tokenizer?: Omit<_Tokenizer, 'constructor'> | undefined | null;
tokenizer?: _Tokenizer | undefined | null;
/**
* The walkTokens function gets called with every token.
* Child tokens are called before moving on to sibling tokens.
* Each token is passed by reference so updates are persisted when passed to the parser.
* The return value of the function is ignored.
* Custom extensions
*/
walkTokens?: ((token: Token) => void | Promise<void> | Array<void | Promise<void>>) | undefined | null;
extensions?: null | {
renderers: {
[name: string]: RendererExtensionFunction;
};
childTokens: {
[name: string]: string[];
};
inline?: TokenizerExtensionFunction[];
block?: TokenizerExtensionFunction[];
startInline?: TokenizerStartFunction[];
startBlock?: TokenizerStartFunction[];
};
/**
* Add tokenizers and renderers to marked
* walkTokens function returns array of values for Promise.all
*/
extensions?: (TokenizerAndRendererExtension[] & {
renderers: Record<string, RendererExtensionFunction>;
childTokens: Record<string, string[]>;
block: any[];
inline: any[];
startBlock: Array<(this: TokenizerThis, src: string) => number | void>;
startInline: Array<(this: TokenizerThis, src: string) => number | void>;
}) | undefined | null;
walkTokens?: null | ((token: Token) => void | (unknown | Promise<void>)[]);
}
}
declare module "defaults" {

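For context, the tightened declarations above (TokenizerExtensionFunction, TokenizerStartFunction, and the now-required tokens arrays on TableCell and ListItem) are the types a custom extension is written against. A minimal sketch in TypeScript, assuming marked's documented marked.use() extension API; the ::note marker, its regex, and the 'note' renderer are hypothetical:

// Illustrative only, not part of this commit.
import { marked } from 'marked';

marked.use({
  extensions: [{
    name: 'note',
    level: 'block',                                // TokenizerExtension['level']
    start(src) { return src.indexOf('::note'); },  // TokenizerStartFunction
    tokenizer(src) {                               // TokenizerExtensionFunction
      const match = /^::note\n([\s\S]+?)\n::(?:\n|$)/.exec(src);
      if (match) {
        // Tokens.Generic: fields beyond type/raw are carried through to the renderer
        return { type: 'note', raw: match[0], text: match[1].trim() };
      }
    },
    renderer(token) {
      return `<div class="note">${token.text}</div>\n`;
    }
  }]
});

console.log(marked.parse('::note\nTypes are stricter now.\n::'));
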
237
lib/marked.esm.js generated

@ -168,7 +168,8 @@ function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
let escaped = false, curr = offset;
let escaped = false;
let curr = offset;
while (--curr >= 0 && str[curr] === '\\')
escaped = !escaped;
if (escaped) {
@ -238,9 +239,8 @@ function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) {
return -1;
}
const l = str.length;
let level = 0, i = 0;
for (; i < l; i++) {
let level = 0;
for (let i = 0; i < str.length; i++) {
if (str[i] === '\\') {
i++;
}
@ -337,6 +337,7 @@ function indentCodeCompensation(raw, text) {
*/
class _Tokenizer {
options;
// TODO: Fix this rules type
rules;
lexer;
constructor(options) {
@ -430,7 +431,6 @@ class _Tokenizer {
list(src) {
let cap = this.rules.block.list.exec(src);
if (cap) {
let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine, line, nextLine, rawLine, itemContents, endEarly;
let bull = cap[1].trim();
const isordered = bull.length > 1;
const list = {
@ -447,9 +447,12 @@ class _Tokenizer {
}
// Get next list item
const itemRegex = new RegExp(`^( {0,3}${bull})((?:[\t ][^\\n]*)?(?:\\n|$))`);
let raw = '';
let itemContents = '';
let endsWithBlankLine = false;
// Check if current bullet point can start a new List Item
while (src) {
endEarly = false;
let endEarly = false;
if (!(cap = itemRegex.exec(src))) {
break;
}
@ -458,8 +461,9 @@ class _Tokenizer {
}
raw = cap[0];
src = src.substring(raw.length);
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
nextLine = src.split('\n', 1)[0];
let line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
let nextLine = src.split('\n', 1)[0];
let indent = 0;
if (this.options.pedantic) {
indent = 2;
itemContents = line.trimLeft();
@ -470,7 +474,7 @@ class _Tokenizer {
itemContents = line.slice(indent);
indent += cap[1].length;
}
blankLine = false;
let blankLine = false;
if (!line && /^ *$/.test(nextLine)) { // Items begin with at most one blank line
raw += nextLine + '\n';
src = src.substring(nextLine.length + 1);
@ -483,7 +487,7 @@ class _Tokenizer {
const headingBeginRegex = new RegExp(`^ {0,${Math.min(3, indent - 1)}}#`);
// Check if following lines should be included in List Item
while (src) {
rawLine = src.split('\n', 1)[0];
const rawLine = src.split('\n', 1)[0];
nextLine = rawLine;
// Re-align to follow commonmark nesting rules
if (this.options.pedantic) {
@ -545,6 +549,8 @@ class _Tokenizer {
endsWithBlankLine = true;
}
}
let istask = null;
let ischecked;
// Check for task list items
if (this.options.gfm) {
istask = /^\[[ xX]\] /.exec(itemContents);
@ -559,7 +565,8 @@ class _Tokenizer {
task: !!istask,
checked: ischecked,
loose: false,
text: itemContents
text: itemContents,
tokens: []
});
list.raw += raw;
}
@ -567,9 +574,8 @@ class _Tokenizer {
list.items[list.items.length - 1].raw = raw.trimRight();
list.items[list.items.length - 1].text = itemContents.trimRight();
list.raw = list.raw.trimRight();
const l = list.items.length;
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose) {
@ -581,7 +587,7 @@ class _Tokenizer {
}
// Set all items to loose if list is loose
if (list.loose) {
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
list.items[i].loose = true;
}
}
@ -631,7 +637,7 @@ class _Tokenizer {
type: 'table',
raw: cap[0],
header: splitCells(cap[1]).map(c => {
return { text: c };
return { text: c, tokens: [] };
}),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
@ -640,23 +646,26 @@ class _Tokenizer {
let l = item.align.length;
let i, j, k, row;
for (i = 0; i < l; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
const align = item.align[i];
if (align) {
if (/^ *-+: *$/.test(align)) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(align)) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(align)) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
}
}
}
l = item.rows.length;
for (i = 0; i < l; i++) {
item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => {
return { text: c };
return { text: c, tokens: [] };
});
}
// parse child tokens inside headers and cells
@ -1307,13 +1316,11 @@ function smartypants(text) {
* mangle email addresses
*/
function mangle(text) {
let out = '', i, ch;
const l = text.length;
for (i = 0; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
let out = '';
for (let i = 0; i < text.length; i++) {
const ch = Math.random() > 0.5
? 'x' + text.charCodeAt(i).toString(16)
: text.charCodeAt(i).toString();
out += '&#' + ch + ';';
}
return out;
@ -1759,7 +1766,7 @@ class _Renderer {
this.options = options || _defaults;
}
code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0];
const lang = (infostring || '').match(/^\S*/)?.[0];
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
@ -1798,7 +1805,8 @@ class _Renderer {
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
}
list(body, ordered, start) {
const type = ordered ? 'ol' : 'ul', startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
const type = ordered ? 'ol' : 'ul';
const startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
}
listitem(text, task, checked) {
@ -1853,10 +1861,11 @@ class _Renderer {
return `<del>${text}</del>`;
}
link(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
@ -1865,10 +1874,11 @@ class _Renderer {
return out;
}
image(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = `<img src="${href}" alt="${text}"`;
if (title) {
out += ` title="${title}"`;
@ -1996,14 +2006,14 @@ class _Parser {
* Parse Loop
*/
parse(tokens, top = true) {
let out = '', i, j, k, l2, l3, row, cell, header, body, token, ordered, start, loose, itemBody, item, checked, task, checkbox, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
const genericToken = token;
const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
out += ret || '';
continue;
}
@ -2017,30 +2027,30 @@ class _Parser {
continue;
}
case 'heading': {
out += this.renderer.heading(this.parseInline(token.tokens), token.depth, unescape(this.parseInline(token.tokens, this.textRenderer)), this.slugger);
const headingToken = token;
out += this.renderer.heading(this.parseInline(headingToken.tokens), headingToken.depth, unescape(this.parseInline(headingToken.tokens, this.textRenderer)), this.slugger);
continue;
}
case 'code': {
out += this.renderer.code(token.text, token.lang, !!token.escaped);
const codeToken = token;
out += this.renderer.code(codeToken.text, codeToken.lang, !!codeToken.escaped);
continue;
}
case 'table': {
header = '';
const tableToken = token;
let header = '';
// header
cell = '';
l2 = token.header.length;
for (j = 0; j < l2; j++) {
cell += this.renderer.tablecell(this.parseInline(token.header[j].tokens), { header: true, align: token.align[j] });
let cell = '';
for (let j = 0; j < tableToken.header.length; j++) {
cell += this.renderer.tablecell(this.parseInline(tableToken.header[j].tokens), { header: true, align: tableToken.align[j] });
}
header += this.renderer.tablerow(cell);
body = '';
l2 = token.rows.length;
for (j = 0; j < l2; j++) {
row = token.rows[j];
let body = '';
for (let j = 0; j < tableToken.rows.length; j++) {
const row = tableToken.rows[j];
cell = '';
l3 = row.length;
for (k = 0; k < l3; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: token.align[k] });
for (let k = 0; k < row.length; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: tableToken.align[k] });
}
body += this.renderer.tablerow(cell);
}
@ -2048,23 +2058,24 @@ class _Parser {
continue;
}
case 'blockquote': {
body = this.parse(token.tokens);
const blockquoteToken = token;
const body = this.parse(blockquoteToken.tokens);
out += this.renderer.blockquote(body);
continue;
}
case 'list': {
ordered = token.ordered;
start = token.start;
loose = token.loose;
l2 = token.items.length;
body = '';
for (j = 0; j < l2; j++) {
item = token.items[j];
checked = item.checked;
task = item.task;
itemBody = '';
const listToken = token;
const ordered = listToken.ordered;
const start = listToken.start;
const loose = listToken.loose;
let body = '';
for (let j = 0; j < listToken.items.length; j++) {
const item = listToken.items[j];
const checked = item.checked;
const task = item.task;
let itemBody = '';
if (item.task) {
checkbox = this.renderer.checkbox(!!checked);
const checkbox = this.renderer.checkbox(!!checked);
if (loose) {
if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
@ -2090,18 +2101,21 @@ class _Parser {
continue;
}
case 'html': {
out += this.renderer.html(token.text, token.block);
const htmlToken = token;
out += this.renderer.html(htmlToken.text, htmlToken.block);
continue;
}
case 'paragraph': {
out += this.renderer.paragraph(this.parseInline(token.tokens));
const paragraphToken = token;
out += this.renderer.paragraph(this.parseInline(paragraphToken.tokens));
continue;
}
case 'text': {
body = token.tokens ? this.parseInline(token.tokens) : token.text;
while (i + 1 < l && tokens[i + 1].type === 'text') {
token = tokens[++i];
body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
let textToken = token;
let body = textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text;
while (i + 1 < tokens.length && tokens[i + 1].type === 'text') {
textToken = tokens[++i];
body += '\n' + (textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text);
}
out += top ? this.renderer.paragraph(body) : body;
continue;
@ -2125,13 +2139,12 @@ class _Parser {
*/
parseInline(tokens, renderer) {
renderer = renderer || this.renderer;
let out = '', i, token, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
const ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
out += ret || '';
continue;
@ -2139,31 +2152,38 @@ class _Parser {
}
switch (token.type) {
case 'escape': {
out += renderer.text(token.text);
const escapeToken = token;
out += renderer.text(escapeToken.text);
break;
}
case 'html': {
out += renderer.html(token.text);
const tagToken = token;
out += renderer.html(tagToken.text);
break;
}
case 'link': {
out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
const linkToken = token;
out += renderer.link(linkToken.href, linkToken.title, this.parseInline(linkToken.tokens, renderer));
break;
}
case 'image': {
out += renderer.image(token.href, token.title, token.text);
const imageToken = token;
out += renderer.image(imageToken.href, imageToken.title, imageToken.text);
break;
}
case 'strong': {
out += renderer.strong(this.parseInline(token.tokens, renderer));
const strongToken = token;
out += renderer.strong(this.parseInline(strongToken.tokens, renderer));
break;
}
case 'em': {
out += renderer.em(this.parseInline(token.tokens, renderer));
const emToken = token;
out += renderer.em(this.parseInline(emToken.tokens, renderer));
break;
}
case 'codespan': {
out += renderer.codespan(token.text);
const codespanToken = token;
out += renderer.codespan(codespanToken.text);
break;
}
case 'br': {
@ -2171,11 +2191,13 @@ class _Parser {
break;
}
case 'del': {
out += renderer.del(this.parseInline(token.tokens, renderer));
const delToken = token;
out += renderer.del(this.parseInline(delToken.tokens, renderer));
break;
}
case 'text': {
out += renderer.text(token.text);
const textToken = token;
out += renderer.text(textToken.text);
break;
}
default: {
@ -2243,10 +2265,11 @@ class Marked {
values = values.concat(callback.call(this, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
const tableToken = token;
for (const cell of tableToken.header) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const row of tableToken.rows) {
for (const cell of row) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
@ -2254,18 +2277,19 @@ class Marked {
break;
}
case 'list': {
values = values.concat(this.walkTokens(token.items, callback));
const listToken = token;
values = values.concat(this.walkTokens(listToken.items, callback));
break;
}
default: {
if (this.defaults.extensions && this.defaults.extensions.childTokens && this.defaults.extensions.childTokens[token.type]) { // Walk any extensions
this.defaults.extensions.childTokens[token.type].forEach((childTokens) => {
// @ts-expect-error we assume token[childToken] is an array of tokens but we can't be sure
values = values.concat(this.walkTokens(token[childTokens], callback));
const genericToken = token;
if (this.defaults.extensions?.childTokens?.[genericToken.type]) {
this.defaults.extensions.childTokens[genericToken.type].forEach((childTokens) => {
values = values.concat(this.walkTokens(genericToken[childTokens], callback));
});
}
else if (token.tokens) {
values = values.concat(this.walkTokens(token.tokens, callback));
else if (genericToken.tokens) {
values = values.concat(this.walkTokens(genericToken.tokens, callback));
}
}
}
@ -2305,8 +2329,9 @@ class Marked {
if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
throw new Error("extension level must be 'block' or 'inline'");
}
if (extensions[ext.level]) {
extensions[ext.level].unshift(ext.tokenizer);
const extLevel = extensions[ext.level];
if (extLevel) {
extLevel.unshift(ext.tokenizer);
}
else {
extensions[ext.level] = [ext.tokenizer];
@ -2404,9 +2429,10 @@ class Marked {
// ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) {
const walkTokens = this.defaults.walkTokens;
const packWalktokens = pack.walkTokens;
opts.walkTokens = function (token) {
let values = [];
values.push(pack.walkTokens.call(this, token));
values.push(packWalktokens.call(this, token));
if (walkTokens) {
values = values.concat(walkTokens.call(this, token));
}
@ -2443,6 +2469,7 @@ class Marked {
opt.hooks.options = opt;
}
if (callback) {
const resultCallback = callback;
const highlight = opt.highlight;
let tokens;
try {
@ -2473,7 +2500,7 @@ class Marked {
opt.highlight = highlight;
return err
? throwError(err)
: callback(null, out);
: resultCallback(null, out);
};
if (!highlight || highlight.length < 3) {
return done();

2
lib/marked.esm.js.map generated

File diff suppressed because one or more lines are too long

237
lib/marked.umd.js generated

@ -174,7 +174,8 @@
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
let escaped = false, curr = offset;
let escaped = false;
let curr = offset;
while (--curr >= 0 && str[curr] === '\\')
escaped = !escaped;
if (escaped) {
@ -244,9 +245,8 @@
if (str.indexOf(b[1]) === -1) {
return -1;
}
const l = str.length;
let level = 0, i = 0;
for (; i < l; i++) {
let level = 0;
for (let i = 0; i < str.length; i++) {
if (str[i] === '\\') {
i++;
}
@ -343,6 +343,7 @@
*/
class _Tokenizer {
options;
// TODO: Fix this rules type
rules;
lexer;
constructor(options) {
@ -436,7 +437,6 @@
list(src) {
let cap = this.rules.block.list.exec(src);
if (cap) {
let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine, line, nextLine, rawLine, itemContents, endEarly;
let bull = cap[1].trim();
const isordered = bull.length > 1;
const list = {
@ -453,9 +453,12 @@
}
// Get next list item
const itemRegex = new RegExp(`^( {0,3}${bull})((?:[\t ][^\\n]*)?(?:\\n|$))`);
let raw = '';
let itemContents = '';
let endsWithBlankLine = false;
// Check if current bullet point can start a new List Item
while (src) {
endEarly = false;
let endEarly = false;
if (!(cap = itemRegex.exec(src))) {
break;
}
@ -464,8 +467,9 @@
}
raw = cap[0];
src = src.substring(raw.length);
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
nextLine = src.split('\n', 1)[0];
let line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
let nextLine = src.split('\n', 1)[0];
let indent = 0;
if (this.options.pedantic) {
indent = 2;
itemContents = line.trimLeft();
@ -476,7 +480,7 @@
itemContents = line.slice(indent);
indent += cap[1].length;
}
blankLine = false;
let blankLine = false;
if (!line && /^ *$/.test(nextLine)) { // Items begin with at most one blank line
raw += nextLine + '\n';
src = src.substring(nextLine.length + 1);
@ -489,7 +493,7 @@
const headingBeginRegex = new RegExp(`^ {0,${Math.min(3, indent - 1)}}#`);
// Check if following lines should be included in List Item
while (src) {
rawLine = src.split('\n', 1)[0];
const rawLine = src.split('\n', 1)[0];
nextLine = rawLine;
// Re-align to follow commonmark nesting rules
if (this.options.pedantic) {
@ -551,6 +555,8 @@
endsWithBlankLine = true;
}
}
let istask = null;
let ischecked;
// Check for task list items
if (this.options.gfm) {
istask = /^\[[ xX]\] /.exec(itemContents);
@ -565,7 +571,8 @@
task: !!istask,
checked: ischecked,
loose: false,
text: itemContents
text: itemContents,
tokens: []
});
list.raw += raw;
}
@ -573,9 +580,8 @@
list.items[list.items.length - 1].raw = raw.trimRight();
list.items[list.items.length - 1].text = itemContents.trimRight();
list.raw = list.raw.trimRight();
const l = list.items.length;
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose) {
@ -587,7 +593,7 @@
}
// Set all items to loose if list is loose
if (list.loose) {
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
list.items[i].loose = true;
}
}
@ -637,7 +643,7 @@
type: 'table',
raw: cap[0],
header: splitCells(cap[1]).map(c => {
return { text: c };
return { text: c, tokens: [] };
}),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
@ -646,23 +652,26 @@
let l = item.align.length;
let i, j, k, row;
for (i = 0; i < l; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
const align = item.align[i];
if (align) {
if (/^ *-+: *$/.test(align)) {
item.align[i] = 'right';
}
else if (/^ *:-+: *$/.test(align)) {
item.align[i] = 'center';
}
else if (/^ *:-+ *$/.test(align)) {
item.align[i] = 'left';
}
else {
item.align[i] = null;
}
}
}
l = item.rows.length;
for (i = 0; i < l; i++) {
item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => {
return { text: c };
return { text: c, tokens: [] };
});
}
// parse child tokens inside headers and cells
@ -1313,13 +1322,11 @@
* mangle email addresses
*/
function mangle(text) {
let out = '', i, ch;
const l = text.length;
for (i = 0; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
let out = '';
for (let i = 0; i < text.length; i++) {
const ch = Math.random() > 0.5
? 'x' + text.charCodeAt(i).toString(16)
: text.charCodeAt(i).toString();
out += '&#' + ch + ';';
}
return out;
@ -1765,7 +1772,7 @@
this.options = options || exports.defaults;
}
code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0];
const lang = (infostring || '').match(/^\S*/)?.[0];
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
@ -1804,7 +1811,8 @@
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
}
list(body, ordered, start) {
const type = ordered ? 'ol' : 'ul', startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
const type = ordered ? 'ol' : 'ul';
const startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
}
listitem(text, task, checked) {
@ -1859,10 +1867,11 @@
return `<del>${text}</del>`;
}
link(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
@ -1871,10 +1880,11 @@
return out;
}
image(href, title, text) {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = `<img src="${href}" alt="${text}"`;
if (title) {
out += ` title="${title}"`;
@ -2002,14 +2012,14 @@
* Parse Loop
*/
parse(tokens, top = true) {
let out = '', i, j, k, l2, l3, row, cell, header, body, token, ordered, start, loose, itemBody, item, checked, task, checkbox, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
const genericToken = token;
const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
out += ret || '';
continue;
}
@ -2023,30 +2033,30 @@
continue;
}
case 'heading': {
out += this.renderer.heading(this.parseInline(token.tokens), token.depth, unescape(this.parseInline(token.tokens, this.textRenderer)), this.slugger);
const headingToken = token;
out += this.renderer.heading(this.parseInline(headingToken.tokens), headingToken.depth, unescape(this.parseInline(headingToken.tokens, this.textRenderer)), this.slugger);
continue;
}
case 'code': {
out += this.renderer.code(token.text, token.lang, !!token.escaped);
const codeToken = token;
out += this.renderer.code(codeToken.text, codeToken.lang, !!codeToken.escaped);
continue;
}
case 'table': {
header = '';
const tableToken = token;
let header = '';
// header
cell = '';
l2 = token.header.length;
for (j = 0; j < l2; j++) {
cell += this.renderer.tablecell(this.parseInline(token.header[j].tokens), { header: true, align: token.align[j] });
let cell = '';
for (let j = 0; j < tableToken.header.length; j++) {
cell += this.renderer.tablecell(this.parseInline(tableToken.header[j].tokens), { header: true, align: tableToken.align[j] });
}
header += this.renderer.tablerow(cell);
body = '';
l2 = token.rows.length;
for (j = 0; j < l2; j++) {
row = token.rows[j];
let body = '';
for (let j = 0; j < tableToken.rows.length; j++) {
const row = tableToken.rows[j];
cell = '';
l3 = row.length;
for (k = 0; k < l3; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: token.align[k] });
for (let k = 0; k < row.length; k++) {
cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { header: false, align: tableToken.align[k] });
}
body += this.renderer.tablerow(cell);
}
@ -2054,23 +2064,24 @@
continue;
}
case 'blockquote': {
body = this.parse(token.tokens);
const blockquoteToken = token;
const body = this.parse(blockquoteToken.tokens);
out += this.renderer.blockquote(body);
continue;
}
case 'list': {
ordered = token.ordered;
start = token.start;
loose = token.loose;
l2 = token.items.length;
body = '';
for (j = 0; j < l2; j++) {
item = token.items[j];
checked = item.checked;
task = item.task;
itemBody = '';
const listToken = token;
const ordered = listToken.ordered;
const start = listToken.start;
const loose = listToken.loose;
let body = '';
for (let j = 0; j < listToken.items.length; j++) {
const item = listToken.items[j];
const checked = item.checked;
const task = item.task;
let itemBody = '';
if (item.task) {
checkbox = this.renderer.checkbox(!!checked);
const checkbox = this.renderer.checkbox(!!checked);
if (loose) {
if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
@ -2096,18 +2107,21 @@
continue;
}
case 'html': {
out += this.renderer.html(token.text, token.block);
const htmlToken = token;
out += this.renderer.html(htmlToken.text, htmlToken.block);
continue;
}
case 'paragraph': {
out += this.renderer.paragraph(this.parseInline(token.tokens));
const paragraphToken = token;
out += this.renderer.paragraph(this.parseInline(paragraphToken.tokens));
continue;
}
case 'text': {
body = token.tokens ? this.parseInline(token.tokens) : token.text;
while (i + 1 < l && tokens[i + 1].type === 'text') {
token = tokens[++i];
body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
let textToken = token;
let body = textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text;
while (i + 1 < tokens.length && tokens[i + 1].type === 'text') {
textToken = tokens[++i];
body += '\n' + (textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text);
}
out += top ? this.renderer.paragraph(body) : body;
continue;
@ -2131,13 +2145,12 @@
*/
parseInline(tokens, renderer) {
renderer = renderer || this.renderer;
let out = '', i, token, ret;
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
let out = '';
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
const ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
out += ret || '';
continue;
@ -2145,31 +2158,38 @@
}
switch (token.type) {
case 'escape': {
out += renderer.text(token.text);
const escapeToken = token;
out += renderer.text(escapeToken.text);
break;
}
case 'html': {
out += renderer.html(token.text);
const tagToken = token;
out += renderer.html(tagToken.text);
break;
}
case 'link': {
out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
const linkToken = token;
out += renderer.link(linkToken.href, linkToken.title, this.parseInline(linkToken.tokens, renderer));
break;
}
case 'image': {
out += renderer.image(token.href, token.title, token.text);
const imageToken = token;
out += renderer.image(imageToken.href, imageToken.title, imageToken.text);
break;
}
case 'strong': {
out += renderer.strong(this.parseInline(token.tokens, renderer));
const strongToken = token;
out += renderer.strong(this.parseInline(strongToken.tokens, renderer));
break;
}
case 'em': {
out += renderer.em(this.parseInline(token.tokens, renderer));
const emToken = token;
out += renderer.em(this.parseInline(emToken.tokens, renderer));
break;
}
case 'codespan': {
out += renderer.codespan(token.text);
const codespanToken = token;
out += renderer.codespan(codespanToken.text);
break;
}
case 'br': {
@ -2177,11 +2197,13 @@
break;
}
case 'del': {
out += renderer.del(this.parseInline(token.tokens, renderer));
const delToken = token;
out += renderer.del(this.parseInline(delToken.tokens, renderer));
break;
}
case 'text': {
out += renderer.text(token.text);
const textToken = token;
out += renderer.text(textToken.text);
break;
}
default: {
@ -2249,10 +2271,11 @@
values = values.concat(callback.call(this, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
const tableToken = token;
for (const cell of tableToken.header) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const row of tableToken.rows) {
for (const cell of row) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
@ -2260,18 +2283,19 @@
break;
}
case 'list': {
values = values.concat(this.walkTokens(token.items, callback));
const listToken = token;
values = values.concat(this.walkTokens(listToken.items, callback));
break;
}
default: {
if (this.defaults.extensions && this.defaults.extensions.childTokens && this.defaults.extensions.childTokens[token.type]) { // Walk any extensions
this.defaults.extensions.childTokens[token.type].forEach((childTokens) => {
// @ts-expect-error we assume token[childToken] is an array of tokens but we can't be sure
values = values.concat(this.walkTokens(token[childTokens], callback));
const genericToken = token;
if (this.defaults.extensions?.childTokens?.[genericToken.type]) {
this.defaults.extensions.childTokens[genericToken.type].forEach((childTokens) => {
values = values.concat(this.walkTokens(genericToken[childTokens], callback));
});
}
else if (token.tokens) {
values = values.concat(this.walkTokens(token.tokens, callback));
else if (genericToken.tokens) {
values = values.concat(this.walkTokens(genericToken.tokens, callback));
}
}
}
@ -2311,8 +2335,9 @@
if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
throw new Error("extension level must be 'block' or 'inline'");
}
if (extensions[ext.level]) {
extensions[ext.level].unshift(ext.tokenizer);
const extLevel = extensions[ext.level];
if (extLevel) {
extLevel.unshift(ext.tokenizer);
}
else {
extensions[ext.level] = [ext.tokenizer];
@ -2410,9 +2435,10 @@
// ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) {
const walkTokens = this.defaults.walkTokens;
const packWalktokens = pack.walkTokens;
opts.walkTokens = function (token) {
let values = [];
values.push(pack.walkTokens.call(this, token));
values.push(packWalktokens.call(this, token));
if (walkTokens) {
values = values.concat(walkTokens.call(this, token));
}
@ -2449,6 +2475,7 @@
opt.hooks.options = opt;
}
if (callback) {
const resultCallback = callback;
const highlight = opt.highlight;
let tokens;
try {
@ -2479,7 +2506,7 @@
opt.highlight = highlight;
return err
? throwError(err)
: callback(null, out);
: resultCallback(null, out);
};
if (!highlight || highlight.length < 3) {
return done();

2
lib/marked.umd.js.map generated

File diff suppressed because one or more lines are too long

4
marked.min.js generated vendored

File diff suppressed because one or more lines are too long

View File

@ -11,7 +11,7 @@ import {
escape
} from './helpers.ts';
import type { MarkedExtension, MarkedOptions } from './MarkedOptions.ts';
import type { Token, TokensList } from './Tokens.ts';
import type { Token, Tokens, TokensList } from './Tokens.ts';
export type ResultCallback = (error: Error | null, parseResult?: string) => undefined | void;
@ -48,28 +48,30 @@ export class Marked {
values = values.concat(callback.call(this, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
values = values.concat(this.walkTokens(cell.tokens!, callback));
const tableToken = token as Tokens.Table;
for (const cell of tableToken.header) {
values = values.concat(this.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const row of tableToken.rows) {
for (const cell of row) {
values = values.concat(this.walkTokens(cell.tokens!, callback));
values = values.concat(this.walkTokens(cell.tokens, callback));
}
}
break;
}
case 'list': {
values = values.concat(this.walkTokens(token.items, callback));
const listToken = token as Tokens.List;
values = values.concat(this.walkTokens(listToken.items, callback));
break;
}
default: {
if (this.defaults.extensions && this.defaults.extensions.childTokens && this.defaults.extensions.childTokens[token.type]) { // Walk any extensions
this.defaults.extensions.childTokens[token.type].forEach((childTokens) => {
// @ts-expect-error we assume token[childToken] is an array of tokens but we can't be sure
values = values.concat(this.walkTokens(token[childTokens], callback));
const genericToken = token as Tokens.Generic;
if (this.defaults.extensions?.childTokens?.[genericToken.type]) {
this.defaults.extensions.childTokens[genericToken.type].forEach((childTokens) => {
values = values.concat(this.walkTokens(genericToken[childTokens], callback));
});
} else if (token.tokens) {
values = values.concat(this.walkTokens(token.tokens, callback));
} else if (genericToken.tokens) {
values = values.concat(this.walkTokens(genericToken.tokens, callback));
}
}
}
@ -78,7 +80,7 @@ export class Marked {
}
use(...args: MarkedExtension[]) {
const extensions: NonNullable<MarkedOptions['extensions']> = this.defaults.extensions || { renderers: {}, childTokens: {} } as NonNullable<MarkedOptions['extensions']>;
const extensions: MarkedOptions['extensions'] = this.defaults.extensions || { renderers: {}, childTokens: {} };
args.forEach((pack) => {
// copy options to new object
@ -112,23 +114,24 @@ export class Marked {
if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
throw new Error("extension level must be 'block' or 'inline'");
}
if (extensions[ext.level]) {
extensions[ext.level].unshift(ext.tokenizer);
const extLevel = extensions[ext.level];
if (extLevel) {
extLevel.unshift(ext.tokenizer);
} else {
extensions[ext.level] = [ext.tokenizer];
}
if (ext.start) { // Function to check for start of token
if (ext.level === 'block') {
if (extensions.startBlock) {
extensions.startBlock.push(ext.start!);
extensions.startBlock.push(ext.start);
} else {
extensions.startBlock = [ext.start!];
extensions.startBlock = [ext.start];
}
} else if (ext.level === 'inline') {
if (extensions.startInline) {
extensions.startInline.push(ext.start!);
extensions.startInline.push(ext.start);
} else {
extensions.startInline = [ext.start!];
extensions.startInline = [ext.start];
}
}
}
@ -210,9 +213,10 @@ export class Marked {
// ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) {
const walkTokens = this.defaults.walkTokens;
const packWalktokens = pack.walkTokens;
opts.walkTokens = function(token) {
let values: Array<Promise<void> | void> = [];
values.push(pack.walkTokens!.call(this, token));
let values: Array<Promise<void> | void | unknown> = [];
values.push(packWalktokens.call(this, token));
if (walkTokens) {
values = values.concat(walkTokens.call(this, token));
}
@ -258,6 +262,7 @@ export class Marked {
}
if (callback) {
const resultCallback = callback;
const highlight = opt.highlight;
let tokens: TokensList | Token[];
@ -278,7 +283,7 @@ export class Marked {
if (opt.walkTokens) {
this.walkTokens(tokens, opt.walkTokens);
}
out = parser(tokens, opt)!;
out = parser(tokens, opt);
if (opt.hooks) {
out = opt.hooks.postprocess(out) as string;
}
@ -291,7 +296,7 @@ export class Marked {
return err
? throwError(err)
: callback!(null, out) as undefined;
: resultCallback(null, out) as undefined;
};
if (!highlight || highlight.length < 3) {
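
For reference, a minimal sketch of the extension shape these use() hooks accept, written against the typed API touched in this diff. The 'spoiler' token name, the || delimiter, and the output markup are invented for illustration; only the name/level/start/tokenizer/renderer fields reflect the actual interfaces.

import { marked } from 'marked';

marked.use({
  extensions: [{
    name: 'spoiler',                               // hypothetical custom token type
    level: 'block',
    start(src) { return src.indexOf('||'); },      // TokenizerStartFunction: hint where a match may begin
    tokenizer(src) {
      const match = /^\|\|([^\n]+)\|\|/.exec(src);
      if (match) {
        return { type: 'spoiler', raw: match[0], text: match[1].trim() };
      }
    },
    renderer(token) {
      return `<details><summary>spoiler</summary>${token.text}</details>\n`;
    }
  }]
});

console.log(marked.parse('||hidden text||'));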


@ -30,16 +30,12 @@ function smartypants(text: string) {
* mangle email addresses
*/
function mangle(text: string) {
let out = '',
i,
ch;
let out = '';
const l = text.length;
for (i = 0; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
for (let i = 0; i < text.length; i++) {
const ch = Math.random() > 0.5
? 'x' + text.charCodeAt(i).toString(16)
: text.charCodeAt(i).toString();
out += '&#' + ch + ';';
}


@ -13,11 +13,15 @@ export interface TokenizerThis {
lexer: _Lexer;
}
export type TokenizerExtensionFunction = (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | undefined;
export type TokenizerStartFunction = (this: TokenizerThis, src: string) => number | void;
export interface TokenizerExtension {
name: string;
level: 'block' | 'inline';
start?: ((this: TokenizerThis, src: string) => number | void) | undefined;
tokenizer: (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | undefined;
start?: TokenizerStartFunction | undefined;
tokenizer: TokenizerExtensionFunction;
childTokens?: string[] | undefined;
}
@ -169,7 +173,7 @@ export interface MarkedExtension {
* Each token is passed by reference so updates are persisted when passed to the parser.
* The return value of the function is ignored.
*/
walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
walkTokens?: ((token: Token) => void | unknown | Promise<void>) | undefined | null;
/**
* Generate closing slash for self-closing tags (<br/> instead of <br>)
* @deprecated Deprecated in v5.0.0 use marked-xhtml to emit self-closing HTML tags for void elements (<br/>, <img/>, etc.) with a "/" as required by XHTML.
@ -177,38 +181,37 @@ export interface MarkedExtension {
xhtml?: boolean | undefined;
}
export interface MarkedOptions extends Omit<MarkedExtension, 'extensions' | 'renderer' | 'tokenizer' | 'walkTokens'> {
export interface MarkedOptions extends Omit<MarkedExtension, 'renderer' | 'tokenizer' | 'extensions' | 'walkTokens'> {
/**
* Type: object Default: new Renderer()
*
* An object containing functions to render tokens to HTML.
*/
renderer?: Omit<_Renderer, 'constructor'> | undefined | null;
renderer?: _Renderer | undefined | null;
/**
* The tokenizer defines how to turn markdown text into tokens.
*/
tokenizer?: Omit<_Tokenizer, 'constructor'> | undefined | null;
tokenizer?: _Tokenizer | undefined | null;
/**
* The walkTokens function gets called with every token.
* Child tokens are called before moving on to sibling tokens.
* Each token is passed by reference so updates are persisted when passed to the parser.
* The return value of the function is ignored.
* Custom extensions
*/
walkTokens?: ((token: Token) => void | Promise<void> | Array<void | Promise<void>>) | undefined | null;
extensions?: null | {
renderers: {
[name: string]: RendererExtensionFunction;
};
childTokens: {
[name: string]: string[];
};
inline?: TokenizerExtensionFunction[];
block?: TokenizerExtensionFunction[];
startInline?: TokenizerStartFunction[];
startBlock?: TokenizerStartFunction[];
};
/**
* Add tokenizers and renderers to marked
* walkTokens function returns array of values for Promise.all
*/
extensions?:
| (TokenizerAndRendererExtension[] & {
renderers: Record<string, RendererExtensionFunction>,
childTokens: Record<string, string[]>,
block: any[],
inline: any[],
startBlock: Array<(this: TokenizerThis, src: string) => number | void>,
startInline: Array<(this: TokenizerThis, src: string) => number | void>
})
| undefined | null;
walkTokens?: null | ((token: Token) => void | (unknown | Promise<void>)[]);
}
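
The widened walkTokens return type above (void | unknown | Promise) is what permits async walkers: use() collects the returned values and parse() awaits them via Promise.all when the async option is set. A small sketch of that pattern; the link check is invented, and async: true is the documented switch for awaiting walkTokens results.

import { marked } from 'marked';

marked.use({
  async: true,                      // make parse() await the values collected from walkTokens
  async walkTokens(token) {
    if (token.type === 'link') {
      // e.g. validate token.href against an allow-list (illustrative placeholder)
      await Promise.resolve();
    }
  }
});

const html = await marked.parse('[docs](https://marked.js.org)');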


@ -45,35 +45,16 @@ export class _Parser {
* Parse Loop
*/
parse(tokens: Token[], top = true): string {
let out = '',
i,
j,
k,
l2,
l3,
row,
cell,
header,
body,
token,
ordered,
start,
loose,
itemBody,
item,
checked,
task,
checkbox,
ret;
let out = '';
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
const genericToken = token as Tokens.Generic;
const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
out += ret || '';
continue;
}
@ -88,44 +69,44 @@ export class _Parser {
continue;
}
case 'heading': {
const headingToken = token as Tokens.Heading;
out += this.renderer.heading(
this.parseInline(token.tokens!),
token.depth,
unescape(this.parseInline(token.tokens!, this.textRenderer)),
this.parseInline(headingToken.tokens),
headingToken.depth,
unescape(this.parseInline(headingToken.tokens, this.textRenderer)),
this.slugger);
continue;
}
case 'code': {
out += this.renderer.code(token.text,
token.lang,
!!token.escaped);
const codeToken = token as Tokens.Code;
out += this.renderer.code(codeToken.text,
codeToken.lang,
!!codeToken.escaped);
continue;
}
case 'table': {
header = '';
const tableToken = token as Tokens.Table;
let header = '';
// header
cell = '';
l2 = token.header.length;
for (j = 0; j < l2; j++) {
let cell = '';
for (let j = 0; j < tableToken.header.length; j++) {
cell += this.renderer.tablecell(
this.parseInline(token.header[j].tokens)!,
{ header: true, align: token.align[j] }
this.parseInline(tableToken.header[j].tokens),
{ header: true, align: tableToken.align[j] }
);
}
header += this.renderer.tablerow(cell);
body = '';
l2 = token.rows.length;
for (j = 0; j < l2; j++) {
row = token.rows[j];
let body = '';
for (let j = 0; j < tableToken.rows.length; j++) {
const row = tableToken.rows[j];
cell = '';
l3 = row.length;
for (k = 0; k < l3; k++) {
for (let k = 0; k < row.length; k++) {
cell += this.renderer.tablecell(
this.parseInline(row[k].tokens)!,
{ header: false, align: token.align[k] }
this.parseInline(row[k].tokens),
{ header: false, align: tableToken.align[k] }
);
}
@ -135,25 +116,26 @@ export class _Parser {
continue;
}
case 'blockquote': {
body = this.parse(token.tokens!);
const blockquoteToken = token as Tokens.Blockquote;
const body = this.parse(blockquoteToken.tokens);
out += this.renderer.blockquote(body);
continue;
}
case 'list': {
ordered = token.ordered;
start = token.start;
loose = token.loose;
l2 = token.items.length;
const listToken = token as Tokens.List;
const ordered = listToken.ordered;
const start = listToken.start;
const loose = listToken.loose;
body = '';
for (j = 0; j < l2; j++) {
item = token.items[j];
checked = item.checked;
task = item.task;
let body = '';
for (let j = 0; j < listToken.items.length; j++) {
const item = listToken.items[j];
const checked = item.checked;
const task = item.task;
itemBody = '';
let itemBody = '';
if (item.task) {
checkbox = this.renderer.checkbox(!!checked);
const checkbox = this.renderer.checkbox(!!checked);
if (loose) {
if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
@ -179,20 +161,23 @@ export class _Parser {
continue;
}
case 'html': {
out += this.renderer.html(token.text, token.block);
const htmlToken = token as Tokens.HTML;
out += this.renderer.html(htmlToken.text, htmlToken.block);
continue;
}
case 'paragraph': {
out += this.renderer.paragraph(this.parseInline(token.tokens!)!);
const paragraphToken = token as Tokens.Paragraph;
out += this.renderer.paragraph(this.parseInline(paragraphToken.tokens));
continue;
}
case 'text': {
body = token.tokens ? this.parseInline(token.tokens) : token.text;
while (i + 1 < l && tokens[i + 1].type === 'text') {
token = tokens[++i] as Tokens.Text;
body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
let textToken = token as Tokens.Text;
let body = textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text;
while (i + 1 < tokens.length && tokens[i + 1].type === 'text') {
textToken = tokens[++i] as Tokens.Text;
body += '\n' + (textToken.tokens ? this.parseInline(textToken.tokens) : textToken.text);
}
out += top ? this.renderer.paragraph(body!) : body;
out += top ? this.renderer.paragraph(body) : body;
continue;
}
@ -216,18 +201,14 @@ export class _Parser {
*/
parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string {
renderer = renderer || this.renderer;
let out = '',
i,
token,
ret;
let out = '';
const l = tokens.length;
for (i = 0; i < l; i++) {
token = tokens[i];
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i];
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
const ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
out += ret || '';
continue;
@ -236,31 +217,38 @@ export class _Parser {
switch (token.type) {
case 'escape': {
out += renderer.text(token.text);
const escapeToken = token as Tokens.Escape;
out += renderer.text(escapeToken.text);
break;
}
case 'html': {
out += renderer.html(token.text);
const tagToken = token as Tokens.Tag;
out += renderer.html(tagToken.text);
break;
}
case 'link': {
out += renderer.link(token.href, token.title, this.parseInline(token.tokens!, renderer)!);
const linkToken = token as Tokens.Link;
out += renderer.link(linkToken.href, linkToken.title, this.parseInline(linkToken.tokens, renderer));
break;
}
case 'image': {
out += renderer.image(token.href, token.title, token.text);
const imageToken = token as Tokens.Image;
out += renderer.image(imageToken.href, imageToken.title, imageToken.text);
break;
}
case 'strong': {
out += renderer.strong(this.parseInline(token.tokens!, renderer)!);
const strongToken = token as Tokens.Strong;
out += renderer.strong(this.parseInline(strongToken.tokens, renderer));
break;
}
case 'em': {
out += renderer.em(this.parseInline(token.tokens!, renderer)!);
const emToken = token as Tokens.Em;
out += renderer.em(this.parseInline(emToken.tokens, renderer));
break;
}
case 'codespan': {
out += renderer.codespan(token.text);
const codespanToken = token as Tokens.Codespan;
out += renderer.codespan(codespanToken.text);
break;
}
case 'br': {
@ -268,11 +256,13 @@ export class _Parser {
break;
}
case 'del': {
out += renderer.del(this.parseInline(token.tokens!, renderer)!);
const delToken = token as Tokens.Del;
out += renderer.del(this.parseInline(delToken.tokens, renderer));
break;
}
case 'text': {
out += renderer.text(token.text);
const textToken = token as Tokens.Text;
out += renderer.text(textToken.text);
break;
}
default: {
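
The `ret !== false || !['space', ...].includes(...)` guards above give renderer extensions an escape hatch: an extension registered under a built-in token name can return false to fall back to the default case in this switch. A sketch of that pattern; the 'page-title' class is invented.

import { marked } from 'marked';

marked.use({
  extensions: [{
    name: 'heading',                 // hook the built-in heading token type
    renderer(token) {
      if (token.depth === 1) {
        return `<h1 class="page-title">${this.parser.parseInline(token.tokens ?? [])}</h1>\n`;
      }
      return false;                  // defer to the default heading renderer
    }
  }]
});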


@ -16,7 +16,7 @@ export class _Renderer {
}
code(code: string, infostring: string | undefined, escaped: boolean): string {
const lang = (infostring || '').match(/\S*/)![0];
const lang = (infostring || '').match(/^\S*/)?.[0];
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
@ -64,8 +64,8 @@ export class _Renderer {
}
list(body: string, ordered: boolean, start: number | ''): string {
const type = ordered ? 'ol' : 'ul',
startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
const type = ordered ? 'ol' : 'ul';
const startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
}
@ -135,10 +135,11 @@ export class _Renderer {
}
link(href: string, title: string | null | undefined, text: string): string {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) as any;
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
@ -148,10 +149,11 @@ export class _Renderer {
}
image(href: string, title: string | null, text: string): string {
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) as any;
if (href === null) {
const cleanHref = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
if (cleanHref === null) {
return text;
}
href = cleanHref;
let out = `<img src="${href}" alt="${text}"`;
if (title) {
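
Beyond subclassing, individual renderer methods can also be overridden through use(); the override receives the same link() arguments refactored above. A sketch: the target/rel attributes are an illustrative policy, not something marked adds itself, and no URL sanitizing is shown.

import { marked } from 'marked';

marked.use({
  renderer: {
    link(href, title, text) {
      const titleAttr = title ? ` title="${title}"` : '';
      // open every link in a new tab (illustrative policy)
      return `<a href="${href}"${titleAttr} target="_blank" rel="noopener">${text}</a>`;
    }
  }
});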


@ -69,6 +69,7 @@ function indentCodeCompensation(raw: string, text: string) {
*/
export class _Tokenizer {
options: MarkedOptions;
// TODO: Fix this rules type
rules: any;
lexer!: _Lexer;
@ -172,9 +173,6 @@ export class _Tokenizer {
list(src: string): Tokens.List | undefined {
let cap = this.rules.block.list.exec(src);
if (cap) {
let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine,
line, nextLine, rawLine, itemContents, endEarly;
let bull = cap[1].trim();
const isordered = bull.length > 1;
@ -195,10 +193,12 @@ export class _Tokenizer {
// Get next list item
const itemRegex = new RegExp(`^( {0,3}${bull})((?:[\t ][^\\n]*)?(?:\\n|$))`);
let raw = '';
let itemContents = '';
let endsWithBlankLine = false;
// Check if current bullet point can start a new List Item
while (src) {
endEarly = false;
let endEarly = false;
if (!(cap = itemRegex.exec(src))) {
break;
}
@ -207,12 +207,13 @@ export class _Tokenizer {
break;
}
raw = cap[0];
raw = cap[0] as string;
src = src.substring(raw.length);
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t: string) => ' '.repeat(3 * t.length));
nextLine = src.split('\n', 1)[0];
let line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t: string) => ' '.repeat(3 * t.length)) as string;
let nextLine = src.split('\n', 1)[0];
let indent = 0;
if (this.options.pedantic) {
indent = 2;
itemContents = line.trimLeft();
@ -223,7 +224,7 @@ export class _Tokenizer {
indent += cap[1].length;
}
blankLine = false;
let blankLine = false;
if (!line && /^ *$/.test(nextLine)) { // Items begin with at most one blank line
raw += nextLine + '\n';
@ -239,7 +240,7 @@ export class _Tokenizer {
// Check if following lines should be included in List Item
while (src) {
rawLine = src.split('\n', 1)[0];
const rawLine = src.split('\n', 1)[0];
nextLine = rawLine;
// Re-align to follow commonmark nesting rules
@ -311,6 +312,8 @@ export class _Tokenizer {
}
}
let istask: RegExpExecArray | null = null;
let ischecked: boolean | undefined;
// Check for task list items
if (this.options.gfm) {
istask = /^\[[ xX]\] /.exec(itemContents);
@ -326,7 +329,8 @@ export class _Tokenizer {
task: !!istask,
checked: ischecked,
loose: false,
text: itemContents
text: itemContents,
tokens: []
});
list.raw += raw;
@ -337,17 +341,15 @@ export class _Tokenizer {
(list.items[list.items.length - 1] as Tokens.ListItem).text = itemContents.trimRight();
list.raw = list.raw.trimRight();
const l = list.items.length;
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose) {
// Check if list should be loose
const spacers = list.items[i].tokens!.filter(t => t.type === 'space');
const hasMultipleLineBreaks = spacers.length > 0 && spacers.some(t => /\n.*\n/.test(t.raw!));
const spacers = list.items[i].tokens.filter(t => t.type === 'space');
const hasMultipleLineBreaks = spacers.length > 0 && spacers.some(t => /\n.*\n/.test(t.raw));
list.loose = hasMultipleLineBreaks;
}
@ -355,7 +357,7 @@ export class _Tokenizer {
// Set all items to loose if list is loose
if (list.loose) {
for (i = 0; i < l; i++) {
for (let i = 0; i < list.items.length; i++) {
list.items[i].loose = true;
}
}
@ -409,7 +411,7 @@ export class _Tokenizer {
type: 'table',
raw: cap[0],
header: splitCells(cap[1]).map(c => {
return { text: c };
return { text: c, tokens: [] };
}),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
@ -419,21 +421,24 @@ export class _Tokenizer {
let l = item.align.length;
let i, j, k, row;
for (i = 0; i < l; i++) {
if (/^ *-+: *$/.test(item.align[i]!)) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(item.align[i]!)) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(item.align[i]!)) {
item.align[i] = 'left';
} else {
item.align[i] = null;
const align = item.align[i];
if (align) {
if (/^ *-+: *$/.test(align)) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(align)) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(align)) {
item.align[i] = 'left';
} else {
item.align[i] = null;
}
}
}
l = item.rows.length;
for (i = 0; i < l; i++) {
item.rows[i] = splitCells(item.rows[i] as unknown as string, item.header.length).map(c => {
return { text: c };
return { text: c, tokens: [] };
});
}


@ -1,26 +1,27 @@
/* eslint-disable no-use-before-define */
export type Token = (Tokens.Space
| Tokens.Code
| Tokens.Heading
| Tokens.Table
| Tokens.Hr
| Tokens.Blockquote
| Tokens.List
| Tokens.ListItem
| Tokens.Paragraph
| Tokens.HTML
| Tokens.Text
| Tokens.Def
| Tokens.Escape
| Tokens.Tag
| Tokens.Image
| Tokens.Link
| Tokens.Strong
| Tokens.Em
| Tokens.Codespan
| Tokens.Br
| Tokens.Del
| Tokens.Generic) & { loose?: boolean, tokens?: Token[] };
export type Token = (
Tokens.Space
| Tokens.Code
| Tokens.Heading
| Tokens.Table
| Tokens.Hr
| Tokens.Blockquote
| Tokens.List
| Tokens.ListItem
| Tokens.Paragraph
| Tokens.HTML
| Tokens.Text
| Tokens.Def
| Tokens.Escape
| Tokens.Tag
| Tokens.Image
| Tokens.Link
| Tokens.Strong
| Tokens.Em
| Tokens.Codespan
| Tokens.Br
| Tokens.Del
| Tokens.Generic);
export namespace Tokens {
export interface Space {
@ -55,7 +56,7 @@ export namespace Tokens {
export interface TableCell {
text: string;
tokens?: Token[];
tokens: Token[];
}
export interface Hr {
@ -86,7 +87,7 @@ export namespace Tokens {
checked?: boolean | undefined;
loose: boolean;
text: string;
tokens?: Token[];
tokens: Token[];
}
export interface Paragraph {
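
With TableCell.tokens and ListItem.tokens now required (matching the tokens: [] initializers added in the tokenizer), code that builds tokens by hand has to supply the array. A sketch, assuming the Tokens types are re-exported from the package entry; otherwise import them from the Tokens module directly.

import type { Tokens } from 'marked';

const item: Tokens.ListItem = {
  type: 'list_item',
  raw: '- write docs',
  task: false,
  loose: false,
  text: 'write docs',
  tokens: []            // required after this change; previously optional
};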


@ -135,8 +135,8 @@ export function splitCells(tableRow: string, count?: number) {
// ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => {
let escaped = false,
curr = offset;
let escaped = false;
let curr = offset;
while (--curr >= 0 && str[curr] === '\\') escaped = !escaped;
if (escaped) {
// odd number of slashes means | is escaped
@ -209,10 +209,9 @@ export function findClosingBracket(str: string, b: string) {
if (str.indexOf(b[1]) === -1) {
return -1;
}
const l = str.length;
let level = 0,
i = 0;
for (; i < l; i++) {
let level = 0;
for (let i = 0; i < str.length; i++) {
if (str[i] === '\\') {
i++;
} else if (str[i] === b[0]) {


@ -1,13 +1,6 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"target": "es2021",
"module": "NodeNext",
"isolatedModules": true,
"strict": true,
"verbatimModuleSyntax": true,
"noEmit": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "NodeNext",
"baseUrl": ".",
"paths": {
"marked": [


@ -1,16 +1,11 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"target": "es2022",
"module": "NodeNext",
"strict": true,
"noEmit": false,
"declaration": true,
"emitDeclarationOnly": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "NodeNext",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": false,
"isolatedModules": false,
"outFile": "lib/marked.d.ts"
},
"include": [
"src/*.ts"
]
}
}