/**
 * marked - a markdown parser
 * Copyright (c) 2011-2021, Christopher Jeffrey. (MIT Licensed)
 * https://github.com/markedjs/marked
 */

/**
 * DO NOT EDIT THIS FILE
 * The code in this file is generated from files in ./src/
 */
var marked$1 = { exports: {} };

var defaults$5 = { exports: {} };
2019-11-07 12:49:10 -06:00
2021-08-16 03:09:18 +00:00
function getDefaults$1 ( ) {
2019-11-07 12:49:10 -06:00
return {
baseUrl : null ,
breaks : false ,
2021-06-15 19:22:00 -04:00
extensions : null ,
2019-11-07 12:49:10 -06:00
gfm : true ,
headerIds : true ,
headerPrefix : '' ,
highlight : null ,
langPrefix : 'language-' ,
mangle : true ,
pedantic : false ,
renderer : null ,
sanitize : false ,
sanitizer : null ,
silent : false ,
smartLists : false ,
smartypants : false ,
2020-04-08 13:06:43 -05:00
tokenizer : null ,
2020-05-14 15:54:39 +00:00
walkTokens : null ,
2019-11-07 12:49:10 -06:00
xhtml : false
} ;
}
2021-08-16 03:09:18 +00:00
function changeDefaults$1 ( newDefaults ) {
defaults$5 . exports . defaults = newDefaults ;
2019-11-07 12:49:10 -06:00
}
2021-08-16 03:09:18 +00:00
defaults$5 . exports = {
defaults : getDefaults$1 ( ) ,
getDefaults : getDefaults$1 ,
changeDefaults : changeDefaults$1
2019-11-07 12:49:10 -06:00
} ;
/**
 * Helpers
 */

const escapeTest = /[&<>"']/;
const escapeReplace = /[&<>"']/g;
const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/;
const escapeReplaceNoEncode = /[<>"']|&(?!#?\w+;)/g;
const escapeReplacements = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  '"': '&quot;',
  "'": '&#39;'
};
const getEscapeReplacement = (ch) => escapeReplacements[ch];

function escape$3(html, encode) {
  if (encode) {
    if (escapeTest.test(html)) {
      return html.replace(escapeReplace, getEscapeReplacement);
    }
  } else {
    if (escapeTestNoEncode.test(html)) {
      return html.replace(escapeReplaceNoEncode, getEscapeReplacement);
    }
  }

  return html;
}
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;

function unescape$1(html) {
  // explicitly match decimal, hex, and named HTML entities
  return html.replace(unescapeTest, (_, n) => {
    n = n.toLowerCase();
    if (n === 'colon') return ':';
    if (n.charAt(0) === '#') {
      return n.charAt(1) === 'x'
        ? String.fromCharCode(parseInt(n.substring(2), 16))
        : String.fromCharCode(+n.substring(1));
    }
    return '';
  });
}
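
// Note: edit() below is a small regex builder. It swaps named placeholders inside a
// pattern's source for other sub-patterns (stripping their leading ^ anchors) and
// compiles the result with getRegex(). The block- and inline-level grammar tables
// later in this file use it to assemble rules from shared fragments.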
const caret = /(^|[^\[])\^/g;

function edit$1(regex, opt) {
  regex = regex.source || regex;
  opt = opt || '';
  const obj = {
    replace: (name, val) => {
      val = val.source || val;
      val = val.replace(caret, '$1');
      regex = regex.replace(name, val);
      return obj;
    },
    getRegex: () => {
      return new RegExp(regex, opt);
    }
  };
  return obj;
}
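
// cleanUrl$1() is the link sanitizer: with `sanitize` on it decodes and normalizes the
// href and rejects javascript:, vbscript: and data: protocols (returning null drops the
// link); it then resolves the href against `base` if given and percent-encodes it.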
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;

function cleanUrl$1(sanitize, base, href) {
  if (sanitize) {
    let prot;
    try {
      prot = decodeURIComponent(unescape$1(href))
        .replace(nonWordAndColonTest, '')
        .toLowerCase();
    } catch (e) {
      return null;
    }
    if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0) {
      return null;
    }
  }
  if (base && !originIndependentUrl.test(href)) {
    href = resolveUrl(base, href);
  }
  try {
    href = encodeURI(href).replace(/%25/g, '%');
  } catch (e) {
    return null;
  }
  return href;
}
const baseUrls = {};
const justDomain = /^[^:]+:\/*[^/]*$/;
const protocol = /^([^:]+:)[\s\S]*$/;
const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/;

function resolveUrl(base, href) {
  if (!baseUrls[' ' + base]) {
    // we can ignore everything in base after the last slash of its path component,
    // but we might need to add _that_
    // https://tools.ietf.org/html/rfc3986#section-3
    if (justDomain.test(base)) {
      baseUrls[' ' + base] = base + '/';
    } else {
      baseUrls[' ' + base] = rtrim$1(base, '/', true);
    }
  }
  base = baseUrls[' ' + base];
  const relativeBase = base.indexOf(':') === -1;

  if (href.substring(0, 2) === '//') {
    if (relativeBase) {
      return href;
    }
    return base.replace(protocol, '$1') + href;
  } else if (href.charAt(0) === '/') {
    if (relativeBase) {
      return href;
    }
    return base.replace(domain, '$1') + href;
  } else {
    return base + href;
  }
}
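
// noopTest$1 stands in for a rule that never matches (its exec() returns undefined),
// and merge$2 shallow-copies the own properties of each later argument onto the first.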
const noopTest$1 = { exec: function noopTest() {} };

function merge$2(obj) {
  let i = 1,
    target,
    key;

  for (; i < arguments.length; i++) {
    target = arguments[i];
    for (key in target) {
      if (Object.prototype.hasOwnProperty.call(target, key)) {
        obj[key] = target[key];
      }
    }
  }

  return obj;
}
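
// splitCells$1 splits one table row into its cells: unescaped pipes are the delimiters
// (escaped `\|` pipes are kept literal), and the result is padded or truncated to
// `count` cells so every row matches the header width.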
function splitCells$1(tableRow, count) {
  // ensure that every cell-delimiting pipe has a space
  // before it to distinguish it from an escaped pipe
  const row = tableRow.replace(/\|/g, (match, offset, str) => {
      let escaped = false,
        curr = offset;
      while (--curr >= 0 && str[curr] === '\\') escaped = !escaped;
      if (escaped) {
        // odd number of slashes means | is escaped
        // so we leave it alone
        return '|';
      } else {
        // add space before unescaped |
        return ' |';
      }
    }),
    cells = row.split(/ \|/);
  let i = 0;

  // First/last cell in a row cannot be empty if it has no leading/trailing pipe
  if (!cells[0].trim()) { cells.shift(); }
  if (!cells[cells.length - 1].trim()) { cells.pop(); }

  if (cells.length > count) {
    cells.splice(count);
  } else {
    while (cells.length < count) cells.push('');
  }

  for (; i < cells.length; i++) {
    // leading or trailing whitespace is ignored per the gfm spec
    cells[i] = cells[i].trim().replace(/\\\|/g, '|');
  }
  return cells;
}
// Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
// /c*$/ is vulnerable to REDOS.
// invert: Remove suffix of non-c chars instead. Default falsey.
function rtrim$1(str, c, invert) {
  const l = str.length;
  if (l === 0) {
    return '';
  }

  // Length of suffix matching the invert condition.
  let suffLen = 0;

  // Step left until we fail to match the invert condition.
  while (suffLen < l) {
    const currChar = str.charAt(l - suffLen - 1);
    if (currChar === c && !invert) {
      suffLen++;
    } else if (currChar !== c && invert) {
      suffLen++;
    } else {
      break;
    }
  }

  return str.substr(0, l - suffLen);
}
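
// findClosingBracket$1 walks the string counting nested b[0]/b[1] pairs (skipping
// backslash-escaped characters) and returns the index of the bracket that closes the
// current nesting level, or -1 if it never closes.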
function findClosingBracket$1(str, b) {
  if (str.indexOf(b[1]) === -1) {
    return -1;
  }
  const l = str.length;
  let level = 0,
    i = 0;
  for (; i < l; i++) {
    if (str[i] === '\\') {
      i++;
    } else if (str[i] === b[0]) {
      level++;
    } else if (str[i] === b[1]) {
      level--;
      if (level < 0) {
        return i;
      }
    }
  }
  return -1;
}

function checkSanitizeDeprecation$1(opt) {
  if (opt && opt.sanitize && !opt.silent) {
    console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
  }
}
// copied from https://stackoverflow.com/a/5450113/806777
function repeatString$1(pattern, count) {
  if (count < 1) {
    return '';
  }
  let result = '';
  while (count > 1) {
    if (count & 1) {
      result += pattern;
    }
    count >>= 1;
    pattern += pattern;
  }
  return result + pattern;
}
var helpers = {
  escape: escape$3,
  unescape: unescape$1,
  edit: edit$1,
  cleanUrl: cleanUrl$1,
  resolveUrl,
  noopTest: noopTest$1,
  merge: merge$2,
  splitCells: splitCells$1,
  rtrim: rtrim$1,
  findClosingBracket: findClosingBracket$1,
  checkSanitizeDeprecation: checkSanitizeDeprecation$1,
  repeatString: repeatString$1
};

const { defaults: defaults$4 } = defaults$5.exports;
const {
  rtrim,
  splitCells,
  escape: escape$2,
  findClosingBracket
} = helpers;
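
// outputLink() turns a link-style capture plus a link definition into a `link` or
// `image` token; for links it tokenizes the bracketed text as inline children while
// the lexer's inLink flag is set.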
function outputLink(cap, link, raw, lexer) {
  const href = link.href;
  const title = link.title ? escape$2(link.title) : null;
  const text = cap[1].replace(/\\([\[\]])/g, '$1');

  if (cap[0].charAt(0) !== '!') {
    lexer.state.inLink = true;
    const token = {
      type: 'link',
      raw,
      href,
      title,
      text,
      tokens: lexer.inlineTokens(text, [])
    };
    lexer.state.inLink = false;
    return token;
  } else {
    return {
      type: 'image',
      raw,
      href,
      title,
      text: escape$2(text)
    };
  }
}
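
// indentCodeCompensation() handles fenced code blocks that are themselves indented:
// whatever indentation precedes the opening ``` is stripped from each line of the
// block's text so the code keeps its intended relative indentation.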
function indentCodeCompensation(raw, text) {
  const matchIndentToCode = raw.match(/^(\s+)(?:```)/);

  if (matchIndentToCode === null) {
    return text;
  }

  const indentToCode = matchIndentToCode[1];

  return text
    .split('\n')
    .map(node => {
      const matchIndentInNode = node.match(/^\s+/);
      if (matchIndentInNode === null) {
        return node;
      }

      const [indentInNode] = matchIndentInNode;

      if (indentInNode.length >= indentToCode.length) {
        return node.slice(indentToCode.length);
      }

      return node;
    })
    .join('\n');
}
/**
 * Tokenizer
 */
var Tokenizer_1$1 = class Tokenizer {
  constructor(options) {
    this.options = options || defaults$4;
  }

  space(src) {
    const cap = this.rules.block.newline.exec(src);
    if (cap) {
      if (cap[0].length > 1) {
        return {
          type: 'space',
          raw: cap[0]
        };
      }
      return { raw: '\n' };
    }
  }

  code(src) {
    const cap = this.rules.block.code.exec(src);
    if (cap) {
      const text = cap[0].replace(/^ {1,4}/gm, '');
      return {
        type: 'code',
        raw: cap[0],
        codeBlockStyle: 'indented',
        text: !this.options.pedantic
          ? rtrim(text, '\n')
          : text
      };
    }
  }

  fences(src) {
    const cap = this.rules.block.fences.exec(src);
    if (cap) {
      const raw = cap[0];
      const text = indentCodeCompensation(raw, cap[3] || '');

      return {
        type: 'code',
        raw,
        lang: cap[2] ? cap[2].trim() : cap[2],
        text
      };
    }
  }

  heading(src) {
    const cap = this.rules.block.heading.exec(src);
    if (cap) {
      let text = cap[2].trim();

      // remove trailing #s
      if (/#$/.test(text)) {
        const trimmed = rtrim(text, '#');
        if (this.options.pedantic) {
          text = trimmed.trim();
        } else if (!trimmed || / $/.test(trimmed)) {
          // CommonMark requires space before trailing #s
          text = trimmed.trim();
        }
      }

      const token = {
        type: 'heading',
        raw: cap[0],
        depth: cap[1].length,
        text: text,
        tokens: []
      };
      this.lexer.inline(token.text, token.tokens);
      return token;
    }
  }

  hr(src) {
    const cap = this.rules.block.hr.exec(src);
    if (cap) {
      return {
        type: 'hr',
        raw: cap[0]
      };
    }
  }

  blockquote(src) {
    const cap = this.rules.block.blockquote.exec(src);
    if (cap) {
      const text = cap[0].replace(/^ *> ?/gm, '');

      return {
        type: 'blockquote',
        raw: cap[0],
        tokens: this.lexer.blockTokens(text, []),
        text
      };
    }
  }
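
  // list() consumes one whole top-level list: it builds a bullet-specific item regex,
  // peels items off `src` one at a time while tracking blank lines (which mark the
  // list as "loose"), and only lexes each item's child tokens once the final item has
  // been trimmed.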
  list(src) {
    let cap = this.rules.block.list.exec(src);
    if (cap) {
      let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine,
        line, lines, itemContents;

      let bull = cap[1].trim();
      const isordered = bull.length > 1;

      const list = {
        type: 'list',
        raw: '',
        ordered: isordered,
        start: isordered ? +bull.slice(0, -1) : '',
        loose: false,
        items: []
      };

      bull = isordered ? `\\d{1,9}\\${bull.slice(-1)}` : `\\${bull}`;

      if (this.options.pedantic) {
        bull = isordered ? bull : '[*+-]';
      }

      // Get next list item
      const itemRegex = new RegExp(`^( {0,3}${bull})((?: [^\\n]*| *)(?:\\n[^\\n]*)*(?:\\n|$))`);

      // Get each top-level item
      while (src) {
        if (this.rules.block.hr.test(src)) { // End list if we encounter an HR (possibly move into itemRegex?)
          break;
        }

        if (!(cap = itemRegex.exec(src))) {
          break;
        }

        lines = cap[2].split('\n');

        if (this.options.pedantic) {
          indent = 2;
          itemContents = lines[0].trimLeft();
        } else {
          indent = cap[2].search(/[^ ]/); // Find first non-space char
          indent = cap[1].length + (indent > 4 ? 1 : indent); // intented code blocks after 4 spaces; indent is always 1
          itemContents = lines[0].slice(indent - cap[1].length);
        }

        blankLine = false;
        raw = cap[0];

        if (!lines[0] && /^ *$/.test(lines[1])) { // items begin with at most one blank line
          raw = cap[1] + lines.slice(0, 2).join('\n') + '\n';
          list.loose = true;
          lines = [];
        }

        const nextBulletRegex = new RegExp(`^ {0,${Math.min(3, indent - 1)}}(?:[*+-]|\\d{1,9}[.)])`);

        for (i = 1; i < lines.length; i++) {
          line = lines[i];

          if (this.options.pedantic) { // Re-align to follow commonmark nesting rules
            line = line.replace(/^ {1,4}(?=( {4})*[^ ])/g, '  ');
          }

          // End list item if found start of new bullet
          if (nextBulletRegex.test(line)) {
            raw = cap[1] + lines.slice(0, i).join('\n') + '\n';
            break;
          }

          // Until we encounter a blank line, item contents do not need indentation
          if (!blankLine) {
            if (!line.trim()) { // Check if current line is empty
              blankLine = true;
            }

            // Dedent if possible
            if (line.search(/[^ ]/) >= indent) {
              itemContents += '\n' + line.slice(indent);
            } else {
              itemContents += '\n' + line;
            }
            continue;
          }

          // Dedent this line
          if (line.search(/[^ ]/) >= indent || !line.trim()) {
            itemContents += '\n' + line.slice(indent);
            continue;
          } else { // Line was not properly indented; end of this item
            raw = cap[1] + lines.slice(0, i).join('\n') + '\n';
            break;
          }
        }

        if (!list.loose) {
          // If the previous item ended with a blank line, the list is loose
          if (endsWithBlankLine) {
            list.loose = true;
          } else if (/\n *\n *$/.test(raw)) {
            endsWithBlankLine = true;
          }
        }

        // Check for task list items
        if (this.options.gfm) {
          istask = /^\[[ xX]\] /.exec(itemContents);
          if (istask) {
            ischecked = istask[0] !== '[ ] ';
            itemContents = itemContents.replace(/^\[[ xX]\] +/, '');
          }
        }

        list.items.push({
          type: 'list_item',
          raw: raw,
          task: !!istask,
          checked: ischecked,
          loose: false,
          text: itemContents
        });

        list.raw += raw;
        src = src.slice(raw.length);
      }

      // Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
      list.items[list.items.length - 1].raw = raw.trimRight();
      list.items[list.items.length - 1].text = itemContents.trimRight();
      list.raw = list.raw.trimRight();

      const l = list.items.length;

      // Item child tokens handled here at end because we needed to have the final item to trim it first
      for (i = 0; i < l; i++) {
        this.lexer.state.top = false;
        list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
        if (list.items[i].tokens.some(t => t.type === 'space')) {
          list.loose = true;
          list.items[i].loose = true;
        }
      }

      return list;
    }
  }

  html(src) {
    const cap = this.rules.block.html.exec(src);
    if (cap) {
      const token = {
        type: 'html',
        raw: cap[0],
        pre: !this.options.sanitizer
          && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
        text: cap[0]
      };
      if (this.options.sanitize) {
        token.type = 'paragraph';
        token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0]);
        token.tokens = [];
        this.lexer.inline(token.text, token.tokens);
      }
      return token;
    }
  }

  def(src) {
    const cap = this.rules.block.def.exec(src);
    if (cap) {
      if (cap[3]) cap[3] = cap[3].substring(1, cap[3].length - 1);
      const tag = cap[1].toLowerCase().replace(/\s+/g, ' ');
      return {
        type: 'def',
        tag,
        raw: cap[0],
        href: cap[2],
        title: cap[3]
      };
    }
  }
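
  // table() is only reachable from the GFM block grammar (the normal grammar maps
  // `table` to noopTest). It splits the header row, the alignment row, and the data
  // rows, and bails out (returns undefined) when the header and alignment row have a
  // different number of columns.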
  table(src) {
    const cap = this.rules.block.table.exec(src);
    if (cap) {
      const item = {
        type: 'table',
        header: splitCells(cap[1]).map(c => { return { text: c }; }),
        align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
        rows: cap[3] ? cap[3].replace(/\n$/, '').split('\n') : []
      };

      if (item.header.length === item.align.length) {
        item.raw = cap[0];

        let l = item.align.length;
        let i, j, k, row;
        for (i = 0; i < l; i++) {
          if (/^ *-+: *$/.test(item.align[i])) {
            item.align[i] = 'right';
          } else if (/^ *:-+: *$/.test(item.align[i])) {
            item.align[i] = 'center';
          } else if (/^ *:-+ *$/.test(item.align[i])) {
            item.align[i] = 'left';
          } else {
            item.align[i] = null;
          }
        }

        l = item.rows.length;
        for (i = 0; i < l; i++) {
          item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => { return { text: c }; });
        }

        // parse child tokens inside headers and cells

        // header child tokens
        l = item.header.length;
        for (j = 0; j < l; j++) {
          item.header[j].tokens = [];
          this.lexer.inlineTokens(item.header[j].text, item.header[j].tokens);
        }

        // cell child tokens
        l = item.rows.length;
        for (j = 0; j < l; j++) {
          row = item.rows[j];
          for (k = 0; k < row.length; k++) {
            row[k].tokens = [];
            this.lexer.inlineTokens(row[k].text, row[k].tokens);
          }
        }

        return item;
      }
    }
  }

  lheading(src) {
    const cap = this.rules.block.lheading.exec(src);
    if (cap) {
      const token = {
        type: 'heading',
        raw: cap[0],
        depth: cap[2].charAt(0) === '=' ? 1 : 2,
        text: cap[1],
        tokens: []
      };
      this.lexer.inline(token.text, token.tokens);
      return token;
    }
  }

  paragraph(src) {
    const cap = this.rules.block.paragraph.exec(src);
    if (cap) {
      const token = {
        type: 'paragraph',
        raw: cap[0],
        text: cap[1].charAt(cap[1].length - 1) === '\n'
          ? cap[1].slice(0, -1)
          : cap[1],
        tokens: []
      };
      this.lexer.inline(token.text, token.tokens);
      return token;
    }
  }

  text(src) {
    const cap = this.rules.block.text.exec(src);
    if (cap) {
      const token = {
        type: 'text',
        raw: cap[0],
        text: cap[0],
        tokens: []
      };
      this.lexer.inline(token.text, token.tokens);
      return token;
    }
  }

  escape(src) {
    const cap = this.rules.inline.escape.exec(src);
    if (cap) {
      return {
        type: 'escape',
        raw: cap[0],
        text: escape$2(cap[1])
      };
    }
  }

  tag(src) {
    const cap = this.rules.inline.tag.exec(src);
    if (cap) {
      if (!this.lexer.state.inLink && /^<a /i.test(cap[0])) {
        this.lexer.state.inLink = true;
      } else if (this.lexer.state.inLink && /^<\/a>/i.test(cap[0])) {
        this.lexer.state.inLink = false;
      }
      if (!this.lexer.state.inRawBlock && /^<(pre|code|kbd|script)(\s|>)/i.test(cap[0])) {
        this.lexer.state.inRawBlock = true;
      } else if (this.lexer.state.inRawBlock && /^<\/(pre|code|kbd|script)(\s|>)/i.test(cap[0])) {
        this.lexer.state.inRawBlock = false;
      }

      return {
        type: this.options.sanitize
          ? 'text'
          : 'html',
        raw: cap[0],
        inLink: this.lexer.state.inLink,
        inRawBlock: this.lexer.state.inRawBlock,
        text: this.options.sanitize
          ? (this.options.sanitizer
            ? this.options.sanitizer(cap[0])
            : escape$2(cap[0]))
          : cap[0]
      };
    }
  }

  link(src) {
    const cap = this.rules.inline.link.exec(src);
    if (cap) {
      const trimmedUrl = cap[2].trim();
      if (!this.options.pedantic && /^</.test(trimmedUrl)) {
        // commonmark requires matching angle brackets
        if (!(/>$/.test(trimmedUrl))) {
          return;
        }

        // ending angle bracket cannot be escaped
        const rtrimSlash = rtrim(trimmedUrl.slice(0, -1), '\\');
        if ((trimmedUrl.length - rtrimSlash.length) % 2 === 0) {
          return;
        }
      } else {
        // find closing parenthesis
        const lastParenIndex = findClosingBracket(cap[2], '()');
        if (lastParenIndex > -1) {
          const start = cap[0].indexOf('!') === 0 ? 5 : 4;
          const linkLen = start + cap[1].length + lastParenIndex;
          cap[2] = cap[2].substring(0, lastParenIndex);
          cap[0] = cap[0].substring(0, linkLen).trim();
          cap[3] = '';
        }
      }
      let href = cap[2];
      let title = '';
      if (this.options.pedantic) {
        // split pedantic href and title
        const link = /^([^'"]*[^\s])\s+(['"])(.*)\2/.exec(href);

        if (link) {
          href = link[1];
          title = link[3];
        }
      } else {
        title = cap[3] ? cap[3].slice(1, -1) : '';
      }

      href = href.trim();
      if (/^</.test(href)) {
        if (this.options.pedantic && !(/>$/.test(trimmedUrl))) {
          // pedantic allows starting angle bracket without ending angle bracket
          href = href.slice(1);
        } else {
          href = href.slice(1, -1);
        }
      }
      return outputLink(cap, {
        href: href ? href.replace(this.rules.inline._escapes, '$1') : href,
        title: title ? title.replace(this.rules.inline._escapes, '$1') : title
      }, cap[0], this.lexer);
    }
  }

  reflink(src, links) {
    let cap;
    if ((cap = this.rules.inline.reflink.exec(src))
        || (cap = this.rules.inline.nolink.exec(src))) {
      let link = (cap[2] || cap[1]).replace(/\s+/g, ' ');
      link = links[link.toLowerCase()];
      if (!link || !link.href) {
        const text = cap[0].charAt(0);
        return {
          type: 'text',
          raw: text,
          text
        };
      }
      return outputLink(cap, link, cap[0], this.lexer);
    }
  }
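
  // emStrong() implements the CommonMark emphasis rules by counting runs of * / _
  // delimiters: it scans the masked source for a matching right-delimiter run and
  // emits an `em` token when the smaller run has an odd length, otherwise `strong`.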
  emStrong(src, maskedSrc, prevChar = '') {
    let match = this.rules.inline.emStrong.lDelim.exec(src);
    if (!match) return;

    // _ can't be between two alphanumerics. \p{L}\p{N} includes non-english alphabet/numbers as well
    if (match[3] && prevChar.match(/[\p{L}\p{N}]/u)) return;

    const nextChar = match[1] || match[2] || '';

    if (!nextChar || (nextChar && (prevChar === '' || this.rules.inline.punctuation.exec(prevChar)))) {
      const lLength = match[0].length - 1;
      let rDelim, rLength, delimTotal = lLength, midDelimTotal = 0;

      const endReg = match[0][0] === '*' ? this.rules.inline.emStrong.rDelimAst : this.rules.inline.emStrong.rDelimUnd;
      endReg.lastIndex = 0;

      // Clip maskedSrc to same section of string as src (move to lexer?)
      maskedSrc = maskedSrc.slice(-1 * src.length + lLength);

      while ((match = endReg.exec(maskedSrc)) != null) {
        rDelim = match[1] || match[2] || match[3] || match[4] || match[5] || match[6];

        if (!rDelim) continue; // skip single * in __abc*abc__

        rLength = rDelim.length;

        if (match[3] || match[4]) { // found another Left Delim
          delimTotal += rLength;
          continue;
        } else if (match[5] || match[6]) { // either Left or Right Delim
          if (lLength % 3 && !((lLength + rLength) % 3)) {
            midDelimTotal += rLength;
            continue; // CommonMark Emphasis Rules 9-10
          }
        }

        delimTotal -= rLength;

        if (delimTotal > 0) continue; // Haven't found enough closing delimiters

        // Remove extra characters. *a*** -> *a*
        rLength = Math.min(rLength, rLength + delimTotal + midDelimTotal);

        // Create `em` if smallest delimiter has odd char count. *a***
        if (Math.min(lLength, rLength) % 2) {
          const text = src.slice(1, lLength + match.index + rLength);
          return {
            type: 'em',
            raw: src.slice(0, lLength + match.index + rLength + 1),
            text,
            tokens: this.lexer.inlineTokens(text, [])
          };
        }

        // Create 'strong' if smallest delimiter has even char count. **a***
        const text = src.slice(2, lLength + match.index + rLength - 1);
        return {
          type: 'strong',
          raw: src.slice(0, lLength + match.index + rLength + 1),
          text,
          tokens: this.lexer.inlineTokens(text, [])
        };
      }
    }
  }

  codespan(src) {
    const cap = this.rules.inline.code.exec(src);
    if (cap) {
      let text = cap[2].replace(/\n/g, ' ');
      const hasNonSpaceChars = /[^ ]/.test(text);
      const hasSpaceCharsOnBothEnds = /^ /.test(text) && / $/.test(text);
      if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) {
        text = text.substring(1, text.length - 1);
      }
      text = escape$2(text, true);
      return {
        type: 'codespan',
        raw: cap[0],
        text
      };
    }
  }

  br(src) {
    const cap = this.rules.inline.br.exec(src);
    if (cap) {
      return {
        type: 'br',
        raw: cap[0]
      };
    }
  }

  del(src) {
    const cap = this.rules.inline.del.exec(src);
    if (cap) {
      return {
        type: 'del',
        raw: cap[0],
        text: cap[2],
        tokens: this.lexer.inlineTokens(cap[2], [])
      };
    }
  }

  autolink(src, mangle) {
    const cap = this.rules.inline.autolink.exec(src);
    if (cap) {
      let text, href;
      if (cap[2] === '@') {
        text = escape$2(this.options.mangle ? mangle(cap[1]) : cap[1]);
        href = 'mailto:' + text;
      } else {
        text = escape$2(cap[1]);
        href = text;
      }

      return {
        type: 'link',
        raw: cap[0],
        text,
        href,
        tokens: [
          {
            type: 'text',
            raw: text,
            text
          }
        ]
      };
    }
  }

  url(src, mangle) {
    let cap;
    if (cap = this.rules.inline.url.exec(src)) {
      let text, href;
      if (cap[2] === '@') {
        text = escape$2(this.options.mangle ? mangle(cap[0]) : cap[0]);
        href = 'mailto:' + text;
      } else {
        // do extended autolink path validation
        let prevCapZero;
        do {
          prevCapZero = cap[0];
          cap[0] = this.rules.inline._backpedal.exec(cap[0])[0];
        } while (prevCapZero !== cap[0]);
        text = escape$2(cap[0]);
        if (cap[1] === 'www.') {
          href = 'http://' + text;
        } else {
          href = text;
        }
      }
      return {
        type: 'link',
        raw: cap[0],
        text,
        href,
        tokens: [
          {
            type: 'text',
            raw: text,
            text
          }
        ]
      };
    }
  }

  inlineText(src, smartypants) {
    const cap = this.rules.inline.text.exec(src);
    if (cap) {
      let text;
      if (this.lexer.state.inRawBlock) {
        text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0])) : cap[0];
      } else {
        text = escape$2(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
      }
      return {
        type: 'text',
        raw: cap[0],
        text
      };
    }
  }
};

const {
  noopTest,
  edit,
  merge: merge$1
} = helpers;

/**
 * Block-Level Grammar
 */
const block$1 = {
  newline: /^(?: *(?:\n|$))+/,
  code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
  fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
  hr: /^ {0,3}((?:- *){3,}|(?:_ *){3,}|(?:\* *){3,})(?:\n+|$)/,
  heading: /^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/,
  blockquote: /^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/,
  list: /^( {0,3}bull)( [^\n]+?)?(?:\n|$)/,
  html: '^ {0,3}(?:' // optional indentation
    + '<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:</\\1>[^\\n]*\\n+|$)' // (1)
    + '|comment[^\\n]*(\\n+|$)' // (2)
    + '|<\\?[\\s\\S]*?(?:\\?>\\n*|$)' // (3)
    + '|<![A-Z][\\s\\S]*?(?:>\\n*|$)' // (4)
    + '|<!\\[CDATA\\[[\\s\\S]*?(?:\\]\\]>\\n*|$)' // (5)
    + '|</?(tag)(?: +|\\n|/?>)[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (6)
    + '|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (7) open tag
    + '|</(?!script|pre|style|textarea)[a-z][\\w-]*\\s*>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (7) closing tag
    + ')',
  def: /^ {0,3}\[(label)\]: *\n? *<?([^\s>]+)>?(?:(?: +\n? *| *\n *)(title))? *(?:\n+|$)/,
  table: noopTest,
  lheading: /^([^\n]+)\n {0,3}(=+|-+) *(?:\n+|$)/,
  // regex template, placeholders will be replaced according to different paragraph
  // interruption rules of commonmark and the original markdown spec:
  _paragraph: /^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html| +\n)[^\n]+)*)/,
  text: /^[^\n]+/
};

block$1._label = /(?!\s*\])(?:\\[\[\]]|[^\[\]])+/;
block$1._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/;
block$1.def = edit(block$1.def)
  .replace('label', block$1._label)
  .replace('title', block$1._title)
  .getRegex();

block$1.bullet = /(?:[*+-]|\d{1,9}[.)])/;
block$1.listItemStart = edit(/^( *)(bull) */)
  .replace('bull', block$1.bullet)
  .getRegex();

block$1.list = edit(block$1.list)
  .replace(/bull/g, block$1.bullet)
  .replace('hr', '\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))')
  .replace('def', '\\n+(?=' + block$1.def.source + ')')
  .getRegex();

block$1._tag = 'address|article|aside|base|basefont|blockquote|body|caption'
  + '|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption'
  + '|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe'
  + '|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option'
  + '|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr'
  + '|track|ul';
block$1._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/;
block$1.html = edit(block$1.html, 'i')
  .replace('comment', block$1._comment)
  .replace('tag', block$1._tag)
  .replace('attribute', / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/)
  .getRegex();

block$1.paragraph = edit(block$1._paragraph)
  .replace('hr', block$1.hr)
  .replace('heading', ' {0,3}#{1,6} ')
  .replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
  .replace('blockquote', ' {0,3}>')
  .replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
  .replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
  .replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
  .replace('tag', block$1._tag) // pars can be interrupted by type (6) html blocks
  .getRegex();

block$1.blockquote = edit(block$1.blockquote)
  .replace('paragraph', block$1.paragraph)
  .getRegex();

/**
 * Normal Block Grammar
 */

block$1.normal = merge$1({}, block$1);

/**
 * GFM Block Grammar
 */

block$1.gfm = merge$1({}, block$1.normal, {
  table: '^ *([^\\n ].*\\|.*)\\n' // Header
    + ' {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?' // Align
    + '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells
});

block$1.gfm.table = edit(block$1.gfm.table)
  .replace('hr', block$1.hr)
  .replace('heading', ' {0,3}#{1,6} ')
  .replace('blockquote', ' {0,3}>')
  .replace('code', ' {4}[^\\n]')
  .replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
  .replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
  .replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
  .replace('tag', block$1._tag) // tables can be interrupted by type (6) html blocks
  .getRegex();

/**
 * Pedantic grammar (original John Gruber's loose markdown specification)
 */

block$1.pedantic = merge$1({}, block$1.normal, {
  html: edit(
    '^ *(?:comment *(?:\\n|\\s*$)'
    + '|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)' // closed tag
    + '|<tag(?:"[^"]*"|\'[^\']*\'|\\s[^\'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))')
    .replace('comment', block$1._comment)
    .replace(/tag/g, '(?!(?:'
      + 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub'
      + '|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)'
      + '\\b)\\w+(?!:|[^\\w\\s@]*@)\\b')
    .getRegex(),
  def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/,
  heading: /^(#{1,6})(.*)(?:\n+|$)/,
  fences: noopTest, // fences not supported
  paragraph: edit(block$1.normal._paragraph)
    .replace('hr', block$1.hr)
    .replace('heading', ' *#{1,6} *[^\n]')
    .replace('lheading', block$1.lheading)
    .replace('blockquote', ' {0,3}>')
    .replace('|fences', '')
    .replace('|list', '')
    .replace('|html', '')
    .getRegex()
});

/**
 * Inline-Level Grammar
 */
const inline$1 = {
  escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
  autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
  url: noopTest,
  tag: '^comment'
    + '|^</[a-zA-Z][\\w:-]*\\s*>' // self-closing tag
    + '|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>' // open tag
    + '|^<\\?[\\s\\S]*?\\?>' // processing instruction, e.g. <?php ?>
    + '|^<![a-zA-Z]+\\s[\\s\\S]*?>' // declaration, e.g. <!DOCTYPE html>
    + '|^<!\\[CDATA\\[[\\s\\S]*?\\]\\]>', // CDATA section
  link: /^!?\[(label)\]\(\s*(href)(?:\s+(title))?\s*\)/,
  reflink: /^!?\[(label)\]\[(?!\s*\])((?:\\[\[\]]?|[^\[\]\\])+)\]/,
  nolink: /^!?\[(?!\s*\])((?:\[[^\[\]]*\]|\\[\[\]]|[^\[\]])*)\](?:\[\])?/,
  reflinkSearch: 'reflink|nolink(?!\\()',
  emStrong: {
    lDelim: /^(?:\*+(?:([punct_])|[^\s*]))|^_+(?:([punct*])|([^\s_]))/,
    // (1) and (2) can only be a Right Delimiter. (3) and (4) can only be Left. (5) and (6) can be either Left or Right.
    // () Skip orphan delim inside strong (1) #*** (2) a***#, a*** (3) #***a, ***a (4) ***# (5) #***# (6) a***a
    rDelimAst: /^[^_*]*?\_\_[^_*]*?\*[^_*]*?(?=\_\_)|[punct_](\*+)(?=[\s]|$)|[^punct*_\s](\*+)(?=[punct_\s]|$)|[punct_\s](\*+)(?=[^punct*_\s])|[\s](\*+)(?=[punct_])|[punct_](\*+)(?=[punct_])|[^punct*_\s](\*+)(?=[^punct*_\s])/,
    rDelimUnd: /^[^_*]*?\*\*[^_*]*?\_[^_*]*?(?=\*\*)|[punct*](\_+)(?=[\s]|$)|[^punct*_\s](\_+)(?=[punct*\s]|$)|[punct*\s](\_+)(?=[^punct*_\s])|[\s](\_+)(?=[punct*])|[punct*](\_+)(?=[punct*])/ // ^- Not allowed for _
  },
  code: /^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,
  br: /^( {2,}|\\)\n(?!\s*$)/,
  del: noopTest,
  text: /^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\<!\[`*_]|\b_|$)|[^ ](?= {2,}\n)))/,
  punctuation: /^([\spunctuation])/
};

// list of punctuation marks from CommonMark spec
// without * and _ to handle the different emphasis markers * and _
inline$1._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~';
inline$1.punctuation = edit(inline$1.punctuation).replace(/punctuation/g, inline$1._punctuation).getRegex();

// sequences em should skip over [title](link), `code`, <html>
inline$1.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g;
inline$1.escapedEmSt = /\\\*|\\_/g;

inline$1._comment = edit(block$1._comment).replace('(?:-->|$)', '-->').getRegex();

inline$1.emStrong.lDelim = edit(inline$1.emStrong.lDelim)
  .replace(/punct/g, inline$1._punctuation)
  .getRegex();

inline$1.emStrong.rDelimAst = edit(inline$1.emStrong.rDelimAst, 'g')
  .replace(/punct/g, inline$1._punctuation)
  .getRegex();

inline$1.emStrong.rDelimUnd = edit(inline$1.emStrong.rDelimUnd, 'g')
  .replace(/punct/g, inline$1._punctuation)
  .getRegex();

inline$1._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g;

inline$1._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/;
inline$1._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/;
inline$1.autolink = edit(inline$1.autolink)
  .replace('scheme', inline$1._scheme)
  .replace('email', inline$1._email)
  .getRegex();

inline$1._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/;

inline$1.tag = edit(inline$1.tag)
  .replace('comment', inline$1._comment)
  .replace('attribute', inline$1._attribute)
  .getRegex();

inline$1._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/;
inline$1._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/;
inline$1._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/;

inline$1.link = edit(inline$1.link)
  .replace('label', inline$1._label)
  .replace('href', inline$1._href)
  .replace('title', inline$1._title)
  .getRegex();

inline$1.reflink = edit(inline$1.reflink)
  .replace('label', inline$1._label)
  .getRegex();

inline$1.reflinkSearch = edit(inline$1.reflinkSearch, 'g')
  .replace('reflink', inline$1.reflink)
  .replace('nolink', inline$1.nolink)
  .getRegex();

/**
 * Normal Inline Grammar
 */

inline$1.normal = merge$1({}, inline$1);

/**
 * Pedantic Inline Grammar
 */

inline$1.pedantic = merge$1({}, inline$1.normal, {
  strong: {
    start: /^__|\*\*/,
    middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
    endAst: /\*\*(?!\*)/g,
    endUnd: /__(?!_)/g
  },
  em: {
    start: /^_|\*/,
    middle: /^()\*(?=\S)([\s\S]*?\S)\*(?!\*)|^_(?=\S)([\s\S]*?\S)_(?!_)/,
    endAst: /\*(?!\*)/g,
    endUnd: /_(?!_)/g
  },
  link: edit(/^!?\[(label)\]\((.*?)\)/)
    .replace('label', inline$1._label)
    .getRegex(),
  reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/)
    .replace('label', inline$1._label)
    .getRegex()
});

/**
 * GFM Inline Grammar
 */

inline$1.gfm = merge$1({}, inline$1.normal, {
  escape: edit(inline$1.escape).replace('])', '~|])').getRegex(),
  _extended_email: /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,
  url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,
  _backpedal: /(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/,
  del: /^(~~?)(?=[^\s~])([\s\S]*?[^\s~])\1(?=[^~]|$)/,
  text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/
});

inline$1.gfm.url = edit(inline$1.gfm.url, 'i')
  .replace('email', inline$1.gfm._extended_email)
  .getRegex();

/**
 * GFM + Line Breaks Inline Grammar
 */

inline$1.breaks = merge$1({}, inline$1.gfm, {
  br: edit(inline$1.br).replace('{2,}', '*').getRegex(),
  text: edit(inline$1.gfm.text)
    .replace('\\b_', '\\b_| {2,}\\n')
    .replace(/\{2,\}/g, '*')
    .getRegex()
});

var rules = {
  block: block$1,
  inline: inline$1
};

const Tokenizer$1 = Tokenizer_1$1;
const { defaults: defaults$3 } = defaults$5.exports;
const { block, inline } = rules;
const { repeatString } = helpers;

/**
 * smartypants text replacement
 */
function smartypants(text) {
  return text
    // em-dashes
    .replace(/---/g, '\u2014')
    // en-dashes
    .replace(/--/g, '\u2013')
    // opening singles
    .replace(/(^|[-\u2014/(\[{"\s])'/g, '$1\u2018')
    // closing singles & apostrophes
    .replace(/'/g, '\u2019')
    // opening doubles
    .replace(/(^|[-\u2014/(\[{\u2018\s])"/g, '$1\u201c')
    // closing doubles
    .replace(/"/g, '\u201d')
    // ellipses
    .replace(/\.{3}/g, '\u2026');
}

/**
 * mangle email addresses
 */
function mangle(text) {
  let out = '',
    i,
    ch;

  const l = text.length;
  for (i = 0; i < l; i++) {
    ch = text.charCodeAt(i);
    if (Math.random() > 0.5) {
      ch = 'x' + ch.toString(16);
    }
    out += '&#' + ch + ';';
  }

  return out;
}

/**
 * Block Lexer
 */
var Lexer_1$1 = class Lexer {
  constructor(options) {
    this.tokens = [];
    this.tokens.links = Object.create(null);
    this.options = options || defaults$3;
    this.options.tokenizer = this.options.tokenizer || new Tokenizer$1();
    this.tokenizer = this.options.tokenizer;
    this.tokenizer.options = this.options;
    this.tokenizer.lexer = this;
    this.inlineQueue = [];
    this.state = {
      inLink: false,
      inRawBlock: false,
      top: true
    };

    const rules = {
      block: block.normal,
      inline: inline.normal
    };

    if (this.options.pedantic) {
      rules.block = block.pedantic;
      rules.inline = inline.pedantic;
    } else if (this.options.gfm) {
      rules.block = block.gfm;
      if (this.options.breaks) {
        rules.inline = inline.breaks;
      } else {
        rules.inline = inline.gfm;
      }
    }
    this.tokenizer.rules = rules;
  }

  /**
   * Expose Rules
   */
  static get rules() {
    return {
      block,
      inline
    };
  }

  /**
   * Static Lex Method
   */
  static lex(src, options) {
    const lexer = new Lexer(options);
    return lexer.lex(src);
  }

  /**
   * Static Lex Inline Method
   */
  static lexInline(src, options) {
    const lexer = new Lexer(options);
    return lexer.inlineTokens(src);
  }
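
  // Illustrative example (default options assumed): Lexer.lex('# hi') returns an array
  // containing a heading token ({ type: 'heading', depth: 1, ... }) and carries the
  // collected reference-link definitions on its `links` property.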

  /**
   * Preprocessing
   */
  lex(src) {
    src = src
      .replace(/\r\n|\r/g, '\n')
      .replace(/\t/g, '    ');

    this.blockTokens(src, this.tokens);

    let next;
    while (next = this.inlineQueue.shift()) {
      this.inlineTokens(next.src, next.tokens);
    }

    return this.tokens;
  }
  /**
   * Lexing
   */
  blockTokens(src, tokens = []) {
    if (this.options.pedantic) {
      src = src.replace(/^ +$/gm, '');
    }
    let token, lastToken, cutSrc, lastParagraphClipped;

    while (src) {
      if (this.options.extensions
        && this.options.extensions.block
        && this.options.extensions.block.some((extTokenizer) => {
          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
            src = src.substring(token.raw.length);
            tokens.push(token);
            return true;
          }
          return false;
        })) {
        continue;
      }

      // newline
      if (token = this.tokenizer.space(src)) {
        src = src.substring(token.raw.length);
        if (token.type) {
          tokens.push(token);
        }
        continue;
      }

      // code
      if (token = this.tokenizer.code(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        // An indented code block cannot interrupt a paragraph.
        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        continue;
      }

      // fences
      if (token = this.tokenizer.fences(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // heading
      if (token = this.tokenizer.heading(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // hr
      if (token = this.tokenizer.hr(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // blockquote
      if (token = this.tokenizer.blockquote(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // list
      if (token = this.tokenizer.list(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // html
      if (token = this.tokenizer.html(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // def
      if (token = this.tokenizer.def(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.raw;
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else if (!this.tokens.links[token.tag]) {
          this.tokens.links[token.tag] = {
            href: token.href,
            title: token.title
          };
        }
        continue;
      }

      // table (gfm)
      if (token = this.tokenizer.table(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // lheading
      if (token = this.tokenizer.lheading(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // top-level paragraph
      // prevent paragraph consuming extensions by clipping 'src' to extension start
      cutSrc = src;
      if (this.options.extensions && this.options.extensions.startBlock) {
        let startIndex = Infinity;
        const tempSrc = src.slice(1);
        let tempStart;
        this.options.extensions.startBlock.forEach(function(getStartIndex) {
          tempStart = getStartIndex.call({ lexer: this }, tempSrc);
          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
        });
        if (startIndex < Infinity && startIndex >= 0) {
          cutSrc = src.substring(0, startIndex + 1);
        }
      }
      if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
        lastToken = tokens[tokens.length - 1];
        if (lastParagraphClipped && lastToken.type === 'paragraph') {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          this.inlineQueue.pop();
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        lastParagraphClipped = (cutSrc.length !== src.length);
        src = src.substring(token.raw.length);
        continue;
      }

      // text
      if (token = this.tokenizer.text(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && lastToken.type === 'text') {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          this.inlineQueue.pop();
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        continue;
      }

      if (src) {
        const errMsg = 'Infinite loop on byte: ' + src.charCodeAt(0);
        if (this.options.silent) {
          console.error(errMsg);
          break;
        } else {
          throw new Error(errMsg);
        }
      }
    }

    this.state.top = true;
    return tokens;
  }
  inline(src, tokens) {
    this.inlineQueue.push({ src, tokens });
  }

  /**
   * Lexing/Compiling
   */
  inlineTokens(src, tokens = []) {
    let token, lastToken, cutSrc;

    // String with links masked to avoid interference with em and strong
    let maskedSrc = src;
    let match;
    let keepPrevChar, prevChar;

    // Mask out reflinks
    if (this.tokens.links) {
      const links = Object.keys(this.tokens.links);
      if (links.length > 0) {
        while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
          if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
            maskedSrc = maskedSrc.slice(0, match.index) + '[' + repeatString('a', match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
          }
        }
      }
    }
    // Mask out other blocks
    while ((match = this.tokenizer.rules.inline.blockSkip.exec(maskedSrc)) != null) {
      maskedSrc = maskedSrc.slice(0, match.index) + '[' + repeatString('a', match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.blockSkip.lastIndex);
    }

    // Mask out escaped em & strong delimiters
    while ((match = this.tokenizer.rules.inline.escapedEmSt.exec(maskedSrc)) != null) {
      maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.escapedEmSt.lastIndex);
    }

    while (src) {
      if (!keepPrevChar) {
        prevChar = '';
      }
      keepPrevChar = false;

      // extensions
      if (this.options.extensions
        && this.options.extensions.inline
        && this.options.extensions.inline.some((extTokenizer) => {
          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
            src = src.substring(token.raw.length);
            tokens.push(token);
            return true;
          }
          return false;
        })) {
        continue;
      }

      // escape
      if (token = this.tokenizer.escape(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // tag
      if (token = this.tokenizer.tag(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }

      // link
      if (token = this.tokenizer.link(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // reflink, nolink
      if (token = this.tokenizer.reflink(src, this.tokens.links)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }

      // em & strong
      if (token = this.tokenizer.emStrong(src, maskedSrc, prevChar)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // code
      if (token = this.tokenizer.codespan(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // br
      if (token = this.tokenizer.br(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // del (gfm)
      if (token = this.tokenizer.del(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // autolink
      if (token = this.tokenizer.autolink(src, mangle)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // url (gfm)
      if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }

      // text
      // prevent inlineText consuming extensions by clipping 'src' to extension start
      cutSrc = src;
      if (this.options.extensions && this.options.extensions.startInline) {
        let startIndex = Infinity;
        const tempSrc = src.slice(1);
        let tempStart;
        this.options.extensions.startInline.forEach(function(getStartIndex) {
          tempStart = getStartIndex.call({ lexer: this }, tempSrc);
          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
        });
        if (startIndex < Infinity && startIndex >= 0) {
          cutSrc = src.substring(0, startIndex + 1);
        }
      }
      if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
        src = src.substring(token.raw.length);
        if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
          prevChar = token.raw.slice(-1);
        }
        keepPrevChar = true;
        lastToken = tokens[tokens.length - 1];
        if (lastToken && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }

      if (src) {
        const errMsg = 'Infinite loop on byte: ' + src.charCodeAt(0);
        if (this.options.silent) {
          console.error(errMsg);
          break;
        } else {
          throw new Error(errMsg);
        }
      }
    }

    return tokens;
  }
};
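
// Usage sketch (illustrative only, not executed by this bundle): the static
// helper wraps construction, so tokenizing a document is a single call.
// Token shapes below are approximate.
//
//   const tokens = Lexer_1$1.lex('# Hello *world*');
//   // -> [{ type: 'heading', depth: 1, text: 'Hello *world*', tokens: [...] }, ...]
//   // tokens.links holds any reference-style link definitions found while lexing.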
const { defaults: defaults$2 } = defaults$5.exports;
const {
  cleanUrl,
  escape: escape$1
} = helpers;

/**
 * Renderer
 */
var Renderer_1$1 = class Renderer {
  constructor(options) {
    this.options = options || defaults$2;
  }

  code(code, infostring, escaped) {
    const lang = (infostring || '').match(/\S*/)[0];
    if (this.options.highlight) {
      const out = this.options.highlight(code, lang);
      if (out != null && out !== code) {
        escaped = true;
        code = out;
      }
    }

    code = code.replace(/\n$/, '') + '\n';

    if (!lang) {
      return '<pre><code>'
        + (escaped ? code : escape$1(code, true))
        + '</code></pre>\n';
    }

    return '<pre><code class="'
      + this.options.langPrefix
      + escape$1(lang, true)
      + '">'
      + (escaped ? code : escape$1(code, true))
      + '</code></pre>\n';
  }

  blockquote(quote) {
    return '<blockquote>\n' + quote + '</blockquote>\n';
  }

  html(html) {
    return html;
  }

  heading(text, level, raw, slugger) {
    if (this.options.headerIds) {
      return '<h'
        + level
        + ' id="'
        + this.options.headerPrefix
        + slugger.slug(raw)
        + '">'
        + text
        + '</h'
        + level
        + '>\n';
    }
    // ignore IDs
    return '<h' + level + '>' + text + '</h' + level + '>\n';
  }

  hr() {
    return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
  }

  list(body, ordered, start) {
    const type = ordered ? 'ol' : 'ul',
      startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
    return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
  }

  listitem(text) {
    return '<li>' + text + '</li>\n';
  }

  checkbox(checked) {
    return '<input '
      + (checked ? 'checked="" ' : '')
      + 'disabled="" type="checkbox"'
      + (this.options.xhtml ? ' /' : '')
      + '> ';
  }

  paragraph(text) {
    return '<p>' + text + '</p>\n';
  }

  table(header, body) {
    if (body) body = '<tbody>' + body + '</tbody>';

    return '<table>\n'
      + '<thead>\n'
      + header
      + '</thead>\n'
      + body
      + '</table>\n';
  }

  tablerow(content) {
    return '<tr>\n' + content + '</tr>\n';
  }

  tablecell(content, flags) {
    const type = flags.header ? 'th' : 'td';
    const tag = flags.align
      ? '<' + type + ' align="' + flags.align + '">'
      : '<' + type + '>';
    return tag + content + '</' + type + '>\n';
  }

  // span level renderer
  strong(text) {
    return '<strong>' + text + '</strong>';
  }

  em(text) {
    return '<em>' + text + '</em>';
  }

  codespan(text) {
    return '<code>' + text + '</code>';
  }

  br() {
    return this.options.xhtml ? '<br/>' : '<br>';
  }

  del(text) {
    return '<del>' + text + '</del>';
  }

  link(href, title, text) {
    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
    if (href === null) {
      return text;
    }
    let out = '<a href="' + escape$1(href) + '"';
    if (title) {
      out += ' title="' + title + '"';
    }
    out += '>' + text + '</a>';
    return out;
  }

  image(href, title, text) {
    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
    if (href === null) {
      return text;
    }
    let out = '<img src="' + href + '" alt="' + text + '"';
    if (title) {
      out += ' title="' + title + '"';
    }
    out += this.options.xhtml ? '/>' : '>';
    return out;
  }

  text(text) {
    return text;
  }
};
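
// Usage sketch (illustrative only): each renderer method returns an HTML
// string for one token. With the default options, for example:
//
//   const r = new Renderer_1$1();
//   r.paragraph('hi');                            // '<p>hi</p>\n'
//   r.codespan('x + y');                          // '<code>x + y</code>'
//   r.link('https://example.com', null, 'site');  // '<a href="https://example.com">site</a>'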
/**
 * TextRenderer
 * returns only the textual part of the token
 */
var TextRenderer_1$1 = class TextRenderer {
  // no need for block level renderers
  strong(text) {
    return text;
  }

  em(text) {
    return text;
  }

  codespan(text) {
    return text;
  }

  del(text) {
    return text;
  }

  html(text) {
    return text;
  }

  text(text) {
    return text;
  }

  link(href, title, text) {
    return '' + text;
  }

  image(href, title, text) {
    return '' + text;
  }

  br() {
    return '';
  }
};
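
// Note (illustrative only): TextRenderer is the plain-text counterpart the
// parser below uses to recover the unformatted text of a heading before
// handing it to the slugger, e.g.
//
//   new TextRenderer_1$1().em('world');   // 'world' (markup dropped)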
/**
 * Slugger generates header id
 */
var Slugger_1$1 = class Slugger {
  constructor() {
    this.seen = {};
  }

  serialize(value) {
    return value
      .toLowerCase()
      .trim()
      // remove html tags
      .replace(/<[!\/a-z].*?>/ig, '')
      // remove unwanted chars
      .replace(/[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&()*+,./:;<=>?@[\]^`{|}~]/g, '')
      .replace(/\s/g, '-');
  }

  /**
   * Finds the next safe (unique) slug to use
   */
  getNextSafeSlug(originalSlug, isDryRun) {
    let slug = originalSlug;
    let occurenceAccumulator = 0;
    if (this.seen.hasOwnProperty(slug)) {
      occurenceAccumulator = this.seen[originalSlug];
      do {
        occurenceAccumulator++;
        slug = originalSlug + '-' + occurenceAccumulator;
      } while (this.seen.hasOwnProperty(slug));
    }
    if (!isDryRun) {
      this.seen[originalSlug] = occurenceAccumulator;
      this.seen[slug] = 0;
    }
    return slug;
  }

  /**
   * Convert string to unique id
   * @param {object} options
   * @param {boolean} options.dryrun Generates the next unique slug without updating the internal accumulator.
   */
  slug(value, options = {}) {
    const slug = this.serialize(value);
    return this.getNextSafeSlug(slug, options.dryrun);
  }
};
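
// Usage sketch (illustrative only): slugs are lower-cased, punctuation is
// stripped, whitespace becomes '-', and repeats get a numeric suffix.
//
//   const slugger = new Slugger_1$1();
//   slugger.slug('My Heading!');                   // 'my-heading'
//   slugger.slug('My Heading!');                   // 'my-heading-1'
//   slugger.slug('My Heading!', { dryrun: true }); // 'my-heading-2', internal state untouched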
const Renderer$1 = Renderer_1$1;
const TextRenderer$1 = TextRenderer_1$1;
const Slugger$1 = Slugger_1$1;

const { defaults: defaults$1 } = defaults$5.exports;
const {
  unescape
} = helpers;

/**
 * Parsing & Compiling
 */
var Parser_1$1 = class Parser {
  constructor(options) {
    this.options = options || defaults$1;
    this.options.renderer = this.options.renderer || new Renderer$1();
    this.renderer = this.options.renderer;
    this.renderer.options = this.options;
    this.textRenderer = new TextRenderer$1();
    this.slugger = new Slugger$1();
  }

  /**
   * Static Parse Method
   */
  static parse(tokens, options) {
    const parser = new Parser(options);
    return parser.parse(tokens);
  }

  /**
   * Static Parse Inline Method
   */
  static parseInline(tokens, options) {
    const parser = new Parser(options);
    return parser.parseInline(tokens);
  }

  /**
   * Parse Loop
   */
  parse(tokens, top = true) {
    let out = '',
      i,
      j,
      k,
      l2,
      l3,
      row,
      cell,
      header,
      body,
      token,
      ordered,
      start,
      loose,
      itemBody,
      item,
      checked,
      task,
      checkbox,
      ret;

    const l = tokens.length;
    for (i = 0; i < l; i++) {
      token = tokens[i];

      // Run any renderer extensions
      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
        ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
        if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
          out += ret || '';
          continue;
        }
      }

      switch (token.type) {
        case 'space': {
          continue;
        }
        case 'hr': {
          out += this.renderer.hr();
          continue;
        }
        case 'heading': {
          out += this.renderer.heading(
            this.parseInline(token.tokens),
            token.depth,
            unescape(this.parseInline(token.tokens, this.textRenderer)),
            this.slugger);
          continue;
        }
        case 'code': {
          out += this.renderer.code(token.text,
            token.lang,
            token.escaped);
          continue;
        }
        case 'table': {
          header = '';

          // header
          cell = '';
          l2 = token.header.length;
          for (j = 0; j < l2; j++) {
            cell += this.renderer.tablecell(
              this.parseInline(token.header[j].tokens),
              { header: true, align: token.align[j] }
            );
          }
          header += this.renderer.tablerow(cell);

          body = '';
          l2 = token.rows.length;
          for (j = 0; j < l2; j++) {
            row = token.rows[j];

            cell = '';
            l3 = row.length;
            for (k = 0; k < l3; k++) {
              cell += this.renderer.tablecell(
                this.parseInline(row[k].tokens),
                { header: false, align: token.align[k] }
              );
            }

            body += this.renderer.tablerow(cell);
          }
          out += this.renderer.table(header, body);
          continue;
        }
        case 'blockquote': {
          body = this.parse(token.tokens);
          out += this.renderer.blockquote(body);
          continue;
        }
        case 'list': {
          ordered = token.ordered;
          start = token.start;
          loose = token.loose;
          l2 = token.items.length;

          body = '';
          for (j = 0; j < l2; j++) {
            item = token.items[j];
            checked = item.checked;
            task = item.task;

            itemBody = '';
            if (item.task) {
              checkbox = this.renderer.checkbox(checked);
              if (loose) {
                if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
                  item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
                  if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') {
                    item.tokens[0].tokens[0].text = checkbox + ' ' + item.tokens[0].tokens[0].text;
                  }
                } else {
                  item.tokens.unshift({
                    type: 'text',
                    text: checkbox
                  });
                }
              } else {
                itemBody += checkbox;
              }
            }

            itemBody += this.parse(item.tokens, loose);
            body += this.renderer.listitem(itemBody, task, checked);
          }

          out += this.renderer.list(body, ordered, start);
          continue;
        }
        case 'html': {
          // TODO parse inline content if parameter markdown=1
          out += this.renderer.html(token.text);
          continue;
        }
        case 'paragraph': {
          out += this.renderer.paragraph(this.parseInline(token.tokens));
          continue;
        }
        case 'text': {
          body = token.tokens ? this.parseInline(token.tokens) : token.text;
          while (i + 1 < l && tokens[i + 1].type === 'text') {
            token = tokens[++i];
            body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
          }
          out += top ? this.renderer.paragraph(body) : body;
          continue;
        }

        default: {
          const errMsg = 'Token with "' + token.type + '" type was not found.';
          if (this.options.silent) {
            console.error(errMsg);
            return;
          } else {
            throw new Error(errMsg);
          }
        }
      }
    }

    return out;
  }

  /**
   * Parse Inline Tokens
   */
  parseInline(tokens, renderer) {
    renderer = renderer || this.renderer;
    let out = '',
      i,
      token,
      ret;

    const l = tokens.length;
    for (i = 0; i < l; i++) {
      token = tokens[i];

      // Run any renderer extensions
      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
        ret = this.options.extensions.renderers[token.type].call({ parser: this }, token);
        if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
          out += ret || '';
          continue;
        }
      }

      switch (token.type) {
        case 'escape': {
          out += renderer.text(token.text);
          break;
        }
        case 'html': {
          out += renderer.html(token.text);
          break;
        }
        case 'link': {
          out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
          break;
        }
        case 'image': {
          out += renderer.image(token.href, token.title, token.text);
          break;
        }
        case 'strong': {
          out += renderer.strong(this.parseInline(token.tokens, renderer));
          break;
        }
        case 'em': {
          out += renderer.em(this.parseInline(token.tokens, renderer));
          break;
        }
        case 'codespan': {
          out += renderer.codespan(token.text);
          break;
        }
        case 'br': {
          out += renderer.br();
          break;
        }
        case 'del': {
          out += renderer.del(this.parseInline(token.tokens, renderer));
          break;
        }
        case 'text': {
          out += renderer.text(token.text);
          break;
        }
        default: {
          const errMsg = 'Token with "' + token.type + '" type was not found.';
          if (this.options.silent) {
            console.error(errMsg);
            return;
          } else {
            throw new Error(errMsg);
          }
        }
      }
    }

    return out;
  }
};
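
// Usage sketch (illustrative only): the parser turns a token list produced by
// the lexer into HTML, so the two classes compose like this:
//
//   const tokens = Lexer_1$1.lex('**bold** text');
//   Parser_1$1.parse(tokens);   // '<p><strong>bold</strong> text</p>\n'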
const Lexer = Lexer_1$1;
const Parser = Parser_1$1;
const Tokenizer = Tokenizer_1$1;
const Renderer = Renderer_1$1;
const TextRenderer = TextRenderer_1$1;
const Slugger = Slugger_1$1;

const {
  merge,
  checkSanitizeDeprecation,
  escape
} = helpers;
const {
  getDefaults,
  changeDefaults,
  defaults
} = defaults$5.exports;

/**
 * Marked
 */
function marked(src, opt, callback) {
  // throw error in case of non string input
  if (typeof src === 'undefined' || src === null) {
    throw new Error('marked(): input parameter is undefined or null');
  }
  if (typeof src !== 'string') {
    throw new Error('marked(): input parameter is of type '
      + Object.prototype.toString.call(src) + ', string expected');
  }

  if (typeof opt === 'function') {
    callback = opt;
    opt = null;
  }

  opt = merge({}, marked.defaults, opt || {});
  checkSanitizeDeprecation(opt);

  if (callback) {
    const highlight = opt.highlight;
    let tokens;

    try {
      tokens = Lexer.lex(src, opt);
    } catch (e) {
      return callback(e);
    }

    const done = function(err) {
      let out;

      if (!err) {
        try {
          if (opt.walkTokens) {
            marked.walkTokens(tokens, opt.walkTokens);
          }
          out = Parser.parse(tokens, opt);
        } catch (e) {
          err = e;
        }
      }

      opt.highlight = highlight;

      return err
        ? callback(err)
        : callback(null, out);
    };

    if (!highlight || highlight.length < 3) {
      return done();
    }

    delete opt.highlight;

    if (!tokens.length) return done();

    let pending = 0;
    marked.walkTokens(tokens, function(token) {
      if (token.type === 'code') {
        pending++;
        setTimeout(() => {
          highlight(token.text, token.lang, function(err, code) {
            if (err) {
              return done(err);
            }
            if (code != null && code !== token.text) {
              token.text = code;
              token.escaped = true;
            }

            pending--;
            if (pending === 0) {
              done();
            }
          });
        }, 0);
      }
    });

    if (pending === 0) {
      done();
    }

    return;
  }

  try {
    const tokens = Lexer.lex(src, opt);
    if (opt.walkTokens) {
      marked.walkTokens(tokens, opt.walkTokens);
    }
    return Parser.parse(tokens, opt);
  } catch (e) {
    e.message += '\nPlease report this to https://github.com/markedjs/marked.';
    if (opt.silent) {
      return '<p>An error occurred:</p><pre>'
        + escape(e.message + '', true)
        + '</pre>';
    }
    throw e;
  }
}
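
// Usage sketch (illustrative only): the synchronous form returns the HTML
// string; passing a callback takes the async path above, which is intended
// for an asynchronous highlight option with a (code, lang, callback) signature.
//
//   marked('# Hello');                         // '<h1 id="hello">Hello</h1>\n'
//   marked('# Hello', { headerIds: false });   // '<h1>Hello</h1>\n'
//   marked(src, { highlight }, (err, html) => { /* html ready */ });
//   // 'highlight' here stands for any async highlighter you supply.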
/**
 * Options
 */
marked.options =
marked.setOptions = function(opt) {
  merge(marked.defaults, opt);
  changeDefaults(marked.defaults);
  return marked;
};

marked.getDefaults = getDefaults;

marked.defaults = defaults;
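
// Usage sketch (illustrative only): setOptions merges into the shared
// defaults used by every later marked() call, and returns marked for chaining.
//
//   marked.setOptions({ breaks: true });
//   marked('line one\nline two');   // '<p>line one<br>line two</p>\n'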
/**
 * Use Extension
 */
marked.use = function(...args) {
  const opts = merge({}, ...args);
  const extensions = marked.defaults.extensions || { renderers: {}, childTokens: {} };
  let hasExtensions;

  args.forEach((pack) => {
    // ==-- Parse "addon" extensions --== //
    if (pack.extensions) {
      hasExtensions = true;
      pack.extensions.forEach((ext) => {
        if (!ext.name) {
          throw new Error('extension name required');
        }
        if (ext.renderer) { // Renderer extensions
          const prevRenderer = extensions.renderers ? extensions.renderers[ext.name] : null;
          if (prevRenderer) {
            // Replace extension with func to run new extension but fall back if false
            extensions.renderers[ext.name] = function(...args) {
              let ret = ext.renderer.apply(this, args);
              if (ret === false) {
                ret = prevRenderer.apply(this, args);
              }
              return ret;
            };
          } else {
            extensions.renderers[ext.name] = ext.renderer;
          }
        }
        if (ext.tokenizer) { // Tokenizer Extensions
          if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
            throw new Error("extension level must be 'block' or 'inline'");
          }
          if (extensions[ext.level]) {
            extensions[ext.level].unshift(ext.tokenizer);
          } else {
            extensions[ext.level] = [ext.tokenizer];
          }
          if (ext.start) { // Function to check for start of token
            if (ext.level === 'block') {
              if (extensions.startBlock) {
                extensions.startBlock.push(ext.start);
              } else {
                extensions.startBlock = [ext.start];
              }
            } else if (ext.level === 'inline') {
              if (extensions.startInline) {
                extensions.startInline.push(ext.start);
              } else {
                extensions.startInline = [ext.start];
              }
            }
          }
        }
        if (ext.childTokens) { // Child tokens to be visited by walkTokens
          extensions.childTokens[ext.name] = ext.childTokens;
        }
      });
    }

    // ==-- Parse "overwrite" extensions --== //
    if (pack.renderer) {
      const renderer = marked.defaults.renderer || new Renderer();
      for (const prop in pack.renderer) {
        const prevRenderer = renderer[prop];
        // Replace renderer with func to run extension, but fall back if false
        renderer[prop] = (...args) => {
          let ret = pack.renderer[prop].apply(renderer, args);
          if (ret === false) {
            ret = prevRenderer.apply(renderer, args);
          }
          return ret;
        };
      }
      opts.renderer = renderer;
    }
    if (pack.tokenizer) {
      const tokenizer = marked.defaults.tokenizer || new Tokenizer();
      for (const prop in pack.tokenizer) {
        const prevTokenizer = tokenizer[prop];
        // Replace tokenizer with func to run extension, but fall back if false
        tokenizer[prop] = (...args) => {
          let ret = pack.tokenizer[prop].apply(tokenizer, args);
          if (ret === false) {
            ret = prevTokenizer.apply(tokenizer, args);
          }
          return ret;
        };
      }
      opts.tokenizer = tokenizer;
    }

    // ==-- Parse WalkTokens extensions --== //
    if (pack.walkTokens) {
      const walkTokens = marked.defaults.walkTokens;
      opts.walkTokens = (token) => {
        pack.walkTokens.call(this, token);
        if (walkTokens) {
          walkTokens(token);
        }
      };
    }

    if (hasExtensions) {
      opts.extensions = extensions;
    }

    marked.setOptions(opts);
  });
};
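
// Usage sketch (illustrative only): a hypothetical inline extension named
// 'mention' showing the shape marked.use() reads above -- name, level, start,
// tokenizer and renderer. The name, regex and URL are made up for the example.
//
//   marked.use({
//     extensions: [{
//       name: 'mention',
//       level: 'inline',
//       start(src) { return src.indexOf('@'); },
//       tokenizer(src) {
//         const match = /^@(\w+)/.exec(src);
//         if (match) {
//           return { type: 'mention', raw: match[0], user: match[1] };
//         }
//       },
//       renderer(token) {
//         return '<a href="/users/' + token.user + '">@' + token.user + '</a>';
//       }
//     }]
//   });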
/**
 * Run callback for every token
 */
marked.walkTokens = function(tokens, callback) {
  for (const token of tokens) {
    callback(token);
    switch (token.type) {
      case 'table': {
        for (const cell of token.header) {
          marked.walkTokens(cell.tokens, callback);
        }
        for (const row of token.rows) {
          for (const cell of row) {
            marked.walkTokens(cell.tokens, callback);
          }
        }
        break;
      }
      case 'list': {
        marked.walkTokens(token.items, callback);
        break;
      }
      default: {
        if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
          marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
            marked.walkTokens(token[childTokens], callback);
          });
        } else if (token.tokens) {
          marked.walkTokens(token.tokens, callback);
        }
      }
    }
  }
};
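
// Usage sketch (illustrative only): walkTokens visits every token (including
// table cells, list items and any registered childTokens), so it can inspect
// or mutate tokens before rendering, using the lexer/parser exposed below:
//
//   const tokens = marked.lexer('# One\n\n## Two');
//   marked.walkTokens(tokens, (token) => {
//     if (token.type === 'heading') token.depth += 1;
//   });
//   marked.parser(tokens);   // headings rendered one level deeper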
/**
 * Parse Inline
 */
marked.parseInline = function(src, opt) {
  // throw error in case of non string input
  if (typeof src === 'undefined' || src === null) {
    throw new Error('marked.parseInline(): input parameter is undefined or null');
  }
  if (typeof src !== 'string') {
    throw new Error('marked.parseInline(): input parameter is of type '
      + Object.prototype.toString.call(src) + ', string expected');
  }

  opt = merge({}, marked.defaults, opt || {});
  checkSanitizeDeprecation(opt);

  try {
    const tokens = Lexer.lexInline(src, opt);
    if (opt.walkTokens) {
      marked.walkTokens(tokens, opt.walkTokens);
    }
    return Parser.parseInline(tokens, opt);
  } catch (e) {
    e.message += '\nPlease report this to https://github.com/markedjs/marked.';
    if (opt.silent) {
      return '<p>An error occurred:</p><pre>'
        + escape(e.message + '', true)
        + '</pre>';
    }
    throw e;
  }
};
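
// Usage sketch (illustrative only): parseInline skips block-level handling,
// so no wrapping <p> is emitted:
//
//   marked.parseInline('**bold** and `code`');
//   // '<strong>bold</strong> and <code>code</code>'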
2019-11-07 12:49:10 -06:00
/ * *
* Expose
* /
2021-08-16 03:09:18 +00:00
marked . Parser = Parser ;
marked . parser = Parser . parse ;
marked . Renderer = Renderer ;
marked . TextRenderer = TextRenderer ;
marked . Lexer = Lexer ;
marked . lexer = Lexer . lex ;
marked . Tokenizer = Tokenizer ;
marked . Slugger = Slugger ;
2019-11-07 12:49:10 -06:00
marked . parse = marked ;
2021-10-06 13:32:43 -07:00
marked$1 . exports = marked ;
var parse = marked$1 . exports . parse = marked ;
var Parser _1 = marked$1 . exports . Parser = Parser ;
var parser = marked$1 . exports . parser = Parser . parse ;
var Renderer _1 = marked$1 . exports . Renderer = Renderer ;
var TextRenderer _1 = marked$1 . exports . TextRenderer = TextRenderer ;
var Lexer _1 = marked$1 . exports . Lexer = Lexer ;
var lexer = marked$1 . exports . lexer = Lexer . lex ;
var Tokenizer _1 = marked$1 . exports . Tokenizer = Tokenizer ;
var Slugger _1 = marked$1 . exports . Slugger = Slugger ;
var marked _1 = marked$1 . exports ;
2019-11-07 12:49:10 -06:00
2021-10-06 13:32:43 -07:00
export { Lexer _1 as Lexer , Parser _1 as Parser , Renderer _1 as Renderer , Slugger _1 as Slugger , TextRenderer _1 as TextRenderer , Tokenizer _1 as Tokenizer , marked _1 as default , lexer , parse , parser } ;