fix: Migrate to Typescript (#2805)
BREAKING CHANGE: Migrate to Typescript
parent 929d235ed2
commit cb549065f1
@ -1,5 +1,8 @@
|
||||
{
|
||||
"extends": "standard",
|
||||
"extends": ["standard", "plugin:@typescript-eslint/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"root": true,
|
||||
"rules": {
|
||||
"semi": ["error", "always"],
|
||||
"indent": ["error", 2, {
|
||||
@ -15,9 +18,14 @@
|
||||
"no-control-regex": "off",
|
||||
"no-prototype-builtins": "off",
|
||||
"no-extra-semi": "error",
|
||||
|
||||
"prefer-const": "error",
|
||||
"no-var": "error"
|
||||
"no-var": "error",
|
||||
"@typescript-eslint/ban-ts-comment": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-namespace": "off",
|
||||
"@typescript-eslint/no-unused-vars": ["error", { "args": "none" }]
|
||||
},
|
||||
"env": {
|
||||
"node": true
|
||||
|
@ -47,7 +47,6 @@ npm install -g marked
|
||||
|
||||
```sh
|
||||
npm install marked
|
||||
npm install @types/marked # For TypeScript projects
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
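With this change marked bundles its own type declarations (`lib/marked.d.ts`, wired up via the new `types` fields in package.json below), so installing `@types/marked` separately is no longer needed. A minimal sketch of what that looks like from a TypeScript consumer, assuming a plain ESM setup:

```ts
import { marked } from 'marked';

// Synchronous overload: returns a string.
const html: string = marked.parse('**bundled types**');

// With `async: true` the overload returns Promise<string> instead.
const pending: Promise<string> = marked.parse('**bundled types**', { async: true });

pending.then((out) => console.log(html, out));
```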
0
bin/marked.js
Normal file → Executable file
2810
lib/marked.cjs
File diff suppressed because one or more lines are too long
1
lib/marked.cjs.map
Normal file
File diff suppressed because one or more lines are too long
624
lib/marked.d.ts
vendored
Normal file
@ -0,0 +1,624 @@
|
||||
type Token = (Tokens.Space | Tokens.Code | Tokens.Heading | Tokens.Table | Tokens.Hr | Tokens.Blockquote | Tokens.List | Tokens.ListItem | Tokens.Paragraph | Tokens.HTML | Tokens.Text | Tokens.Def | Tokens.Escape | Tokens.Tag | Tokens.Image | Tokens.Link | Tokens.Strong | Tokens.Em | Tokens.Codespan | Tokens.Br | Tokens.Del) & {
|
||||
loose?: boolean;
|
||||
tokens?: Token[];
|
||||
};
|
||||
declare namespace Tokens {
|
||||
interface Space {
|
||||
type: 'space';
|
||||
raw: string;
|
||||
}
|
||||
interface Code {
|
||||
type: 'code';
|
||||
raw: string;
|
||||
codeBlockStyle?: 'indented' | undefined;
|
||||
lang?: string | undefined;
|
||||
text: string;
|
||||
escaped?: boolean;
|
||||
}
|
||||
interface Heading {
|
||||
type: 'heading';
|
||||
raw: string;
|
||||
depth: number;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface Table {
|
||||
type: 'table';
|
||||
raw?: string;
|
||||
align: Array<'center' | 'left' | 'right' | null>;
|
||||
header: TableCell[];
|
||||
rows: TableCell[][];
|
||||
}
|
||||
interface TableCell {
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
interface Hr {
|
||||
type: 'hr';
|
||||
raw: string;
|
||||
}
|
||||
interface Blockquote {
|
||||
type: 'blockquote';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface List {
|
||||
type: 'list';
|
||||
raw: string;
|
||||
ordered: boolean;
|
||||
start: number | '';
|
||||
loose: boolean;
|
||||
items: ListItem[];
|
||||
}
|
||||
interface ListItem {
|
||||
type: 'list_item';
|
||||
raw: string;
|
||||
task: boolean;
|
||||
checked?: boolean | undefined;
|
||||
loose: boolean;
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
interface Paragraph {
|
||||
type: 'paragraph';
|
||||
raw: string;
|
||||
pre?: boolean | undefined;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface HTML {
|
||||
type: 'html';
|
||||
raw: string;
|
||||
pre: boolean;
|
||||
text: string;
|
||||
block: boolean;
|
||||
}
|
||||
interface Text {
|
||||
type: 'text';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
interface Def {
|
||||
type: 'def';
|
||||
raw: string;
|
||||
tag: string;
|
||||
href: string;
|
||||
title: string;
|
||||
}
|
||||
interface Escape {
|
||||
type: 'escape';
|
||||
raw: string;
|
||||
text: string;
|
||||
}
|
||||
interface Tag {
|
||||
type: 'text' | 'html';
|
||||
raw: string;
|
||||
inLink: boolean;
|
||||
inRawBlock: boolean;
|
||||
text: string;
|
||||
block: boolean;
|
||||
}
|
||||
interface Link {
|
||||
type: 'link';
|
||||
raw: string;
|
||||
href: string;
|
||||
title?: string | null;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface Image {
|
||||
type: 'image';
|
||||
raw: string;
|
||||
href: string;
|
||||
title: string | null;
|
||||
text: string;
|
||||
}
|
||||
interface Strong {
|
||||
type: 'strong';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface Em {
|
||||
type: 'em';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface Codespan {
|
||||
type: 'codespan';
|
||||
raw: string;
|
||||
text: string;
|
||||
}
|
||||
interface Br {
|
||||
type: 'br';
|
||||
raw: string;
|
||||
}
|
||||
interface Del {
|
||||
type: 'del';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
interface Generic {
|
||||
[index: string]: any;
|
||||
type: string;
|
||||
raw: string;
|
||||
tokens?: Token[] | undefined;
|
||||
}
|
||||
}
|
||||
type Links = Record<string, Pick<Tokens.Link | Tokens.Image, 'href' | 'title'>>;
|
||||
type TokensList = Token[] & {
|
||||
links: Links;
|
||||
};
|
||||
|
||||
/**
|
||||
* Renderer
|
||||
*/
|
||||
declare class _Renderer {
|
||||
options: MarkedOptions;
|
||||
constructor(options?: MarkedOptions);
|
||||
code(code: string, infostring: string | undefined, escaped: boolean): string;
|
||||
blockquote(quote: string): string;
|
||||
html(html: string, block?: boolean): string;
|
||||
heading(text: string, level: number, raw: string, slugger: _Slugger): string;
|
||||
hr(): string;
|
||||
list(body: string, ordered: boolean, start: number | ''): string;
|
||||
listitem(text: string, task: boolean, checked: boolean): string;
|
||||
checkbox(checked: boolean): string;
|
||||
paragraph(text: string): string;
|
||||
table(header: string, body: string): string;
|
||||
tablerow(content: string): string;
|
||||
tablecell(content: string, flags: {
|
||||
header: boolean;
|
||||
align: 'center' | 'left' | 'right' | null;
|
||||
}): string;
|
||||
/**
|
||||
* span level renderer
|
||||
*/
|
||||
strong(text: string): string;
|
||||
em(text: string): string;
|
||||
codespan(text: string): string;
|
||||
br(): string;
|
||||
del(text: string): string;
|
||||
link(href: string, title: string | null | undefined, text: string): string;
|
||||
image(href: string, title: string | null, text: string): string;
|
||||
text(text: string): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* TextRenderer
|
||||
* returns only the textual part of the token
|
||||
*/
|
||||
declare class _TextRenderer {
|
||||
strong(text: string): string;
|
||||
em(text: string): string;
|
||||
codespan(text: string): string;
|
||||
del(text: string): string;
|
||||
html(text: string): string;
|
||||
text(text: string): string;
|
||||
link(href: string, title: string | null | undefined, text: string): string;
|
||||
image(href: string, title: string | null, text: string): string;
|
||||
br(): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Slugger generates header id
|
||||
*/
|
||||
declare class _Slugger {
|
||||
seen: {
|
||||
[slugValue: string]: number;
|
||||
};
|
||||
constructor();
|
||||
serialize(value: string): string;
|
||||
/**
|
||||
* Finds the next safe (unique) slug to use
|
||||
*/
|
||||
getNextSafeSlug(originalSlug: string, isDryRun: boolean | undefined): string;
|
||||
/**
|
||||
* Convert string to unique id
|
||||
*/
|
||||
slug(value: string, options?: SluggerOptions): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parsing & Compiling
|
||||
*/
|
||||
declare class _Parser {
|
||||
options: MarkedOptions;
|
||||
renderer: _Renderer;
|
||||
textRenderer: _TextRenderer;
|
||||
slugger: _Slugger;
|
||||
constructor(options?: MarkedOptions);
|
||||
/**
|
||||
* Static Parse Method
|
||||
*/
|
||||
static parse(tokens: Token[], options?: MarkedOptions): string;
|
||||
/**
|
||||
* Static Parse Inline Method
|
||||
*/
|
||||
static parseInline(tokens: Token[], options?: MarkedOptions): string;
|
||||
/**
|
||||
* Parse Loop
|
||||
*/
|
||||
parse(tokens: Token[], top?: boolean): string;
|
||||
/**
|
||||
* Parse Inline Tokens
|
||||
*/
|
||||
parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tokenizer
|
||||
*/
|
||||
declare class _Tokenizer {
|
||||
options: MarkedOptions;
|
||||
rules: any;
|
||||
lexer: _Lexer;
|
||||
constructor(options?: MarkedOptions);
|
||||
space(src: string): Tokens.Space | undefined;
|
||||
code(src: string): Tokens.Code | undefined;
|
||||
fences(src: string): Tokens.Code | undefined;
|
||||
heading(src: string): Tokens.Heading | undefined;
|
||||
hr(src: string): Tokens.Hr | undefined;
|
||||
blockquote(src: string): Tokens.Blockquote | undefined;
|
||||
list(src: string): Tokens.List | undefined;
|
||||
html(src: string): Tokens.HTML | Tokens.Paragraph | undefined;
|
||||
def(src: string): Tokens.Def | undefined;
|
||||
table(src: string): Tokens.Table | undefined;
|
||||
lheading(src: string): Tokens.Heading | undefined;
|
||||
paragraph(src: string): Tokens.Paragraph | undefined;
|
||||
text(src: string): Tokens.Text | undefined;
|
||||
escape(src: string): Tokens.Escape | undefined;
|
||||
tag(src: string): Tokens.Tag | undefined;
|
||||
link(src: string): Tokens.Link | Tokens.Image | undefined;
|
||||
reflink(src: string, links: Links): Tokens.Link | Tokens.Image | Tokens.Text | undefined;
|
||||
emStrong(src: string, maskedSrc: string, prevChar?: string): Tokens.Em | Tokens.Strong | undefined;
|
||||
codespan(src: string): Tokens.Codespan | undefined;
|
||||
br(src: string): Tokens.Br | undefined;
|
||||
del(src: string): Tokens.Del | undefined;
|
||||
autolink(src: string, mangle: (cap: string) => string): Tokens.Link | undefined;
|
||||
url(src: string, mangle: (cap: string) => string): Tokens.Link | undefined;
|
||||
inlineText(src: string, smartypants: (cap: string) => string): Tokens.Text | undefined;
|
||||
}
|
||||
|
||||
interface SluggerOptions {
|
||||
/** Generates the next unique slug without updating the internal accumulator. */
|
||||
dryrun?: boolean;
|
||||
}
|
||||
interface TokenizerThis {
|
||||
lexer: _Lexer;
|
||||
}
|
||||
interface TokenizerExtension {
|
||||
name: string;
|
||||
level: 'block' | 'inline';
|
||||
start?: ((this: TokenizerThis, src: string) => number | void) | undefined;
|
||||
tokenizer: (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | void;
|
||||
childTokens?: string[] | undefined;
|
||||
}
|
||||
interface RendererThis {
|
||||
parser: _Parser;
|
||||
}
|
||||
interface RendererExtension {
|
||||
name: string;
|
||||
renderer: (this: RendererThis, token: Tokens.Generic) => string | false | undefined;
|
||||
}
|
||||
type TokenizerAndRendererExtension = TokenizerExtension | RendererExtension | (TokenizerExtension & RendererExtension);
|
||||
type RendererApi = Omit<_Renderer, 'constructor' | 'options'>;
|
||||
type RendererObject = {
|
||||
[K in keyof RendererApi]?: (...args: Parameters<RendererApi[K]>) => ReturnType<RendererApi[K]> | false;
|
||||
};
|
||||
type TokenizerApi = Omit<_Tokenizer, 'constructor' | 'options' | 'rules' | 'lexer'>;
|
||||
type TokenizerObject = {
|
||||
[K in keyof TokenizerApi]?: (...args: Parameters<TokenizerApi[K]>) => ReturnType<TokenizerApi[K]> | false;
|
||||
};
|
||||
interface MarkedExtension {
|
||||
/**
|
||||
* True will tell marked to await any walkTokens functions before parsing the tokens and returning an HTML string.
|
||||
*/
|
||||
async?: boolean;
|
||||
/**
|
||||
* A prefix URL for any relative link.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-base-url to prefix url for any relative link.
|
||||
*/
|
||||
baseUrl?: string | undefined | null;
|
||||
/**
|
||||
* Enable GFM line breaks. This option requires the gfm option to be true.
|
||||
*/
|
||||
breaks?: boolean | undefined;
|
||||
/**
|
||||
* Add tokenizers and renderers to marked
|
||||
*/
|
||||
extensions?: TokenizerAndRendererExtension[] | undefined | null;
|
||||
/**
|
||||
* Enable GitHub flavored markdown.
|
||||
*/
|
||||
gfm?: boolean | undefined;
|
||||
/**
|
||||
* Include an id attribute when emitting headings.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-gfm-heading-id to include an id attribute when emitting headings (h1, h2, h3, etc).
|
||||
*/
|
||||
headerIds?: boolean | undefined;
|
||||
/**
|
||||
* Set the prefix for header tag ids.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-gfm-heading-id to add a string to prefix the id attribute when emitting headings (h1, h2, h3, etc).
|
||||
*/
|
||||
headerPrefix?: string | undefined;
|
||||
/**
|
||||
* A function to highlight code blocks. The function can either be
|
||||
* synchronous (returning a string) or asynchronous (callback invoked
|
||||
* with an error if any occurred during highlighting and a string
|
||||
* if highlighting was successful)
|
||||
* @deprecated Deprecated in v5.0.0 use marked-highlight to add highlighting to code blocks.
|
||||
*/
|
||||
highlight?: ((code: string, lang: string | undefined, callback?: (error: Error, code?: string) => void) => string | void) | null;
|
||||
/**
|
||||
* Hooks are methods that hook into some part of marked.
|
||||
* preprocess is called to process markdown before sending it to marked.
|
||||
* postprocess is called to process html after marked has finished parsing.
|
||||
*/
|
||||
hooks?: {
|
||||
preprocess: (markdown: string) => string;
|
||||
postprocess: (html: string | undefined) => string | undefined;
|
||||
options?: MarkedOptions;
|
||||
} | null;
|
||||
/**
|
||||
* Set the prefix for code block classes.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-highlight to prefix the className in a <code> block. Useful for syntax highlighting.
|
||||
*/
|
||||
langPrefix?: string | undefined;
|
||||
/**
|
||||
* Mangle autolinks (<email@domain.com>).
|
||||
* @deprecated Deprecated in v5.0.0 use marked-mangle to mangle email addresses.
|
||||
*/
|
||||
mangle?: boolean | undefined;
|
||||
/**
|
||||
* Conform to obscure parts of markdown.pl as much as possible. Don't fix any of the original markdown bugs or poor behavior.
|
||||
*/
|
||||
pedantic?: boolean | undefined;
|
||||
/**
|
||||
* Type: object Default: new Renderer()
|
||||
*
|
||||
* An object containing functions to render tokens to HTML.
|
||||
*/
|
||||
renderer?: RendererObject | undefined | null;
|
||||
/**
|
||||
* Sanitize the output. Ignore any HTML that has been input. If true, sanitize the HTML passed into markdownString with the sanitizer function.
|
||||
* @deprecated Warning: This feature is deprecated and it should NOT be used as it cannot be considered secure. Instead use a sanitize library, like DOMPurify (recommended), sanitize-html or insane on the output HTML!
|
||||
*/
|
||||
sanitize?: boolean | undefined;
|
||||
/**
|
||||
* Optionally sanitize found HTML with a sanitizer function.
|
||||
* @deprecated A function to sanitize the HTML passed into markdownString.
|
||||
*/
|
||||
sanitizer?: ((html: string) => string) | null;
|
||||
/**
|
||||
* Shows an HTML error message when rendering fails.
|
||||
*/
|
||||
silent?: boolean | undefined;
|
||||
/**
|
||||
* Use smarter list behavior than the original markdown. May eventually be default with the old behavior moved into pedantic.
|
||||
*/
|
||||
smartLists?: boolean | undefined;
|
||||
/**
|
||||
* Use "smart" typograhic punctuation for things like quotes and dashes.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-smartypants to use "smart" typographic punctuation for things like quotes and dashes.
|
||||
*/
|
||||
smartypants?: boolean | undefined;
|
||||
/**
|
||||
* The tokenizer defines how to turn markdown text into tokens.
|
||||
*/
|
||||
tokenizer?: TokenizerObject | undefined | null;
|
||||
/**
|
||||
* The walkTokens function gets called with every token.
|
||||
* Child tokens are called before moving on to sibling tokens.
|
||||
* Each token is passed by reference so updates are persisted when passed to the parser.
|
||||
* The return value of the function is ignored.
|
||||
*/
|
||||
walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
|
||||
/**
|
||||
* Generate closing slash for self-closing tags (<br/> instead of <br>)
|
||||
* @deprecated Deprecated in v5.0.0 use marked-xhtml to emit self-closing HTML tags for void elements (<br/>, <img/>, etc.) with a "/" as required by XHTML.
|
||||
*/
|
||||
xhtml?: boolean | undefined;
|
||||
}
|
||||
interface MarkedOptions extends Omit<MarkedExtension, 'extensions' | 'renderer' | 'tokenizer' | 'walkTokens'> {
|
||||
/**
|
||||
* Type: object Default: new Renderer()
|
||||
*
|
||||
* An object containing functions to render tokens to HTML.
|
||||
*/
|
||||
renderer?: Omit<_Renderer, 'constructor'> | undefined | null;
|
||||
/**
|
||||
* The tokenizer defines how to turn markdown text into tokens.
|
||||
*/
|
||||
tokenizer?: Omit<_Tokenizer, 'constructor'> | undefined | null;
|
||||
/**
|
||||
* The walkTokens function gets called with every token.
|
||||
* Child tokens are called before moving on to sibling tokens.
|
||||
* Each token is passed by reference so updates are persisted when passed to the parser.
|
||||
* The return value of the function is ignored.
|
||||
*/
|
||||
walkTokens?: ((token: Token) => void | Promise<void> | Array<void | Promise<void>>) | undefined | null;
|
||||
/**
|
||||
* Add tokenizers and renderers to marked
|
||||
*/
|
||||
extensions?: (TokenizerAndRendererExtension[] & {
|
||||
renderers: Record<string, (this: RendererThis, token: Tokens.Generic) => string | false | undefined>;
|
||||
childTokens: Record<string, string[]>;
|
||||
block: any[];
|
||||
inline: any[];
|
||||
startBlock: Array<(this: TokenizerThis, src: string) => number | void>;
|
||||
startInline: Array<(this: TokenizerThis, src: string) => number | void>;
|
||||
}) | undefined | null;
|
||||
}
|
||||
|
||||
type Rule = RegExp | string;
|
||||
interface Rules {
|
||||
[ruleName: string]: Pick<RegExp, 'exec'> | Rule | Rules;
|
||||
}
|
||||
type BlockRuleNames = 'newline' | 'code' | 'fences' | 'hr' | 'heading' | 'blockquote' | 'list' | 'html' | 'def' | 'lheading' | '_paragraph' | 'text' | '_label' | '_title' | 'bullet' | 'listItemStart' | '_tag' | '_comment' | 'paragraph' | 'uote';
|
||||
type BlockSubRuleNames = 'normal' | 'gfm' | 'pedantic';
|
||||
type InlineRuleNames = 'escape' | 'autolink' | 'tag' | 'link' | 'reflink' | 'nolink' | 'reflinkSearch' | 'code' | 'br' | 'text' | '_punctuation' | 'punctuation' | 'blockSkip' | 'escapedEmSt' | '_comment' | '_escapes' | '_scheme' | '_email' | '_attribute' | '_label' | '_href' | '_title' | 'strong' | '_extended_email' | '_backpedal';
|
||||
type InlineSubRuleNames = 'gfm' | 'emStrong' | 'normal' | 'pedantic' | 'breaks';
|
||||
/**
|
||||
* Block-Level Grammar
|
||||
*/
|
||||
declare const block: Record<BlockRuleNames, Rule> & Record<BlockSubRuleNames, Rules> & Rules;
|
||||
/**
|
||||
* Inline-Level Grammar
|
||||
*/
|
||||
declare const inline: Record<InlineRuleNames, Rule> & Record<InlineSubRuleNames, Rules> & Rules;
|
||||
|
||||
/**
|
||||
* Block Lexer
|
||||
*/
|
||||
declare class _Lexer {
|
||||
tokens: TokensList;
|
||||
options: MarkedOptions;
|
||||
state: {
|
||||
inLink: boolean;
|
||||
inRawBlock: boolean;
|
||||
top: boolean;
|
||||
};
|
||||
private tokenizer;
|
||||
private inlineQueue;
|
||||
constructor(options?: MarkedOptions);
|
||||
/**
|
||||
* Expose Rules
|
||||
*/
|
||||
static get rules(): Rules;
|
||||
/**
|
||||
* Static Lex Method
|
||||
*/
|
||||
static lex(src: string, options?: MarkedOptions): TokensList;
|
||||
/**
|
||||
* Static Lex Inline Method
|
||||
*/
|
||||
static lexInline(src: string, options?: MarkedOptions): Token[];
|
||||
/**
|
||||
* Preprocessing
|
||||
*/
|
||||
lex(src: string): TokensList;
|
||||
/**
|
||||
* Lexing
|
||||
*/
|
||||
blockTokens(src: string, tokens?: Token[]): Token[];
|
||||
blockTokens(src: string, tokens?: TokensList): TokensList;
|
||||
inline(src: string, tokens?: Token[]): Token[];
|
||||
/**
|
||||
* Lexing/Compiling
|
||||
*/
|
||||
inlineTokens(src: string, tokens?: Token[]): Token[];
|
||||
}
|
||||
|
||||
declare class _Hooks {
|
||||
options: MarkedOptions;
|
||||
constructor(options?: MarkedOptions);
|
||||
static passThroughHooks: Set<string>;
|
||||
/**
|
||||
* Process markdown before marked
|
||||
*/
|
||||
preprocess(markdown: string): string;
|
||||
/**
|
||||
* Process HTML after marked is finished
|
||||
*/
|
||||
postprocess(html: string | undefined): string | undefined;
|
||||
}
|
||||
|
||||
type ResultCallback$1 = (error: Error | null, parseResult?: string) => undefined | void;
|
||||
declare class Marked {
|
||||
#private;
|
||||
defaults: MarkedOptions;
|
||||
options: (opt: any) => this;
|
||||
parse: (src: string, optOrCallback?: MarkedOptions | ResultCallback$1 | undefined | null, callback?: ResultCallback$1 | undefined) => string | Promise<string | undefined> | undefined;
|
||||
parseInline: (src: string, optOrCallback?: MarkedOptions | ResultCallback$1 | undefined | null, callback?: ResultCallback$1 | undefined) => string | Promise<string | undefined> | undefined;
|
||||
Parser: typeof _Parser;
|
||||
parser: typeof _Parser.parse;
|
||||
Renderer: typeof _Renderer;
|
||||
TextRenderer: typeof _TextRenderer;
|
||||
Lexer: typeof _Lexer;
|
||||
lexer: typeof _Lexer.lex;
|
||||
Tokenizer: typeof _Tokenizer;
|
||||
Slugger: typeof _Slugger;
|
||||
Hooks: typeof _Hooks;
|
||||
constructor(...args: MarkedExtension[]);
|
||||
/**
|
||||
* Run callback for every token
|
||||
*/
|
||||
walkTokens<T = void>(tokens: Token[] | TokensList, callback: (token: Token) => T | T[]): T[];
|
||||
use(...args: MarkedExtension[]): this;
|
||||
setOptions(opt: any): this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the original marked default options.
|
||||
*/
|
||||
declare function _getDefaults(): MarkedOptions;
|
||||
declare let _defaults: MarkedOptions;
|
||||
|
||||
type ResultCallback = (error: Error | null, parseResult?: string) => undefined | void;
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Hash of options, having async: true
|
||||
* @return Promise of string of compiled HTML
|
||||
*/
|
||||
declare function marked(src: string, options: MarkedOptions & {
|
||||
async: true;
|
||||
}): Promise<string>;
|
||||
/**
|
||||
* Compiles markdown to HTML synchronously.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Optional hash of options
|
||||
* @return String of compiled HTML
|
||||
*/
|
||||
declare function marked(src: string, options?: MarkedOptions): string;
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously with a callback.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param callback Function called when the markdownString has been fully parsed when using async highlighting
|
||||
*/
|
||||
declare function marked(src: string, callback: ResultCallback): void;
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously with a callback.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Hash of options
|
||||
* @param callback Function called when the markdownString has been fully parsed when using async highlighting
|
||||
*/
|
||||
declare function marked(src: string, options: MarkedOptions, callback: ResultCallback): void;
|
||||
declare namespace marked {
|
||||
var options: (options: MarkedOptions) => typeof marked;
|
||||
var setOptions: (options: MarkedOptions) => typeof marked;
|
||||
var getDefaults: typeof _getDefaults;
|
||||
var defaults: MarkedOptions;
|
||||
var use: (...args: MarkedExtension[]) => typeof marked;
|
||||
var walkTokens: <T = void>(tokens: TokensList | Token[], callback: (token: Token) => T | T[]) => T[];
|
||||
var parseInline: (src: string, optOrCallback?: MarkedOptions | ResultCallback$1 | null | undefined, callback?: ResultCallback$1 | undefined) => string | Promise<string | undefined> | undefined;
|
||||
var Parser: typeof _Parser;
|
||||
var parser: typeof _Parser.parse;
|
||||
var Renderer: typeof _Renderer;
|
||||
var TextRenderer: typeof _TextRenderer;
|
||||
var Lexer: typeof _Lexer;
|
||||
var lexer: typeof _Lexer.lex;
|
||||
var Tokenizer: typeof _Tokenizer;
|
||||
var Slugger: typeof _Slugger;
|
||||
var Hooks: typeof _Hooks;
|
||||
var parse: typeof marked;
|
||||
}
|
||||
declare const options: (options: MarkedOptions) => typeof marked;
|
||||
declare const setOptions: (options: MarkedOptions) => typeof marked;
|
||||
declare const use: (...args: MarkedExtension[]) => typeof marked;
|
||||
declare const walkTokens: <T = void>(tokens: Token[] | TokensList, callback: (token: Token) => T | T[]) => T[];
|
||||
declare const parseInline: (src: string, optOrCallback?: MarkedOptions | ResultCallback$1 | null | undefined, callback?: ResultCallback$1 | undefined) => string | Promise<string | undefined> | undefined;
|
||||
declare const parse: typeof marked;
|
||||
declare const parser: typeof _Parser.parse;
|
||||
declare const lexer: typeof _Lexer.lex;
|
||||
|
||||
export { _Hooks as Hooks, _Lexer as Lexer, Links, Marked, MarkedExtension, MarkedOptions, _Parser as Parser, _Renderer as Renderer, RendererExtension, RendererThis, ResultCallback, Rule, Rules, _Slugger as Slugger, SluggerOptions, _TextRenderer as TextRenderer, Token, _Tokenizer as Tokenizer, TokenizerAndRendererExtension, TokenizerExtension, TokenizerThis, Tokens, TokensList, block, _defaults as defaults, _getDefaults as getDefaults, inline, lexer, marked, options, parse, parseInline, parser, setOptions, use, walkTokens };
|
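To illustrate the extension types declared above, here is a hedged sketch of a custom inline extension written against `TokenizerAndRendererExtension`. The `mention` name, regex, and URL are made up for illustration and are not part of marked:

```ts
import { marked, type TokenizerAndRendererExtension } from 'marked';

// Turns @user into a link. Illustrative only.
const mention: TokenizerAndRendererExtension = {
  name: 'mention',
  level: 'inline',
  start(src) { return src.match(/@/)?.index; },  // hint for the lexer
  tokenizer(src) {
    const match = /^@(\w+)/.exec(src);
    if (match) {
      return { type: 'mention', raw: match[0], user: match[1] };
    }
  },
  renderer(token) {
    return `<a href="/users/${token.user}">@${token.user}</a>`;
  }
};

marked.use({ extensions: [mention] });
```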
2089
lib/marked.esm.js
File diff suppressed because it is too large
1
lib/marked.esm.js.map
Normal file
File diff suppressed because one or more lines are too long
2796
lib/marked.umd.js
File diff suppressed because one or more lines are too long
1
lib/marked.umd.js.map
Normal file
File diff suppressed because one or more lines are too long
11
marked.min.js
vendored
File diff suppressed because one or more lines are too long
5363
package-lock.json
generated
File diff suppressed because it is too large
22
package.json
@ -7,6 +7,7 @@
|
||||
"main": "./lib/marked.cjs",
|
||||
"module": "./lib/marked.esm.js",
|
||||
"browser": "./lib/marked.umd.js",
|
||||
"types": "./lib/marked.d.ts",
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
},
|
||||
@ -20,6 +21,7 @@
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./lib/marked.d.ts",
|
||||
"import": "./lib/marked.esm.js",
|
||||
"default": "./lib/marked.cjs"
|
||||
},
|
||||
@ -43,17 +45,17 @@
|
||||
"html"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.22.9",
|
||||
"@babel/preset-env": "^7.22.9",
|
||||
"@markedjs/html-differ": "^4.0.2",
|
||||
"@rollup/plugin-babel": "^6.0.3",
|
||||
"@semantic-release/commit-analyzer": "^10.0.1",
|
||||
"@semantic-release/git": "^10.0.1",
|
||||
"@semantic-release/github": "^9.0.4",
|
||||
"@semantic-release/npm": "^10.0.4",
|
||||
"@semantic-release/release-notes-generator": "^11.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^5.59.9",
|
||||
"@typescript-eslint/parser": "^5.59.9",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"commonmark": "0.30.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"eslint": "^8.45.0",
|
||||
"eslint-config-standard": "^17.1.0",
|
||||
"eslint-plugin-import": "^2.27.5",
|
||||
@ -65,14 +67,17 @@
|
||||
"markdown-it": "13.0.1",
|
||||
"node-fetch": "^3.3.1",
|
||||
"recheck": "^4.4.5",
|
||||
"rollup": "^3.26.3",
|
||||
"semantic-release": "^21.0.7",
|
||||
"titleize": "^3.0.0",
|
||||
"ts-expect": "^1.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"tsup": "^6.7.0",
|
||||
"typescript": "5.0.4",
|
||||
"uglify-js": "^3.17.4",
|
||||
"vuln-regex-detector": "^1.3.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jasmine --config=jasmine.json",
|
||||
"test": "cross-env NODE_OPTIONS=--loader=ts-node/esm jasmine --config=jasmine.json",
|
||||
"test:all": "npm test && npm run test:lint",
|
||||
"test:unit": "npm test -- test/unit/**/*-spec.js",
|
||||
"test:specs": "npm test -- test/specs/**/*-spec.js",
|
||||
@ -80,12 +85,13 @@
|
||||
"test:redos": "node test/recheck.js > vuln.js",
|
||||
"test:update": "node test/update-specs.js",
|
||||
"rules": "node test/rules.js",
|
||||
"bench": "npm run rollup && node test/bench.js",
|
||||
"bench": "npm run build && node test/bench.js",
|
||||
"lint": "eslint --fix .",
|
||||
"type-check": "tsc",
|
||||
"build:reset": "git checkout upstream/master lib/marked.cjs lib/marked.umd.js lib/marked.esm.js marked.min.js",
|
||||
"build": "npm run rollup && npm run minify",
|
||||
"build": "npm run type-check && tsup && npm run build:verify && npm run minify",
|
||||
"build:docs": "node build-docs.js",
|
||||
"rollup": "rollup -c rollup.config.js",
|
||||
"build:verify": "tsc --project tsconfig-type-test.json",
|
||||
"minify": "uglifyjs lib/marked.umd.js -cm --comments /Copyright/ -o marked.min.js"
|
||||
},
|
||||
"engines": {
|
||||
|
@ -1,52 +0,0 @@
|
||||
import babel from '@rollup/plugin-babel';
|
||||
import { defineConfig } from 'rollup';
|
||||
import fs from 'fs';
|
||||
|
||||
const pkg = JSON.parse(fs.readFileSync('./package.json'));
|
||||
const version = process.env.SEMANTIC_RELEASE_NEXT_VERSION || pkg.version;
|
||||
|
||||
console.log('building version:', version);
|
||||
|
||||
const banner = `/**
|
||||
* marked v${version} - a markdown parser
|
||||
* Copyright (c) 2011-${new Date().getFullYear()}, Christopher Jeffrey. (MIT Licensed)
|
||||
* https://github.com/markedjs/marked
|
||||
*/
|
||||
|
||||
/**
|
||||
* DO NOT EDIT THIS FILE
|
||||
* The code in this file is generated from files in ./src/
|
||||
*/
|
||||
`;
|
||||
|
||||
export default defineConfig([
|
||||
{
|
||||
input: 'src/marked.js',
|
||||
output: {
|
||||
file: 'lib/marked.esm.js',
|
||||
format: 'esm',
|
||||
banner
|
||||
}
|
||||
},
|
||||
{
|
||||
input: 'src/marked.js',
|
||||
output: [{
|
||||
file: 'lib/marked.umd.js',
|
||||
format: 'umd',
|
||||
name: 'marked',
|
||||
banner
|
||||
},
|
||||
{
|
||||
file: 'lib/marked.cjs',
|
||||
format: 'cjs',
|
||||
name: 'marked',
|
||||
banner
|
||||
}],
|
||||
plugins: [
|
||||
babel({
|
||||
presets: [['@babel/preset-env', { loose: true }]],
|
||||
babelHelpers: 'bundled'
|
||||
})
|
||||
]
|
||||
}
|
||||
]);
|
26
src/Hooks.js
@ -1,26 +0,0 @@
|
||||
import { defaults } from './defaults.js';
|
||||
|
||||
export class Hooks {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
}
|
||||
|
||||
static passThroughHooks = new Set([
|
||||
'preprocess',
|
||||
'postprocess'
|
||||
]);
|
||||
|
||||
/**
|
||||
* Process markdown before marked
|
||||
*/
|
||||
preprocess(markdown) {
|
||||
return markdown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process HTML after marked is finished
|
||||
*/
|
||||
postprocess(html) {
|
||||
return html;
|
||||
}
|
||||
}
|
29
src/Hooks.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { _defaults } from './defaults.ts';
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
|
||||
export class _Hooks {
|
||||
options: MarkedOptions;
|
||||
|
||||
constructor(options?: MarkedOptions) {
|
||||
this.options = options || _defaults;
|
||||
}
|
||||
|
||||
static passThroughHooks = new Set([
|
||||
'preprocess',
|
||||
'postprocess'
|
||||
]);
|
||||
|
||||
/**
|
||||
* Process markdown before marked
|
||||
*/
|
||||
preprocess(markdown: string) {
|
||||
return markdown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process HTML after marked is finished
|
||||
*/
|
||||
postprocess(html: string | undefined) {
|
||||
return html;
|
||||
}
|
||||
}
|
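The two pass-through hooks registered above (`preprocess` and `postprocess`) are the same ones exposed through `marked.use`. A short usage sketch; the front-matter regex and the `<article>` wrapper are illustrative, not part of marked:

```ts
import { marked } from 'marked';

marked.use({
  hooks: {
    preprocess(markdown) {
      // Strip a leading YAML front-matter block before lexing (illustrative).
      return markdown.replace(/^---[\s\S]*?---\n/, '');
    },
    postprocess(html) {
      // Wrap the rendered output (illustrative).
      return html && `<article>\n${html}\n</article>`;
    }
  }
});
```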
@ -1,49 +1,56 @@
|
||||
import { getDefaults } from './defaults.js';
|
||||
import { Lexer } from './Lexer.js';
|
||||
import { Parser } from './Parser.js';
|
||||
import { Hooks } from './Hooks.js';
|
||||
import { Renderer } from './Renderer.js';
|
||||
import { Tokenizer } from './Tokenizer.js';
|
||||
import { TextRenderer } from './TextRenderer.js';
|
||||
import { Slugger } from './Slugger.js';
|
||||
import { _getDefaults } from './defaults.js';
|
||||
import { _Lexer } from './Lexer.ts';
|
||||
import { _Parser } from './Parser.ts';
|
||||
import { _Hooks } from './Hooks.ts';
|
||||
import { _Renderer } from './Renderer.ts';
|
||||
import { _Tokenizer } from './Tokenizer.ts';
|
||||
import { _TextRenderer } from './TextRenderer.ts';
|
||||
import { _Slugger } from './Slugger.ts';
|
||||
import {
|
||||
checkDeprecations,
|
||||
escape
|
||||
} from './helpers.js';
|
||||
} from './helpers.ts';
|
||||
import type { MarkedExtension, MarkedOptions } from './MarkedOptions.ts';
|
||||
import type { Token, TokensList } from './Tokens.ts';
|
||||
|
||||
export type ResultCallback = (error: Error | null, parseResult?: string) => undefined | void;
|
||||
|
||||
export class Marked {
|
||||
defaults = getDefaults();
|
||||
defaults = _getDefaults();
|
||||
options = this.setOptions;
|
||||
|
||||
parse = this.#parseMarkdown(Lexer.lex, Parser.parse);
|
||||
parseInline = this.#parseMarkdown(Lexer.lexInline, Parser.parseInline);
|
||||
parse = this.#parseMarkdown(_Lexer.lex, _Parser.parse);
|
||||
parseInline = this.#parseMarkdown(_Lexer.lexInline, _Parser.parseInline);
|
||||
|
||||
Parser = Parser;
|
||||
parser = Parser.parse;
|
||||
Renderer = Renderer;
|
||||
TextRenderer = TextRenderer;
|
||||
Lexer = Lexer;
|
||||
lexer = Lexer.lex;
|
||||
Tokenizer = Tokenizer;
|
||||
Slugger = Slugger;
|
||||
Hooks = Hooks;
|
||||
Parser = _Parser;
|
||||
parser = _Parser.parse;
|
||||
Renderer = _Renderer;
|
||||
TextRenderer = _TextRenderer;
|
||||
Lexer = _Lexer;
|
||||
lexer = _Lexer.lex;
|
||||
Tokenizer = _Tokenizer;
|
||||
Slugger = _Slugger;
|
||||
Hooks = _Hooks;
|
||||
|
||||
constructor(...args) {
|
||||
constructor(...args: MarkedExtension[]) {
|
||||
this.use(...args);
|
||||
}
|
||||
|
||||
walkTokens(tokens, callback) {
|
||||
let values = [];
|
||||
/**
|
||||
* Run callback for every token
|
||||
*/
|
||||
walkTokens <T = void>(tokens: Token[] | TokensList, callback: (token: Token) => T | T[]) {
|
||||
let values: T[] = [];
|
||||
for (const token of tokens) {
|
||||
values = values.concat(callback.call(this, token));
|
||||
switch (token.type) {
|
||||
case 'table': {
|
||||
for (const cell of token.header) {
|
||||
values = values.concat(this.walkTokens(cell.tokens, callback));
|
||||
values = values.concat(this.walkTokens(cell.tokens!, callback));
|
||||
}
|
||||
for (const row of token.rows) {
|
||||
for (const cell of row) {
|
||||
values = values.concat(this.walkTokens(cell.tokens, callback));
|
||||
values = values.concat(this.walkTokens(cell.tokens!, callback));
|
||||
}
|
||||
}
|
||||
break;
|
||||
@ -66,12 +73,12 @@ export class Marked {
|
||||
return values;
|
||||
}
|
||||
|
||||
use(...args) {
|
||||
const extensions = this.defaults.extensions || { renderers: {}, childTokens: {} };
|
||||
use(...args: MarkedExtension[]) {
|
||||
const extensions: NonNullable<MarkedOptions['extensions']> = this.defaults.extensions || { renderers: {}, childTokens: {} } as NonNullable<MarkedOptions['extensions']>;
|
||||
|
||||
args.forEach((pack) => {
|
||||
// copy options to new object
|
||||
const opts = { ...pack };
|
||||
const opts = { ...pack } as MarkedOptions;
|
||||
|
||||
// set async to true if it was set to true before
|
||||
opts.async = this.defaults.async || opts.async || false;
|
||||
@ -82,7 +89,7 @@ export class Marked {
|
||||
if (!ext.name) {
|
||||
throw new Error('extension name required');
|
||||
}
|
||||
if (ext.renderer) { // Renderer extensions
|
||||
if ('renderer' in ext) { // Renderer extensions
|
||||
const prevRenderer = extensions.renderers[ext.name];
|
||||
if (prevRenderer) {
|
||||
// Replace extension with func to run new extension but fall back if false
|
||||
@ -97,7 +104,7 @@ export class Marked {
|
||||
extensions.renderers[ext.name] = ext.renderer;
|
||||
}
|
||||
}
|
||||
if (ext.tokenizer) { // Tokenizer Extensions
|
||||
if ('tokenizer' in ext) { // Tokenizer Extensions
|
||||
if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
|
||||
throw new Error("extension level must be 'block' or 'inline'");
|
||||
}
|
||||
@ -109,20 +116,20 @@ export class Marked {
|
||||
if (ext.start) { // Function to check for start of token
|
||||
if (ext.level === 'block') {
|
||||
if (extensions.startBlock) {
|
||||
extensions.startBlock.push(ext.start);
|
||||
extensions.startBlock.push(ext.start!);
|
||||
} else {
|
||||
extensions.startBlock = [ext.start];
|
||||
extensions.startBlock = [ext.start!];
|
||||
}
|
||||
} else if (ext.level === 'inline') {
|
||||
if (extensions.startInline) {
|
||||
extensions.startInline.push(ext.start);
|
||||
extensions.startInline.push(ext.start!);
|
||||
} else {
|
||||
extensions.startInline = [ext.start];
|
||||
extensions.startInline = [ext.start!];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (ext.childTokens) { // Child tokens to be visited by walkTokens
|
||||
if ('childTokens' in ext && ext.childTokens) { // Child tokens to be visited by walkTokens
|
||||
extensions.childTokens[ext.name] = ext.childTokens;
|
||||
}
|
||||
});
|
||||
@ -131,12 +138,12 @@ export class Marked {
|
||||
|
||||
// ==-- Parse "overwrite" extensions --== //
|
||||
if (pack.renderer) {
|
||||
const renderer = this.defaults.renderer || new Renderer(this.defaults);
|
||||
const renderer = this.defaults.renderer || new _Renderer(this.defaults);
|
||||
for (const prop in pack.renderer) {
|
||||
const prevRenderer = renderer[prop];
|
||||
// Replace renderer with func to run extension, but fall back if false
|
||||
renderer[prop] = (...args) => {
|
||||
let ret = pack.renderer[prop].apply(renderer, args);
|
||||
renderer[prop] = (...args: unknown[]) => {
|
||||
let ret = pack.renderer![prop].apply(renderer, args);
|
||||
if (ret === false) {
|
||||
ret = prevRenderer.apply(renderer, args);
|
||||
}
|
||||
@ -146,12 +153,12 @@ export class Marked {
|
||||
opts.renderer = renderer;
|
||||
}
|
||||
if (pack.tokenizer) {
|
||||
const tokenizer = this.defaults.tokenizer || new Tokenizer(this.defaults);
|
||||
const tokenizer = this.defaults.tokenizer || new _Tokenizer(this.defaults);
|
||||
for (const prop in pack.tokenizer) {
|
||||
const prevTokenizer = tokenizer[prop];
|
||||
// Replace tokenizer with func to run extension, but fall back if false
|
||||
tokenizer[prop] = (...args) => {
|
||||
let ret = pack.tokenizer[prop].apply(tokenizer, args);
|
||||
tokenizer[prop] = (...args: unknown[]) => {
|
||||
let ret = pack.tokenizer![prop].apply(tokenizer, args);
|
||||
if (ret === false) {
|
||||
ret = prevTokenizer.apply(tokenizer, args);
|
||||
}
|
||||
@ -163,23 +170,23 @@ export class Marked {
|
||||
|
||||
// ==-- Parse Hooks extensions --== //
|
||||
if (pack.hooks) {
|
||||
const hooks = this.defaults.hooks || new Hooks();
|
||||
const hooks = this.defaults.hooks || new _Hooks();
|
||||
for (const prop in pack.hooks) {
|
||||
const prevHook = hooks[prop];
|
||||
if (Hooks.passThroughHooks.has(prop)) {
|
||||
hooks[prop] = (arg) => {
|
||||
if (_Hooks.passThroughHooks.has(prop)) {
|
||||
hooks[prop as 'preprocess' | 'postprocess'] = (arg: string | undefined) => {
|
||||
if (this.defaults.async) {
|
||||
return Promise.resolve(pack.hooks[prop].call(hooks, arg)).then(ret => {
|
||||
return Promise.resolve(pack.hooks![prop].call(hooks, arg)).then(ret => {
|
||||
return prevHook.call(hooks, ret);
|
||||
});
|
||||
}
|
||||
|
||||
const ret = pack.hooks[prop].call(hooks, arg);
|
||||
const ret = pack.hooks![prop].call(hooks, arg);
|
||||
return prevHook.call(hooks, ret);
|
||||
};
|
||||
} else {
|
||||
hooks[prop] = (...args) => {
|
||||
let ret = pack.hooks[prop].apply(hooks, args);
|
||||
let ret = pack.hooks![prop].apply(hooks, args);
|
||||
if (ret === false) {
|
||||
ret = prevHook.apply(hooks, args);
|
||||
}
|
||||
@ -194,8 +201,8 @@ export class Marked {
|
||||
if (pack.walkTokens) {
|
||||
const walkTokens = this.defaults.walkTokens;
|
||||
opts.walkTokens = function(token) {
|
||||
let values = [];
|
||||
values.push(pack.walkTokens.call(this, token));
|
||||
let values: Array<Promise<void> | void> = [];
|
||||
values.push(pack.walkTokens!.call(this, token));
|
||||
if (walkTokens) {
|
||||
values = values.concat(walkTokens.call(this, token));
|
||||
}
|
||||
@ -214,16 +221,16 @@ export class Marked {
|
||||
return this;
|
||||
}
|
||||
|
||||
#parseMarkdown(lexer, parser) {
|
||||
return (src, opt, callback) => {
|
||||
if (typeof opt === 'function') {
|
||||
callback = opt;
|
||||
opt = null;
|
||||
#parseMarkdown(lexer: (src: string, options?: MarkedOptions) => TokensList | Token[], parser: (tokens: Token[], options?: MarkedOptions) => string | undefined) {
|
||||
return (src: string, optOrCallback?: MarkedOptions | ResultCallback | undefined | null, callback?: ResultCallback | undefined): string | Promise<string | undefined> | undefined => {
|
||||
if (typeof optOrCallback === 'function') {
|
||||
callback = optOrCallback;
|
||||
optOrCallback = null;
|
||||
}
|
||||
|
||||
const origOpt = { ...opt };
|
||||
opt = { ...this.defaults, ...origOpt };
|
||||
const throwError = this.#onError(opt.silent, opt.async, callback);
|
||||
const origOpt = { ...optOrCallback };
|
||||
const opt = { ...this.defaults, ...origOpt };
|
||||
const throwError = this.#onError(!!opt.silent, !!opt.async, callback);
|
||||
|
||||
// throw error in case of non string input
|
||||
if (typeof src === 'undefined' || src === null) {
|
||||
@ -242,7 +249,7 @@ export class Marked {
|
||||
|
||||
if (callback) {
|
||||
const highlight = opt.highlight;
|
||||
let tokens;
|
||||
let tokens: TokensList | Token[];
|
||||
|
||||
try {
|
||||
if (opt.hooks) {
|
||||
@ -250,10 +257,10 @@ export class Marked {
|
||||
}
|
||||
tokens = lexer(src, opt);
|
||||
} catch (e) {
|
||||
return throwError(e);
|
||||
return throwError(e as Error);
|
||||
}
|
||||
|
||||
const done = (err) => {
|
||||
const done = (err?: Error) => {
|
||||
let out;
|
||||
|
||||
if (!err) {
|
||||
@ -261,12 +268,12 @@ export class Marked {
|
||||
if (opt.walkTokens) {
|
||||
this.walkTokens(tokens, opt.walkTokens);
|
||||
}
|
||||
out = parser(tokens, opt);
|
||||
out = parser(tokens, opt)!;
|
||||
if (opt.hooks) {
|
||||
out = opt.hooks.postprocess(out);
|
||||
}
|
||||
} catch (e) {
|
||||
err = e;
|
||||
err = e as Error;
|
||||
}
|
||||
}
|
||||
|
||||
@ -274,7 +281,7 @@ export class Marked {
|
||||
|
||||
return err
|
||||
? throwError(err)
|
||||
: callback(null, out);
|
||||
: callback!(null, out) as undefined;
|
||||
};
|
||||
|
||||
if (!highlight || highlight.length < 3) {
|
||||
@ -338,13 +345,13 @@ export class Marked {
|
||||
}
|
||||
return html;
|
||||
} catch (e) {
|
||||
return throwError(e);
|
||||
return throwError(e as Error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#onError(silent, async, callback) {
|
||||
return (e) => {
|
||||
#onError(silent: boolean, async: boolean, callback?: ResultCallback) {
|
||||
return (e: Error): string | Promise<string> | undefined => {
|
||||
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
|
||||
if (silent) {
|
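For the generic `walkTokens<T>` signature introduced above, a small sketch of how the type parameter flows through; the input markdown is arbitrary:

```ts
import { Marked } from 'marked';

const md = new Marked();
const tokens = md.lexer('# one\n\n## two\n');

// Collect the raw text of every heading token; T is inferred as string,
// so `headings` is string[]. Returning [] skips a token.
const headings = md.walkTokens(tokens, (token) =>
  token.type === 'heading' ? token.raw : []
);
console.log(headings);
```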
@ -1,12 +1,14 @@
|
||||
import { Tokenizer } from './Tokenizer.js';
|
||||
import { defaults } from './defaults.js';
|
||||
import { block, inline } from './rules.js';
|
||||
import { _Tokenizer } from './Tokenizer.ts';
|
||||
import { _defaults } from './defaults.ts';
|
||||
import { block, inline } from './rules.ts';
|
||||
import type { Token, TokensList } from './Tokens.ts';
|
||||
import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
|
||||
import type { Rules } from './rules.ts';
|
||||
|
||||
/**
|
||||
* smartypants text replacement
|
||||
* @param {string} text
|
||||
*/
|
||||
function smartypants(text) {
|
||||
function smartypants(text: string) {
|
||||
return text
|
||||
// em-dashes
|
||||
.replace(/---/g, '\u2014')
|
||||
@ -26,9 +28,8 @@ function smartypants(text) {
|
||||
|
||||
/**
|
||||
* mangle email addresses
|
||||
* @param {string} text
|
||||
*/
|
||||
function mangle(text) {
|
||||
function mangle(text: string) {
|
||||
let out = '',
|
||||
i,
|
||||
ch;
|
||||
@ -48,12 +49,25 @@ function mangle(text) {
|
||||
/**
|
||||
* Block Lexer
|
||||
*/
|
||||
export class Lexer {
|
||||
constructor(options) {
|
||||
export class _Lexer {
|
||||
tokens: TokensList;
|
||||
options: MarkedOptions;
|
||||
state: {
|
||||
inLink: boolean;
|
||||
inRawBlock: boolean;
|
||||
top: boolean;
|
||||
};
|
||||
|
||||
private tokenizer: _Tokenizer;
|
||||
private inlineQueue: {src: string, tokens: Token[]}[];
|
||||
|
||||
constructor(options?: MarkedOptions) {
|
||||
// TokenList cannot be created in one go
|
||||
// @ts-expect-error
|
||||
this.tokens = [];
|
||||
this.tokens.links = Object.create(null);
|
||||
this.options = options || defaults;
|
||||
this.options.tokenizer = this.options.tokenizer || new Tokenizer();
|
||||
this.options = options || _defaults;
|
||||
this.options.tokenizer = this.options.tokenizer || new _Tokenizer();
|
||||
this.tokenizer = this.options.tokenizer;
|
||||
this.tokenizer.options = this.options;
|
||||
this.tokenizer.lexer = this;
|
||||
@ -86,7 +100,7 @@ export class Lexer {
|
||||
/**
|
||||
* Expose Rules
|
||||
*/
|
||||
static get rules() {
|
||||
static get rules(): Rules {
|
||||
return {
|
||||
block,
|
||||
inline
|
||||
@ -96,23 +110,23 @@ export class Lexer {
|
||||
/**
|
||||
* Static Lex Method
|
||||
*/
|
||||
static lex(src, options) {
|
||||
const lexer = new Lexer(options);
|
||||
static lex(src: string, options?: MarkedOptions) {
|
||||
const lexer = new _Lexer(options);
|
||||
return lexer.lex(src);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static Lex Inline Method
|
||||
*/
|
||||
static lexInline(src, options) {
|
||||
const lexer = new Lexer(options);
|
||||
static lexInline(src: string, options?: MarkedOptions) {
|
||||
const lexer = new _Lexer(options);
|
||||
return lexer.inlineTokens(src);
|
||||
}
|
||||
|
||||
/**
|
||||
* Preprocessing
|
||||
*/
|
||||
lex(src) {
|
||||
lex(src: string) {
|
||||
src = src
|
||||
.replace(/\r\n|\r/g, '\n');
|
||||
|
||||
@ -129,7 +143,9 @@ export class Lexer {
|
||||
/**
|
||||
* Lexing
|
||||
*/
|
||||
blockTokens(src, tokens = []) {
|
||||
blockTokens(src: string, tokens?: Token[]): Token[];
|
||||
blockTokens(src: string, tokens?: TokensList): TokensList;
|
||||
blockTokens(src: string, tokens: Token[] = []) {
|
||||
if (this.options.pedantic) {
|
||||
src = src.replace(/\t/g, ' ').replace(/^ +$/gm, '');
|
||||
} else {
|
||||
@ -143,7 +159,7 @@ export class Lexer {
|
||||
while (src) {
|
||||
if (this.options.extensions
|
||||
&& this.options.extensions.block
|
||||
&& this.options.extensions.block.some((extTokenizer) => {
|
||||
&& this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
|
||||
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@ -262,7 +278,7 @@ export class Lexer {
|
||||
let startIndex = Infinity;
|
||||
const tempSrc = src.slice(1);
|
||||
let tempStart;
|
||||
this.options.extensions.startBlock.forEach(function(getStartIndex) {
|
||||
this.options.extensions.startBlock.forEach((getStartIndex) => {
|
||||
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
|
||||
if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
|
||||
});
|
||||
@ -315,7 +331,7 @@ export class Lexer {
|
||||
return tokens;
|
||||
}
|
||||
|
||||
inline(src, tokens = []) {
|
||||
inline(src: string, tokens: Token[] = []) {
|
||||
this.inlineQueue.push({ src, tokens });
|
||||
return tokens;
|
||||
}
|
||||
@ -323,7 +339,7 @@ export class Lexer {
|
||||
/**
|
||||
* Lexing/Compiling
|
||||
*/
|
||||
inlineTokens(src, tokens = []) {
|
||||
inlineTokens(src: string, tokens: Token[] = []): Token[] {
|
||||
let token, lastToken, cutSrc;
|
||||
|
||||
// String with links masked to avoid interference with em and strong
|
||||
@ -461,7 +477,7 @@ export class Lexer {
|
||||
let startIndex = Infinity;
|
||||
const tempSrc = src.slice(1);
|
||||
let tempStart;
|
||||
this.options.extensions.startInline.forEach(function(getStartIndex) {
|
||||
this.options.extensions.startInline.forEach((getStartIndex) => {
|
||||
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
|
||||
if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
|
||||
});
|
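A brief sketch of the lexer/parser split typed above, using the static entry points; the printed output is not asserted here:

```ts
import { Lexer, Parser } from 'marked';

const tokens = Lexer.lex('# Title\n\nSome *text*.');  // TokensList
console.log(tokens.links);                            // reference-link map carried on the list
const html = Parser.parse(tokens);                    // string
console.log(html);
```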
212
src/MarkedOptions.ts
Normal file
@ -0,0 +1,212 @@
|
||||
import type { Token, Tokens, TokensList } from './Tokens.ts';
|
||||
import { _Parser } from './Parser.ts';
|
||||
import { _Lexer } from './Lexer.ts';
|
||||
import { _Renderer } from './Renderer.ts';
|
||||
import { _Tokenizer } from './Tokenizer.ts';
|
||||
|
||||
export interface SluggerOptions {
|
||||
/** Generates the next unique slug without updating the internal accumulator. */
|
||||
dryrun?: boolean;
|
||||
}
|
||||
|
||||
export interface TokenizerThis {
|
||||
lexer: _Lexer;
|
||||
}
|
||||
|
||||
export interface TokenizerExtension {
|
||||
name: string;
|
||||
level: 'block' | 'inline';
|
||||
start?: ((this: TokenizerThis, src: string) => number | void) | undefined;
|
||||
tokenizer: (this: TokenizerThis, src: string, tokens: Token[] | TokensList) => Tokens.Generic | void;
|
||||
childTokens?: string[] | undefined;
|
||||
}
|
||||
|
||||
export interface RendererThis {
|
||||
parser: _Parser;
|
||||
}
|
||||
|
||||
export interface RendererExtension {
|
||||
name: string;
|
||||
renderer: (this: RendererThis, token: Tokens.Generic) => string | false | undefined;
|
||||
}
|
||||
|
||||
export type TokenizerAndRendererExtension = TokenizerExtension | RendererExtension | (TokenizerExtension & RendererExtension);
|
||||
|
||||
type RendererApi = Omit<_Renderer, 'constructor' | 'options'>;
|
||||
type RendererObject = {
|
||||
[K in keyof RendererApi]?: (...args: Parameters<RendererApi[K]>) => ReturnType<RendererApi[K]> | false
|
||||
};
|
||||
|
||||
type TokenizerApi = Omit<_Tokenizer, 'constructor' | 'options' | 'rules' | 'lexer'>;
|
||||
type TokenizerObject = {
|
||||
[K in keyof TokenizerApi]?: (...args: Parameters<TokenizerApi[K]>) => ReturnType<TokenizerApi[K]> | false
|
||||
};
|
||||
|
||||
export interface MarkedExtension {
|
||||
/**
|
||||
* True will tell marked to await any walkTokens functions before parsing the tokens and returning an HTML string.
|
||||
*/
|
||||
async?: boolean;
|
||||
|
||||
/**
|
||||
* A prefix URL for any relative link.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-base-url to prefix url for any relative link.
|
||||
*/
|
||||
baseUrl?: string | undefined | null;
|
||||
|
||||
/**
|
||||
* Enable GFM line breaks. This option requires the gfm option to be true.
|
||||
*/
|
||||
breaks?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Add tokenizers and renderers to marked
|
||||
*/
|
||||
extensions?:
|
||||
| TokenizerAndRendererExtension[]
|
||||
| undefined | null;
|
||||
|
||||
/**
|
||||
* Enable GitHub flavored markdown.
|
||||
*/
|
||||
gfm?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Include an id attribute when emitting headings.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-gfm-heading-id to include an id attribute when emitting headings (h1, h2, h3, etc).
|
||||
*/
|
||||
headerIds?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Set the prefix for header tag ids.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-gfm-heading-id to add a string to prefix the id attribute when emitting headings (h1, h2, h3, etc).
|
||||
*/
|
||||
headerPrefix?: string | undefined;
|
||||
|
||||
/**
|
||||
* A function to highlight code blocks. The function can either be
|
||||
* synchronous (returning a string) or asynchronous (callback invoked
|
||||
* with an error if any occurred during highlighting and a string
|
||||
* if highlighting was successful)
|
||||
* @deprecated Deprecated in v5.0.0 use marked-highlight to add highlighting to code blocks.
|
||||
*/
|
||||
highlight?: ((code: string, lang: string | undefined, callback?: (error: Error, code?: string) => void) => string | void) | null;
|
||||
|
||||
/**
|
||||
* Hooks are methods that hook into some part of marked.
|
||||
* preprocess is called to process markdown before sending it to marked.
|
||||
* postprocess is called to process html after marked has finished parsing.
|
||||
*/
|
||||
hooks?: {
|
||||
preprocess: (markdown: string) => string,
|
||||
postprocess: (html: string | undefined) => string | undefined,
|
||||
// eslint-disable-next-line no-use-before-define
|
||||
options?: MarkedOptions
|
||||
} | null;
|
||||
|
||||
/**
|
||||
* Set the prefix for code block classes.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-highlight to prefix the className in a <code> block. Useful for syntax highlighting.
|
||||
*/
|
||||
langPrefix?: string | undefined;
|
||||
|
||||
/**
|
||||
* Mangle autolinks (<email@domain.com>).
|
||||
* @deprecated Deprecated in v5.0.0 use marked-mangle to mangle email addresses.
|
||||
*/
|
||||
mangle?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Conform to obscure parts of markdown.pl as much as possible. Don't fix any of the original markdown bugs or poor behavior.
|
||||
*/
|
||||
pedantic?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Type: object Default: new Renderer()
|
||||
*
|
||||
* An object containing functions to render tokens to HTML.
|
||||
*/
|
||||
renderer?: RendererObject | undefined | null;
|
||||
|
||||
/**
|
||||
* Sanitize the output. Ignore any HTML that has been input. If true, sanitize the HTML passed into markdownString with the sanitizer function.
|
||||
* @deprecated Warning: This feature is deprecated and it should NOT be used as it cannot be considered secure. Instead use a sanitize library, like DOMPurify (recommended), sanitize-html or insane on the output HTML!
|
||||
*/
|
||||
sanitize?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Optionally sanitize found HTML with a sanitizer function.
|
||||
* @deprecated A function to sanitize the HTML passed into markdownString.
|
||||
*/
|
||||
sanitizer?: ((html: string) => string) | null;
|
||||
|
||||
/**
|
||||
* Shows an HTML error message when rendering fails.
|
||||
*/
|
||||
silent?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Use smarter list behavior than the original markdown. May eventually be default with the old behavior moved into pedantic.
|
||||
*/
|
||||
smartLists?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Use "smart" typograhic punctuation for things like quotes and dashes.
|
||||
* @deprecated Deprecated in v5.0.0 use marked-smartypants to use "smart" typographic punctuation for things like quotes and dashes.
|
||||
*/
|
||||
smartypants?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* The tokenizer defines how to turn markdown text into tokens.
|
||||
*/
|
||||
tokenizer?: TokenizerObject | undefined | null;
|
||||
|
||||
/**
|
||||
* The walkTokens function gets called with every token.
|
||||
* Child tokens are called before moving on to sibling tokens.
|
||||
* Each token is passed by reference so updates are persisted when passed to the parser.
|
||||
* The return value of the function is ignored.
|
||||
*/
|
||||
walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
|
||||
/**
|
||||
* Generate closing slash for self-closing tags (<br/> instead of <br>)
|
||||
* @deprecated Deprecated in v5.0.0 use marked-xhtml to emit self-closing HTML tags for void elements (<br/>, <img/>, etc.) with a "/" as required by XHTML.
|
||||
*/
|
||||
xhtml?: boolean | undefined;
|
||||
}
|
||||
|
||||
export interface MarkedOptions extends Omit<MarkedExtension, 'extensions' | 'renderer' | 'tokenizer' | 'walkTokens'> {
|
||||
/**
|
||||
* Type: object Default: new Renderer()
|
||||
*
|
||||
* An object containing functions to render tokens to HTML.
|
||||
*/
|
||||
renderer?: Omit<_Renderer, 'constructor'> | undefined | null;
|
||||
|
||||
/**
|
||||
* The tokenizer defines how to turn markdown text into tokens.
|
||||
*/
|
||||
tokenizer?: Omit<_Tokenizer, 'constructor'> | undefined | null;
|
||||
|
||||
/**
|
||||
* The walkTokens function gets called with every token.
|
||||
* Child tokens are called before moving on to sibling tokens.
|
||||
* Each token is passed by reference so updates are persisted when passed to the parser.
|
||||
* The return value of the function is ignored.
|
||||
*/
|
||||
walkTokens?: ((token: Token) => void | Promise<void> | Array<void | Promise<void>>) | undefined | null;
|
||||
|
||||
/**
|
||||
* Add tokenizers and renderers to marked
|
||||
*/
|
||||
extensions?:
|
||||
| (TokenizerAndRendererExtension[] & {
|
||||
renderers: Record<string, (this: RendererThis, token: Tokens.Generic) => string | false | undefined>,
|
||||
childTokens: Record<string, string[]>,
|
||||
block: any[],
|
||||
inline: any[],
|
||||
startBlock: Array<(this: TokenizerThis, src: string) => number | void>,
|
||||
startInline: Array<(this: TokenizerThis, src: string) => number | void>
|
||||
})
|
||||
| undefined | null;
|
||||
}
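
A minimal usage sketch (hypothetical, not part of this commit) of the options surface above, leaning only on the documented `walkTokens` contract — child tokens are visited before siblings, and every token is passed by reference:

```ts
import { marked } from 'marked';

marked.use({
  walkTokens(token) {
    // Each token arrives by reference, so this mutation is seen by the parser.
    if (token.type === 'heading') {
      token.depth = Math.min(token.depth + 1, 6); // demote every heading one level
    }
  }
});

const html = marked.parse('# Hello\n\nSome *markdown*.'); // "# Hello" now renders as <h2>
```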
|
@ -1,44 +1,50 @@
|
||||
import { Renderer } from './Renderer.js';
|
||||
import { TextRenderer } from './TextRenderer.js';
|
||||
import { Slugger } from './Slugger.js';
|
||||
import { defaults } from './defaults.js';
|
||||
import { _Renderer } from './Renderer.ts';
|
||||
import { _TextRenderer } from './TextRenderer.ts';
|
||||
import { _Slugger } from './Slugger.ts';
|
||||
import { _defaults } from './defaults.ts';
|
||||
import {
|
||||
unescape
|
||||
} from './helpers.js';
|
||||
} from './helpers.ts';
|
||||
import type { Token, Tokens } from './Tokens.ts';
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
|
||||
/**
|
||||
* Parsing & Compiling
|
||||
*/
|
||||
export class Parser {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
this.options.renderer = this.options.renderer || new Renderer();
|
||||
export class _Parser {
|
||||
options: MarkedOptions;
|
||||
renderer: _Renderer;
|
||||
textRenderer: _TextRenderer;
|
||||
slugger: _Slugger;
|
||||
constructor(options?: MarkedOptions) {
|
||||
this.options = options || _defaults;
|
||||
this.options.renderer = this.options.renderer || new _Renderer();
|
||||
this.renderer = this.options.renderer;
|
||||
this.renderer.options = this.options;
|
||||
this.textRenderer = new TextRenderer();
|
||||
this.slugger = new Slugger();
|
||||
this.textRenderer = new _TextRenderer();
|
||||
this.slugger = new _Slugger();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static Parse Method
|
||||
*/
|
||||
static parse(tokens, options) {
|
||||
const parser = new Parser(options);
|
||||
static parse(tokens: Token[], options?: MarkedOptions) {
|
||||
const parser = new _Parser(options);
|
||||
return parser.parse(tokens);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static Parse Inline Method
|
||||
*/
|
||||
static parseInline(tokens, options) {
|
||||
const parser = new Parser(options);
|
||||
static parseInline(tokens: Token[], options?: MarkedOptions) {
|
||||
const parser = new _Parser(options);
|
||||
return parser.parseInline(tokens);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse Loop
|
||||
*/
|
||||
parse(tokens, top = true) {
|
||||
parse(tokens: Token[], top = true): string {
|
||||
let out = '',
|
||||
i,
|
||||
j,
|
||||
@ -83,16 +89,16 @@ export class Parser {
|
||||
}
|
||||
case 'heading': {
|
||||
out += this.renderer.heading(
|
||||
this.parseInline(token.tokens),
|
||||
this.parseInline(token.tokens) as string,
|
||||
token.depth,
|
||||
unescape(this.parseInline(token.tokens, this.textRenderer)),
|
||||
unescape(this.parseInline(token.tokens, this.textRenderer) as string),
|
||||
this.slugger);
|
||||
continue;
|
||||
}
|
||||
case 'code': {
|
||||
out += this.renderer.code(token.text,
|
||||
token.lang,
|
||||
token.escaped);
|
||||
!!token.escaped);
|
||||
continue;
|
||||
}
|
||||
case 'table': {
|
||||
@ -103,7 +109,7 @@ export class Parser {
|
||||
l2 = token.header.length;
|
||||
for (j = 0; j < l2; j++) {
|
||||
cell += this.renderer.tablecell(
|
||||
this.parseInline(token.header[j].tokens),
|
||||
this.parseInline(token.header[j].tokens)!,
|
||||
{ header: true, align: token.align[j] }
|
||||
);
|
||||
}
|
||||
@ -118,7 +124,7 @@ export class Parser {
|
||||
l3 = row.length;
|
||||
for (k = 0; k < l3; k++) {
|
||||
cell += this.renderer.tablecell(
|
||||
this.parseInline(row[k].tokens),
|
||||
this.parseInline(row[k].tokens)!,
|
||||
{ header: false, align: token.align[k] }
|
||||
);
|
||||
}
|
||||
@ -129,7 +135,7 @@ export class Parser {
|
||||
continue;
|
||||
}
|
||||
case 'blockquote': {
|
||||
body = this.parse(token.tokens);
|
||||
body = this.parse(token.tokens)!;
|
||||
out += this.renderer.blockquote(body);
|
||||
continue;
|
||||
}
|
||||
@ -147,7 +153,7 @@ export class Parser {
|
||||
|
||||
itemBody = '';
|
||||
if (item.task) {
|
||||
checkbox = this.renderer.checkbox(checked);
|
||||
checkbox = this.renderer.checkbox(!!checked);
|
||||
if (loose) {
|
||||
if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
|
||||
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
|
||||
@ -158,7 +164,7 @@ export class Parser {
|
||||
item.tokens.unshift({
|
||||
type: 'text',
|
||||
text: checkbox
|
||||
});
|
||||
} as Tokens.Text);
|
||||
}
|
||||
} else {
|
||||
itemBody += checkbox;
|
||||
@ -166,7 +172,7 @@ export class Parser {
|
||||
}
|
||||
|
||||
itemBody += this.parse(item.tokens, loose);
|
||||
body += this.renderer.listitem(itemBody, task, checked);
|
||||
body += this.renderer.listitem(itemBody, task, !!checked);
|
||||
}
|
||||
|
||||
out += this.renderer.list(body, ordered, start);
|
||||
@ -177,7 +183,7 @@ export class Parser {
|
||||
continue;
|
||||
}
|
||||
case 'paragraph': {
|
||||
out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||
out += this.renderer.paragraph(this.parseInline(token.tokens)!);
|
||||
continue;
|
||||
}
|
||||
case 'text': {
|
||||
@ -186,7 +192,7 @@ export class Parser {
|
||||
token = tokens[++i];
|
||||
body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
|
||||
}
|
||||
out += top ? this.renderer.paragraph(body) : body;
|
||||
out += top ? this.renderer.paragraph(body!) : body;
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -194,7 +200,7 @@ export class Parser {
|
||||
const errMsg = 'Token with "' + token.type + '" type was not found.';
|
||||
if (this.options.silent) {
|
||||
console.error(errMsg);
|
||||
return;
|
||||
return '';
|
||||
} else {
|
||||
throw new Error(errMsg);
|
||||
}
|
||||
@ -208,7 +214,7 @@ export class Parser {
|
||||
/**
|
||||
* Parse Inline Tokens
|
||||
*/
|
||||
parseInline(tokens, renderer) {
|
||||
parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string {
|
||||
renderer = renderer || this.renderer;
|
||||
let out = '',
|
||||
i,
|
||||
@ -238,7 +244,7 @@ export class Parser {
|
||||
break;
|
||||
}
|
||||
case 'link': {
|
||||
out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
|
||||
out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer)!);
|
||||
break;
|
||||
}
|
||||
case 'image': {
|
||||
@ -246,11 +252,11 @@ export class Parser {
|
||||
break;
|
||||
}
|
||||
case 'strong': {
|
||||
out += renderer.strong(this.parseInline(token.tokens, renderer));
|
||||
out += renderer.strong(this.parseInline(token.tokens, renderer)!);
|
||||
break;
|
||||
}
|
||||
case 'em': {
|
||||
out += renderer.em(this.parseInline(token.tokens, renderer));
|
||||
out += renderer.em(this.parseInline(token.tokens, renderer)!);
|
||||
break;
|
||||
}
|
||||
case 'codespan': {
|
||||
@ -262,7 +268,7 @@ export class Parser {
|
||||
break;
|
||||
}
|
||||
case 'del': {
|
||||
out += renderer.del(this.parseInline(token.tokens, renderer));
|
||||
out += renderer.del(this.parseInline(token.tokens, renderer)!);
|
||||
break;
|
||||
}
|
||||
case 'text': {
|
||||
@ -273,7 +279,7 @@ export class Parser {
|
||||
const errMsg = 'Token with "' + token.type + '" type was not found.';
|
||||
if (this.options.silent) {
|
||||
console.error(errMsg);
|
||||
return;
|
||||
return '';
|
||||
} else {
|
||||
throw new Error(errMsg);
|
||||
}
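
A small sketch (hypothetical, not part of this commit) of how the static helpers above are typically called; `Lexer.lex` is assumed to supply the token list:

```ts
import { Lexer, Parser } from 'marked';

const tokens = Lexer.lex('**bold** text');

const html = Parser.parse(tokens);                         // '<p><strong>bold</strong> text</p>\n'
const inline = Parser.parseInline(tokens[0].tokens ?? []); // '<strong>bold</strong> text'
```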
|
@ -1,19 +1,22 @@
|
||||
import { defaults } from './defaults.js';
|
||||
import { _defaults } from './defaults.ts';
|
||||
import {
|
||||
cleanUrl,
|
||||
escape
|
||||
} from './helpers.js';
|
||||
} from './helpers.ts';
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
import { Slugger } from './marked.ts';
|
||||
|
||||
/**
|
||||
* Renderer
|
||||
*/
|
||||
export class Renderer {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
export class _Renderer {
|
||||
options: MarkedOptions;
|
||||
constructor(options?: MarkedOptions) {
|
||||
this.options = options || _defaults;
|
||||
}
|
||||
|
||||
code(code, infostring, escaped) {
|
||||
const lang = (infostring || '').match(/\S*/)[0];
|
||||
code(code: string, infostring: string | undefined, escaped: boolean): string {
|
||||
const lang = (infostring || '').match(/\S*/)![0];
|
||||
if (this.options.highlight) {
|
||||
const out = this.options.highlight(code, lang);
|
||||
if (out != null && out !== code) {
|
||||
@ -38,24 +41,15 @@ export class Renderer {
|
||||
+ '</code></pre>\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} quote
|
||||
*/
|
||||
blockquote(quote) {
|
||||
blockquote(quote: string): string {
|
||||
return `<blockquote>\n${quote}</blockquote>\n`;
|
||||
}
|
||||
|
||||
html(html, block) {
|
||||
html(html: string, block?: boolean) : string {
|
||||
return html;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
* @param {string} level
|
||||
* @param {string} raw
|
||||
* @param {any} slugger
|
||||
*/
|
||||
heading(text, level, raw, slugger) {
|
||||
heading(text: string, level: number, raw: string, slugger: Slugger): string {
|
||||
if (this.options.headerIds) {
|
||||
const id = this.options.headerPrefix + slugger.slug(raw);
|
||||
return `<h${level} id="${id}">${text}</h${level}>\n`;
|
||||
@ -65,24 +59,21 @@ export class Renderer {
|
||||
return `<h${level}>${text}</h${level}>\n`;
|
||||
}
|
||||
|
||||
hr() {
|
||||
hr(): string {
|
||||
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
|
||||
}
|
||||
|
||||
list(body, ordered, start) {
|
||||
list(body: string, ordered: boolean, start: number | ''): string {
|
||||
const type = ordered ? 'ol' : 'ul',
|
||||
startatt = (ordered && start !== 1) ? (' start="' + start + '"') : '';
|
||||
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
listitem(text) {
|
||||
listitem(text: string, task: boolean, checked: boolean): string {
|
||||
return `<li>${text}</li>\n`;
|
||||
}
|
||||
|
||||
checkbox(checked) {
|
||||
checkbox(checked: boolean): string {
|
||||
return '<input '
|
||||
+ (checked ? 'checked="" ' : '')
|
||||
+ 'disabled="" type="checkbox"'
|
||||
@ -90,18 +81,11 @@ export class Renderer {
|
||||
+ '> ';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
paragraph(text) {
|
||||
paragraph(text: string): string {
|
||||
return `<p>${text}</p>\n`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} header
|
||||
* @param {string} body
|
||||
*/
|
||||
table(header, body) {
|
||||
table(header: string, body: string): string {
|
||||
if (body) body = `<tbody>${body}</tbody>`;
|
||||
|
||||
return '<table>\n'
|
||||
@ -112,14 +96,14 @@ export class Renderer {
|
||||
+ '</table>\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} content
|
||||
*/
|
||||
tablerow(content) {
|
||||
tablerow(content: string): string {
|
||||
return `<tr>\n${content}</tr>\n`;
|
||||
}
|
||||
|
||||
tablecell(content, flags) {
|
||||
tablecell(content: string, flags: {
|
||||
header: boolean;
|
||||
align: 'center' | 'left' | 'right' | null;
|
||||
}): string {
|
||||
const type = flags.header ? 'th' : 'td';
|
||||
const tag = flags.align
|
||||
? `<${type} align="${flags.align}">`
|
||||
@ -129,44 +113,29 @@ export class Renderer {
|
||||
|
||||
/**
|
||||
* span level renderer
|
||||
* @param {string} text
|
||||
*/
|
||||
strong(text) {
|
||||
strong(text: string): string {
|
||||
return `<strong>${text}</strong>`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
em(text) {
|
||||
em(text: string): string {
|
||||
return `<em>${text}</em>`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
codespan(text) {
|
||||
codespan(text: string): string {
|
||||
return `<code>${text}</code>`;
|
||||
}
|
||||
|
||||
br() {
|
||||
br(): string {
|
||||
return this.options.xhtml ? '<br/>' : '<br>';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
del(text) {
|
||||
del(text: string): string {
|
||||
return `<del>${text}</del>`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} href
|
||||
* @param {string} title
|
||||
* @param {string} text
|
||||
*/
|
||||
link(href, title, text) {
|
||||
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
link(href: string, title: string | null | undefined, text: string): string {
|
||||
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) as any;
|
||||
if (href === null) {
|
||||
return text;
|
||||
}
|
||||
@ -178,13 +147,8 @@ export class Renderer {
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} href
|
||||
* @param {string} title
|
||||
* @param {string} text
|
||||
*/
|
||||
image(href, title, text) {
|
||||
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
image(href: string, title: string | null, text: string): string {
|
||||
href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) as any;
|
||||
if (href === null) {
|
||||
return text;
|
||||
}
|
||||
@ -197,7 +161,7 @@ export class Renderer {
|
||||
return out;
|
||||
}
|
||||
|
||||
text(text) {
|
||||
text(text: string) : string {
|
||||
return text;
|
||||
}
|
||||
}
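
The typed signatures make renderer subclasses straightforward; a hypothetical sketch (not part of this commit) that routes `heading()` through the slugger:

```ts
import { marked, Renderer, Slugger } from 'marked';

class AnchorRenderer extends Renderer {
  heading(text: string, level: number, raw: string, slugger: Slugger): string {
    const id = slugger.slug(raw); // de-duplicated id derived from the raw heading text
    return `<h${level} id="${id}"><a href="#${id}">${text}</a></h${level}>\n`;
  }
}

const html = marked.parse('# Getting Started', { renderer: new AnchorRenderer() });
```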
|
@ -1,15 +1,16 @@
|
||||
import type { SluggerOptions } from './MarkedOptions.ts';
|
||||
|
||||
/**
|
||||
* Slugger generates header id
|
||||
*/
|
||||
export class Slugger {
|
||||
export class _Slugger {
|
||||
seen: { [slugValue: string]: number };
|
||||
|
||||
constructor() {
|
||||
this.seen = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} value
|
||||
*/
|
||||
serialize(value) {
|
||||
serialize(value: string) {
|
||||
return value
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
@ -22,10 +23,8 @@ export class Slugger {
|
||||
|
||||
/**
|
||||
* Finds the next safe (unique) slug to use
|
||||
* @param {string} originalSlug
|
||||
* @param {boolean} isDryRun
|
||||
*/
|
||||
getNextSafeSlug(originalSlug, isDryRun) {
|
||||
getNextSafeSlug(originalSlug: string, isDryRun: boolean | undefined) {
|
||||
let slug = originalSlug;
|
||||
let occurenceAccumulator = 0;
|
||||
if (this.seen.hasOwnProperty(slug)) {
|
||||
@ -44,11 +43,8 @@ export class Slugger {
|
||||
|
||||
/**
|
||||
* Convert string to unique id
|
||||
* @param {object} [options]
|
||||
* @param {boolean} [options.dryrun] Generates the next unique slug without
|
||||
* updating the internal accumulator.
|
||||
*/
|
||||
slug(value, options = {}) {
|
||||
slug(value: string, options: SluggerOptions = {}) {
|
||||
const slug = this.serialize(value);
|
||||
return this.getNextSafeSlug(slug, options.dryrun);
|
||||
}
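
How the slugger behaves in practice (hypothetical sketch, not part of this commit) — `slug()` de-duplicates, while `dryrun` previews the next id without recording it:

```ts
import { Slugger } from 'marked';

const slugger = new Slugger();
slugger.slug('My Heading');                   // 'my-heading'
slugger.slug('My Heading');                   // 'my-heading-1'
slugger.slug('My Heading', { dryrun: true }); // 'my-heading-2' (accumulator untouched)
slugger.slug('My Heading');                   // 'my-heading-2'
```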
|
@ -2,37 +2,37 @@
|
||||
* TextRenderer
|
||||
* returns only the textual part of the token
|
||||
*/
|
||||
export class TextRenderer {
|
||||
export class _TextRenderer {
|
||||
// no need for block level renderers
|
||||
strong(text) {
|
||||
strong(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
em(text) {
|
||||
em(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
codespan(text) {
|
||||
codespan(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
del(text) {
|
||||
del(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
html(text) {
|
||||
html(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
text(text) {
|
||||
text(text: string) {
|
||||
return text;
|
||||
}
|
||||
|
||||
link(href, title, text) {
|
||||
link(href: string, title: string | null | undefined, text: string) {
|
||||
return '' + text;
|
||||
}
|
||||
|
||||
image(href, title, text) {
|
||||
image(href: string, title: string | null, text: string) {
|
||||
return '' + text;
|
||||
}
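
A brief sketch (hypothetical, not part of this commit) of what the text renderer is for — producing the plain-text form of inline tokens, as the parser does when deriving heading ids:

```ts
import { Lexer, Parser, TextRenderer } from 'marked';

const [heading] = Lexer.lex('# *Hello* **world**');
const plain = new Parser().parseInline(heading.tokens ?? [], new TextRenderer());
// plain === 'Hello world' — all inline markup stripped
```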
|
||||
|
@ -1,19 +1,22 @@
|
||||
import { defaults } from './defaults.js';
|
||||
import { _defaults } from './defaults.ts';
|
||||
import {
|
||||
rtrim,
|
||||
splitCells,
|
||||
escape,
|
||||
findClosingBracket
|
||||
} from './helpers.js';
|
||||
} from './helpers.ts';
|
||||
import { _Lexer } from './Lexer.ts';
|
||||
import type { Links, Tokens } from './Tokens.ts';
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
|
||||
function outputLink(cap, link, raw, lexer) {
|
||||
function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, raw: string, lexer: _Lexer): Tokens.Link | Tokens.Image {
|
||||
const href = link.href;
|
||||
const title = link.title ? escape(link.title) : null;
|
||||
const text = cap[1].replace(/\\([\[\]])/g, '$1');
|
||||
|
||||
if (cap[0].charAt(0) !== '!') {
|
||||
lexer.state.inLink = true;
|
||||
const token = {
|
||||
const token: Tokens.Link = {
|
||||
type: 'link',
|
||||
raw,
|
||||
href,
|
||||
@ -33,7 +36,7 @@ function outputLink(cap, link, raw, lexer) {
|
||||
};
|
||||
}
|
||||
|
||||
function indentCodeCompensation(raw, text) {
|
||||
function indentCodeCompensation(raw: string, text: string) {
|
||||
const matchIndentToCode = raw.match(/^(\s+)(?:```)/);
|
||||
|
||||
if (matchIndentToCode === null) {
|
||||
@ -64,12 +67,16 @@ function indentCodeCompensation(raw, text) {
|
||||
/**
|
||||
* Tokenizer
|
||||
*/
|
||||
export class Tokenizer {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
export class _Tokenizer {
|
||||
options: MarkedOptions;
|
||||
rules: any;
|
||||
lexer!: _Lexer;
|
||||
|
||||
constructor(options?: MarkedOptions) {
|
||||
this.options = options || _defaults;
|
||||
}
|
||||
|
||||
space(src) {
|
||||
space(src: string): Tokens.Space | undefined {
|
||||
const cap = this.rules.block.newline.exec(src);
|
||||
if (cap && cap[0].length > 0) {
|
||||
return {
|
||||
@ -79,7 +86,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
code(src) {
|
||||
code(src: string): Tokens.Code | undefined {
|
||||
const cap = this.rules.block.code.exec(src);
|
||||
if (cap) {
|
||||
const text = cap[0].replace(/^ {1,4}/gm, '');
|
||||
@ -94,7 +101,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
fences(src) {
|
||||
fences(src: string): Tokens.Code | undefined {
|
||||
const cap = this.rules.block.fences.exec(src);
|
||||
if (cap) {
|
||||
const raw = cap[0];
|
||||
@ -109,7 +116,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
heading(src) {
|
||||
heading(src: string): Tokens.Heading | undefined {
|
||||
const cap = this.rules.block.heading.exec(src);
|
||||
if (cap) {
|
||||
let text = cap[2].trim();
|
||||
@ -135,7 +142,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
hr(src) {
|
||||
hr(src: string): Tokens.Hr | undefined {
|
||||
const cap = this.rules.block.hr.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -145,7 +152,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
blockquote(src) {
|
||||
blockquote(src: string): Tokens.Blockquote | undefined {
|
||||
const cap = this.rules.block.blockquote.exec(src);
|
||||
if (cap) {
|
||||
const text = cap[0].replace(/^ *>[ \t]?/gm, '');
|
||||
@ -162,7 +169,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
list(src) {
|
||||
list(src: string): Tokens.List | undefined {
|
||||
let cap = this.rules.block.list.exec(src);
|
||||
if (cap) {
|
||||
let raw, istask, ischecked, indent, i, blankLine, endsWithBlankLine,
|
||||
@ -171,13 +178,13 @@ export class Tokenizer {
|
||||
let bull = cap[1].trim();
|
||||
const isordered = bull.length > 1;
|
||||
|
||||
const list = {
|
||||
const list: Tokens.List = {
|
||||
type: 'list',
|
||||
raw: '',
|
||||
ordered: isordered,
|
||||
start: isordered ? +bull.slice(0, -1) : '',
|
||||
loose: false,
|
||||
items: []
|
||||
items: [] as Tokens.ListItem[]
|
||||
};
|
||||
|
||||
bull = isordered ? `\\d{1,9}\\${bull.slice(-1)}` : `\\${bull}`;
|
||||
@ -203,7 +210,7 @@ export class Tokenizer {
|
||||
raw = cap[0];
|
||||
src = src.substring(raw.length);
|
||||
|
||||
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t) => ' '.repeat(3 * t.length));
|
||||
line = cap[2].split('\n', 1)[0].replace(/^\t+/, (t: string) => ' '.repeat(3 * t.length));
|
||||
nextLine = src.split('\n', 1)[0];
|
||||
|
||||
if (this.options.pedantic) {
|
||||
@ -327,7 +334,7 @@ export class Tokenizer {
|
||||
|
||||
// Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
|
||||
list.items[list.items.length - 1].raw = raw.trimRight();
|
||||
list.items[list.items.length - 1].text = itemContents.trimRight();
|
||||
(list.items[list.items.length - 1] as Tokens.ListItem).text = itemContents.trimRight();
|
||||
list.raw = list.raw.trimRight();
|
||||
|
||||
const l = list.items.length;
|
||||
@ -339,8 +346,8 @@ export class Tokenizer {
|
||||
|
||||
if (!list.loose) {
|
||||
// Check if list should be loose
|
||||
const spacers = list.items[i].tokens.filter(t => t.type === 'space');
|
||||
const hasMultipleLineBreaks = spacers.length > 0 && spacers.some(t => /\n.*\n/.test(t.raw));
|
||||
const spacers = list.items[i].tokens!.filter(t => t.type === 'space');
|
||||
const hasMultipleLineBreaks = spacers.length > 0 && spacers.some(t => /\n.*\n/.test(t.raw!));
|
||||
|
||||
list.loose = hasMultipleLineBreaks;
|
||||
}
|
||||
@ -357,10 +364,10 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
html(src) {
|
||||
html(src: string): Tokens.HTML | Tokens.Paragraph | undefined {
|
||||
const cap = this.rules.block.html.exec(src);
|
||||
if (cap) {
|
||||
const token = {
|
||||
const token: Tokens.HTML | Tokens.Paragraph = {
|
||||
type: 'html',
|
||||
block: true,
|
||||
raw: cap[0],
|
||||
@ -370,15 +377,16 @@ export class Tokenizer {
|
||||
};
|
||||
if (this.options.sanitize) {
|
||||
const text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
|
||||
token.type = 'paragraph';
|
||||
token.text = text;
|
||||
token.tokens = this.lexer.inline(text);
|
||||
const paragraph = token as unknown as Tokens.Paragraph;
|
||||
paragraph.type = 'paragraph';
|
||||
paragraph.text = text;
|
||||
paragraph.tokens = this.lexer.inline(text);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
}
|
||||
|
||||
def(src) {
|
||||
def(src: string): Tokens.Def | undefined {
|
||||
const cap = this.rules.block.def.exec(src);
|
||||
if (cap) {
|
||||
const tag = cap[1].toLowerCase().replace(/\s+/g, ' ');
|
||||
@ -394,12 +402,16 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
table(src) {
|
||||
table(src: string): Tokens.Table | undefined {
|
||||
const cap = this.rules.block.table.exec(src);
|
||||
if (cap) {
|
||||
const item = {
|
||||
const item: Tokens.Table = {
|
||||
type: 'table',
|
||||
header: splitCells(cap[1]).map(c => { return { text: c }; }),
|
||||
// splitCells expects a number as second argument
|
||||
// @ts-expect-error
|
||||
header: splitCells(cap[1]).map(c => {
|
||||
return { text: c };
|
||||
}),
|
||||
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
|
||||
rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
|
||||
};
|
||||
@ -410,11 +422,11 @@ export class Tokenizer {
|
||||
let l = item.align.length;
|
||||
let i, j, k, row;
|
||||
for (i = 0; i < l; i++) {
|
||||
if (/^ *-+: *$/.test(item.align[i])) {
|
||||
if (/^ *-+: *$/.test(item.align[i]!)) {
|
||||
item.align[i] = 'right';
|
||||
} else if (/^ *:-+: *$/.test(item.align[i])) {
|
||||
} else if (/^ *:-+: *$/.test(item.align[i]!)) {
|
||||
item.align[i] = 'center';
|
||||
} else if (/^ *:-+ *$/.test(item.align[i])) {
|
||||
} else if (/^ *:-+ *$/.test(item.align[i]!)) {
|
||||
item.align[i] = 'left';
|
||||
} else {
|
||||
item.align[i] = null;
|
||||
@ -423,7 +435,9 @@ export class Tokenizer {
|
||||
|
||||
l = item.rows.length;
|
||||
for (i = 0; i < l; i++) {
|
||||
item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => { return { text: c }; });
|
||||
item.rows[i] = splitCells(item.rows[i] as unknown as string, item.header.length).map(c => {
|
||||
return { text: c };
|
||||
});
|
||||
}
|
||||
|
||||
// parse child tokens inside headers and cells
|
||||
@ -448,7 +462,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
lheading(src) {
|
||||
lheading(src: string): Tokens.Heading | undefined {
|
||||
const cap = this.rules.block.lheading.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -461,7 +475,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
paragraph(src) {
|
||||
paragraph(src: string): Tokens.Paragraph | undefined {
|
||||
const cap = this.rules.block.paragraph.exec(src);
|
||||
if (cap) {
|
||||
const text = cap[1].charAt(cap[1].length - 1) === '\n'
|
||||
@ -476,7 +490,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
text(src) {
|
||||
text(src: string): Tokens.Text | undefined {
|
||||
const cap = this.rules.block.text.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -488,7 +502,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
escape(src) {
|
||||
escape(src: string): Tokens.Escape | undefined {
|
||||
const cap = this.rules.inline.escape.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -499,7 +513,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
tag(src) {
|
||||
tag(src: string): Tokens.Tag | undefined {
|
||||
const cap = this.rules.inline.tag.exec(src);
|
||||
if (cap) {
|
||||
if (!this.lexer.state.inLink && /^<a /i.test(cap[0])) {
|
||||
@ -530,7 +544,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
link(src) {
|
||||
link(src: string): Tokens.Link | Tokens.Image | undefined {
|
||||
const cap = this.rules.inline.link.exec(src);
|
||||
if (cap) {
|
||||
const trimmedUrl = cap[2].trim();
|
||||
@ -586,10 +600,10 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
reflink(src, links) {
|
||||
reflink(src: string, links: Links): Tokens.Link | Tokens.Image | Tokens.Text | undefined {
|
||||
let cap;
|
||||
if ((cap = this.rules.inline.reflink.exec(src))
|
||||
|| (cap = this.rules.inline.nolink.exec(src))) {
|
||||
|| (cap = this.rules.inline.nolink.exec(src))) {
|
||||
let link = (cap[2] || cap[1]).replace(/\s+/g, ' ');
|
||||
link = links[link.toLowerCase()];
|
||||
if (!link) {
|
||||
@ -604,7 +618,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
emStrong(src, maskedSrc, prevChar = '') {
|
||||
emStrong(src: string, maskedSrc: string, prevChar = ''): Tokens.Em | Tokens.Strong | undefined {
|
||||
let match = this.rules.inline.emStrong.lDelim.exec(src);
|
||||
if (!match) return;
|
||||
|
||||
@ -672,7 +686,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
codespan(src) {
|
||||
codespan(src: string): Tokens.Codespan | undefined {
|
||||
const cap = this.rules.inline.code.exec(src);
|
||||
if (cap) {
|
||||
let text = cap[2].replace(/\n/g, ' ');
|
||||
@ -690,7 +704,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
br(src) {
|
||||
br(src: string): Tokens.Br | undefined {
|
||||
const cap = this.rules.inline.br.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -700,7 +714,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
del(src) {
|
||||
del(src: string): Tokens.Del | undefined {
|
||||
const cap = this.rules.inline.del.exec(src);
|
||||
if (cap) {
|
||||
return {
|
||||
@ -712,7 +726,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
autolink(src, mangle) {
|
||||
autolink(src: string, mangle: (cap: string) => string): Tokens.Link | undefined {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
if (cap) {
|
||||
let text, href;
|
||||
@ -740,7 +754,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
url(src, mangle) {
|
||||
url(src: string, mangle: (cap: string) => string): Tokens.Link | undefined {
|
||||
let cap;
|
||||
if (cap = this.rules.inline.url.exec(src)) {
|
||||
let text, href;
|
||||
@ -777,7 +791,7 @@ export class Tokenizer {
|
||||
}
|
||||
}
|
||||
|
||||
inlineText(src, smartypants) {
|
||||
inlineText(src: string, smartypants: (cap: string) => string): Tokens.Text | undefined {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
199
src/Tokens.ts
Normal file
@ -0,0 +1,199 @@
|
||||
/* eslint-disable no-use-before-define */
|
||||
export type Token = (Tokens.Space
|
||||
| Tokens.Code
|
||||
| Tokens.Heading
|
||||
| Tokens.Table
|
||||
| Tokens.Hr
|
||||
| Tokens.Blockquote
|
||||
| Tokens.List
|
||||
| Tokens.ListItem
|
||||
| Tokens.Paragraph
|
||||
| Tokens.HTML
|
||||
| Tokens.Text
|
||||
| Tokens.Def
|
||||
| Tokens.Escape
|
||||
| Tokens.Tag
|
||||
| Tokens.Image
|
||||
| Tokens.Link
|
||||
| Tokens.Strong
|
||||
| Tokens.Em
|
||||
| Tokens.Codespan
|
||||
| Tokens.Br
|
||||
| Tokens.Del) & { loose?: boolean, tokens?: Token[] };
|
||||
|
||||
export namespace Tokens {
|
||||
export interface Space {
|
||||
type: 'space';
|
||||
raw: string;
|
||||
}
|
||||
|
||||
export interface Code {
|
||||
type: 'code';
|
||||
raw: string;
|
||||
codeBlockStyle?: 'indented' | undefined;
|
||||
lang?: string | undefined;
|
||||
text: string;
|
||||
escaped?: boolean;
|
||||
}
|
||||
|
||||
export interface Heading {
|
||||
type: 'heading';
|
||||
raw: string;
|
||||
depth: number;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface Table {
|
||||
type: 'table';
|
||||
raw?: string;
|
||||
align: Array<'center' | 'left' | 'right' | null>;
|
||||
header: TableCell[];
|
||||
rows: TableCell[][];
|
||||
}
|
||||
|
||||
export interface TableCell {
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
|
||||
export interface Hr {
|
||||
type: 'hr';
|
||||
raw: string;
|
||||
}
|
||||
|
||||
export interface Blockquote {
|
||||
type: 'blockquote';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface List {
|
||||
type: 'list';
|
||||
raw: string;
|
||||
ordered: boolean;
|
||||
start: number | '';
|
||||
loose: boolean;
|
||||
items: ListItem[];
|
||||
}
|
||||
|
||||
export interface ListItem {
|
||||
type: 'list_item';
|
||||
raw: string;
|
||||
task: boolean;
|
||||
checked?: boolean | undefined;
|
||||
loose: boolean;
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
|
||||
export interface Paragraph {
|
||||
type: 'paragraph';
|
||||
raw: string;
|
||||
pre?: boolean | undefined;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface HTML {
|
||||
type: 'html';
|
||||
raw: string;
|
||||
pre: boolean;
|
||||
text: string;
|
||||
block: boolean;
|
||||
}
|
||||
|
||||
export interface Text {
|
||||
type: 'text';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens?: Token[];
|
||||
}
|
||||
|
||||
export interface Def {
|
||||
type: 'def';
|
||||
raw: string;
|
||||
tag: string;
|
||||
href: string;
|
||||
title: string;
|
||||
}
|
||||
|
||||
export interface Escape {
|
||||
type: 'escape';
|
||||
raw: string;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface Tag {
|
||||
type: 'text' | 'html';
|
||||
raw: string;
|
||||
inLink: boolean;
|
||||
inRawBlock: boolean;
|
||||
text: string;
|
||||
block: boolean;
|
||||
}
|
||||
|
||||
export interface Link {
|
||||
type: 'link';
|
||||
raw: string;
|
||||
href: string;
|
||||
title?: string | null;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface Image {
|
||||
type: 'image';
|
||||
raw: string;
|
||||
href: string;
|
||||
title: string | null;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface Strong {
|
||||
type: 'strong';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface Em {
|
||||
type: 'em';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface Codespan {
|
||||
type: 'codespan';
|
||||
raw: string;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface Br {
|
||||
type: 'br';
|
||||
raw: string;
|
||||
}
|
||||
|
||||
export interface Del {
|
||||
type: 'del';
|
||||
raw: string;
|
||||
text: string;
|
||||
tokens: Token[];
|
||||
}
|
||||
|
||||
export interface Generic {
|
||||
[index: string]: any;
|
||||
|
||||
type: string;
|
||||
raw: string;
|
||||
tokens?: Token[] | undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export type Links = Record<string, Pick<Tokens.Link | Tokens.Image, 'href' | 'title'>>;
|
||||
|
||||
export type TokensList = Token[] & {
|
||||
links: Links;
|
||||
};
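
Because `Token` is a discriminated union, consumers can narrow on `type` without casts; a hypothetical sketch (not part of this commit):

```ts
import { marked } from 'marked';
import type { Token } from 'marked';

function collectLinkHrefs(tokens: Token[], out: string[] = []): string[] {
  for (const token of tokens) {
    if (token.type === 'link') {
      out.push(token.href); // narrowed to Tokens.Link here
    }
    if (token.tokens) {
      collectLinkHrefs(token.tokens, out); // recurse into child tokens
    }
  }
  return out;
}

const hrefs = collectLinkHrefs(marked.lexer('See [the docs](https://marked.js.org).'));
```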
|
@ -1,4 +1,9 @@
|
||||
export function getDefaults() {
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
|
||||
/**
|
||||
* Gets the original marked default options.
|
||||
*/
|
||||
export function _getDefaults(): MarkedOptions {
|
||||
return {
|
||||
async: false,
|
||||
baseUrl: null,
|
||||
@ -23,8 +28,8 @@ export function getDefaults() {
|
||||
};
|
||||
}
|
||||
|
||||
export let defaults = getDefaults();
|
||||
export let _defaults = _getDefaults();
|
||||
|
||||
export function changeDefaults(newDefaults) {
|
||||
defaults = newDefaults;
|
||||
export function changeDefaults(newDefaults: MarkedOptions) {
|
||||
_defaults = newDefaults;
|
||||
}
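
Since `_defaults` is shared module state, the supported way to reset it from user code is the public API; a hypothetical sketch (not part of this commit):

```ts
import { marked } from 'marked';

marked.setOptions({ breaks: true });     // adjust the shared defaults
marked.setOptions(marked.getDefaults()); // restore the originals
```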
|
@ -1,3 +1,7 @@
|
||||
import type { MarkedOptions } from './MarkedOptions.ts';
|
||||
import type { ResultCallback } from './marked.ts';
|
||||
import type { Rule } from './rules.ts';
|
||||
|
||||
/**
|
||||
* Helpers
|
||||
*/
|
||||
@ -12,8 +16,9 @@ const escapeReplacements = {
|
||||
'"': '&quot;',
|
||||
"'": '&#39;'
|
||||
};
|
||||
const getEscapeReplacement = (ch) => escapeReplacements[ch];
|
||||
export function escape(html, encode) {
|
||||
const getEscapeReplacement = (ch: string) => escapeReplacements[ch];
|
||||
|
||||
export function escape(html: string, encode?: boolean) {
|
||||
if (encode) {
|
||||
if (escapeTest.test(html)) {
|
||||
return html.replace(escapeReplace, getEscapeReplacement);
|
||||
@ -29,10 +34,7 @@ export function escape(html, encode) {
|
||||
|
||||
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;
|
||||
|
||||
/**
|
||||
* @param {string} html
|
||||
*/
|
||||
export function unescape(html) {
|
||||
export function unescape(html: string) {
|
||||
// explicitly match decimal, hex, and named HTML entities
|
||||
return html.replace(unescapeTest, (_, n) => {
|
||||
n = n.toLowerCase();
|
||||
@ -48,18 +50,14 @@ export function unescape(html) {
|
||||
|
||||
const caret = /(^|[^\[])\^/g;
|
||||
|
||||
/**
|
||||
* @param {string | RegExp} regex
|
||||
* @param {string} opt
|
||||
*/
|
||||
export function edit(regex, opt) {
|
||||
export function edit(regex: Rule, opt?: string) {
|
||||
regex = typeof regex === 'string' ? regex : regex.source;
|
||||
opt = opt || '';
|
||||
const obj = {
|
||||
replace: (name, val) => {
|
||||
val = val.source || val;
|
||||
replace: (name: string | RegExp, val: string | RegExp) => {
|
||||
val = typeof val === 'object' && 'source' in val ? val.source : val;
|
||||
val = val.replace(caret, '$1');
|
||||
regex = regex.replace(name, val);
|
||||
regex = (regex as string).replace(name, val);
|
||||
return obj;
|
||||
},
|
||||
getRegex: () => {
|
||||
@ -72,12 +70,7 @@ export function edit(regex, opt) {
|
||||
const nonWordAndColonTest = /[^\w:]/g;
|
||||
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
|
||||
|
||||
/**
|
||||
* @param {boolean} sanitize
|
||||
* @param {string} base
|
||||
* @param {string} href
|
||||
*/
|
||||
export function cleanUrl(sanitize, base, href) {
|
||||
export function cleanUrl(sanitize: boolean | undefined, base: string | undefined | null, href: string) {
|
||||
if (sanitize) {
|
||||
let prot;
|
||||
try {
|
||||
@ -102,16 +95,12 @@ export function cleanUrl(sanitize, base, href) {
|
||||
return href;
|
||||
}
|
||||
|
||||
const baseUrls = {};
|
||||
const baseUrls: Record<string, string> = {};
|
||||
const justDomain = /^[^:]+:\/*[^/]*$/;
|
||||
const protocol = /^([^:]+:)[\s\S]*$/;
|
||||
const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/;
|
||||
|
||||
/**
|
||||
* @param {string} base
|
||||
* @param {string} href
|
||||
*/
|
||||
export function resolveUrl(base, href) {
|
||||
export function resolveUrl(base: string, href: string) {
|
||||
if (!baseUrls[' ' + base]) {
|
||||
// we can ignore everything in base after the last slash of its path component,
|
||||
// but we might need to add _that_
|
||||
@ -140,9 +129,9 @@ export function resolveUrl(base, href) {
|
||||
}
|
||||
}
|
||||
|
||||
export const noopTest = { exec: function noopTest() {} };
|
||||
export const noopTest = { exec: () => null };
|
||||
|
||||
export function splitCells(tableRow, count) {
|
||||
export function splitCells(tableRow: string, count: number) {
|
||||
// ensure that every cell-delimiting pipe has a space
|
||||
// before it to distinguish it from an escaped pipe
|
||||
const row = tableRow.replace(/\|/g, (match, offset, str) => {
|
||||
@ -162,8 +151,12 @@ export function splitCells(tableRow, count) {
|
||||
let i = 0;
|
||||
|
||||
// First/last cell in a row cannot be empty if it has no leading/trailing pipe
|
||||
if (!cells[0].trim()) { cells.shift(); }
|
||||
if (cells.length > 0 && !cells[cells.length - 1].trim()) { cells.pop(); }
|
||||
if (!cells[0].trim()) {
|
||||
cells.shift();
|
||||
}
|
||||
if (cells.length > 0 && !cells[cells.length - 1].trim()) {
|
||||
cells.pop();
|
||||
}
|
||||
|
||||
if (cells.length > count) {
|
||||
cells.splice(count);
|
||||
@ -182,11 +175,11 @@ export function splitCells(tableRow, count) {
|
||||
* Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
|
||||
* /c*$/ is vulnerable to REDOS.
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {string} c
|
||||
* @param {boolean} invert Remove suffix of non-c chars instead. Default falsey.
|
||||
* @param str
|
||||
* @param c
|
||||
* @param invert Remove suffix of non-c chars instead. Default falsey.
|
||||
*/
|
||||
export function rtrim(str, c, invert) {
|
||||
export function rtrim(str: string, c: string, invert?: boolean) {
|
||||
const l = str.length;
|
||||
if (l === 0) {
|
||||
return '';
|
||||
@ -210,7 +203,7 @@ export function rtrim(str, c, invert) {
|
||||
return str.slice(0, l - suffLen);
|
||||
}
|
||||
|
||||
export function findClosingBracket(str, b) {
|
||||
export function findClosingBracket(str: string, b: string) {
|
||||
if (str.indexOf(b[1]) === -1) {
|
||||
return -1;
|
||||
}
|
||||
@ -232,7 +225,7 @@ export function findClosingBracket(str, b) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
export function checkDeprecations(opt, callback) {
|
||||
export function checkDeprecations(opt: MarkedOptions, callback?: ResultCallback) {
|
||||
if (!opt || opt.silent) {
|
||||
return;
|
||||
}
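
A sketch of the `edit()` builder used throughout the rules module (hypothetical, not part of this commit; it imports an internal helper rather than the public API):

```ts
import { edit } from './helpers.ts';

// Build a rule from a template: swap the 'text' placeholder, then compile.
const heading = edit(/^(#{1,6}) (text)/)
  .replace('text', /[^\n]+/)
  .getRegex(); // equivalent to /^(#{1,6}) ([^\n]+)/

heading.test('## Hello'); // true
```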
|
@ -1,91 +0,0 @@
|
||||
import { Lexer } from './Lexer.js';
|
||||
import { Parser } from './Parser.js';
|
||||
import { Tokenizer } from './Tokenizer.js';
|
||||
import { Renderer } from './Renderer.js';
|
||||
import { TextRenderer } from './TextRenderer.js';
|
||||
import { Slugger } from './Slugger.js';
|
||||
import { Hooks } from './Hooks.js';
|
||||
import { Marked } from './Instance.js';
|
||||
import { changeDefaults, getDefaults, defaults } from './defaults.js';
|
||||
|
||||
const markedInstance = new Marked(defaults);
|
||||
|
||||
/**
|
||||
* Marked
|
||||
*/
|
||||
export function marked(src, opt, callback) {
|
||||
return markedInstance.parse(src, opt, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Options
|
||||
*/
|
||||
|
||||
marked.options =
|
||||
marked.setOptions = function(opt) {
|
||||
markedInstance.setOptions(opt);
|
||||
marked.defaults = markedInstance.defaults;
|
||||
changeDefaults(marked.defaults);
|
||||
return marked;
|
||||
};
|
||||
|
||||
marked.getDefaults = getDefaults;
|
||||
|
||||
marked.defaults = defaults;
|
||||
|
||||
/**
|
||||
* Use Extension
|
||||
*/
|
||||
|
||||
marked.use = function(...args) {
|
||||
markedInstance.use(...args);
|
||||
marked.defaults = markedInstance.defaults;
|
||||
changeDefaults(marked.defaults);
|
||||
return marked;
|
||||
};
|
||||
|
||||
/**
|
||||
* Run callback for every token
|
||||
*/
|
||||
|
||||
marked.walkTokens = function(tokens, callback) {
|
||||
return markedInstance.walkTokens(tokens, callback);
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse Inline
|
||||
* @param {string} src
|
||||
*/
|
||||
marked.parseInline = markedInstance.parseInline;
|
||||
|
||||
/**
|
||||
* Expose
|
||||
*/
|
||||
marked.Parser = Parser;
|
||||
marked.parser = Parser.parse;
|
||||
marked.Renderer = Renderer;
|
||||
marked.TextRenderer = TextRenderer;
|
||||
marked.Lexer = Lexer;
|
||||
marked.lexer = Lexer.lex;
|
||||
marked.Tokenizer = Tokenizer;
|
||||
marked.Slugger = Slugger;
|
||||
marked.Hooks = Hooks;
|
||||
marked.parse = marked;
|
||||
|
||||
export const options = marked.options;
|
||||
export const setOptions = marked.setOptions;
|
||||
export const use = marked.use;
|
||||
export const walkTokens = marked.walkTokens;
|
||||
export const parseInline = marked.parseInline;
|
||||
export const parse = marked;
|
||||
export const parser = Parser.parse;
|
||||
export const lexer = Lexer.lex;
|
||||
export { defaults, getDefaults } from './defaults.js';
|
||||
export { Lexer } from './Lexer.js';
|
||||
export { Parser } from './Parser.js';
|
||||
export { Tokenizer } from './Tokenizer.js';
|
||||
export { Renderer } from './Renderer.js';
|
||||
export { TextRenderer } from './TextRenderer.js';
|
||||
export { Slugger } from './Slugger.js';
|
||||
export { Hooks } from './Hooks.js';
|
||||
export { Marked } from './Instance.js';
|
144
src/marked.ts
Normal file
@ -0,0 +1,144 @@
|
||||
import { _Lexer } from './Lexer.ts';
|
||||
import { _Parser } from './Parser.ts';
|
||||
import { _Tokenizer } from './Tokenizer.ts';
|
||||
import { _Renderer } from './Renderer.ts';
|
||||
import { _TextRenderer } from './TextRenderer.ts';
|
||||
import { _Slugger } from './Slugger.ts';
|
||||
import { _Hooks } from './Hooks.ts';
|
||||
import { Marked } from './Instance.ts';
|
||||
import {
|
||||
_getDefaults,
|
||||
changeDefaults,
|
||||
_defaults
|
||||
} from './defaults.ts';
|
||||
import type { MarkedExtension, MarkedOptions } from './MarkedOptions.ts';
|
||||
import type { Token, TokensList } from './Tokens.ts';
|
||||
|
||||
export type ResultCallback = (error: Error | null, parseResult?: string) => undefined | void;
|
||||
|
||||
const markedInstance = new Marked();
|
||||
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Hash of options, having async: true
|
||||
* @return Promise of string of compiled HTML
|
||||
*/
|
||||
export function marked(src: string, options: MarkedOptions & { async: true }): Promise<string>;
|
||||
|
||||
/**
|
||||
* Compiles markdown to HTML synchronously.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Optional hash of options
|
||||
* @return String of compiled HTML
|
||||
*/
|
||||
export function marked(src: string, options?: MarkedOptions): string;
|
||||
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously with a callback.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param callback Function called when the markdownString has been fully parsed when using async highlighting
|
||||
*/
|
||||
export function marked(src: string, callback: ResultCallback): void;
|
||||
|
||||
/**
|
||||
* Compiles markdown to HTML asynchronously with a callback.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Hash of options
|
||||
* @param callback Function called when the markdownString has been fully parsed when using async highlighting
|
||||
*/
|
||||
export function marked(
|
||||
src: string,
|
||||
options: MarkedOptions,
|
||||
callback: ResultCallback,
|
||||
): void;
|
||||
export function marked(src: string, opt?: MarkedOptions | ResultCallback, callback?: ResultCallback): string | Promise<string | undefined> | undefined {
|
||||
return markedInstance.parse(src, opt, callback);
|
||||
}
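
The overloads above resolve to different return types depending on the call shape; a hypothetical sketch (not part of this commit), using the `marked()` function defined above:

```ts
const html = marked('# Sync');                      // string
const pending = marked('# Async', { async: true }); // Promise<string>
marked('# Callback', (err, out) => {                // void, callback style
  if (err) throw err;
  console.log(out);
});
```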
|
||||
|
||||
/**
|
||||
* Sets the default options.
|
||||
*
|
||||
* @param options Hash of options
|
||||
*/
|
||||
marked.options =
|
||||
marked.setOptions = function(options: MarkedOptions) {
|
||||
markedInstance.setOptions(options);
|
||||
marked.defaults = markedInstance.defaults;
|
||||
changeDefaults(marked.defaults);
|
||||
return marked;
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the original marked default options.
|
||||
*/
|
||||
marked.getDefaults = _getDefaults;
|
||||
|
||||
marked.defaults = _defaults;
|
||||
|
||||
/**
|
||||
* Use Extension
|
||||
*/
|
||||
|
||||
marked.use = function(...args: MarkedExtension[]) {
|
||||
markedInstance.use(...args);
|
||||
marked.defaults = markedInstance.defaults;
|
||||
changeDefaults(marked.defaults);
|
||||
return marked;
|
||||
};
|
||||
|
||||
/**
|
||||
* Run callback for every token
|
||||
*/
|
||||
|
||||
marked.walkTokens = function <T = void>(tokens: Token[] | TokensList, callback: (token: Token) => T | T[]) {
|
||||
return markedInstance.walkTokens(tokens, callback);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compiles markdown to HTML without enclosing `p` tag.
|
||||
*
|
||||
* @param src String of markdown source to be compiled
|
||||
* @param options Hash of options
|
||||
* @return String of compiled HTML
|
||||
*/
|
||||
marked.parseInline = markedInstance.parseInline;
|
||||
|
||||
/**
|
||||
* Expose
|
||||
*/
|
||||
marked.Parser = _Parser;
|
||||
marked.parser = _Parser.parse;
|
||||
marked.Renderer = _Renderer;
|
||||
marked.TextRenderer = _TextRenderer;
|
||||
marked.Lexer = _Lexer;
|
||||
marked.lexer = _Lexer.lex;
|
||||
marked.Tokenizer = _Tokenizer;
|
||||
marked.Slugger = _Slugger;
|
||||
marked.Hooks = _Hooks;
|
||||
marked.parse = marked;
|
||||
|
||||
export const options = marked.options;
|
||||
export const setOptions = marked.setOptions;
|
||||
export const use = marked.use;
|
||||
export const walkTokens = marked.walkTokens;
|
||||
export const parseInline = marked.parseInline;
|
||||
export const parse = marked;
|
||||
export const parser = _Parser.parse;
|
||||
export const lexer = _Lexer.lex;
|
||||
export { _defaults as defaults, _getDefaults as getDefaults } from './defaults.ts';
|
||||
export { _Lexer as Lexer } from './Lexer.ts';
|
||||
export { _Parser as Parser } from './Parser.ts';
|
||||
export { _Tokenizer as Tokenizer } from './Tokenizer.ts';
|
||||
export { _Renderer as Renderer } from './Renderer.ts';
|
||||
export { _TextRenderer as TextRenderer } from './TextRenderer.ts';
|
||||
export { _Slugger as Slugger } from './Slugger.ts';
|
||||
export { _Hooks as Hooks } from './Hooks.ts';
|
||||
export { Marked } from './Instance.ts';
|
||||
export type * from './MarkedOptions.ts';
|
||||
export type * from './rules.ts';
|
||||
export type * from './Tokens.ts';
|
@ -1,12 +1,73 @@
|
||||
import {
|
||||
noopTest,
|
||||
edit
|
||||
} from './helpers.js';
|
||||
} from './helpers.ts';
|
||||
|
||||
export type Rule = RegExp | string;
|
||||
|
||||
export interface Rules {
|
||||
[ruleName: string]: Pick<RegExp, 'exec'> | Rule | Rules;
|
||||
}
|
||||
|
||||
type BlockRuleNames =
|
||||
| 'newline'
|
||||
| 'code'
|
||||
| 'fences'
|
||||
| 'hr'
|
||||
| 'heading'
|
||||
| 'blockquote'
|
||||
| 'list'
|
||||
| 'html'
|
||||
| 'def'
|
||||
| 'lheading'
|
||||
| '_paragraph'
|
||||
| 'text'
|
||||
| '_label'
|
||||
| '_title'
|
||||
| 'bullet'
|
||||
| 'listItemStart'
|
||||
| '_tag'
|
||||
| '_comment'
|
||||
| 'paragraph'
|
||||
| 'uote' ;
|
||||
|
||||
type BlockSubRuleNames = 'normal' | 'gfm' | 'pedantic';
|
||||
|
||||
type InlineRuleNames =
|
||||
| 'escape'
|
||||
| 'autolink'
|
||||
| 'tag'
|
||||
| 'link'
|
||||
| 'reflink'
|
||||
| 'nolink'
|
||||
| 'reflinkSearch'
|
||||
| 'code'
|
||||
| 'br'
|
||||
| 'text'
|
||||
| '_punctuation'
|
||||
| 'punctuation'
|
||||
| 'blockSkip'
|
||||
| 'escapedEmSt'
|
||||
| '_comment'
|
||||
| '_escapes'
|
||||
| '_scheme'
|
||||
| '_email'
|
||||
| '_attribute'
|
||||
| '_label'
|
||||
| '_href'
|
||||
| '_title'
|
||||
| 'strong'
|
||||
| '_extended_email'
|
||||
| '_backpedal';
|
||||
|
||||
type InlineSubRuleNames = 'gfm' | 'emStrong' | 'normal' | 'pedantic'| 'breaks';
|
||||
|
||||
/**
|
||||
* Block-Level Grammar
|
||||
*/
|
||||
export const block = {
|
||||
// Not all rules are defined in the object literal
|
||||
// @ts-expect-error
|
||||
export const block: Record<BlockRuleNames, Rule> & Record<BlockSubRuleNames, Rules> & Rules = {
|
||||
newline: /^(?: *(?:\n|$))+/,
|
||||
code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
|
||||
fences: /^ {0,3}(`{3,}(?=[^`\n]*(?:\n|$))|~{3,})([^\n]*)(?:\n|$)(?:|([\s\S]*?)(?:\n|$))(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
|
||||
@ -101,7 +162,7 @@ block.gfm = {
|
||||
+ '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells
|
||||
};
|
||||
|
||||
block.gfm.table = edit(block.gfm.table)
|
||||
block.gfm.table = edit(block.gfm.table as Rule)
|
||||
.replace('hr', block.hr)
|
||||
.replace('heading', ' {0,3}#{1,6} ')
|
||||
.replace('blockquote', ' {0,3}>')
|
||||
@ -116,7 +177,7 @@ block.gfm.paragraph = edit(block._paragraph)
|
||||
.replace('hr', block.hr)
|
||||
.replace('heading', ' {0,3}#{1,6} ')
|
||||
.replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
|
||||
.replace('table', block.gfm.table) // interrupt paragraphs with table
|
||||
.replace('table', block.gfm.table as RegExp) // interrupt paragraphs with table
|
||||
.replace('blockquote', ' {0,3}>')
|
||||
.replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
|
||||
.replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
|
||||
@ -143,7 +204,7 @@ block.pedantic = {
|
||||
heading: /^(#{1,6})(.*)(?:\n+|$)/,
|
||||
fences: noopTest, // fences not supported
|
||||
lheading: /^(.+?)\n {0,3}(=+|-+) *(?:\n+|$)/,
|
||||
paragraph: edit(block.normal._paragraph)
|
||||
paragraph: edit(block.normal._paragraph as Rule)
|
||||
.replace('hr', block.hr)
|
||||
.replace('heading', ' *#{1,6} *[^\n]')
|
||||
.replace('lheading', block.lheading)
|
||||
@ -157,7 +218,9 @@ block.pedantic = {
|
||||
/**
|
||||
* Inline-Level Grammar
|
||||
*/
|
||||
export const inline = {
|
||||
// Not all rules are defined in the object literal
|
||||
// @ts-expect-error
|
||||
export const inline: Record<InlineRuleNames, Rule> & Record<InlineSubRuleNames, Rules> & Rules = {
|
||||
escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
|
||||
autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
|
||||
url: noopTest,
|
||||
@ -196,19 +259,19 @@ inline._escapes = /\\([punct])/g;
|
||||
|
||||
inline._comment = edit(block._comment).replace('(?:-->|$)', '-->').getRegex();
|
||||
|
||||
inline.emStrong.lDelim = edit(inline.emStrong.lDelim, 'u')
|
||||
inline.emStrong.lDelim = edit(inline.emStrong.lDelim as Rule, 'u')
|
||||
.replace(/punct/g, inline._punctuation)
|
||||
.getRegex();
|
||||
|
||||
inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst, 'gu')
|
||||
inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst as Rule, 'gu')
|
||||
.replace(/punct/g, inline._punctuation)
|
||||
.getRegex();
|
||||
|
||||
inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd, 'gu')
|
||||
inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd as Rule, 'gu')
|
||||
.replace(/punct/g, inline._punctuation)
|
||||
.getRegex();
|
||||
|
||||
inline.anyPunctuation = edit(inline.anyPunctuation, 'gu')
|
||||
inline.anyPunctuation = edit(inline.anyPunctuation as Rule, 'gu')
|
||||
.replace(/punct/g, inline._punctuation)
|
||||
.getRegex();
|
||||
|
||||
@ -300,8 +363,8 @@ inline.gfm = {
|
||||
text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/
|
||||
};
|
||||
|
||||
inline.gfm.url = edit(inline.gfm.url, 'i')
|
||||
.replace('email', inline.gfm._extended_email)
|
||||
inline.gfm.url = edit(inline.gfm.url as Rule, 'i')
|
||||
.replace('email', inline.gfm._extended_email as RegExp)
|
||||
.getRegex();
|
||||
/**
|
||||
* GFM + Line Breaks Inline Grammar
|
||||
@ -310,7 +373,7 @@ inline.gfm.url = edit(inline.gfm.url, 'i')
|
||||
inline.breaks = {
|
||||
...inline.gfm,
|
||||
br: edit(inline.br).replace('{2,}', '*').getRegex(),
|
||||
text: edit(inline.gfm.text)
|
||||
text: edit(inline.gfm.text as Rule)
|
||||
.replace('\\b_', '\\b_| {2,}\\n')
|
||||
.replace(/\{2,\}/g, '*')
|
||||
.getRegex()
|
@ -1,4 +1,4 @@
|
||||
import { Marked, setOptions, getDefaults } from '../../src/marked.js';
|
||||
import { Marked, setOptions, getDefaults } from '../../src/marked.ts';
|
||||
import { isEqual, firstDiff } from './html-differ.js';
|
||||
import { strictEqual } from 'assert';
|
||||
|
||||
|
348
test/types/marked.ts
Normal file
@ -0,0 +1,348 @@
/* eslint-disable */
import { marked } from 'marked';
import { expectType } from 'ts-expect';

// other exports

import { Lexer, Parser, Tokenizer, Renderer, TextRenderer, Slugger } from 'marked';
import type { Tokens, MarkedExtension, TokenizerAndRendererExtension, Token, TokenizerExtension, MarkedOptions, TokensList, Rules, RendererExtension } from 'marked';

const tokenizer = new marked.Tokenizer();

tokenizer.emStrong = function emStrong(src, _maskedSrc, _prevChar) {
  const token: Tokens.Strong = {
    type: 'strong',
    text: src,
    raw: src,
    tokens: []
  };

  this.lexer.inline(token.text, token.tokens);

  return token;
};

tokenizer.inlineText = function inlineText(...args: Parameters<Tokenizer['inlineText']>) {
  const p = this.inlineText(...args);

  if (p) p.raw = p.text;

  return p;
};

let options: MarkedOptions = {
  baseUrl: '',
  gfm: true,
  breaks: false,
  pedantic: false,
  sanitize: true,
  smartLists: true,
  silent: false,
  highlight(code: string, lang: string | undefined) {
    return '';
  },
  langPrefix: 'lang-',
  smartypants: false,
  tokenizer,
  renderer: new marked.Renderer(),
  walkTokens: token => {
    if (token.type === 'heading') {
      token.depth += 1;
    }
  }
};

options.highlight = (code: string, lang: string | undefined, callback?: (error: any, code?: string) => string | void) => {
  callback?.(new Error());
  callback?.(null, '');
};

options = marked.getDefaults();
options = marked.defaults;

function callback(err: Error | null, markdown: string | undefined) {
  console.log('Callback called!');
  console.log(markdown);
}

let myOldMarked: typeof marked = marked.options(options);
myOldMarked = marked.setOptions(options);

console.log(marked('1) I am using __markdown__.'));
console.log(marked('2) I am using __markdown__.', options));
marked('3) I am using __markdown__.', callback);
marked('4) I am using __markdown__.', options, callback);

console.log(marked.parse('5) I am using __markdown__.'));
console.log(marked.parse('6) I am using __markdown__.', options));
marked.parse('7) I am using __markdown__.', callback);
marked.parse('8) I am using __markdown__.', options, callback);

console.log(marked.parseInline('9) I am using __markdown__.'));
console.log(marked.parseInline('10) I am using __markdown__.', options));

const text = 'Something';
const tokens: TokensList = marked.lexer(text, options);
console.log(marked.parser(tokens));

const lexer = new marked.Lexer(options);
const tokens2 = lexer.lex(text);
console.log(tokens2);
const tokens3 = lexer.inlineTokens(text, tokens);
console.log(tokens3);
// verifying that the second param to inlineTokens can be ignored
const tokens3a = lexer.inlineTokens(text);
console.log(tokens3a);
const re: Rules = marked.Lexer.rules;
const lexerOptions: MarkedOptions = lexer.options;

const renderer = new marked.Renderer();
renderer.heading = (text, level, raw, slugger) => {
  return text + level.toString() + slugger.slug(raw);
};
renderer.hr = () => {
  return `<hr${renderer.options.xhtml ? '/' : ''}>\n`;
};
renderer.checkbox = checked => {
  return checked ? 'CHECKED' : 'UNCHECKED';
};

class ExtendedRenderer extends marked.Renderer {
  code = (code: string, language: string | undefined, isEscaped: boolean): string => super.code(code, language, isEscaped);
  blockquote = (quote: string): string => super.blockquote(quote);
  html = (html: string): string => super.html(html);
  heading = (text: string, level: 1 | 2 | 3 | 4 | 5 | 6, raw: string, slugger: Slugger): string => super.heading(text, level, raw, slugger);
  hr = (): string => super.hr();
  list = (body: string, ordered: boolean, start: number): string => super.list(body, ordered, start);
  listitem = (text: string, task: boolean, checked: boolean): string => super.listitem(text, task, checked);
  checkbox = (checked: boolean): string => super.checkbox(checked);
  paragraph = (text: string): string => super.paragraph(text);
  table = (header: string, body: string): string => super.table(header, body);
  tablerow = (content: string): string => super.tablerow(content);
  tablecell = (content: string, flags: { header: boolean; align: 'center' | 'left' | 'right' | null }): string => super.tablecell(content, flags);
  strong = (text: string): string => super.strong(text);
  em = (text: string): string => super.em(text);
  codespan = (code: string): string => super.codespan(code);
  br = (): string => super.br();
  del = (text: string): string => super.del(text);
  link = (href: string, title: string, text: string): string => super.link(href, title, text);
  image = (href: string, title: string, text: string): string => super.image(href, title, text);
}

const rendererOptions: MarkedOptions = renderer.options;

const textRenderer = new marked.TextRenderer();
console.log(textRenderer.strong(text));

const parseTestText = '- list1\n - list1.1\n\n listend';
const parseTestTokens: TokensList = marked.lexer(parseTestText, options);

const inlineTestText = '- list1\n - list1.1\n\n listend';
const inlineTestTokens: Token[] = marked.Lexer.lexInline(inlineTestText, options);

/* List type is `list`. */
const listToken = parseTestTokens[0] as Tokens.List;
console.log(listToken.type === 'list');

const parser = new marked.Parser();
console.log(parser.parse(parseTestTokens));
console.log(marked.Parser.parse(parseTestTokens));
const parserOptions: MarkedOptions = parser.options;

const slugger = new marked.Slugger();
console.log(slugger.slug('Test Slug'));
console.log(slugger.slug('Test Slug', { dryrun: true }));

marked.use({ renderer }, { tokenizer });

marked.use({
  renderer: {
    heading(text, level) {
      if (level > 3) {
        return `<p>${text}</p>`;
      }

      return false;
    },
    listitem(text, task, checked) {
      if (task) return `<li class="task-list-item ${checked ? 'checked' : ''}">${text}</li>\n`;
      else return `<li>${text}</li>\n`;
    }
  },
  tokenizer: {
    codespan(src) {
      const match = src.match(/\$+([^\$\n]+?)\$+/);
      if (match) {
        return {
          type: 'codespan',
          raw: match[0],
          text: match[1].trim()
        };
      }

      // return false to use original codespan tokenizer
      return false;
    }
  }
});

interface NameToken {
  type: 'name';
  raw: string;
  text: string;
  tokens: Token[];
  items: Token[];
}

const tokenizerExtension: TokenizerExtension = {
  name: 'name',
  level: 'block',
  start: (src: string) => src.match(/name/)?.index,
  tokenizer(src: string): NameToken | void {
    if (src === 'name') {
      const token: NameToken = {
        type: 'name',
        raw: src,
        text: src,
        tokens: this.lexer.inline(src),
        items: []
      };
      this.lexer.inline(token.text, token.items);
      return token;
    }
  },
  childTokens: ['items']
};

const rendererExtension: RendererExtension = {
  name: 'name',
  renderer(t) {
    const token = t as NameToken;
    if (token.text === 'name') {
      // verifying that the second param to parseInline can be ignored
      console.log(this.parser.parseInline(token.items));
      return this.parser.parse(token.items);
    }
    return false;
  }
};

const tokenizerAndRendererExtension: TokenizerAndRendererExtension = {
  name: 'name',
  level: 'block',
  tokenizer(src: string) {
    if (src === 'name') {
      const token = {
        type: 'name',
        raw: src
      };
      return token;
    }
  },
  renderer(token: Tokens.Generic) {
    if (token.raw === 'name') {
      return 'name';
    }

    return false;
  }
};

marked.use({
  extensions: [tokenizerExtension, rendererExtension, tokenizerAndRendererExtension]
});

const asyncExtension: MarkedExtension = {
  async: true,
  async walkTokens(token) {
    if (token.type === 'code') {
      await Promise.resolve(3);
      token.text += 'foobar';
    }
  }
};

marked.use(asyncExtension);

(async() => {
  const md = '# foobar';
  const asyncMarked: string = await marked(md, { async: true });
  const promiseMarked: Promise<string> = marked(md, { async: true });
  const notAsyncMarked: string = marked(md, { async: false });
  const defaultMarked: string = marked(md);
  expectType<void>(marked(md, (_: any, res: string | undefined) => { res; }));
  expectType<void>(marked(md, { async: true }, (_: any, res: string | undefined) => { res; }));
  expectType<void>(marked(md, { async: false }, (_: any, res: string | undefined) => { res; }));

  const asyncMarkedParse: string = await marked.parse(md, { async: true });
  const promiseMarkedParse: Promise<string> = marked.parse(md, { async: true, headerIds: false });
  const notAsyncMarkedParse: string = marked.parse(md, { async: false });
  const defaultMarkedParse: string = marked.parse(md);
  expectType<void>(marked.parse(md, (_: any, res: string | undefined) => { res; }));
  expectType<void>(marked(md, { async: true }, (_: any, res: string | undefined) => { res; }));
  expectType<void>(marked(md, { async: false }, (_: any, res: string | undefined) => { res; }));
})();

// Tests for List and ListItem
// Dumped from markdown list parsed data

const listAndListItemText: Tokens.List = {
  type: 'list',
  raw: '1. Text ...',
  ordered: true,
  start: 1,
  loose: false,
  items: [
    {
      type: 'list_item',
      raw: '1. Text ...',
      task: false,
      loose: false,
      text: 'Text',
      tokens: [
        {
          type: 'text',
          raw: 'Point one',
          text: 'Point one',
          tokens: [
            {
              type: 'text',
              raw: 'Point one',
              text: 'Point one'
            }
          ]
        },
        {
          type: 'list',
          raw: '',
          ordered: false,
          start: '',
          loose: false,
          items: []
        }
      ]
    }
  ]
};

const lexer2 = new Lexer();
const tokens4 = lexer2.lex('# test');
const parser2 = new Parser();
console.log(parser2.parse(tokens4));

const slugger2 = new Slugger();
console.log(slugger2.slug('Test Slug'));

marked.use({ renderer: new Renderer() });
marked.use({ renderer: new TextRenderer() });
marked.use({ tokenizer: new Tokenizer() });
marked.use({
  hooks: {
    preprocess(markdown) {
      return markdown;
    },
    postprocess(html) {
      return html;
    }
  }
});
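The file above is never run for its output; it is compiled with a dedicated tsconfig purely to assert that the published types line up with how marked is meant to be called. A minimal sketch of the pattern it relies on (not part of the diff): expectType from ts-expect only type-checks its argument, so the call compiles when the inferred type matches the type parameter and fails the build otherwise.

import { marked } from 'marked';
import { expectType } from 'ts-expect';

// Compile-time assertions only; nothing here inspects runtime values.
expectType<string>(marked.parse('# heading', { async: false }));
expectType<Promise<string>>(marked.parse('# heading', { async: true }));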
@ -1,7 +1,7 @@
import { Lexer } from '../../src/Lexer.js';
import { _Lexer } from '../../src/Lexer.js';

function expectTokens({ md, options, tokens = [], links = {} }) {
  const lexer = new Lexer(options);
  const lexer = new _Lexer(options);
  const actual = lexer.lex(md);
  const expected = tokens;
  expected.links = links;
@ -10,7 +10,7 @@ function expectTokens({ md, options, tokens = [], links = {} }) {
}

function expectInlineTokens({ md, options, tokens = jasmine.any(Array), links = {} }) {
  const lexer = new Lexer(options);
  const lexer = new _Lexer(options);
  lexer.tokens.links = links;
  const outTokens = [];
  lexer.inlineTokens(md, outTokens);
@ -1,7 +1,7 @@
import { Parser } from '../../src/Parser.js';
import { _Parser } from '../../src/Parser.js';

async function expectHtml({ tokens, options, html, inline }) {
  const parser = new Parser(options);
  const parser = new _Parser(options);
  const actual = parser[inline ? 'parseInline' : 'parse'](tokens);
  await expectAsync(actual).toEqualHtml(html);
}
@ -1,27 +1,27 @@
import { Slugger } from '../../src/Slugger.js';
import { _Slugger } from '../../src/Slugger.js';

describe('Test slugger functionality', () => {
  it('should use lowercase slug', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('Test')).toBe('test');
  });

  it('should be unique to avoid collisions 1280', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('test')).toBe('test');
    expect(slugger.slug('test')).toBe('test-1');
    expect(slugger.slug('test')).toBe('test-2');
  });

  it('should be unique when slug ends with number', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('test 1')).toBe('test-1');
    expect(slugger.slug('test')).toBe('test');
    expect(slugger.slug('test')).toBe('test-2');
  });

  it('should be unique when slug ends with hyphen number', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('foo')).toBe('foo');
    expect(slugger.slug('foo')).toBe('foo-1');
    expect(slugger.slug('foo 1')).toBe('foo-1-1');
@ -30,39 +30,39 @@ describe('Test slugger functionality', () => {
  });

  it('should allow non-latin chars', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('привет')).toBe('привет');
  });

  it('should remove ampersands 857', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('This & That Section')).toBe('this--that-section');
  });

  it('should remove periods', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('file.txt')).toBe('filetxt');
  });

  it('should remove html tags', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('<em>html</em>')).toBe('html');
  });

  it('should not increment seen when using dryrun option', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section');
    expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
  });

  it('should still return the next unique id when using dryrun', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
    expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section-1');
  });

  it('should be repeatable in a sequence', () => {
    const slugger = new Slugger();
    const slugger = new _Slugger();
    expect(slugger.slug('foo')).toBe('foo');
    expect(slugger.slug('foo')).toBe('foo-1');
    expect(slugger.slug('foo')).toBe('foo-2');
@ -1,4 +1,4 @@
import { marked, Renderer, Slugger, lexer, parseInline, use, getDefaults, walkTokens as _walkTokens, defaults, setOptions } from '../../src/marked.js';
import { marked, Renderer, Slugger, lexer, parseInline, use, getDefaults, walkTokens, defaults, setOptions } from '../../src/marked.js';
import { timeout } from './utils.js';

describe('Test heading ID functionality', () => {
@ -30,10 +30,10 @@ describe('Test paragraph token type', () => {

describe('changeDefaults', () => {
  it('should change global defaults', async() => {
    const { defaults, changeDefaults } = await import('../../src/defaults.js');
    expect(defaults.test).toBeUndefined();
    const { _defaults, changeDefaults } = await import('../../src/defaults.js');
    expect(_defaults.test).toBeUndefined();
    changeDefaults({ test: true });
    expect((await import('../../src/defaults.js')).defaults.test).toBe(true);
    expect((await import('../../src/defaults.js'))._defaults.test).toBe(true);
  });
});

@ -952,7 +952,7 @@ br
`;
    const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
    const tokensSeen = [];
    _walkTokens(tokens, (token) => {
    walkTokens(tokens, (token) => {
      tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
    });
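The spec hunks above all make the same mechanical change: the unit tests now construct the underscore-prefixed classes (_Lexer, _Parser, _Slugger) and read _defaults directly from the TypeScript sources, while the package keeps exporting the familiar public names. A rough sketch of that relationship, assuming the re-export shape shown in test/types/marked.ts earlier in this diff:

// Illustrative only: internal modules export underscore-prefixed names;
// the package entry point re-exposes them under their public names.
import { _Slugger } from '../../src/Slugger.js'; // internal name, as in the specs above
import { Slugger } from 'marked';                // public name, as in test/types/marked.ts

const internal = new _Slugger();
const publicSlugger = new Slugger();
console.log(internal.slug('Test Slug'), publicSlugger.slug('Test Slug')); // 'test-slug' 'test-slug'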
22
tsconfig-type-test.json
Normal file
@ -0,0 +1,22 @@
{
  "compilerOptions": {
    "target": "es2021",
    "module": "NodeNext",
    "isolatedModules": true,
    "strict": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,
    "allowSyntheticDefaultImports": true,
    "moduleResolution": "NodeNext",
    "noImplicitAny": false,
    "baseUrl": ".",
    "paths": {
      "marked": [
        "lib/marked.d.ts"
      ]
    }
  },
  "include": [
    "test/types/*.ts"
  ]
}
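The "paths" entry above is what makes the type test meaningful: inside test/types/*.ts, the bare specifier 'marked' resolves to the freshly built lib/marked.d.ts rather than whatever happens to be installed in node_modules, so the assertions exercise exactly the declarations this commit ships. Illustrative usage under that mapping:

// Resolved through the "paths" mapping above to lib/marked.d.ts.
import { marked } from 'marked';

const html: string = marked.parse('# hello', { async: false });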
17
tsconfig.json
Normal file
@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "target": "es2021",
    "module": "NodeNext",
    "isolatedModules": true,
    "strict": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,
    "allowSyntheticDefaultImports": true,
    "moduleResolution": "NodeNext",
    "noImplicitAny": false,
    "allowImportingTsExtensions": true
  },
  "include": [
    "src/*.ts"
  ]
}
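This config only type-checks the sources ("noEmit": true); bundling is handled by tsup below. The "allowImportingTsExtensions" flag suggests the src modules may reference each other with explicit .ts specifiers, which is only legal when nothing is emitted. A hypothetical sketch of such a module (the file name and function are illustrative, not taken from the diff):

// Hypothetical src/token-count.ts
import { _Lexer } from './Lexer.ts'; // .ts specifier permitted by the options above

export function tokenCount(markdown: string): number {
  return new _Lexer().lex(markdown).length;
}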
47
tsup.config.ts
Normal file
@ -0,0 +1,47 @@
import { defineConfig } from 'tsup';
import fs from 'fs';

const pkg = JSON.parse(String(fs.readFileSync('./package.json')));
const version = process.env.SEMANTIC_RELEASE_NEXT_VERSION || pkg.version;

console.log('building version:', version);

const banner = `/**
 * marked v${version} - a markdown parser
 * Copyright (c) 2011-${new Date().getFullYear()}, Christopher Jeffrey. (MIT Licensed)
 * https://github.com/markedjs/marked
 */

/**
 * DO NOT EDIT THIS FILE
 * The code in this file is generated from files in ./src/
 */
`;

export default defineConfig({
  entry: ['src/marked.ts'],
  splitting: false,
  sourcemap: true,
  clean: true,
  format: ['cjs', 'esm', 'iife'],
  globalName: 'marked',
  banner: {
    js: banner
  },
  outDir: 'lib',
  outExtension({ format }) {
    if (format === 'cjs') {
      return {
        js: '.cjs'
      };
    } else if (format === 'iife') {
      return {
        js: '.umd.js'
      };
    }
    return {
      js: `.${format}.js`
    };
  },
  dts: true
});
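With the config above, tsup bundles src/marked.ts into lib/ as CommonJS, ESM, and a browser IIFE, emits type declarations (dts: true), and uses the outExtension hook to choose each format's file suffix. A minimal sketch (not part of the build) mirroring that hook, to show the filenames the config is expected to produce:

type BundleFormat = 'cjs' | 'esm' | 'iife';

// Mirrors the outExtension() logic above.
function extensionFor(format: BundleFormat): string {
  if (format === 'cjs') return '.cjs';
  if (format === 'iife') return '.umd.js';
  return `.${format}.js`;
}

for (const format of ['cjs', 'esm', 'iife'] as const) {
  console.log(`lib/marked${extensionFor(format)}`); // lib/marked.cjs, lib/marked.esm.js, lib/marked.umd.js
}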