feat: add provideLexer and provideParser hooks (#3424)

This commit is contained in:
parent 0076503dd2
commit 447f5af7e4

.gitignore (vendored, 1 change)
@@ -9,3 +9,4 @@ docs/LICENSE.md
 vuln.js
 man/marked.1
 marked.min.js
+test.js
@@ -261,6 +261,8 @@ Hooks are methods that hook into some part of marked. The following hooks are available:
 | `preprocess(markdown: string): string` | Process markdown before sending it to marked. |
 | `postprocess(html: string): string` | Process html after marked has finished parsing. |
 | `processAllTokens(tokens: Token[]): Token[]` | Process all tokens before walk tokens. |
+| `provideLexer(): (src: string, options?: MarkedOptions) => Token[]` | Provide function to tokenize markdown. |
+| `provideParser(): (tokens: Token[], options?: MarkedOptions) => string` | Provide function to parse tokens. |

 `marked.use()` can be called multiple times with different `hooks` functions. Each function will be called in order, starting with the function that was assigned *last*.

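To make the new table rows concrete, here is a short sketch (illustrative only, not part of this diff) of the `provideParser` signature in use: the provided function receives the token list, and whatever it returns becomes the result of `marked.parse()` (see the "provide parser return object" test further down). Joining the tokens' `raw` text is just an assumed example purpose.

```ts
import { marked } from 'marked';
import type { MarkedOptions, Token } from 'marked';

marked.use({
  hooks: {
    provideParser() {
      // Replace HTML rendering wholesale: whatever this function returns
      // becomes the result of marked.parse().
      return (tokens: Token[], _options?: MarkedOptions) =>
        tokens.map((token) => token.raw).join('');
    },
  },
});

// With the default parser swapped out, parse() now echoes the raw source.
console.log(marked.parse('# Title\n\nBody *text*.'));
```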
@@ -325,6 +327,45 @@ console.log(marked.parse(`
 <img src="x">
 ```

+**Example:** Save reflinks for chunked rendering
+
+```js
+import { marked, Lexer } from 'marked';
+
+let refLinks = {};
+
+// Override function
+function processAllTokens(tokens) {
+  refLinks = tokens.links;
+  return tokens;
+}
+
+function provideLexer() {
+  return (src, options) => {
+    const lexer = new Lexer(options);
+    lexer.tokens.links = refLinks;
+    return this.block ? lexer.lex(src) : lexer.inlineTokens(src);
+  };
+}
+
+marked.use({ hooks: { processAllTokens, provideLexer } });
+
+// Parse reflinks separately from markdown that uses them
+marked.parse(`
+[test]: http://example.com
+`);
+
+console.log(marked.parse(`
+[test link][test]
+`));
+```
+
+**Output:**
+
+```html
+<p><a href="http://example.com">test link</a></p>
+```
+
 ***

 <h2 id="extensions">Custom Extensions : <code>extensions</code></h2>
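A complementary sketch for the parser side (again illustrative, not part of this diff): `provideParser` can delegate to the exported `Parser` and post-process its output per call. Only the documented hook signature and the public `Parser` class are assumed; the footer comment is made up for the example.

```ts
import { marked, Parser } from 'marked';
import type { MarkedOptions, Token } from 'marked';

marked.use({
  hooks: {
    provideParser() {
      // Delegate to the default parser, then decorate block-level output.
      return (tokens: Token[], options?: MarkedOptions) => {
        const html = this.block
          ? Parser.parse(tokens, options)
          : Parser.parseInline(tokens, options);
        return this.block ? html + '<!-- rendered via provideParser -->\n' : html;
      };
    },
  },
});

console.log(marked.parse('# hi'));
// <h1>hi</h1>
// <!-- rendered via provideParser -->
```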
src/Hooks.ts (17 changes)
@@ -1,9 +1,12 @@
 import { _defaults } from './defaults.ts';
+import { _Lexer } from './Lexer.ts';
+import { _Parser } from './Parser.ts';
 import type { MarkedOptions } from './MarkedOptions.ts';
 import type { Token, TokensList } from './Tokens.ts';

 export class _Hooks {
   options: MarkedOptions;
+  block: boolean | undefined;

   constructor(options?: MarkedOptions) {
     this.options = options || _defaults;
@@ -35,4 +38,18 @@ export class _Hooks {
   processAllTokens(tokens: Token[] | TokensList) {
     return tokens;
   }
+
+  /**
+   * Provide function to tokenize markdown
+   */
+  provideLexer() {
+    return this.block ? _Lexer.lex : _Lexer.lexInline;
+  }
+
+  /**
+   * Provide function to parse tokens
+   */
+  provideParser() {
+    return this.block ? _Parser.parse : _Parser.parseInline;
+  }
 }
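The static `_Lexer.lex` / `_Lexer.lexInline` defaults returned here are the same functions the public `Lexer` class exposes, so a user-land hook can wrap rather than replace them. A sketch under that assumption (not part of this diff):

```ts
import { marked, Lexer } from 'marked';
import type { MarkedOptions, Token } from 'marked';

marked.use({
  hooks: {
    provideLexer() {
      // Wrap the default block/inline tokenizer instead of replacing it.
      const lex = this.block ? Lexer.lex : Lexer.lexInline;
      return (src: string, options?: MarkedOptions): Token[] => {
        const tokens = lex(src, options);
        console.log(`lexed ${tokens.length} top-level tokens`);
        return tokens;
      };
    },
  },
});

marked.parse('# one\n\ntwo'); // logs the token count before parsing begins
```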
@@ -18,8 +18,8 @@ export class Marked {
   defaults = _getDefaults();
   options = this.setOptions;

-  parse = this.parseMarkdown(_Lexer.lex, _Parser.parse);
-  parseInline = this.parseMarkdown(_Lexer.lexInline, _Parser.parseInline);
+  parse = this.parseMarkdown(true);
+  parseInline = this.parseMarkdown(false);

   Parser = _Parser;
   Renderer = _Renderer;
@@ -195,11 +195,11 @@ export class Marked {
         if (!(prop in hooks)) {
           throw new Error(`hook '${prop}' does not exist`);
         }
-        if (prop === 'options') {
-          // ignore options property
+        if (['options', 'block'].includes(prop)) {
+          // ignore options and block properties
           continue;
         }
-        const hooksProp = prop as Exclude<keyof _Hooks, 'options'>;
+        const hooksProp = prop as Exclude<keyof _Hooks, 'options' | 'block'>;
         const hooksFunc = pack.hooks[hooksProp] as UnknownFunction;
         const prevHook = hooks[hooksProp] as UnknownFunction;
         if (_Hooks.passThroughHooks.has(prop)) {
@@ -261,7 +261,7 @@ export class Marked {
     return _Parser.parse(tokens, options ?? this.defaults);
   }

-  private parseMarkdown(lexer: (src: string, options?: MarkedOptions) => TokensList | Token[], parser: (tokens: Token[], options?: MarkedOptions) => string) {
+  private parseMarkdown(blockType: boolean) {
     type overloadedParse = {
       (src: string, options: MarkedOptions & { async: true }): Promise<string>;
       (src: string, options: MarkedOptions & { async: false }): string;
@@ -291,8 +291,12 @@ export class Marked {

       if (opt.hooks) {
         opt.hooks.options = opt;
+        opt.hooks.block = blockType;
       }

+      const lexer = opt.hooks ? opt.hooks.provideLexer() : (blockType ? _Lexer.lex : _Lexer.lexInline);
+      const parser = opt.hooks ? opt.hooks.provideParser() : (blockType ? _Parser.parse : _Parser.parseInline);
+
       if (opt.async) {
         return Promise.resolve(opt.hooks ? opt.hooks.preprocess(src) : src)
           .then(src => lexer(src, opt))
@@ -309,7 +313,7 @@ export class Marked {
       }
       let tokens = lexer(src, opt);
       if (opt.hooks) {
-        tokens = opt.hooks.processAllTokens(tokens) as Token[] | TokensList;
+        tokens = opt.hooks.processAllTokens(tokens);
       }
       if (opt.walkTokens) {
         this.walkTokens(tokens, opt.walkTokens);
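Since `parseMarkdown(blockType)` now records the entry point on `opt.hooks.block` before resolving the lexer and parser, a single hook can serve both `marked.parse` and `marked.parseInline`. A minimal sketch (illustrative, not from this diff):

```ts
import { marked, Lexer } from 'marked';

marked.use({
  hooks: {
    provideLexer() {
      // parseMarkdown(true) / parseMarkdown(false) sets this.block before
      // the hook runs, so marked.parse and marked.parseInline share one hook.
      console.log(this.block ? 'block lexing' : 'inline lexing');
      return this.block ? Lexer.lex : Lexer.lexInline;
    },
  },
});

marked.parse('# heading');        // logs "block lexing"
marked.parseInline('*emphasis*'); // logs "inline lexing"
```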
@@ -34,7 +34,7 @@ export interface RendererExtension {

 export type TokenizerAndRendererExtension = TokenizerExtension | RendererExtension | (TokenizerExtension & RendererExtension);

-type HooksApi = Omit<_Hooks, 'constructor' | 'options'>;
+type HooksApi = Omit<_Hooks, 'constructor' | 'options' | 'block'>;
 type HooksObject = {
   [K in keyof HooksApi]?: (this: _Hooks, ...args: Parameters<HooksApi[K]>) => ReturnType<HooksApi[K]> | Promise<ReturnType<HooksApi[K]>>
 };
@@ -77,6 +77,8 @@ export interface MarkedExtension {
    * preprocess is called to process markdown before sending it to marked.
    * processAllTokens is called with the TokensList before walkTokens.
    * postprocess is called to process html after marked has finished parsing.
+   * provideLexer is called to provide a function to tokenize markdown.
+   * provideParser is called to provide a function to parse tokens.
    */
   hooks?: HooksObject | undefined | null;

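Given the `HooksObject` typing above (hooks are called with `this` bound to the hooks instance and may return a promise of their usual result), a typed extension object might look like the sketch below; it assumes the exported `MarkedExtension` type and simply mirrors the type test that follows.

```ts
import { Lexer, Parser, marked } from 'marked';
import type { MarkedExtension } from 'marked';

const extension: MarkedExtension = {
  hooks: {
    // `this` is typed as the hooks instance, so `this.block` stays readable
    // even though `block` itself is excluded from the overridable hook names.
    provideLexer() {
      return this.block ? Lexer.lex : Lexer.lexInline;
    },
    provideParser() {
      return this.block ? Parser.parse : Parser.parseInline;
    },
  },
};

marked.use(extension);
```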
@@ -346,6 +346,16 @@ marked.use({
     }
   }
 });
+marked.use({
+  hooks: {
+    provideLexer() {
+      return this.block ? Lexer.lex : Lexer.lexInline;
+    },
+    provideParser() {
+      return this.block ? Parser.parse : Parser.parseInline;
+    },
+  }
+});
 marked.use({
   async: true,
   hooks: {
@@ -190,4 +190,72 @@ describe('Hooks', () => {
 <h1>postprocess2 async</h1>
 <h1>postprocess1</h1>`);
   });
+
+  it('should provide lexer', () => {
+    marked.use({
+      hooks: {
+        provideLexer() {
+          return (src) => [createHeadingToken(src)];
+        },
+      },
+    });
+    const html = marked.parse('text');
+    assert.strictEqual(html.trim(), '<h1>text</h1>');
+  });
+
+  it('should provide lexer async', async() => {
+    marked.use({
+      async: true,
+      hooks: {
+        provideLexer() {
+          return async(src) => {
+            await timeout();
+            return [createHeadingToken(src)];
+          };
+        },
+      },
+    });
+    const html = await marked.parse('text');
+    assert.strictEqual(html.trim(), '<h1>text</h1>');
+  });
+
+  it('should provide parser return object', () => {
+    marked.use({
+      hooks: {
+        provideParser() {
+          return (tokens) => ({ text: 'test parser' });
+        },
+      },
+    });
+    const html = marked.parse('text');
+    assert.strictEqual(html.text, 'test parser');
+  });
+
+  it('should provide parser', () => {
+    marked.use({
+      hooks: {
+        provideParser() {
+          return (tokens) => 'test parser';
+        },
+      },
+    });
+    const html = marked.parse('text');
+    assert.strictEqual(html.trim(), 'test parser');
+  });
+
+  it('should provide parser async', async() => {
+    marked.use({
+      async: true,
+      hooks: {
+        provideParser() {
+          return async(tokens) => {
+            await timeout();
+            return 'test parser';
+          };
+        },
+      },
+    });
+    const html = await marked.parse('text');
+    assert.strictEqual(html.trim(), 'test parser');
+  });
 });
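These tests reference two helpers whose definitions are not part of this diff, `createHeadingToken` and `timeout`. The sketch below is only an assumed shape for them, consistent with the assertions above: a depth-1 heading token that renders as `<h1>text</h1>`, and a promise that resolves on the next timer tick.

```ts
// Assumed shapes for the helpers used above (hypothetical, not in this diff).
import type { Tokens } from 'marked';

// A minimal level-1 heading token, so the default renderer emits "<h1>src</h1>".
function createHeadingToken(src: string): Tokens.Heading {
  return {
    type: 'heading',
    raw: src,
    depth: 1,
    text: src,
    tokens: [{ type: 'text', raw: src, text: src }],
  };
}

// Resolves on the next timer tick, enough to exercise the async code paths.
function timeout(): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve);
  });
}
```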