fix: Remove unused code plus TypeScript tightening (#3527)

* chore: remove unused build file

* chore: remove unused package

* chore: remove unused function

* chore: remove unnecessary | undefineds

* chore: replace unnecessary &&s with optional chaining

* chore: use .at(-x) instead of .length - x property access

gives stricter TS typing and is more concise (a short sketch follows this list)

* chore: tighten TS types

* chore: sort tokens alphabetically

* fix: typeof plus !== null check

* chore: type test for .parse, .use

* fix: if check
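
A short sketch of the .at(-x) point above (illustrative TypeScript, not code from this diff): without the noUncheckedIndexedAccess compiler flag, indexing an array is typed as the plain element type even when the element may not exist, while Array.prototype.at is always typed as returning T | undefined, so the compiler forces a guard:

    const tokens: string[] = [];

    // Typed as `string` even though the array is empty; crashes at runtime
    // with no compile-time error:
    //   tokens[tokens.length - 1].toUpperCase();

    // Typed as `string | undefined`; TypeScript requires the check:
    const last = tokens.at(-1);
    if (last !== undefined) {
      console.log(last.toUpperCase());
    }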
Spencer Whitehead, 2024-11-17 23:53:28 -05:00, committed by GitHub
parent a46c0d8b7d, commit 1f579f7628
17 changed files with 236 additions and 245 deletions

@@ -1,23 +0,0 @@
-{
-  "name": "marked",
-  "homepage": "https://github.com/markedjs/marked",
-  "authors": [
-    "Christopher Jeffrey <chjjeffrey@gmail.com>"
-  ],
-  "description": "A markdown parser built for speed",
-  "keywords": [
-    "markdown",
-    "markup",
-    "html"
-  ],
-  "main": "lib/marked.cjs",
-  "license": "MIT",
-  "ignore": [
-    "**/.*",
-    "node_modules",
-    "bower_components",
-    "app/bower_components",
-    "test",
-    "tests"
-  ]
-}

@@ -185,7 +185,7 @@ function setOptions(opts) {
   $optionsElem.value = JSON.stringify(
     opts,
     (key, value) => {
-      if (value && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+      if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
         return undefined;
       }
       return value;
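
Context for the hunk above: typeof null === 'object' in JavaScript, so a typeof test alone never excludes null. The old code relied on value being truthy; the new code states the null exclusion explicitly. A minimal sketch of the narrowing behavior (illustrative, not project code):

    const v: unknown = JSON.parse('null');

    console.log(typeof null); // 'object'

    if (v !== null && typeof v === 'object') {
      // v is narrowed to `object` here; prototype checks are now safe
      console.log(Object.getPrototypeOf(v) === Object.prototype);
    }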

package-lock.json (generated)

@@ -35,7 +35,6 @@
         "rollup": "^4.25.0",
         "semantic-release": "^24.2.0",
         "titleize": "^4.0.0",
-        "ts-expect": "^1.3.0",
         "tslib": "^2.8.1",
         "typescript": "5.6.3"
       },
@@ -8698,12 +8697,6 @@
         "typescript": ">=4.2.0"
       }
     },
-    "node_modules/ts-expect": {
-      "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/ts-expect/-/ts-expect-1.3.0.tgz",
-      "integrity": "sha512-e4g0EJtAjk64xgnFPD6kTBUtpnMVzDrMb12N1YZV0VvSlhnVT3SGxiYTLdGy8Q5cYHOIC/FAHmZ10eGrAguicQ==",
-      "dev": true
-    },
     "node_modules/tslib": {
       "version": "2.8.1",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",

@@ -76,7 +76,6 @@
     "rollup": "^4.25.0",
     "semantic-release": "^24.2.0",
     "titleize": "^4.0.0",
-    "ts-expect": "^1.3.0",
     "tslib": "^2.8.1",
     "typescript": "5.6.3"
   },

@@ -6,7 +6,7 @@ import type { Token, TokensList } from './Tokens.ts';

 export class _Hooks {
   options: MarkedOptions;
-  block: boolean | undefined;
+  block?: boolean;

   constructor(options?: MarkedOptions) {
     this.options = options || _defaults;
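
Why `block?: boolean` can drop the `| undefined`: on an object type, the `?` modifier both admits undefined and allows the key to be omitted, so the longhand union adds nothing under default compiler flags (a sketch assuming exactOptionalPropertyTypes is off, which is an assumption about this repo's tsconfig):

    interface Longhand { block: boolean | undefined; } // key must be present
    interface Shorthand { block?: boolean; }           // key may be omitted

    // @ts-expect-error property 'block' is missing
    const a: Longhand = {};
    const b: Shorthand = {};                   // ok
    const c: Shorthand = { block: undefined }; // ok without exactOptionalPropertyTypes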

@@ -265,11 +265,11 @@ export class Marked {
     type overloadedParse = {
       (src: string, options: MarkedOptions & { async: true }): Promise<string>;
       (src: string, options: MarkedOptions & { async: false }): string;
-      (src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
+      (src: string, options?: MarkedOptions | null): string | Promise<string>;
     };

     // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    const parse: overloadedParse = (src: string, options?: MarkedOptions | undefined | null): any => {
+    const parse: overloadedParse = (src: string, options?: MarkedOptions | null): any => {
       const origOpt = { ...options };
       const opt = { ...this.defaults, ...origOpt };
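
The dropped `| undefined` in these signatures is safe because an optional parameter (`options?:`) already accepts undefined; only null has to be spelled out. Roughly (illustrative sketch, not the marked API itself):

    function render(src: string, options?: { gfm?: boolean } | null): string {
      const opt = { ...options }; // spreading null or undefined yields {}
      return opt.gfm ? src + ' (gfm)' : src;
    }

    render('x');            // ok, options is undefined
    render('x', undefined); // ok, explicit undefined is allowed by `?`
    render('x', null);      // ok, null is allowed by the union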

@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
 import { _defaults } from './defaults.ts';
 import { other, block, inline } from './rules.ts';
 import type { Token, TokensList, Tokens } from './Tokens.ts';
-import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+import type { MarkedOptions } from './MarkedOptions.ts';

 /**
  * Block Lexer
@@ -85,8 +85,7 @@ export class _Lexer {
    * Preprocessing
    */
   lex(src: string) {
-    src = src
-      .replace(other.carriageReturn, '\n');
+    src = src.replace(other.carriageReturn, '\n');

     this.blockTokens(src, this.tokens);

@@ -109,14 +108,10 @@
       src = src.replace(other.tabCharGlobal, '    ').replace(other.spaceLine, '');
     }

-    let token: Tokens.Generic | undefined;
-    let lastToken;
-    let cutSrc;
-
     while (src) {
-      if (this.options.extensions
-        && this.options.extensions.block
-        && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
+      let token: Tokens.Generic | undefined;
+
+      if (this.options.extensions?.block?.some((extTokenizer) => {
         if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
           src = src.substring(token.raw.length);
           tokens.push(token);

@@ -130,10 +125,11 @@
       // newline
       if (token = this.tokenizer.space(src)) {
         src = src.substring(token.raw.length);
-        if (token.raw.length === 1 && tokens.length > 0) {
+        const lastToken = tokens.at(-1);
+        if (token.raw.length === 1 && lastToken !== undefined) {
           // if there's a single \n as a spacer, it's terminating the last line,
           // so move it there so that we don't get unnecessary paragraph tags
-          tokens[tokens.length - 1].raw += '\n';
+          lastToken.raw += '\n';
         } else {
           tokens.push(token);
         }

@@ -143,12 +139,12 @@
       // code
       if (token = this.tokenizer.code(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         // An indented code block cannot interrupt a paragraph.
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }

@@ -200,11 +196,11 @@
       // def
       if (token = this.tokenizer.def(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.raw;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else if (!this.tokens.links[token.tag]) {
           this.tokens.links[token.tag] = {
             href: token.href,

@@ -230,30 +226,32 @@
       // top-level paragraph
       // prevent paragraph consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startBlock) {
+      let cutSrc = src;
+      if (this.options.extensions?.startBlock) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startBlock.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
         }
       }
       if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         if (lastParagraphClipped && lastToken?.type === 'paragraph') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
-        lastParagraphClipped = (cutSrc.length !== src.length);
+        lastParagraphClipped = cutSrc.length !== src.length;
         src = src.substring(token.raw.length);
         continue;
       }

@@ -261,12 +259,12 @@
       // text
       if (token = this.tokenizer.text(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }

@@ -297,12 +295,9 @@
    * Lexing/Compiling
    */
   inlineTokens(src: string, tokens: Token[] = []): Token[] {
-    let token, lastToken, cutSrc;
-
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
-    let match;
-    let keepPrevChar, prevChar;
+    let match: RegExpExecArray | null = null;

     // Mask out reflinks
     if (this.tokens.links) {
@@ -310,7 +305,9 @@
       if (links.length > 0) {
         while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
           if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
-            maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+            maskedSrc = maskedSrc.slice(0, match.index)
+              + '[' + 'a'.repeat(match[0].length - 2) + ']'
+              + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
           }
         }
       }
@@ -325,16 +322,18 @@
       maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
     }

+    let keepPrevChar = false;
+    let prevChar = '';
+
     while (src) {
       if (!keepPrevChar) {
         prevChar = '';
       }
       keepPrevChar = false;

+      let token: Tokens.Generic | undefined;
+
       // extensions
-      if (this.options.extensions
-        && this.options.extensions.inline
-        && this.options.extensions.inline.some((extTokenizer) => {
+      if (this.options.extensions?.inline?.some((extTokenizer) => {
         if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
           src = src.substring(token.raw.length);
           tokens.push(token);

@@ -355,7 +354,6 @@
       // tag
       if (token = this.tokenizer.tag(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
         tokens.push(token);
         continue;
       }

@@ -370,8 +368,8 @@
       // reflink, nolink
       if (token = this.tokenizer.reflink(src, this.tokens.links)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (token.type === 'text' && lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {

@@ -424,14 +422,16 @@
       // text
       // prevent inlineText consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startInline) {
+      let cutSrc = src;
+      if (this.options.extensions?.startInline) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startInline.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);

@@ -443,8 +443,8 @@
           prevChar = token.raw.slice(-1);
         }
         keepPrevChar = true;
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {

@@ -16,9 +16,9 @@ export type TokenizerStartFunction = (this: TokenizerThis, src: string) => numbe
 export interface TokenizerExtension {
   name: string;
   level: 'block' | 'inline';
-  start?: TokenizerStartFunction | undefined;
+  start?: TokenizerStartFunction;
   tokenizer: TokenizerExtensionFunction;
-  childTokens?: string[] | undefined;
+  childTokens?: string[];
 }

 export interface RendererThis {
@@ -58,19 +58,19 @@ export interface MarkedExtension {
   /**
    * Enable GFM line breaks. This option requires the gfm option to be true.
    */
-  breaks?: boolean | undefined;
+  breaks?: boolean;

   /**
    * Add tokenizers and renderers to marked
    */
   extensions?:
     | TokenizerAndRendererExtension[]
-    | undefined | null;
+    | null;

   /**
    * Enable GitHub flavored markdown.
    */
-  gfm?: boolean | undefined;
+  gfm?: boolean;

   /**
    * Hooks are methods that hook into some part of marked.
@@ -80,29 +80,29 @@ export interface MarkedExtension {
    * provideLexer is called to provide a function to tokenize markdown.
    * provideParser is called to provide a function to parse tokens.
    */
-  hooks?: HooksObject | undefined | null;
+  hooks?: HooksObject | null;

   /**
    * Conform to obscure parts of markdown.pl as much as possible. Don't fix any of the original markdown bugs or poor behavior.
    */
-  pedantic?: boolean | undefined;
+  pedantic?: boolean;

   /**
    * Type: object Default: new Renderer()
    *
    * An object containing functions to render tokens to HTML.
    */
-  renderer?: RendererObject | undefined | null;
+  renderer?: RendererObject | null;

   /**
    * Shows an HTML error message when rendering fails.
    */
-  silent?: boolean | undefined;
+  silent?: boolean;

   /**
    * The tokenizer defines how to turn markdown text into tokens.
    */
-  tokenizer?: TokenizerObject | undefined | null;
+  tokenizer?: TokenizerObject | null;

   /**
    * The walkTokens function gets called with every token.
@@ -110,26 +110,26 @@ export interface MarkedExtension {
    * Each token is passed by reference so updates are persisted when passed to the parser.
    * The return value of the function is ignored.
    */
-  walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
+  walkTokens?: ((token: Token) => void | Promise<void>) | null;
 }

 export interface MarkedOptions extends Omit<MarkedExtension, 'hooks' | 'renderer' | 'tokenizer' | 'extensions' | 'walkTokens'> {
   /**
    * Hooks are methods that hook into some part of marked.
    */
-  hooks?: _Hooks | undefined | null;
+  hooks?: _Hooks | null;

   /**
    * Type: object Default: new Renderer()
    *
    * An object containing functions to render tokens to HTML.
    */
-  renderer?: _Renderer | undefined | null;
+  renderer?: _Renderer | null;

   /**
    * The tokenizer defines how to turn markdown text into tokens.
    */
-  tokenizer?: _Tokenizer | undefined | null;
+  tokenizer?: _Tokenizer | null;

   /**
    * Custom extensions

@@ -46,7 +46,7 @@ export class _Parser {
       const anyToken = tokens[i];

       // Run any renderer extensions
-      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+      if (this.options.extensions?.renderers?.[anyToken.type]) {
         const genericToken = anyToken as Tokens.Generic;
         const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
         if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
@@ -132,15 +132,14 @@
   /**
    * Parse Inline Tokens
    */
-  parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string {
-    renderer = renderer || this.renderer;
+  parseInline(tokens: Token[], renderer: _Renderer | _TextRenderer = this.renderer): string {
     let out = '';
     for (let i = 0; i < tokens.length; i++) {
       const anyToken = tokens[i];

       // Run any renderer extensions
-      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+      if (this.options.extensions?.renderers?.[anyToken.type]) {
         const ret = this.options.extensions.renderers[anyToken.type].call({ parser: this }, anyToken);
         if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(anyToken.type)) {
           out += ret || '';
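
A small nuance in the parseInline change: a parameter default (renderer = this.renderer) fires only when the argument is undefined, whereas the old `renderer || this.renderer` would also have replaced any falsy argument. Since renderer is an object type, the two are equivalent here, and the default keeps the parameter non-optional inside the body. A toy sketch:

    const pick = (renderer: { name: string } = { name: 'fallback' }) => renderer.name;

    pick();                   // 'fallback', undefined triggers the default
    pick({ name: 'custom' }); // 'custom'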

@@ -77,7 +77,7 @@ export class _Renderer {
       if (item.task) {
         const checkbox = this.checkbox({ checked: !!item.checked });
         if (item.loose) {
-          if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
+          if (item.tokens[0]?.type === 'paragraph') {
             item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
             if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') {
               item.tokens[0].tokens[0].text = checkbox + ' ' + escape(item.tokens[0].tokens[0].text);

@@ -198,7 +198,7 @@ export class _Tokenizer {
           break;
         }

-        const lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);

         if (lastToken?.type === 'code') {
           // blockquote continuation cannot be preceded by a code block
@@ -222,7 +222,7 @@
           raw = raw.substring(0, raw.length - lastToken.raw.length) + newToken.raw;
           text = text.substring(0, text.length - oldToken.raw.length) + newToken.raw;
-          lines = newText.substring(tokens[tokens.length - 1].raw.length).split('\n');
+          lines = newText.substring(tokens.at(-1)!.raw.length).split('\n');
           continue;
         }
       }
@@ -414,8 +414,11 @@
     }

     // Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
-    list.items[list.items.length - 1].raw = list.items[list.items.length - 1].raw.trimEnd();
-    list.items[list.items.length - 1].text = list.items[list.items.length - 1].text.trimEnd();
+    const lastItem = list.items.at(-1);
+    if (lastItem) {
+      lastItem.raw = lastItem.raw.trimEnd();
+      lastItem.text = lastItem.text.trimEnd();
+    }
     list.raw = list.raw.trimEnd();

     // Item child tokens handled here at end because we needed to have the final item to trim it first
@@ -486,7 +489,7 @@
     const headers = splitCells(cap[1]);
     const aligns = cap[2].replace(this.rules.other.tableAlignChars, '').split('|');
-    const rows = cap[3] && cap[3].trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];
+    const rows = cap[3]?.trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];

     const item: Tokens.Table = {
       type: 'table',

@@ -1,47 +1,101 @@
 /* eslint-disable no-use-before-define */
 export type MarkedToken = (
-  Tokens.Space
+  Tokens.Blockquote
+  | Tokens.Br
   | Tokens.Code
+  | Tokens.Codespan
+  | Tokens.Def
+  | Tokens.Del
+  | Tokens.Em
+  | Tokens.Escape
   | Tokens.Heading
-  | Tokens.Table
   | Tokens.Hr
-  | Tokens.Blockquote
+  | Tokens.HTML
+  | Tokens.Image
+  | Tokens.Link
   | Tokens.List
   | Tokens.ListItem
   | Tokens.Paragraph
-  | Tokens.HTML
-  | Tokens.Text
-  | Tokens.Def
-  | Tokens.Escape
-  | Tokens.Tag
-  | Tokens.Image
-  | Tokens.Link
+  | Tokens.Space
   | Tokens.Strong
-  | Tokens.Em
-  | Tokens.Codespan
-  | Tokens.Br
-  | Tokens.Del);
+  | Tokens.Table
+  | Tokens.Tag
+  | Tokens.Text
+);

 export type Token = (
   MarkedToken
   | Tokens.Generic);

 export namespace Tokens {
-  export interface Space {
-    type: 'space';
+  export interface Blockquote {
+    type: 'blockquote';
     raw: string;
+    text: string;
+    tokens: Token[];
+  }
+
+  export interface Br {
+    type: 'br';
+    raw: string;
+  }
+
+  export interface Checkbox {
+    checked: boolean;
   }

   export interface Code {
     type: 'code';
     raw: string;
-    codeBlockStyle?: 'indented' | undefined;
-    lang?: string | undefined;
+    codeBlockStyle?: 'indented';
+    lang?: string;
     text: string;
     escaped?: boolean;
   }

+  export interface Codespan {
+    type: 'codespan';
+    raw: string;
+    text: string;
+  }
+
+  export interface Def {
+    type: 'def';
+    raw: string;
+    tag: string;
+    href: string;
+    title: string;
+  }
+
+  export interface Del {
+    type: 'del';
+    raw: string;
+    text: string;
+    tokens: Token[];
+  }
+
+  export interface Em {
+    type: 'em';
+    raw: string;
+    text: string;
+    tokens: Token[];
+  }
+
+  export interface Escape {
+    type: 'escape';
+    raw: string;
+    text: string;
+  }
+
+  export interface Generic {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    [index: string]: any;
+    type: string;
+    raw: string;
+    tokens?: Token[];
+  }
+
   export interface Heading {
     type: 'heading';
     raw: string;

@@ -50,33 +104,32 @@
     tokens: Token[];
   }

-  export interface Table {
-    type: 'table';
-    raw: string;
-    align: Array<'center' | 'left' | 'right' | null>;
-    header: TableCell[];
-    rows: TableCell[][];
-  }
-
-  export interface TableRow {
-    text: string;
-  }
-
-  export interface TableCell {
-    text: string;
-    tokens: Token[];
-    header: boolean;
-    align: 'center' | 'left' | 'right' | null;
-  }
-
   export interface Hr {
     type: 'hr';
     raw: string;
   }

-  export interface Blockquote {
-    type: 'blockquote';
+  export interface HTML {
+    type: 'html';
     raw: string;
+    pre: boolean;
+    text: string;
+    block: boolean;
+  }
+
+  export interface Image {
+    type: 'image';
+    raw: string;
+    href: string;
+    title: string | null;
+    text: string;
+  }
+
+  export interface Link {
+    type: 'link';
+    raw: string;
+    href: string;
+    title?: string | null;
     text: string;
     tokens: Token[];
   }

@@ -94,51 +147,48 @@
     type: 'list_item';
     raw: string;
     task: boolean;
-    checked?: boolean | undefined;
+    checked?: boolean;
     loose: boolean;
     text: string;
     tokens: Token[];
   }

-  export interface Checkbox {
-    checked: boolean;
-  }
-
   export interface Paragraph {
     type: 'paragraph';
     raw: string;
-    pre?: boolean | undefined;
+    pre?: boolean;
     text: string;
     tokens: Token[];
   }

-  export interface HTML {
-    type: 'html';
+  export interface Space {
+    type: 'space';
     raw: string;
-    pre: boolean;
-    text: string;
-    block: boolean;
   }

-  export interface Text {
-    type: 'text';
+  export interface Strong {
+    type: 'strong';
     raw: string;
     text: string;
-    tokens?: Token[];
-    escaped?: boolean;
+    tokens: Token[];
   }

-  export interface Def {
-    type: 'def';
+  export interface Table {
+    type: 'table';
     raw: string;
-    tag: string;
-    href: string;
-    title: string;
+    align: Array<'center' | 'left' | 'right' | null>;
+    header: TableCell[];
+    rows: TableCell[][];
   }

-  export interface Escape {
-    type: 'escape';
-    raw: string;
+  export interface TableCell {
+    text: string;
+    tokens: Token[];
+    header: boolean;
+    align: 'center' | 'left' | 'right' | null;
+  }
+
+  export interface TableRow {
     text: string;
   }

@@ -151,62 +201,12 @@
     block: boolean;
   }

-  export interface Link {
-    type: 'link';
-    raw: string;
-    href: string;
-    title?: string | null;
-    text: string;
-    tokens: Token[];
-  }
-
-  export interface Image {
-    type: 'image';
-    raw: string;
-    href: string;
-    title: string | null;
-    text: string;
-  }
-
-  export interface Strong {
-    type: 'strong';
-    raw: string;
-    text: string;
-    tokens: Token[];
-  }
-
-  export interface Em {
-    type: 'em';
-    raw: string;
-    text: string;
-    tokens: Token[];
-  }
-
-  export interface Codespan {
-    type: 'codespan';
-    raw: string;
-    text: string;
-  }
-
-  export interface Br {
-    type: 'br';
-    raw: string;
-  }
-
-  export interface Del {
-    type: 'del';
-    raw: string;
-    text: string;
-    tokens: Token[];
-  }
-
-  export interface Generic {
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    [index: string]: any;
-    type: string;
-    raw: string;
-    tokens?: Token[] | undefined;
+  export interface Text {
+    type: 'text';
+    raw: string;
+    text: string;
+    tokens?: Token[];
+    escaped?: boolean;
   }
 }

@@ -72,7 +72,7 @@ export function splitCells(tableRow: string, count?: number) {
   if (!cells[0].trim()) {
     cells.shift();
   }

-  if (cells.length > 0 && !cells[cells.length - 1].trim()) {
+  if (cells.length > 0 && !cells.at(-1)?.trim()) {
     cells.pop();
   }

@@ -34,8 +34,8 @@ export function marked(src: string, options: MarkedOptions & { async: true }): P
  */
 export function marked(src: string, options: MarkedOptions & { async: false }): string;
 export function marked(src: string, options: MarkedOptions & { async: true }): Promise<string>;
-export function marked(src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
-export function marked(src: string, opt?: MarkedOptions | undefined | null): string | Promise<string> {
+export function marked(src: string, options?: MarkedOptions | null): string | Promise<string>;
+export function marked(src: string, opt?: MarkedOptions | null): string | Promise<string> {
   return markedInstance.parse(src, opt);
 }

test/rules.js (vendored)

@@ -34,7 +34,7 @@ function propsToString(obj) {
     return null;
   }
   if (obj.constructor.name === 'Object') {
-    if (obj.exec && obj.exec.name === 'noopTest') {
+    if (obj.exec?.name === 'noopTest') {
       return null;
     }
     for (const prop in obj) {
@@ -61,7 +61,7 @@ if (process.argv.length > 2) {
         rule = rule[prop];
       }
     }
-    rulesList[rulePath[0]] = rule && rule[rulePath[0]] ? rule[rulePath[0]] : null;
+    rulesList[rulePath[0]] = rule?.[rulePath[0]] ?? null;
   } else {
     rulesObj = rules;

@@ -46,11 +46,6 @@ let options: MarkedOptions = {
 options = marked.getDefaults();
 options = marked.defaults;

-function callback(err: Error | null, markdown: string | undefined) {
-  console.log('Callback called!');
-  console.log(markdown);
-}
-
 let myOldMarked: typeof marked = marked.options(options);
 myOldMarked = marked.setOptions(options);

@@ -378,3 +373,28 @@ import { inline } from 'marked';

 // Rules is exported
 import type { Rules } from 'marked';
+
+marked.parse('', {
+  async: undefined,
+  breaks: undefined,
+  extensions: undefined,
+  gfm: undefined,
+  hooks: undefined,
+  pedantic: undefined,
+  renderer: undefined,
+  silent: undefined,
+  tokenizer: undefined,
+  walkTokens: undefined,
+});
+
+marked.use({
+  async: undefined,
+  breaks: undefined,
+  extensions: undefined,
+  gfm: undefined,
+  hooks: undefined,
+  pedantic: undefined,
+  renderer: undefined,
+  silent: undefined,
+  tokenizer: undefined,
+  walkTokens: undefined,
+});
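
These calls act as compile-time assertions in place of the removed ts-expect dependency: because every option is declared with `?`, each must accept an explicit undefined, so this file stops compiling if a later change accidentally tightens one of the option types. The same trick would extend to other entry points, for example (hypothetical addition, not part of this diff):

    marked.parseInline('', { gfm: undefined });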

@@ -673,8 +673,8 @@ used extension2 walked</p>
       }],
       walkTokens(token) {
         if (token.tokens) {
-          const finalChildToken = token.tokens[token.tokens.length - 1];
-          if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
+          const finalChildToken = token.tokens.at(-1);
+          if (finalChildToken?.type === 'inlineStyleTag') {
             token.originalType = token.type;
             token.type = 'styled';
             token.style = `style="color:${finalChildToken.text};"`;