import { marked, Renderer, Slugger, lexer, parseInline, use, getDefaults, walkTokens as _walkTokens } from '../../src/marked.js';
describe('Test heading ID functionality', () => {
  it('should add id attribute by default', () => {
    const renderer = new Renderer();
    const slugger = new Slugger();
    const header = renderer.heading('test', 1, 'test', slugger);
    // FIX: the expected-output literal had its HTML stripped, leaving a broken
    // multi-line string (syntax error). Default options generate an id from the slug.
    expect(header).toBe('<h1 id="test">test</h1>\n');
  });

  it('should NOT add id attribute when options set false', () => {
    const renderer = new Renderer({ headerIds: false });
    const header = renderer.heading('test', 1, 'test');
    // With headerIds disabled no id attribute (and no slugger) is needed.
    expect(header).toBe('<h1>test</h1>\n');
  });
});
describe('Test slugger functionality', () => {
  it('should use lowercase slug', () => {
    const slugger = new Slugger();
    expect(slugger.slug('Test')).toBe('test');
  });

  it('should be unique to avoid collisions 1280', () => {
    const slugger = new Slugger();
    expect(slugger.slug('test')).toBe('test');
    expect(slugger.slug('test')).toBe('test-1');
    expect(slugger.slug('test')).toBe('test-2');
  });

  it('should be unique when slug ends with number', () => {
    const slugger = new Slugger();
    expect(slugger.slug('test 1')).toBe('test-1');
    expect(slugger.slug('test')).toBe('test');
    expect(slugger.slug('test')).toBe('test-2');
  });

  it('should be unique when slug ends with hyphen number', () => {
    const slugger = new Slugger();
    expect(slugger.slug('foo')).toBe('foo');
    expect(slugger.slug('foo')).toBe('foo-1');
    expect(slugger.slug('foo 1')).toBe('foo-1-1');
    expect(slugger.slug('foo-1')).toBe('foo-1-2');
    expect(slugger.slug('foo')).toBe('foo-2');
  });

  it('should allow non-latin chars', () => {
    const slugger = new Slugger();
    expect(slugger.slug('привет')).toBe('привет');
  });

  it('should remove ampersands 857', () => {
    const slugger = new Slugger();
    expect(slugger.slug('This & That Section')).toBe('this--that-section');
  });

  it('should remove periods', () => {
    const slugger = new Slugger();
    expect(slugger.slug('file.txt')).toBe('filetxt');
  });

  it('should remove html tags', () => {
    const slugger = new Slugger();
    // FIX: the input had its markup stripped; without tags this test was vacuous.
    expect(slugger.slug('<em>html</em>')).toBe('html');
  });

  it('should not increment seen when using dryrun option', () => {
    const slugger = new Slugger();
    // FIX: restore the markup input — the literal had been broken across lines
    // (syntax error) when its tags were stripped.
    expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section');
    expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
  });

  it('should still return the next unique id when using dryrun', () => {
    const slugger = new Slugger();
    expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
    expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section-1');
  });

  it('should be repeatable in a sequence', () => {
    const slugger = new Slugger();
    expect(slugger.slug('foo')).toBe('foo');
    expect(slugger.slug('foo')).toBe('foo-1');
    expect(slugger.slug('foo')).toBe('foo-2');
    // dryrun returns the next unique slug without recording it as seen,
    // so the same value is returned twice and then actually consumed.
    expect(slugger.slug('foo', { dryrun: true })).toBe('foo-3');
    expect(slugger.slug('foo', { dryrun: true })).toBe('foo-3');
    expect(slugger.slug('foo')).toBe('foo-3');
    expect(slugger.slug('foo')).toBe('foo-4');
  });
});
describe('Test paragraph token type', () => {
  it('should use the "paragraph" type on top level', () => {
    // Tokens: [0] paragraph, [1] space, [2] blockquote, [3] list.
    const tokens = lexer('A Paragraph.\n\n> A blockquote\n\n- list item\n');
    const [paragraph, , blockquote, list] = tokens;
    expect(paragraph.type).toBe('paragraph');
    // Inside a blockquote, loose text is still a paragraph token...
    expect(blockquote.tokens[0].type).toBe('paragraph');
    // ...but inside a tight list item it is a plain text token.
    expect(list.items[0].tokens[0].type).toBe('text');
  });
});
describe('changeDefaults', () => {
  it('should change global defaults', async() => {
    const { defaults, changeDefaults } = await import('../../src/defaults.js');
    expect(defaults.test).toBeUndefined();
    changeDefaults({ test: true });
    // Re-importing yields the same module instance, so the mutation is visible.
    const reloaded = await import('../../src/defaults.js');
    expect(reloaded.defaults.test).toBe(true);
  });
});
describe('inlineLexer', () => {
  it('should send html to renderer.html', () => {
    const renderer = new Renderer();
    spyOn(renderer, 'html').and.callThrough();
    // FIX: restore the inline html stripped from the literal — the broken
    // multi-line string was a syntax error. The exact tag just needs to be
    // raw inline HTML so the lexer routes it to renderer.html.
    const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" /> something else';
    marked(md, { renderer });
    expect(renderer.html).toHaveBeenCalledWith('<img alt="MY IMAGE" src="example.png" />');
  });
});
describe('parseInline', () => {
  it('should parse inline tokens', () => {
    const md = '**strong** _em_';
    const html = parseInline(md);
    // FIX: restore the tags stripped from the expected-output literal.
    expect(html).toBe('<strong>strong</strong> <em>em</em>');
  });

  it('should not parse block tokens', () => {
    const md = '# header\n\n_em_';
    const html = parseInline(md);
    // Block syntax ('# header') passes through untouched; only inline runs.
    expect(html).toBe('# header\n\n<em>em</em>');
  });
});
describe('use extension', () => {
it('should use custom block tokenizer + renderer extensions', () => {
  const underline = {
    name: 'underline',
    level: 'block',
    tokenizer(src) {
      const rule = /^:([^\n]*)(?:\n|$)/;
      const match = rule.exec(src);
      if (match) {
        return {
          type: 'underline',
          raw: match[0], // This is the text that you want your token to consume from the source
          text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
        };
      }
    },
    renderer(token) {
      // FIX: restore the <u> tags stripped from the template literal.
      return `<u>${token.text}</u>\n`;
    }
  };
  use({ extensions: [underline] });
  // Without a `start` index the extension cannot interrupt a paragraph...
  let html = marked('Not Underlined\n:Underlined\nNot Underlined');
  expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
  // ...but it does match at the start of a standalone block.
  html = marked('Not Underlined\n\n:Underlined\n\nNot Underlined');
  expect(html).toBe('<p>Not Underlined</p>\n<u>Underlined</u>\n<p>Not Underlined</p>\n');
});
it('should interrupt paragraphs if using "start" property', () => {
  const underline = {
    extensions: [{
      name: 'underline',
      level: 'block',
      start(src) { return src.indexOf(':'); },
      tokenizer(src) {
        const rule = /^:([^\n]*):(?:\n|$)/;
        const match = rule.exec(src);
        if (match) {
          return {
            type: 'underline',
            raw: match[0], // This is the text that you want your token to consume from the source
            text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
          };
        }
      },
      renderer(token) {
        // FIX: restore the <u> tags stripped from the template literal.
        return `<u>${token.text}</u>\n`;
      }
    }]
  };
  use(underline);
  const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
  // ':Underlined B:' interrupts the paragraph via `start`; ':Not Underlined D'
  // has no closing ':' so the tokenizer rejects it and it stays in the paragraph.
  expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
});
it('should use custom inline tokenizer + renderer extensions', () => {
  const underline = {
    name: 'underline',
    level: 'inline',
    start(src) { return src.indexOf('='); },
    tokenizer(src) {
      const rule = /^=([^=]+)=/;
      const match = rule.exec(src);
      if (match) {
        return {
          type: 'underline',
          raw: match[0], // This is the text that you want your token to consume from the source
          text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
        };
      }
    },
    renderer(token) {
      // FIX: restore the <u> tags stripped from the template literal.
      return `<u>${token.text}</u>`;
    }
  };
  use({ extensions: [underline] });
  const html = marked('Not Underlined =Underlined= Not Underlined');
  expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
});
it('should handle interacting block and inline extensions', () => {
  const descriptionlist = {
    name: 'descriptionList',
    level: 'block',
    start(src) {
      const match = src.match(/:[^:\n]/);
      if (match) {
        return match.index;
      }
    },
    tokenizer(src, tokens) {
      const rule = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/;
      const match = rule.exec(src);
      if (match) {
        const token = {
          type: 'descriptionList',
          raw: match[0], // This is the text that you want your token to consume from the source
          text: match[0].trim(), // You can add additional properties to your tokens to pass along to the renderer
          tokens: []
        };
        this.lexer.inlineTokens(token.text, token.tokens);
        return token;
      }
    },
    renderer(token) {
      // FIX: restore the <dl> wrapper stripped from the template literal
      // (the dangling newline before the closing backtick was a syntax error).
      return `<dl>${this.parser.parseInline(token.tokens)}\n</dl>`;
    }
  };
  const description = {
    name: 'description',
    level: 'inline',
    start(src) { return src.indexOf(':'); },
    tokenizer(src, tokens) {
      const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
      const match = rule.exec(src);
      if (match) {
        const token = {
          type: 'description',
          raw: match[0],
          dt: [],
          dd: []
        };
        this.lexer.inline(match[1].trim(), token.dt);
        this.lexer.inline(match[2].trim(), token.dd);
        return token;
      }
    },
    renderer(token) {
      // FIX: restore the <dt>/<dd> markup stripped from the template literal.
      return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
    }
  };
  use({ extensions: [descriptionlist, description] });
  const html = marked('A Description List with One Description:\n'
    + ': Topic 1 : Description 1\n'
    + ': **Topic 2** : *Description 2*');
  expect(html).toBe('<p>A Description List with One Description:</p>\n'
    + '<dl>'
    + '\n<dt>Topic 1</dt><dd>Description 1</dd>'
    + '\n<dt><strong>Topic 2</strong></dt><dd><em>Description 2</em></dd>'
    + '\n</dl>');
});
it('should allow other options mixed into the extension', () => {
  const extension = {
    name: 'underline',
    level: 'block',
    start(src) { return src.indexOf(':'); },
    tokenizer(src) {
      const rule = /^:([^\n]*):(?:\n|$)/;
      const match = rule.exec(src);
      if (match) {
        return {
          type: 'underline',
          raw: match[0], // This is the text that you want your token to consume from the source
          text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
        };
      }
    },
    renderer(token) {
      // FIX: restore the <u> tags stripped from the template literal.
      return `<u>${token.text}</u>\n`;
    }
  };
  use({ sanitize: true, silent: true, extensions: [extension] });
  // FIX: restore the raw <div> in the input and the sanitized (entity-escaped)
  // output — the mangled literals had lost the markup and the escaping.
  const html = marked(':test:\ntest\n<div></div>');
  expect(html).toBe('<u>test</u>\n<p>test\n&lt;div&gt;&lt;/div&gt;</p>\n');
});
it('should handle renderers that return false', () => {
  // Only handles the lowercase variant; returning false defers to the
  // next extension registered under the same name.
  const primary = {
    name: 'test',
    level: 'block',
    tokenizer(src) {
      const pattern = /^:([^\n]*):(?:\n|$)/;
      const found = pattern.exec(src);
      if (found) {
        return {
          type: 'test',
          raw: found[0], // Text consumed from the source
          text: found[1].trim() // Extra property passed along to the renderer
        };
      }
    },
    renderer(token) {
      return token.text === 'test' ? 'test' : false;
    }
  };
  // Only handles the capitalized variant; ':none:' is handled by neither,
  // so it renders nothing.
  const fallbackRenderer = {
    name: 'test',
    level: 'block',
    renderer(token) {
      return token.text === 'Test' ? 'fallback' : false;
    }
  };
  use({ extensions: [fallbackRenderer, primary] });
  expect(marked(':Test:\n\n:test:\n\n:none:')).toBe('fallbacktest');
});
it('should fall back when tokenizers return false', () => {
  // Matches any `:text:` block and renders the captured text verbatim.
  const catchAll = {
    name: 'test',
    level: 'block',
    tokenizer(src) {
      const pattern = /^:([^\n]*):(?:\n|$)/;
      const found = pattern.exec(src);
      if (found) {
        return {
          type: 'test',
          raw: found[0], // Text consumed from the source
          text: found[1].trim() // Extra property passed along to the renderer
        };
      }
      return false;
    },
    renderer(token) {
      return token.text;
    }
  };
  // Claims only capitalized matches (uppercasing them); returning false
  // for everything else falls back to the tokenizer above.
  const capitalsOnly = {
    name: 'test',
    level: 'block',
    tokenizer(src) {
      const pattern = /^:([^\n]*):(?:\n|$)/;
      const found = pattern.exec(src);
      if (found && found[1].match(/^[A-Z]/)) {
        return {
          type: 'test',
          raw: found[0],
          text: found[1].trim().toUpperCase()
        };
      }
      return false;
    }
  };
  use({ extensions: [catchAll, capitalsOnly] });
  expect(marked(':Test:\n\n:test:')).toBe('TESTtest');
});
it('should override original tokenizer/renderer with same name, but fall back if returns false', () => {
  const extension = {
    extensions: [{
      name: 'heading',
      level: 'block',
      tokenizer(src) {
        return false; // fall back to default `heading` tokenizer
      },
      renderer(token) {
        // FIX: restore the <h1> markup stripped from the string literal.
        return '<h1>' + token.text + ' RENDERER EXTENSION</h1>\n';
      }
    },
    {
      name: 'code',
      level: 'block',
      tokenizer(src) {
        const rule = /^:([^\n]*):(?:\n|$)/;
        const match = rule.exec(src);
        if (match) {
          return {
            type: 'code',
            raw: match[0],
            text: match[1].trim() + ' TOKENIZER EXTENSION'
          };
        }
      },
      renderer(token) {
        return false; // fall back to default `code` renderer
      }
    }]
  };
  use(extension);
  const html = marked('# extension1\n:extension2:');
  // heading: default tokenizer + extension renderer;
  // code: extension tokenizer + default renderer.
  expect(html).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
});
it('should walk only specified child tokens', () => {
  const walkableDescription = {
    extensions: [{
      name: 'walkableDescription',
      level: 'inline',
      start(src) { return src.indexOf(':'); },
      tokenizer(src, tokens) {
        const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
        const match = rule.exec(src);
        if (match) {
          const token = {
            type: 'walkableDescription',
            raw: match[0],
            dt: [],
            dd: [],
            tokens: []
          };
          this.lexer.inline(match[1].trim(), token.dt);
          this.lexer.inline(match[2].trim(), token.dd);
          // token.tokens is deliberately NOT in childTokens below, so
          // walkTokens must leave 'unwalked' untouched.
          this.lexer.inline('unwalked', token.tokens);
          return token;
        }
      },
      renderer(token) {
        // FIX: restore the <dt>/<dd> markup stripped from the template literal.
        return `\n<dt>${this.parser.parseInline(token.dt)} - ${this.parser.parseInline(token.tokens)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
      },
      childTokens: ['dd', 'dt']
    }],
    walkTokens(token) {
      if (token.type === 'text') {
        token.text += ' walked';
      }
    }
  };
  use(walkableDescription);
  const html = marked(': Topic 1 : Description 1\n'
    + ': **Topic 2** : *Description 2*');
  expect(html).toBe('<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
    + '\n<dt><strong>Topic 2 walked</strong> - unwalked</dt><dd><em>Description 2 walked</em></dd></p>\n');
});
describe('multiple extensions', () => {
  // Builds an extension bundle exercising every hook at once: a block
  // extension, an inline extension, a tokenizer override, a renderer
  // override, a walkTokens visitor and a plain option.
  function createExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          if (src.startsWith(`::${name}\n`)) {
            const text = `:${name}`;
            const token = {
              type: `block-${name}`,
              raw: `::${name}\n`,
              text,
              tokens: []
            };
            this.lexer.inline(token.text, token.tokens);
            return token;
          }
        },
        renderer(token) {
          // FIX: the closing tag had lost its slash (`${token.type}>`), which
          // produced an unbalanced pair; restore `</...>`.
          return `<${token.type}>${this.parser.parseInline(token.tokens)}</${token.type}>\n`;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          if (src.startsWith(`:${name}`)) {
            return {
              type: `inline-${name}`,
              raw: `:${name}`,
              text: `used ${name}`
            };
          }
        },
        renderer(token) {
          return token.text;
        }
      }],
      tokenizer: {
        heading(src) {
          if (src.startsWith(`# ${name}`)) {
            const token = {
              type: 'heading',
              raw: `# ${name}`,
              text: `used ${name}`,
              depth: 1,
              tokens: []
            };
            this.lexer.inline(token.text, token.tokens);
            return token;
          }
          return false;
        }
      },
      renderer: {
        heading(text, depth, raw, slugger) {
          if (text === name) {
            // FIX: restore the heading markup stripped from the literal.
            return `<h${depth}>${text}</h${depth}>\n`;
          }
          return false; // fall back to the default heading renderer
        }
      },
      walkTokens(token) {
        if (token.text === `used ${name}`) {
          token.text += ' walked';
        }
      },
      headerIds: false
    };
  }

  // An extension whose hooks all return false: it must never shadow the
  // real extensions registered under the same names.
  function createFalseExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }],
      headerIds: false
    };
  }

  function runTest() {
    const html = marked(`
::extension1
::extension2
:extension1
:extension2
# extension1
# extension2
# no extension
`);
    // Collapse runs of newlines so the comparison ignores blank-line noise.
    // FIX: restore the tags stripped from the expected template literal.
    expect(`\n${html}\n`.replace(/\n+/g, '\n')).toBe(`
<block-extension1>used extension1 walked</block-extension1>
<block-extension2>used extension2 walked</block-extension2>
<p>used extension1 walked
used extension2 walked</p>
<h1>used extension1 walked</h1>
<h1>used extension2 walked</h1>
<h1>no extension</h1>
`);
  }

  it('should merge extensions when calling marked.use multiple times', () => {
    use(createExtension('extension1'));
    use(createExtension('extension2'));
    runTest();
  });

  it('should merge extensions when calling marked.use with multiple extensions', () => {
    use(
      createExtension('extension1'),
      createExtension('extension2')
    );
    runTest();
  });

  it('should fall back to any extensions with the same name if the first returns false', () => {
    use(
      createExtension('extension1'),
      createExtension('extension2'),
      createFalseExtension('extension1'),
      createFalseExtension('extension2')
    );
    runTest();
  });
});
it('should allow deleting/editing tokens', () => {
  const styleTags = {
    extensions: [{
      name: 'inlineStyleTag',
      level: 'inline',
      start(src) {
        const match = src.match(/ *{[^\{]/);
        if (match) {
          return match.index;
        }
      },
      tokenizer(src, tokens) {
        const rule = /^ *{([^\{\}\n]+)}$/;
        const match = rule.exec(src);
        if (match) {
          return {
            type: 'inlineStyleTag',
            raw: match[0], // This is the text that you want your token to consume from the source
            text: match[1]
          };
        }
      }
    },
    {
      name: 'styled',
      renderer(token) {
        token.type = token.originalType;
        const text = this.parser.parse([token]);
        const openingTag = /(<[^\s<>]+)([^\n<>]*>.*)/s.exec(text);
        if (openingTag) {
          // Splice the style attribute into the opening tag of the default output.
          return `${openingTag[1]} ${token.style}${openingTag[2]}`;
        }
        return text;
      }
    }],
    walkTokens(token) {
      if (token.tokens) {
        const finalChildToken = token.tokens[token.tokens.length - 1];
        if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
          // Retag the parent so the 'styled' renderer handles it, and delete
          // the trailing style token from the child list.
          token.originalType = token.type;
          token.type = 'styled';
          token.style = `style="color:${finalChildToken.text};"`;
          token.tokens.pop();
        }
      }
    },
    headerIds: false
  };
  use(styleTags);
  const html = marked('This is a *paragraph* with blue text. {blue}\n'
    + '# This is a *header* with red text {red}');
  // FIX: restore the tags stripped from the expected-output literal.
  expect(html).toBe('<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
    + '<h1 style="color:red;">This is a <em>header</em> with red text</h1>\n');
});
it('should use renderer', () => {
  // A renderer override that ignores its input and emits a fixed string.
  const rendererOverride = {
    renderer: {
      paragraph(text) {
        return 'extension';
      }
    }
  };
  spyOn(rendererOverride.renderer, 'paragraph').and.callThrough();
  use(rendererOverride);
  const output = marked('text');
  expect(rendererOverride.renderer.paragraph).toHaveBeenCalledWith('text');
  expect(output).toBe('extension');
});
it('should use tokenizer', () => {
  const extension = {
    tokenizer: {
      paragraph(text) {
        const token = {
          type: 'paragraph',
          raw: text,
          text: 'extension',
          tokens: []
        };
        this.lexer.inline(token.text, token.tokens);
        return token;
      }
    }
  };
  spyOn(extension.tokenizer, 'paragraph').and.callThrough();
  use(extension);
  const html = marked('text');
  expect(extension.tokenizer.paragraph).toHaveBeenCalledWith('text');
  // FIX: restore the <p> tags stripped from the expected-output literal
  // (the broken multi-line string was a syntax error).
  expect(html).toBe('<p>extension</p>\n');
});
it('should use walkTokens', () => {
  let visited = 0;
  use({
    walkTokens(token) {
      visited += 1;
    }
  });
  marked('text');
  // 'text' yields a paragraph token plus its inline text child.
  expect(visited).toBe(2);
});
it('should use walkTokens in async', (done) => {
  let visited = 0;
  use({
    walkTokens(token) {
      visited += 1;
    }
  });
  // The callback form runs asynchronously; assert inside it and signal done.
  marked('text', () => {
    expect(visited).toBe(2);
    done();
  });
});
it('should use options from extension', () => {
  const extension = {
    headerIds: false
  };
  use(extension);
  const html = marked('# heading');
  // FIX: restore the <h1> tags stripped from the expected-output literal.
  // headerIds:false from the extension means no id attribute is emitted.
  expect(html).toBe('<h1>heading</h1>\n');
});
it('should call all walkTokens in reverse order', () => {
  let firstPassCount = 0;
  let secondPassCount = 0;
  // Registered first, so it runs SECOND: it only counts tokens that the
  // later-registered visitor has already marked.
  const markerChecker = {
    walkTokens(token) {
      if (token.walkedOnce) {
        secondPassCount++;
      }
    }
  };
  // Registered second, so it runs FIRST and marks every token.
  const markerSetter = {
    walkTokens(token) {
      firstPassCount++;
      token.walkedOnce = true;
    }
  };
  use(markerChecker);
  use(markerSetter);
  marked('text');
  expect(firstPassCount).toBe(2);
  expect(secondPassCount).toBe(2);
});
it('should use last extension function and not override others', () => {
const extension1 = {
renderer: {
paragraph(text) {
return 'extension1 paragraph\n';
},
html(html) {
return 'extension1 html\n';
}
}
};
const extension2 = {
renderer: {
paragraph(text) {
return 'extension2 paragraph\n';
}
}
};
use(extension1);
use(extension2);
const html = marked(`
paragraph