fix: run spec tests with testutils (#3017)
BREAKING CHANGE: drop support for node v16
This commit is contained in:
parent
86e0eb5485
commit
014d4e61f6
@ -1,2 +1,3 @@
|
||||
lib
|
||||
*.min.js
|
||||
public
|
||||
|
78
.github/workflows/codeql-analysis.yml
vendored
78
.github/workflows/codeql-analysis.yml
vendored
@ -1,78 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
schedule:
|
||||
- cron: '0 9 * * 3'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
permissions:
|
||||
actions: read # for github/codeql-action/init to get workflow details
|
||||
contents: read # for actions/checkout to fetch code
|
||||
security-events: write # for github/codeql-action/autobuild to send a status report
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Override automatic language detection by changing the below list
|
||||
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
|
||||
language: ['javascript']
|
||||
# Learn more...
|
||||
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event, then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
37
.github/workflows/tests.yml
vendored
37
.github/workflows/tests.yml
vendored
@ -9,11 +9,11 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
Test:
|
||||
UnitTests:
|
||||
strategy:
|
||||
matrix:
|
||||
# lowest verison here should also be in `engines` field
|
||||
node_version: [16, 18, 'lts/*', '*']
|
||||
node_version: [18, "lts/*", "*"]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
@ -25,16 +25,14 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
- name: Build 🗜️
|
||||
run: npm run build
|
||||
- name: Run Unit Tests 👩🏽💻
|
||||
run: npm run test:unit
|
||||
- name: Run Spec Tests 👩🏽💻
|
||||
run: npm run test:specs
|
||||
- name: Run UMD Tests 👩🏽💻
|
||||
run: npm run test:umd
|
||||
- name: Run Types Tests 👩🏽💻
|
||||
run: npm run test:types
|
||||
|
||||
Lint:
|
||||
OtherTests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
@ -42,33 +40,22 @@ jobs:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
- name: Lint ✨
|
||||
run: npm run test:lint
|
||||
|
||||
Build:
|
||||
name: Build and Test Types
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v4
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
node-version: "lts/*"
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
- name: Build 🗜️
|
||||
run: npm run build
|
||||
- name: Run UMD Tests 👩🏽💻
|
||||
run: npm run test:umd
|
||||
- name: Run Types Tests 👩🏽💻
|
||||
run: npm run test:types
|
||||
- name: Lint ✨
|
||||
run: npm run test:lint
|
||||
|
||||
Release:
|
||||
permissions:
|
||||
contents: write
|
||||
needs: [Test, Lint, Build]
|
||||
needs: [UnitTests, OtherTests]
|
||||
if: |
|
||||
github.ref == 'refs/heads/master' &&
|
||||
github.event.repository.fork == false
|
||||
@ -79,7 +66,7 @@ jobs:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
node-version: "lts/*"
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
- name: Build 🗜️
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,5 +1,6 @@
|
||||
.DS_Store
|
||||
.vercel
|
||||
.vscode
|
||||
node_modules/
|
||||
test/compiled_tests
|
||||
public
|
||||
|
9
.vscode/settings.json
vendored
9
.vscode/settings.json
vendored
@ -1,9 +0,0 @@
|
||||
{
|
||||
"cSpell.words": [
|
||||
"commonmark",
|
||||
"markedjs",
|
||||
"titleize",
|
||||
"uglifyjs",
|
||||
"vuln"
|
||||
]
|
||||
}
|
522
package-lock.json
generated
522
package-lock.json
generated
@ -13,7 +13,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@arethetypeswrong/cli": "^0.13.1",
|
||||
"@markedjs/html-differ": "^4.0.2",
|
||||
"@markedjs/testutils": "9.1.5-0",
|
||||
"@rollup/plugin-terser": "^0.4.4",
|
||||
"@rollup/plugin-typescript": "^11.1.5",
|
||||
"@semantic-release/commit-analyzer": "^11.1.0",
|
||||
@ -32,9 +32,7 @@
|
||||
"eslint-plugin-import": "^2.29.0",
|
||||
"eslint-plugin-n": "^16.2.0",
|
||||
"eslint-plugin-promise": "^6.1.1",
|
||||
"front-matter": "^4.0.2",
|
||||
"highlight.js": "^11.9.0",
|
||||
"jasmine": "^5.1.0",
|
||||
"markdown-it": "13.0.2",
|
||||
"marked-highlight": "^2.0.6",
|
||||
"marked-man": "^2.0.0",
|
||||
@ -44,11 +42,10 @@
|
||||
"semantic-release": "^22.0.7",
|
||||
"titleize": "^4.0.0",
|
||||
"ts-expect": "^1.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "5.2.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 16"
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/@aashutoshrathi/word-wrap": {
|
||||
@ -258,28 +255,6 @@
|
||||
"node": ">=0.1.90"
|
||||
}
|
||||
},
|
||||
"node_modules/@cspotcode/source-map-support": {
|
||||
"version": "0.8.1",
|
||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "0.3.9"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.9",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
|
||||
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.0.3",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
},
|
||||
"node_modules/@eslint-community/eslint-utils": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
|
||||
@ -369,102 +344,6 @@
|
||||
"integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@isaacs/cliui": {
|
||||
"version": "8.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
||||
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"string-width": "^5.1.2",
|
||||
"string-width-cjs": "npm:string-width@^4.2.0",
|
||||
"strip-ansi": "^7.0.1",
|
||||
"strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
|
||||
"wrap-ansi": "^8.1.0",
|
||||
"wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/ansi-regex": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz",
|
||||
"integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/ansi-styles": {
|
||||
"version": "6.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
|
||||
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/emoji-regex": {
|
||||
"version": "9.2.2",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
|
||||
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/string-width": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
|
||||
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"eastasianwidth": "^0.2.0",
|
||||
"emoji-regex": "^9.2.2",
|
||||
"strip-ansi": "^7.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/strip-ansi": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz",
|
||||
"integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ansi-regex": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@isaacs/cliui/node_modules/wrap-ansi": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
|
||||
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ansi-styles": "^6.1.0",
|
||||
"string-width": "^5.0.1",
|
||||
"strip-ansi": "^7.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/gen-mapping": {
|
||||
"version": "0.3.3",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz",
|
||||
@ -550,6 +429,18 @@
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@markedjs/testutils": {
|
||||
"version": "9.1.5-0",
|
||||
"resolved": "https://registry.npmjs.org/@markedjs/testutils/-/testutils-9.1.5-0.tgz",
|
||||
"integrity": "sha512-cQ//6dq/bZt4TxI2WJUVYxuxwaI7JMCa9InV0jSaBaKtyvWJlEFG+iAtCKWkpR7WxUiAZa9jWtxg2+g59gSOzA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@markedjs/html-differ": "^4.0.2",
|
||||
"front-matter": "^4.0.2",
|
||||
"marked": "^9.1.5",
|
||||
"marked-repo": "https://github.com/markedjs/marked/tarball/v9.1.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@nodelib/fs.scandir": {
|
||||
"version": "2.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||
@ -763,16 +654,6 @@
|
||||
"@octokit/openapi-types": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@pkgjs/parseargs": {
|
||||
"version": "0.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
||||
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@pnpm/network.ca-file": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz",
|
||||
@ -1590,30 +1471,6 @@
|
||||
"url": "https://github.com/sindresorhus/is?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/@tsconfig/node10": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
|
||||
"integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node12": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
|
||||
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node14": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
|
||||
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@tsconfig/node16": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
|
||||
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.2.tgz",
|
||||
@ -1632,13 +1489,6 @@
|
||||
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "10.17.60",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz",
|
||||
"integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==",
|
||||
"dev": true,
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@types/normalize-package-data": {
|
||||
"version": "2.4.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.3.tgz",
|
||||
@ -1947,15 +1797,6 @@
|
||||
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-walk": {
|
||||
"version": "8.2.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz",
|
||||
"integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
|
||||
@ -2039,12 +1880,6 @@
|
||||
"integrity": "sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/arg": {
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
|
||||
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/argparse": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
|
||||
@ -2577,12 +2412,6 @@
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/create-require": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
||||
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/cross-env": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz",
|
||||
@ -2885,12 +2714,6 @@
|
||||
"readable-stream": "^2.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/eastasianwidth": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
|
||||
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
@ -3901,34 +3724,6 @@
|
||||
"is-callable": "^1.1.3"
|
||||
}
|
||||
},
|
||||
"node_modules/foreground-child": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz",
|
||||
"integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"cross-spawn": "^7.0.0",
|
||||
"signal-exit": "^4.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/foreground-child/node_modules/signal-exit": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.0.2.tgz",
|
||||
"integrity": "sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/formdata-polyfill": {
|
||||
"version": "4.0.10",
|
||||
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
|
||||
@ -4921,89 +4716,6 @@
|
||||
"node": ">=10.13"
|
||||
}
|
||||
},
|
||||
"node_modules/jackspeak": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.2.1.tgz",
|
||||
"integrity": "sha512-MXbxovZ/Pm42f6cDIDkl3xpwv1AGwObKwfmjs2nQePiy85tP3fatofl3FC1aBsOtP/6fq5SbtgHwWcMsLP+bDw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@isaacs/cliui": "^8.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@pkgjs/parseargs": "^0.11.0"
|
||||
}
|
||||
},
|
||||
"node_modules/jasmine": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/jasmine/-/jasmine-5.1.0.tgz",
|
||||
"integrity": "sha512-prmJlC1dbLhti4nE4XAPDWmfJesYO15sjGXVp7Cs7Ym5I9Xtwa/hUHxxJXjnpfLO72+ySttA0Ztf8g/RiVnUKw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"glob": "^10.2.2",
|
||||
"jasmine-core": "~5.1.0"
|
||||
},
|
||||
"bin": {
|
||||
"jasmine": "bin/jasmine.js"
|
||||
}
|
||||
},
|
||||
"node_modules/jasmine-core": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-5.1.0.tgz",
|
||||
"integrity": "sha512-bFMMwpKuTZXCuGd51yClFobw5SOtad1kmdWnYO8dNwYV8i01Xj0C2+nyQpSKl1EKxiPfyd1ZgBl/rsusL3aS6w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/jasmine/node_modules/brace-expansion": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/jasmine/node_modules/glob": {
|
||||
"version": "10.2.6",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.2.6.tgz",
|
||||
"integrity": "sha512-U/rnDpXJGF414QQQZv5uVsabTVxMSwzS5CH0p3DRCIV6ownl4f7PzGnkGmvlum2wB+9RlJWJZ6ACU1INnBqiPA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"foreground-child": "^3.1.0",
|
||||
"jackspeak": "^2.0.3",
|
||||
"minimatch": "^9.0.1",
|
||||
"minipass": "^5.0.0 || ^6.0.2",
|
||||
"path-scurry": "^1.7.0"
|
||||
},
|
||||
"bin": {
|
||||
"glob": "dist/cjs/src/bin.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/jasmine/node_modules/minimatch": {
|
||||
"version": "9.0.1",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.1.tgz",
|
||||
"integrity": "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"brace-expansion": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/java-properties": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/java-properties/-/java-properties-1.0.2.tgz",
|
||||
@ -5239,12 +4951,6 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/make-error": {
|
||||
"version": "1.3.6",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
|
||||
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/markdown-it": {
|
||||
"version": "13.0.2",
|
||||
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-13.0.2.tgz",
|
||||
@ -5274,9 +4980,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/marked": {
|
||||
"version": "9.1.2",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-9.1.2.tgz",
|
||||
"integrity": "sha512-qoKMJqK0w6vkLk8+KnKZAH6neUZSNaQqVZ/h2yZ9S7CbLuFHyS2viB0jnqcWF9UKjwsAbMrQtnQhdmdvOVOw9w==",
|
||||
"version": "9.1.5",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-9.1.5.tgz",
|
||||
"integrity": "sha512-14QG3shv8Kg/xc0Yh6TNkMj90wXH9mmldi5941I2OevfJ/FQAFLEwtwU2/FfgSAOMlWHrEukWSGQf8MiVYNG2A==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
@ -5307,6 +5013,20 @@
|
||||
"marked-man": "bin/marked-man.js"
|
||||
}
|
||||
},
|
||||
"node_modules/marked-repo": {
|
||||
"name": "marked",
|
||||
"version": "9.1.5",
|
||||
"resolved": "https://github.com/markedjs/marked/tarball/v9.1.5",
|
||||
"integrity": "sha512-ESYdlhsf+u82SDP6V229Dms0llLuzqDPkym0eX8B4kUI3HTTtD0u1fPYmfk8vdsvTQOYAdCI6H/UuWrHFhCjcg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 16"
|
||||
}
|
||||
},
|
||||
"node_modules/marked-terminal": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-6.0.0.tgz",
|
||||
@ -5424,15 +5144,6 @@
|
||||
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/minipass": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-6.0.2.tgz",
|
||||
"integrity": "sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
@ -9278,31 +8989,6 @@
|
||||
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/path-scurry": {
|
||||
"version": "1.9.2",
|
||||
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.2.tgz",
|
||||
"integrity": "sha512-qSDLy2aGFPm8i4rsbHd4MNyTcrzHFsLQykrtbuGRknZZCBBVXSv2tSCDN2Cg6Rt/GFRw8GoW9y9Ecw5rIPG1sg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"lru-cache": "^9.1.1",
|
||||
"minipass": "^5.0.0 || ^6.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/path-scurry/node_modules/lru-cache": {
|
||||
"version": "9.1.1",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.1.tgz",
|
||||
"integrity": "sha512-65/Jky17UwSb0BuB9V+MyDpsOtXKmYwzhyl+cOa9XUiI4uV2Ouy/2voFP3+al0BjZbJgMBD8FojMpAf+Z+qn4A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "14 || >=16.14"
|
||||
}
|
||||
},
|
||||
"node_modules/path-type": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
|
||||
@ -10510,21 +10196,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/string-width-cjs": {
|
||||
"name": "string-width",
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/string.prototype.repeat": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-0.2.0.tgz",
|
||||
@ -10588,19 +10259,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-ansi-cjs": {
|
||||
"name": "strip-ansi",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-bom": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
|
||||
@ -10830,58 +10488,6 @@
|
||||
"integrity": "sha512-e4g0EJtAjk64xgnFPD6kTBUtpnMVzDrMb12N1YZV0VvSlhnVT3SGxiYTLdGy8Q5cYHOIC/FAHmZ10eGrAguicQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/ts-node": {
|
||||
"version": "10.9.1",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz",
|
||||
"integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@cspotcode/source-map-support": "^0.8.0",
|
||||
"@tsconfig/node10": "^1.0.7",
|
||||
"@tsconfig/node12": "^1.0.7",
|
||||
"@tsconfig/node14": "^1.0.0",
|
||||
"@tsconfig/node16": "^1.0.2",
|
||||
"acorn": "^8.4.1",
|
||||
"acorn-walk": "^8.1.1",
|
||||
"arg": "^4.1.0",
|
||||
"create-require": "^1.1.0",
|
||||
"diff": "^4.0.1",
|
||||
"make-error": "^1.1.1",
|
||||
"v8-compile-cache-lib": "^3.0.1",
|
||||
"yn": "3.1.1"
|
||||
},
|
||||
"bin": {
|
||||
"ts-node": "dist/bin.js",
|
||||
"ts-node-cwd": "dist/bin-cwd.js",
|
||||
"ts-node-esm": "dist/bin-esm.js",
|
||||
"ts-node-script": "dist/bin-script.js",
|
||||
"ts-node-transpile-only": "dist/bin-transpile.js",
|
||||
"ts-script": "dist/bin-script-deprecated.js"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@swc/core": ">=1.2.50",
|
||||
"@swc/wasm": ">=1.2.50",
|
||||
"@types/node": "*",
|
||||
"typescript": ">=2.7"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@swc/core": {
|
||||
"optional": true
|
||||
},
|
||||
"@swc/wasm": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/ts-node/node_modules/diff": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/tsconfig-paths": {
|
||||
"version": "3.14.2",
|
||||
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz",
|
||||
@ -11101,12 +10707,6 @@
|
||||
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/v8-compile-cache-lib": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
|
||||
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/validate-npm-package-license": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
|
||||
@ -11211,57 +10811,6 @@
|
||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs": {
|
||||
"name": "wrap-ansi",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
||||
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"ansi-styles": "^4.0.0",
|
||||
"string-width": "^4.1.0",
|
||||
"strip-ansi": "^6.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"color-name": "~1.1.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/wrap-ansi/node_modules/ansi-styles": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
@ -11352,15 +10901,6 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/yn": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
||||
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/yocto-queue": {
|
||||
"version": "0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||
|
13
package.json
13
package.json
@ -50,8 +50,8 @@
|
||||
"html"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@markedjs/testutils": "9.1.5-0",
|
||||
"@arethetypeswrong/cli": "^0.13.1",
|
||||
"@markedjs/html-differ": "^4.0.2",
|
||||
"@rollup/plugin-terser": "^0.4.4",
|
||||
"@rollup/plugin-typescript": "^11.1.5",
|
||||
"@semantic-release/commit-analyzer": "^11.1.0",
|
||||
@ -70,9 +70,7 @@
|
||||
"eslint-plugin-import": "^2.29.0",
|
||||
"eslint-plugin-n": "^16.2.0",
|
||||
"eslint-plugin-promise": "^6.1.1",
|
||||
"front-matter": "^4.0.2",
|
||||
"highlight.js": "^11.9.0",
|
||||
"jasmine": "^5.1.0",
|
||||
"markdown-it": "13.0.2",
|
||||
"marked-highlight": "^2.0.6",
|
||||
"marked-man": "^2.0.0",
|
||||
@ -82,14 +80,13 @@
|
||||
"semantic-release": "^22.0.7",
|
||||
"titleize": "^4.0.0",
|
||||
"ts-expect": "^1.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "5.2.2"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "npm run build && cross-env NODE_OPTIONS=--loader=ts-node/esm jasmine --config=jasmine.json",
|
||||
"test": "npm run build && npm run test:specs && npm run test:unit",
|
||||
"test:all": "npm test && npm run test:umd && npm run test:types && npm run test:lint",
|
||||
"test:unit": "npm test -- test/unit/**/*-spec.js",
|
||||
"test:specs": "npm test -- test/specs/**/*-spec.js",
|
||||
"test:unit": "node --test --test-reporter=spec test/unit",
|
||||
"test:specs": "node --test --test-reporter=spec test/run-spec-tests.js",
|
||||
"test:lint": "eslint .",
|
||||
"test:redos": "node test/recheck.js > vuln.js",
|
||||
"test:types": "tsc --project tsconfig-type-test.json && attw -P --exclude-entrypoints ./bin/marked ./marked.min.js",
|
||||
@ -106,6 +103,6 @@
|
||||
"rollup": "rollup -c rollup.config.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 16"
|
||||
"node": ">= 18"
|
||||
}
|
||||
}
|
||||
|
3
test/.eslintrc.json
vendored
3
test/.eslintrc.json
vendored
@ -22,7 +22,6 @@
|
||||
"no-var": "error"
|
||||
},
|
||||
"env": {
|
||||
"node": true,
|
||||
"jasmine": true
|
||||
"node": true
|
||||
}
|
||||
}
|
||||
|
59
test/bench.js
vendored
59
test/bench.js
vendored
@ -1,7 +1,6 @@
|
||||
import { dirname, resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { isEqual } from './helpers/html-differ.js';
|
||||
import { loadFiles } from './helpers/load.js';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { htmlIsEqual, getTests } from '@markedjs/testutils';
|
||||
|
||||
import { marked as cjsMarked } from '../lib/marked.cjs';
|
||||
import { marked as esmMarked } from '../lib/marked.esm.js';
|
||||
@ -13,9 +12,9 @@ let marked;
|
||||
/**
|
||||
* Load specs
|
||||
*/
|
||||
export function load() {
|
||||
export async function load() {
|
||||
const dir = resolve(__dirname, './specs/commonmark');
|
||||
const sections = loadFiles(dir);
|
||||
const sections = await getTests(dir);
|
||||
let specs = [];
|
||||
|
||||
for (const section in sections) {
|
||||
@ -30,7 +29,7 @@ export function load() {
|
||||
*/
|
||||
export async function runBench(options) {
|
||||
options = options || {};
|
||||
const specs = load();
|
||||
const specs = await load();
|
||||
const tests = {};
|
||||
|
||||
// Non-GFM, Non-pedantic
|
||||
@ -55,24 +54,24 @@ export async function runBench(options) {
|
||||
tests['esm marked'] = esmMarked.parse;
|
||||
|
||||
try {
|
||||
tests.commonmark = (await (async() => {
|
||||
tests.commonmark = await (async() => {
|
||||
const { Parser, HtmlRenderer } = await import('commonmark');
|
||||
const parser = new Parser();
|
||||
const writer = new HtmlRenderer();
|
||||
return function(text) {
|
||||
return writer.render(parser.parse(text));
|
||||
};
|
||||
})());
|
||||
})();
|
||||
} catch (e) {
|
||||
console.error('Could not bench commonmark. (Error: %s)', e.message);
|
||||
}
|
||||
|
||||
try {
|
||||
tests['markdown-it'] = (await (async() => {
|
||||
tests['markdown-it'] = await (async() => {
|
||||
const MarkdownIt = (await import('markdown-it')).default;
|
||||
const md = new MarkdownIt();
|
||||
return md.render.bind(md);
|
||||
})());
|
||||
})();
|
||||
} catch (e) {
|
||||
console.error('Could not bench markdown-it. (Error: %s)', e.message);
|
||||
}
|
||||
@ -92,7 +91,11 @@ export async function bench(tests, specs) {
|
||||
console.log();
|
||||
for (let i = 0; i < specs.length; i++) {
|
||||
const spec = specs[i];
|
||||
process.stdout.write(`${(i * 100 / specs.length).toFixed(1).padStart(5)}% ${i.toString().padStart(specs.length.toString().length)} of ${specs.length}\r`);
|
||||
process.stdout.write(
|
||||
`${((i * 100) / specs.length).toFixed(1).padStart(5)}% ${i
|
||||
.toString()
|
||||
.padStart(specs.length.toString().length)} of ${specs.length}\r`
|
||||
);
|
||||
for (const name in tests) {
|
||||
const test = tests[name];
|
||||
const before = process.hrtime.bigint();
|
||||
@ -101,13 +104,18 @@ export async function bench(tests, specs) {
|
||||
}
|
||||
const after = process.hrtime.bigint();
|
||||
stats[name].elapsed += after - before;
|
||||
stats[name].correct += (await isEqual(spec.html, await test(spec.markdown)) ? 1 : 0);
|
||||
stats[name].correct += (await htmlIsEqual(
|
||||
spec.html,
|
||||
await test(spec.markdown)
|
||||
))
|
||||
? 1
|
||||
: 0;
|
||||
}
|
||||
}
|
||||
|
||||
for (const name in tests) {
|
||||
const ms = prettyElapsedTime(stats[name].elapsed);
|
||||
const percent = (stats[name].correct / specs.length * 100).toFixed(2);
|
||||
const percent = ((stats[name].correct / specs.length) * 100).toFixed(2);
|
||||
console.log(`${name} completed in ${ms}ms and passed ${percent}%`);
|
||||
}
|
||||
}
|
||||
@ -121,7 +129,7 @@ function parseArg(argv) {
|
||||
const options = {};
|
||||
const orphans = [];
|
||||
|
||||
function getarg() {
|
||||
function getArg() {
|
||||
let arg = argv.shift();
|
||||
|
||||
if (arg.indexOf('--') === 0) {
|
||||
@ -135,9 +143,11 @@ function parseArg(argv) {
|
||||
} else if (arg[0] === '-') {
|
||||
if (arg.length > 2) {
|
||||
// e.g. -abc
|
||||
argv = arg.substring(1).split('').map(ch => {
|
||||
return `-${ch}`;
|
||||
}).concat(argv);
|
||||
argv = arg
|
||||
.substring(1)
|
||||
.split('')
|
||||
.map((ch) => `-${ch}`)
|
||||
.concat(argv);
|
||||
arg = argv.shift();
|
||||
} else {
|
||||
// e.g. -a
|
||||
@ -152,7 +162,7 @@ function parseArg(argv) {
|
||||
const defaults = marked.getDefaults();
|
||||
|
||||
while (argv.length) {
|
||||
const arg = getarg();
|
||||
const arg = getArg();
|
||||
if (arg.indexOf('--') === 0) {
|
||||
const opt = camelize(arg.replace(/^--(no-)?/, ''));
|
||||
if (!defaults.hasOwnProperty(opt)) {
|
||||
@ -160,13 +170,10 @@ function parseArg(argv) {
|
||||
}
|
||||
options.marked = options.marked || {};
|
||||
if (arg.indexOf('--no-') === 0) {
|
||||
options.marked[opt] = typeof defaults[opt] !== 'boolean'
|
||||
? null
|
||||
: false;
|
||||
options.marked[opt] = typeof defaults[opt] !== 'boolean' ? null : false;
|
||||
} else {
|
||||
options.marked[opt] = typeof defaults[opt] !== 'boolean'
|
||||
? argv.shift()
|
||||
: true;
|
||||
options.marked[opt] =
|
||||
typeof defaults[opt] !== 'boolean' ? argv.shift() : true;
|
||||
}
|
||||
} else {
|
||||
orphans.push(arg);
|
||||
@ -176,7 +183,7 @@ function parseArg(argv) {
|
||||
if (orphans.length > 0) {
|
||||
console.error();
|
||||
console.error('The following arguments are not used:');
|
||||
orphans.forEach(arg => console.error(` ${arg}`));
|
||||
orphans.forEach((arg) => console.error(` ${arg}`));
|
||||
console.error();
|
||||
}
|
||||
|
||||
|
@ -1,56 +0,0 @@
|
||||
import { Marked, setOptions, getDefaults } from '../../src/marked.ts';
|
||||
import { isEqual, firstDiff } from './html-differ.js';
|
||||
import { strictEqual } from 'assert';
|
||||
|
||||
beforeEach(() => {
|
||||
setOptions(getDefaults());
|
||||
setOptions({ silent: true });
|
||||
|
||||
jasmine.addAsyncMatchers({
|
||||
toRender: () => {
|
||||
return {
|
||||
compare: async(spec, expected) => {
|
||||
const marked = new Marked();
|
||||
const result = {};
|
||||
const actual = marked.parse(spec.markdown, spec.options);
|
||||
result.pass = await isEqual(expected, actual);
|
||||
|
||||
if (result.pass) {
|
||||
result.message = `${spec.markdown}\n------\n\nExpected: Should Fail`;
|
||||
} else {
|
||||
const diff = await firstDiff(actual, expected);
|
||||
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
};
|
||||
},
|
||||
toEqualHtml: () => {
|
||||
return {
|
||||
compare: async(actual, expected) => {
|
||||
const result = {};
|
||||
result.pass = await isEqual(expected, actual);
|
||||
|
||||
if (result.pass) {
|
||||
result.message = `Expected '${actual}' not to equal '${expected}'`;
|
||||
} else {
|
||||
const diff = await firstDiff(actual, expected);
|
||||
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
};
|
||||
},
|
||||
toRenderExact: () => ({
|
||||
compare: async(spec, expected) => {
|
||||
const marked = new Marked();
|
||||
const result = {};
|
||||
const actual = marked.parse(spec.markdown, spec.options);
|
||||
|
||||
result.pass = strictEqual(expected, actual) === undefined;
|
||||
|
||||
return result;
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
@ -1,38 +0,0 @@
|
||||
import { HtmlDiffer } from '@markedjs/html-differ';
|
||||
const htmlDiffer = new HtmlDiffer({
|
||||
ignoreSelfClosingSlash: true,
|
||||
ignoreComments: false
|
||||
});
|
||||
|
||||
export const isEqual = htmlDiffer.isEqual.bind(htmlDiffer);
|
||||
export async function firstDiff(actual, expected, padding) {
|
||||
padding = padding || 30;
|
||||
const diffHtml = await htmlDiffer.diffHtml(actual, expected);
|
||||
const result = diffHtml.reduce((obj, diff) => {
|
||||
if (diff.added) {
|
||||
if (obj.firstIndex === null) {
|
||||
obj.firstIndex = obj.expected.length;
|
||||
}
|
||||
obj.expected += diff.value;
|
||||
} else if (diff.removed) {
|
||||
if (obj.firstIndex === null) {
|
||||
obj.firstIndex = obj.actual.length;
|
||||
}
|
||||
obj.actual += diff.value;
|
||||
} else {
|
||||
obj.actual += diff.value;
|
||||
obj.expected += diff.value;
|
||||
}
|
||||
|
||||
return obj;
|
||||
}, {
|
||||
firstIndex: null,
|
||||
actual: '',
|
||||
expected: ''
|
||||
});
|
||||
|
||||
return {
|
||||
actual: result.actual.substring(result.firstIndex - padding, result.firstIndex + padding),
|
||||
expected: result.expected.substring(result.firstIndex - padding, result.firstIndex + padding)
|
||||
};
|
||||
}
|
@ -1,93 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import fm from 'front-matter';
|
||||
import { createRequire } from 'module';
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
|
||||
export function outputCompletionTable(title, specs) {
|
||||
let longestName = 0;
|
||||
let maxSpecs = 0;
|
||||
|
||||
for (const section in specs) {
|
||||
longestName = Math.max(section.length, longestName);
|
||||
maxSpecs = Math.max(specs[section].total, maxSpecs);
|
||||
}
|
||||
|
||||
const maxSpecsLen = ('' + maxSpecs).length;
|
||||
const spaces = maxSpecsLen * 2 + longestName + 11;
|
||||
|
||||
console.log('-'.padEnd(spaces + 4, '-'));
|
||||
console.log(`| ${title.padStart(Math.ceil((spaces + title.length) / 2)).padEnd(spaces)} |`);
|
||||
console.log(`| ${' '.padEnd(spaces)} |`);
|
||||
for (const section in specs) {
|
||||
console.log(`| ${section.padEnd(longestName)} ${('' + specs[section].pass).padStart(maxSpecsLen)} of ${('' + specs[section].total).padStart(maxSpecsLen)} ${(100 * specs[section].pass / specs[section].total).toFixed().padStart(4)}% |`);
|
||||
}
|
||||
console.log('-'.padEnd(spaces + 4, '-'));
|
||||
console.log();
|
||||
}
|
||||
|
||||
export function loadFiles(dir) {
|
||||
const files = fs.readdirSync(dir);
|
||||
|
||||
return files.reduce((obj, file) => {
|
||||
const ext = path.extname(file);
|
||||
const name = path.basename(file, ext);
|
||||
const absFile = path.join(dir, file);
|
||||
let specs;
|
||||
|
||||
switch (ext) {
|
||||
case '.md': {
|
||||
const content = fm(fs.readFileSync(absFile, 'utf8'));
|
||||
const skip = content.attributes.skip;
|
||||
delete content.attributes.skip;
|
||||
const only = content.attributes.only;
|
||||
delete content.attributes.only;
|
||||
specs = [{
|
||||
section: name,
|
||||
markdown: content.body,
|
||||
html: fs.readFileSync(absFile.replace(/[^.]+$/, 'html'), 'utf8'),
|
||||
options: content.attributes,
|
||||
only,
|
||||
skip
|
||||
}];
|
||||
break;
|
||||
}
|
||||
case '.cjs':
|
||||
case '.json': {
|
||||
try {
|
||||
specs = require(absFile);
|
||||
} catch (err) {
|
||||
console.log(`Error loading ${absFile}`);
|
||||
throw err;
|
||||
}
|
||||
if (!Array.isArray(specs)) {
|
||||
specs = [specs];
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
return obj;
|
||||
}
|
||||
|
||||
for (let i = 0; i < specs.length; i++) {
|
||||
const spec = specs[i];
|
||||
if (!spec.section) {
|
||||
spec.section = `${name}[${i}]`;
|
||||
}
|
||||
if (!obj[spec.section]) {
|
||||
obj[spec.section] = {
|
||||
total: 0,
|
||||
pass: 0,
|
||||
specs: []
|
||||
};
|
||||
}
|
||||
|
||||
obj[spec.section].total++;
|
||||
obj[spec.section].pass += spec.shouldFail ? 0 : 1;
|
||||
obj[spec.section].specs.push(spec);
|
||||
}
|
||||
|
||||
return obj;
|
||||
}, {});
|
||||
}
|
50
test/run-spec-tests.js
vendored
Normal file
50
test/run-spec-tests.js
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
import { Marked } from '../lib/marked.esm.js';
|
||||
import { getTests, runTests, outputCompletionTable } from '@markedjs/testutils';
|
||||
import { resolve, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
function parse(markdown, options) {
|
||||
const marked = new Marked(options);
|
||||
return marked.parse(markdown);
|
||||
}
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const [commonMarkTests, gfmTests, newTests, originalTests, redosTests] =
|
||||
await getTests([
|
||||
resolve(__dirname, './specs/commonmark'),
|
||||
resolve(__dirname, './specs/gfm'),
|
||||
resolve(__dirname, './specs/new'),
|
||||
resolve(__dirname, './specs/original'),
|
||||
resolve(__dirname, './specs/redos')
|
||||
]);
|
||||
|
||||
outputCompletionTable('CommonMark', commonMarkTests);
|
||||
runTests({
|
||||
tests: commonMarkTests,
|
||||
parse,
|
||||
defaultMarkedOptions: { gfm: false, pedantic: false }
|
||||
});
|
||||
|
||||
outputCompletionTable('GFM', gfmTests);
|
||||
runTests({
|
||||
tests: gfmTests,
|
||||
parse,
|
||||
defaultMarkedOptions: { gfm: true, pedantic: false }
|
||||
});
|
||||
|
||||
runTests({
|
||||
tests: newTests,
|
||||
parse
|
||||
});
|
||||
|
||||
runTests({
|
||||
tests: originalTests,
|
||||
parse,
|
||||
defaultMarkedOptions: { gfm: false, pedantic: true }
|
||||
});
|
||||
|
||||
runTests({
|
||||
tests: redosTests,
|
||||
parse
|
||||
});
|
@ -61,8 +61,8 @@
|
||||
},
|
||||
{
|
||||
"section": "[extension] Strikethrough",
|
||||
"html": "<p><del>Hi</del> Hello, world!</p>",
|
||||
"markdown": "~~Hi~~ Hello, world!",
|
||||
"html": "<p><del>Hi</del> Hello, <del>there</del> world!</p>",
|
||||
"markdown": "~~Hi~~ Hello, ~there~ world!",
|
||||
"example": 491
|
||||
},
|
||||
{
|
||||
@ -71,77 +71,104 @@
|
||||
"markdown": "This ~~has a\n\nnew paragraph~~.",
|
||||
"example": 492
|
||||
},
|
||||
{
|
||||
"section": "[extension] Strikethrough",
|
||||
"html": "<p>This will ~~~not~~~ strike.</p>",
|
||||
"markdown": "This will ~~~not~~~ strike.",
|
||||
"example": 493
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://www.commonmark.org\">www.commonmark.org</a></p>",
|
||||
"markdown": "www.commonmark.org",
|
||||
"example": 621
|
||||
"example": 622
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p>Visit <a href=\"http://www.commonmark.org/help\">www.commonmark.org/help</a> for more information.</p>",
|
||||
"markdown": "Visit www.commonmark.org/help for more information.",
|
||||
"example": 622
|
||||
"example": 623
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p>Visit <a href=\"http://www.commonmark.org\">www.commonmark.org</a>.</p>\n<p>Visit <a href=\"http://www.commonmark.org/a.b\">www.commonmark.org/a.b</a>.</p>",
|
||||
"markdown": "Visit www.commonmark.org.\n\nVisit www.commonmark.org/a.b.",
|
||||
"example": 623
|
||||
"example": 624
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://www.google.com/search?q=Markup+(business)\">www.google.com/search?q=Markup+(business)</a></p>\n<p><a href=\"http://www.google.com/search?q=Markup+(business)\">www.google.com/search?q=Markup+(business)</a>))</p>\n<p>(<a href=\"http://www.google.com/search?q=Markup+(business)\">www.google.com/search?q=Markup+(business)</a>)</p>\n<p>(<a href=\"http://www.google.com/search?q=Markup+(business)\">www.google.com/search?q=Markup+(business)</a></p>",
|
||||
"markdown": "www.google.com/search?q=Markup+(business)\n\nwww.google.com/search?q=Markup+(business)))\n\n(www.google.com/search?q=Markup+(business))\n\n(www.google.com/search?q=Markup+(business)",
|
||||
"example": 624
|
||||
"example": 625
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://www.google.com/search?q=(business))+ok\">www.google.com/search?q=(business))+ok</a></p>",
|
||||
"markdown": "www.google.com/search?q=(business))+ok",
|
||||
"example": 625
|
||||
"example": 626
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://www.google.com/search?q=commonmark&hl=en\">www.google.com/search?q=commonmark&hl=en</a></p>\n<p><a href=\"http://www.google.com/search?q=commonmark\">www.google.com/search?q=commonmark</a>&hl;</p>",
|
||||
"markdown": "www.google.com/search?q=commonmark&hl=en\n\nwww.google.com/search?q=commonmark&hl;",
|
||||
"example": 626
|
||||
"example": 627
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://www.commonmark.org/he\">www.commonmark.org/he</a><lp</p>",
|
||||
"markdown": "www.commonmark.org/he<lp",
|
||||
"example": 627
|
||||
"example": 628
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"http://commonmark.org\">http://commonmark.org</a></p>\n<p>(Visit <a href=\"https://encrypted.google.com/search?q=Markup+(business)\">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>",
|
||||
"markdown": "http://commonmark.org\n\n(Visit https://encrypted.google.com/search?q=Markup+(business))",
|
||||
"example": 628
|
||||
"example": 629
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>",
|
||||
"markdown": "foo@bar.baz",
|
||||
"example": 629
|
||||
"example": 630
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p>hello@mail+xyz.example isn't valid, but <a href=\"mailto:hello+xyz@mail.example\">hello+xyz@mail.example</a> is.</p>",
|
||||
"markdown": "hello@mail+xyz.example isn't valid, but hello+xyz@mail.example is.",
|
||||
"example": 630
|
||||
"example": 631
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"mailto:a.b-c_d@a.b\">a.b-c_d@a.b</a></p>\n<p><a href=\"mailto:a.b-c_d@a.b\">a.b-c_d@a.b</a>.</p>\n<p>a.b-c_d@a.b-</p>\n<p>a.b-c_d@a.b_</p>",
|
||||
"markdown": "a.b-c_d@a.b\n\na.b-c_d@a.b.\n\na.b-c_d@a.b-\n\na.b-c_d@a.b_",
|
||||
"example": 631
|
||||
"example": 632
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"mailto:foo@bar.baz\">mailto:foo@bar.baz</a></p>\n<p><a href=\"mailto:a.b-c_d@a.b\">mailto:a.b-c_d@a.b</a></p>\n<p><a href=\"mailto:a.b-c_d@a.b\">mailto:a.b-c_d@a.b</a>.</p>\n<p><a href=\"mailto:a.b-c_d@a.b\">mailto:a.b-c_d@a.b</a>/</p>\n<p>mailto:a.b-c_d@a.b-</p>\n<p>mailto:a.b-c_d@a.b_</p>\n<p><a href=\"xmpp:foo@bar.baz\">xmpp:foo@bar.baz</a></p>\n<p><a href=\"xmpp:foo@bar.baz\">xmpp:foo@bar.baz</a>.</p>",
|
||||
"markdown": "mailto:foo@bar.baz\n\nmailto:a.b-c_d@a.b\n\nmailto:a.b-c_d@a.b.\n\nmailto:a.b-c_d@a.b/\n\nmailto:a.b-c_d@a.b-\n\nmailto:a.b-c_d@a.b_\n\nxmpp:foo@bar.baz\n\nxmpp:foo@bar.baz.",
|
||||
"example": 633,
|
||||
"shouldFail": true
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"xmpp:foo@bar.baz/txt\">xmpp:foo@bar.baz/txt</a></p>\n<p><a href=\"xmpp:foo@bar.baz/txt@bin\">xmpp:foo@bar.baz/txt@bin</a></p>\n<p><a href=\"xmpp:foo@bar.baz/txt@bin.com\">xmpp:foo@bar.baz/txt@bin.com</a></p>",
|
||||
"markdown": "xmpp:foo@bar.baz/txt\n\nxmpp:foo@bar.baz/txt@bin\n\nxmpp:foo@bar.baz/txt@bin.com",
|
||||
"example": 634,
|
||||
"shouldFail": true
|
||||
},
|
||||
{
|
||||
"section": "[extension] Autolinks",
|
||||
"html": "<p><a href=\"xmpp:foo@bar.baz/txt\">xmpp:foo@bar.baz/txt</a>/bin</p>",
|
||||
"markdown": "xmpp:foo@bar.baz/txt/bin",
|
||||
"example": 635,
|
||||
"shouldFail": true
|
||||
},
|
||||
{
|
||||
"section": "[extension] Disallowed Raw HTML",
|
||||
"html": "<p><strong> <title> <style> <em></p>\n<blockquote>\n <xmp> is disallowed. <XMP> is also disallowed.\n</blockquote>",
|
||||
"markdown": "<strong> <title> <style> <em>\n\n<blockquote>\n <xmp> is disallowed. <XMP> is also disallowed.\n</blockquote>",
|
||||
"example": 653,
|
||||
"example": 657,
|
||||
"shouldFail": true
|
||||
}
|
||||
]
|
||||
|
@ -1,52 +0,0 @@
|
||||
import { dirname, resolve } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { loadFiles, outputCompletionTable } from '../helpers/load.js';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
function runSpecs(title, dir, showCompletionTable, options) {
|
||||
options = options || {};
|
||||
const specs = loadFiles(resolve(__dirname, dir));
|
||||
|
||||
if (showCompletionTable) {
|
||||
outputCompletionTable(title, specs);
|
||||
}
|
||||
|
||||
describe(title, () => {
|
||||
Object.keys(specs).forEach(section => {
|
||||
describe(section, () => {
|
||||
specs[section].specs.forEach((spec) => {
|
||||
spec.options = Object.assign({}, options, (spec.options || {}));
|
||||
const example = (spec.example ? ' example ' + spec.example : '');
|
||||
const passFail = (spec.shouldFail ? 'fail' : 'pass');
|
||||
|
||||
if (typeof spec.options.silent === 'undefined') {
|
||||
spec.options.silent = true;
|
||||
}
|
||||
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
const before = process.hrtime();
|
||||
if (spec.shouldFail) {
|
||||
await expectAsync(spec).not.toRender(spec.html);
|
||||
} else if (spec.options.renderExact) {
|
||||
await expectAsync(spec).toRenderExact(spec.html);
|
||||
} else {
|
||||
await expectAsync(spec).toRender(spec.html);
|
||||
}
|
||||
const elapsed = process.hrtime(before);
|
||||
if (elapsed[0] > 0) {
|
||||
const s = (elapsed[0] + elapsed[1] * 1e-9).toFixed(3);
|
||||
fail(`took too long: ${s}s`);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
runSpecs('GFM', './gfm', true, { gfm: true, pedantic: false });
|
||||
runSpecs('CommonMark', './commonmark', true, { gfm: false, pedantic: false });
|
||||
runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
@ -1,7 +1,14 @@
|
||||
import { marked } from '../../src/marked.js';
|
||||
import { Marked } from '../../lib/marked.esm.js';
|
||||
import { timeout } from './utils.js';
|
||||
import { describe, it, beforeEach } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
|
||||
describe('Hooks', () => {
|
||||
let marked;
|
||||
beforeEach(() => {
|
||||
marked = new Marked();
|
||||
});
|
||||
|
||||
it('should preprocess markdown', () => {
|
||||
marked.use({
|
||||
hooks: {
|
||||
@ -10,8 +17,8 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const html = marked('*text*');
|
||||
expect(html.trim()).toBe('<h1>preprocess</h1>\n<p><em>text</em></p>');
|
||||
const html = marked.parse('*text*');
|
||||
assert.strictEqual(html.trim(), '<h1>preprocess</h1>\n<p><em>text</em></p>');
|
||||
});
|
||||
|
||||
it('should preprocess async', async() => {
|
||||
@ -24,10 +31,10 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const promise = marked('*text*');
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
const promise = marked.parse('*text*');
|
||||
assert.ok(promise instanceof Promise);
|
||||
const html = await promise;
|
||||
expect(html.trim()).toBe('<h1>preprocess async</h1>\n<p><em>text</em></p>');
|
||||
assert.strictEqual(html.trim(), '<h1>preprocess async</h1>\n<p><em>text</em></p>');
|
||||
});
|
||||
|
||||
it('should preprocess options', () => {
|
||||
@ -39,8 +46,8 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const html = marked('line1\nline2');
|
||||
expect(html.trim()).toBe('<p>line1<br>line2</p>');
|
||||
const html = marked.parse('line1\nline2');
|
||||
assert.strictEqual(html.trim(), '<p>line1<br>line2</p>');
|
||||
});
|
||||
|
||||
it('should preprocess options async', async() => {
|
||||
@ -54,8 +61,8 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const html = await marked('line1\nline2');
|
||||
expect(html.trim()).toBe('<p>line1<br>line2</p>');
|
||||
const html = await marked.parse('line1\nline2');
|
||||
assert.strictEqual(html.trim(), '<p>line1<br>line2</p>');
|
||||
});
|
||||
|
||||
it('should postprocess html', () => {
|
||||
@ -66,8 +73,8 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const html = marked('*text*');
|
||||
expect(html.trim()).toBe('<p><em>text</em></p>\n<h1>postprocess</h1>');
|
||||
const html = marked.parse('*text*');
|
||||
assert.strictEqual(html.trim(), '<p><em>text</em></p>\n<h1>postprocess</h1>');
|
||||
});
|
||||
|
||||
it('should postprocess async', async() => {
|
||||
@ -80,10 +87,10 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const promise = marked('*text*');
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
const promise = marked.parse('*text*');
|
||||
assert.ok(promise instanceof Promise);
|
||||
const html = await promise;
|
||||
expect(html.trim()).toBe('<p><em>text</em></p>\n<h1>postprocess async</h1>');
|
||||
assert.strictEqual(html.trim(), '<p><em>text</em></p>\n<h1>postprocess async</h1>');
|
||||
});
|
||||
|
||||
it('should process all hooks in reverse', async() => {
|
||||
@ -109,9 +116,9 @@ describe('Hooks', () => {
|
||||
}
|
||||
}
|
||||
});
|
||||
const promise = marked('*text*');
|
||||
expect(promise).toBeInstanceOf(Promise);
|
||||
const promise = marked.parse('*text*');
|
||||
assert.ok(promise instanceof Promise);
|
||||
const html = await promise;
|
||||
expect(html.trim()).toBe('<h1>preprocess1</h1>\n<h1>preprocess2</h1>\n<p><em>text</em></p>\n<h1>postprocess2 async</h1>\n<h1>postprocess1</h1>');
|
||||
assert.strictEqual(html.trim(), '<h1>preprocess1</h1>\n<h1>preprocess2</h1>\n<p><em>text</em></p>\n<h1>postprocess2 async</h1>\n<h1>postprocess1</h1>');
|
||||
});
|
||||
});
|
@ -1,20 +1,28 @@
|
||||
import { _Lexer } from '../../src/Lexer.js';
|
||||
import { Lexer } from '../../lib/marked.esm.js';
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
|
||||
function expectTokens({ md, options, tokens = [], links = {} }) {
|
||||
const lexer = new _Lexer(options);
|
||||
function expectTokens({ md, options, tokens = [], links = {}, log = false }) {
|
||||
const lexer = new Lexer(options);
|
||||
const actual = lexer.lex(md);
|
||||
const expected = tokens;
|
||||
expected.links = links;
|
||||
// console.log(JSON.stringify(actual, null, 2));
|
||||
expect(actual).toEqual(expected);
|
||||
if (log) {
|
||||
console.log(JSON.stringify(
|
||||
actual,
|
||||
(k, v) => v === undefined ? null : v,
|
||||
2
|
||||
));
|
||||
}
|
||||
assert.deepEqual(actual, expected);
|
||||
}
|
||||
|
||||
function expectInlineTokens({ md, options, tokens = jasmine.any(Array), links = {} }) {
|
||||
const lexer = new _Lexer(options);
|
||||
function expectInlineTokens({ md, options, tokens, links = {} }) {
|
||||
const lexer = new Lexer(options);
|
||||
lexer.tokens.links = links;
|
||||
const outTokens = [];
|
||||
lexer.inlineTokens(md, outTokens);
|
||||
expect(outTokens).toEqual(tokens);
|
||||
assert.deepEqual(outTokens, tokens);
|
||||
}
|
||||
|
||||
describe('Lexer', () => {
|
||||
@ -504,26 +512,65 @@ a | b
|
||||
1. item 1
|
||||
2. item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '1. item 1\n2. item 2\n',
|
||||
ordered: true,
|
||||
start: 1,
|
||||
loose: false,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
raw: '1. item 1\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
raw: '2. item 2'
|
||||
})
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '1. item 1\n',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '2. item 2',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -533,26 +580,65 @@ a | b
|
||||
1) item 1
|
||||
2) item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '1) item 1\n2) item 2\n',
|
||||
ordered: true,
|
||||
start: 1,
|
||||
loose: false,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
raw: '1) item 1\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
raw: '2) item 2'
|
||||
})
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '1) item 1\n',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '2) item 2',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -583,12 +669,20 @@ paragraph
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 1',
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', raw: 'item 1', text: 'item 1' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
@ -597,25 +691,38 @@ paragraph
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', raw: 'item 2', text: 'item 2' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{ type: 'space', raw: '\n\n' },
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n\n'
|
||||
},
|
||||
{
|
||||
type: 'paragraph',
|
||||
raw: 'paragraph\n',
|
||||
text: 'paragraph',
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
raw: 'paragraph',
|
||||
text: 'paragraph'
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'paragraph',
|
||||
text: 'paragraph'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
});
|
||||
@ -627,26 +734,65 @@ paragraph
|
||||
2. item 1
|
||||
3. item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '2. item 1\n3. item 2\n',
|
||||
ordered: true,
|
||||
start: 2,
|
||||
loose: false,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
raw: '2. item 1\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
raw: '3. item 2'
|
||||
})
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '2. item 1\n',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '3. item 2',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -657,27 +803,65 @@ paragraph
|
||||
|
||||
- item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '- item 1\n\n- item 2\n',
|
||||
ordered: false,
|
||||
start: '',
|
||||
loose: true,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 1\n\n',
|
||||
loose: true
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: true,
|
||||
text: 'item 1\n',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1\n',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 2',
|
||||
loose: true
|
||||
})
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: true,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -690,31 +874,103 @@ paragraph
|
||||
item 2a
|
||||
- item 3
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '- item 1\n- item 2\n\n item 2a\n- item 3\n',
|
||||
ordered: false,
|
||||
start: '',
|
||||
loose: true,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 1\n',
|
||||
loose: true
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: true,
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 2\n\n item 2a\n',
|
||||
loose: true
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: true,
|
||||
text: 'item 2\n\nitem 2a',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n\n'
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2a',
|
||||
text: 'item 2a',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2a',
|
||||
text: 'item 2a'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 3',
|
||||
loose: true
|
||||
})
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: true,
|
||||
text: 'item 3',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 3',
|
||||
text: 'item 3',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 3',
|
||||
text: 'item 3'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -724,32 +980,74 @@ paragraph
|
||||
- item 1
|
||||
- item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '- item 1\n - item 2\n',
|
||||
ordered: false,
|
||||
start: '',
|
||||
loose: false,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 1\n - item 2',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
raw: 'item 1\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
text: 'item 1\n- item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1\n',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '- item 2'
|
||||
})
|
||||
])
|
||||
})
|
||||
raw: '- item 2',
|
||||
ordered: false,
|
||||
start: '',
|
||||
loose: false,
|
||||
items: [
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- item 2',
|
||||
task: false,
|
||||
checked: undefined,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
||||
@ -759,28 +1057,65 @@ paragraph
|
||||
- [ ] item 1
|
||||
- [x] item 2
|
||||
`,
|
||||
tokens: jasmine.arrayContaining([
|
||||
jasmine.objectContaining({
|
||||
tokens: [
|
||||
{
|
||||
type: 'space',
|
||||
raw: '\n'
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
},
|
||||
{
|
||||
type: 'list',
|
||||
raw: '- [ ] item 1\n- [x] item 2\n',
|
||||
ordered: false,
|
||||
start: '',
|
||||
loose: false,
|
||||
items: [
|
||||
jasmine.objectContaining({
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- [ ] item 1\n',
|
||||
task: true,
|
||||
checked: false
|
||||
}),
|
||||
jasmine.objectContaining({
|
||||
checked: false,
|
||||
loose: false,
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 1',
|
||||
text: 'item 1'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
type: 'list_item',
|
||||
raw: '- [x] item 2',
|
||||
task: true,
|
||||
checked: true
|
||||
})
|
||||
checked: true,
|
||||
loose: false,
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2',
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
raw: 'item 2',
|
||||
text: 'item 2'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
])
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -1145,9 +1480,22 @@ paragraph
|
||||
expectInlineTokens({
|
||||
md: 'a\nb',
|
||||
options: { gfm: true, breaks: true },
|
||||
tokens: jasmine.arrayContaining([
|
||||
{ type: 'br', raw: '\n' }
|
||||
])
|
||||
tokens: [
|
||||
{
|
||||
raw: 'a',
|
||||
text: 'a',
|
||||
type: 'text'
|
||||
},
|
||||
{
|
||||
raw: '\n',
|
||||
type: 'br'
|
||||
},
|
||||
{
|
||||
raw: 'b',
|
||||
text: 'b',
|
||||
type: 'text'
|
||||
}
|
||||
]
|
||||
});
|
||||
});
|
||||
|
@ -1,9 +1,13 @@
|
||||
import { _Parser } from '../../src/Parser.js';
|
||||
import { Parser } from '../../lib/marked.esm.js';
|
||||
import { htmlIsEqual, firstDiff } from '@markedjs/testutils';
|
||||
import assert from 'node:assert';
|
||||
import { describe, it } from 'node:test';
|
||||
|
||||
async function expectHtml({ tokens, options, html, inline }) {
|
||||
const parser = new _Parser(options);
|
||||
const parser = new Parser(options);
|
||||
const actual = parser[inline ? 'parseInline' : 'parse'](tokens);
|
||||
await expectAsync(actual).toEqualHtml(html);
|
||||
const testDiff = await firstDiff(actual, html);
|
||||
assert.ok(await htmlIsEqual(html, actual), `Expected: ${testDiff.expected}\n Actual: ${testDiff.actual}`);
|
||||
}
|
||||
|
||||
describe('Parser', () => {
|
||||
@ -14,17 +18,13 @@ describe('Parser', () => {
|
||||
{
|
||||
type: 'paragraph',
|
||||
text: 'paragraph 1',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'paragraph 1' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'paragraph 1' }]
|
||||
},
|
||||
{ type: 'space' },
|
||||
{
|
||||
type: 'paragraph',
|
||||
text: 'paragraph 2',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'paragraph 2' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'paragraph 2' }]
|
||||
}
|
||||
],
|
||||
html: '<p>paragraph 1</p><p>paragraph 2</p>'
|
||||
@ -33,65 +33,71 @@ describe('Parser', () => {
|
||||
|
||||
it('hr', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'hr'
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'hr'
|
||||
}
|
||||
],
|
||||
html: '<hr />'
|
||||
});
|
||||
});
|
||||
|
||||
it('heading', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'heading',
|
||||
depth: 1,
|
||||
text: 'heading',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'heading' }
|
||||
]
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'heading',
|
||||
depth: 1,
|
||||
text: 'heading',
|
||||
tokens: [{ type: 'text', text: 'heading' }]
|
||||
}
|
||||
],
|
||||
html: '<h1>heading</h1>'
|
||||
});
|
||||
});
|
||||
|
||||
it('code', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'code',
|
||||
text: 'code'
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'code',
|
||||
text: 'code'
|
||||
}
|
||||
],
|
||||
html: '<pre><code>code</code></pre>'
|
||||
});
|
||||
});
|
||||
|
||||
it('table', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'table',
|
||||
align: ['left', 'right'],
|
||||
header: [
|
||||
{
|
||||
text: 'a',
|
||||
tokens: [{ type: 'text', raw: 'a', text: 'a' }]
|
||||
},
|
||||
{
|
||||
text: 'b',
|
||||
tokens: [{ type: 'text', raw: 'b', text: 'b' }]
|
||||
}
|
||||
],
|
||||
rows: [
|
||||
[
|
||||
tokens: [
|
||||
{
|
||||
type: 'table',
|
||||
align: ['left', 'right'],
|
||||
header: [
|
||||
{
|
||||
text: '1',
|
||||
tokens: [{ type: 'text', raw: '1', text: '1' }]
|
||||
text: 'a',
|
||||
tokens: [{ type: 'text', raw: 'a', text: 'a' }]
|
||||
},
|
||||
{
|
||||
text: '2',
|
||||
tokens: [{ type: 'text', raw: '2', text: '2' }]
|
||||
text: 'b',
|
||||
tokens: [{ type: 'text', raw: 'b', text: 'b' }]
|
||||
}
|
||||
],
|
||||
rows: [
|
||||
[
|
||||
{
|
||||
text: '1',
|
||||
tokens: [{ type: 'text', raw: '1', text: '1' }]
|
||||
},
|
||||
{
|
||||
text: '2',
|
||||
tokens: [{ type: 'text', raw: '2', text: '2' }]
|
||||
}
|
||||
]
|
||||
]
|
||||
]
|
||||
}],
|
||||
}
|
||||
],
|
||||
html: `
|
||||
<table>
|
||||
<thead>
|
||||
@ -115,13 +121,13 @@ describe('Parser', () => {
|
||||
tokens: [
|
||||
{
|
||||
type: 'blockquote',
|
||||
tokens: [{
|
||||
type: 'paragraph',
|
||||
text: 'blockquote',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'blockquote' }
|
||||
]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
text: 'blockquote',
|
||||
tokens: [{ type: 'text', text: 'blockquote' }]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
html: '<blockquote><p>blockquote</p></blockquote>'
|
||||
@ -141,20 +147,24 @@ describe('Parser', () => {
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -179,20 +189,24 @@ describe('Parser', () => {
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -217,20 +231,24 @@ describe('Parser', () => {
|
||||
{
|
||||
task: true,
|
||||
checked: false,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
task: true,
|
||||
checked: true,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -255,20 +273,24 @@ describe('Parser', () => {
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 1',
|
||||
tokens: [{ type: 'text', text: 'item 1' }]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
task: false,
|
||||
checked: undefined,
|
||||
tokens: [{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}]
|
||||
tokens: [
|
||||
{
|
||||
type: 'text',
|
||||
text: 'item 2',
|
||||
tokens: [{ type: 'text', text: 'item 2' }]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -284,23 +306,25 @@ describe('Parser', () => {
|
||||
|
||||
it('html', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'html',
|
||||
text: '<div>html</div>'
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'html',
|
||||
text: '<div>html</div>'
|
||||
}
|
||||
],
|
||||
html: '<div>html</div>'
|
||||
});
|
||||
});
|
||||
|
||||
it('paragraph', async() => {
|
||||
await expectHtml({
|
||||
tokens: [{
|
||||
type: 'paragraph',
|
||||
text: 'paragraph 1',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'paragraph 1' }
|
||||
]
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
text: 'paragraph 1',
|
||||
tokens: [{ type: 'text', text: 'paragraph 1' }]
|
||||
}
|
||||
],
|
||||
html: '<p>paragraph 1</p>'
|
||||
});
|
||||
});
|
||||
@ -320,9 +344,7 @@ describe('Parser', () => {
|
||||
it('escape', async() => {
|
||||
await expectHtml({
|
||||
inline: true,
|
||||
tokens: [
|
||||
{ type: 'escape', text: '>' }
|
||||
],
|
||||
tokens: [{ type: 'escape', text: '>' }],
|
||||
html: '>'
|
||||
});
|
||||
});
|
||||
@ -348,9 +370,7 @@ describe('Parser', () => {
|
||||
text: 'link',
|
||||
href: 'https://example.com',
|
||||
title: 'title',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'link' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'link' }]
|
||||
}
|
||||
],
|
||||
html: '<a href="https://example.com" title="title">link</a>'
|
||||
@ -379,9 +399,7 @@ describe('Parser', () => {
|
||||
{
|
||||
type: 'strong',
|
||||
text: 'strong',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'strong' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'strong' }]
|
||||
}
|
||||
],
|
||||
html: '<strong>strong</strong>'
|
||||
@ -395,9 +413,7 @@ describe('Parser', () => {
|
||||
{
|
||||
type: 'em',
|
||||
text: 'em',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'em' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'em' }]
|
||||
}
|
||||
],
|
||||
html: '<em>em</em>'
|
||||
@ -420,9 +436,11 @@ describe('Parser', () => {
|
||||
it('br', async() => {
|
||||
await expectHtml({
|
||||
inline: true,
|
||||
tokens: [{
|
||||
type: 'br'
|
||||
}],
|
||||
tokens: [
|
||||
{
|
||||
type: 'br'
|
||||
}
|
||||
],
|
||||
html: '<br />'
|
||||
});
|
||||
});
|
||||
@ -434,9 +452,7 @@ describe('Parser', () => {
|
||||
{
|
||||
type: 'del',
|
||||
text: 'del',
|
||||
tokens: [
|
||||
{ type: 'text', text: 'del' }
|
||||
]
|
||||
tokens: [{ type: 'text', text: 'del' }]
|
||||
}
|
||||
],
|
||||
html: '<del>del</del>'
|
@ -1,6 +1,10 @@
|
||||
import { main } from '../../bin/main.js';
|
||||
import { htmlIsEqual } from '@markedjs/testutils';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
function createMocks() {
|
||||
@ -14,23 +18,23 @@ function createMocks() {
|
||||
end: null
|
||||
},
|
||||
process: {
|
||||
cwd: jasmine.createSpy('process.cwd').and.returnValue('/cwd'),
|
||||
cwd: mock.fn(() => '/cwd'),
|
||||
env: [],
|
||||
argv: [],
|
||||
stdout: {
|
||||
write: jasmine.createSpy('process.stdout.write').and.callFake((str) => { mocks.stdout += str; })
|
||||
write: mock.fn((str) => { mocks.stdout += str; })
|
||||
},
|
||||
stderr: {
|
||||
write: jasmine.createSpy('process.stderr.write').and.callFake((str) => { mocks.stderr += str; })
|
||||
write: mock.fn((str) => { mocks.stderr += str; })
|
||||
},
|
||||
stdin: {
|
||||
setEncoding: jasmine.createSpy('process.stdin.setEncoding'),
|
||||
on: jasmine.createSpy('process.stdin.on').and.callFake((method, func) => {
|
||||
setEncoding: mock.fn(),
|
||||
on: mock.fn((method, func) => {
|
||||
mocks.stdin[method] = func;
|
||||
}),
|
||||
resume: jasmine.createSpy('process.stdin.resume')
|
||||
resume: mock.fn
|
||||
},
|
||||
exit: jasmine.createSpy('process.exit').and.callFake((code) => { mocks.code = code; })
|
||||
exit: mock.fn((code) => { mocks.code = code; })
|
||||
}
|
||||
};
|
||||
|
||||
@ -53,9 +57,9 @@ function testInput({ args = [], stdin = '', stdinError = '', stdout = '', stderr
|
||||
}
|
||||
await mainPromise;
|
||||
|
||||
await expectAsync(mocks.stdout).toEqualHtml(stdout);
|
||||
expect(mocks.stderr).toEqual(stderr);
|
||||
expect(mocks.code).toBe(code);
|
||||
assert.ok(await htmlIsEqual(mocks.stdout, stdout));
|
||||
assert.strictEqual(mocks.stderr, stderr);
|
||||
assert.strictEqual(mocks.code, code);
|
||||
};
|
||||
}
|
||||
|
||||
@ -89,7 +93,7 @@ describe('bin/marked', () => {
|
||||
|
||||
it('not found', testInput({
|
||||
args: ['--config', fixturePath('does-not-exist.js'), '-s', 'line1\nline2'],
|
||||
stderr: jasmine.stringContaining(`Cannot load config file '${fixturePath('does-not-exist.js')}'`),
|
||||
stderr: `Error: Cannot load config file '${fixturePath('does-not-exist.js')}'`,
|
||||
code: 1
|
||||
}));
|
||||
});
|
@ -1,4 +1,6 @@
|
||||
import { marked, Marked, Renderer } from '../../src/marked.js';
|
||||
import { marked, Marked, Renderer } from '../../lib/marked.esm.js';
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
|
||||
describe('Marked', () => {
|
||||
it('should allow multiple instances', () => {
|
||||
@ -20,9 +22,9 @@ describe('Marked', () => {
|
||||
}
|
||||
});
|
||||
|
||||
expect(marked1.parse('# header')).toBe('im marked1');
|
||||
expect(marked2.parse('# header')).toBe('im marked2');
|
||||
expect(marked.parse('# header')).toBe('<h1>header</h1>\n');
|
||||
assert.strictEqual(marked1.parse('# header'), 'im marked1');
|
||||
assert.strictEqual(marked2.parse('# header'), 'im marked2');
|
||||
assert.strictEqual(marked.parse('# header'), '<h1>header</h1>\n');
|
||||
});
|
||||
|
||||
it('should work with use', () => {
|
||||
@ -46,9 +48,9 @@ describe('Marked', () => {
|
||||
}
|
||||
});
|
||||
|
||||
expect(marked1.parse('# header')).toBe('im marked1');
|
||||
expect(marked2.parse('# header')).toBe('im marked2');
|
||||
expect(marked.parse('# header')).toBe('<h1>header</h1>\n');
|
||||
assert.strictEqual(marked1.parse('# header'), 'im marked1');
|
||||
assert.strictEqual(marked2.parse('# header'), 'im marked2');
|
||||
assert.strictEqual(marked.parse('# header'), '<h1>header</h1>\n');
|
||||
});
|
||||
|
||||
it('should work with setOptions', () => {
|
||||
@ -68,9 +70,9 @@ describe('Marked', () => {
|
||||
renderer: marked2Renderer
|
||||
});
|
||||
|
||||
expect(marked1.parse('# header')).toBe('im marked1');
|
||||
expect(marked2.parse('# header')).toBe('im marked2');
|
||||
expect(marked.parse('# header')).toBe('<h1>header</h1>\n');
|
||||
assert.strictEqual(marked1.parse('# header'), 'im marked1');
|
||||
assert.strictEqual(marked2.parse('# header'), 'im marked2');
|
||||
assert.strictEqual(marked.parse('# header'), '<h1>header</h1>\n');
|
||||
});
|
||||
|
||||
it('should pass defaults to lexer and parser', () => {
|
||||
@ -85,6 +87,6 @@ describe('Marked', () => {
|
||||
const tokens = marked1.lexer('# hi');
|
||||
const html = marked1.parser(tokens);
|
||||
|
||||
expect(html).toBe('test');
|
||||
assert.strictEqual(html, 'test');
|
||||
});
|
||||
});
|
@ -1,953 +0,0 @@
|
||||
import { marked, Renderer, lexer, parseInline, use, getDefaults, walkTokens, defaults, setOptions } from '../../src/marked.js';
|
||||
import { timeout } from './utils.js';
|
||||
|
||||
describe('Test paragraph token type', () => {
|
||||
it('should use the "paragraph" type on top level', () => {
|
||||
const md = 'A Paragraph.\n\n> A blockquote\n\n- list item\n';
|
||||
|
||||
const tokens = lexer(md);
|
||||
|
||||
expect(tokens[0].type).toBe('paragraph');
|
||||
expect(tokens[2].tokens[0].type).toBe('paragraph');
|
||||
expect(tokens[3].items[0].tokens[0].type).toBe('text');
|
||||
});
|
||||
});
|
||||
|
||||
describe('changeDefaults', () => {
|
||||
it('should change global defaults', async() => {
|
||||
const { _defaults, changeDefaults } = await import('../../src/defaults.js');
|
||||
expect(_defaults.test).toBeUndefined();
|
||||
changeDefaults({ test: true });
|
||||
expect((await import('../../src/defaults.js'))._defaults.test).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('inlineLexer', () => {
|
||||
it('should send html to renderer.html', () => {
|
||||
const renderer = new Renderer();
|
||||
spyOn(renderer, 'html').and.callThrough();
|
||||
const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" />';
|
||||
marked(md, { renderer });
|
||||
|
||||
expect(renderer.html).toHaveBeenCalledWith('<img alt="MY IMAGE" src="example.png" />');
|
||||
});
|
||||
});
|
||||
|
||||
describe('task', () => {
|
||||
it('space after checkbox', () => {
|
||||
const html = marked('- [ ] item');
|
||||
|
||||
expect(html).toBe('<ul>\n<li><input disabled="" type="checkbox"> item</li>\n</ul>\n');
|
||||
});
|
||||
|
||||
it('space after loose checkbox', () => {
|
||||
const html = marked('- [ ] item 1\n\n- [ ] item 2');
|
||||
|
||||
expect(html).toBe('<ul>\n<li><p><input disabled="" type="checkbox"> \nitem 1</p>\n</li>\n<li><p><input disabled="" type="checkbox"> \nitem 2</p>\n</li>\n</ul>\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseInline', () => {
|
||||
it('should parse inline tokens', () => {
|
||||
const md = '**strong** _em_';
|
||||
const html = parseInline(md);
|
||||
|
||||
expect(html).toBe('<strong>strong</strong> <em>em</em>');
|
||||
});
|
||||
|
||||
it('should not parse block tokens', () => {
|
||||
const md = '# header\n\n_em_';
|
||||
const html = parseInline(md);
|
||||
|
||||
expect(html).toBe('# header\n\n<em>em</em>');
|
||||
});
|
||||
});
|
||||
|
||||
describe('use extension', () => {
|
||||
it('should use custom block tokenizer + renderer extensions', () => {
|
||||
const underline = {
|
||||
name: 'underline',
|
||||
level: 'block',
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*)(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'underline',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `<u>${token.text}</u>\n`;
|
||||
}
|
||||
};
|
||||
use({ extensions: [underline] });
|
||||
let html = marked('Not Underlined\n:Underlined\nNot Underlined');
|
||||
expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
|
||||
|
||||
html = marked('Not Underlined\n\n:Underlined\n\nNot Underlined');
|
||||
expect(html).toBe('<p>Not Underlined</p>\n<u>Underlined</u>\n<p>Not Underlined</p>\n');
|
||||
});
|
||||
|
||||
it('should interrupt paragraphs if using "start" property', () => {
|
||||
const underline = {
|
||||
extensions: [{
|
||||
name: 'underline',
|
||||
level: 'block',
|
||||
start(src) { return src.indexOf(':'); },
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*):(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'underline',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `<u>${token.text}</u>\n`;
|
||||
}
|
||||
}]
|
||||
};
|
||||
use(underline);
|
||||
const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
|
||||
expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
|
||||
});
|
||||
|
||||
it('should use custom inline tokenizer + renderer extensions', () => {
|
||||
const underline = {
|
||||
name: 'underline',
|
||||
level: 'inline',
|
||||
start(src) { return src.indexOf('='); },
|
||||
tokenizer(src) {
|
||||
const rule = /^=([^=]+)=/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'underline',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `<u>${token.text}</u>`;
|
||||
}
|
||||
};
|
||||
use({ extensions: [underline] });
|
||||
const html = marked('Not Underlined =Underlined= Not Underlined');
|
||||
expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
|
||||
});
|
||||
|
||||
it('should handle interacting block and inline extensions', () => {
|
||||
const descriptionlist = {
|
||||
name: 'descriptionList',
|
||||
level: 'block',
|
||||
start(src) {
|
||||
const match = src.match(/:[^:\n]/);
|
||||
if (match) {
|
||||
return match.index;
|
||||
}
|
||||
},
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
const token = {
|
||||
type: 'descriptionList',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[0].trim(), // You can add additional properties to your tokens to pass along to the renderer
|
||||
tokens: []
|
||||
};
|
||||
this.lexer.inlineTokens(token.text, token.tokens);
|
||||
return token;
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `<dl>${this.parser.parseInline(token.tokens)}\n</dl>`;
|
||||
}
|
||||
};
|
||||
|
||||
const description = {
|
||||
name: 'description',
|
||||
level: 'inline',
|
||||
start(src) { return src.indexOf(':'); },
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
const token = {
|
||||
type: 'description',
|
||||
raw: match[0],
|
||||
dt: [],
|
||||
dd: []
|
||||
};
|
||||
this.lexer.inline(match[1].trim(), token.dt);
|
||||
this.lexer.inline(match[2].trim(), token.dd);
|
||||
return token;
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
|
||||
}
|
||||
};
|
||||
use({ extensions: [descriptionlist, description] });
|
||||
const html = marked('A Description List with One Description:\n'
|
||||
+ ': Topic 1 : Description 1\n'
|
||||
+ ': **Topic 2** : *Description 2*');
|
||||
expect(html).toBe('<p>A Description List with One Description:</p>\n'
|
||||
+ '<dl>'
|
||||
+ '\n<dt>Topic 1</dt><dd>Description 1</dd>'
|
||||
+ '\n<dt><strong>Topic 2</strong></dt><dd><em>Description 2</em></dd>'
|
||||
+ '\n</dl>');
|
||||
});
|
||||
|
||||
it('should allow other options mixed into the extension', () => {
  // A block-level ':text:' extension bundled together with the `silent` option.
  const underlineExt = {
    name: 'underline',
    level: 'block',
    start(src) { return src.indexOf(':'); },
    tokenizer(src) {
      const found = /^:([^\n]*):(?:\n|$)/.exec(src);
      if (!found) {
        return;
      }
      return {
        type: 'underline',
        raw: found[0], // text this token consumes from the source
        text: found[1].trim() // extra property passed along to the renderer
      };
    },
    renderer(token) {
      return `<u>${token.text}</u>\n`;
    }
  };
  use({ silent: true, extensions: [underlineExt] });
  const output = marked(':test:\ntest\n<div></div>');
  expect(output).toBe('<u>test</u>\n<p>test</p>\n<div></div>');
});
|
||||
|
||||
it('should handle renderers that return false', () => {
  const primary = {
    name: 'test',
    level: 'block',
    tokenizer(src) {
      const found = /^:([^\n]*):(?:\n|$)/.exec(src);
      if (!found) {
        return;
      }
      return {
        type: 'test',
        raw: found[0], // text this token consumes from the source
        text: found[1].trim() // extra property passed along to the renderer
      };
    },
    // Renders only ':test:'; everything else falls through to another renderer.
    renderer(token) {
      return token.text === 'test' ? 'test' : false;
    }
  };
  const fallback = {
    name: 'test',
    level: 'block',
    // Renders only ':Test:'; everything else falls through.
    renderer(token) {
      return token.text === 'Test' ? 'fallback' : false;
    }
  };
  use({ extensions: [fallback, primary] });
  const output = marked(':Test:\n\n:test:\n\n:none:');
  // ':none:' is declined by both renderers and contributes nothing.
  expect(output).toBe('fallbacktest');
});
|
||||
|
||||
it('should fall back when tokenizers return false', () => {
  const base = {
    name: 'test',
    level: 'block',
    tokenizer(src) {
      const found = /^:([^\n]*):(?:\n|$)/.exec(src);
      if (found) {
        return {
          type: 'test',
          raw: found[0], // text this token consumes from the source
          text: found[1].trim() // extra property passed along to the renderer
        };
      }
      return false;
    },
    renderer(token) {
      return token.text;
    }
  };
  const uppercaser = {
    name: 'test',
    level: 'block',
    // Only claims tokens whose text starts with an uppercase letter.
    tokenizer(src) {
      const found = /^:([^\n]*):(?:\n|$)/.exec(src);
      if (found && found[1].match(/^[A-Z]/)) {
        return {
          type: 'test',
          raw: found[0],
          text: found[1].trim().toUpperCase()
        };
      }
      return false;
    }
  };
  use({ extensions: [base, uppercaser] });
  const output = marked(':Test:\n\n:test:');
  expect(output).toBe('TESTtest');
});
|
||||
|
||||
it('should override original tokenizer/renderer with same name, but fall back if returns false', () => {
  const overrides = {
    extensions: [{
      name: 'heading',
      level: 'block',
      tokenizer(src) {
        return false; // fall back to default `heading` tokenizer
      },
      renderer(token) {
        return `<h${token.depth}>${token.text} RENDERER EXTENSION</h${token.depth}>\n`;
      }
    }, {
      name: 'code',
      level: 'block',
      tokenizer(src) {
        const found = /^:([^\n]*):(?:\n|$)/.exec(src);
        if (!found) {
          return;
        }
        return {
          type: 'code',
          raw: found[0],
          text: `${found[1].trim()} TOKENIZER EXTENSION`
        };
      },
      renderer(token) {
        return false; // fall back to default `code` renderer
      }
    }]
  };
  use(overrides);
  const output = marked('# extension1\n:extension2:');
  expect(output).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
});
|
||||
|
||||
it('should walk only specified child tokens', () => {
  const walkableDescription = {
    extensions: [{
      name: 'walkableDescription',
      level: 'inline',
      start(src) { return src.indexOf(':'); },
      tokenizer(src, tokens) {
        const found = /^:([^:\n]+):([^:\n]*)(?:\n|$)/.exec(src);
        if (!found) {
          return;
        }
        const token = {
          type: 'walkableDescription',
          raw: found[0],
          dt: this.lexer.inline(found[1].trim()),
          dd: [],
          tokens: []
        };
        this.lexer.inline(found[2].trim(), token.dd);
        this.lexer.inline('unwalked', token.tokens);
        return token;
      },
      renderer(token) {
        return `\n<dt>${this.parser.parseInline(token.dt)} - ${this.parser.parseInline(token.tokens)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
      },
      // walkTokens only visits `dd` and `dt`; `tokens` is left unwalked.
      childTokens: ['dd', 'dt']
    }],
    walkTokens(token) {
      if (token.type === 'text') {
        token.text += ' walked';
      }
    }
  };
  use(walkableDescription);
  const output = marked(': Topic 1 : Description 1\n'
    + ': **Topic 2** : *Description 2*');
  expect(output).toBe('<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
    + '\n<dt><strong>Topic 2 walked</strong> - unwalked</dt><dd><em>Description 2 walked</em></dd></p>\n');
});
|
||||
|
||||
describe('multiple extensions', () => {
  // Builds a full extension bundle keyed by `name`: a block extension, an
  // inline extension, a `heading` tokenizer override, a `heading` renderer
  // override, and a walkTokens hook.
  function buildExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          if (!src.startsWith(`::${name}\n`)) {
            return;
          }
          const token = {
            type: `block-${name}`,
            raw: `::${name}\n`,
            text: `:${name}`,
            tokens: []
          };
          this.lexer.inline(token.text, token.tokens);
          return token;
        },
        renderer(token) {
          return `<${token.type}>${this.parser.parseInline(token.tokens)}</${token.type}>\n`;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          if (!src.startsWith(`:${name}`)) {
            return;
          }
          return {
            type: `inline-${name}`,
            raw: `:${name}`,
            text: `used ${name}`
          };
        },
        renderer(token) {
          return token.text;
        }
      }],
      tokenizer: {
        heading(src) {
          if (!src.startsWith(`# ${name}`)) {
            return false;
          }
          const token = {
            type: 'heading',
            raw: `# ${name}`,
            text: `used ${name}`,
            depth: 1,
            tokens: []
          };
          this.lexer.inline(token.text, token.tokens);
          return token;
        }
      },
      renderer: {
        heading(text, depth, raw) {
          if (text === name) {
            return `<h${depth}>${text}</h${depth}>\n`;
          }
          return false;
        }
      },
      walkTokens(token) {
        if (token.text === `used ${name}`) {
          token.text += ' walked';
        }
      }
    };
  }

  // Same extension names, but every hook declines by returning false, so the
  // merged pipeline must fall back to the real implementations above.
  function buildNoopExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }]
    };
  }

  // Shared assertion: both extension bundles must be active at once.
  function assertMergedOutput() {
    const output = marked(`
::extension1
::extension2

:extension1
:extension2

# extension1

# extension2

# no extension
`);

    expect(`\n${output}\n`.replace(/\n+/g, '\n')).toBe(`
<block-extension1>used extension1 walked</block-extension1>
<block-extension2>used extension2 walked</block-extension2>
<p>used extension1 walked
used extension2 walked</p>
<h1>used extension1 walked</h1>
<h1>used extension2 walked</h1>
<h1>no extension</h1>
`);
  }

  it('should merge extensions when calling marked.use multiple times', () => {
    use(buildExtension('extension1'));
    use(buildExtension('extension2'));

    assertMergedOutput();
  });

  it('should merge extensions when calling marked.use with multiple extensions', () => {
    use(
      buildExtension('extension1'),
      buildExtension('extension2')
    );

    assertMergedOutput();
  });

  it('should fall back to any extensions with the same name if the first returns false', () => {
    use(
      buildExtension('extension1'),
      buildExtension('extension2'),
      buildNoopExtension('extension1'),
      buildNoopExtension('extension2')
    );

    assertMergedOutput();
  });

  it('should merge extensions correctly', () => {
    use(
      {},
      { tokenizer: {} },
      { renderer: {} },
      { walkTokens: () => {} },
      { extensions: [] }
    );

    expect(() => marked('# test')).not.toThrow();
  });
});
|
||||
|
||||
// The `async` option is sticky across `use` calls, resettable via
// `setOptions`, and decides whether `marked` returns a Promise or a string.

it('should be async if any extension in use args is async', () => {
  use({ async: true }, { async: false });
  expect(defaults.async).toBeTrue();
});

it('should be async if any extension in use is async', () => {
  use({ async: true });
  use({ async: false });
  expect(defaults.async).toBeTrue();
});

it('should reset async with setOptions', () => {
  use({ async: true });
  setOptions({ async: false });
  expect(defaults.async).toBeFalse();
});

it('should return Promise if async', () => {
  expect(marked('test', { async: true })).toBeInstanceOf(Promise);
});

it('should return string if not async', () => {
  expect(typeof marked('test', { async: false })).toBe('string');
});

it('should return Promise if async is set by extension', () => {
  use({ async: true });
  // A per-call `async: false` cannot undo an async extension.
  expect(marked('test', { async: false })).toBeInstanceOf(Promise);
});
|
||||
|
||||
it('should allow deleting/editing tokens', () => {
  const styleTags = {
    extensions: [{
      name: 'inlineStyleTag',
      level: 'inline',
      start(src) {
        const found = src.match(/ *{[^\{]/);
        if (found) {
          return found.index;
        }
      },
      tokenizer(src, tokens) {
        const found = /^ *{([^\{\}\n]+)}$/.exec(src);
        if (!found) {
          return;
        }
        return {
          type: 'inlineStyleTag',
          raw: found[0], // text this token consumes from the source
          text: found[1]
        };
      }
    },
    {
      name: 'styled',
      // Re-renders the token under its original type, then splices the saved
      // style attribute into the opening tag of the produced HTML.
      renderer(token) {
        token.type = token.originalType;
        const text = this.parser.parse([token]);
        const openingTag = /(<[^\s<>]+)([^\n<>]*>.*)/s.exec(text);
        if (openingTag) {
          return `${openingTag[1]} ${token.style}${openingTag[2]}`;
        }
        return text;
      }
    }],
    // If a token's last child is an inlineStyleTag, retag the parent as
    // 'styled' and delete the style child from the token stream.
    walkTokens(token) {
      if (!token.tokens) {
        return;
      }
      const lastChild = token.tokens[token.tokens.length - 1];
      if (lastChild && lastChild.type === 'inlineStyleTag') {
        token.originalType = token.type;
        token.type = 'styled';
        token.style = `style="color:${lastChild.text};"`;
        token.tokens.pop();
      }
    }
  };
  use(styleTags);
  const output = marked('This is a *paragraph* with blue text. {blue}\n'
    + '# This is a *header* with red text {red}');
  expect(output).toBe('<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
    + '<h1 style="color:red;">This is a <em>header</em> with red text</h1>\n');
});
|
||||
|
||||
it('should use renderer', () => {
  const rendererExt = {
    renderer: {
      paragraph(text) {
        return 'extension';
      }
    }
  };
  spyOn(rendererExt.renderer, 'paragraph').and.callThrough();
  use(rendererExt);
  const output = marked('text');
  expect(rendererExt.renderer.paragraph).toHaveBeenCalledWith('text');
  expect(output).toBe('extension');
});

it('should use tokenizer', () => {
  const tokenizerExt = {
    tokenizer: {
      paragraph(text) {
        const token = {
          type: 'paragraph',
          raw: text,
          text: 'extension',
          tokens: []
        };
        this.lexer.inline(token.text, token.tokens);
        return token;
      }
    }
  };
  spyOn(tokenizerExt.tokenizer, 'paragraph').and.callThrough();
  use(tokenizerExt);
  const output = marked('text');
  expect(tokenizerExt.tokenizer.paragraph).toHaveBeenCalledWith('text');
  expect(output).toBe('<p>extension</p>\n');
});

it('should use walkTokens', () => {
  let walked = 0;
  use({
    walkTokens(token) {
      walked++;
    }
  });
  marked('text');
  // 'text' lexes into two tokens (paragraph + inline text).
  expect(walked).toBe(2);
});

it('should use options from extension', () => {
  use({ breaks: true });
  const output = marked('line1\nline2');
  expect(output).toBe('<p>line1<br>line2</p>\n');
});
|
||||
|
||||
it('should call all walkTokens in reverse order', () => {
  let walkedOnce = 0;
  let walkedTwice = 0;
  // Registered first, runs second: only sees tokens already marked below.
  const first = {
    walkTokens(token) {
      if (token.walkedOnce) {
        walkedTwice++;
      }
    }
  };
  // Registered second, runs first: marks every token.
  const second = {
    walkTokens(token) {
      walkedOnce++;
      token.walkedOnce = true;
    }
  };
  use(first);
  use(second);
  marked('text');
  expect(walkedOnce).toBe(2);
  expect(walkedTwice).toBe(2);
});

it('should use last extension function and not override others', () => {
  const extension1 = {
    renderer: {
      paragraph(text) {
        return 'extension1 paragraph\n';
      },
      html(html) {
        return 'extension1 html\n';
      }
    }
  };
  const extension2 = {
    renderer: {
      paragraph(text) {
        return 'extension2 paragraph\n';
      }
    }
  };
  use(extension1);
  use(extension2);
  const output = marked(`
paragraph

<html />

# heading
`);
  // extension2 wins for `paragraph`; extension1's `html` still applies.
  expect(output).toBe('extension2 paragraph\nextension1 html\n<h1>heading</h1>\n');
});

it('should use previous extension when returning false', () => {
  const extension1 = {
    renderer: {
      paragraph(text) {
        return text !== 'original' ? 'extension1 paragraph\n' : false;
      }
    }
  };
  const extension2 = {
    renderer: {
      paragraph(text) {
        return (text !== 'extension1' && text !== 'original')
          ? 'extension2 paragraph\n'
          : false;
      }
    }
  };
  use(extension1);
  use(extension2);
  const output = marked(`
paragraph

extension1

original
`);
  // Falls through extension2 -> extension1 -> built-in renderer.
  expect(output).toBe('extension2 paragraph\nextension1 paragraph\n<p>original</p>\n');
});
|
||||
|
||||
it('should get options with this.options', () => {
  const ext = {
    renderer: {
      // Arrow function: no dynamic `this`, so options are NOT reachable.
      heading: () => {
        return this && this.options ? 'arrow options\n' : 'arrow no options\n';
      },
      // Function expression: `this` is bound by the caller, options ARE reachable.
      html: function() {
        return this.options ? 'function options\n' : 'function no options\n';
      },
      // Method shorthand: same dynamic `this` as a function expression.
      paragraph() {
        return this.options ? 'shorthand options\n' : 'shorthand no options\n';
      }
    }
  };
  use(ext);
  const output = marked(`
# heading

<html />

paragraph
`);
  expect(output).toBe('arrow no options\nfunction options\nshorthand options\n');
});
|
||||
});
|
||||
|
||||
describe('walkTokens', () => {
  it('should walk over every token', () => {
    // One sample of every block and inline token type.
    const markdown = `
paragraph

---

# heading

\`\`\`
code
\`\`\`

| a | b |
|---|---|
| 1 | 2 |
| 3 | 4 |

> blockquote

- list

<div>html</div>

[link](https://example.com)



**strong**

*em*

\`codespan\`

~~del~~

br
br
`;
    const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
    const seen = [];
    walkTokens(tokens, (token) => {
      seen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
    });

    expect(seen).toEqual([
      ['space', ''],
      ['paragraph', 'paragraph'],
      ['text', 'paragraph'],
      ['space', ''],
      ['hr', '---'],
      ['heading', '# heading'],
      ['text', 'heading'],
      ['code', '```code```'],
      ['space', ''],
      ['table', '| a | b ||---|---|| 1 | 2 || 3 | 4 |'],
      ['text', 'a'],
      ['text', 'b'],
      ['text', '1'],
      ['text', '2'],
      ['text', '3'],
      ['text', '4'],
      ['blockquote', '> blockquote'],
      ['paragraph', 'blockquote'],
      ['text', 'blockquote'],
      ['list', '- list'],
      ['list_item', '- list'],
      ['text', 'list'],
      ['text', 'list'],
      ['space', ''],
      ['html', '<div>html</div>'],
      ['paragraph', '[link](https://example.com)'],
      ['link', '[link](https://example.com)'],
      ['text', 'link'],
      ['space', ''],
      ['paragraph', ''],
      ['image', ''],
      ['space', ''],
      ['paragraph', '**strong**'],
      ['strong', '**strong**'],
      ['text', 'strong'],
      ['space', ''],
      ['paragraph', '*em*'],
      ['em', '*em*'],
      ['text', 'em'],
      ['space', ''],
      ['paragraph', '`codespan`'],
      ['codespan', '`codespan`'],
      ['space', ''],
      ['paragraph', '~~del~~'],
      ['del', '~~del~~'],
      ['text', 'del'],
      ['space', ''],
      ['paragraph', 'brbr'],
      ['text', 'br'],
      ['br', ''],
      ['text', 'br']
    ]);
  });

  it('should assign marked to `this`', () => {
    marked.use({
      walkTokens(token) {
        if (token.type !== 'em') {
          return;
        }
        token.text += ' walked';
        token.tokens = this.Lexer.lexInline(token.text);
      }
    });
    expect(marked('*text*').trim()).toBe('<p><em>text walked</em></p>');
  });

  it('should wait for async `walkTokens` function', async() => {
    marked.use({
      async: true,
      async walkTokens(token) {
        if (token.type !== 'em') {
          return;
        }
        await timeout();
        token.text += ' walked';
        token.tokens = this.Lexer.lexInline(token.text);
      }
    });
    const promise = marked('*text*');
    expect(promise).toBeInstanceOf(Promise);
    const output = await promise;
    expect(output.trim()).toBe('<p><em>text walked</em></p>');
  });

  it('should return promise if async and no walkTokens function', async() => {
    marked.use({
      async: true
    });
    const promise = marked('*text*');
    expect(promise).toBeInstanceOf(Promise);
    const output = await promise;
    expect(output.trim()).toBe('<p><em>text</em></p>');
  });
});
|
963
test/unit/marked.test.js
Normal file
963
test/unit/marked.test.js
Normal file
@ -0,0 +1,963 @@
|
||||
import { Marked, Renderer, lexer, parseInline, getDefaults, walkTokens, defaults, setOptions } from '../../lib/marked.esm.js';
|
||||
import { timeout } from './utils.js';
|
||||
import assert from 'node:assert';
|
||||
import { describe, it, beforeEach, mock } from 'node:test';
|
||||
|
||||
describe('marked unit', () => {
|
||||
// A fresh Marked instance per test keeps extension/option state isolated.
let marked;
beforeEach(() => {
  marked = new Marked();
});

describe('Test paragraph token type', () => {
  it('should use the "paragraph" type on top level', () => {
    const input = 'A Paragraph.\n\n> A blockquote\n\n- list item\n';

    const tokens = lexer(input);

    assert.strictEqual(tokens[0].type, 'paragraph');
    assert.strictEqual(tokens[2].tokens[0].type, 'paragraph');
    assert.strictEqual(tokens[3].items[0].tokens[0].type, 'text');
  });
});

describe('changeDefaults', () => {
  it('should change global defaults', async() => {
    const { defaults, setOptions } = await import('../../lib/marked.esm.js');
    assert.ok(!defaults.test);
    setOptions({ test: true });
    // A re-import resolves to the same module instance, so it sees the change.
    assert.ok((await import('../../lib/marked.esm.js')).defaults.test);
  });
});

describe('inlineLexer', () => {
  it('should send html to renderer.html', () => {
    const renderer = new Renderer();
    mock.method(renderer, 'html');
    const input = 'HTML Image: <img alt="MY IMAGE" src="example.png" />';

    marked.parse(input, { renderer });

    assert.strictEqual(renderer.html.mock.calls[0].arguments[0], '<img alt="MY IMAGE" src="example.png" />');
  });
});

describe('task', () => {
  it('space after checkbox', () => {
    assert.strictEqual(
      marked.parse('- [ ] item'),
      '<ul>\n<li><input disabled="" type="checkbox"> item</li>\n</ul>\n'
    );
  });

  it('space after loose checkbox', () => {
    assert.strictEqual(
      marked.parse('- [ ] item 1\n\n- [ ] item 2'),
      '<ul>\n<li><p><input disabled="" type="checkbox"> \nitem 1</p>\n</li>\n<li><p><input disabled="" type="checkbox"> \nitem 2</p>\n</li>\n</ul>\n'
    );
  });
});

describe('parseInline', () => {
  it('should parse inline tokens', () => {
    assert.strictEqual(parseInline('**strong** _em_'), '<strong>strong</strong> <em>em</em>');
  });

  it('should not parse block tokens', () => {
    assert.strictEqual(parseInline('# header\n\n_em_'), '# header\n\n<em>em</em>');
  });
});
|
||||
|
||||
describe('use extension', () => {
|
||||
it('should use custom block tokenizer + renderer extensions', () => {
  const underlineExt = {
    name: 'underline',
    level: 'block',
    tokenizer(src) {
      const found = /^:([^\n]*)(?:\n|$)/.exec(src);
      if (!found) {
        return;
      }
      return {
        type: 'underline',
        raw: found[0], // text this token consumes from the source
        text: found[1].trim() // extra property passed along to the renderer
      };
    },
    renderer(token) {
      return `<u>${token.text}</u>\n`;
    }
  };
  marked.use({ extensions: [underlineExt] });

  // Without a `start` hook the extension cannot interrupt a paragraph…
  let html = marked.parse('Not Underlined\n:Underlined\nNot Underlined');
  assert.strictEqual(html, '<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');

  // …but it does match at a block boundary.
  html = marked.parse('Not Underlined\n\n:Underlined\n\nNot Underlined');
  assert.strictEqual(html, '<p>Not Underlined</p>\n<u>Underlined</u>\n<p>Not Underlined</p>\n');
});

it('should interrupt paragraphs if using "start" property', () => {
  const underlineExt = {
    extensions: [{
      name: 'underline',
      level: 'block',
      start(src) { return src.indexOf(':'); },
      tokenizer(src) {
        const found = /^:([^\n]*):(?:\n|$)/.exec(src);
        if (!found) {
          return;
        }
        return {
          type: 'underline',
          raw: found[0],
          text: found[1].trim()
        };
      },
      renderer(token) {
        return `<u>${token.text}</u>\n`;
      }
    }]
  };
  marked.use(underlineExt);
  const html = marked.parse('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
  assert.strictEqual(html, '<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
});

it('should use custom inline tokenizer + renderer extensions', () => {
  const underlineExt = {
    name: 'underline',
    level: 'inline',
    start(src) { return src.indexOf('='); },
    tokenizer(src) {
      const found = /^=([^=]+)=/.exec(src);
      if (!found) {
        return;
      }
      return {
        type: 'underline',
        raw: found[0],
        text: found[1].trim()
      };
    },
    renderer(token) {
      return `<u>${token.text}</u>`;
    }
  };
  marked.use({ extensions: [underlineExt] });
  const html = marked.parse('Not Underlined =Underlined= Not Underlined');
  assert.strictEqual(html, '<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
});

it('should handle interacting block and inline extensions', () => {
  // Block extension: consumes a run of ':term:description' lines and lexes
  // the body as inline tokens so the inline extension below can match.
  const descriptionlist = {
    name: 'descriptionList',
    level: 'block',
    start(src) {
      const found = src.match(/:[^:\n]/);
      if (found) {
        return found.index;
      }
    },
    tokenizer(src, tokens) {
      const found = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/.exec(src);
      if (!found) {
        return;
      }
      const token = {
        type: 'descriptionList',
        raw: found[0],
        text: found[0].trim(),
        tokens: []
      };
      this.lexer.inlineTokens(token.text, token.tokens);
      return token;
    },
    renderer(token) {
      return `<dl>${this.parser.parseInline(token.tokens)}\n</dl>`;
    }
  };

  // Inline extension: one ':term:description' entry inside the list.
  const description = {
    name: 'description',
    level: 'inline',
    start(src) { return src.indexOf(':'); },
    tokenizer(src, tokens) {
      const found = /^:([^:\n]+):([^:\n]*)(?:\n|$)/.exec(src);
      if (!found) {
        return;
      }
      const token = {
        type: 'description',
        raw: found[0],
        dt: [],
        dd: []
      };
      this.lexer.inline(found[1].trim(), token.dt);
      this.lexer.inline(found[2].trim(), token.dd);
      return token;
    },
    renderer(token) {
      return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
    }
  };
  marked.use({ extensions: [descriptionlist, description] });
  const html = marked.parse('A Description List with One Description:\n'
    + ': Topic 1 : Description 1\n'
    + ': **Topic 2** : *Description 2*');
  assert.strictEqual(html, '<p>A Description List with One Description:</p>\n'
    + '<dl>'
    + '\n<dt>Topic 1</dt><dd>Description 1</dd>'
    + '\n<dt><strong>Topic 2</strong></dt><dd><em>Description 2</em></dd>'
    + '\n</dl>');
});
|
||||
|
||||
it('should allow other options mixed into the extension', () => {
|
||||
const extension = {
|
||||
name: 'underline',
|
||||
level: 'block',
|
||||
start(src) { return src.indexOf(':'); },
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*):(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'underline',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
return `<u>${token.text}</u>\n`;
|
||||
}
|
||||
};
|
||||
marked.use({ silent: true, extensions: [extension] });
|
||||
const html = marked.parse(':test:\ntest\n<div></div>');
|
||||
assert.strictEqual(html, '<u>test</u>\n<p>test</p>\n<div></div>');
|
||||
});
|
||||
|
||||
it('should handle renderers that return false', () => {
|
||||
const extension = {
|
||||
name: 'test',
|
||||
level: 'block',
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*):(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'test',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token) {
|
||||
if (token.text === 'test') {
|
||||
return 'test';
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
const fallbackRenderer = {
|
||||
name: 'test',
|
||||
level: 'block',
|
||||
renderer(token) {
|
||||
if (token.text === 'Test') {
|
||||
return 'fallback';
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
marked.use({ extensions: [fallbackRenderer, extension] });
|
||||
const html = marked.parse(':Test:\n\n:test:\n\n:none:');
|
||||
assert.strictEqual(html, 'fallbacktest');
|
||||
});
|
||||
|
||||
it('should fall back when tokenizers return false', () => {
|
||||
const extension = {
|
||||
name: 'test',
|
||||
level: 'block',
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*):(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
return {
|
||||
type: 'test',
|
||||
raw: match[0], // This is the text that you want your token to consume from the source
|
||||
text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
|
||||
};
|
||||
}
|
||||
return false;
|
||||
},
|
||||
renderer(token) {
|
||||
return token.text;
|
||||
}
|
||||
};
|
||||
const extension2 = {
|
||||
name: 'test',
|
||||
level: 'block',
|
||||
tokenizer(src) {
|
||||
const rule = /^:([^\n]*):(?:\n|$)/;
|
||||
const match = rule.exec(src);
|
||||
if (match) {
|
||||
if (match[1].match(/^[A-Z]/)) {
|
||||
return {
|
||||
type: 'test',
|
||||
raw: match[0],
|
||||
text: match[1].trim().toUpperCase()
|
||||
};
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
marked.use({ extensions: [extension, extension2] });
|
||||
const html = marked.parse(':Test:\n\n:test:');
|
||||
assert.strictEqual(html, 'TESTtest');
|
||||
});
|
||||
|
||||
it('should override original tokenizer/renderer with same name, but fall back if returns false', () => {
  // Heading override: tokenizer declines (returns false), so the built-in
  // heading tokenizer runs, but the custom renderer still takes over.
  const headingOverride = {
    name: 'heading',
    level: 'block',
    tokenizer(src) {
      return false; // fall back to default `heading` tokenizer
    },
    renderer(token) {
      return '<h' + token.depth + '>' + token.text + ' RENDERER EXTENSION</h' + token.depth + '>\n';
    }
  };
  // Code override: tokenizer takes over for `:text:` lines, but the renderer
  // declines, so the built-in code renderer produces the final HTML.
  const codeOverride = {
    name: 'code',
    level: 'block',
    tokenizer(src) {
      const match = /^:([^\n]*):(?:\n|$)/.exec(src);
      if (!match) {
        return;
      }
      return {
        type: 'code',
        raw: match[0],
        text: match[1].trim() + ' TOKENIZER EXTENSION'
      };
    },
    renderer(token) {
      return false; // fall back to default `code` renderer
    }
  };
  marked.use({ extensions: [headingOverride, codeOverride] });
  const output = marked.parse('# extension1\n:extension2:');
  assert.strictEqual(output, '<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
});
|
||||
|
||||
it('should walk only specified child tokens', () => {
  // Extension producing a `:term:description:` inline token with three child
  // token arrays: `dt`, `dd`, and the conventional `tokens`. Only the arrays
  // named in `childTokens` should be visited by walkTokens.
  const walkableDescription = {
    extensions: [{
      name: 'walkableDescription',
      level: 'inline',
      start(src) { return src.indexOf(':'); },
      tokenizer(src, tokens) {
        const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
        const match = rule.exec(src);
        if (match) {
          const token = {
            type: 'walkableDescription',
            raw: match[0],
            // `this.lexer` is supplied by marked when it invokes the
            // tokenizer; inline() lexes a string into inline tokens.
            dt: this.lexer.inline(match[1].trim()),
            dd: [],
            tokens: []
          };
          this.lexer.inline(match[2].trim(), token.dd);
          // Deliberately placed in `tokens`, which is NOT listed in
          // childTokens below — the assertion expects it to stay unwalked.
          this.lexer.inline('unwalked', token.tokens);
          return token;
        }
      },
      renderer(token) {
        return `\n<dt>${this.parser.parseInline(token.dt)} - ${this.parser.parseInline(token.tokens)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
      },
      // Restricts walkTokens to these child arrays, replacing the default
      // `tokens` traversal for this token type.
      childTokens: ['dd', 'dt']
    }],
    walkTokens(token) {
      if (token.type === 'text') {
        token.text += ' walked';
      }
    }
  };
  marked.use(walkableDescription);
  const html = marked.parse(': Topic 1 : Description 1\n'
    + ': **Topic 2** : *Description 2*');
  // Text under dt/dd is ' walked'; text under `tokens` is left 'unwalked'.
  assert.strictEqual(html, '<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
    + '\n<dt><strong>Topic 2 walked</strong> - unwalked</dt><dd><em>Description 2 walked</em></dd></p>\n');
});
|
||||
|
||||
// Verifies that several marked.use() calls (or one call with several
// arguments) merge block/inline extensions, tokenizer and renderer
// overrides, and walkTokens handlers instead of replacing one another.
describe('multiple extensions', () => {
  // Builds a full extension pack for `name`: a block extension matching
  // `::name`, an inline extension matching `:name`, a heading
  // tokenizer/renderer override, and a walkTokens hook that appends
  // ' walked' to text produced by this pack.
  function createExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          if (src.startsWith(`::${name}\n`)) {
            const text = `:${name}`;
            const token = {
              type: `block-${name}`,
              raw: `::${name}\n`,
              text,
              tokens: []
            };
            // Lex the body as inline content so the inline extension below
            // gets a chance to match `:name`.
            this.lexer.inline(token.text, token.tokens);
            return token;
          }
        },
        renderer(token) {
          return `<${token.type}>${this.parser.parseInline(token.tokens)}</${token.type}>\n`;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          if (src.startsWith(`:${name}`)) {
            return {
              type: `inline-${name}`,
              raw: `:${name}`,
              text: `used ${name}`
            };
          }
        },
        renderer(token) {
          return token.text;
        }
      }],
      tokenizer: {
        // Overrides the built-in heading tokenizer, but only claims headings
        // for this pack's `name` — otherwise falls back (returns false).
        heading(src) {
          if (src.startsWith(`# ${name}`)) {
            const token = {
              type: 'heading',
              raw: `# ${name}`,
              text: `used ${name}`,
              depth: 1,
              tokens: []
            };
            this.lexer.inline(token.text, token.tokens);
            return token;
          }
          return false;
        }
      },
      renderer: {
        // Only renders headings whose text exactly equals `name`; in this
        // suite the walked text never matches, so the default renderer runs.
        heading(text, depth, raw) {
          if (text === name) {
            return `<h${depth}>${text}</h${depth}>\n`;
          }
          return false;
        }
      },
      walkTokens(token) {
        if (token.text === `used ${name}`) {
          token.text += ' walked';
        }
      }
    };
  }

  // Builds an extension pack whose tokenizers and renderers all return
  // false, so any previously-registered extension with the same name must
  // be used as the fallback.
  function createFalseExtension(name) {
    return {
      extensions: [{
        name: `block-${name}`,
        level: 'block',
        start(src) { return src.indexOf('::'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }, {
        name: `inline-${name}`,
        level: 'inline',
        start(src) { return src.indexOf(':'); },
        tokenizer(src, tokens) {
          return false;
        },
        renderer(token) {
          return false;
        }
      }]
    };
  }

  // Parses one document exercising both packs plus an unextended heading,
  // and asserts that every extension point of both packs fired.
  function runTest() {
    const html = marked.parse(`
::extension1
::extension2

:extension1
:extension2

# extension1

# extension2

# no extension
`);

    // Collapse runs of newlines so the comparison is insensitive to exact
    // blank-line output between blocks.
    assert.strictEqual(`\n${html}\n`.replace(/\n+/g, '\n'), `
<block-extension1>used extension1 walked</block-extension1>
<block-extension2>used extension2 walked</block-extension2>
<p>used extension1 walked
used extension2 walked</p>
<h1>used extension1 walked</h1>
<h1>used extension2 walked</h1>
<h1>no extension</h1>
`);
  }

  it('should merge extensions when calling marked.use multiple times', () => {
    marked.use(createExtension('extension1'));
    marked.use(createExtension('extension2'));

    runTest();
  });

  it('should merge extensions when calling marked.use with multiple extensions', () => {
    marked.use(
      createExtension('extension1'),
      createExtension('extension2')
    );

    runTest();
  });

  it('should fall back to any extensions with the same name if the first returns false', () => {
    marked.use(
      createExtension('extension1'),
      createExtension('extension2'),
      // Registered last, so tried first; returning false must defer to the
      // real packs above.
      createFalseExtension('extension1'),
      createFalseExtension('extension2')
    );

    runTest();
  });

  it('should merge extensions correctly', () => {
    // Empty/partial extension objects must merge without error.
    marked.use(
      {},
      { tokenizer: {} },
      { renderer: {} },
      { walkTokens: () => {} },
      { extensions: [] }
    );

    // should not throw
    marked.parse('# test');
  });
});
|
||||
|
||||
it('should be async if any extension in use args is async', () => {
  // Pass one async and one sync extension in a single use() call; the
  // async flag must win regardless of argument order.
  const asyncExtension = { async: true };
  const syncExtension = { async: false };
  marked.use(asyncExtension, syncExtension);

  assert.ok(marked.defaults.async);
});
|
||||
|
||||
// NOTE: was `it.only(...)` — a leftover focus marker that made the runner
// execute only this test and silently skip the rest of the suite.
it('should be async if any extension in use is async', () => {
  // Once any use() call opts in to async, a later `async: false` must not
  // reset it — the async default is sticky across merges.
  marked.use({ async: true });
  marked.use({ async: false });

  assert.ok(marked.defaults.async);
});
|
||||
|
||||
it('should reset async with setOptions', () => {
  // Unlike a second use() call, setOptions() may clear the async flag.
  marked.use({ async: true });
  setOptions({ async: false });

  assert.ok(!defaults.async);
});

it('should return Promise if async', () => {
  const result = marked.parse('test', { async: true });
  assert.ok(result instanceof Promise);
});

it('should return string if not async', () => {
  const result = marked.parse('test', { async: false });
  assert.strictEqual(typeof result, 'string');
});

it('should return Promise if async is set by extension', () => {
  marked.use({ async: true });

  // A per-call `async: false` cannot override an async extension.
  const result = marked.parse('test', { async: false });
  assert.ok(result instanceof Promise);
});
|
||||
|
||||
it('should allow deleting/editing tokens', () => {
  // A `{color}` trailer on a block becomes an inlineStyleTag token; the
  // walkTokens hook then deletes that child token and retypes its parent to
  // 'styled' so the custom renderer can inject a style attribute.
  const styleTags = {
    extensions: [{
      name: 'inlineStyleTag',
      level: 'inline',
      start(src) {
        const match = src.match(/ *{[^\{]/);
        if (match) {
          return match.index;
        }
      },
      tokenizer(src, tokens) {
        // Only matches when the `{...}` runs to the end of the inline text.
        const rule = /^ *{([^\{\}\n]+)}$/;
        const match = rule.exec(src);
        if (match) {
          return {
            type: 'inlineStyleTag',
            raw: match[0], // This is the text that you want your token to consume from the source
            text: match[1]
          };
        }
      }
    },
    {
      name: 'styled',
      renderer(token) {
        // Restore the original type (saved by walkTokens below) and let the
        // normal parser render it, then splice the style attribute into the
        // first opening tag of the produced HTML.
        token.type = token.originalType;
        const text = this.parser.parse([token]);
        const openingTag = /(<[^\s<>]+)([^\n<>]*>.*)/s.exec(text);
        if (openingTag) {
          return `${openingTag[1]} ${token.style}${openingTag[2]}`;
        }
        return text;
      }
    }],
    walkTokens(token) {
      if (token.tokens) {
        // If the block's last inline child is an inlineStyleTag, consume it:
        // remember the original type, retype to 'styled', record the style,
        // and delete the child token.
        const finalChildToken = token.tokens[token.tokens.length - 1];
        if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
          token.originalType = token.type;
          token.type = 'styled';
          token.style = `style="color:${finalChildToken.text};"`;
          token.tokens.pop();
        }
      }
    }
  };
  marked.use(styleTags);
  const html = marked.parse('This is a *paragraph* with blue text. {blue}\n'
    + '# This is a *header* with red text {red}');
  assert.strictEqual(html, '<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
    + '<h1 style="color:red;">This is a <em>header</em> with red text</h1>\n');
});
|
||||
|
||||
it('should use renderer', () => {
  const extension = {
    renderer: {
      paragraph(text) {
        return 'extension';
      }
    }
  };
  // Spy on the override (node:test mock) to assert what marked passes it.
  mock.method(extension.renderer, 'paragraph');
  marked.use(extension);
  const html = marked.parse('text');
  // The renderer override receives the paragraph text...
  assert.strictEqual(extension.renderer.paragraph.mock.calls[0].arguments[0], 'text');
  // ...and its return value becomes the output verbatim.
  assert.strictEqual(html, 'extension');
});

it('should use tokenizer', () => {
  const extension = {
    tokenizer: {
      paragraph(text) {
        const token = {
          type: 'paragraph',
          raw: text,
          text: 'extension',
          tokens: []
        };
        // `this.lexer` is bound by marked; populate inline child tokens so
        // the default paragraph renderer has something to render.
        this.lexer.inline(token.text, token.tokens);
        return token;
      }
    }
  };
  mock.method(extension.tokenizer, 'paragraph');
  marked.use(extension);
  const html = marked.parse('text');
  assert.strictEqual(extension.tokenizer.paragraph.mock.calls[0].arguments[0], 'text');
  // The replaced token text flows through the default renderer.
  assert.strictEqual(html, '<p>extension</p>\n');
});
|
||||
|
||||
it('should use walkTokens', () => {
  // 'text' lexes to a paragraph token plus its inline text child,
  // so the hook should fire exactly twice.
  let visitCount = 0;
  marked.use({
    walkTokens(token) {
      visitCount++;
    }
  });
  marked.parse('text');
  assert.strictEqual(visitCount, 2);
});

it('should use options from extension', () => {
  // Plain options set through use() must affect parsing (breaks => <br>).
  marked.use({ breaks: true });
  const output = marked.parse('line1\nline2');
  assert.strictEqual(output, '<p>line1<br>line2</p>\n');
});
|
||||
|
||||
it('should call all walkTokens in reverse order', () => {
  // extension2 is registered last but must run first: it marks each token,
  // and extension1 (registered first, run second) observes the mark.
  let walkedOnce = 0;
  let walkedTwice = 0;
  const extension1 = {
    walkTokens(token) {
      if (token.walkedOnce) {
        walkedTwice++;
      }
    }
  };
  const extension2 = {
    walkTokens(token) {
      walkedOnce++;
      token.walkedOnce = true;
    }
  };
  marked.use(extension1);
  marked.use(extension2);
  marked.parse('text');
  // 'text' yields two tokens (paragraph + inline text), each walked by both
  // hooks in last-registered-first order.
  assert.strictEqual(walkedOnce, 2);
  assert.strictEqual(walkedTwice, 2);
});
|
||||
|
||||
it('should use last extension function and not override others', () => {
  // extension2 replaces only `paragraph`; extension1's `html` override and
  // the built-in heading renderer must both survive the merge.
  const extension1 = {
    renderer: {
      paragraph(text) {
        return 'extension1 paragraph\n';
      },
      html(html) {
        return 'extension1 html\n';
      }
    }
  };
  const extension2 = {
    renderer: {
      paragraph(text) {
        return 'extension2 paragraph\n';
      }
    }
  };
  marked.use(extension1);
  marked.use(extension2);
  const html = marked.parse(`
paragraph

<html />

# heading
`);
  // paragraph -> extension2 (last wins), html -> extension1 (not clobbered),
  // heading -> default renderer (never overridden).
  assert.strictEqual(html, 'extension2 paragraph\nextension1 html\n<h1>heading</h1>\n');
});
|
||||
|
||||
it('should use previous extension when returning false', () => {
  // Both extensions override `paragraph`; when the most recent one returns
  // false, marked must try the earlier override, and when that also returns
  // false, fall through to the built-in renderer.
  const extension1 = {
    renderer: {
      paragraph(text) {
        if (text !== 'original') {
          return 'extension1 paragraph\n';
        }
        return false;
      }
    }
  };
  const extension2 = {
    renderer: {
      paragraph(text) {
        if (text !== 'extension1' && text !== 'original') {
          return 'extension2 paragraph\n';
        }
        return false;
      }
    }
  };
  marked.use(extension1);
  marked.use(extension2);
  const html = marked.parse(`
paragraph

extension1

original
`);
  // 'paragraph' -> extension2; 'extension1' -> extension2 declines,
  // extension1 handles; 'original' -> both decline, default renderer runs.
  assert.strictEqual(html, 'extension2 paragraph\nextension1 paragraph\n<p>original</p>\n');
});
|
||||
|
||||
it('should get options with this.options', () => {
  const extension = {
    renderer: {
      // Deliberately an arrow function: arrows do not get marked's `this`
      // binding, so `this.options` must be unavailable here. Do not "fix".
      heading: () => {
        return this && this.options ? 'arrow options\n' : 'arrow no options\n';
      },
      // Regular function expressions and method shorthand are rebound by
      // marked and must see `this.options`.
      html: function() {
        return this.options ? 'function options\n' : 'function no options\n';
      },
      paragraph() {
        return this.options ? 'shorthand options\n' : 'shorthand no options\n';
      }
    }
  };
  marked.use(extension);
  const html = marked.parse(`
# heading

<html />

paragraph
`);
  assert.strictEqual(html, 'arrow no options\nfunction options\nshorthand options\n');
});
|
||||
});
|
||||
|
||||
describe('walkTokens', () => {
  it('should walk over every token', () => {
    // One markdown sample containing every token type of interest;
    // breaks:true below makes the trailing 'br\nbr' produce a br token.
    const markdown = `
paragraph

---

# heading

\`\`\`
code
\`\`\`

| a | b |
|---|---|
| 1 | 2 |
| 3 | 4 |

> blockquote

- list

<div>html</div>

[link](https://example.com)



**strong**

*em*

\`codespan\`

~~del~~

br
br
`;
    const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
    const tokensSeen = [];
    walkTokens(tokens, (token) => {
      // Strip newlines from raw so each expected entry fits on one line.
      tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
    });

    // Exact depth-first visitation order: each block token followed by its
    // children, in document order.
    assert.deepEqual(tokensSeen, [
      ['space', ''],
      ['paragraph', 'paragraph'],
      ['text', 'paragraph'],
      ['space', ''],
      ['hr', '---'],
      ['heading', '# heading'],
      ['text', 'heading'],
      ['code', '```code```'],
      ['space', ''],
      ['table', '| a | b ||---|---|| 1 | 2 || 3 | 4 |'],
      ['text', 'a'],
      ['text', 'b'],
      ['text', '1'],
      ['text', '2'],
      ['text', '3'],
      ['text', '4'],
      ['blockquote', '> blockquote'],
      ['paragraph', 'blockquote'],
      ['text', 'blockquote'],
      ['list', '- list'],
      ['list_item', '- list'],
      ['text', 'list'],
      ['text', 'list'],
      ['space', ''],
      ['html', '<div>html</div>'],
      ['paragraph', '[link](https://example.com)'],
      ['link', '[link](https://example.com)'],
      ['text', 'link'],
      ['space', ''],
      ['paragraph', ''],
      ['image', ''],
      ['space', ''],
      ['paragraph', '**strong**'],
      ['strong', '**strong**'],
      ['text', 'strong'],
      ['space', ''],
      ['paragraph', '*em*'],
      ['em', '*em*'],
      ['text', 'em'],
      ['space', ''],
      ['paragraph', '`codespan`'],
      ['codespan', '`codespan`'],
      ['space', ''],
      ['paragraph', '~~del~~'],
      ['del', '~~del~~'],
      ['text', 'del'],
      ['space', ''],
      ['paragraph', 'brbr'],
      ['text', 'br'],
      ['br', ''],
      ['text', 'br']
    ]);
  });

  it('should assign marked to `this`', () => {
    // Inside walkTokens, `this` exposes the marked instance (e.g. Lexer),
    // so edited text can be re-lexed into fresh inline tokens.
    marked.use({
      walkTokens(token) {
        if (token.type === 'em') {
          token.text += ' walked';
          token.tokens = this.Lexer.lexInline(token.text);
        }
      }
    });
    assert.strictEqual(marked.parse('*text*').trim(), '<p><em>text walked</em></p>');
  });

  it('should wait for async `walkTokens` function', async() => {
    // With async:true, parse() must await the async hook before rendering.
    marked.use({
      async: true,
      async walkTokens(token) {
        if (token.type === 'em') {
          await timeout();
          token.text += ' walked';
          token.tokens = this.Lexer.lexInline(token.text);
        }
      }
    });
    const promise = marked.parse('*text*');
    assert.ok(promise instanceof Promise);
    const html = await promise;
    assert.strictEqual(html.trim(), '<p><em>text walked</em></p>');
  });

  it('should return promise if async and no walkTokens function', async() => {
    // async:true alone (no hook) still switches parse() to Promise mode.
    marked.use({
      async: true
    });
    const promise = marked.parse('*text*');
    assert.ok(promise instanceof Promise);
    const html = await promise;
    assert.strictEqual(html.trim(), '<p><em>text</em></p>');
  });
});
|
||||
});
|
37
test/update-specs.js
vendored
37
test/update-specs.js
vendored
@ -1,9 +1,12 @@
|
||||
import fetch from 'node-fetch';
|
||||
import { load } from 'cheerio';
|
||||
import marked from '../';
|
||||
import { isEqual } from './helpers/html-differ.js';
|
||||
import { readdirSync, unlinkSync, writeFileSync } from 'fs';
|
||||
import { join, resolve } from 'path';
|
||||
import { Marked } from '../lib/marked.esm.js';
|
||||
import { htmlIsEqual } from '@markedjs/testutils';
|
||||
import { readdirSync, unlinkSync, writeFileSync } from 'node:fs';
|
||||
import { join, resolve, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
function removeFiles(dir) {
|
||||
readdirSync(dir).forEach(file => {
|
||||
@ -17,13 +20,16 @@ async function updateCommonmark(dir, options) {
|
||||
const pkg = await res.json();
|
||||
const version = pkg.version.replace(/^(\d+\.\d+).*$/, '$1');
|
||||
const res2 = await fetch(`https://spec.commonmark.org/${version}/spec.json`);
|
||||
const specs = await res2.json();
|
||||
specs.forEach(spec => {
|
||||
const html = marked(spec.markdown, options);
|
||||
if (!isEqual(html, spec.html)) {
|
||||
const json = await res2.json();
|
||||
const specs = await Promise.all(json.map(async(spec) => {
|
||||
const marked = new Marked();
|
||||
const html = marked.parse(spec.markdown, options);
|
||||
const isEqual = await htmlIsEqual(html, spec.html);
|
||||
if (!isEqual) {
|
||||
spec.shouldFail = true;
|
||||
}
|
||||
});
|
||||
return spec;
|
||||
}));
|
||||
writeFileSync(resolve(dir, `./commonmark.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
|
||||
console.log(`Saved CommonMark v${version} specs`);
|
||||
} catch (ex) {
|
||||
@ -40,7 +46,7 @@ async function updateGfm(dir) {
|
||||
if (!version) {
|
||||
throw new Error('No version found');
|
||||
}
|
||||
const specs = [];
|
||||
let specs = [];
|
||||
$('.extension').each((i, ext) => {
|
||||
const section = $('.definition', ext).text().trim().replace(/^\d+\.\d+(.*?) \(extension\)[\s\S]*$/, '$1');
|
||||
$('.example', ext).each((j, exa) => {
|
||||
@ -56,12 +62,15 @@ async function updateGfm(dir) {
|
||||
});
|
||||
});
|
||||
|
||||
specs.forEach(spec => {
|
||||
const html = marked(spec.markdown, { gfm: true, pedantic: false });
|
||||
if (!isEqual(html, spec.html)) {
|
||||
specs = await Promise.all(specs.map(async(spec) => {
|
||||
const marked = new Marked();
|
||||
const html = marked.parse(spec.markdown, { gfm: true, pedantic: false });
|
||||
const isEqual = await htmlIsEqual(html, spec.html);
|
||||
if (!isEqual) {
|
||||
spec.shouldFail = true;
|
||||
}
|
||||
});
|
||||
return spec;
|
||||
}));
|
||||
writeFileSync(resolve(dir, `./gfm.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
|
||||
console.log(`Saved GFM v${version} specs.`);
|
||||
} catch (ex) {
|
||||
|
Loading…
x
Reference in New Issue
Block a user