Mirror of https://github.com/DavidAnson/markdownlint-cli2-action.git, synced 2024-11-21 22:01:30 +01:00
Freshen generated index.js file.
This commit is contained in:
parent ccde71ff7c
commit 20d2d1c43a
1 changed file with 400 additions and 286 deletions
512 dist/index.js (vendored)
@@ -558,7 +558,7 @@ class OidcClient {
 .catch(error => {
 throw new Error(`Failed to get ID Token. \n
 Error Code : ${error.statusCode}\n
-Error Message: ${error.result.message}`);
+Error Message: ${error.message}`);
 });
 const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
 if (!id_token) {
@@ -14538,12 +14538,18 @@ module.exports.unorderedListStyleFor = function unorderedListStyleFor(token) {
 }
 };
 
+/**
+* @callback TokenCallback
+* @param {MarkdownItToken} token Current token.
+* @returns {void}
+*/
+
 /**
 * Calls the provided function for each matching token.
 *
 * @param {Object} params RuleParams instance.
 * @param {string} type Token type identifier.
-* @param {Function} handler Callback function.
+* @param {TokenCallback} handler Callback function.
 * @returns {void}
 */
 function filterTokens(params, type, handler) {
@@ -14555,8 +14561,17 @@ function filterTokens(params, type, handler) {
 }
 module.exports.filterTokens = filterTokens;
 
-// Get line metadata array
-module.exports.getLineMetadata = function getLineMetadata(params) {
+/**
+* @typedef {Array} LineMetadata
+*/
+
+/**
+* Gets a line metadata array.
+*
+* @param {Object} params RuleParams instance.
+* @returns {LineMetadata} Line metadata.
+*/
+function getLineMetadata(params) {
 const lineMetadata = params.lines.map(
 (line, index) => [ line, index, false, 0, false, false, false ]
 );
@@ -14588,18 +14603,32 @@ module.exports.getLineMetadata = function getLineMetadata(params) {
 lineMetadata[token.map[0]][6] = true;
 });
 return lineMetadata;
-};
+}
+module.exports.getLineMetadata = getLineMetadata;
 
+/**
+* @callback EachLineCallback
+* @param {string} line Line content.
+* @param {number} lineIndex Line index (0-based).
+* @param {boolean} inCode Iff in a code block.
+* @param {number} onFence + if open, - if closed, 0 otherwise.
+* @param {boolean} inTable Iff in a table.
+* @param {boolean} inItem Iff in a list item.
+* @param {boolean} inBreak Iff in semantic break.
+* @returns {void}
+*/
+
 /**
 * Calls the provided function for each line.
 *
-* @param {Object} lineMetadata Line metadata object.
-* @param {Function} handler Function taking (line, lineIndex, inCode, onFence,
-* inTable, inItem, inBreak).
+* @param {LineMetadata} lineMetadata Line metadata object.
+* @param {EachLineCallback} handler Function taking (line, lineIndex, inCode,
+* onFence, inTable, inItem, inBreak).
 * @returns {void}
 */
 function forEachLine(lineMetadata, handler) {
 for (const metadata of lineMetadata) {
+// @ts-ignore
 handler(...metadata);
 }
 }
@@ -14670,12 +14699,21 @@ module.exports.forEachHeading = function forEachHeading(params, handler) {
 }
 };
 
+/**
+* @callback InlineCodeSpanCallback
+* @param {string} code Code content.
+* @param {number} lineIndex Line index (0-based).
+* @param {number} columnIndex Column index (0-based).
+* @param {number} ticks Count of backticks.
+* @returns {void}
+*/
+
 /**
 * Calls the provided function for each inline code span's content.
 *
 * @param {string} input Markdown content.
-* @param {Function} handler Callback function taking (code, lineIndex,
-* columnIndex, ticks).
+* @param {InlineCodeSpanCallback} handler Callback function taking (code,
+* lineIndex, columnIndex, ticks).
 * @returns {void}
 */
 function forEachInlineCodeSpan(input, handler) {
@@ -15162,6 +15200,26 @@ function expandTildePath(file, os) {
 }
 module.exports.expandTildePath = expandTildePath;
 
+// Copied from markdownlint.js to avoid TypeScript compiler import() issue.
+/**
+* @typedef {Object} MarkdownItToken
+* @property {string[][]} attrs HTML attributes.
+* @property {boolean} block Block-level token.
+* @property {MarkdownItToken[]} children Child nodes.
+* @property {string} content Tag contents.
+* @property {boolean} hidden Ignore element.
+* @property {string} info Fence info.
+* @property {number} level Nesting level.
+* @property {number[]} map Beginning/ending line numbers.
+* @property {string} markup Markup text.
+* @property {Object} meta Arbitrary data.
+* @property {number} nesting Level change.
+* @property {string} tag HTML tag name.
+* @property {string} type Token type.
+* @property {number} lineNumber Line number (1-based).
+* @property {string} line Line content.
+*/
+
 
 /***/ }),
 
@@ -20175,7 +20233,7 @@ const resolveAndRequire = __nccwpck_require__(5317);
 
 // Variables
 const packageName = "markdownlint-cli2";
-const packageVersion = "0.9.2";
+const packageVersion = "0.10.0";
 const libraryName = "markdownlint";
 const libraryVersion = markdownlintLibrary.getVersion();
 const dotOnlySubstitute = "*.{md,markdown}";
@@ -21099,9 +21157,15 @@ const main = async (params) => {
 );
 // Output linting status
 if (showProgress) {
-let fileCount = 0;
-for (const dirInfo of dirInfos) {
-fileCount += dirInfo.files.length;
+const fileNames = dirInfos.flatMap((dirInfo) => {
+const { files } = dirInfo;
+return files.map((file) => pathPosix.relative(baseDir, file));
+});
+const fileCount = fileNames.length;
+if (baseMarkdownlintOptions.showFound) {
+fileNames.push("");
+fileNames.sort();
+logMessage(`Found:${fileNames.join("\n ")}`);
 }
 logMessage(`Linting: ${fileCount} file(s)`);
 }
@@ -21266,7 +21330,7 @@ module.exports.fixableRuleNames = [
 "MD044", "MD047", "MD049", "MD050", "MD051", "MD053"
 ];
 module.exports.homepage = "https://github.com/DavidAnson/markdownlint";
-module.exports.version = "0.30.0";
+module.exports.version = "0.31.1";
 
 
 /***/ }),
@@ -21797,7 +21861,7 @@ function getEnabledRulesPerLineNumber(
 * @param {boolean} handleRuleFailures Whether to handle exceptions in rules.
 * @param {boolean} noInlineConfig Whether to allow inline configuration.
 * @param {number} resultVersion Version of the LintResults object to return.
-* @param {Function} callback Callback (err, result) function.
+* @param {LintContentCallback} callback Callback (err, result) function.
 * @returns {void}
 */
 function lintContent(
@@ -22086,7 +22150,7 @@ function lintContent(
 * @param {number} resultVersion Version of the LintResults object to return.
 * @param {Object} fs File system implementation.
 * @param {boolean} synchronous Whether to execute synchronously.
-* @param {Function} callback Callback (err, result) function.
+* @param {LintContentCallback} callback Callback (err, result) function.
 * @returns {void}
 */
 function lintFile(
@@ -22136,7 +22200,7 @@ function lintFile(
 *
 * @param {Options | null} options Options object.
 * @param {boolean} synchronous Whether to execute synchronously.
-* @param {Function} callback Callback (err, result) function.
+* @param {LintCallback} callback Callback (err, result) function.
 * @returns {void}
 */
 function lintInput(options, synchronous, callback) {
@@ -22299,7 +22363,7 @@ function markdownlintPromise(options) {
 * @returns {LintResults} Results object.
 */
 function markdownlintSync(options) {
-let results = {};
+let results = null;
 lintInput(options, true, function callback(error, res) {
 if (error) {
 throw error;
@@ -22453,6 +22517,7 @@ function readConfig(file, parsers, fs, callback) {
 }
 // Read file
 file = helpers.expandTildePath(file, __nccwpck_require__(612));
+// eslint-disable-next-line n/prefer-promises/fs
 fs.readFile(file, "utf8", (err, content) => {
 if (err) {
 // @ts-ignore
@@ -22693,12 +22758,21 @@ module.exports = markdownlint;
 * @property {string} [insertText] Text to insert (after deleting).
 */
 
+/**
+* Called with the result of linting a string or document.
+*
+* @callback LintContentCallback
+* @param {Error | null} error Error iff failed.
+* @param {LintError[]} [result] Result iff successful.
+* @returns {void}
+*/
+
 /**
 * Called with the result of the lint function.
 *
 * @callback LintCallback
-* @param {Error | null} err Error object or null.
-* @param {LintResults} [results] Lint results.
+* @param {Error | null} error Error object iff failed.
+* @param {LintResults} [results] Lint results iff succeeded.
 * @returns {void}
 */
 
@@ -24366,8 +24440,7 @@ module.exports = {
 
 const { addErrorContext, blockquotePrefixRe, isBlankLine } =
 __nccwpck_require__(2935);
-const { filterByPredicate, flattenedChildren } =
-__nccwpck_require__(5673);
+const { filterByPredicate } = __nccwpck_require__(5673);
 
 const nonContentTokens = new Set([
 "blockQuoteMarker",
@@ -24409,7 +24482,9 @@ module.exports = {
 const topLevelLists = filterByPredicate(
 parsers.micromark.tokens,
 isList,
-(token) => (isList(token) ? [] : token.children)
+(token) => (
+(isList(token) || (token.type === "htmlFlow")) ? [] : token.children
+)
 );
 for (const list of topLevelLists) {
 
@@ -24421,7 +24496,11 @@ module.exports = {
 
 // Find the "visual" end of the list
 let endLine = list.endLine;
-for (const child of flattenedChildren(list).reverse()) {
+const flattenedChildren = filterByPredicate(
+list.children,
+() => true
+);
+for (const child of flattenedChildren.reverse()) {
 if (!nonContentTokens.has(child.type)) {
 endLine = child.endLine;
 break;
@@ -24449,7 +24528,7 @@ module.exports = {
 
 
 const { addError } = __nccwpck_require__(2935);
-const { filterByHtmlTokens, getHtmlTagInfo } =
+const { filterByTypes, getHtmlTagInfo } =
 __nccwpck_require__(5673);
 
 const nextLinesRe = /[\r\n][\s\S]*$/;
@@ -24462,7 +24541,8 @@ module.exports = {
 let allowedElements = params.config.allowed_elements;
 allowedElements = Array.isArray(allowedElements) ? allowedElements : [];
 allowedElements = allowedElements.map((element) => element.toLowerCase());
-for (const token of filterByHtmlTokens(params.parsers.micromark.tokens)) {
+const { tokens } = params.parsers.micromark;
+for (const token of filterByTypes(tokens, [ "htmlText" ])) {
 const htmlTagInfo = getHtmlTagInfo(token);
 if (
 htmlTagInfo &&
@@ -24497,7 +24577,7 @@ module.exports = {
 
 
 const { addErrorContext } = __nccwpck_require__(2935);
-const { filterByPredicate, getHtmlTagInfo, parse } =
+const { filterByPredicate, filterByTypes, getHtmlTagInfo, parse } =
 __nccwpck_require__(5673);
 
 module.exports = {
@@ -24505,21 +24585,17 @@ module.exports = {
 "description": "Bare URL used",
 "tags": [ "links", "url" ],
 "function": function MD034(params, onError) {
-const literalAutolinks = (tokens) => (
-filterByPredicate(
-tokens,
-(token) => token.type === "literalAutolink",
-(token) => {
-const { children } = token;
+const literalAutolinks = (tokens) => {
+const flattened = filterByPredicate(tokens, () => true);
 const result = [];
-for (let i = 0; i < children.length; i++) {
-const openToken = children[i];
-const openTagInfo = getHtmlTagInfo(openToken);
+for (let i = 0; i < flattened.length; i++) {
+const current = flattened[i];
+const openTagInfo = getHtmlTagInfo(current);
 if (openTagInfo && !openTagInfo.close) {
 let count = 1;
-for (let j = i + 1; j < children.length; j++) {
-const closeToken = children[j];
-const closeTagInfo = getHtmlTagInfo(closeToken);
+for (let j = i + 1; j < flattened.length; j++) {
+const candidate = flattened[j];
+const closeTagInfo = getHtmlTagInfo(candidate);
 if (closeTagInfo && (openTagInfo.name === closeTagInfo.name)) {
 if (closeTagInfo.close) {
 count--;
@@ -24533,14 +24609,16 @@ module.exports = {
 }
 }
 } else {
-result.push(openToken);
+result.push(current);
 }
 }
-return result;
-}
-)
+return result.filter((token) => token.type === "literalAutolink");
+};
+const autoLinks = filterByTypes(
+params.parsers.micromark.tokens,
+[ "literalAutolink" ]
 );
-if (literalAutolinks(params.parsers.micromark.tokens).length > 0) {
+if (autoLinks.length > 0) {
 // Re-parse with correct link/image reference definition handling
 const document = params.lines.join("\n");
 const tokens = parse(document, undefined, false);
@@ -24676,9 +24754,7 @@ module.exports = {
 
 
 const { addError } = __nccwpck_require__(2935);
-const emphasisStartTextRe = /^(\S{1,3})(\s+)\S/;
-const emphasisEndTextRe = /\S(\s+)(\S{1,3})$/;
+const { filterByPredicate } = __nccwpck_require__(5673);
 
 module.exports = {
 "names": [ "MD037", "no-space-in-emphasis" ],
@@ -24692,16 +24768,11 @@ module.exports = {
 for (const marker of [ "_", "__", "___", "*", "**", "***" ]) {
 emphasisTokensByMarker.set(marker, []);
 }
-const pending = [ ...parsers.micromark.tokens ];
-let token = null;
-while ((token = pending.shift())) {
-// Use reparsed children of htmlFlow tokens
-if (token.type === "htmlFlow") {
-pending.unshift(...token.htmlFlowChildren);
-continue;
-}
-pending.push(...token.children);
+const tokens = filterByPredicate(
+parsers.micromark.tokens,
+(token) => token.children.some((child) => child.type === "data")
+);
+for (const token of tokens) {
 
 // Build lists of bare tokens for each emphasis marker type
 for (const emphasisTokens of emphasisTokensByMarker.values()) {
@@ -24718,17 +24789,18 @@ module.exports = {
 }
 
 // Process bare tokens for each emphasis marker type
-for (const emphasisTokens of emphasisTokensByMarker.values()) {
+for (const entry of emphasisTokensByMarker.entries()) {
+const [ marker, emphasisTokens ] = entry;
 for (let i = 0; i + 1 < emphasisTokens.length; i += 2) {
 
 // Process start token of start/end pair
 const startToken = emphasisTokens[i];
-const startText =
-lines[startToken.startLine - 1].slice(startToken.startColumn - 1);
-const startMatch = startText.match(emphasisStartTextRe);
+const startLine = lines[startToken.startLine - 1];
+const startSlice = startLine.slice(startToken.endColumn - 1);
+const startMatch = startSlice.match(/^\s+\S/);
 if (startMatch) {
-const [ startContext, startMarker, startSpaces ] = startMatch;
-if ((startMarker === startToken.text) && (startSpaces.length > 0)) {
+const [ startSpaceCharacter ] = startMatch;
+const startContext = `${marker}${startSpaceCharacter}`;
 addError(
 onError,
 startToken.startLine,
@@ -24737,20 +24809,19 @@ module.exports = {
 [ startToken.startColumn, startContext.length ],
 {
 "editColumn": startToken.endColumn,
-"deleteCount": startSpaces.length
+"deleteCount": startSpaceCharacter.length - 1
 }
 );
 }
-}
 
 // Process end token of start/end pair
 const endToken = emphasisTokens[i + 1];
-const endText =
-lines[endToken.startLine - 1].slice(0, endToken.endColumn - 1);
-const endMatch = endText.match(emphasisEndTextRe);
+const endLine = lines[endToken.startLine - 1];
+const endSlice = endLine.slice(0, endToken.startColumn - 1);
+const endMatch = endSlice.match(/\S\s+$/);
 if (endMatch) {
-const [ endContext, endSpace, endMarker ] = endMatch;
-if ((endMarker === endToken.text) && (endSpace.length > 0)) {
+const [ endSpaceCharacter ] = endMatch;
+const endContext = `${endSpaceCharacter}${marker}`;
 addError(
 onError,
 endToken.startLine,
@@ -24758,8 +24829,9 @@ module.exports = {
 endContext,
 [ endToken.endColumn - endContext.length, endContext.length ],
 {
-"editColumn": endToken.startColumn - endSpace.length,
-"deleteCount": endSpace.length
+"editColumn":
+endToken.startColumn - (endSpaceCharacter.length - 1),
+"deleteCount": endSpaceCharacter.length - 1
 }
 );
 }
@@ -24767,7 +24839,6 @@ module.exports = {
 }
 }
 }
-}
 };
 
 
@@ -25120,8 +25191,11 @@ module.exports = {
 "tags": [ "headings", "headers" ],
 "function": function MD043(params, onError) {
 const requiredHeadings = params.config.headings || params.config.headers;
+if (!Array.isArray(requiredHeadings)) {
+// Nothing to check; avoid doing any work
+return;
+}
 const matchCase = params.config.match_case || false;
-if (Array.isArray(requiredHeadings)) {
 const levels = {};
 for (const level of [ 1, 2, 3, 4, 5, 6 ]) {
 levels["h" + level] = "######".substr(-level);
@@ -25167,7 +25241,6 @@ module.exports = {
 requiredHeadings[i]);
 }
 }
-}
 };
 
 
@@ -25181,7 +25254,7 @@ module.exports = {
 
 
 
-const { addErrorDetailIf, escapeForRegExp, newLineRe, withinAnyRange } =
+const { addErrorDetailIf, escapeForRegExp, withinAnyRange } =
 __nccwpck_require__(2935);
 const { filterByPredicate, filterByTypes, parse } =
 __nccwpck_require__(5673);
@@ -25198,46 +25271,32 @@ module.exports = {
 let names = params.config.names;
 names = Array.isArray(names) ? names : [];
 names.sort((a, b) => (b.length - a.length) || a.localeCompare(b));
+if (names.length === 0) {
+// Nothing to check; avoid doing any work
+return;
+}
 const codeBlocks = params.config.code_blocks;
 const includeCodeBlocks =
 (codeBlocks === undefined) ? true : !!codeBlocks;
 const htmlElements = params.config.html_elements;
 const includeHtmlElements =
 (htmlElements === undefined) ? true : !!htmlElements;
-const scannedTypes = new Set([ "data", "htmlFlowData" ]);
+const scannedTypes = new Set([ "data" ]);
 if (includeCodeBlocks) {
 scannedTypes.add("codeFlowValue");
 scannedTypes.add("codeTextData");
 }
-const tokenAdjustments = new Map();
+if (includeHtmlElements) {
+scannedTypes.add("htmlFlowData");
+scannedTypes.add("htmlTextData");
+}
 const contentTokens =
 filterByPredicate(
 params.parsers.micromark.tokens,
 (token) => scannedTypes.has(token.type),
-(token) => {
-let { children } = token;
-const { startLine, text } = token;
-if (!includeHtmlElements && (token.type === "htmlFlow")) {
-if (text.startsWith("<!--")) {
-// Remove comment content
-children = [];
-} else {
-// Re-parse to get htmlText elements for detailed tokenization
-const htmlTextLines =
-`<md044>\n${text}\n</md044>`.split(newLineRe);
-children = parse(htmlTextLines.join(""));
-const reTokens = [ ...children ];
-for (const reToken of reTokens) {
-tokenAdjustments.set(reToken, {
-htmlTextLines,
-startLine
-});
-reTokens.push(...reToken.children);
-}
-}
-}
-return children.filter((t) => !ignoredChildTypes.has(t.type));
-}
+(token) => (
+token.children.filter((t) => !ignoredChildTypes.has(t.type))
+)
 );
 const exclusions = [];
 const autoLinked = new Set();
@@ -25275,22 +25334,10 @@ module.exports = {
 autoLinked.add(token);
 }
 if (!withinAnyRange(urlRanges, lineIndex, index, length)) {
-let lineNumber = token.startLine;
-let column = index;
-if (tokenAdjustments.has(token)) {
-const { htmlTextLines, startLine } =
-tokenAdjustments.get(token);
-let lineDelta = 0;
-while (htmlTextLines[lineDelta].length <= column) {
-column -= htmlTextLines[lineDelta].length;
-lineDelta++;
-}
-lineNumber = startLine + lineDelta - 1;
-}
-column++;
+const column = index + 1;
 addErrorDetailIf(
 onError,
-lineNumber,
+token.startLine,
 name,
 nameMatch,
 null,
@@ -25476,15 +25523,18 @@ module.exports = {
 
 
 const { addError, emphasisOrStrongStyleFor } = __nccwpck_require__(2935);
-const { filterByTypes, tokenIfType } = __nccwpck_require__(5673);
+const { filterByPredicate, tokenIfType } = __nccwpck_require__(5673);
 
 const intrawordRe = /\w/;
 
 const impl =
 (params, onError, type, asterisk, underline, style = "consistent") => {
 const { lines, parsers } = params;
-const emphasisTokens =
-filterByTypes(parsers.micromark.tokens, [ type ]);
+const emphasisTokens = filterByPredicate(
+parsers.micromark.tokens,
+(token) => token.type === type,
+(token) => ((token.type === "htmlFlow") ? [] : token.children)
+);
 for (const token of emphasisTokens) {
 const { children } = token;
 const childType = `${type}Sequence`;
@@ -25570,7 +25620,7 @@ module.exports = [
 
 
 const { addError, addErrorDetailIf } = __nccwpck_require__(2935);
-const { filterByHtmlTokens, filterByTypes, getHtmlTagInfo } =
+const { filterByPredicate, filterByTypes, getHtmlTagInfo } =
 __nccwpck_require__(5673);
 
 // Regular expression for identifying HTML anchor names
@@ -25578,16 +25628,28 @@ const idRe = /\sid\s*=\s*['"]?([^'"\s>]+)/iu;
 const nameRe = /\sname\s*=\s*['"]?([^'"\s>]+)/iu;
 const anchorRe = /\{(#[a-z\d]+(?:[-_][a-z\d]+)*)\}/gu;
 
+// Sets for filtering heading tokens during conversion
+const childrenExclude = new Set([ "image", "reference", "resource" ]);
+const tokensInclude = new Set([ "codeTextData", "data" ]);
+
+/**
+* @typedef {import("../helpers/micromark.cjs").Token} Token
+*/
+
 /**
 * Converts a Markdown heading into an HTML fragment according to the rules
 * used by GitHub.
 *
-* @param {Object} headingText Heading text token.
+* @param {Token} headingText Heading text token.
 * @returns {string} Fragment string for heading.
 */
 function convertHeadingToHTMLFragment(headingText) {
 const inlineText =
-filterByTypes(headingText.children, [ "codeTextData", "data" ])
+filterByPredicate(
+headingText.children,
+(token) => tokensInclude.has(token.type),
+(token) => (childrenExclude.has(token.type) ? [] : token.children)
+)
 .map((token) => token.text)
 .join("");
 return "#" + encodeURIComponent(
@@ -25605,6 +25667,18 @@ function convertHeadingToHTMLFragment(headingText) {
 );
 }
 
+/**
+* Unescapes the text of a String-type micromark Token.
+*
+* @param {Token} token String-type micromark Token.
+* @returns {string} Unescaped token text.
+*/
+function unescapeStringTokenText(token) {
+return filterByTypes(token.children, [ "characterEscapeValue", "data" ])
+.map((child) => child.text)
+.join("");
+}
+
 module.exports = {
 "names": [ "MD051", "link-fragments" ],
 "description": "Link fragments should be valid",
@@ -25620,6 +25694,7 @@ module.exports = {
 );
 for (const headingText of headingTexts) {
 const fragment = convertHeadingToHTMLFragment(headingText);
+if (fragment !== "#") {
 const count = fragments.get(fragment) || 0;
 if (count) {
 fragments.set(`${fragment}-${count}`, 0);
@@ -25633,9 +25708,10 @@ module.exports = {
 }
 }
 }
+}
 
 // Process HTML anchors
-for (const token of filterByHtmlTokens(tokens)) {
+for (const token of filterByTypes(tokens, [ "htmlText" ])) {
 const htmlTagInfo = getHtmlTagInfo(token);
 if (htmlTagInfo && !htmlTagInfo.close) {
 const anchorMatch = idRe.exec(token.text) ||
@@ -25656,10 +25732,13 @@ module.exports = {
 for (const link of links) {
 const definitions = filterByTypes(link.children, [ definitionType ]);
 for (const definition of definitions) {
+const { endColumn, startColumn } = definition;
+const text = unescapeStringTokenText(definition);
 if (
-(definition.text.length > 1) &&
-definition.text.startsWith("#") &&
-!fragments.has(definition.text)
+(text.length > 1) &&
+text.startsWith("#") &&
+!fragments.has(text) &&
+!fragments.has(`#${encodeURIComponent(text.slice(1))}`)
 ) {
 // eslint-disable-next-line no-undef-init
 let context = undefined;
@@ -25671,13 +25750,13 @@ module.exports = {
 context = link.text;
 range = [ link.startColumn, link.endColumn - link.startColumn ];
 fixInfo = {
-"editColumn": definition.startColumn,
-"deleteCount": definition.endColumn - definition.startColumn
+"editColumn": startColumn,
+"deleteCount": endColumn - startColumn
 };
 }
-const definitionTextLower = definition.text.toLowerCase();
+const textLower = text.toLowerCase();
 const mixedCaseKey = [ ...fragments.keys() ]
-.find((key) => definitionTextLower === key.toLowerCase());
+.find((key) => textLower === key.toLowerCase());
 if (mixedCaseKey) {
 // @ts-ignore
 (fixInfo || {}).insertText = mixedCaseKey;
@@ -25685,7 +25764,7 @@ module.exports = {
 onError,
 link.startLine,
 mixedCaseKey,
-definition.text,
+text,
 undefined,
 context,
 range,
@@ -25727,10 +25806,14 @@ module.exports = {
 "Reference links and images should use a label that is defined",
 "tags": [ "images", "links" ],
 "function": function MD052(params, onError) {
-const { lines } = params;
-const { references, definitions } = referenceLinkImageData();
+const { config, lines } = params;
+const shortcutSyntax = config.shortcut_syntax || false;
+const { definitions, references, shortcuts } = referenceLinkImageData();
+const entries = shortcutSyntax ?
+[ ...references.entries(), ...shortcuts.entries() ] :
+references.entries();
 // Look for links/images that use an undefined link reference
-for (const reference of references.entries()) {
+for (const reference of entries) {
 const [ label, datas ] = reference;
 if (!definitions.has(label)) {
 for (const data of datas) {
@@ -28508,6 +28591,8 @@ function debug(logLevel, ...messages) {
 }
 function warn(logLevel, warning) {
 if (logLevel === 'debug' || logLevel === 'warn') {
+// https://github.com/typescript-eslint/typescript-eslint/issues/7478
+// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
 if (typeof process !== 'undefined' && process.emitWarning)
 process.emitWarning(warning);
 else
@@ -29289,7 +29374,7 @@ function stringifyKey(key, jsKey, ctx) {
 return '';
 if (typeof jsKey !== 'object')
 return String(jsKey);
-if (identity.isNode(key) && ctx && ctx.doc) {
+if (identity.isNode(key) && ctx?.doc) {
 const strCtx = stringify.createStringifyContext(ctx.doc, {});
 strCtx.anchors = new Set();
 for (const node of ctx.anchors.keys())
@@ -32668,8 +32753,9 @@ function createPairs(schema, iterable, ctx) {
 key = keys[0];
 value = it[key];
 }
-else
-throw new TypeError(`Expected { key: value } tuple: ${it}`);
+else {
+throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`);
+}
 }
 else {
 key = it;
@@ -33344,7 +33430,7 @@ function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemInden
 if (iv.commentBefore)
 reqNewline = true;
 }
-else if (item.value == null && ik && ik.comment) {
+else if (item.value == null && ik?.comment) {
 comment = ik.comment;
 }
 }
@@ -33970,7 +34056,7 @@ function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
 function plainString(item, ctx, onComment, onChompKeep) {
 const { type, value } = item;
 const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;
-if ((implicitKey && /[\n[\]{},]/.test(value)) ||
+if ((implicitKey && value.includes('\n')) ||
 (inFlow && /[[\]{},]/.test(value))) {
 return quotedString(value, ctx);
 }
@@ -34327,6 +34413,8 @@ const {
 } = __nccwpck_require__(4117);
 const { newLineRe } = __nccwpck_require__(3253);
 
+const flatTokensSymbol = Symbol("flat-tokens");
+
 /**
 * Markdown token.
 *
@@ -34338,9 +34426,32 @@ const { newLineRe } = __nccwpck_require__(3253);
 * @property {number} endColumn End column (1-based).
 * @property {string} text Token text.
 * @property {Token[]} children Child tokens.
-* @property {Token[]} [htmlFlowChildren] Child tokens for htmlFlow.
 */
 
+/**
+* Returns whether a token is an htmlFlow type containing an HTML comment.
+*
+* @param {Token} token Micromark token.
+* @returns {boolean} True iff token is htmlFlow containing a comment.
+*/
+function isHtmlFlowComment(token) {
+const { text, type } = token;
+if (
+(type === "htmlFlow") &&
+text.startsWith("<!--") &&
+text.endsWith("-->")
+) {
+const comment = text.slice(4, -3);
+return (
+!comment.startsWith(">") &&
+!comment.startsWith("->") &&
+!comment.endsWith("-") &&
+!comment.includes("--")
+);
+}
+return false;
+}
+
 /**
 * Parses a Markdown document and returns Micromark events.
 *
|
||||||
|
|
||||||
// Create Token objects
|
// Create Token objects
|
||||||
const document = [];
|
const document = [];
|
||||||
|
let flatTokens = [];
|
||||||
let current = {
|
let current = {
|
||||||
"children": document
|
"children": document
|
||||||
};
|
};
|
||||||
const history = [ current ];
|
const history = [ current ];
|
||||||
let reparseOptions = null;
|
let reparseOptions = null;
|
||||||
let lines = null;
|
let lines = null;
|
||||||
|
let skipHtmlFlowChildren = false;
|
||||||
for (const event of events) {
|
for (const event of events) {
|
||||||
const [ kind, token, context ] = event;
|
const [ kind, token, context ] = event;
|
||||||
const { type, start, end } = token;
|
const { type, start, end } = token;
|
||||||
const { "column": startColumn, "line": startLine } = start;
|
const { "column": startColumn, "line": startLine } = start;
|
||||||
const { "column": endColumn, "line": endLine } = end;
|
const { "column": endColumn, "line": endLine } = end;
|
||||||
const text = context.sliceSerialize(token);
|
const text = context.sliceSerialize(token);
|
||||||
if (kind === "enter") {
|
if ((kind === "enter") && !skipHtmlFlowChildren) {
|
||||||
const previous = current;
|
const previous = current;
|
||||||
history.push(previous);
|
history.push(previous);
|
||||||
current = {
|
current = {
|
||||||
|
@ -34423,7 +34536,11 @@ function micromarkParseWithOffset(
|
||||||
text,
|
text,
|
||||||
"children": []
|
"children": []
|
||||||
};
|
};
|
||||||
if (current.type === "htmlFlow") {
|
previous.children.push(current);
|
||||||
|
flatTokens.push(current);
|
||||||
|
// @ts-ignore
|
||||||
|
if ((current.type === "htmlFlow") && !isHtmlFlowComment(current)) {
|
||||||
|
skipHtmlFlowChildren = true;
|
||||||
if (!reparseOptions || !lines) {
|
if (!reparseOptions || !lines) {
|
||||||
reparseOptions = {
|
reparseOptions = {
|
||||||
...micromarkOptions,
|
...micromarkOptions,
|
||||||
|
@ -34440,23 +34557,32 @@ function micromarkParseWithOffset(
|
||||||
const reparseMarkdown = lines
|
const reparseMarkdown = lines
|
||||||
.slice(current.startLine - 1, current.endLine)
|
.slice(current.startLine - 1, current.endLine)
|
||||||
.join("\n");
|
.join("\n");
|
||||||
current.htmlFlowChildren = micromarkParseWithOffset(
|
const tokens = micromarkParseWithOffset(
|
||||||
reparseMarkdown,
|
reparseMarkdown,
|
||||||
reparseOptions,
|
reparseOptions,
|
||||||
referencesDefined,
|
referencesDefined,
|
||||||
current.startLine - 1
|
current.startLine - 1
|
||||||
);
|
);
|
||||||
|
current.children = tokens;
|
||||||
|
// Avoid stack overflow of Array.push(...spread)
|
||||||
|
// eslint-disable-next-line unicorn/prefer-spread
|
||||||
|
flatTokens = flatTokens.concat(tokens[flatTokensSymbol]);
|
||||||
}
|
}
|
||||||
previous.children.push(current);
|
|
||||||
} else if (kind === "exit") {
|
} else if (kind === "exit") {
|
||||||
|
if (type === "htmlFlow") {
|
||||||
|
skipHtmlFlowChildren = false;
|
||||||
|
}
|
||||||
|
if (!skipHtmlFlowChildren) {
|
||||||
Object.freeze(current.children);
|
Object.freeze(current.children);
|
||||||
Object.freeze(current);
|
Object.freeze(current);
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
current = history.pop();
|
current = history.pop();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Return document
|
// Return document
|
||||||
|
Object.defineProperty(document, flatTokensSymbol, { "value": flatTokens });
|
||||||
Object.freeze(document);
|
Object.freeze(document);
|
||||||
return document;
|
return document;
|
||||||
}
|
}
|
||||||
|
@ -34482,26 +34608,55 @@ function micromarkParse(
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @callback AllowedPredicate
|
||||||
|
* @param {Token} token Micromark token.
|
||||||
|
* @returns {boolean} True iff allowed.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @callback TransformPredicate
|
||||||
|
* @param {Token} token Micromark token.
|
||||||
|
* @returns {Token[]} Child tokens.
|
||||||
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Filter a list of Micromark tokens by predicate.
|
* Filter a list of Micromark tokens by predicate.
|
||||||
*
|
*
|
||||||
* @param {Token[]} tokens Micromark tokens.
|
* @param {Token[]} tokens Micromark tokens.
|
||||||
* @param {Function} allowed Allowed token predicate.
|
* @param {AllowedPredicate} allowed Allowed token predicate.
|
||||||
* @param {Function} [transformChildren] Transform children predicate.
|
* @param {TransformPredicate} [transformChildren] Transform predicate.
|
||||||
* @returns {Token[]} Filtered tokens.
|
* @returns {Token[]} Filtered tokens.
|
||||||
*/
|
*/
|
||||||
function filterByPredicate(tokens, allowed, transformChildren) {
|
function filterByPredicate(tokens, allowed, transformChildren) {
|
||||||
const result = [];
|
const result = [];
|
||||||
const pending = [ ...tokens ];
|
const queue = [
|
||||||
let token = null;
|
{
|
||||||
while ((token = pending.shift())) {
|
"array": tokens,
|
||||||
|
"index": 0
|
||||||
|
}
|
||||||
|
];
|
||||||
|
while (queue.length > 0) {
|
||||||
|
const current = queue[queue.length - 1];
|
||||||
|
const { array, index } = current;
|
||||||
|
if (index < array.length) {
|
||||||
|
const token = array[current.index++];
|
||||||
if (allowed(token)) {
|
if (allowed(token)) {
|
||||||
result.push(token);
|
result.push(token);
|
||||||
}
|
}
|
||||||
if (token.children.length > 0) {
|
const { children } = token;
|
||||||
|
if (children.length > 0) {
|
||||||
const transformed =
|
const transformed =
|
||||||
transformChildren ? transformChildren(token) : token.children;
|
transformChildren ? transformChildren(token) : children;
|
||||||
pending.unshift(...transformed);
|
queue.push(
|
||||||
|
{
|
||||||
|
"array": transformed,
|
||||||
|
"index": 0
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
queue.pop();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
|
@ -34515,51 +34670,12 @@ function filterByPredicate(tokens, allowed, transformChildren) {
|
||||||
* @returns {Token[]} Filtered tokens.
|
* @returns {Token[]} Filtered tokens.
|
||||||
*/
|
*/
|
||||||
function filterByTypes(tokens, allowed) {
|
function filterByTypes(tokens, allowed) {
|
||||||
return filterByPredicate(
|
const predicate = (token) => allowed.includes(token.type);
|
||||||
tokens,
|
const flatTokens = tokens[flatTokensSymbol];
|
||||||
(token) => allowed.includes(token.type)
|
if (flatTokens) {
|
||||||
);
|
return flatTokens.filter(predicate);
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter a list of Micromark tokens for HTML tokens.
|
|
||||||
*
|
|
||||||
* @param {Token[]} tokens Micromark tokens.
|
|
||||||
* @returns {Token[]} Filtered tokens.
|
|
||||||
*/
|
|
||||||
function filterByHtmlTokens(tokens) {
|
|
||||||
const result = [];
|
|
||||||
const pending = [ tokens ];
|
|
||||||
let current = null;
|
|
||||||
while ((current = pending.shift())) {
|
|
||||||
for (const token of filterByTypes(current, [ "htmlFlow", "htmlText" ])) {
|
|
||||||
if (token.type === "htmlText") {
|
|
||||||
result.push(token);
|
|
||||||
} else {
|
|
||||||
// token.type === "htmlFlow"
|
|
||||||
// @ts-ignore
|
|
||||||
pending.push(token.htmlFlowChildren);
|
|
||||||
}
|
}
|
||||||
}
|
return filterByPredicate(tokens, predicate);
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a list of all nested child tokens.
|
|
||||||
*
|
|
||||||
* @param {Token} parent Micromark token.
|
|
||||||
* @returns {Token[]} Flattened children.
|
|
||||||
*/
|
|
||||||
function flattenedChildren(parent) {
|
|
||||||
const result = [];
|
|
||||||
const pending = [ ...parent.children ];
|
|
||||||
let token = null;
|
|
||||||
while ((token = pending.shift())) {
|
|
||||||
result.push(token);
|
|
||||||
pending.unshift(...token.children);
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -34655,10 +34771,8 @@ function tokenIfType(token, type) {
 
 module.exports = {
 "parse": micromarkParse,
-filterByHtmlTokens,
 filterByPredicate,
 filterByTypes,
-flattenedChildren,
 getHeadingLevel,
 getHtmlTagInfo,
 getMicromarkEvents,
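
The most substantial change in this bundle refresh is the rework of the micromark token helpers: filterByPredicate moves from a shift()/unshift() pending queue to an explicit index-based stack, and filterByTypes now prefers a flat token list cached on the parse result. Below is a minimal standalone JavaScript sketch of that traversal pattern; the simplified token objects are assumptions for illustration, and this is not the bundle's exported API.

// Sketch of the stack-based tree filter introduced above (simplified tokens).
function filterByPredicate(tokens, allowed, transformChildren) {
  const result = [];
  const queue = [ { "array": tokens, "index": 0 } ];
  while (queue.length > 0) {
    const current = queue[queue.length - 1];
    const { array, index } = current;
    if (index < array.length) {
      // Advance the cursor for this level instead of mutating the arrays
      const token = array[current.index++];
      if (allowed(token)) {
        result.push(token);
      }
      const { children } = token;
      if (children.length > 0) {
        const transformed =
          transformChildren ? transformChildren(token) : children;
        queue.push({ "array": transformed, "index": 0 });
      }
    } else {
      queue.pop();
    }
  }
  return result;
}

// Usage example with a tiny hand-built token tree: collects 2 "data" tokens.
const tree = [
  {
    "type": "paragraph",
    "children": [
      { "type": "data", "children": [] },
      { "type": "emphasis", "children": [ { "type": "data", "children": [] } ] }
    ]
  }
];
console.log(filterByPredicate(tree, (token) => token.type === "data").length);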